From bcb90b0d7b8d167b52474e86ca549e4cf61504ca Mon Sep 17 00:00:00 2001
From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com>
Date: Fri, 25 Oct 2024 14:01:17 -0400
Subject: [PATCH] ref: upgrade black to 24.10.0 (#79590)

getsentry change must merge first to minimize PR interruption:
https://github.com/getsentry/getsentry/pull/15464

note that the getsentry PR takes a different approach such that the
versions can be properly synchronized with the code
---
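[reviewer note, kept between the "---" marker and the diffstat so git-am ignores
it: almost every hunk below is mechanical reformatting produced by the new black
version, not a behavior change. A minimal sketch of the two transformations that
dominate this diff -- the names (config, compute_value, handler) are hypothetical,
chosen only for illustration:

    # old style (black 22.x): long subscripted assignments split inside the
    # left-hand bracket, and stub bodies put "..." on its own line
    config[
        "some_very_long_key_name"
    ] = compute_value(first_argument, second_argument)

    def handler(data: dict) -> dict | None:
        ...

    # new style (black 24.x): the right-hand side is wrapped in parentheses
    # instead, and dummy implementations collapse onto the def line
    config["some_very_long_key_name"] = (
        compute_value(first_argument, second_argument)
    )

    def handler(data: dict) -> dict | None: ...

The remaining hunks follow the same release's smaller rules, all visible below:
one blank line enforced after module docstrings, redundant parentheses dropped
from "for (a, b) in ..." loop targets, and long conditional expressions wrapped
in parentheses.]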
 bin/extension_language_map.py | 1 -
 bin/mock-replay | 2 +-
 requirements-dev-frozen.txt | 2 +-
 requirements-dev.txt | 2 +-
 src/flagpole/__init__.py | 1 +
 src/sentry/api/base.py | 6 +-
 .../api/endpoints/organization_events.py | 36 ++++----
 .../endpoints/organization_events_stats.py | 16 ++--
 .../endpoints/organization_events_trends.py | 56 +++++++-----
 .../source_map_debug_blue_thunder_edition.py | 30 +++----
 src/sentry/api/serializers/models/activity.py | 16 ++--
 .../api/serializers/models/artifactbundle.py | 8 +-
 .../api/serializers/models/auditlogentry.py | 8 +-
 .../api/serializers/models/orgauthtoken.py | 6 +-
 src/sentry/api/serializers/models/plugin.py | 6 +-
 src/sentry/api/serializers/snuba.py | 18 ++--
 src/sentry/api/utils.py | 12 +--
 src/sentry/conf/server.py | 6 +-
 src/sentry/consumers/synchronized.py | 6 +-
 src/sentry/datascrubbing.py | 6 +-
 .../models/fields/hybrid_cloud_foreign_key.py | 1 +
 src/sentry/db/models/fields/jsonfield.py | 1 +
 src/sentry/db/models/fields/text.py | 1 -
 src/sentry/db/models/fields/uuid.py | 1 +
 src/sentry/db/models/manager/base.py | 28 +++---
 src/sentry/db/models/utils.py | 6 +-
 src/sentry/db/postgres/transactions.py | 6 +-
 src/sentry/deletions/__init__.py | 1 +
 src/sentry/deletions/tasks/hybrid_cloud.py | 3 +-
 .../tasks/boost_low_volume_transactions.py | 6 +-
 src/sentry/dynamic_sampling/tasks/common.py | 9 +-
 .../helpers/boost_low_volume_projects.py | 6 +-
 src/sentry/event_manager.py | 8 +-
 src/sentry/eventstore/base.py | 6 +-
 src/sentry/eventstore/compressor.py | 1 +
 src/sentry/eventstore/models.py | 6 +-
 src/sentry/eventstore/snuba/backend.py | 6 +-
 src/sentry/filestore/s3.py | 1 +
 src/sentry/grouping/strategies/base.py | 6 +-
 src/sentry/incidents/models/alert_rule.py | 3 +-
 src/sentry/ingest/slicing.py | 3 +-
 .../ingest/transaction_clusterer/rules.py | 6 +-
 src/sentry/integrations/gitlab/webhooks.py | 6 +-
 .../integrations/repository/__init__.py | 1 +
 src/sentry/integrations/slack/unfurl/types.py | 3 +-
 src/sentry/integrations/utils/sync.py | 6 +-
 src/sentry/issues/json_schemas.py | 2 +-
 src/sentry/issues/search.py | 6 +-
 src/sentry/middleware/ratelimit.py | 12 +--
 src/sentry/models/files/abstractfile.py | 15 ++--
 src/sentry/models/files/abstractfileblob.py | 6 +-
 src/sentry/models/releasefile.py | 1 +
 src/sentry/monitoring/queues.py | 12 +--
 .../monitors/clock_tasks/check_timeout.py | 5 +-
 .../notifications/notificationcontroller.py | 12 +--
 .../notifications/notifications/rules.py | 6 +-
 .../notifications/user_report.py | 2 +-
 src/sentry/plugins/base/manager.py | 9 +-
 src/sentry/plugins/base/v2.py | 3 +-
 src/sentry/profiles/flamegraph.py | 30 +++----
 src/sentry/receivers/outbox/__init__.py | 1 +
 src/sentry/relay/config/measurements.py | 1 -
 .../release_health/metrics_sessions_v2.py | 6 +-
 ...roject_replay_recording_segment_details.py | 6 +-
 src/sentry/replays/lib/event_linking.py | 8 +-
 src/sentry/replays/lib/http.py | 3 +-
 src/sentry/replays/lib/new_query/fields.py | 3 +-
 src/sentry/replays/lib/new_query/parsers.py | 1 +
 src/sentry/replays/lib/new_query/utils.py | 1 +
 src/sentry/replays/lib/query.py | 1 +
 .../usecases/query/conditions/error_ids.py | 1 +
 .../usecases/query/conditions/selector.py | 1 +
 .../usecases/query/configs/aggregate_sort.py | 1 +
 src/sentry/reprocessing2.py | 7 +-
 src/sentry/runner/commands/devservices.py | 12 +--
 .../commands/presenters/webhookpresenter.py | 4 +-
 src/sentry/runner/default_settings.py | 1 +
 src/sentry/search/eap/columns.py | 12 ++-
 .../events/builder/profile_functions.py | 9 +-
 src/sentry/search/events/builder/profiles.py | 9 +-
 .../search/events/builder/spans_indexed.py | 6 +-
 src/sentry/search/events/datasets/metrics.py | 8 +-
 src/sentry/search/events/fields.py | 12 +--
 src/sentry/search/events/types.py | 6 +-
 src/sentry/seer/similarity/utils.py | 4 +-
 .../models/sentry_app_installation.py | 12 ++-
 src/sentry/sentry_apps/models/servicehook.py | 8 +-
 src/sentry/sentry_apps/services/app/model.py | 12 +--
 src/sentry/sentry_metrics/client/base.py | 3 -
 src/sentry/sentry_metrics/client/kafka.py | 1 -
 src/sentry/sentry_metrics/client/snuba.py | 4 -
 .../consumers/indexer/slicing_router.py | 4 +-
 src/sentry/sentry_metrics/indexer/mock.py | 7 +-
 .../data/transformation/metrics_api.py | 6 +-
 src/sentry/shared_integrations/client/base.py | 6 +-
 .../exceptions/__init__.py | 3 +-
 src/sentry/snuba/metrics/datasource.py | 16 ++--
 src/sentry/snuba/metrics/utils.py | 12 +--
 src/sentry/snuba/metrics_performance.py | 6 +-
 .../snuba/query_subscriptions/consumer.py | 11 ++-
 src/sentry/snuba/sessions_v2.py | 8 +-
 src/sentry/stacktraces/processing.py | 4 +-
 src/sentry/statistical_detectors/algorithm.py | 3 +-
 src/sentry/statistical_detectors/base.py | 18 ++--
 src/sentry/statistical_detectors/detector.py | 12 +--
 src/sentry/statistical_detectors/store.py | 6 +-
 src/sentry/tasks/check_am2_compatibility.py | 6 +-
 ...kfill_seer_grouping_records_for_project.py | 6 +-
 src/sentry/tasks/summaries/utils.py | 6 +-
 src/sentry/testutils/helpers/link_header.py | 1 +
 src/sentry/testutils/helpers/options.py | 5 +-
 src/sentry/testutils/hybrid_cloud.py | 6 +-
 .../performance_issues/event_generators.py | 2 +-
 src/sentry/testutils/pytest/fixtures.py | 3 +-
 src/sentry/types/actor.py | 12 +--
 src/sentry/utils/arroyo.py | 6 +-
 src/sentry/utils/avatar.py | 1 +
 src/sentry/utils/circuit_breaker2.py | 24 ++----
 src/sentry/utils/cursors.py | 3 +-
 src/sentry/utils/dates.py | 6 +-
 src/sentry/utils/http.py | 6 +-
 src/sentry/utils/json.py | 6 +-
 src/sentry/utils/jwt.py | 1 +
 src/sentry/utils/lazy_service_wrapper.py | 1 +
 src/sentry/utils/options.py | 1 -
 src/sentry/utils/redis.py | 12 ++-
 .../utils/sentry_apps/request_buffer.py | 6 +-
 src/sentry/utils/snuba_rpc.py | 6 +-
 src/sentry/utils/strings.py | 6 +-
 src/sentry/utils/types.py | 18 ++--
 src/sentry/web/frontend/base.py | 3 +-
 src/sentry/web/frontend/debug/mail.py | 6 +-
 src/sentry/web/frontend/error_page_embed.py | 6 +-
 src/sentry_plugins/client.py | 6 +-
 src/social_auth/backends/__init__.py | 1 +
 src/social_auth/views.py | 1 -
 src/sudo/forms.py | 1 +
 src/sudo/middleware.py | 1 +
 src/sudo/models.py | 1 +
 src/sudo/settings.py | 1 +
 src/sudo/signals.py | 1 +
 src/sudo/utils.py | 1 +
 src/sudo/views.py | 1 +
 tests/acceptance/test_account_settings.py | 10 ++-
 tests/acceptance/test_organization_switch.py | 5 +-
 .../test_accept_organization_invite.py | 5 +-
 .../api/endpoints/test_organization_traces.py | 4 +-
 .../api/endpoints/test_project_codeowners.py | 6 +-
 tests/sentry/api/helpers/test_group_index.py | 20 +++--
 tests/sentry/attachments/test_redis.py | 6 +-
 tests/sentry/auth/test_helper.py | 5 +-
 tests/sentry/charts/test_chartcuterie.py | 5 +-
 tests/sentry/consumers/test_synchronized.py | 86 +++++++++++--------
 tests/sentry/db/models/fields/test_slug.py | 4 +-
 .../db/models/manager/test_base_query_set.py | 30 ++++---
 .../dynamic_sampling/tasks/test_tasks.py | 2 +-
 tests/sentry/event_manager/test_severity.py | 5 +-
 .../feedback/usecases/test_create_feedback.py | 17 ++--
 .../grouping/test_builtin_fingerprinting.py | 6 +-
 tests/sentry/helpers/test_deprecation.py | 57 +++++++-----
 .../test_control_organization_provisioning.py | 21 +++--
 tests/sentry/hybridcloud/test_region.py | 10 ++-
 .../test_organization_alert_rule_details.py | 6 +-
 .../incidents/endpoints/test_serializers.py | 5 +-
 .../incidents/test_subscription_processor.py | 14 +--
 .../ingest/test_transaction_clusterer.py | 5 +-
 .../endpoints/test_doc_integration_avatar.py | 10 ++-
 .../discord/test_message_builder.py | 22 ++---
 .../sentry/integrations/jira/test_webhooks.py | 6 +-
 tests/sentry/issues/test_escalating.py | 8 +-
 tests/sentry/lang/native/test_processing.py | 1 +
 .../integrations/parsers/test_jira_server.py | 13 +--
 .../integrations/test_integration_control.py | 11 +--
 .../test_organizationslugreservation.py | 6 +-
 tests/sentry/models/test_projectkey.py | 5 +-
 tests/sentry/nodestore/test_common.py | 1 +
 tests/sentry/quotas/test_base.py | 18 ++--
 .../processing/test_delayed_processing.py | 6 +-
 .../sentry/runner/commands/test_migrations.py | 11 ++-
 tests/sentry/seer/similarity/test_utils.py | 6 +-
 .../test_sentry_app_installation_creator.py | 5 +-
 tests/sentry/sentry_metrics/test_snuba.py | 1 -
 .../shared_integrations/client/test_proxy.py | 15 ++--
 tests/sentry/silo/test_base.py | 2 +-
 tests/sentry/silo/test_client.py | 68 ++++++++-------
 .../test_metrics_layer/test_release_health.py | 1 +
 tests/sentry/tasks/test_post_process.py | 3 +-
 .../consumers/test_results_consumers.py | 7 +-
 tests/sentry/uptime/detectors/test_ranking.py | 5 +-
 .../sentry/uptime/subscriptions/test_tasks.py | 10 ++-
 tests/sentry/uptime/test_models.py | 15 ++--
 tests/sentry/utils/locking/test_lock.py | 7 +-
 .../utils/sdk_crashes/test_event_stripper.py | 18 ++--
 tests/sentry/utils/test_json.py | 14 ++-
 .../test_vercel_extension_configuration.py | 5 +-
 tests/sentry/web/test_client_config.py | 5 +-
 .../test_discover_key_transactions.py | 5 +-
 .../endpoints/test_organization_sessions.py | 10 ++-
 tests/symbolicator/test_payload_full.py | 5 +-
 tests/symbolicator/test_unreal_full.py | 5 +-
 200 files changed, 848 insertions(+), 778 deletions(-)

diff --git a/bin/extension_language_map.py b/bin/extension_language_map.py
index afa912cffcdbdd..3366fef414840b 100644
--- a/bin/extension_language_map.py
+++ b/bin/extension_language_map.py
@@ -2,7 +2,6 @@
 Generate a mapping from file extensions to language for languages that are part
 of platforms supported by Sentry.
""" - import requests import yaml diff --git a/bin/mock-replay b/bin/mock-replay index 774e2fddf1e9d8..999cc3f7e40eb5 100755 --- a/bin/mock-replay +++ b/bin/mock-replay @@ -73,7 +73,7 @@ def create_recording(replay_id, project_id, timestamp): 24, ), ] - for (segment_id, segment) in enumerate(segments): + for segment_id, segment in enumerate(segments): store_replay_segments(replay_id, project_id, segment_id, segment) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 714bb8fd3608f6..103bc8390a4be9 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -13,7 +13,7 @@ attrs==23.1.0 avalara==20.9.0 beautifulsoup4==4.7.1 billiard==4.2.0 -black==22.10.0 +black==24.10.0 boto3==1.34.128 botocore==1.34.128 brotli==1.1.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index 2b4d2aec1c7e67..99c8b3c3299270 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -22,7 +22,7 @@ sentry-cli>=2.16.0 # pre-commit dependencies pre-commit>=4 -black>=22.10.0 +black>=24.10.0 flake8>=7 flake8-bugbear>=22.10 flake8-logging>=1.5 diff --git a/src/flagpole/__init__.py b/src/flagpole/__init__.py index 7018eaf175db7e..57a1bb7c3be006 100644 --- a/src/flagpole/__init__.py +++ b/src/flagpole/__init__.py @@ -59,6 +59,7 @@ Each condition has a single operator. An operator takes a kind (`OperatorKind` enum) and a value, the type of which depends on the operator specified. """ + from __future__ import annotations import dataclasses diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 52b49e991ea336..f71c6b2e4f9465 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -153,9 +153,9 @@ def apply_cors_headers( "Content-Type, Authentication, Authorization, Content-Encoding, " "sentry-trace, baggage, X-CSRFToken" ) - response[ - "Access-Control-Expose-Headers" - ] = "X-Sentry-Error, X-Sentry-Direct-Hit, X-Hits, X-Max-Hits, Endpoint, Retry-After, Link" + response["Access-Control-Expose-Headers"] = ( + "X-Sentry-Error, X-Sentry-Direct-Hit, X-Hits, X-Max-Hits, Endpoint, Retry-After, Link" + ) if request.META.get("HTTP_ORIGIN") == "null": # if ORIGIN header is explicitly specified as 'null' leave it alone diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 3930d2130239fa..1fb6029fc8aad3 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -496,16 +496,16 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w if decision == DashboardWidgetTypes.DISCOVER: return _data_fn(discover, offset, limit, scoped_query) elif decision == DashboardWidgetTypes.TRANSACTION_LIKE: - original_results["meta"][ - "discoverSplitDecision" - ] = DashboardWidgetTypes.get_type_name( - DashboardWidgetTypes.TRANSACTION_LIKE + original_results["meta"]["discoverSplitDecision"] = ( + DashboardWidgetTypes.get_type_name( + DashboardWidgetTypes.TRANSACTION_LIKE + ) ) return original_results elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results: - error_results["meta"][ - "discoverSplitDecision" - ] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) + error_results["meta"]["discoverSplitDecision"] = ( + DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS) + ) return error_results else: return original_results @@ -545,9 +545,9 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save limit, scoped_query, ) - result["meta"][ - 
"discoverSplitDecision" - ] = DiscoverSavedQueryTypes.get_type_name(dataset_inferred_from_query) + result["meta"]["discoverSplitDecision"] = ( + DiscoverSavedQueryTypes.get_type_name(dataset_inferred_from_query) + ) self.save_discover_saved_query_split_decision( discover_query, @@ -582,10 +582,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save try: error_results = map["errors"] - error_results["meta"][ - "discoverSplitDecision" - ] = DiscoverSavedQueryTypes.get_type_name( - DiscoverSavedQueryTypes.ERROR_EVENTS + error_results["meta"]["discoverSplitDecision"] = ( + DiscoverSavedQueryTypes.get_type_name( + DiscoverSavedQueryTypes.ERROR_EVENTS + ) ) has_errors = len(error_results["data"]) > 0 except KeyError: @@ -593,10 +593,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save try: transaction_results = map["transactions"] - transaction_results["meta"][ - "discoverSplitDecision" - ] = DiscoverSavedQueryTypes.get_type_name( - DiscoverSavedQueryTypes.TRANSACTION_LIKE + transaction_results["meta"]["discoverSplitDecision"] = ( + DiscoverSavedQueryTypes.get_type_name( + DiscoverSavedQueryTypes.TRANSACTION_LIKE + ) ) has_transactions = len(transaction_results["data"]) > 0 except KeyError: diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 0a11b698e8c81a..56fa6c54f9cf24 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -470,10 +470,10 @@ def fn( ): if not result.data.get("meta"): result.data["meta"] = {} - result.data["meta"][ - "discoverSplitDecision" - ] = DashboardWidgetTypes.get_type_name( - DashboardWidgetTypes.TRANSACTION_LIKE + result.data["meta"]["discoverSplitDecision"] = ( + DashboardWidgetTypes.get_type_name( + DashboardWidgetTypes.TRANSACTION_LIKE + ) ) return original_results elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results: @@ -484,10 +484,10 @@ def fn( ): if not result.data.get("meta"): result.data["meta"] = {} - result.data["meta"][ - "discoverSplitDecision" - ] = DashboardWidgetTypes.get_type_name( - DashboardWidgetTypes.ERROR_EVENTS + result.data["meta"]["discoverSplitDecision"] = ( + DashboardWidgetTypes.get_type_name( + DashboardWidgetTypes.ERROR_EVENTS + ) ) return error_results else: diff --git a/src/sentry/api/endpoints/organization_events_trends.py b/src/sentry/api/endpoints/organization_events_trends.py index 1789215e1d2baa..9a2d72537500a6 100644 --- a/src/sentry/api/endpoints/organization_events_trends.py +++ b/src/sentry/api/endpoints/organization_events_trends.py @@ -261,9 +261,11 @@ def get_snql_function_aliases(trend_columns: TrendColumns, trend_type: str) -> d if trend_type == IMPROVED else aggregate_filter.operator ), - -1 * aggregate_filter.value.value - if trend_type == IMPROVED - else aggregate_filter.value.value, + ( + -1 * aggregate_filter.value.value + if trend_type == IMPROVED + else aggregate_filter.value.value + ), ), ["minus", "transaction.duration"], trend_columns["trend_difference"], @@ -276,9 +278,11 @@ def get_snql_function_aliases(trend_columns: TrendColumns, trend_type: str) -> d if trend_type == REGRESSION else aggregate_filter.operator ), - -1 * aggregate_filter.value.value - if trend_type == REGRESSION - else aggregate_filter.value.value, + ( + -1 * aggregate_filter.value.value + if trend_type == REGRESSION + else aggregate_filter.value.value + ), ), None, trend_columns["t_test"], @@ -304,9 +308,11 @@ def 
@@ -304,9 +308,11 @@ def get_function_aliases(trend_type):
         "trend_percentage()": Alias(
             lambda aggregate_filter: [
                 "trend_percentage",
-                CORRESPONDENCE_MAP[aggregate_filter.operator]
-                if trend_type == IMPROVED
-                else aggregate_filter.operator,
+                (
+                    CORRESPONDENCE_MAP[aggregate_filter.operator]
+                    if trend_type == IMPROVED
+                    else aggregate_filter.operator
+                ),
                 1 + (aggregate_filter.value.value * (-1 if trend_type == IMPROVED else 1)),
             ],
             ["percentage", "transaction.duration"],
@@ -315,12 +321,16 @@ def get_function_aliases(trend_type):
         "trend_difference()": Alias(
             lambda aggregate_filter: [
                 "trend_difference",
-                CORRESPONDENCE_MAP[aggregate_filter.operator]
-                if trend_type == IMPROVED
-                else aggregate_filter.operator,
-                -1 * aggregate_filter.value.value
-                if trend_type == IMPROVED
-                else aggregate_filter.value.value,
+                (
+                    CORRESPONDENCE_MAP[aggregate_filter.operator]
+                    if trend_type == IMPROVED
+                    else aggregate_filter.operator
+                ),
+                (
+                    -1 * aggregate_filter.value.value
+                    if trend_type == IMPROVED
+                    else aggregate_filter.value.value
+                ),
             ],
             ["minus", "transaction.duration"],
             None,
@@ -328,12 +338,16 @@ def get_function_aliases(trend_type):
         "confidence()": Alias(
             lambda aggregate_filter: [
                 "t_test",
-                CORRESPONDENCE_MAP[aggregate_filter.operator]
-                if trend_type == REGRESSION
-                else aggregate_filter.operator,
-                -1 * aggregate_filter.value.value
-                if trend_type == REGRESSION
-                else aggregate_filter.value.value,
+                (
+                    CORRESPONDENCE_MAP[aggregate_filter.operator]
+                    if trend_type == REGRESSION
+                    else aggregate_filter.operator
+                ),
+                (
+                    -1 * aggregate_filter.value.value
+                    if trend_type == REGRESSION
+                    else aggregate_filter.value.value
+                ),
             ],
             None,
             None,
diff --git a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
index f5786082e3ddf2..e89f98b9e6c5c9 100644
--- a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
+++ b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
@@ -315,18 +315,18 @@ def __init__(self, abs_path: str, project: Project, release: Release, event):
         self.matching_source_file_names = ReleaseFile.normalize(abs_path)
 
         # Source file lookup result variables
-        self.source_file_lookup_result: Literal[
-            "found", "wrong-dist", "unsuccessful"
-        ] = "unsuccessful"
-        self.found_source_file_name: None | (
-            str
-        ) = None  # The name of the source file artifact that was found, e.g. "~/static/bundle.min.js"
-        self.source_map_reference: None | (
-            str
-        ) = None  # The source map reference as found in the source file or its headers, e.g. "https://example.com/static/bundle.min.js.map"
-        self.matching_source_map_name: None | (
-            str
-        ) = None  # The location where Sentry will look for the source map (relative to the source file), e.g. "bundle.min.js.map"
+        self.source_file_lookup_result: Literal["found", "wrong-dist", "unsuccessful"] = (
+            "unsuccessful"
+        )
+        self.found_source_file_name: None | (str) = (
+            None  # The name of the source file artifact that was found, e.g. "~/static/bundle.min.js"
+        )
+        self.source_map_reference: None | (str) = (
+            None  # The source map reference as found in the source file or its headers, e.g. "https://example.com/static/bundle.min.js.map"
+        )
+        self.matching_source_map_name: None | (str) = (
"bundle.min.js.map" + ) # Cached db objects across operations self.artifact_index_release_files: QuerySet | list[ReleaseFile] | None = None @@ -337,9 +337,9 @@ def __init__(self, abs_path: str, project: Project, release: Release, event): self._find_source_file_in_artifact_bundles() # Source map lookup result variable - self.source_map_lookup_result: Literal[ - "found", "wrong-dist", "unsuccessful" - ] = "unsuccessful" + self.source_map_lookup_result: Literal["found", "wrong-dist", "unsuccessful"] = ( + "unsuccessful" + ) if self.source_map_reference is not None and self.found_source_file_name is not None: # type: ignore[unreachable] if self.source_map_reference.startswith("data:"): # type: ignore[unreachable] diff --git a/src/sentry/api/serializers/models/activity.py b/src/sentry/api/serializers/models/activity.py index af60cec3770f8f..f27c9484fdcbf6 100644 --- a/src/sentry/api/serializers/models/activity.py +++ b/src/sentry/api/serializers/models/activity.py @@ -83,12 +83,16 @@ def get_attrs(self, item_list, user, **kwargs): return { item: { "user": users.get(str(item.user_id)) if item.user_id else None, - "source": groups.get(item.data["source_id"]) - if item.type == ActivityType.UNMERGE_DESTINATION.value - else None, - "destination": groups.get(item.data["destination_id"]) - if item.type == ActivityType.UNMERGE_SOURCE.value - else None, + "source": ( + groups.get(item.data["source_id"]) + if item.type == ActivityType.UNMERGE_DESTINATION.value + else None + ), + "destination": ( + groups.get(item.data["destination_id"]) + if item.type == ActivityType.UNMERGE_SOURCE.value + else None + ), "commit": commits.get(item), "pull_request": pull_requests.get(item), } diff --git a/src/sentry/api/serializers/models/artifactbundle.py b/src/sentry/api/serializers/models/artifactbundle.py index de10b5d45aece1..ad4dcedfe59cc6 100644 --- a/src/sentry/api/serializers/models/artifactbundle.py +++ b/src/sentry/api/serializers/models/artifactbundle.py @@ -84,9 +84,11 @@ def serialize(self, obj, attrs, user, **kwargs): return { "id": base64.urlsafe_b64encode(attrs["file_path"].encode()).decode(), # In case the file type string was invalid, we return the sentinel value INVALID_SOURCE_FILE_TYPE. - "fileType": attrs["file_type"].value - if attrs["file_type"] is not None - else INVALID_SOURCE_FILE_TYPE, + "fileType": ( + attrs["file_type"].value + if attrs["file_type"] is not None + else INVALID_SOURCE_FILE_TYPE + ), # We decided to return the file url as file path for better searchability. 
"filePath": attrs["file_url"], "fileSize": attrs["file_info"].file_size if attrs["file_info"] is not None else None, diff --git a/src/sentry/api/serializers/models/auditlogentry.py b/src/sentry/api/serializers/models/auditlogentry.py index 6282c77a8c76bd..db128311df679e 100644 --- a/src/sentry/api/serializers/models/auditlogentry.py +++ b/src/sentry/api/serializers/models/auditlogentry.py @@ -49,9 +49,11 @@ def get_attrs(self, item_list, user, **kwargs): return { item: { - "actor": users[str(item.actor_id)] - if item.actor_id and not override_actor_id(item.actor) - else {"name": item.get_actor_name()}, + "actor": ( + users[str(item.actor_id)] + if item.actor_id and not override_actor_id(item.actor) + else {"name": item.get_actor_name()} + ), "targetUser": users.get(str(item.target_user_id)) or item.target_user_id, } for item in item_list diff --git a/src/sentry/api/serializers/models/orgauthtoken.py b/src/sentry/api/serializers/models/orgauthtoken.py index ddda66eacf72f5..83644b4c4591ed 100644 --- a/src/sentry/api/serializers/models/orgauthtoken.py +++ b/src/sentry/api/serializers/models/orgauthtoken.py @@ -13,9 +13,9 @@ def serialize(self, obj, attrs, user, **kwargs): "tokenLastCharacters": obj.token_last_characters, "dateCreated": obj.date_added, "dateLastUsed": obj.date_last_used, - "projectLastUsedId": str(obj.project_last_used_id) - if obj.project_last_used_id - else None, + "projectLastUsedId": ( + str(obj.project_last_used_id) if obj.project_last_used_id else None + ), } if token: diff --git a/src/sentry/api/serializers/models/plugin.py b/src/sentry/api/serializers/models/plugin.py index 1b5c2f0e1dc132..410985ab4572cd 100644 --- a/src/sentry/api/serializers/models/plugin.py +++ b/src/sentry/api/serializers/models/plugin.py @@ -76,9 +76,9 @@ def serialize(self, obj, attrs, user, **kwargs): ], "doc": doc, "firstPartyAlternative": getattr(obj, "alternative", None), - "deprecationDate": deprecation_date.strftime("%b %-d, %Y") - if deprecation_date - else None, + "deprecationDate": ( + deprecation_date.strftime("%b %-d, %Y") if deprecation_date else None + ), "altIsSentryApp": getattr(obj, "alt_is_sentry_app", None), } if self.project: diff --git a/src/sentry/api/serializers/snuba.py b/src/sentry/api/serializers/snuba.py index b927c4982f34f1..c42d6d76d4baa8 100644 --- a/src/sentry/api/serializers/snuba.py +++ b/src/sentry/api/serializers/snuba.py @@ -101,15 +101,17 @@ def serialize( rv.append((k, row)) res = { - "data": zerofill( - rv, - result.start, - result.end, - result.rollup, - allow_partial_buckets=allow_partial_buckets, + "data": ( + zerofill( + rv, + result.start, + result.end, + result.rollup, + allow_partial_buckets=allow_partial_buckets, + ) + if zerofill_results + else rv ) - if zerofill_results - else rv } if result.data.get("totals"): diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index 58f75ec001c611..fb0738df6b3ed3 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -92,8 +92,7 @@ def get_date_range_from_params( params: Mapping[str, Any], optional: Literal[False] = ..., default_stats_period: datetime.timedelta = ..., -) -> tuple[datetime.datetime, datetime.datetime]: - ... +) -> tuple[datetime.datetime, datetime.datetime]: ... @overload @@ -101,8 +100,7 @@ def get_date_range_from_params( params: Mapping[str, Any], optional: bool = ..., default_stats_period: datetime.timedelta = ..., -) -> tuple[None, None] | tuple[datetime.datetime, datetime.datetime]: - ... +) -> tuple[None, None] | tuple[datetime.datetime, datetime.datetime]: ... 
 
 
 def get_date_range_from_params(
@@ -165,8 +163,7 @@ def get_date_range_from_stats_period(
     params: dict[str, Any],
     optional: Literal[False] = ...,
     default_stats_period: datetime.timedelta = ...,
-) -> tuple[datetime.datetime, datetime.datetime]:
-    ...
+) -> tuple[datetime.datetime, datetime.datetime]: ...
 
 
 @overload
@@ -174,8 +171,7 @@ def get_date_range_from_stats_period(
     params: dict[str, Any],
     optional: bool = ...,
     default_stats_period: datetime.timedelta = ...,
-) -> tuple[None, None] | tuple[datetime.datetime, datetime.datetime]:
-    ...
+) -> tuple[None, None] | tuple[datetime.datetime, datetime.datetime]: ...
 
 
 def get_date_range_from_stats_period(
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index de312f0be11c68..9c2abf0943e755 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -41,13 +41,11 @@ def gettext_noop(s: str) -> str:
 
 
 @overload
-def env(key: str) -> str:
-    ...
+def env(key: str) -> str: ...
 
 
 @overload
-def env(key: str, default: _EnvTypes, type: Type | None = None) -> _EnvTypes:
-    ...
+def env(key: str, default: _EnvTypes, type: Type | None = None) -> _EnvTypes: ...
 
 
 def env(
diff --git a/src/sentry/consumers/synchronized.py b/src/sentry/consumers/synchronized.py
index ec4330ade6343a..5c72a658f574ed 100644
--- a/src/sentry/consumers/synchronized.py
+++ b/src/sentry/consumers/synchronized.py
@@ -90,9 +90,9 @@ def __init__(
         self.__commit_log_topic = commit_log_topic
         self.__commit_log_groups = commit_log_groups
 
-        self.__remote_offsets: Synchronized[
-            Mapping[str, MutableMapping[Partition, int]]
-        ] = Synchronized({group: {} for group in commit_log_groups})
+        self.__remote_offsets: Synchronized[Mapping[str, MutableMapping[Partition, int]]] = (
+            Synchronized({group: {} for group in commit_log_groups})
+        )
 
         self.__commit_log_worker_stop_requested = Event()
         self.__commit_log_worker_subscription_received = Event()
diff --git a/src/sentry/datascrubbing.py b/src/sentry/datascrubbing.py
index 677558445822c8..971fb633689254 100644
--- a/src/sentry/datascrubbing.py
+++ b/src/sentry/datascrubbing.py
@@ -125,9 +125,9 @@ def _merge_pii_configs(prefixes_and_configs: list[tuple[str, dict[str, Any]]]) -
         rules = partial_config.get("rules") or {}
         for rule_name, rule in rules.items():
             prefixed_rule_name = f"{prefix}{rule_name}"
-            merged_config.setdefault("rules", {})[
-                prefixed_rule_name
-            ] = _prefix_rule_references_in_rule(rules, rule, prefix)
+            merged_config.setdefault("rules", {})[prefixed_rule_name] = (
+                _prefix_rule_references_in_rule(rules, rule, prefix)
+            )
 
         for selector, applications in (partial_config.get("applications") or {}).items():
             merged_applications = merged_config.setdefault("applications", {}).setdefault(
diff --git a/src/sentry/db/models/fields/hybrid_cloud_foreign_key.py b/src/sentry/db/models/fields/hybrid_cloud_foreign_key.py
index c141df9bcfcbf5..e0524f104ed614 100644
--- a/src/sentry/db/models/fields/hybrid_cloud_foreign_key.py
+++ b/src/sentry/db/models/fields/hybrid_cloud_foreign_key.py
@@ -43,6 +43,7 @@
 Ideally, when applying this field, you write model test that validates that deletion of your
 parent model produces the expected cascade behavior in your field.
""" + from __future__ import annotations from enum import IntEnum diff --git a/src/sentry/db/models/fields/jsonfield.py b/src/sentry/db/models/fields/jsonfield.py index 9f0ecb9961742d..b61c836979c925 100644 --- a/src/sentry/db/models/fields/jsonfield.py +++ b/src/sentry/db/models/fields/jsonfield.py @@ -25,6 +25,7 @@ OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ + from __future__ import annotations from django.core.exceptions import ValidationError diff --git a/src/sentry/db/models/fields/text.py b/src/sentry/db/models/fields/text.py index e6803ae5856eda..26f391f0148345 100644 --- a/src/sentry/db/models/fields/text.py +++ b/src/sentry/db/models/fields/text.py @@ -13,7 +13,6 @@ migrations. """ - from django.db import models from django.db.backends.base.base import BaseDatabaseWrapper diff --git a/src/sentry/db/models/fields/uuid.py b/src/sentry/db/models/fields/uuid.py index 78ec78697220cd..6c8d5b876d063a 100644 --- a/src/sentry/db/models/fields/uuid.py +++ b/src/sentry/db/models/fields/uuid.py @@ -27,6 +27,7 @@ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ + from __future__ import annotations import importlib diff --git a/src/sentry/db/models/manager/base.py b/src/sentry/db/models/manager/base.py index 6ea31d22dd665c..faf6399279a20c 100644 --- a/src/sentry/db/models/manager/base.py +++ b/src/sentry/db/models/manager/base.py @@ -549,22 +549,26 @@ def create_silo_limited_copy(self: BaseManager[M], limit: SiloLimit) -> BaseMana "bulk_create": limit.create_override(cls.bulk_create), "bulk_update": limit.create_override(cls.bulk_update), "create": limit.create_override(cls.create), - "create_or_update": limit.create_override(cls.create_or_update) - if hasattr(cls, "create_or_update") - else None, + "create_or_update": ( + limit.create_override(cls.create_or_update) + if hasattr(cls, "create_or_update") + else None + ), "get_or_create": limit.create_override(cls.get_or_create), - "post_delete": limit.create_override(cls.post_delete) - if hasattr(cls, "post_delete") - else None, + "post_delete": ( + limit.create_override(cls.post_delete) if hasattr(cls, "post_delete") else None + ), "select_for_update": limit.create_override(cls.select_for_update), "update": limit.create_override(cls.update), "update_or_create": limit.create_override(cls.update_or_create), - "get_from_cache": limit.create_override(cls.get_from_cache) - if hasattr(cls, "get_from_cache") - else None, - "get_many_from_cache": limit.create_override(cls.get_many_from_cache) - if hasattr(cls, "get_many_from_cache") - else None, + "get_from_cache": ( + limit.create_override(cls.get_from_cache) if hasattr(cls, "get_from_cache") else None + ), + "get_many_from_cache": ( + limit.create_override(cls.get_many_from_cache) + if hasattr(cls, "get_many_from_cache") + else None + ), } manager_subclass = type(cls.__name__, (cls,), overrides) manager_instance = manager_subclass() diff --git a/src/sentry/db/models/utils.py b/src/sentry/db/models/utils.py index 57899248960a24..b579fcdf63af82 100644 --- a/src/sentry/db/models/utils.py +++ b/src/sentry/db/models/utils.py @@ -94,12 +94,10 @@ def __init__(self, field: Field[FieldSetType, FieldGetType]) -> None: self.field = field @overload - def __get__(self, inst: Model, owner: type[Any]) -> Any: - ... + def __get__(self, inst: Model, owner: type[Any]) -> Any: ... 
 
     @overload
-    def __get__(self, inst: None, owner: type[Any]) -> Self:
-        ...
+    def __get__(self, inst: None, owner: type[Any]) -> Self: ...
 
     def __get__(self, inst: Model | None, owner: type[Any]) -> Self | Any:
         if inst is None:
diff --git a/src/sentry/db/postgres/transactions.py b/src/sentry/db/postgres/transactions.py
index aed0972b6446b2..0f131612be4f0f 100644
--- a/src/sentry/db/postgres/transactions.py
+++ b/src/sentry/db/postgres/transactions.py
@@ -43,9 +43,9 @@ def django_test_transaction_water_mark(using: str | None = None):
 
     connection = transaction.get_connection(using)
     prev = hybrid_cloud.simulated_transaction_watermarks.state.get(using, 0)
-    hybrid_cloud.simulated_transaction_watermarks.state[
-        using
-    ] = hybrid_cloud.simulated_transaction_watermarks.get_transaction_depth(connection)
+    hybrid_cloud.simulated_transaction_watermarks.state[using] = (
+        hybrid_cloud.simulated_transaction_watermarks.get_transaction_depth(connection)
+    )
     old_run_on_commit = connection.run_on_commit
     connection.run_on_commit = []
     try:
diff --git a/src/sentry/deletions/__init__.py b/src/sentry/deletions/__init__.py
index 27aa2c3b58d159..5daf033399de8d 100644
--- a/src/sentry/deletions/__init__.py
+++ b/src/sentry/deletions/__init__.py
@@ -77,6 +77,7 @@
 registered Group task. It will instead take a more efficient approach of batch deleting its
 indirect descendants, such as Event, so it can more efficiently bulk delete rows.
 """
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
diff --git a/src/sentry/deletions/tasks/hybrid_cloud.py b/src/sentry/deletions/tasks/hybrid_cloud.py
index 59e2eae95956d7..c7b982b84cc756 100644
--- a/src/sentry/deletions/tasks/hybrid_cloud.py
+++ b/src/sentry/deletions/tasks/hybrid_cloud.py
@@ -8,6 +8,7 @@
 opposing silo and are stored in Tombstone rows. Deletions that are not successfully synchronized
 via Outbox to a Tombstone row will not, therefore, cascade to any related cross silo rows.
 """
+
 import datetime
 from collections import defaultdict
 from dataclasses import dataclass
@@ -369,7 +370,7 @@ def _get_model_ids_for_tombstone_cascade(
                 },
             )
 
-            for (row_id, tomb_created) in conn.fetchall():
+            for row_id, tomb_created in conn.fetchall():
                 to_delete_ids.append(row_id)
                 oldest_seen = min(oldest_seen, tomb_created)
diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py
index f7699251fe37ae..1abc9f81411330 100644
--- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py
+++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py
@@ -658,9 +658,9 @@ def merge_transactions(
         "org_id": left["org_id"],
         "project_id": left["project_id"],
         "transaction_counts": merged_transactions,
-        "total_num_transactions": totals.get("total_num_transactions")
-        if totals is not None
-        else None,
+        "total_num_transactions": (
+            totals.get("total_num_transactions") if totals is not None else None
+        ),
         "total_num_classes": totals.get("total_num_classes") if totals is not None else None,
     }
diff --git a/src/sentry/dynamic_sampling/tasks/common.py b/src/sentry/dynamic_sampling/tasks/common.py
index f5abc9532f7bc7..ce36fe4dc65028 100644
--- a/src/sentry/dynamic_sampling/tasks/common.py
+++ b/src/sentry/dynamic_sampling/tasks/common.py
@@ -60,8 +60,7 @@ class LogStateCallable(Protocol):
 
     """
 
-    def __call__(self, state: DynamicSamplingLogState, *args, **kwargs) -> Any:
-        ...
+    def __call__(self, state: DynamicSamplingLogState, *args, **kwargs) -> Any: ...
 
     __name__: str
 
@@ -95,11 +94,9 @@ class ContextIterator(Protocol):
     An iterator that also can return its current state ( used for logging)
     """
 
-    def __iter__(self):
-        ...
+    def __iter__(self): ...
 
-    def __next__(self):
-        ...
+    def __next__(self): ...
 
     def get_current_state(self) -> DynamicSamplingLogState:
         """
diff --git a/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_projects.py b/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_projects.py
index 2b91730bb8d2af..64531f866c8282 100644
--- a/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_projects.py
+++ b/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_projects.py
@@ -16,15 +16,13 @@ def generate_boost_low_volume_projects_cache_key(org_id: int) -> str:
 @overload
 def get_boost_low_volume_projects_sample_rate(
     org_id: int, project_id: int, *, error_sample_rate_fallback: float
-) -> tuple[float, bool]:
-    ...
+) -> tuple[float, bool]: ...
 
 
 @overload
 def get_boost_low_volume_projects_sample_rate(
     org_id: int, project_id: int, *, error_sample_rate_fallback: float | None
-) -> tuple[float | None, bool]:
-    ...
+) -> tuple[float | None, bool]: ...
 
 
 def get_boost_low_volume_projects_sample_rate(
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index b99fd90de7b933..1656869043363a 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -268,13 +268,13 @@ def has_pending_commit_resolution(group: Group) -> bool:
 
 
 @overload
-def get_max_crashreports(model: Project | Organization) -> int:
-    ...
+def get_max_crashreports(model: Project | Organization) -> int: ...
 
 
 @overload
-def get_max_crashreports(model: Project | Organization, *, allow_none: Literal[True]) -> int | None:
-    ...
+def get_max_crashreports(
+    model: Project | Organization, *, allow_none: Literal[True]
+) -> int | None: ...
 
 
 def get_max_crashreports(model: Project | Organization, *, allow_none: bool = False) -> int | None:
diff --git a/src/sentry/eventstore/base.py b/src/sentry/eventstore/base.py
index e5025d9014cc56..e645cade39e658 100644
--- a/src/sentry/eventstore/base.py
+++ b/src/sentry/eventstore/base.py
@@ -235,8 +235,7 @@ def get_event_by_id(
         occurrence_id: str | None = None,
         *,
         skip_transaction_groupevent: Literal[True],
-    ) -> Event | None:
-        ...
+    ) -> Event | None: ...
 
     @overload
     def get_event_by_id(
@@ -248,8 +247,7 @@ def get_event_by_id(
         occurrence_id: str | None = None,
         *,
         skip_transaction_groupevent: bool = False,
-    ) -> Event | GroupEvent | None:
-        ...
+    ) -> Event | GroupEvent | None: ...
 
     def get_event_by_id(
         self,
diff --git a/src/sentry/eventstore/compressor.py b/src/sentry/eventstore/compressor.py
index c71c7ab9fb4f4c..4c14dc0b638296 100644
--- a/src/sentry/eventstore/compressor.py
+++ b/src/sentry/eventstore/compressor.py
@@ -5,6 +5,7 @@
 
 This is not used in production yet, we are still collecting metrics there.
 """
+
 from __future__ import annotations
 
 import hashlib
diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py
index 6014a7f2ff0884..bca5783584f736 100644
--- a/src/sentry/eventstore/models.py
+++ b/src/sentry/eventstore/models.py
@@ -305,12 +305,10 @@ def interfaces(self) -> Mapping[str, Interface]:
         return get_interfaces(self.data)
 
     @overload
-    def get_interface(self, name: Literal["user"]) -> User:
-        ...
+    def get_interface(self, name: Literal["user"]) -> User: ...
 
     @overload
-    def get_interface(self, name: str) -> Interface | None:
-        ...
+    def get_interface(self, name: str) -> Interface | None: ...
 
     def get_interface(self, name: str) -> Interface | None:
         return self.interfaces.get(name)
diff --git a/src/sentry/eventstore/snuba/backend.py b/src/sentry/eventstore/snuba/backend.py
index 9f507d65de44a5..88cda3ae6e4163 100644
--- a/src/sentry/eventstore/snuba/backend.py
+++ b/src/sentry/eventstore/snuba/backend.py
@@ -318,8 +318,7 @@ def get_event_by_id(
         occurrence_id: str | None = None,
         *,
         skip_transaction_groupevent: Literal[True],
-    ) -> Event | None:
-        ...
+    ) -> Event | None: ...
 
     @overload
     def get_event_by_id(
@@ -331,8 +330,7 @@ def get_event_by_id(
         occurrence_id: str | None = None,
         *,
         skip_transaction_groupevent: bool = False,
-    ) -> Event | GroupEvent | None:
-        ...
+    ) -> Event | GroupEvent | None: ...
 
     def get_event_by_id(
         self,
diff --git a/src/sentry/filestore/s3.py b/src/sentry/filestore/s3.py
index 243f3c02440ebe..a7bf7bd9037c36 100644
--- a/src/sentry/filestore/s3.py
+++ b/src/sentry/filestore/s3.py
@@ -33,6 +33,7 @@
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 """
+
 from __future__ import annotations
 
 import mimetypes
diff --git a/src/sentry/grouping/strategies/base.py b/src/sentry/grouping/strategies/base.py
index 0298e9dd81a316..ab3e99949f8e4e 100644
--- a/src/sentry/grouping/strategies/base.py
+++ b/src/sentry/grouping/strategies/base.py
@@ -35,15 +35,13 @@ def __call__(
         event: Event,
         context: "GroupingContext",
         **meta: Any,
-    ) -> ReturnedVariants:
-        ...
+    ) -> ReturnedVariants: ...
 
 
 class VariantProcessor(Protocol):
     def __call__(
         self, variants: ReturnedVariants, context: "GroupingContext", **meta: Any
-    ) -> ReturnedVariants:
-        ...
+    ) -> ReturnedVariants: ...
 
 
 def strategy(
diff --git a/src/sentry/incidents/models/alert_rule.py b/src/sentry/incidents/models/alert_rule.py
index fe310ba0ed91a9..1493ff50c56bc6 100644
--- a/src/sentry/incidents/models/alert_rule.py
+++ b/src/sentry/incidents/models/alert_rule.py
@@ -52,8 +52,7 @@
 
 
 class SubscriptionCallback(Protocol):
-    def __call__(self, subscription: QuerySubscription, *args: Any, **kwargs: Any) -> bool:
-        ...
+    def __call__(self, subscription: QuerySubscription, *args: Any, **kwargs: Any) -> bool: ...
 
 
 alert_subscription_callback_registry: dict[AlertRuleMonitorTypeInt, SubscriptionCallback] = {}
diff --git a/src/sentry/ingest/slicing.py b/src/sentry/ingest/slicing.py
index 6d27cab4bc6bec..32ea847cb15127 100644
--- a/src/sentry/ingest/slicing.py
+++ b/src/sentry/ingest/slicing.py
@@ -3,6 +3,7 @@
 should be stored. These do not require individual physical slices but allow for repartitioning
 with less code changes per physical change.
""" + from __future__ import annotations from typing import Literal, TypeGuard @@ -27,7 +28,7 @@ def map_logical_partition_to_slice(sliceable: Sliceable, logical_partition: int) assert is_sliced(sliceable), f"cannot retrieve slice of non-partitioned sliceable {sliceable}" - for ((logical_part_lo_incl, logical_part_hi_excl), slice_id) in settings.SENTRY_SLICING_CONFIG[ + for (logical_part_lo_incl, logical_part_hi_excl), slice_id in settings.SENTRY_SLICING_CONFIG[ sliceable ].items(): if logical_partition >= logical_part_lo_incl and logical_partition < logical_part_hi_excl: diff --git a/src/sentry/ingest/transaction_clusterer/rules.py b/src/sentry/ingest/transaction_clusterer/rules.py index 9bb6d5be2d1191..ee3612079a952c 100644 --- a/src/sentry/ingest/transaction_clusterer/rules.py +++ b/src/sentry/ingest/transaction_clusterer/rules.py @@ -20,11 +20,9 @@ class RuleStore(Protocol): - def read(self, project: Project) -> RuleSet: - ... + def read(self, project: Project) -> RuleSet: ... - def write(self, project: Project, rules: RuleSet) -> None: - ... + def write(self, project: Project, rules: RuleSet) -> None: ... class RedisRuleStore: diff --git a/src/sentry/integrations/gitlab/webhooks.py b/src/sentry/integrations/gitlab/webhooks.py index ad3a507153d766..9d24ce00dc3dc1 100644 --- a/src/sentry/integrations/gitlab/webhooks.py +++ b/src/sentry/integrations/gitlab/webhooks.py @@ -314,9 +314,9 @@ def post(self, request: HttpRequest) -> HttpResponse: logger.info("gitlab.webhook.wrong-event-type", extra=extra) supported_events = ", ".join(sorted(self._handlers.keys())) logger.info("We only support these kinds of events: %s", supported_events) - extra[ - "reason" - ] = "The customer has edited the webhook in Gitlab to include other types of events." + extra["reason"] = ( + "The customer has edited the webhook in Gitlab to include other types of events." + ) logger.exception(extra["reason"]) return HttpResponse(status=400, reason=extra["reason"]) diff --git a/src/sentry/integrations/repository/__init__.py b/src/sentry/integrations/repository/__init__.py index fbb4700667cfa9..3422aa793f6c8f 100644 --- a/src/sentry/integrations/repository/__init__.py +++ b/src/sentry/integrations/repository/__init__.py @@ -9,6 +9,7 @@ What we query from an interface level won't change, simply how we query will change, and these classes should be the only thing that need to change after we make the migration. """ + from sentry.integrations.repository.issue_alert import IssueAlertNotificationMessageRepository from sentry.integrations.repository.metric_alert import MetricAlertNotificationMessageRepository diff --git a/src/sentry/integrations/slack/unfurl/types.py b/src/sentry/integrations/slack/unfurl/types.py index 45e2ad0d56e678..21872e70750cd2 100644 --- a/src/sentry/integrations/slack/unfurl/types.py +++ b/src/sentry/integrations/slack/unfurl/types.py @@ -32,8 +32,7 @@ def __call__( integration: Integration, links: list[UnfurlableUrl], user: User | None = None, - ) -> UnfurledUrl: - ... + ) -> UnfurledUrl: ... 
 
 
 class Handler(NamedTuple):
diff --git a/src/sentry/integrations/utils/sync.py b/src/sentry/integrations/utils/sync.py
index a2ac81f5670357..a672dc4daee464 100644
--- a/src/sentry/integrations/utils/sync.py
+++ b/src/sentry/integrations/utils/sync.py
@@ -139,8 +139,8 @@ def sync_group_assignee_outbound(
                 "external_issue_id": external_issue_id,
                 "user_id": user_id,
                 "assign": assign,
-                "assignment_source_dict": assignment_source.to_dict()
-                if assignment_source
-                else None,
+                "assignment_source_dict": (
+                    assignment_source.to_dict() if assignment_source else None
+                ),
             }
         )
diff --git a/src/sentry/issues/json_schemas.py b/src/sentry/issues/json_schemas.py
index 2adf2cdefc2136..8bf3c8f96b7267 100644
--- a/src/sentry/issues/json_schemas.py
+++ b/src/sentry/issues/json_schemas.py
@@ -109,7 +109,7 @@
         },
         "version": {
             "type": "string",
-            "pattern": "^(?P<major>0|[1-9]\\d*)\\.(?P<minor>0|[1-9]\\d*)\\.(?P<patch>0|[1-9]\\d*)$"
+            "pattern": "^(?P<major>0|[1-9]\\d*)\\.(?P<minor>0|[1-9]\\d*)\\.(?P<patch>0|[1-9]\\d*)$",
             # MAJOR.MINOR.PATCH
         },
     },
diff --git a/src/sentry/issues/search.py b/src/sentry/issues/search.py
index 3566bfc5bdd32f..181981573bed5f 100644
--- a/src/sentry/issues/search.py
+++ b/src/sentry/issues/search.py
@@ -33,8 +33,7 @@ def __call__(
         groupby: Sequence[str],
         having: Sequence[Any],
         orderby: Sequence[str],
-    ) -> Mapping[str, Any]:
-        ...
+    ) -> Mapping[str, Any]: ...
 
 
 class SearchQueryPartial(Protocol):
@@ -46,8 +45,7 @@ def __call__(
         conditions: Sequence[Any],
         aggregations: Sequence[Any],
         condition_resolver: Any,
-    ) -> Mapping[str, Any]:
-        ...
+    ) -> Mapping[str, Any]: ...
 
 
 GroupSearchFilterUpdater = Callable[[Sequence[SearchFilter]], Sequence[SearchFilter]]
diff --git a/src/sentry/middleware/ratelimit.py b/src/sentry/middleware/ratelimit.py
index 8ad3dd5b817f6c..302f8bbb2effca 100644
--- a/src/sentry/middleware/ratelimit.py
+++ b/src/sentry/middleware/ratelimit.py
@@ -140,12 +140,12 @@ def process_response(
                 response["X-Sentry-Rate-Limit-Remaining"] = rate_limit_metadata.remaining
                 response["X-Sentry-Rate-Limit-Limit"] = rate_limit_metadata.limit
                 response["X-Sentry-Rate-Limit-Reset"] = rate_limit_metadata.reset_time
-                response[
-                    "X-Sentry-Rate-Limit-ConcurrentRemaining"
-                ] = rate_limit_metadata.concurrent_remaining
-                response[
-                    "X-Sentry-Rate-Limit-ConcurrentLimit"
-                ] = rate_limit_metadata.concurrent_limit
+                response["X-Sentry-Rate-Limit-ConcurrentRemaining"] = (
+                    rate_limit_metadata.concurrent_remaining
+                )
+                response["X-Sentry-Rate-Limit-ConcurrentLimit"] = (
+                    rate_limit_metadata.concurrent_limit
+                )
             if hasattr(request, "rate_limit_key") and hasattr(request, "rate_limit_uid"):
                 finish_request(request.rate_limit_key, request.rate_limit_uid)
         except Exception:
diff --git a/src/sentry/models/files/abstractfile.py b/src/sentry/models/files/abstractfile.py
index e05ded4949002e..95dae7f89fe574 100644
--- a/src/sentry/models/files/abstractfile.py
+++ b/src/sentry/models/files/abstractfile.py
@@ -229,24 +229,19 @@ class Meta:
     blobs: models.ManyToManyField
 
     @abc.abstractmethod
-    def _blob_index_records(self) -> Sequence[BlobIndexType]:
-        ...
+    def _blob_index_records(self) -> Sequence[BlobIndexType]: ...
 
     @abc.abstractmethod
-    def _create_blob_index(self, blob: BlobType, offset: int) -> BlobIndexType:
-        ...
+    def _create_blob_index(self, blob: BlobType, offset: int) -> BlobIndexType: ...
 
     @abc.abstractmethod
-    def _create_blob_from_file(self, contents: ContentFile, logger: Any) -> BlobType:
-        ...
+    def _create_blob_from_file(self, contents: ContentFile, logger: Any) -> BlobType: ...
 
     @abc.abstractmethod
-    def _get_blobs_by_id(self, blob_ids: Sequence[int]) -> models.QuerySet[BlobType]:
-        ...
+    def _get_blobs_by_id(self, blob_ids: Sequence[int]) -> models.QuerySet[BlobType]: ...
 
     @abc.abstractmethod
-    def _delete_unreferenced_blob_task(self) -> SentryTask:
-        ...
+    def _delete_unreferenced_blob_task(self) -> SentryTask: ...
 
     def _get_chunked_blob(self, mode=None, prefetch=False, prefetch_to=None, delete=True):
         return ChunkedFileBlobIndexWrapper(
diff --git a/src/sentry/models/files/abstractfileblob.py b/src/sentry/models/files/abstractfileblob.py
index f53a4dbb84dbf2..1d5211ba8cef83 100644
--- a/src/sentry/models/files/abstractfileblob.py
+++ b/src/sentry/models/files/abstractfileblob.py
@@ -52,12 +52,10 @@ class Meta:
         abstract = True
 
     @abstractmethod
-    def _create_blob_owner(self, organization_id: int) -> BlobOwnerType:
-        ...
+    def _create_blob_owner(self, organization_id: int) -> BlobOwnerType: ...
 
     @abstractmethod
-    def _delete_file_task(self) -> SentryTask:
-        ...
+    def _delete_file_task(self) -> SentryTask: ...
 
     @classmethod
     @abstractmethod
diff --git a/src/sentry/models/releasefile.py b/src/sentry/models/releasefile.py
index 8fb9dbd00d9122..a8dc3d84243175 100644
--- a/src/sentry/models/releasefile.py
+++ b/src/sentry/models/releasefile.py
@@ -67,6 +67,7 @@ class ReleaseFile(Model):
     The ident of the file should be sha1(name) or
     sha1(name '\x00\x00' dist.name) and must be unique per release.
     """
+
     __relocation_scope__ = RelocationScope.Excluded
 
     organization_id = BoundedBigIntegerField()
diff --git a/src/sentry/monitoring/queues.py b/src/sentry/monitoring/queues.py
index 4f0309eceefd44..fae06d19e5a5e5 100644
--- a/src/sentry/monitoring/queues.py
+++ b/src/sentry/monitoring/queues.py
@@ -11,17 +11,13 @@
 
 
 class _QueueBackend(Protocol):
-    def __init__(self, broker_url: str) -> None:
-        ...
+    def __init__(self, broker_url: str) -> None: ...
 
-    def bulk_get_sizes(self, queues: list[str]) -> list[tuple[str, int]]:
-        ...
+    def bulk_get_sizes(self, queues: list[str]) -> list[tuple[str, int]]: ...
 
-    def get_size(self, queue: str) -> int:
-        ...
+    def get_size(self, queue: str) -> int: ...
 
-    def purge_queue(self, queue: str) -> int:
-        ...
+    def purge_queue(self, queue: str) -> int: ...
 
 
 class RedisBackend:
diff --git a/src/sentry/monitors/clock_tasks/check_timeout.py b/src/sentry/monitors/clock_tasks/check_timeout.py
index 4dfd4b0296bb44..acf16f756539df 100644
--- a/src/sentry/monitors/clock_tasks/check_timeout.py
+++ b/src/sentry/monitors/clock_tasks/check_timeout.py
@@ -30,7 +30,10 @@ def dispatch_check_timeout(ts: datetime):
 
     This will dispatch MarkTimeout messages into monitors-clock-tasks.
""" timed_out_checkins = list( - MonitorCheckIn.objects.filter(status=CheckInStatus.IN_PROGRESS, timeout_at__lte=ts,).values( + MonitorCheckIn.objects.filter( + status=CheckInStatus.IN_PROGRESS, + timeout_at__lte=ts, + ).values( "id", "monitor_environment_id" )[:CHECKINS_LIMIT] ) diff --git a/src/sentry/notifications/notificationcontroller.py b/src/sentry/notifications/notificationcontroller.py index 41ffa9c0ed1e3c..9909452ea613ff 100644 --- a/src/sentry/notifications/notificationcontroller.py +++ b/src/sentry/notifications/notificationcontroller.py @@ -245,9 +245,9 @@ def _get_layered_setting_options( continue # sort the settings by scope type, with the most specific scope last so we override with the most specific value - most_specific_recipient_options[ - NotificationSettingEnum(setting.type) - ] = NotificationSettingsOptionEnum(setting.value) + most_specific_recipient_options[NotificationSettingEnum(setting.type)] = ( + NotificationSettingsOptionEnum(setting.value) + ) # if we have no settings for this user/team, use the defaults for type, default in get_type_defaults().items(): @@ -334,9 +334,9 @@ def _get_layered_setting_providers( provider_str ] = NotificationSettingsOptionEnum.NEVER else: - most_specific_recipient_providers[type][ - provider_str - ] = get_default_for_provider(type, provider) + most_specific_recipient_providers[type][provider_str] = ( + get_default_for_provider(type, provider) + ) return most_specific_setting_providers diff --git a/src/sentry/notifications/notifications/rules.py b/src/sentry/notifications/notifications/rules.py index a5abdd4d1a3c6d..6c54aaa8b80e9e 100644 --- a/src/sentry/notifications/notifications/rules.py +++ b/src/sentry/notifications/notifications/rules.py @@ -254,9 +254,9 @@ def get_context(self) -> MutableMapping[str, Any]: if len(self.rules) > 0: context["snooze_alert"] = True - context[ - "snooze_alert_url" - ] = f"/organizations/{self.organization.slug}/alerts/rules/{self.project.slug}/{self.rules[0].id}/details/{sentry_query_params}&{urlencode({'mute': '1'})}" + context["snooze_alert_url"] = ( + f"/organizations/{self.organization.slug}/alerts/rules/{self.project.slug}/{self.rules[0].id}/details/{sentry_query_params}&{urlencode({'mute': '1'})}" + ) if isinstance(self.event, GroupEvent) and self.event.occurrence: context["issue_title"] = self.event.occurrence.issue_title diff --git a/src/sentry/notifications/notifications/user_report.py b/src/sentry/notifications/notifications/user_report.py index b1b68ee6ca1501..3bbfcbb8f5f659 100644 --- a/src/sentry/notifications/notifications/user_report.py +++ b/src/sentry/notifications/notifications/user_report.py @@ -36,7 +36,7 @@ def get_participants_with_group_subscription_reason(self) -> ParticipantMap: email_participants = data_by_provider.get_participants_by_provider(ExternalProviders.EMAIL) result = ParticipantMap() - for (actor, reason) in email_participants: + for actor, reason in email_participants: result.add(ExternalProviders.EMAIL, actor, reason) return result diff --git a/src/sentry/plugins/base/manager.py b/src/sentry/plugins/base/manager.py index 5c832d5afb39ef..6edeb483a045dc 100644 --- a/src/sentry/plugins/base/manager.py +++ b/src/sentry/plugins/base/manager.py @@ -22,16 +22,13 @@ def __len__(self) -> int: return sum(1 for i in self.all()) @overload - def all(self) -> Generator[Plugin]: - ... + def all(self) -> Generator[Plugin]: ... @overload - def all(self, *, version: Literal[2]) -> Generator[Plugin2]: - ... + def all(self, *, version: Literal[2]) -> Generator[Plugin2]: ... 
 
     @overload
-    def all(self, *, version: None) -> Generator[Plugin | Plugin2]:
-        ...
+    def all(self, *, version: None) -> Generator[Plugin | Plugin2]: ...
 
     def all(self, version: int | None = 1) -> Generator[Plugin | Plugin2]:
         for plugin in sorted(super().all(), key=lambda x: x.get_title()):
diff --git a/src/sentry/plugins/base/v2.py b/src/sentry/plugins/base/v2.py
index dc92493762cd06..f1c0d9fdf3e16b 100644
--- a/src/sentry/plugins/base/v2.py
+++ b/src/sentry/plugins/base/v2.py
@@ -20,8 +20,7 @@
 
 
 class EventPreprocessor(Protocol):
-    def __call__(self, data: MutableMapping[str, Any]) -> MutableMapping[str, Any] | None:
-        ...
+    def __call__(self, data: MutableMapping[str, Any]) -> MutableMapping[str, Any] | None: ...
 
 
 class PluginMount(type):
diff --git a/src/sentry/profiles/flamegraph.py b/src/sentry/profiles/flamegraph.py
index 4fcbc80d0bba42..d732f9f6a01537 100644
--- a/src/sentry/profiles/flamegraph.py
+++ b/src/sentry/profiles/flamegraph.py
@@ -391,21 +391,21 @@ def get_profile_candidates_from_transactions(self) -> ProfileCandidates:
         )
         results = builder.process_results(results)
 
-        continuous_profile_candidates: list[
-            ContinuousProfileCandidate
-        ] = self.get_chunks_for_profilers(
-            [
-                ProfilerMeta(
-                    project_id=row["project.id"],
-                    profiler_id=row["profiler.id"],
-                    thread_id=row["thread.id"],
-                    start=row["precise.start_ts"],
-                    end=row["precise.finish_ts"],
-                    transaction_id=row["id"],
-                )
-                for row in results["data"]
-                if row["profiler.id"] is not None and row["thread.id"]
-            ]
+        continuous_profile_candidates: list[ContinuousProfileCandidate] = (
+            self.get_chunks_for_profilers(
+                [
+                    ProfilerMeta(
+                        project_id=row["project.id"],
+                        profiler_id=row["profiler.id"],
+                        thread_id=row["thread.id"],
+                        start=row["precise.start_ts"],
+                        end=row["precise.finish_ts"],
+                        transaction_id=row["id"],
+                    )
+                    for row in results["data"]
+                    if row["profiler.id"] is not None and row["thread.id"]
+                ]
+            )
         )
 
         transaction_profile_candidates: list[TransactionProfileCandidate] = [
diff --git a/src/sentry/receivers/outbox/__init__.py b/src/sentry/receivers/outbox/__init__.py
index 2bf566395336dc..2952dbd6e6cbe7 100644
--- a/src/sentry/receivers/outbox/__init__.py
+++ b/src/sentry/receivers/outbox/__init__.py
@@ -47,6 +47,7 @@
 See https://www.notion.so/sentry/Async-cross-region-updates-outbox-9330293c8d2f4bd497361a505fd355d3
 """
+
 from __future__ import annotations
 
 from typing import TypeVar
diff --git a/src/sentry/relay/config/measurements.py b/src/sentry/relay/config/measurements.py
index 77f9eb6f083f27..58f2db00025600 100644
--- a/src/sentry/relay/config/measurements.py
+++ b/src/sentry/relay/config/measurements.py
@@ -1,6 +1,5 @@
 """
 Relay configuration related to transaction measurements.
 """
 
-
 from collections.abc import Sequence
 from typing import Literal, TypedDict
diff --git a/src/sentry/release_health/metrics_sessions_v2.py b/src/sentry/release_health/metrics_sessions_v2.py
index 7c17abc256990b..51a253acf2dbbd 100644
--- a/src/sentry/release_health/metrics_sessions_v2.py
+++ b/src/sentry/release_health/metrics_sessions_v2.py
@@ -148,14 +148,12 @@ def __init__(
         self.metric_fields = self._get_metric_fields(raw_groupby, status_filter)
 
     @abstractmethod
-    def _get_session_status(self, metric_field: MetricField) -> SessionStatus | None:
-        ...
+    def _get_session_status(self, metric_field: MetricField) -> SessionStatus | None: ...
 
     @abstractmethod
     def _get_metric_fields(
         self, raw_groupby: Sequence[str], status_filter: StatusFilter
-    ) -> Sequence[MetricField]:
-        ...
+    ) -> Sequence[MetricField]: ...
 
     def extract_values(
         self,
diff --git a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
index d10b64cd2f7f8f..53732a819c757a 100644
--- a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
+++ b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py
@@ -91,7 +91,7 @@ def download(self, segment: RecordingSegmentStorageMeta) -> StreamingHttpRespons
             content_type="application/json",
         )
         response["Content-Length"] = len(segment_bytes)
-        response[
-            "Content-Disposition"
-        ] = f'attachment; filename="{make_recording_filename(segment)}"'
+        response["Content-Disposition"] = (
+            f'attachment; filename="{make_recording_filename(segment)}"'
+        )
         return response
diff --git a/src/sentry/replays/lib/event_linking.py b/src/sentry/replays/lib/event_linking.py
index 3f361458602a70..f49b5dbbd9580f 100644
--- a/src/sentry/replays/lib/event_linking.py
+++ b/src/sentry/replays/lib/event_linking.py
@@ -15,7 +15,13 @@ class EventLinkKafkaMessage(TypedDict):
     replay_id: str
     project_id: int
     segment_id: None
-    payload: EventLinkPayloadDebugId | EventLinkPayloadInfoId | EventLinkPayloadWarningId | EventLinkPayloadErrorId | EventLinkPayloadFatalId
+    payload: (
+        EventLinkPayloadDebugId
+        | EventLinkPayloadInfoId
+        | EventLinkPayloadWarningId
+        | EventLinkPayloadErrorId
+        | EventLinkPayloadFatalId
+    )
     retention_days: int
diff --git a/src/sentry/replays/lib/http.py b/src/sentry/replays/lib/http.py
index 83b2887ce0e37e..2d3785372bceb3 100644
--- a/src/sentry/replays/lib/http.py
+++ b/src/sentry/replays/lib/http.py
@@ -12,8 +12,7 @@ class UnsatisfiableRange(Exception):
 
 
 class RangeProtocol(Protocol):
-    def make_range(self, last_index: int) -> tuple[int, int]:
-        ...
+    def make_range(self, last_index: int) -> tuple[int, int]: ...
 
     def read_range(self, bytes: io.BytesIO) -> bytes:
         """Return a byte range from a reader.
diff --git a/src/sentry/replays/lib/new_query/fields.py b/src/sentry/replays/lib/new_query/fields.py
index fc499fb9db5fef..cbd96a801d0b71 100644
--- a/src/sentry/replays/lib/new_query/fields.py
+++ b/src/sentry/replays/lib/new_query/fields.py
@@ -38,8 +38,7 @@ class FieldProtocol(Protocol):
     construct the "Condition".
     """
 
-    def apply(self, search_filter: SearchFilter) -> Condition:
-        ...
+    def apply(self, search_filter: SearchFilter) -> Condition: ...
 
 
 class BaseField(Generic[T]):
diff --git a/src/sentry/replays/lib/new_query/parsers.py b/src/sentry/replays/lib/new_query/parsers.py
index 1b665392fec9e8..6d0ae2d5f2e585 100644
--- a/src/sentry/replays/lib/new_query/parsers.py
+++ b/src/sentry/replays/lib/new_query/parsers.py
@@ -2,6 +2,7 @@
 Functions in this module coerce external types to internal types.  Else they die.
""" + import ipaddress import uuid diff --git a/src/sentry/replays/lib/new_query/utils.py b/src/sentry/replays/lib/new_query/utils.py index 8e39201c1ed7b9..faed7fbf4781b6 100644 --- a/src/sentry/replays/lib/new_query/utils.py +++ b/src/sentry/replays/lib/new_query/utils.py @@ -1,4 +1,5 @@ """Query utility module.""" + from __future__ import annotations from uuid import UUID diff --git a/src/sentry/replays/lib/query.py b/src/sentry/replays/lib/query.py index 387c2f2a58160f..57b041c4689845 100644 --- a/src/sentry/replays/lib/query.py +++ b/src/sentry/replays/lib/query.py @@ -1,4 +1,5 @@ """Dynamic query parsing library.""" + import uuid from typing import Any diff --git a/src/sentry/replays/usecases/query/conditions/error_ids.py b/src/sentry/replays/usecases/query/conditions/error_ids.py index 469239630ce48b..e1ed17f15ead16 100644 --- a/src/sentry/replays/usecases/query/conditions/error_ids.py +++ b/src/sentry/replays/usecases/query/conditions/error_ids.py @@ -4,6 +4,7 @@ way. The "SumOfErrorIdsArray" visitor composes the "ErrorIdsArray" visitor and asks if the aggregated result "contains" or "does not contain" a matching value. """ + from __future__ import annotations from uuid import UUID diff --git a/src/sentry/replays/usecases/query/conditions/selector.py b/src/sentry/replays/usecases/query/conditions/selector.py index 819db489b90afd..8ce3f6143feef3 100644 --- a/src/sentry/replays/usecases/query/conditions/selector.py +++ b/src/sentry/replays/usecases/query/conditions/selector.py @@ -4,6 +4,7 @@ row-wise operations against a complex type and aggregating the result into a single integer before asking whether any row in the aggregation set contained a result. """ + from __future__ import annotations from snuba_sdk import Column, Condition, Function, Op diff --git a/src/sentry/replays/usecases/query/configs/aggregate_sort.py b/src/sentry/replays/usecases/query/configs/aggregate_sort.py index ec1f1a7daf6e5f..cc81461bfce764 100644 --- a/src/sentry/replays/usecases/query/configs/aggregate_sort.py +++ b/src/sentry/replays/usecases/query/configs/aggregate_sort.py @@ -3,6 +3,7 @@ Very similar to our filtering configurations except in this module we do not need the field abstraction. We can pass any valid Snuba expression and the query will be sorted by it. """ + from __future__ import annotations from datetime import datetime diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index 53e393f3644207..ba3a99f5f29efc 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -78,6 +78,7 @@ * Mark the group as deleted in Redis. * All reprocessed events are "just" inserted over the old ones. """ + from __future__ import annotations import logging @@ -477,13 +478,11 @@ def pop_batched_events_from_redis(key: str) -> tuple[list[str], datetime | None, @overload -def mark_event_reprocessed(data: MutableMapping[str, Any], *, num_events: int = 1) -> None: - ... +def mark_event_reprocessed(data: MutableMapping[str, Any], *, num_events: int = 1) -> None: ... @overload -def mark_event_reprocessed(*, group_id: int, project_id: int, num_events: int = 1) -> None: - ... +def mark_event_reprocessed(*, group_id: int, project_id: int, num_events: int = 1) -> None: ... 
 
 
 def mark_event_reprocessed(
diff --git a/src/sentry/runner/commands/devservices.py b/src/sentry/runner/commands/devservices.py
index 9b8f95ad0ba03f..5b0b91f5a7782c 100644
--- a/src/sentry/runner/commands/devservices.py
+++ b/src/sentry/runner/commands/devservices.py
@@ -125,15 +125,13 @@ def _client() -> ContextManager[docker.DockerClient]:
 @overload
 def get_or_create(
     client: docker.DockerClient, thing: Literal["network"], name: str
-) -> docker.models.networks.Network:
-    ...
+) -> docker.models.networks.Network: ...
 
 
 @overload
 def get_or_create(
     client: docker.DockerClient, thing: Literal["volume"], name: str
-) -> docker.models.volumes.Volume:
-    ...
+) -> docker.models.volumes.Volume: ...
 
 
 def get_or_create(
@@ -426,8 +424,7 @@ def _start_service(
     project: str,
     always_start: Literal[False] = ...,
     recreate: bool = False,
-) -> docker.models.containers.Container:
-    ...
+) -> docker.models.containers.Container: ...
 
 
 @overload
@@ -438,8 +435,7 @@ def _start_service(
     project: str,
     always_start: bool = False,
     recreate: bool = False,
-) -> docker.models.containers.Container | None:
-    ...
+) -> docker.models.containers.Container | None: ...
 
 
 def _start_service(
diff --git a/src/sentry/runner/commands/presenters/webhookpresenter.py b/src/sentry/runner/commands/presenters/webhookpresenter.py
index 5333d22d216cfd..455b896fae9d9c 100644
--- a/src/sentry/runner/commands/presenters/webhookpresenter.py
+++ b/src/sentry/runner/commands/presenters/webhookpresenter.py
@@ -53,9 +53,7 @@ def flush(self) -> None:
         region: str | None = (
             settings.SENTRY_REGION
             if settings.SENTRY_REGION
-            else settings.CUSTOMER_ID
-            if settings.CUSTOMER_ID
-            else settings.SILO_MODE
+            else settings.CUSTOMER_ID if settings.CUSTOMER_ID else settings.SILO_MODE
         )
 
         json_data = {
diff --git a/src/sentry/runner/default_settings.py b/src/sentry/runner/default_settings.py
index fd131dee8dcf47..ea7520bae46bfb 100644
--- a/src/sentry/runner/default_settings.py
+++ b/src/sentry/runner/default_settings.py
@@ -1,4 +1,5 @@
 """this module is lazily loaded -- it is ~/.sentry/sentry.conf.py overlayed on sentry.conf.server."""
+
 from __future__ import annotations
 
 import sys
diff --git a/src/sentry/search/eap/columns.py b/src/sentry/search/eap/columns.py
index 1a690a4b1a1011..c40a3745caed74 100644
--- a/src/sentry/search/eap/columns.py
+++ b/src/sentry/search/eap/columns.py
@@ -18,7 +18,9 @@
 @dataclass(frozen=True)
 class ResolvedColumn:
     # The alias for this column
-    public_alias: str  # `p95() as foo` has the public alias `foo` and `p95()` has the public alias `p95()`
+    public_alias: (
+        str  # `p95() as foo` has the public alias `foo` and `p95()` has the public alias `p95()`
+    )
     # The internal rpc alias for this column
     internal_name: str | Function.ValueType
     # The public type for this column
@@ -53,9 +55,11 @@ def proto_definition(self) -> AttributeAggregation | AttributeKey:
         else:
             return AttributeKey(
                 name=self.internal_name,
-                type=self.internal_type
-                if self.internal_type is not None
-                else constants.TYPE_MAP[self.search_type],
+                type=(
+                    self.internal_type
+                    if self.internal_type is not None
+                    else constants.TYPE_MAP[self.search_type]
+                ),
             )
diff --git a/src/sentry/search/events/builder/profile_functions.py b/src/sentry/search/events/builder/profile_functions.py
index 45dfea5015f185..40e3f23d1f8a1a 100644
--- a/src/sentry/search/events/builder/profile_functions.py
+++ b/src/sentry/search/events/builder/profile_functions.py
@@ -21,15 +21,12 @@
 class ProfileFunctionsQueryBuilderProtocol(Protocol):
     @property
-    def config(self) -> ProfileFunctionsDatasetConfig:
-        ...
+    def config(self) -> ProfileFunctionsDatasetConfig: ...
 
     @property
-    def params(self) -> SnubaParams:
-        ...
+    def params(self) -> SnubaParams: ...
 
-    def column(self, name: str) -> Column:
-        ...
+    def column(self, name: str) -> Column: ...
 
 
 class ProfileFunctionsQueryBuilderMixin:
diff --git a/src/sentry/search/events/builder/profiles.py b/src/sentry/search/events/builder/profiles.py
index 21a78fce8ca00b..e1fcaff0bb4cda 100644
--- a/src/sentry/search/events/builder/profiles.py
+++ b/src/sentry/search/events/builder/profiles.py
@@ -10,15 +10,12 @@
 class ProfilesQueryBuilderProtocol(Protocol):
     @property
-    def config(self) -> ProfilesDatasetConfig:
-        ...
+    def config(self) -> ProfilesDatasetConfig: ...
 
     @property
-    def params(self) -> SnubaParams:
-        ...
+    def params(self) -> SnubaParams: ...
 
-    def column(self, name: str) -> Column:
-        ...
+    def column(self, name: str) -> Column: ...
 
 
 class ProfilesQueryBuilderMixin:
diff --git a/src/sentry/search/events/builder/spans_indexed.py b/src/sentry/search/events/builder/spans_indexed.py
index 78ad9bc9d05e18..861693a740c38e 100644
--- a/src/sentry/search/events/builder/spans_indexed.py
+++ b/src/sentry/search/events/builder/spans_indexed.py
@@ -54,9 +54,9 @@ class SpansIndexedQueryBuilder(SpansIndexedQueryBuilderMixin, BaseQueryBuilder):
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.value_resolver_map[
-            constants.SPAN_STATUS
-        ] = lambda status: SPAN_STATUS_CODE_TO_NAME.get(status)
+        self.value_resolver_map[constants.SPAN_STATUS] = (
+            lambda status: SPAN_STATUS_CODE_TO_NAME.get(status)
+        )
 
 
 class SpansEAPQueryBuilder(SpansIndexedQueryBuilderMixin, BaseQueryBuilder):
diff --git a/src/sentry/search/events/datasets/metrics.py b/src/sentry/search/events/datasets/metrics.py
index ca675817de0c32..2e8c7fe0ccdb0e 100644
--- a/src/sentry/search/events/datasets/metrics.py
+++ b/src/sentry/search/events/datasets/metrics.py
@@ -768,9 +768,11 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]:
                     "spm",
                     snql_distribution=self._resolve_spm,
                     optional_args=[
-                        fields.NullColumn("interval")
-                        if self.should_skip_interval_calculation
-                        else fields.IntervalDefault("interval", 1, None)
+                        (
+                            fields.NullColumn("interval")
+                            if self.should_skip_interval_calculation
+                            else fields.IntervalDefault("interval", 1, None)
+                        )
                     ],
                     default_result_type="rate",
                 ),
diff --git a/src/sentry/search/events/fields.py b/src/sentry/search/events/fields.py
index 208283e1c781eb..206948b42cbb51 100644
--- a/src/sentry/search/events/fields.py
+++ b/src/sentry/search/events/fields.py
@@ -1649,9 +1649,9 @@ def find_combinator(self, kind: str | None) -> Combinator | None:
                 calculated_args=[
                     {
                         "name": "tolerated",
-                        "fn": lambda args: args["satisfaction"] * 4.0
-                        if args["satisfaction"] is not None
-                        else None,
+                        "fn": lambda args: (
+                            args["satisfaction"] * 4.0 if args["satisfaction"] is not None else None
+                        ),
                     }
                 ],
                 conditional_transform=ConditionalFunction(
@@ -1690,9 +1690,9 @@ def find_combinator(self, kind: str | None) -> Combinator | None:
                 calculated_args=[
                     {
                         "name": "tolerated",
-                        "fn": lambda args: args["satisfaction"] * 4.0
-                        if args["satisfaction"] is not None
-                        else None,
+                        "fn": lambda args: (
+                            args["satisfaction"] * 4.0 if args["satisfaction"] is not None else None
+                        ),
                     },
                     {"name": "parameter_sum", "fn": lambda args: args["alpha"] + args["beta"]},
                 ],
diff --git a/src/sentry/search/events/types.py b/src/sentry/search/events/types.py
index 7f5a9941df2231..156ab50f4594dc 100644
--- a/src/sentry/search/events/types.py
+++ b/src/sentry/search/events/types.py
@@ -177,9 +177,9 @@ def filter_params(self) -> ParamsType:
             "project_objects": list(self.projects),
             "environment": list(self.environment_names),
             "team_id": list(self.team_ids),
-            "environment_objects": [env for env in self.environments if env is not None]
-            if self.environments
-            else [],
+            "environment_objects": (
+                [env for env in self.environments if env is not None] if self.environments else []
+            ),
         }
         if self.organization_id:
             filter_params["organization_id"] = self.organization_id
diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py
index 6f8bd3c6dda4fc..1d5968f639fc1b 100644
--- a/src/sentry/seer/similarity/utils.py
+++ b/src/sentry/seer/similarity/utils.py
@@ -260,9 +260,7 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]:
             "html_frames": (
                 "none"
                 if html_frame_count == 0
-                else "all"
-                if html_frame_count == final_frame_count
-                else "some"
+                else "all" if html_frame_count == final_frame_count else "some"
             )
         },
     )
diff --git a/src/sentry/sentry_apps/models/sentry_app_installation.py b/src/sentry/sentry_apps/models/sentry_app_installation.py
index c0eb232c0b6f99..62cfb87dac734f 100644
--- a/src/sentry/sentry_apps/models/sentry_app_installation.py
+++ b/src/sentry/sentry_apps/models/sentry_app_installation.py
@@ -102,9 +102,9 @@ class SentryAppInstallation(ReplicatedControlModel, ParanoidModel):
     date_added = models.DateTimeField(default=timezone.now)
     date_updated = models.DateTimeField(default=timezone.now)
 
-    objects: ClassVar[
-        SentryAppInstallationForProviderManager
-    ] = SentryAppInstallationForProviderManager()
+    objects: ClassVar[SentryAppInstallationForProviderManager] = (
+        SentryAppInstallationForProviderManager()
+    )
 
     class Meta:
         app_label = "sentry"
@@ -212,8 +212,7 @@ def prepare_ui_component(
     component: SentryAppComponent,
     project_slug: str | None = None,
    values: list[Mapping[str, Any]] | None = None,
-) -> SentryAppComponent | None:
-    ...
+) -> SentryAppComponent | None: ...
 
 
 @overload
@@ -222,8 +221,7 @@ def prepare_ui_component(
     component: RpcSentryAppComponent,
     project_slug: str | None = None,
    values: list[Mapping[str, Any]] | None = None,
-) -> RpcSentryAppComponent | None:
-    ...
+) -> RpcSentryAppComponent | None: ...
 
 
 def prepare_ui_component(
diff --git a/src/sentry/sentry_apps/models/servicehook.py b/src/sentry/sentry_apps/models/servicehook.py
index b2fea0813770a3..94160123299c0a 100644
--- a/src/sentry/sentry_apps/models/servicehook.py
+++ b/src/sentry/sentry_apps/models/servicehook.py
@@ -117,9 +117,11 @@ def add_project(self, project_or_project_id):
         from sentry.models.project import Project
 
         ServiceHookProject.objects.create(
-            project_id=project_or_project_id.id
-            if isinstance(project_or_project_id, Project)
-            else project_or_project_id,
+            project_id=(
+                project_or_project_id.id
+                if isinstance(project_or_project_id, Project)
+                else project_or_project_id
+            ),
             service_hook_id=self.id,
         )
diff --git a/src/sentry/sentry_apps/services/app/model.py b/src/sentry/sentry_apps/services/app/model.py
index 09c885dd3547f9..20609618b50841 100644
--- a/src/sentry/sentry_apps/services/app/model.py
+++ b/src/sentry/sentry_apps/services/app/model.py
@@ -109,19 +109,15 @@ class SentryAppEventDataInterface(Protocol):
     """
 
     @property
-    def id(self) -> str:
-        ...
+    def id(self) -> str: ...
 
     @property
-    def label(self) -> str:
-        ...
+    def label(self) -> str: ...
 
     @property
-    def actionType(self) -> str:
-        ...
+    def actionType(self) -> str: ...
 
-    def is_enabled(self) -> bool:
-        ...
+    def is_enabled(self) -> bool: ...
 
 
 class RpcSentryAppEventData(RpcModel, metaclass=RpcModelProtocolMeta):
diff --git a/src/sentry/sentry_metrics/client/base.py b/src/sentry/sentry_metrics/client/base.py
index 4420e6fc31f928..a65775dbab2cba 100644
--- a/src/sentry/sentry_metrics/client/base.py
+++ b/src/sentry/sentry_metrics/client/base.py
@@ -17,7 +17,6 @@ def counter(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Used for emitting a counter metric for internal use cases only.
         Ensure that the use_case_id passed in has been registered
@@ -36,7 +35,6 @@ def set(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Used for emitting a set metric for internal use cases only. Can
         support a sequence of values. Ensure that the use_case_id passed in has
@@ -54,7 +52,6 @@ def distribution(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Used for emitting a distribution metric for internal use cases only.
         Can support a sequence of values. Ensure that the use_case_id passed in
diff --git a/src/sentry/sentry_metrics/client/kafka.py b/src/sentry/sentry_metrics/client/kafka.py
index 9e9a445f1262ad..297ace2f9589c6 100644
--- a/src/sentry/sentry_metrics/client/kafka.py
+++ b/src/sentry/sentry_metrics/client/kafka.py
@@ -66,7 +66,6 @@ def counter(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Emit a counter metric for internal use cases only.
         Note that, as of now, this function will return
diff --git a/src/sentry/sentry_metrics/client/snuba.py b/src/sentry/sentry_metrics/client/snuba.py
index 35b531bfa2a07e..75d285474aa221 100644
--- a/src/sentry/sentry_metrics/client/snuba.py
+++ b/src/sentry/sentry_metrics/client/snuba.py
@@ -33,7 +33,6 @@ def get_retention_from_org_id(org_id: int) -> int:
 
 
 class SnubaMetricsBackend(GenericMetricsBackend):
-
     """
     This backend is meant for use in dev/testing
     environments. It allows for producing metrics
@@ -52,7 +51,6 @@ def counter(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Emit a counter metric for internal use cases only.
         """
@@ -76,7 +74,6 @@ def set(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Emit a set metric for internal use cases only.
         Can support a sequence of values.
@@ -102,7 +99,6 @@ def distribution(
         tags: dict[str, str],
         unit: str | None,
     ) -> None:
-
         """
         Emit a distribution metric for internal use cases only.
         Can support a sequence of values.
diff --git a/src/sentry/sentry_metrics/consumers/indexer/slicing_router.py b/src/sentry/sentry_metrics/consumers/indexer/slicing_router.py
index f7fae07468283a..8668da9c57cdbd 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/slicing_router.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/slicing_router.py
@@ -39,9 +39,9 @@ def _validate_slicing_config() -> None:
     Validates the generalized slicing config (not focusing on an individual
     sliceable)
     """
-    for (sliceable, assignments) in settings.SENTRY_SLICING_CONFIG.items():
+    for sliceable, assignments in settings.SENTRY_SLICING_CONFIG.items():
         acc = {}
-        for ((assign_lo, assign_hi), _slice_id) in assignments.items():
+        for (assign_lo, assign_hi), _slice_id in assignments.items():
             for logical_part in range(assign_lo, assign_hi):
                 if logical_part in acc:
                     raise SlicingConfigurationException(
diff --git a/src/sentry/sentry_metrics/indexer/mock.py b/src/sentry/sentry_metrics/indexer/mock.py
index eb870fd615fbd6..21a304857a1abe 100644
--- a/src/sentry/sentry_metrics/indexer/mock.py
+++ b/src/sentry/sentry_metrics/indexer/mock.py
@@ -18,14 +18,13 @@
 
 
 class RawSimpleIndexer(StringIndexer):
-
     """Simple indexer with in-memory store. Do not use in production."""
 
     def __init__(self) -> None:
         self._counter = itertools.count(start=10000)
-        self._strings: DefaultDict[
-            UseCaseID, DefaultDict[OrgId, DefaultDict[str, int | None]]
-        ] = defaultdict(lambda: defaultdict(lambda: defaultdict(self._counter.__next__)))
+        self._strings: DefaultDict[UseCaseID, DefaultDict[OrgId, DefaultDict[str, int | None]]] = (
+            defaultdict(lambda: defaultdict(lambda: defaultdict(self._counter.__next__)))
+        )
         self._reverse: dict[int, str] = {}
 
     def bulk_record(
diff --git a/src/sentry/sentry_metrics/querying/data/transformation/metrics_api.py b/src/sentry/sentry_metrics/querying/data/transformation/metrics_api.py
index 2928faa2f60520..f9cc8234048886 100644
--- a/src/sentry/sentry_metrics/querying/data/transformation/metrics_api.py
+++ b/src/sentry/sentry_metrics/querying/data/transformation/metrics_api.py
@@ -222,9 +222,9 @@ def _add_to_query_groups(
                     order=query_result.order.value if query_result.order else None,
                     limit=query_result.limit,
                     has_more=query_result.has_more,
-                    unit_family=query_result.unit_family.value
-                    if query_result.unit_family
-                    else None,
+                    unit_family=(
+                        query_result.unit_family.value if query_result.unit_family else None
+                    ),
                     unit=query_result.unit,
                     scaling_factor=query_result.scaling_factor,
                 )
diff --git a/src/sentry/shared_integrations/client/base.py b/src/sentry/shared_integrations/client/base.py
index 753be0f7f5e235..4c85c98514f081 100644
--- a/src/sentry/shared_integrations/client/base.py
+++ b/src/sentry/shared_integrations/client/base.py
@@ -154,8 +154,7 @@ def _request(
         ignore_webhook_errors: bool = False,
         prepared_request: PreparedRequest | None = None,
         raw_response: Literal[True] = ...,
-    ) -> Response:
-        ...
+    ) -> Response: ...
 
     @overload
     def _request(
@@ -173,8 +172,7 @@ def _request(
         ignore_webhook_errors: bool = False,
         prepared_request: PreparedRequest | None = None,
         raw_response: bool = ...,
-    ) -> BaseApiResponseX:
-        ...
+    ) -> BaseApiResponseX: ...
 
     def _request(
         self,
diff --git a/src/sentry/shared_integrations/exceptions/__init__.py b/src/sentry/shared_integrations/exceptions/__init__.py
index a871bbf4f75c7b..cadd41a2ba2b2a 100644
--- a/src/sentry/shared_integrations/exceptions/__init__.py
+++ b/src/sentry/shared_integrations/exceptions/__init__.py
@@ -95,8 +95,7 @@ def from_response(cls, response: Response, url: str | None = None) -> ApiError:
 
 class _RequestHasUrl(Protocol):
     @property
-    def url(self) -> str:
-        ...
+    def url(self) -> str: ...
 
 
 class ApiHostError(ApiError):
diff --git a/src/sentry/snuba/metrics/datasource.py b/src/sentry/snuba/metrics/datasource.py
index 0f88cf74993c11..ea290073a10fb2 100644
--- a/src/sentry/snuba/metrics/datasource.py
+++ b/src/sentry/snuba/metrics/datasource.py
@@ -771,14 +771,14 @@ def _get_group_limit_filters(
     # Creates a mapping of groupBy fields to their equivalent SnQL
     key_to_condition_dict: dict[Groupable, Any] = {}
     for metric_groupby_obj in metrics_query.groupby:
-        key_to_condition_dict[
-            metric_groupby_obj.name
-        ] = SnubaQueryBuilder.generate_snql_for_action_by_fields(
-            metric_action_by_field=metric_groupby_obj,
-            use_case_id=use_case_id,
-            org_id=metrics_query.org_id,
-            projects=Project.objects.get_many_from_cache(metrics_query.project_ids),
-            is_column=True,
+        key_to_condition_dict[metric_groupby_obj.name] = (
+            SnubaQueryBuilder.generate_snql_for_action_by_fields(
+                metric_action_by_field=metric_groupby_obj,
+                use_case_id=use_case_id,
+                org_id=metrics_query.org_id,
+                projects=Project.objects.get_many_from_cache(metrics_query.project_ids),
+                is_column=True,
+            )
         )
 
     aliased_group_keys: tuple[str] = tuple(
diff --git a/src/sentry/snuba/metrics/utils.py b/src/sentry/snuba/metrics/utils.py
index 91e1f3e2a580b8..cef92af6e666c6 100644
--- a/src/sentry/snuba/metrics/utils.py
+++ b/src/sentry/snuba/metrics/utils.py
@@ -455,25 +455,21 @@ class OrderByNotSupportedOverCompositeEntityException(NotSupportedOverCompositeE
 
 
 @overload
-def to_intervals(start: None, end: datetime, interval_seconds: int) -> tuple[None, None, int]:
-    ...
+def to_intervals(start: None, end: datetime, interval_seconds: int) -> tuple[None, None, int]: ...
 
 
 @overload
-def to_intervals(start: datetime, end: None, interval_seconds: int) -> tuple[None, None, int]:
-    ...
+def to_intervals(start: datetime, end: None, interval_seconds: int) -> tuple[None, None, int]: ...
 
 
 @overload
-def to_intervals(start: None, end: None, interval_seconds: int) -> tuple[None, None, int]:
-    ...
+def to_intervals(start: None, end: None, interval_seconds: int) -> tuple[None, None, int]: ...
 
 
 @overload
 def to_intervals(
     start: datetime, end: datetime, interval_seconds: int
-) -> tuple[datetime, datetime, int]:
-    ...
+) -> tuple[datetime, datetime, int]: ...
 
 
 def to_intervals(
diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py
index 7a6080a00418e5..52213126fbf154 100644
--- a/src/sentry/snuba/metrics_performance.py
+++ b/src/sentry/snuba/metrics_performance.py
@@ -108,8 +108,7 @@ def bulk_timeseries_query(
     *,
     apply_formatting: Literal[False],
     query_source: QuerySource | None = None,
-) -> EventsResponse:
-    ...
+) -> EventsResponse: ...
 
 
 @overload
@@ -129,8 +128,7 @@ def bulk_timeseries_query(
     on_demand_metrics_type: MetricSpecType | None = None,
     groupby: Column | None = None,
     query_source: QuerySource | None = None,
-) -> SnubaTSResult:
-    ...
+) -> SnubaTSResult: ...
 
 
 def bulk_timeseries_query(
diff --git a/src/sentry/snuba/query_subscriptions/consumer.py b/src/sentry/snuba/query_subscriptions/consumer.py
index 735a0f55d71738..d075488f8499a1 100644
--- a/src/sentry/snuba/query_subscriptions/consumer.py
+++ b/src/sentry/snuba/query_subscriptions/consumer.py
@@ -159,10 +159,13 @@ def handle_message(
             sentry_sdk.set_tag("query_subscription_id", contents["subscription_id"])
 
         callback = subscriber_registry[subscription.type]
-        with sentry_sdk.start_span(op="process_message") as span, metrics.timer(
-            "snuba_query_subscriber.callback.duration",
-            instance=subscription.type,
-            tags={"dataset": dataset},
+        with (
+            sentry_sdk.start_span(op="process_message") as span,
+            metrics.timer(
+                "snuba_query_subscriber.callback.duration",
+                instance=subscription.type,
+                tags={"dataset": dataset},
+            ),
         ):
             span.set_data("payload", contents)
             span.set_data("subscription_dataset", subscription.snuba_query.dataset)
diff --git a/src/sentry/snuba/sessions_v2.py b/src/sentry/snuba/sessions_v2.py
index 69dd7ead4fdbcc..184b3a479e96de 100644
--- a/src/sentry/snuba/sessions_v2.py
+++ b/src/sentry/snuba/sessions_v2.py
@@ -672,9 +672,11 @@ def get_category_stats(
             if not category_stats:
                 category_stats = {
                     "category": category,
-                    "outcomes": {o.api_name(): 0 for o in Outcome}
-                    if not outcome_query
-                    else {o: 0 for o in outcome_query},
+                    "outcomes": (
+                        {o.api_name(): 0 for o in Outcome}
+                        if not outcome_query
+                        else {o: 0 for o in outcome_query}
+                    ),
                     "totals": {},
                 }
                 if not outcome_query or any([o in dropped_outcomes for o in outcome_query]):
diff --git a/src/sentry/stacktraces/processing.py b/src/sentry/stacktraces/processing.py
index c66c5332eff1c2..3e466acbca570d 100644
--- a/src/sentry/stacktraces/processing.py
+++ b/src/sentry/stacktraces/processing.py
@@ -366,9 +366,7 @@ def normalize_stacktraces_for_grouping(
             event_metadata["in_app_frame_mix"] = (
                 "in-app-only"
                 if frame_mixes["in-app-only"] == len(stacktrace_frames)
-                else "system-only"
-                if frame_mixes["system-only"] == len(stacktrace_frames)
-                else "mixed"
+                else "system-only" if frame_mixes["system-only"] == len(stacktrace_frames) else "mixed"
             )
             data["metadata"] = event_metadata
diff --git a/src/sentry/statistical_detectors/algorithm.py b/src/sentry/statistical_detectors/algorithm.py
index 4b858d7c595225..124ea6c8c84eb2 100644
--- a/src/sentry/statistical_detectors/algorithm.py
+++ b/src/sentry/statistical_detectors/algorithm.py
@@ -97,8 +97,7 @@ def update(
         self,
         raw: Mapping[str | bytes, bytes | float | int | str],
         payload: DetectorPayload,
-    ) -> tuple[TrendType, float, DetectorState | None]:
-        ...
+    ) -> tuple[TrendType, float, DetectorState | None]: ...
 
 
 class MovingAverageRelativeChangeDetector(DetectorAlgorithm):
diff --git a/src/sentry/statistical_detectors/base.py b/src/sentry/statistical_detectors/base.py
index a0b9f05bd2e9bc..392827e3801fd5 100644
--- a/src/sentry/statistical_detectors/base.py
+++ b/src/sentry/statistical_detectors/base.py
@@ -29,28 +29,22 @@ class DetectorPayload:
 class DetectorState(ABC):
     @classmethod
     @abstractmethod
-    def from_redis_dict(cls, data: Any) -> DetectorState:
-        ...
+    def from_redis_dict(cls, data: Any) -> DetectorState: ...
 
     @abstractmethod
-    def to_redis_dict(self) -> Mapping[str | bytes, bytes | float | int | str]:
-        ...
+    def to_redis_dict(self) -> Mapping[str | bytes, bytes | float | int | str]: ...
 
     @abstractmethod
-    def should_auto_resolve(self, target: float, rel_threshold: float) -> bool:
-        ...
+    def should_auto_resolve(self, target: float, rel_threshold: float) -> bool: ...
 
     @abstractmethod
     def should_escalate(
         self, baseline: float, regressed: float, min_change: float, rel_threshold: float
-    ) -> bool:
-        ...
+    ) -> bool: ...
 
     @classmethod
     @abstractmethod
-    def empty(cls) -> DetectorState:
-        ...
+    def empty(cls) -> DetectorState: ...
 
     @abstractmethod
-    def get_moving_avg(self) -> float:
-        ...
+    def get_moving_avg(self) -> float: ...
diff --git a/src/sentry/statistical_detectors/detector.py b/src/sentry/statistical_detectors/detector.py
index 21b03a2b4b2fe0..0b9540ddc5a293 100644
--- a/src/sentry/statistical_detectors/detector.py
+++ b/src/sentry/statistical_detectors/detector.py
@@ -69,13 +69,11 @@ def configure_tags(cls):
 
     @classmethod
     @abstractmethod
-    def detector_algorithm_factory(cls) -> DetectorAlgorithm:
-        ...
+    def detector_algorithm_factory(cls) -> DetectorAlgorithm: ...
 
     @classmethod
     @abstractmethod
-    def detector_store_factory(cls) -> DetectorStore:
-        ...
+    def detector_store_factory(cls) -> DetectorStore: ...
 
     @classmethod
     def all_payloads(
@@ -98,8 +96,7 @@ def query_payloads(
         cls,
         projects: list[Project],
         start: datetime,
-    ) -> Iterable[DetectorPayload]:
-        ...
+    ) -> Iterable[DetectorPayload]: ...
 
     @classmethod
     def detect_trends(
@@ -195,8 +192,7 @@ def query_timeseries(
         objects: list[tuple[Project, int | str]],
         start: datetime,
         function: str,
-    ) -> Iterable[tuple[int, int | str, SnubaTSResult]]:
-        ...
+    ) -> Iterable[tuple[int, int | str, SnubaTSResult]]: ...
 
     @classmethod
     def detect_regressions(
diff --git a/src/sentry/statistical_detectors/store.py b/src/sentry/statistical_detectors/store.py
index e2533d6d77c2b1..fc8c2b8bb62cc7 100644
--- a/src/sentry/statistical_detectors/store.py
+++ b/src/sentry/statistical_detectors/store.py
@@ -8,9 +8,7 @@
 
 class DetectorStore(ABC, Generic[T]):
     @abstractmethod
-    def bulk_read_states(self, payloads: list[DetectorPayload]) -> list[T]:
-        ...
+    def bulk_read_states(self, payloads: list[DetectorPayload]) -> list[T]: ...
 
     @abstractmethod
-    def bulk_write_states(self, payloads: list[DetectorPayload], states: list[T]):
-        ...
+    def bulk_write_states(self, payloads: list[DetectorPayload], states: list[T]): ...
diff --git a/src/sentry/tasks/check_am2_compatibility.py b/src/sentry/tasks/check_am2_compatibility.py
index 8cb9eb035d839f..bf7490ace79d61 100644
--- a/src/sentry/tasks/check_am2_compatibility.py
+++ b/src/sentry/tasks/check_am2_compatibility.py
@@ -428,9 +428,9 @@ def extract_sdks_from_data(cls, data):
 
     @classmethod
     def get_outdated_sdks(cls, found_sdks_per_project):
-        outdated_sdks_per_project: Mapping[
-            str, Mapping[str, set[tuple[str, str | None]]]
-        ] = defaultdict(lambda: defaultdict(set))
+        outdated_sdks_per_project: Mapping[str, Mapping[str, set[tuple[str, str | None]]]] = (
+            defaultdict(lambda: defaultdict(set))
+        )
 
         for project, found_sdks in found_sdks_per_project.items():
             for sdk_name, sdk_versions in found_sdks.items():
diff --git a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py
index 487b00ba86d691..737e3496bb3406 100644
--- a/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py
+++ b/src/sentry/tasks/embeddings_grouping/backfill_seer_grouping_records_for_project.py
@@ -100,7 +100,11 @@ def backfill_seer_grouping_records_for_project(
     )
 
     try:
-        (project, last_processed_group_id, last_processed_project_index,) = initialize_backfill(
+        (
+            project,
+            last_processed_group_id,
+            last_processed_project_index,
+        ) = initialize_backfill(
             current_project_id,
             last_processed_group_id_input,
             last_processed_project_index_input,
diff --git a/src/sentry/tasks/summaries/utils.py b/src/sentry/tasks/summaries/utils.py
index 218cfc543c0e65..8ddf1d1f99b7b0 100644
--- a/src/sentry/tasks/summaries/utils.py
+++ b/src/sentry/tasks/summaries/utils.py
@@ -42,9 +42,9 @@ def __init__(
         self.end = to_datetime(timestamp)
         self.organization: Organization = organization
-        self.projects_context_map: dict[
-            int, ProjectContext | DailySummaryProjectContext
-        ] = {}  # { project_id: ProjectContext }
+        self.projects_context_map: dict[int, ProjectContext | DailySummaryProjectContext] = (
+            {}
+        )  # { project_id: ProjectContext }
         self.project_ownership: dict[int, set[int]] = {}  # { user_id: set }
         self.daily = daily
diff --git a/src/sentry/testutils/helpers/link_header.py b/src/sentry/testutils/helpers/link_header.py
index cf55e9771c4c42..78417f03d4a7c9 100644
--- a/src/sentry/testutils/helpers/link_header.py
+++ b/src/sentry/testutils/helpers/link_header.py
@@ -20,6 +20,7 @@
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
""" + from __future__ import annotations import re diff --git a/src/sentry/testutils/helpers/options.py b/src/sentry/testutils/helpers/options.py index 8a7872474f3a6e..3bbfb6cc999233 100644 --- a/src/sentry/testutils/helpers/options.py +++ b/src/sentry/testutils/helpers/options.py @@ -39,7 +39,8 @@ def new_lookup(self: OptionsManager, key): new_options = settings.SENTRY_OPTIONS.copy() new_options.update(options) with override_settings(SENTRY_OPTIONS=new_options): - with patch.object(default_manager.store, "get", side_effect=new_get), patch( - "sentry.options.OptionsManager.lookup_key", new=new_lookup + with ( + patch.object(default_manager.store, "get", side_effect=new_get), + patch("sentry.options.OptionsManager.lookup_key", new=new_lookup), ): yield diff --git a/src/sentry/testutils/hybrid_cloud.py b/src/sentry/testutils/hybrid_cloud.py index 20105205ecac5e..316ef3404361a6 100644 --- a/src/sentry/testutils/hybrid_cloud.py +++ b/src/sentry/testutils/hybrid_cloud.py @@ -241,9 +241,9 @@ def new_atomic_on_commit(func, using=None): if is_django_test_case: simulated_transaction_watermarks.state[conn.alias] = 2 else: - simulated_transaction_watermarks.state[ - conn.alias - ] = simulated_transaction_watermarks.get_transaction_depth(conn) + simulated_transaction_watermarks.state[conn.alias] = ( + simulated_transaction_watermarks.get_transaction_depth(conn) + ) functools.update_wrapper(new_atomic_exit, _old_atomic_exit) functools.update_wrapper(new_atomic_on_commit, _old_transaction_on_commit) diff --git a/src/sentry/testutils/performance_issues/event_generators.py b/src/sentry/testutils/performance_issues/event_generators.py index fcb2115d6f41e6..f9c943752b4b98 100644 --- a/src/sentry/testutils/performance_issues/event_generators.py +++ b/src/sentry/testutils/performance_issues/event_generators.py @@ -14,7 +14,7 @@ EVENTS = {} PROJECT_ID = 1 -for (dirpath, dirnames, filenames) in os.walk(_fixture_path): +for dirpath, dirnames, filenames in os.walk(_fixture_path): for filename in filenames: [name, extension] = filename.split(".") diff --git a/src/sentry/testutils/pytest/fixtures.py b/src/sentry/testutils/pytest/fixtures.py index fb50304db1b015..8191fe55d3ceb3 100644 --- a/src/sentry/testutils/pytest/fixtures.py +++ b/src/sentry/testutils/pytest/fixtures.py @@ -206,8 +206,7 @@ def __call__( reference_file: str | None = None, subname: str | None = None, inequality_comparator: InequalityComparator = default_comparator, - ) -> None: - ... + ) -> None: ... @pytest.fixture diff --git a/src/sentry/types/actor.py b/src/sentry/types/actor.py index 105c27a007a4d6..14bfbc81d2f1e7 100644 --- a/src/sentry/types/actor.py +++ b/src/sentry/types/actor.py @@ -159,13 +159,11 @@ def from_rpc_team(cls, team: "RpcTeam") -> "Actor": @overload @classmethod - def from_identifier(cls, id: None) -> None: - ... + def from_identifier(cls, id: None) -> None: ... @overload @classmethod - def from_identifier(cls, id: int | str) -> "Actor": - ... + def from_identifier(cls, id: int | str) -> "Actor": ... @classmethod def from_identifier(cls, id: str | int | None) -> "Actor | None": @@ -268,12 +266,10 @@ class ActorOwned(Protocol): """Protocol for objects that are owned by Actor but need to store ownership in discrete columns""" @property - def owner(self) -> Actor | None: - ... + def owner(self) -> Actor | None: ... @owner.setter - def owner(self, actor: Actor | None) -> None: - ... + def owner(self, actor: Actor | None) -> None: ... 
 
 
 def parse_and_validate_actor(actor_identifier: str | None, organization_id: int) -> Actor | None:
diff --git a/src/sentry/utils/arroyo.py b/src/sentry/utils/arroyo.py
index 5c1ddeb3a12102..dcf3c77386d038 100644
--- a/src/sentry/utils/arroyo.py
+++ b/src/sentry/utils/arroyo.py
@@ -163,9 +163,9 @@ def run_task_with_multiprocessing(
     pool: MultiprocessingPool,
     function: Callable[[Message[TStrategyPayload]], TResult],
     **kwargs: Any,
-) -> RunTask[TStrategyPayload, TResult] | ArroyoRunTaskWithMultiprocessing[
-    TStrategyPayload, TResult
-]:
+) -> (
+    RunTask[TStrategyPayload, TResult] | ArroyoRunTaskWithMultiprocessing[TStrategyPayload, TResult]
+):
     """
     A variant of arroyo's RunTaskWithMultiprocessing that can switch between
     multiprocessing and non-multiprocessing mode based on the
diff --git a/src/sentry/utils/avatar.py b/src/sentry/utils/avatar.py
index 8edbbf0c922a1b..7e1e524a18bfd6 100644
--- a/src/sentry/utils/avatar.py
+++ b/src/sentry/utils/avatar.py
@@ -2,6 +2,7 @@
 Note: Also see letterAvatar.jsx. Anything changed in this file (how colors are selected, the svg,
 etc) will also need to be changed there.
 """
+
 from __future__ import annotations
 
 from collections.abc import MutableMapping
diff --git a/src/sentry/utils/circuit_breaker2.py b/src/sentry/utils/circuit_breaker2.py
index 3988fd917c9fa9..cc1c2600218c7c 100644
--- a/src/sentry/utils/circuit_breaker2.py
+++ b/src/sentry/utils/circuit_breaker2.py
@@ -331,16 +331,13 @@ def _get_state_and_remaining_time(
     @overload
     def _get_controlling_quota(
         self, state: Literal[CircuitBreakerState.OK, CircuitBreakerState.RECOVERY]
-    ) -> Quota:
-        ...
+    ) -> Quota: ...
 
     @overload
-    def _get_controlling_quota(self, state: Literal[CircuitBreakerState.BROKEN]) -> None:
-        ...
+    def _get_controlling_quota(self, state: Literal[CircuitBreakerState.BROKEN]) -> None: ...
 
     @overload
-    def _get_controlling_quota(self) -> Quota | None:
-        ...
+    def _get_controlling_quota(self) -> Quota | None: ...
 
     def _get_controlling_quota(self, state: CircuitBreakerState | None = None) -> Quota | None:
         """
@@ -358,24 +355,19 @@ def _get_controlling_quota(self, state: CircuitBreakerState | None = None) -> Qu
         return controlling_quota_by_state[_state]
 
     @overload
-    def _get_remaining_error_quota(self, quota: None, window_end: int | None) -> None:
-        ...
+    def _get_remaining_error_quota(self, quota: None, window_end: int | None) -> None: ...
 
     @overload
-    def _get_remaining_error_quota(self, quota: Quota, window_end: int | None) -> int:
-        ...
+    def _get_remaining_error_quota(self, quota: Quota, window_end: int | None) -> int: ...
 
     @overload
-    def _get_remaining_error_quota(self, quota: None) -> None:
-        ...
+    def _get_remaining_error_quota(self, quota: None) -> None: ...
 
     @overload
-    def _get_remaining_error_quota(self, quota: Quota) -> int:
-        ...
+    def _get_remaining_error_quota(self, quota: Quota) -> int: ...
 
     @overload
-    def _get_remaining_error_quota(self) -> int | None:
-        ...
+    def _get_remaining_error_quota(self) -> int | None: ...
 
     def _get_remaining_error_quota(
         self, quota: Quota | None = None, window_end: int | None = None
diff --git a/src/sentry/utils/cursors.py b/src/sentry/utils/cursors.py
index b58b62c5b25a5d..2d9ecf553d2771 100644
--- a/src/sentry/utils/cursors.py
+++ b/src/sentry/utils/cursors.py
@@ -8,8 +8,7 @@
 
 class KeyCallable(Protocol):
-    def __call__(self, value: T, for_prev: bool = ...) -> CursorValue:
-        ...
+    def __call__(self, value: T, for_prev: bool = ...) -> CursorValue: ...
 
 
 OnResultCallable = Callable[[Sequence[T]], Any]
diff --git a/src/sentry/utils/dates.py b/src/sentry/utils/dates.py
index f5170cf2432e19..52367fab441948 100644
--- a/src/sentry/utils/dates.py
+++ b/src/sentry/utils/dates.py
@@ -29,13 +29,11 @@ def ensure_aware(value: datetime) -> datetime:
 
 @overload
-def to_datetime(value: None) -> None:
-    ...
+def to_datetime(value: None) -> None: ...
 
 
 @overload
-def to_datetime(value: float | int) -> datetime:
-    ...
+def to_datetime(value: float | int) -> datetime: ...
 
 
 def to_datetime(value: float | int | None) -> datetime | None:
diff --git a/src/sentry/utils/http.py b/src/sentry/utils/http.py
index 83885a7b59009e..ae556bb3aa3e31 100644
--- a/src/sentry/utils/http.py
+++ b/src/sentry/utils/http.py
@@ -42,13 +42,11 @@ def create_redirect_url(request: HttpRequest, redirect_url: str) -> str:
 
 @overload
-def origin_from_url(url: str) -> str:
-    ...
+def origin_from_url(url: str) -> str: ...
 
 
 @overload
-def origin_from_url(url: None) -> None:
-    ...
+def origin_from_url(url: None) -> None: ...
 
 
 def origin_from_url(url: str | None) -> str | None:
diff --git a/src/sentry/utils/json.py b/src/sentry/utils/json.py
index 7eb5cf28f859a0..69c29d4c7fc1b8 100644
--- a/src/sentry/utils/json.py
+++ b/src/sentry/utils/json.py
@@ -141,13 +141,11 @@ def dumps_htmlsafe(value: object) -> SafeString:
 
 @overload
-def prune_empty_keys(obj: None) -> None:
-    ...
+def prune_empty_keys(obj: None) -> None: ...
 
 
 @overload
-def prune_empty_keys(obj: Mapping[TKey, TValue | None]) -> dict[TKey, TValue]:
-    ...
+def prune_empty_keys(obj: Mapping[TKey, TValue | None]) -> dict[TKey, TValue]: ...
 
 
 def prune_empty_keys(obj: Mapping[TKey, TValue | None] | None) -> dict[TKey, TValue] | None:
diff --git a/src/sentry/utils/jwt.py b/src/sentry/utils/jwt.py
index 02e14332928b5b..3c6a25c8d5ea69 100644
--- a/src/sentry/utils/jwt.py
+++ b/src/sentry/utils/jwt.py
@@ -3,6 +3,7 @@
 This is an attempt to have all the interactions with JWT in once place, so that we have once
 central place which handles JWT in a uniform way.
 """
+
 from __future__ import annotations
 
 from collections.abc import Mapping
diff --git a/src/sentry/utils/lazy_service_wrapper.py b/src/sentry/utils/lazy_service_wrapper.py
index 5d4f1fea2d38f7..547d410ea5f78b 100644
--- a/src/sentry/utils/lazy_service_wrapper.py
+++ b/src/sentry/utils/lazy_service_wrapper.py
@@ -1,4 +1,5 @@
 """split out from sentry.utils.services to handle mypy plugin specialization"""
+
 from __future__ import annotations
 
 import enum
diff --git a/src/sentry/utils/options.py b/src/sentry/utils/options.py
index 3b50b919328698..c1781e6c9fcb51 100644
--- a/src/sentry/utils/options.py
+++ b/src/sentry/utils/options.py
@@ -1,6 +1,5 @@
 """
 Helper functions for sentry.options
 """
-
 import logging
 
 from sentry import options
diff --git a/src/sentry/utils/redis.py b/src/sentry/utils/redis.py
index af8248ae2940dd..3a56aac2e44ffe 100644
--- a/src/sentry/utils/redis.py
+++ b/src/sentry/utils/redis.py
@@ -129,8 +129,7 @@ def _factory(
         hosts: list[dict[Any, Any]] | dict[Any, Any] | None = None,
         client_args: dict[str, Any] | None = None,
         **config: Any,
-    ) -> RedisCluster[bytes] | StrictRedis[bytes]:
-        ...
+    ) -> RedisCluster[bytes] | StrictRedis[bytes]: ...
 
     @overload
     def _factory(
         self,
@@ -142,8 +141,7 @@ def _factory(
         hosts: list[dict[Any, Any]] | dict[Any, Any] | None = None,
         client_args: dict[str, Any] | None = None,
         **config: Any,
-    ) -> RedisCluster[str] | StrictRedis[str]:
-        ...
+    ) -> RedisCluster[str] | StrictRedis[str]: ...
 
     def _factory(
         self,
@@ -171,9 +169,9 @@ def _factory(
 
         # Redis cluster does not wait to attempt to connect. We'd prefer to not
         # make TCP connections on boot. Wrap the client in a lazy proxy object.
-        def cluster_factory() -> RedisCluster[
-            bytes
-        ] | StrictRedis[bytes] | RedisCluster[str] | StrictRedis[str]:
+        def cluster_factory() -> (
+            RedisCluster[bytes] | StrictRedis[bytes] | RedisCluster[str] | StrictRedis[str]
+        ):
             if is_redis_cluster:
                 return RetryingRedisCluster(
                     # Intentionally copy hosts here because redis-cluster-py
diff --git a/src/sentry/utils/sentry_apps/request_buffer.py b/src/sentry/utils/sentry_apps/request_buffer.py
index b412198254d96c..e7d60fa4393714 100644
--- a/src/sentry/utils/sentry_apps/request_buffer.py
+++ b/src/sentry/utils/sentry_apps/request_buffer.py
@@ -81,12 +81,10 @@ def _add_to_buffer_pipeline(
         pipeline.expire(buffer_key, KEY_EXPIRY)
 
     @overload
-    def _get_all_from_buffer(self, buffer_key: str, pipeline: Pipeline[str]) -> None:
-        ...
+    def _get_all_from_buffer(self, buffer_key: str, pipeline: Pipeline[str]) -> None: ...
 
     @overload
-    def _get_all_from_buffer(self, buffer_key: str) -> list[str]:
-        ...
+    def _get_all_from_buffer(self, buffer_key: str) -> list[str]: ...
 
     def _get_all_from_buffer(
         self, buffer_key: str, pipeline: Pipeline[str] | None = None
diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py
index 730a78ebb4cb22..4a18dcbd992c2f 100644
--- a/src/sentry/utils/snuba_rpc.py
+++ b/src/sentry/utils/snuba_rpc.py
@@ -18,12 +18,10 @@ class SnubaRPCError(SnubaError):
 
 class SnubaRPCRequest(Protocol):
-    def SerializeToString(self, deterministic: bool = ...) -> bytes:
-        ...
+    def SerializeToString(self, deterministic: bool = ...) -> bytes: ...
 
     @property
-    def meta(self) -> sentry_protos.snuba.v1alpha.request_common_pb2.RequestMeta:
-        ...
+    def meta(self) -> sentry_protos.snuba.v1alpha.request_common_pb2.RequestMeta: ...
 
 
 def rpc(req: SnubaRPCRequest, resp_type: type[RPCResponseType]) -> RPCResponseType:
diff --git a/src/sentry/utils/strings.py b/src/sentry/utils/strings.py
index e7fd2e97c4205b..79a797e9a87c6a 100644
--- a/src/sentry/utils/strings.py
+++ b/src/sentry/utils/strings.py
@@ -41,13 +41,11 @@ def strip_lone_surrogates(string: str) -> str:
 
 @overload
-def truncatechars(value: None, arg: int, ellipsis: str = ...) -> None:
-    ...
+def truncatechars(value: None, arg: int, ellipsis: str = ...) -> None: ...
 
 
 @overload
-def truncatechars(value: str, arg: int, ellipsis: str = ...) -> str:
-    ...
+def truncatechars(value: str, arg: int, ellipsis: str = ...) -> str: ...
 
 
 def truncatechars(value: str | None, arg: int, ellipsis: str = "...") -> str | None:
diff --git a/src/sentry/utils/types.py b/src/sentry/utils/types.py
index 987d4003bd1968..c0fdd43bf663b4 100644
--- a/src/sentry/utils/types.py
+++ b/src/sentry/utils/types.py
@@ -186,33 +186,27 @@ def convert(self, value):
 
 @typing.overload
-def type_from_value(value: int) -> IntType:
-    ...
+def type_from_value(value: int) -> IntType: ...
 
 
 @typing.overload
-def type_from_value(value: float) -> FloatType:
-    ...
+def type_from_value(value: float) -> FloatType: ...
 
 
 @typing.overload
-def type_from_value(value: bytes) -> StringType:
-    ...
+def type_from_value(value: bytes) -> StringType: ...
 
 
 @typing.overload
-def type_from_value(value: str) -> StringType:
-    ...
+def type_from_value(value: str) -> StringType: ...
 
 
 @typing.overload
-def type_from_value(value: dict) -> DictType:
-    ...
+def type_from_value(value: dict) -> DictType: ...
 
 
 @typing.overload
-def type_from_value(value: list) -> SequenceType:
-    ...
+def type_from_value(value: list) -> SequenceType: ...
 
 
 def type_from_value(value):
diff --git a/src/sentry/web/frontend/base.py b/src/sentry/web/frontend/base.py
index 7336750602928f..d0098bb8e9be84 100644
--- a/src/sentry/web/frontend/base.py
+++ b/src/sentry/web/frontend/base.py
@@ -125,8 +125,7 @@ class _HasRespond(Protocol):
 
     def respond(
         self, template: str, context: dict[str, Any] | None = None, status: int = 200
-    ) -> HttpResponseBase:
-        ...
+    ) -> HttpResponseBase: ...
 
 
 class OrganizationMixin:
diff --git a/src/sentry/web/frontend/debug/mail.py b/src/sentry/web/frontend/debug/mail.py
index 2c83f192d92745..6e7072a88a806f 100644
--- a/src/sentry/web/frontend/debug/mail.py
+++ b/src/sentry/web/frontend/debug/mail.py
@@ -296,9 +296,9 @@ def get_shared_context(rule, org, project: Project, group, event):
 
 def add_unsubscribe_link(context):
     if "unsubscribe_link" not in context:
-        context[
-            "unsubscribe_link"
-        ] = 'javascript:alert("This is a preview page, what did you expect to happen?");'
+        context["unsubscribe_link"] = (
+            'javascript:alert("This is a preview page, what did you expect to happen?");'
+        )
 
 
 # TODO(dcramer): use https://github.com/disqus/django-mailviews
diff --git a/src/sentry/web/frontend/error_page_embed.py b/src/sentry/web/frontend/error_page_embed.py
index 90905f7adbdcc9..f2ad67733d2b5c 100644
--- a/src/sentry/web/frontend/error_page_embed.py
+++ b/src/sentry/web/frontend/error_page_embed.py
@@ -106,9 +106,9 @@ def _smart_response(self, request: HttpRequest, context=None, status=200):
         response["Access-Control-Allow-Origin"] = request.META.get("HTTP_ORIGIN", "")
         response["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
         response["Access-Control-Max-Age"] = "1000"
-        response[
-            "Access-Control-Allow-Headers"
-        ] = "Content-Type, Authorization, X-Requested-With, baggage, sentry-trace"
+        response["Access-Control-Allow-Headers"] = (
+            "Content-Type, Authorization, X-Requested-With, baggage, sentry-trace"
+        )
         response["Vary"] = "Accept"
         if content == "" and context:
             response["X-Sentry-Context"] = json_context
diff --git a/src/sentry_plugins/client.py b/src/sentry_plugins/client.py
index ddd4908de346b1..1f6dc1ea07a2be 100644
--- a/src/sentry_plugins/client.py
+++ b/src/sentry_plugins/client.py
@@ -60,8 +60,7 @@ def _request(
         ignore_webhook_errors: bool = False,
         prepared_request: PreparedRequest | None = None,
         raw_response: Literal[True] = ...,
-    ) -> Response:
-        ...
+    ) -> Response: ...
 
     @overload
     def _request(
@@ -79,8 +78,7 @@ def _request(
         ignore_webhook_errors: bool = False,
         prepared_request: PreparedRequest | None = None,
         raw_response: bool = ...,
-    ) -> BaseApiResponseX:
-        ...
+    ) -> BaseApiResponseX: ...
 
     def _request(self, method, path, **kwargs):
         headers = kwargs.setdefault("headers", {})
diff --git a/src/social_auth/backends/__init__.py b/src/social_auth/backends/__init__.py
index 0acb0b7bad6738..f63cc29887b9dd 100644
--- a/src/social_auth/backends/__init__.py
+++ b/src/social_auth/backends/__init__.py
@@ -9,6 +9,7 @@
 (which is used for URLs matching) and Auth class, otherwise it won't be
 enabled.
 """
+
 from __future__ import annotations
 
 import logging
diff --git a/src/social_auth/views.py b/src/social_auth/views.py
index d190f16613ab4f..57299992f4ab17 100644
--- a/src/social_auth/views.py
+++ b/src/social_auth/views.py
@@ -6,7 +6,6 @@
 token back.
""" - from django.conf import settings from django.contrib import messages from django.contrib.auth import REDIRECT_FIELD_NAME diff --git a/src/sudo/forms.py b/src/sudo/forms.py index b7ebdf1bc68217..0fb888ee5121a3 100644 --- a/src/sudo/forms.py +++ b/src/sudo/forms.py @@ -5,6 +5,7 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ + from __future__ import annotations from typing import Any diff --git a/src/sudo/middleware.py b/src/sudo/middleware.py index 064d4362594a72..7b561f60fcb86d 100644 --- a/src/sudo/middleware.py +++ b/src/sudo/middleware.py @@ -5,6 +5,7 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ + from django.http.request import HttpRequest from django.http.response import HttpResponseBase from django.utils.deprecation import MiddlewareMixin diff --git a/src/sudo/models.py b/src/sudo/models.py index 8c74009ee7f7ab..aa3c525ea84ccd 100644 --- a/src/sudo/models.py +++ b/src/sudo/models.py @@ -5,5 +5,6 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ + # Register signals automatically by installing the app from sudo.signals import * # noqa diff --git a/src/sudo/settings.py b/src/sudo/settings.py index 82c6025fc5de97..382fb8b79a6914 100644 --- a/src/sudo/settings.py +++ b/src/sudo/settings.py @@ -5,6 +5,7 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ + from django.conf import settings # Default url to be redirected to after elevating permissions diff --git a/src/sudo/signals.py b/src/sudo/signals.py index d87c122e7b750b..af35a87c0f9f05 100644 --- a/src/sudo/signals.py +++ b/src/sudo/signals.py @@ -5,6 +5,7 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ + from django.contrib.auth.signals import user_logged_in, user_logged_out from django.dispatch import receiver from django.http.request import HttpRequest diff --git a/src/sudo/utils.py b/src/sudo/utils.py index b07e708cf38f0a..ccc82027f7915c 100644 --- a/src/sudo/utils.py +++ b/src/sudo/utils.py @@ -5,6 +5,7 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ + from __future__ import annotations from typing import cast diff --git a/src/sudo/views.py b/src/sudo/views.py index 28288739a8e0e3..8e0255fef28edc 100644 --- a/src/sudo/views.py +++ b/src/sudo/views.py @@ -5,6 +5,7 @@ :copyright: (c) 2020 by Matt Robenolt. :license: BSD, see LICENSE for more details. 
""" + from __future__ import annotations from typing import Any diff --git a/tests/acceptance/test_account_settings.py b/tests/acceptance/test_account_settings.py index 14e22895d7f3a8..6f7e667eed2eb2 100644 --- a/tests/acceptance/test_account_settings.py +++ b/tests/acceptance/test_account_settings.py @@ -22,15 +22,17 @@ def setUp(self): self.login_as(self.user) def test_account_security_settings(self): - with self.options({"system.url-prefix": self.browser.live_server_url}), self.feature( - "organizations:onboarding" + with ( + self.options({"system.url-prefix": self.browser.live_server_url}), + self.feature("organizations:onboarding"), ): self.browser.get("/settings/account/security/") self.browser.wait_until_not('[data-test-id="loading-indicator"]') def test_account_notifications(self): - with self.options({"system.url-prefix": self.browser.live_server_url}), self.feature( - "organizations:onboarding" + with ( + self.options({"system.url-prefix": self.browser.live_server_url}), + self.feature("organizations:onboarding"), ): self.browser.get("/settings/account/notifications/") self.browser.wait_until_not('[data-test-id="loading-indicator"]') diff --git a/tests/acceptance/test_organization_switch.py b/tests/acceptance/test_organization_switch.py index ec958225f789c1..72f52b6cd549bd 100644 --- a/tests/acceptance/test_organization_switch.py +++ b/tests/acceptance/test_organization_switch.py @@ -55,8 +55,9 @@ def get_project_elements_from_project_selector_dropdown(): for page in ["issues", "releases", "discover", "user-feedback"] ] - with self.settings(SENTRY_SINGLE_ORGANIZATION=False), self.feature( - "organizations:discover" + with ( + self.settings(SENTRY_SINGLE_ORGANIZATION=False), + self.feature("organizations:discover"), ): for transition_url in transition_urls: navigate_to_issues_page(self.organization.slug) diff --git a/tests/sentry/api/endpoints/test_accept_organization_invite.py b/tests/sentry/api/endpoints/test_accept_organization_invite.py index 48f05243f814c0..5c20b99f7bb73e 100644 --- a/tests/sentry/api/endpoints/test_accept_organization_invite.py +++ b/tests/sentry/api/endpoints/test_accept_organization_invite.py @@ -283,8 +283,9 @@ def test_cannot_accept_expired(self): om = Factories.create_member( email="newuser@example.com", token="abc", organization=self.organization ) - with assume_test_silo_mode_of(OrganizationMember), unguarded_write( - using=router.db_for_write(OrganizationMember) + with ( + assume_test_silo_mode_of(OrganizationMember), + unguarded_write(using=router.db_for_write(OrganizationMember)), ): OrganizationMember.objects.filter(id=om.id).update( token_expires_at=om.token_expires_at - timedelta(days=31) diff --git a/tests/sentry/api/endpoints/test_organization_traces.py b/tests/sentry/api/endpoints/test_organization_traces.py index 02b71e011d600b..75e492d45287dc 100644 --- a/tests/sentry/api/endpoints/test_organization_traces.py +++ b/tests/sentry/api/endpoints/test_organization_traces.py @@ -820,7 +820,7 @@ def test_matching_tag_metrics(self): span_ids, ) = self.create_mock_traces() - for (mri, op) in [ + for mri, op in [ (TransactionMRI.DURATION.value, "count"), ("d:transactions/measurements.lcp@millisecond", "max"), (SpanMRI.DURATION.value, "min"), @@ -1067,7 +1067,7 @@ def test_get_spans_for_trace_matching_tags_metrics(self): span_ids, ) = self.create_mock_traces() - for (mri, op) in [ + for mri, op in [ (TransactionMRI.DURATION.value, "count"), ("d:transactions/measurements.lcp@millisecond", "max"), (SpanMRI.DURATION.value, "min"), diff --git 
a/tests/sentry/api/endpoints/test_project_codeowners.py b/tests/sentry/api/endpoints/test_project_codeowners.py index 8f76c82afe1d99..91fc00ca3a7a9e 100644 --- a/tests/sentry/api/endpoints/test_project_codeowners.py +++ b/tests/sentry/api/endpoints/test_project_codeowners.py @@ -522,9 +522,9 @@ def test_get_multiple_rules_deleted_owners(self, get_codeowner_mock_file): self.external_delete_user2 = self.create_external_user( user=self.member_user_delete, external_name="@delete2", integration=self.integration ) - self.data[ - "raw" - ] = "docs/* @delete\n*.py @getsentry/ecosystem @delete\n*.css @delete2\n*.rb @NisanthanNanthakumar" + self.data["raw"] = ( + "docs/* @delete\n*.py @getsentry/ecosystem @delete\n*.css @delete2\n*.rb @NisanthanNanthakumar" + ) with self.feature({"organizations:integrations-codeowners": True}): self.client.post(self.url, self.data) diff --git a/tests/sentry/api/helpers/test_group_index.py b/tests/sentry/api/helpers/test_group_index.py index 1ad1e035d7ac1c..010caa15e989e6 100644 --- a/tests/sentry/api/helpers/test_group_index.py +++ b/tests/sentry/api/helpers/test_group_index.py @@ -52,8 +52,9 @@ def assert_analytics_recorded(self, mock_record: Mock) -> None: @patch("sentry.analytics.record") def test_negative(self, mock_record: Mock) -> None: query = "!has:user" - with self.feature({"organizations:advanced-search": False}), pytest.raises( - ValidationError, match=".*negative search.*" + with ( + self.feature({"organizations:advanced-search": False}), + pytest.raises(ValidationError, match=".*negative search.*"), ): self.run_test(query) @@ -61,8 +62,9 @@ def test_negative(self, mock_record: Mock) -> None: self.assert_analytics_recorded(mock_record) query = "!something:123" - with self.feature({"organizations:advanced-search": False}), pytest.raises( - ValidationError, match=".*negative search.*" + with ( + self.feature({"organizations:advanced-search": False}), + pytest.raises(ValidationError, match=".*negative search.*"), ): self.run_test(query) @@ -72,8 +74,9 @@ def test_negative(self, mock_record: Mock) -> None: @patch("sentry.analytics.record") def test_wildcard(self, mock_record: Mock) -> None: query = "abc:hello*" - with self.feature({"organizations:advanced-search": False}), pytest.raises( - ValidationError, match=".*wildcard search.*" + with ( + self.feature({"organizations:advanced-search": False}), + pytest.raises(ValidationError, match=".*wildcard search.*"), ): self.run_test(query) @@ -81,8 +84,9 @@ def test_wildcard(self, mock_record: Mock) -> None: self.assert_analytics_recorded(mock_record) query = "raw * search" - with self.feature({"organizations:advanced-search": False}), pytest.raises( - ValidationError, match=".*wildcard search.*" + with ( + self.feature({"organizations:advanced-search": False}), + pytest.raises(ValidationError, match=".*wildcard search.*"), ): self.run_test(query) diff --git a/tests/sentry/attachments/test_redis.py b/tests/sentry/attachments/test_redis.py index 8e8824e02bee58..0b0153d7be512f 100644 --- a/tests/sentry/attachments/test_redis.py +++ b/tests/sentry/attachments/test_redis.py @@ -55,9 +55,9 @@ def test_process_pending_one_batch(mocked_attachment_cache, mock_client): def test_chunked(mocked_attachment_cache, mock_client): - mock_client.data[ - KEY_FMT % "foo:a" - ] = '[{"name":"foo.txt","content_type":"text/plain","chunks":3}]' + mock_client.data[KEY_FMT % "foo:a"] = ( + '[{"name":"foo.txt","content_type":"text/plain","chunks":3}]' + ) mock_client.data[KEY_FMT % "foo:a:0:0"] = zlib.compress(b"Hello World!") 
mock_client.data[KEY_FMT % "foo:a:0:1"] = zlib.compress(b" This attachment is ") mock_client.data[KEY_FMT % "foo:a:0:2"] = zlib.compress(b"chunked up.") diff --git a/tests/sentry/auth/test_helper.py b/tests/sentry/auth/test_helper.py index fb71ea44977572..9929cfb5447152 100644 --- a/tests/sentry/auth/test_helper.py +++ b/tests/sentry/auth/test_helper.py @@ -289,8 +289,9 @@ def test_new_identity_with_existing_om(self, mock_messages): def test_new_identity_with_existing_om_idp_flags(self, mock_messages): user = self.set_up_user() with assume_test_silo_mode(SiloMode.REGION): - with assume_test_silo_mode(SiloMode.REGION), outbox_context( - transaction.atomic(using=router.db_for_write(OrganizationMember)) + with ( + assume_test_silo_mode(SiloMode.REGION), + outbox_context(transaction.atomic(using=router.db_for_write(OrganizationMember))), ): existing_om = OrganizationMember.objects.create( user_id=user.id, diff --git a/tests/sentry/charts/test_chartcuterie.py b/tests/sentry/charts/test_chartcuterie.py index cfae89c5531777..38ec19ffbc61f6 100644 --- a/tests/sentry/charts/test_chartcuterie.py +++ b/tests/sentry/charts/test_chartcuterie.py @@ -83,8 +83,9 @@ def test_failed(self): "chart-rendering.chartcuterie": {"url": service_url}, } - with self.options(options), pytest.raises( - RuntimeError, match="Chartcuterie responded with 500: Service down" + with ( + self.options(options), + pytest.raises(RuntimeError, match="Chartcuterie responded with 500: Service down"), ): charts.generate_chart(ChartType.SLACK_DISCOVER_TOTAL_PERIOD, chart_data) diff --git a/tests/sentry/consumers/test_synchronized.py b/tests/sentry/consumers/test_synchronized.py index eeff6b2bca80c8..53d5f590c004ab 100644 --- a/tests/sentry/consumers/test_synchronized.py +++ b/tests/sentry/consumers/test_synchronized.py @@ -99,8 +99,9 @@ def test_synchronized_consumer() -> None: # The consumer should not consume any messages until it receives a # commit from both groups that are being followed. - with assert_changes(consumer.paused, [], [Partition(topic, 0)]), assert_changes( - consumer.tell, {}, {Partition(topic, 0): messages[0].offset} + with ( + assert_changes(consumer.paused, [], [Partition(topic, 0)]), + assert_changes(consumer.tell, {}, {Partition(topic, 0): messages[0].offset}), ): assert synchronized_consumer.poll(0.0) is None @@ -122,8 +123,9 @@ def test_synchronized_consumer() -> None: # The consumer should remain paused, since it needs both groups to # advance before it may continue. - with assert_does_not_change(consumer.paused, [Partition(topic, 0)]), assert_does_not_change( - consumer.tell, {Partition(topic, 0): messages[0].offset} + with ( + assert_does_not_change(consumer.paused, [Partition(topic, 0)]), + assert_does_not_change(consumer.tell, {Partition(topic, 0): messages[0].offset}), ): assert synchronized_consumer.poll(0.0) is None @@ -145,17 +147,21 @@ def test_synchronized_consumer() -> None: # The consumer should be able to resume consuming, since both consumers # have processed the first message. 
- with assert_changes(consumer.paused, [Partition(topic, 0)], []), assert_changes( - consumer.tell, - {Partition(topic, 0): messages[0].offset}, - {Partition(topic, 0): messages[0].next_offset}, + with ( + assert_changes(consumer.paused, [Partition(topic, 0)], []), + assert_changes( + consumer.tell, + {Partition(topic, 0): messages[0].offset}, + {Partition(topic, 0): messages[0].next_offset}, + ), ): assert synchronized_consumer.poll(0.0) == messages[0] # After consuming the one available message, the consumer should be # paused again until the remote offsets advance. - with assert_changes(consumer.paused, [], [Partition(topic, 0)]), assert_does_not_change( - consumer.tell, {Partition(topic, 0): messages[1].offset} + with ( + assert_changes(consumer.paused, [], [Partition(topic, 0)]), + assert_does_not_change(consumer.tell, {Partition(topic, 0): messages[1].offset}), ): assert synchronized_consumer.poll(0.0) is None @@ -195,10 +201,13 @@ def test_synchronized_consumer() -> None: # The consumer should be able to resume consuming, since both consumers # have processed the first message. - with assert_changes(consumer.paused, [Partition(topic, 0)], []), assert_changes( - consumer.tell, - {Partition(topic, 0): messages[1].offset}, - {Partition(topic, 0): messages[1].next_offset}, + with ( + assert_changes(consumer.paused, [Partition(topic, 0)], []), + assert_changes( + consumer.tell, + {Partition(topic, 0): messages[1].offset}, + {Partition(topic, 0): messages[1].next_offset}, + ), ): assert synchronized_consumer.poll(0.0) == messages[1] @@ -214,8 +223,9 @@ def test_synchronized_consumer() -> None: # ``leader-a``), and the local offset is the offset of message #4, when # message #4 is consumed, it should be discarded and the offset should # be rolled back to wait for the commit log to advance. - with assert_changes(consumer.paused, [], [Partition(topic, 0)]), assert_does_not_change( - consumer.tell, {Partition(topic, 0): messages[4].offset} + with ( + assert_changes(consumer.paused, [], [Partition(topic, 0)]), + assert_does_not_change(consumer.tell, {Partition(topic, 0): messages[4].offset}), ): assert synchronized_consumer.poll(0.0) is None @@ -236,10 +246,13 @@ def test_synchronized_consumer() -> None: ) # The consumer should be able to resume consuming. - with assert_changes(consumer.paused, [Partition(topic, 0)], []), assert_changes( - consumer.tell, - {Partition(topic, 0): messages[4].offset}, - {Partition(topic, 0): messages[4].next_offset}, + with ( + assert_changes(consumer.paused, [Partition(topic, 0)], []), + assert_changes( + consumer.tell, + {Partition(topic, 0): messages[4].offset}, + {Partition(topic, 0): messages[4].next_offset}, + ), ): assert synchronized_consumer.poll(0.0) == messages[4] @@ -275,9 +288,10 @@ def assignment_callback(offsets: Mapping[Partition, int]) -> None: synchronized_consumer.subscribe([topic], on_assign=assignment_callback) - with assert_changes( - synchronized_consumer.paused, [], [Partition(topic, 0)] - ), assert_changes(consumer.paused, [], [Partition(topic, 0)]): + with ( + assert_changes(synchronized_consumer.paused, [], [Partition(topic, 0)]), + assert_changes(consumer.paused, [], [Partition(topic, 0)]), + ): assert synchronized_consumer.poll(0.0) is None # Advancing the commit log offset should not cause the consumer to @@ -303,9 +317,10 @@ def assignment_callback(offsets: Mapping[Partition, int]) -> None: # Resuming the partition does not immediately cause the partition to # resume, but it should look as if it is resumed to the caller. 
- with assert_changes( - synchronized_consumer.paused, [Partition(topic, 0)], [] - ), assert_does_not_change(consumer.paused, [Partition(topic, 0)]): + with ( + assert_changes(synchronized_consumer.paused, [Partition(topic, 0)], []), + assert_does_not_change(consumer.paused, [Partition(topic, 0)]), + ): synchronized_consumer.resume([Partition(topic, 0)]) # The partition should be resumed on the next poll call, however. @@ -314,21 +329,24 @@ def assignment_callback(offsets: Mapping[Partition, int]) -> None: # Pausing due to hitting the offset fence should not appear as a paused # partition to the caller. - with assert_does_not_change(synchronized_consumer.paused, []), assert_changes( - consumer.paused, [], [Partition(topic, 0)] + with ( + assert_does_not_change(synchronized_consumer.paused, []), + assert_changes(consumer.paused, [], [Partition(topic, 0)]), ): assert synchronized_consumer.poll(0) is None # Other pause and resume actions should not cause the inner consumer to # change its state while up against the fence. - with assert_changes( - synchronized_consumer.paused, [], [Partition(topic, 0)] - ), assert_does_not_change(consumer.paused, [Partition(topic, 0)]): + with ( + assert_changes(synchronized_consumer.paused, [], [Partition(topic, 0)]), + assert_does_not_change(consumer.paused, [Partition(topic, 0)]), + ): synchronized_consumer.pause([Partition(topic, 0)]) - with assert_changes( - synchronized_consumer.paused, [Partition(topic, 0)], [] - ), assert_does_not_change(consumer.paused, [Partition(topic, 0)]): + with ( + assert_changes(synchronized_consumer.paused, [Partition(topic, 0)], []), + assert_does_not_change(consumer.paused, [Partition(topic, 0)]), + ): synchronized_consumer.resume([Partition(topic, 0)]) diff --git a/tests/sentry/db/models/fields/test_slug.py b/tests/sentry/db/models/fields/test_slug.py index 1fde012744b939..04f57aa09462db 100644 --- a/tests/sentry/db/models/fields/test_slug.py +++ b/tests/sentry/db/models/fields/test_slug.py @@ -55,8 +55,8 @@ class IdOrSlugLookupTests(TestCase): def setUp(self) -> None: self.compiler = Mock() # Simulate the quoting behavior for simplicity in tests - self.compiler.quote_name_unless_alias = ( - lambda name: f"{name}" if '"' in name else f'"{name}"' + self.compiler.quote_name_unless_alias = lambda name: ( + f"{name}" if '"' in name else f'"{name}"' ) self.connection = Mock() diff --git a/tests/sentry/db/models/manager/test_base_query_set.py b/tests/sentry/db/models/manager/test_base_query_set.py index f37f8e8996b0cd..3a136b18953ee7 100644 --- a/tests/sentry/db/models/manager/test_base_query_set.py +++ b/tests/sentry/db/models/manager/test_base_query_set.py @@ -40,30 +40,34 @@ def test_empty_query(self): class TestSendPostUpdateSignal(TestCase): def test_not_triggered(self): - with catch_signal(post_update) as handler, override_options( - {"groups.enable-post-update-signal": True} + with ( + catch_signal(post_update) as handler, + override_options({"groups.enable-post-update-signal": True}), ): self.group.message = "hi" self.group.save() assert not handler.called - with catch_signal(post_update) as handler, override_options( - {"groups.enable-post-update-signal": True} + with ( + catch_signal(post_update) as handler, + override_options({"groups.enable-post-update-signal": True}), ): self.group.update(message="hi") assert not handler.called - with catch_signal(post_update) as handler, override_options( - {"groups.enable-post-update-signal": False} + with ( + catch_signal(post_update) as handler, + 
override_options({"groups.enable-post-update-signal": False}), ): assert Group.objects.filter(id=self.group.id).update(message="hi") == 1 assert not handler.called - with catch_signal(post_update) as handler, override_options( - {"groups.enable-post-update-signal": True} + with ( + catch_signal(post_update) as handler, + override_options({"groups.enable-post-update-signal": True}), ): assert ( Group.objects.filter(id=self.group.id) @@ -75,8 +79,9 @@ def test_not_triggered(self): assert not handler.called # Test signal not fired when Django detects the query will return no results - with catch_signal(post_update) as handler, override_options( - {"groups.enable-post-update-signal": True} + with ( + catch_signal(post_update) as handler, + override_options({"groups.enable-post-update-signal": True}), ): assert ( Group.objects.filter(id__in=[]).with_post_update_signal(True).update(message="hi") @@ -95,8 +100,9 @@ def test_enable(self): def test_triggered(self): message = "hi" - with catch_signal(post_update) as handler, override_options( - {"groups.enable-post-update-signal": True} + with ( + catch_signal(post_update) as handler, + override_options({"groups.enable-post-update-signal": True}), ): assert Group.objects.filter(id=self.group.id).update(message=message) == 1 diff --git a/tests/sentry/dynamic_sampling/tasks/test_tasks.py b/tests/sentry/dynamic_sampling/tasks/test_tasks.py index 8a3420dae611a6..320a9a9d3d7494 100644 --- a/tests/sentry/dynamic_sampling/tasks/test_tasks.py +++ b/tests/sentry/dynamic_sampling/tasks/test_tasks.py @@ -458,7 +458,7 @@ def test_boost_low_volume_transactions_with_sliding_window_org(self, get_blended BLENDED_RATE = 0.25 get_blended_sample_rate.return_value = BLENDED_RATE - for (sliding_window_step, used_sample_rate) in ((1, 1.0), (2, BLENDED_RATE), (3, 0.5)): + for sliding_window_step, used_sample_rate in ((1, 1.0), (2, BLENDED_RATE), (3, 0.5)): # We flush redis after each run, to make sure no data persists. 
self.flush_redis() diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py index d7f705941d07bf..943e9289d3eb12 100644 --- a/tests/sentry/event_manager/test_severity.py +++ b/tests/sentry/event_manager/test_severity.py @@ -77,8 +77,9 @@ def test_error_event_simple(self, mock_urlopen: MagicMock) -> None: assert reason == "ml" assert cache.get(SEER_ERROR_COUNT_KEY) == 0 - with override_options({"seer.api.use-shared-secret": 1.0}), override_settings( - SEER_API_SHARED_SECRET="some-secret" + with ( + override_options({"seer.api.use-shared-secret": 1.0}), + override_settings(SEER_API_SHARED_SECRET="some-secret"), ): _get_severity_score(event) mock_urlopen.assert_called_with( diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index aa56710bb0de94..2c42e2d9166496 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -793,13 +793,16 @@ def test_create_feedback_large_message_skips_spam_detection( default_project, set_sentry_option, monkeypatch ): """If spam is enabled, large messages are marked as spam without making an LLM request.""" - with Feature( - { - "organizations:user-feedback-spam-filter-actions": True, - "organizations:user-feedback-spam-filter-ingest": True, - "organizations:feedback-ingest": True, - } - ), set_sentry_option("feedback.message.max-size", 4096): + with ( + Feature( + { + "organizations:user-feedback-spam-filter-actions": True, + "organizations:user-feedback-spam-filter-ingest": True, + "organizations:feedback-ingest": True, + } + ), + set_sentry_option("feedback.message.max-size", 4096), + ): event = mock_feedback_event(default_project.id, datetime.now(UTC)) event["contexts"]["feedback"]["message"] = "a" * 7007 diff --git a/tests/sentry/grouping/test_builtin_fingerprinting.py b/tests/sentry/grouping/test_builtin_fingerprinting.py index 2ba42788189048..f8c0889c8f80fc 100644 --- a/tests/sentry/grouping/test_builtin_fingerprinting.py +++ b/tests/sentry/grouping/test_builtin_fingerprinting.py @@ -613,9 +613,9 @@ def test_built_in_hydration_rules_same_transactions(self): event_message1 = self.store_event(data=self.hydration_error_trace, project_id=self.project) data_message2 = self.hydration_error_trace.copy() - data_message2[ - "message" - ] = "Hydration failed because the initial UI does not match what was rendered on the server." + data_message2["message"] = ( + "Hydration failed because the initial UI does not match what was rendered on the server." 
+ ) event_message2 = self.store_event(data=data_message2, project_id=self.project) assert event_message1.data.data["fingerprint"] == ["hydrationerror", "{{tags.transaction}}"] diff --git a/tests/sentry/helpers/test_deprecation.py b/tests/sentry/helpers/test_deprecation.py index e8da19d3756c66..c6691accb625c3 100644 --- a/tests/sentry/helpers/test_deprecation.py +++ b/tests/sentry/helpers/test_deprecation.py @@ -107,11 +107,14 @@ def test_no_decorator(self): self.assert_not_deprecated("HEAD") def test_default_key(self): - with self.settings(SENTRY_SELF_HOSTED=False), override_options( - { - "api.deprecation.brownout-duration": custom_duration, - "api.deprecation.brownout-cron": custom_cron, - } + with ( + self.settings(SENTRY_SELF_HOSTED=False), + override_options( + { + "api.deprecation.brownout-duration": custom_duration, + "api.deprecation.brownout-cron": custom_cron, + } + ), ): options.delete("api.deprecation.brownout-cron") options.delete("api.deprecation.brownout-duration") @@ -161,34 +164,46 @@ def test_custom_key(self): def test_bad_schedule_format(self): brownout_start = timeiter.get_next(datetime) with freeze_time(brownout_start): - with self.settings(SENTRY_SELF_HOSTED=False), override_options( - { - "api.deprecation.brownout-duration": "bad duration", - }, + with ( + self.settings(SENTRY_SELF_HOSTED=False), + override_options( + { + "api.deprecation.brownout-duration": "bad duration", + }, + ), ): options.delete("api.deprecation.brownout-duration") self.assert_allowed_request("GET") - with self.settings(SENTRY_SELF_HOSTED=False), override_options( - { - "api.deprecation.brownout-duration": "PT1M", - }, + with ( + self.settings(SENTRY_SELF_HOSTED=False), + override_options( + { + "api.deprecation.brownout-duration": "PT1M", + }, + ), ): options.delete("api.deprecation.brownout-duration") self.assert_denied_request("GET") - with self.settings(SENTRY_SELF_HOSTED=False), override_options( - { - "api.deprecation.brownout-cron": "bad schedule", - }, + with ( + self.settings(SENTRY_SELF_HOSTED=False), + override_options( + { + "api.deprecation.brownout-cron": "bad schedule", + }, + ), ): options.delete("api.deprecation.brownout-cron") self.assert_allowed_request("GET") - with self.settings(SENTRY_SELF_HOSTED=False), override_options( - { - "api.deprecation.brownout-cron": "0 12 * * *", - }, + with ( + self.settings(SENTRY_SELF_HOSTED=False), + override_options( + { + "api.deprecation.brownout-cron": "0 12 * * *", + }, + ), ): options.delete("api.deprecation.brownout-cron") self.assert_denied_request("GET") diff --git a/tests/sentry/hybridcloud/services/test_control_organization_provisioning.py b/tests/sentry/hybridcloud/services/test_control_organization_provisioning.py index 053137ab60da14..736a3df0924e04 100644 --- a/tests/sentry/hybridcloud/services/test_control_organization_provisioning.py +++ b/tests/sentry/hybridcloud/services/test_control_organization_provisioning.py @@ -129,8 +129,11 @@ def test_organization_already_provisioned_for_different_user(self) -> None: ) # De-register the conflicting organization to create the collision - with assume_test_silo_mode(SiloMode.CONTROL), outbox_context( - transaction.atomic(using=router.db_for_write(OrganizationSlugReservation)) + with ( + assume_test_silo_mode(SiloMode.CONTROL), + outbox_context( + transaction.atomic(using=router.db_for_write(OrganizationSlugReservation)) + ), ): OrganizationSlugReservation.objects.filter( organization_id=region_only_organization.id @@ -281,8 +284,11 @@ def 
test_conflicting_unregistered_organization_with_slug_exists(self) -> None: new_user = self.create_user() unregistered_org = self.create_organization(slug=conflicting_slug, owner=new_user) - with assume_test_silo_mode(SiloMode.CONTROL), outbox_context( - transaction.atomic(using=router.db_for_write(OrganizationSlugReservation)) + with ( + assume_test_silo_mode(SiloMode.CONTROL), + outbox_context( + transaction.atomic(using=router.db_for_write(OrganizationSlugReservation)) + ), ): OrganizationSlugReservation.objects.filter(organization_id=unregistered_org.id).delete() assert not OrganizationSlugReservation.objects.filter(slug=conflicting_slug).exists() @@ -320,8 +326,11 @@ def test_swap_for_org_without_primary_slug(self) -> None: new_user = self.create_user() unregistered_org = self.create_organization(slug=desired_primary_slug, owner=new_user) - with assume_test_silo_mode(SiloMode.CONTROL), outbox_context( - transaction.atomic(using=router.db_for_write(OrganizationSlugReservation)) + with ( + assume_test_silo_mode(SiloMode.CONTROL), + outbox_context( + transaction.atomic(using=router.db_for_write(OrganizationSlugReservation)) + ), ): OrganizationSlugReservation.objects.filter(organization_id=unregistered_org.id).delete() assert not OrganizationSlugReservation.objects.filter( diff --git a/tests/sentry/hybridcloud/test_region.py b/tests/sentry/hybridcloud/test_region.py index c56d31c886b150..fb536dc992bf90 100644 --- a/tests/sentry/hybridcloud/test_region.py +++ b/tests/sentry/hybridcloud/test_region.py @@ -52,14 +52,16 @@ def test_by_organization_id_attribute(self) -> None: def test_require_single_organization(self) -> None: region_resolution = RequireSingleOrganization() - with override_regions([self.target_region]), override_settings( - SENTRY_SINGLE_ORGANIZATION=True + with ( + override_regions([self.target_region]), + override_settings(SENTRY_SINGLE_ORGANIZATION=True), ): actual_region = region_resolution.resolve({}) assert actual_region == self.target_region - with override_regions([self.target_region]), override_settings( - SENTRY_SINGLE_ORGANIZATION=False + with ( + override_regions([self.target_region]), + override_settings(SENTRY_SINGLE_ORGANIZATION=False), ): with pytest.raises(RegionResolutionError): region_resolution.resolve({}) diff --git a/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py b/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py index e002d87d08b4d3..30075f849cf9ad 100644 --- a/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py +++ b/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py @@ -665,9 +665,9 @@ def test_monitor_type_with_condition(self): alert_rule = self.alert_rule serialized_alert_rule = self.get_serialized_alert_rule() serialized_alert_rule["monitorType"] = AlertRuleMonitorTypeInt.ACTIVATED - serialized_alert_rule[ - "activationCondition" - ] = AlertRuleActivationConditionType.RELEASE_CREATION.value + serialized_alert_rule["activationCondition"] = ( + AlertRuleActivationConditionType.RELEASE_CREATION.value + ) with ( outbox_runner(), self.feature(["organizations:incidents", "organizations:activated-alert-rules"]), diff --git a/tests/sentry/incidents/endpoints/test_serializers.py b/tests/sentry/incidents/endpoints/test_serializers.py index aebdf529340f79..72f29c94e68a6f 100644 --- a/tests/sentry/incidents/endpoints/test_serializers.py +++ b/tests/sentry/incidents/endpoints/test_serializers.py @@ -727,8 +727,9 @@ def test_owner_validation(self): assert 
alert_rule.team_id is None def test_invalid_detection_type(self): - with self.feature("organizations:anomaly-detection-alerts"), self.feature( - "organizations:anomaly-detection-rollout" + with ( + self.feature("organizations:anomaly-detection-alerts"), + self.feature("organizations:anomaly-detection-rollout"), ): params = self.valid_params.copy() params["detection_type"] = AlertRuleDetectionType.PERCENT # requires comparison delta diff --git a/tests/sentry/incidents/test_subscription_processor.py b/tests/sentry/incidents/test_subscription_processor.py index af0c2e57ff6b5f..1d265fe8ebe6c1 100644 --- a/tests/sentry/incidents/test_subscription_processor.py +++ b/tests/sentry/incidents/test_subscription_processor.py @@ -392,9 +392,10 @@ def test_removed_alert_rule(self): self.rule.delete() subscription_id = self.sub.id snuba_query = self.sub.snuba_query - with self.feature( - ["organizations:incidents", "organizations:performance-view"] - ), self.tasks(): + with ( + self.feature(["organizations:incidents", "organizations:performance-view"]), + self.tasks(), + ): SubscriptionProcessor(self.sub).process_update(message) self.metrics.incr.assert_called_once_with( "incidents.alert_rules.no_alert_rule_for_subscription" @@ -416,9 +417,10 @@ def test_removed_alert_rule_one_project(self): rule.delete() subscription_id = subscription.id snuba_query = subscription.snuba_query - with self.feature( - ["organizations:incidents", "organizations:performance-view"] - ), self.tasks(): + with ( + self.feature(["organizations:incidents", "organizations:performance-view"]), + self.tasks(), + ): SubscriptionProcessor(subscription).process_update(message) self.metrics.incr.assert_called_once_with( "incidents.alert_rules.no_alert_rule_for_subscription" diff --git a/tests/sentry/ingest/test_transaction_clusterer.py b/tests/sentry/ingest/test_transaction_clusterer.py index 96d0aa49f02134..472aed36b4c98a 100644 --- a/tests/sentry/ingest/test_transaction_clusterer.py +++ b/tests/sentry/ingest/test_transaction_clusterer.py @@ -321,8 +321,9 @@ def _add_mock_data(proj, number): # Add a transaction to project2 so it runs again _record_sample(ClustererNamespace.TRANSACTIONS, project2, "foo") - with mock.patch("sentry.ingest.transaction_clusterer.tasks.PROJECTS_PER_TASK", 1), freeze_time( - "2000-01-01 01:00:01" + with ( + mock.patch("sentry.ingest.transaction_clusterer.tasks.PROJECTS_PER_TASK", 1), + freeze_time("2000-01-01 01:00:01"), ): spawn_clusterers() diff --git a/tests/sentry/integrations/api/endpoints/test_doc_integration_avatar.py b/tests/sentry/integrations/api/endpoints/test_doc_integration_avatar.py index 8be73de75e0561..7c83181e61d6fe 100644 --- a/tests/sentry/integrations/api/endpoints/test_doc_integration_avatar.py +++ b/tests/sentry/integrations/api/endpoints/test_doc_integration_avatar.py @@ -97,8 +97,9 @@ def test_superuser_upload_avatar(self): ): self.login_as(user=self.superuser, superuser=True) - with assume_test_silo_mode(SiloMode.CONTROL), override_settings( - SILO_MODE=SiloMode.CONTROL + with ( + assume_test_silo_mode(SiloMode.CONTROL), + override_settings(SILO_MODE=SiloMode.CONTROL), ): for doc in [self.published_doc, self.draft_doc]: prev_avatar = doc.avatar.get() @@ -122,8 +123,9 @@ def test_staff_upload_avatar(self): ): self.login_as(user=self.staff_user, staff=True) - with assume_test_silo_mode(SiloMode.CONTROL), override_settings( - SILO_MODE=SiloMode.CONTROL + with ( + assume_test_silo_mode(SiloMode.CONTROL), + override_settings(SILO_MODE=SiloMode.CONTROL), ): for doc in [self.published_doc, 
self.draft_doc]: prev_avatar = doc.avatar.get() diff --git a/tests/sentry/integrations/discord/test_message_builder.py b/tests/sentry/integrations/discord/test_message_builder.py index 1dc2bcc3fee792..ea07ffae9099f5 100644 --- a/tests/sentry/integrations/discord/test_message_builder.py +++ b/tests/sentry/integrations/discord/test_message_builder.py @@ -42,9 +42,9 @@ def test_metric_alert_without_incidents(self): ) uuid = "uuid" - assert DiscordMetricAlertMessageBuilder(alert_rule=self.alert_rule,).build( - notification_uuid=uuid - ) == { + assert DiscordMetricAlertMessageBuilder( + alert_rule=self.alert_rule, + ).build(notification_uuid=uuid) == { "content": "", "embeds": [ { @@ -110,9 +110,9 @@ def test_metric_alert_with_active_incident(self): ) ) uuid = "uuid" - assert DiscordMetricAlertMessageBuilder(alert_rule=self.alert_rule,).build( - notification_uuid=uuid - ) == { + assert DiscordMetricAlertMessageBuilder( + alert_rule=self.alert_rule, + ).build(notification_uuid=uuid) == { "content": "", "embeds": [ { @@ -218,7 +218,9 @@ def test_metric_alert_no_uuid(self): ) ) - assert DiscordMetricAlertMessageBuilder(alert_rule=self.alert_rule,).build() == { + assert DiscordMetricAlertMessageBuilder( + alert_rule=self.alert_rule, + ).build() == { "content": "", "embeds": [ { @@ -261,9 +263,9 @@ def test_metric_alert_with_anomaly_detection(self, mock_seer_request): ) ) uuid = "uuid" - assert DiscordMetricAlertMessageBuilder(alert_rule=alert_rule,).build( - notification_uuid=uuid - ) == { + assert DiscordMetricAlertMessageBuilder( + alert_rule=alert_rule, + ).build(notification_uuid=uuid) == { "content": "", "embeds": [ { diff --git a/tests/sentry/integrations/jira/test_webhooks.py b/tests/sentry/integrations/jira/test_webhooks.py index caad5142bfaacd..8eef96ebafd829 100644 --- a/tests/sentry/integrations/jira/test_webhooks.py +++ b/tests/sentry/integrations/jira/test_webhooks.py @@ -223,9 +223,9 @@ def test_atlassian_pen_testing_bot( mock_endpoint = MockErroringJiraEndpoint.as_view(error=MethodNotAllowed("GET")) request = self.make_request(method="GET") - request.META[ - "HTTP_USER_AGENT" - ] = "CSRT (github.com/atlassian-labs/connect-security-req-tester)" + request.META["HTTP_USER_AGENT"] = ( + "CSRT (github.com/atlassian-labs/connect-security-req-tester)" + ) response = mock_endpoint(request) assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED diff --git a/tests/sentry/issues/test_escalating.py b/tests/sentry/issues/test_escalating.py index 333164555ca18e..dce83fbab56a18 100644 --- a/tests/sentry/issues/test_escalating.py +++ b/tests/sentry/issues/test_escalating.py @@ -190,9 +190,11 @@ def test_query_optimization(self) -> None: # Force pagination to only three elements per page # Once we get to Python 3.10+ the formatting of this multiple with statement will not be an eyesore - with patch("sentry.issues.escalating._query_with_pagination") as query_mock, patch( - "sentry.issues.escalating.ELEMENTS_PER_SNUBA_PAGE", new=3 - ), patch("sentry.issues.escalating.BUCKETS_PER_GROUP", new=2): + with ( + patch("sentry.issues.escalating._query_with_pagination") as query_mock, + patch("sentry.issues.escalating.ELEMENTS_PER_SNUBA_PAGE", new=3), + patch("sentry.issues.escalating.BUCKETS_PER_GROUP", new=2), + ): query_groups_past_counts(groups) # Proj X will expect potentially 4 elements because it has two groups, thus, no other # project will be called with it.
diff --git a/tests/sentry/lang/native/test_processing.py b/tests/sentry/lang/native/test_processing.py index 78f0cfa78582e4..c467a113a3b77b 100644 --- a/tests/sentry/lang/native/test_processing.py +++ b/tests/sentry/lang/native/test_processing.py @@ -2,6 +2,7 @@ This file is intended for unit tests that don't require fixtures or a live service. Most tests live in tests/symbolicator/ """ + from __future__ import annotations from typing import Any diff --git a/tests/sentry/middleware/integrations/parsers/test_jira_server.py b/tests/sentry/middleware/integrations/parsers/test_jira_server.py index 635a84e1856abb..0323d6e6acee59 100644 --- a/tests/sentry/middleware/integrations/parsers/test_jira_server.py +++ b/tests/sentry/middleware/integrations/parsers/test_jira_server.py @@ -119,11 +119,14 @@ def test_routing_webhook_with_mailbox_buckets_high_volume(self): ) parser = JiraServerRequestParser(request=request, response_handler=self.get_response) - with mock.patch( - "sentry.integrations.middleware.hybrid_cloud.parser.ratelimiter.is_limited" - ) as mock_is_limited, mock.patch( - "sentry.middleware.integrations.parsers.jira_server.get_integration_from_token" - ) as mock_get_token: + with ( + mock.patch( + "sentry.integrations.middleware.hybrid_cloud.parser.ratelimiter.is_limited" + ) as mock_is_limited, + mock.patch( + "sentry.middleware.integrations.parsers.jira_server.get_integration_from_token" + ) as mock_get_token, + ): mock_is_limited.return_value = True mock_get_token.return_value = self.integration response = parser.get_response() diff --git a/tests/sentry/middleware/integrations/test_integration_control.py b/tests/sentry/middleware/integrations/test_integration_control.py index 8458ea210bf981..b15bffb61b090a 100644 --- a/tests/sentry/middleware/integrations/test_integration_control.py +++ b/tests/sentry/middleware/integrations/test_integration_control.py @@ -70,11 +70,12 @@ class NewClassification(BaseClassification): self.middleware.register_classifications(classifications=[NewClassification]) - with patch.object( - NewClassification, "should_operate", return_value=True - ) as mock_new_should_operate, patch.object( - NewClassification, "get_response" - ) as mock_new_get_response: + with ( + patch.object( + NewClassification, "should_operate", return_value=True + ) as mock_new_should_operate, + patch.object(NewClassification, "get_response") as mock_new_get_response, + ): self.middleware(self.factory.post("/")) assert mock_integration_operate.called assert mock_plugin_operate.called diff --git a/tests/sentry/models/test_organizationslugreservation.py b/tests/sentry/models/test_organizationslugreservation.py index 2036db4f186e5d..aacefe2443edf1 100644 --- a/tests/sentry/models/test_organizationslugreservation.py +++ b/tests/sentry/models/test_organizationslugreservation.py @@ -44,9 +44,9 @@ def assert_all_replicas_match_slug_reservations(self): slug_res = org_slug_reservations.get(slug) assert slug_res is not None - org_slug_reservation_replica: None | ( - OrganizationSlugReservationReplica - ) = org_slug_replicas.pop(slug, None) + org_slug_reservation_replica: None | (OrganizationSlugReservationReplica) = ( + org_slug_replicas.pop(slug, None) + ) if org_slug_reservation_replica is None: slug_reservations_missing_replicas.append(slug_res) diff --git a/tests/sentry/models/test_projectkey.py b/tests/sentry/models/test_projectkey.py index cccb735491f117..3c085b1b3316c7 100644 --- a/tests/sentry/models/test_projectkey.py +++ b/tests/sentry/models/test_projectkey.py @@ -110,8 +110,9 @@ def 
test_get_dsn(self): assert key.js_sdk_loader_cdn_url == "http://testserver/js-sdk-loader/abc.min.js" def test_get_dsn_org_subdomain(self): - with self.feature("organizations:org-ingest-subdomains"), self.options( - {"system.region-api-url-template": ""} + with ( + self.feature("organizations:org-ingest-subdomains"), + self.options({"system.region-api-url-template": ""}), ): key = self.model(project_id=self.project.id, public_key="abc", secret_key="xyz") host = f"o{key.project.organization_id}.ingest.testserver" diff --git a/tests/sentry/nodestore/test_common.py b/tests/sentry/nodestore/test_common.py index d48b1c8c614a3c..4d7e7535245253 100644 --- a/tests/sentry/nodestore/test_common.py +++ b/tests/sentry/nodestore/test_common.py @@ -2,6 +2,7 @@ Testsuite of backend-independent nodestore tests. Add your backend to the `ns` fixture to have it tested. """ + from contextlib import nullcontext import pytest diff --git a/tests/sentry/quotas/test_base.py b/tests/sentry/quotas/test_base.py index 82d17b88723078..0cb659448c8940 100644 --- a/tests/sentry/quotas/test_base.py +++ b/tests/sentry/quotas/test_base.py @@ -74,18 +74,24 @@ def test_get_organization_quota_with_account_limit_and_no_system_limit(self): def test_get_organization_quota_with_no_account_limit_and_system_limit(self): org = self.create_organization() - with self.settings( - SENTRY_DEFAULT_MAX_EVENTS_PER_MINUTE="50%", SENTRY_SINGLE_ORGANIZATION=False - ), self.options({"system.rate-limit": 10}): + with ( + self.settings( + SENTRY_DEFAULT_MAX_EVENTS_PER_MINUTE="50%", SENTRY_SINGLE_ORGANIZATION=False + ), + self.options({"system.rate-limit": 10}), + ): assert self.backend.get_organization_quota(org) == (5, 60) def test_get_organization_quota_with_no_account_limit_and_relative_system_limit_single_org( self, ): org = self.create_organization() - with self.settings( - SENTRY_DEFAULT_MAX_EVENTS_PER_MINUTE="50%", SENTRY_SINGLE_ORGANIZATION=True - ), self.options({"system.rate-limit": 10}): + with ( + self.settings( + SENTRY_DEFAULT_MAX_EVENTS_PER_MINUTE="50%", SENTRY_SINGLE_ORGANIZATION=True + ), + self.options({"system.rate-limit": 10}), + ): assert self.backend.get_organization_quota(org) == (10, 60) def test_get_blended_sample_rate(self): diff --git a/tests/sentry/rules/processing/test_delayed_processing.py b/tests/sentry/rules/processing/test_delayed_processing.py index c35037e25572e5..de9e4f3f95fa5d 100644 --- a/tests/sentry/rules/processing/test_delayed_processing.py +++ b/tests/sentry/rules/processing/test_delayed_processing.py @@ -483,9 +483,9 @@ def setUp(self): ): {self.group1.id: 2, self.group2.id: 1} } - self.rules_to_slow_conditions: DefaultDict[ - Rule, list[EventFrequencyConditionData] - ] = defaultdict(list) + self.rules_to_slow_conditions: DefaultDict[Rule, list[EventFrequencyConditionData]] = ( + defaultdict(list) + ) self.rules_to_slow_conditions[self.rule1].append(TEST_RULE_SLOW_CONDITION) self.rules_to_groups: DefaultDict[int, set[int]] = defaultdict(set) diff --git a/tests/sentry/runner/commands/test_migrations.py b/tests/sentry/runner/commands/test_migrations.py index c5ff1fb00a8e8a..60d8a230eb245e 100644 --- a/tests/sentry/runner/commands/test_migrations.py +++ b/tests/sentry/runner/commands/test_migrations.py @@ -62,10 +62,13 @@ def test_index_creation(self): assert len(matched) == 0 def test_migration_skipped_by_router(self): - with override_settings( - INSTALLED_APPS=("fixtures.safe_migrations_apps.migration_test_app",), - MIGRATION_MODULES={}, - ), patch.object(router, "allow_migrate") as mock_allow: + with ( + 
override_settings( + INSTALLED_APPS=("fixtures.safe_migrations_apps.migration_test_app",), + MIGRATION_MODULES={}, + ), + patch.object(router, "allow_migrate") as mock_allow, + ): mock_allow.return_value = False result = self.invoke("run", "migration_test_app", "0001") diff --git a/tests/sentry/seer/similarity/test_utils.py b/tests/sentry/seer/similarity/test_utils.py index 4016098c93fd94..2bbe76601e432f 100644 --- a/tests/sentry/seer/similarity/test_utils.py +++ b/tests/sentry/seer/similarity/test_utils.py @@ -495,9 +495,9 @@ def test_contributing_exception_no_frames(self): def test_contributing_exception_no_contributing_frames(self): data_no_contributing_frame = copy.deepcopy(self.BASE_APP_DATA) - data_no_contributing_frame["app"]["component"]["values"][0]["values"][0][ - "values" - ] = self.create_frames(1, False) + data_no_contributing_frame["app"]["component"]["values"][0]["values"][0]["values"] = ( + self.create_frames(1, False) + ) stacktrace_str = get_stacktrace_string(data_no_contributing_frame) assert stacktrace_str == "ZeroDivisionError: division by zero" diff --git a/tests/sentry/sentry_apps/test_sentry_app_installation_creator.py b/tests/sentry/sentry_apps/test_sentry_app_installation_creator.py index 4bccdd5edc9ce9..06ac7442bba103 100644 --- a/tests/sentry/sentry_apps/test_sentry_app_installation_creator.py +++ b/tests/sentry/sentry_apps/test_sentry_app_installation_creator.py @@ -127,7 +127,10 @@ def test_installed_status(self): @responses.activate @patch("sentry.analytics.record") def test_records_analytics(self, record): - SentryAppInstallationCreator(organization_id=self.org.id, slug="nulldb",).run( + SentryAppInstallationCreator( + organization_id=self.org.id, + slug="nulldb", + ).run( user=self.user, request=self.make_request(user=self.user, method="GET"), ) diff --git a/tests/sentry/sentry_metrics/test_snuba.py b/tests/sentry/sentry_metrics/test_snuba.py index 3f6212551cb0f7..6a1a8b76a3beba 100644 --- a/tests/sentry/sentry_metrics/test_snuba.py +++ b/tests/sentry/sentry_metrics/test_snuba.py @@ -16,7 +16,6 @@ def setUp(self): class SnubaMetricsInterfaceTest(MetricsInterfaceTestCase): - """ A sample test case that shows the process of writing the metric via the Snuba HTTP endpoint, and then diff --git a/tests/sentry/shared_integrations/client/test_proxy.py b/tests/sentry/shared_integrations/client/test_proxy.py index a7bc7e7f42d74c..d447b7afef073f 100644 --- a/tests/sentry/shared_integrations/client/test_proxy.py +++ b/tests/sentry/shared_integrations/client/test_proxy.py @@ -206,17 +206,20 @@ def test_get_control_silo_ip_address(): with override_settings(SENTRY_CONTROL_ADDRESS=control_address): get_control_silo_ip_address.cache_clear() - with patch("socket.gethostbyname") as mock_gethostbyname, patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception: + with ( + patch("socket.gethostbyname") as mock_gethostbyname, + patch("sentry_sdk.capture_exception") as mock_capture_exception, + ): mock_gethostbyname.return_value = "172.31.255.255" assert get_control_silo_ip_address() == ipaddress.ip_address("172.31.255.255") assert mock_capture_exception.call_count == 0 get_control_silo_ip_address.cache_clear() - with patch("socket.gethostbyname") as mock_gethostbyname, patch( - "urllib3.util.parse_url" - ) as mock_parse_url, patch("sentry_sdk.capture_exception") as mock_capture_exception: + with ( + patch("socket.gethostbyname") as mock_gethostbyname, + patch("urllib3.util.parse_url") as mock_parse_url, + patch("sentry_sdk.capture_exception") as 
mock_capture_exception, + ): mock_parse_url.return_value = MagicMock(host=None) assert get_control_silo_ip_address() is None assert mock_gethostbyname.call_count == 0 diff --git a/tests/sentry/silo/test_base.py b/tests/sentry/silo/test_base.py index 8b1857dbfee392..e33b9f85b354ec 100644 --- a/tests/sentry/silo/test_base.py +++ b/tests/sentry/silo/test_base.py @@ -26,7 +26,7 @@ def test_all_endpoints_have_silo_mode_decorator(self): undecorated_endpoint_classes = [] url_mappings = django.urls.get_resolver().reverse_dict.items() - for (view_function, bindings) in url_mappings: + for view_function, bindings in url_mappings: view_class = getattr(view_function, "view_class", None) if ( view_class diff --git a/tests/sentry/silo/test_client.py b/tests/sentry/silo/test_client.py index 1124f5af862ff7..bc017ad352b56d 100644 --- a/tests/sentry/silo/test_client.py +++ b/tests/sentry/silo/test_client.py @@ -313,9 +313,11 @@ def test_invalid_region_silo_ip_address(self): region_config = (region,) # Disallow any region silo ip address by default. - with override_regions(region_config), patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception, raises(ApiHostError): + with ( + override_regions(region_config), + patch("sentry_sdk.capture_exception") as mock_capture_exception, + raises(ApiHostError), + ): assert mock_capture_exception.call_count == 0 client = RegionSiloClient(region) @@ -328,12 +330,11 @@ def test_invalid_region_silo_ip_address(self): assert isinstance(err, RegionResolutionError) assert err.args == ("Disallowed Region Silo IP address: 172.31.255.31",) - with override_regions(region_config), patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception, override_allowed_region_silo_ip_addresses( - "172.31.255.255" - ), raises( - ApiHostError + with ( + override_regions(region_config), + patch("sentry_sdk.capture_exception") as mock_capture_exception, + override_allowed_region_silo_ip_addresses("172.31.255.255"), + raises(ApiHostError), ): assert mock_capture_exception.call_count == 0 @@ -354,9 +355,12 @@ def test_client_restricted_ip_address(self): region = Region("eu", 1, internal_region_address, RegionCategory.MULTI_TENANT) region_config = (region,) - with override_regions(region_config), patch( - "sentry.silo.client.validate_region_ip_address" - ) as mock_validate_region_ip_address: + with ( + override_regions(region_config), + patch( + "sentry.silo.client.validate_region_ip_address" + ) as mock_validate_region_ip_address, + ): client = RegionSiloClient(region) path = f"{internal_region_address}/api/0/imaginary-public-endpoint/" request = self.factory.get(path, HTTP_HOST="https://control.sentry.io") @@ -379,28 +383,29 @@ def test_validate_region_ip_address(ip): def test_validate_region_ip_address(): - with patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception, override_allowed_region_silo_ip_addresses(): + with ( + patch("sentry_sdk.capture_exception") as mock_capture_exception, + override_allowed_region_silo_ip_addresses(), + ): assert validate_region_ip_address("172.31.255.255") is False assert mock_capture_exception.call_count == 1 err = mock_capture_exception.call_args.args[0] assert isinstance(err, RegionResolutionError) assert err.args == ("allowed_region_ip_addresses is empty for: 172.31.255.255",) - with patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception, override_allowed_region_silo_ip_addresses("192.88.99.0"): + with ( + patch("sentry_sdk.capture_exception") as mock_capture_exception, + 
override_allowed_region_silo_ip_addresses("192.88.99.0"), + ): assert validate_region_ip_address("172.31.255.255") is False assert mock_capture_exception.call_count == 1 err = mock_capture_exception.call_args.args[0] assert isinstance(err, RegionResolutionError) assert err.args == ("Disallowed Region Silo IP address: 172.31.255.255",) - with patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception, override_allowed_region_silo_ip_addresses( - "192.88.99.0", "172.31.255.255" + with ( + patch("sentry_sdk.capture_exception") as mock_capture_exception, + override_allowed_region_silo_ip_addresses("192.88.99.0", "172.31.255.255"), ): assert validate_region_ip_address("172.31.255.255") is True assert mock_capture_exception.call_count == 0 @@ -411,18 +416,21 @@ def test_get_region_ip_addresses(): region = Region("eu", 1, internal_region_address, RegionCategory.MULTI_TENANT) region_config = (region,) - with override_regions(region_config), patch( - "socket.gethostbyname" - ) as mock_gethostbyname, patch("sentry_sdk.capture_exception") as mock_capture_exception: + with ( + override_regions(region_config), + patch("socket.gethostbyname") as mock_gethostbyname, + patch("sentry_sdk.capture_exception") as mock_capture_exception, + ): mock_gethostbyname.return_value = "172.31.255.255" assert get_region_ip_addresses() == frozenset([ipaddress.ip_address("172.31.255.255")]) assert mock_capture_exception.call_count == 0 - with override_regions(region_config), patch( - "socket.gethostbyname" - ) as mock_gethostbyname, patch("urllib3.util.parse_url") as mock_parse_url, patch( - "sentry_sdk.capture_exception" - ) as mock_capture_exception: + with ( + override_regions(region_config), + patch("socket.gethostbyname") as mock_gethostbyname, + patch("urllib3.util.parse_url") as mock_parse_url, + patch("sentry_sdk.capture_exception") as mock_capture_exception, + ): mock_parse_url.return_value = MagicMock(host=None) assert get_region_ip_addresses() == frozenset([]) assert mock_gethostbyname.call_count == 0 diff --git a/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py b/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py index c25ab701395345..7ce24680192aa3 100644 --- a/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py +++ b/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py @@ -1,6 +1,7 @@ """ Metrics Service Layer Tests for Release Health """ + import time import pytest diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index c6fc9285445e7e..80de27eadeb67e 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -3081,8 +3081,7 @@ def test_ran_if_crash_feedback_envelope(self): @pytest.mark.skip( reason="Skip this test since there's no way to have issueless events in the issue platform" ) - def test_issueless(self): - ... + def test_issueless(self): ... 
def test_no_cache_abort(self): # We don't use the cache for generic issues, so skip this test diff --git a/tests/sentry/uptime/consumers/test_results_consumers.py b/tests/sentry/uptime/consumers/test_results_consumers.py index 1affefef3120b9..b498603d3468ac 100644 --- a/tests/sentry/uptime/consumers/test_results_consumers.py +++ b/tests/sentry/uptime/consumers/test_results_consumers.py @@ -341,9 +341,10 @@ def test_resolve(self): status=CHECKSTATUS_SUCCESS, scheduled_check_time=datetime.now() - timedelta(minutes=3), ) - with mock.patch( - "sentry.uptime.consumers.results_consumer.metrics" - ) as metrics, self.feature("organizations:uptime-create-issues"): + with ( + mock.patch("sentry.uptime.consumers.results_consumer.metrics") as metrics, + self.feature("organizations:uptime-create-issues"), + ): self.send_result(result) metrics.incr.assert_has_calls( [ diff --git a/tests/sentry/uptime/detectors/test_ranking.py b/tests/sentry/uptime/detectors/test_ranking.py index dc2f8c5e1084bc..db19837c29e18d 100644 --- a/tests/sentry/uptime/detectors/test_ranking.py +++ b/tests/sentry/uptime/detectors/test_ranking.py @@ -86,8 +86,9 @@ def test(self): self.assert_url_count(project_2, url_2, 1, project_2_url_expiry) def test_trim(self): - with mock.patch("sentry.uptime.detectors.ranking.RANKED_TRIM_CHANCE", new=1), mock.patch( - "sentry.uptime.detectors.ranking.RANKED_MAX_SIZE", new=2 + with ( + mock.patch("sentry.uptime.detectors.ranking.RANKED_TRIM_CHANCE", new=1), + mock.patch("sentry.uptime.detectors.ranking.RANKED_MAX_SIZE", new=2), ): key = get_project_base_url_rank_key(self.project) url_1 = "https://sentry.io" diff --git a/tests/sentry/uptime/subscriptions/test_tasks.py b/tests/sentry/uptime/subscriptions/test_tasks.py index 17eb5a8b2734a1..49420e022ad5df 100644 --- a/tests/sentry/uptime/subscriptions/test_tasks.py +++ b/tests/sentry/uptime/subscriptions/test_tasks.py @@ -35,11 +35,13 @@ def _setup_producer(self): def assert_producer_calls(self, *args: UptimeSubscription | str): expected_payloads = [ - UPTIME_CONFIGS_CODEC.encode( - uptime_subscription_to_check_config(arg, str(arg.subscription_id)) + ( + UPTIME_CONFIGS_CODEC.encode( + uptime_subscription_to_check_config(arg, str(arg.subscription_id)) + ) + if isinstance(arg, UptimeSubscription) + else None ) - if isinstance(arg, UptimeSubscription) - else None for arg in args ] expected_message_ids = [ diff --git a/tests/sentry/uptime/test_models.py b/tests/sentry/uptime/test_models.py index a98e236f2302e5..06a77d8ab89279 100644 --- a/tests/sentry/uptime/test_models.py +++ b/tests/sentry/uptime/test_models.py @@ -30,8 +30,9 @@ def test(self): interval_seconds=60, method="GET", ) - with pytest.raises(IntegrityError), transaction.atomic( - router.db_for_write(UptimeSubscription) + with ( + pytest.raises(IntegrityError), + transaction.atomic(router.db_for_write(UptimeSubscription)), ): self.create_uptime_subscription( url="https://santry.io", @@ -50,8 +51,9 @@ def test(self): headers={"hi": "santry", "auth": "sentaur"}, ) - with pytest.raises(IntegrityError), transaction.atomic( - router.db_for_write(UptimeSubscription) + with ( + pytest.raises(IntegrityError), + transaction.atomic(router.db_for_write(UptimeSubscription)), ): self.create_uptime_subscription( url="https://santry.io", @@ -65,8 +67,9 @@ def test(self): headers={"hi": "santry", "auth": "sentaur"}, body="hello", ) - with pytest.raises(IntegrityError), transaction.atomic( - router.db_for_write(UptimeSubscription) + with ( + pytest.raises(IntegrityError), + 
transaction.atomic(router.db_for_write(UptimeSubscription)), ): self.create_uptime_subscription( url="https://santry.io", diff --git a/tests/sentry/utils/locking/test_lock.py b/tests/sentry/utils/locking/test_lock.py index 4eec90e68eecd0..60551b5df3bf4a 100644 --- a/tests/sentry/utils/locking/test_lock.py +++ b/tests/sentry/utils/locking/test_lock.py @@ -61,9 +61,10 @@ class MockTime: def incr(cls, delta): cls.time += delta - with patch( - "sentry.utils.locking.lock.time.monotonic", side_effect=lambda: MockTime.time - ), patch("sentry.utils.locking.lock.time.sleep", side_effect=MockTime.incr) as mock_sleep: + with ( + patch("sentry.utils.locking.lock.time.monotonic", side_effect=lambda: MockTime.time), + patch("sentry.utils.locking.lock.time.sleep", side_effect=MockTime.incr) as mock_sleep, + ): with pytest.raises(UnableToAcquireLock): lock.blocking_acquire(initial_delay=0.1, timeout=1, exp_base=2) diff --git a/tests/sentry/utils/sdk_crashes/test_event_stripper.py b/tests/sentry/utils/sdk_crashes/test_event_stripper.py index 7cd70eb571931f..8cf5b71078e06c 100644 --- a/tests/sentry/utils/sdk_crashes/test_event_stripper.py +++ b/tests/sentry/utils/sdk_crashes/test_event_stripper.py @@ -332,15 +332,15 @@ def test_strip_frames_sdk_frames_keep_after_matcher(store_and_strip_event, confi sentry_sdk_frame = frames[-1] - sentry_sdk_frame[ - "module" - ] = "Users/sentry/git-repos/sentry-react-native/dist/js/integrations/reactnative" - sentry_sdk_frame[ - "filename" - ] = "/Users/sentry/git-repos/sentry-react-native/dist/js/integrations/reactnative.js" - sentry_sdk_frame[ - "abs_path" - ] = "app:///Users/sentry/git-repos/sentry-react-native/dist/js/integrations/reactnative.js" + sentry_sdk_frame["module"] = ( + "Users/sentry/git-repos/sentry-react-native/dist/js/integrations/reactnative" + ) + sentry_sdk_frame["filename"] = ( + "/Users/sentry/git-repos/sentry-react-native/dist/js/integrations/reactnative.js" + ) + sentry_sdk_frame["abs_path"] = ( + "app:///Users/sentry/git-repos/sentry-react-native/dist/js/integrations/reactnative.js" + ) event_data = get_crash_event_with_frames(frames) diff --git a/tests/sentry/utils/test_json.py b/tests/sentry/utils/test_json.py index 4c1b8d323c48d8..49079e4e9694d1 100644 --- a/tests/sentry/utils/test_json.py +++ b/tests/sentry/utils/test_json.py @@ -98,7 +98,10 @@ def test_apply_key_filter_with_key_list(self): } keep_keys = ["dogs_are_great", "good_dogs"] - assert json.apply_key_filter(dog_data, keep_keys=keep_keys,) == { + assert json.apply_key_filter( + dog_data, + keep_keys=keep_keys, + ) == { "dogs_are_great": True, "good_dogs": "all", } @@ -111,7 +114,10 @@ def test_apply_key_filter_with_callback(self): } keep_keys = ["dogs_are_great", "good_dogs"] - assert json.apply_key_filter(dog_data, key_filter=lambda key: key in keep_keys,) == { + assert json.apply_key_filter( + dog_data, + key_filter=lambda key: key in keep_keys, + ) == { "dogs_are_great": True, "good_dogs": "all", } @@ -123,7 +129,9 @@ def test_apply_key_filter_no_filter(self): "bad_dogs": None, } - assert json.apply_key_filter(dog_data,) == { + assert json.apply_key_filter( + dog_data, + ) == { "dogs_are_great": True, "good_dogs": "all", "bad_dogs": None, diff --git a/tests/sentry/web/frontend/test_vercel_extension_configuration.py b/tests/sentry/web/frontend/test_vercel_extension_configuration.py index f5b449cadedd89..81b013f6a2d5f1 100644 --- a/tests/sentry/web/frontend/test_vercel_extension_configuration.py +++ b/tests/sentry/web/frontend/test_vercel_extension_configuration.py @@ -75,8 +75,9 @@ 
def test_logged_in_one_org(self): @responses.activate def test_logged_in_as_member(self): - with assume_test_silo_mode(SiloMode.REGION), unguarded_write( - using=router.db_for_write(OrganizationMember) + with ( + assume_test_silo_mode(SiloMode.REGION), + unguarded_write(using=router.db_for_write(OrganizationMember)), ): OrganizationMember.objects.filter(user_id=self.user.id, organization=self.org).update( role="member" diff --git a/tests/sentry/web/test_client_config.py b/tests/sentry/web/test_client_config.py index 584ef6c901a6a2..35bc6eddf5cc98 100644 --- a/tests/sentry/web/test_client_config.py +++ b/tests/sentry/web/test_client_config.py @@ -138,8 +138,9 @@ def test_client_config_features(): assert "organizations:create" in result["features"] assert "system:multi-region" not in result["features"] - with override_options({"auth.allow-registration": True}), Feature( - {"auth:register": True, "system:multi-region": True} + with ( + override_options({"auth.allow-registration": True}), + Feature({"auth:register": True, "system:multi-region": True}), ): result = get_client_config(request) diff --git a/tests/snuba/api/endpoints/test_discover_key_transactions.py b/tests/snuba/api/endpoints/test_discover_key_transactions.py index b2b08963b7a561..768dac5aab7d28 100644 --- a/tests/snuba/api/endpoints/test_discover_key_transactions.py +++ b/tests/snuba/api/endpoints/test_discover_key_transactions.py @@ -26,8 +26,9 @@ def setUp(self): class ClientCallable(Protocol): - def __call__(self, url: str, data: dict[str, Any], format: str, **kwargs: Any) -> HttpResponse: - ... + def __call__( + self, url: str, data: dict[str, Any], format: str, **kwargs: Any + ) -> HttpResponse: ... class TeamKeyTransactionTest(TeamKeyTransactionTestBase): diff --git a/tests/snuba/api/endpoints/test_organization_sessions.py b/tests/snuba/api/endpoints/test_organization_sessions.py index 8019ce8f760bb9..cdd58ef258e068 100644 --- a/tests/snuba/api/endpoints/test_organization_sessions.py +++ b/tests/snuba/api/endpoints/test_organization_sessions.py @@ -956,8 +956,9 @@ def test_duration_percentiles_groupby(self): @freeze_time(MOCK_DATETIME) def test_snuba_limit_exceeded(self): # 2 * 4 => only show two groups - with patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 8), patch( - "sentry.snuba.metrics.query.MAX_POINTS", 8 + with ( + patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 8), + patch("sentry.snuba.metrics.query.MAX_POINTS", 8), ): response = self.do_request( { @@ -996,8 +997,9 @@ def test_snuba_limit_exceeded(self): def test_snuba_limit_exceeded_groupby_status(self): """Get consistent result when grouping by status""" # 2 * 4 => only show two groups - with patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 8), patch( - "sentry.snuba.metrics.query.MAX_POINTS", 8 + with ( + patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 8), + patch("sentry.snuba.metrics.query.MAX_POINTS", 8), ): response = self.do_request( { diff --git a/tests/symbolicator/test_payload_full.py b/tests/symbolicator/test_payload_full.py index b4a5f549265ce2..6b76dfb24e5b65 100644 --- a/tests/symbolicator/test_payload_full.py +++ b/tests/symbolicator/test_payload_full.py @@ -92,8 +92,9 @@ def initialize(self, live_server): self.project.update_option("sentry:builtin_symbol_sources", []) self.min_ago = before_now(minutes=1).isoformat() - with patch("sentry.auth.system.is_internal_ip", return_value=True), self.options( - {"system.url-prefix": live_server.url} + with ( + patch("sentry.auth.system.is_internal_ip", return_value=True), + self.options({"system.url-prefix": 
live_server.url}), ): # Run test case yield diff --git a/tests/symbolicator/test_unreal_full.py b/tests/symbolicator/test_unreal_full.py index 7da423a79b288b..b9c8cc11dca6c6 100644 --- a/tests/symbolicator/test_unreal_full.py +++ b/tests/symbolicator/test_unreal_full.py @@ -41,8 +41,9 @@ class SymbolicatorUnrealIntegrationTest(RelayStoreHelper, TransactionTestCase): def initialize(self, live_server): self.project.update_option("sentry:builtin_symbol_sources", []) - with patch("sentry.auth.system.is_internal_ip", return_value=True), self.options( - {"system.url-prefix": live_server.url} + with ( + patch("sentry.auth.system.is_internal_ip", return_value=True), + self.options({"system.url-prefix": live_server.url}), ): # Run test case yield