Skip to content

Commit

Permalink
ref: upgrade black to 24.10.0 (#79590)
Browse files Browse the repository at this point in the history
The getsentry change must merge first to minimize PR interruption:
getsentry/getsentry#15464

Note that the getsentry PR takes a different approach so that the
versions can be properly synchronized with the code.

<!-- Describe your PR here. -->
  • Loading branch information
asottile-sentry authored Oct 25, 2024
1 parent 6c0f21f commit bcb90b0
Show file tree
Hide file tree
Showing 200 changed files with 848 additions and 778 deletions.
1 change: 0 additions & 1 deletion bin/extension_language_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
Generate a mapping from file extensions to language for languages that are part of platforms supported by Sentry.
"""


import requests
import yaml

Expand Down
2 changes: 1 addition & 1 deletion bin/mock-replay
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def create_recording(replay_id, project_id, timestamp):
24,
),
]
for (segment_id, segment) in enumerate(segments):
for segment_id, segment in enumerate(segments):
store_replay_segments(replay_id, project_id, segment_id, segment)


Expand Down
2 changes: 1 addition & 1 deletion requirements-dev-frozen.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ attrs==23.1.0
avalara==20.9.0
beautifulsoup4==4.7.1
billiard==4.2.0
black==22.10.0
black==24.10.0
boto3==1.34.128
botocore==1.34.128
brotli==1.1.0
Expand Down
2 changes: 1 addition & 1 deletion requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ sentry-cli>=2.16.0

# pre-commit dependencies
pre-commit>=4
black>=22.10.0
black>=24.10.0
flake8>=7
flake8-bugbear>=22.10
flake8-logging>=1.5
Expand Down
1 change: 1 addition & 0 deletions src/flagpole/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
Each condition has a single operator. An operator takes a kind (`OperatorKind` enum)
and a value, the type of which depends on the operator specified.
"""

from __future__ import annotations

import dataclasses
Expand Down
6 changes: 3 additions & 3 deletions src/sentry/api/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,9 +153,9 @@ def apply_cors_headers(
"Content-Type, Authentication, Authorization, Content-Encoding, "
"sentry-trace, baggage, X-CSRFToken"
)
response[
"Access-Control-Expose-Headers"
] = "X-Sentry-Error, X-Sentry-Direct-Hit, X-Hits, X-Max-Hits, Endpoint, Retry-After, Link"
response["Access-Control-Expose-Headers"] = (
"X-Sentry-Error, X-Sentry-Direct-Hit, X-Hits, X-Max-Hits, Endpoint, Retry-After, Link"
)

if request.META.get("HTTP_ORIGIN") == "null":
# if ORIGIN header is explicitly specified as 'null' leave it alone
Expand Down
36 changes: 18 additions & 18 deletions src/sentry/api/endpoints/organization_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -496,16 +496,16 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
if decision == DashboardWidgetTypes.DISCOVER:
return _data_fn(discover, offset, limit, scoped_query)
elif decision == DashboardWidgetTypes.TRANSACTION_LIKE:
original_results["meta"][
"discoverSplitDecision"
] = DashboardWidgetTypes.get_type_name(
DashboardWidgetTypes.TRANSACTION_LIKE
original_results["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(
DashboardWidgetTypes.TRANSACTION_LIKE
)
)
return original_results
elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results:
error_results["meta"][
"discoverSplitDecision"
] = DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS)
error_results["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(DashboardWidgetTypes.ERROR_EVENTS)
)
return error_results
else:
return original_results
Expand Down Expand Up @@ -545,9 +545,9 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
limit,
scoped_query,
)
result["meta"][
"discoverSplitDecision"
] = DiscoverSavedQueryTypes.get_type_name(dataset_inferred_from_query)
result["meta"]["discoverSplitDecision"] = (
DiscoverSavedQueryTypes.get_type_name(dataset_inferred_from_query)
)

self.save_discover_saved_query_split_decision(
discover_query,
Expand Down Expand Up @@ -582,21 +582,21 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save

try:
error_results = map["errors"]
error_results["meta"][
"discoverSplitDecision"
] = DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.ERROR_EVENTS
error_results["meta"]["discoverSplitDecision"] = (
DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.ERROR_EVENTS
)
)
has_errors = len(error_results["data"]) > 0
except KeyError:
error_results = None

try:
transaction_results = map["transactions"]
transaction_results["meta"][
"discoverSplitDecision"
] = DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.TRANSACTION_LIKE
transaction_results["meta"]["discoverSplitDecision"] = (
DiscoverSavedQueryTypes.get_type_name(
DiscoverSavedQueryTypes.TRANSACTION_LIKE
)
)
has_transactions = len(transaction_results["data"]) > 0
except KeyError:
Expand Down
16 changes: 8 additions & 8 deletions src/sentry/api/endpoints/organization_events_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -470,10 +470,10 @@ def fn(
):
if not result.data.get("meta"):
result.data["meta"] = {}
result.data["meta"][
"discoverSplitDecision"
] = DashboardWidgetTypes.get_type_name(
DashboardWidgetTypes.TRANSACTION_LIKE
result.data["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(
DashboardWidgetTypes.TRANSACTION_LIKE
)
)
return original_results
elif decision == DashboardWidgetTypes.ERROR_EVENTS and error_results:
Expand All @@ -484,10 +484,10 @@ def fn(
):
if not result.data.get("meta"):
result.data["meta"] = {}
result.data["meta"][
"discoverSplitDecision"
] = DashboardWidgetTypes.get_type_name(
DashboardWidgetTypes.ERROR_EVENTS
result.data["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(
DashboardWidgetTypes.ERROR_EVENTS
)
)
return error_results
else:
Expand Down
56 changes: 35 additions & 21 deletions src/sentry/api/endpoints/organization_events_trends.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,9 +261,11 @@ def get_snql_function_aliases(trend_columns: TrendColumns, trend_type: str) -> d
if trend_type == IMPROVED
else aggregate_filter.operator
),
-1 * aggregate_filter.value.value
if trend_type == IMPROVED
else aggregate_filter.value.value,
(
-1 * aggregate_filter.value.value
if trend_type == IMPROVED
else aggregate_filter.value.value
),
),
["minus", "transaction.duration"],
trend_columns["trend_difference"],
Expand All @@ -276,9 +278,11 @@ def get_snql_function_aliases(trend_columns: TrendColumns, trend_type: str) -> d
if trend_type == REGRESSION
else aggregate_filter.operator
),
-1 * aggregate_filter.value.value
if trend_type == REGRESSION
else aggregate_filter.value.value,
(
-1 * aggregate_filter.value.value
if trend_type == REGRESSION
else aggregate_filter.value.value
),
),
None,
trend_columns["t_test"],
Expand All @@ -304,9 +308,11 @@ def get_function_aliases(trend_type):
"trend_percentage()": Alias(
lambda aggregate_filter: [
"trend_percentage",
CORRESPONDENCE_MAP[aggregate_filter.operator]
if trend_type == IMPROVED
else aggregate_filter.operator,
(
CORRESPONDENCE_MAP[aggregate_filter.operator]
if trend_type == IMPROVED
else aggregate_filter.operator
),
1 + (aggregate_filter.value.value * (-1 if trend_type == IMPROVED else 1)),
],
["percentage", "transaction.duration"],
Expand All @@ -315,25 +321,33 @@ def get_function_aliases(trend_type):
"trend_difference()": Alias(
lambda aggregate_filter: [
"trend_difference",
CORRESPONDENCE_MAP[aggregate_filter.operator]
if trend_type == IMPROVED
else aggregate_filter.operator,
-1 * aggregate_filter.value.value
if trend_type == IMPROVED
else aggregate_filter.value.value,
(
CORRESPONDENCE_MAP[aggregate_filter.operator]
if trend_type == IMPROVED
else aggregate_filter.operator
),
(
-1 * aggregate_filter.value.value
if trend_type == IMPROVED
else aggregate_filter.value.value
),
],
["minus", "transaction.duration"],
None,
),
"confidence()": Alias(
lambda aggregate_filter: [
"t_test",
CORRESPONDENCE_MAP[aggregate_filter.operator]
if trend_type == REGRESSION
else aggregate_filter.operator,
-1 * aggregate_filter.value.value
if trend_type == REGRESSION
else aggregate_filter.value.value,
(
CORRESPONDENCE_MAP[aggregate_filter.operator]
if trend_type == REGRESSION
else aggregate_filter.operator
),
(
-1 * aggregate_filter.value.value
if trend_type == REGRESSION
else aggregate_filter.value.value
),
],
None,
None,
Expand Down
30 changes: 15 additions & 15 deletions src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,18 +315,18 @@ def __init__(self, abs_path: str, project: Project, release: Release, event):
self.matching_source_file_names = ReleaseFile.normalize(abs_path)

# Source file lookup result variables
self.source_file_lookup_result: Literal[
"found", "wrong-dist", "unsuccessful"
] = "unsuccessful"
self.found_source_file_name: None | (
str
) = None # The name of the source file artifact that was found, e.g. "~/static/bundle.min.js"
self.source_map_reference: None | (
str
) = None # The source map reference as found in the source file or its headers, e.g. "https://example.com/static/bundle.min.js.map"
self.matching_source_map_name: None | (
str
) = None # The location where Sentry will look for the source map (relative to the source file), e.g. "bundle.min.js.map"
self.source_file_lookup_result: Literal["found", "wrong-dist", "unsuccessful"] = (
"unsuccessful"
)
self.found_source_file_name: None | (str) = (
None # The name of the source file artifact that was found, e.g. "~/static/bundle.min.js"
)
self.source_map_reference: None | (str) = (
None # The source map reference as found in the source file or its headers, e.g. "https://example.com/static/bundle.min.js.map"
)
self.matching_source_map_name: None | (str) = (
None # The location where Sentry will look for the source map (relative to the source file), e.g. "bundle.min.js.map"
)

# Cached db objects across operations
self.artifact_index_release_files: QuerySet | list[ReleaseFile] | None = None
Expand All @@ -337,9 +337,9 @@ def __init__(self, abs_path: str, project: Project, release: Release, event):
self._find_source_file_in_artifact_bundles()

# Source map lookup result variable
self.source_map_lookup_result: Literal[
"found", "wrong-dist", "unsuccessful"
] = "unsuccessful"
self.source_map_lookup_result: Literal["found", "wrong-dist", "unsuccessful"] = (
"unsuccessful"
)

if self.source_map_reference is not None and self.found_source_file_name is not None: # type: ignore[unreachable]
if self.source_map_reference.startswith("data:"): # type: ignore[unreachable]
Expand Down
16 changes: 10 additions & 6 deletions src/sentry/api/serializers/models/activity.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,12 +83,16 @@ def get_attrs(self, item_list, user, **kwargs):
return {
item: {
"user": users.get(str(item.user_id)) if item.user_id else None,
"source": groups.get(item.data["source_id"])
if item.type == ActivityType.UNMERGE_DESTINATION.value
else None,
"destination": groups.get(item.data["destination_id"])
if item.type == ActivityType.UNMERGE_SOURCE.value
else None,
"source": (
groups.get(item.data["source_id"])
if item.type == ActivityType.UNMERGE_DESTINATION.value
else None
),
"destination": (
groups.get(item.data["destination_id"])
if item.type == ActivityType.UNMERGE_SOURCE.value
else None
),
"commit": commits.get(item),
"pull_request": pull_requests.get(item),
}
Expand Down
8 changes: 5 additions & 3 deletions src/sentry/api/serializers/models/artifactbundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,9 +84,11 @@ def serialize(self, obj, attrs, user, **kwargs):
return {
"id": base64.urlsafe_b64encode(attrs["file_path"].encode()).decode(),
# In case the file type string was invalid, we return the sentinel value INVALID_SOURCE_FILE_TYPE.
"fileType": attrs["file_type"].value
if attrs["file_type"] is not None
else INVALID_SOURCE_FILE_TYPE,
"fileType": (
attrs["file_type"].value
if attrs["file_type"] is not None
else INVALID_SOURCE_FILE_TYPE
),
# We decided to return the file url as file path for better searchability.
"filePath": attrs["file_url"],
"fileSize": attrs["file_info"].file_size if attrs["file_info"] is not None else None,
Expand Down
8 changes: 5 additions & 3 deletions src/sentry/api/serializers/models/auditlogentry.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,11 @@ def get_attrs(self, item_list, user, **kwargs):

return {
item: {
"actor": users[str(item.actor_id)]
if item.actor_id and not override_actor_id(item.actor)
else {"name": item.get_actor_name()},
"actor": (
users[str(item.actor_id)]
if item.actor_id and not override_actor_id(item.actor)
else {"name": item.get_actor_name()}
),
"targetUser": users.get(str(item.target_user_id)) or item.target_user_id,
}
for item in item_list
Expand Down
6 changes: 3 additions & 3 deletions src/sentry/api/serializers/models/orgauthtoken.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@ def serialize(self, obj, attrs, user, **kwargs):
"tokenLastCharacters": obj.token_last_characters,
"dateCreated": obj.date_added,
"dateLastUsed": obj.date_last_used,
"projectLastUsedId": str(obj.project_last_used_id)
if obj.project_last_used_id
else None,
"projectLastUsedId": (
str(obj.project_last_used_id) if obj.project_last_used_id else None
),
}

if token:
Expand Down
6 changes: 3 additions & 3 deletions src/sentry/api/serializers/models/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,9 @@ def serialize(self, obj, attrs, user, **kwargs):
],
"doc": doc,
"firstPartyAlternative": getattr(obj, "alternative", None),
"deprecationDate": deprecation_date.strftime("%b %-d, %Y")
if deprecation_date
else None,
"deprecationDate": (
deprecation_date.strftime("%b %-d, %Y") if deprecation_date else None
),
"altIsSentryApp": getattr(obj, "alt_is_sentry_app", None),
}
if self.project:
Expand Down
18 changes: 10 additions & 8 deletions src/sentry/api/serializers/snuba.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,15 +101,17 @@ def serialize(
rv.append((k, row))

res = {
"data": zerofill(
rv,
result.start,
result.end,
result.rollup,
allow_partial_buckets=allow_partial_buckets,
"data": (
zerofill(
rv,
result.start,
result.end,
result.rollup,
allow_partial_buckets=allow_partial_buckets,
)
if zerofill_results
else rv
)
if zerofill_results
else rv
}

if result.data.get("totals"):
Expand Down
Loading

0 comments on commit bcb90b0

Please sign in to comment.