Merge branch 'master' into raj/api-idorslug/rename-path-params/organization_slug
iamrajjoshi authored May 13, 2024
2 parents 6447350 + 5d320d2 commit 6dbff1a
Showing 19 changed files with 234 additions and 149 deletions.
8 changes: 8 additions & 0 deletions pyproject.toml
@@ -566,6 +566,7 @@ module = [
"sentry.issues.endpoints.group_events",
"sentry.issues.endpoints.organization_activity",
"sentry.issues.endpoints.organization_release_previous_commits",
"sentry.issues.endpoints.organization_searches",
"sentry.issues.endpoints.project_stacktrace_link",
"sentry.issues.escalating_group_forecast",
"sentry.issues.escalating_issues_alg",
@@ -643,17 +644,24 @@ module = [
"tests.sentry.issues.endpoints.test_actionable_items",
"tests.sentry.issues.endpoints.test_organization_activity",
"tests.sentry.issues.endpoints.test_organization_searches",
"tests.sentry.issues.endpoints.test_project_stacktrace_link",
"tests.sentry.issues.endpoints.test_source_map_debug",
"tests.sentry.issues.test_attributes",
"tests.sentry.issues.test_escalating",
"tests.sentry.issues.test_escalating_issues_alg",
"tests.sentry.issues.test_group_attributes_dataset",
"tests.sentry.issues.test_grouptype",
"tests.sentry.issues.test_ignored",
"tests.sentry.issues.test_ingest",
"tests.sentry.issues.test_issue_occurrence",
"tests.sentry.issues.test_issue_velocity",
"tests.sentry.issues.test_json_schemas",
"tests.sentry.issues.test_merge",
"tests.sentry.issues.test_occurrence_consumer",
"tests.sentry.issues.test_ongoing",
"tests.sentry.issues.test_priority",
"tests.sentry.issues.test_producer",
"tests.sentry.issues.test_run",
"tests.sentry.issues.test_search_issues_dataset",
"tests.sentry.issues.test_status_change",
"tests.sentry.issues.test_status_change_consumer",
2 changes: 2 additions & 0 deletions src/sentry/conf/server.py
@@ -1794,6 +1794,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"organizations:related-events": False,
# Enable related issues feature
"organizations:related-issues": False,
# Enable related issues in issue details page
"organizations:related-issues-issue-details-page": False,
# Enable usage of external relays, for use with Relay. See
# https://github.com/getsentry/relay.
"organizations:relay": True,
1 change: 1 addition & 0 deletions src/sentry/features/temporary.py
@@ -181,6 +181,7 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:project-stats", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:related-events", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
manager.add("organizations:related-issues", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:related-issues-issue-details-page", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:relay-cardinality-limiter", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
manager.add("organizations:release-comparison-performance", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:releases-v2", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
20 changes: 12 additions & 8 deletions src/sentry/issues/attributes.py
@@ -211,7 +211,7 @@ def _bulk_retrieve_snapshot_values(
@receiver(
post_save, sender=Group, dispatch_uid="post_save_log_group_attributes_changed", weak=False
)
def post_save_log_group_attributes_changed(instance, sender, created, *args, **kwargs):
def post_save_log_group_attributes_changed(instance, sender, created, *args, **kwargs) -> None:
try:
if created:
_log_group_attributes_changed(Operation.CREATED, "group", None)
@@ -224,7 +224,7 @@ def post_save_log_group_attributes_changed(instance, sender, created, *args, **k


@receiver(post_update, sender=Group, dispatch_uid="post_update_group", weak=False)
def post_update_group(sender, updated_fields, model_ids, *args, **kwargs):
def post_update_group(sender, updated_fields, model_ids, *args, **kwargs) -> None:
try:
updated_fields = process_update_fields(updated_fields)
if updated_fields:
@@ -233,7 +233,7 @@ def post_update_group(sender, updated_fields, model_ids, *args, **kwargs):
logger.exception("failed to log group attributes after group_owner updated")


def process_update_fields(updated_fields):
def process_update_fields(updated_fields) -> set[str]:
if not updated_fields:
# we have no guarantees update_fields is used everywhere save() is called
# we'll need to assume any of the attributes are updated in that case
@@ -247,7 +247,7 @@ def process_update_fields(updated_fields):


@issue_deleted.connect(weak=False)
def on_issue_deleted_log_deleted(group, user, delete_type, **kwargs):
def on_issue_deleted_log_deleted(group, user, delete_type, **kwargs) -> None:
try:
_log_group_attributes_changed(Operation.DELETED, "group", "all")
send_snapshot_values(None, group, True)
@@ -256,7 +256,7 @@ def on_issue_deleted_log_deleted(group, user, delete_type, **kwargs):


@issue_assigned.connect(weak=False)
def on_issue_assigned_log_group_assignee_attributes_changed(project, group, user, **kwargs):
def on_issue_assigned_log_group_assignee_attributes_changed(project, group, user, **kwargs) -> None:
try:
_log_group_attributes_changed(Operation.UPDATED, "group_assignee", "all")
send_snapshot_values(None, group, False)
@@ -265,7 +265,9 @@ def on_issue_assigned_log_group_assignee_attributes_changed(project, group, user


@issue_unassigned.connect(weak=False)
def on_issue_unassigned_log_group_assignee_attributes_changed(project, group, user, **kwargs):
def on_issue_unassigned_log_group_assignee_attributes_changed(
project, group, user, **kwargs
) -> None:
try:
_log_group_attributes_changed(Operation.DELETED, "group_assignee", "all")
send_snapshot_values(None, group, False)
@@ -276,7 +278,9 @@ def on_issue_unassigned_log_group_assignee_attributes_changed(project, group, us
@receiver(
post_save, sender=GroupOwner, dispatch_uid="post_save_log_group_owner_changed", weak=False
)
def post_save_log_group_owner_changed(instance, sender, created, update_fields, *args, **kwargs):
def post_save_log_group_owner_changed(
instance, sender, created, update_fields, *args, **kwargs
) -> None:
try:
_log_group_attributes_changed(
Operation.CREATED if created else Operation.UPDATED, "group_owner", "all"
@@ -289,7 +293,7 @@ def post_save_log_group_owner_changed(instance, sender, created, update_fields,
@receiver(
post_delete, sender=GroupOwner, dispatch_uid="post_delete_log_group_owner_changed", weak=False
)
def post_delete_log_group_owner_changed(instance, sender, *args, **kwargs):
def post_delete_log_group_owner_changed(instance, sender, *args, **kwargs) -> None:
try:
_log_group_attributes_changed(Operation.DELETED, "group_owner", "all")
send_snapshot_values(instance.group_id, None, False)
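The changes in this file only add "-> None" return annotations to Django signal receivers, presumably as part of the same typing effort tracked by the per-module list in pyproject.toml above. A minimal, self-contained sketch of the receiver pattern being annotated; the handler name and dispatch_uid below are illustrative, not part of this commit:

    import logging

    from django.db.models.signals import post_save
    from django.dispatch import receiver

    from sentry.models.group import Group

    logger = logging.getLogger(__name__)

    @receiver(post_save, sender=Group, dispatch_uid="example_log_group_saved", weak=False)
    def example_log_group_saved(instance, sender, created, *args, **kwargs) -> None:
        # Illustrative handler with the same shape as the receivers above:
        # log the change, and never let a logging failure break the save path.
        try:
            logger.info("group %s", "created" if created else "updated")
        except Exception:
            logger.exception("failed to log group change")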
2 changes: 1 addition & 1 deletion src/sentry/issues/endpoints/organization_group_index.py
@@ -177,7 +177,7 @@ def _search(
result = inbox_search(**query_kwargs)
else:

def use_group_snuba_dataset():
def use_group_snuba_dataset() -> bool:
# if useGroupSnubaDataset we consider using the snuba dataset
if not request.GET.get("useGroupSnubaDataset"):
return False
5 changes: 3 additions & 2 deletions src/sentry/issues/endpoints/organization_searches.py
@@ -13,6 +13,7 @@
OrganizationSearchAdminSerializer,
OrganizationSearchMemberSerializer,
)
from sentry.models.organization import Organization
from sentry.models.savedsearch import SavedSearch, Visibility
from sentry.models.search_common import SearchType

@@ -26,7 +27,7 @@ class OrganizationSearchesEndpoint(OrganizationEndpoint):
owner = ApiOwner.ISSUES
permission_classes = (OrganizationSearchPermission,)

def get(self, request: Request, organization) -> Response:
def get(self, request: Request, organization: Organization) -> Response:
"""
List an Organization's saved searches
`````````````````````````````````````
@@ -60,7 +61,7 @@ def get(self, request: Request, organization) -> Response:

return Response(serialize(list(query), request.user))

def post(self, request: Request, organization) -> Response:
def post(self, request: Request, organization: Organization) -> Response:
serializer: BaseOrganizationSearchSerializer
if request.access.has_scope("org:write"):
serializer = OrganizationSearchAdminSerializer(data=request.data)
2 changes: 1 addition & 1 deletion src/sentry/issues/endpoints/source_map_debug.py
@@ -80,7 +80,7 @@ def get(self, request: Request, project: Project, event_id: str) -> Response:

return self._create_response(issue, data)

def _create_response(self, issue=None, data=None):
def _create_response(self, issue=None, data=None) -> Response:
errors_list = []
if issue:
response = SourceMapProcessingIssue(issue, data=data).get_api_context()
4 changes: 2 additions & 2 deletions src/sentry/issues/escalating.py
@@ -205,7 +205,7 @@ def _query_metrics_with_pagination(
end_date: datetime,
all_results: list[GroupsCountResponse],
category: GroupCategory | None = None,
):
) -> None:
"""
Paginates Snuba metric queries for event counts for the
given list of project ids and groups ids in a time range.
@@ -296,7 +296,7 @@ def _generate_generic_metrics_backend_query(
end_date: datetime,
offset: int,
category: GroupCategory | None = None,
):
) -> MetricsQuery:
"""
This function generates a query to fetch the hourly events
for a group_id through the Generic Metrics Backend.
@@ -143,8 +143,9 @@ function DefaultGroupEventDetailsContent({
?.filter((x): x is EntryException => x.type === EntryType.EXCEPTION)
.flatMap(x => x.data.values ?? [])
.some(({value}) => {
const lowerText = value.toLowerCase();
const lowerText = value?.toLowerCase();
return (
lowerText &&
(lowerText.includes('api key') || lowerText.includes('429')) &&
(lowerText.includes('openai') ||
lowerText.includes('anthropic') ||
1 change: 1 addition & 0 deletions static/app/views/performance/cache/referrers.ts
@@ -9,4 +9,5 @@ export enum Referrer {
SAMPLES_CACHE_TRANSACTION_DURATION = 'api.performance.cache.samples-cache-transaction-duration',
SAMPLES_CACHE_SPAN_SAMPLES = 'api.performance.cache.samples-cache-span-samples',
SAMPLES_CACHE_SPAN_SAMPLES_TRANSACTION_DURATION = 'api.performance.cache.samples-cache-span-samples',
SAMPLES_CACHE_HIT_MISS_CHART = 'api.performance.cache.samples-cache-hit-miss-chart',
}