From b6b49f480a24865836efc4d61820104a841087c3 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 6 May 2024 10:47:19 -0400 Subject: [PATCH 001/376] ref: move py2 crc32 function to the only user (#70333) --- src/sentry/tsdb/redis.py | 10 ++++++++-- src/sentry/utils/compat/__init__.py | 24 ------------------------ 2 files changed, 8 insertions(+), 26 deletions(-) delete mode 100644 src/sentry/utils/compat/__init__.py diff --git a/src/sentry/tsdb/redis.py b/src/sentry/tsdb/redis.py index 6848321bc0752f..37088486c45f7d 100644 --- a/src/sentry/tsdb/redis.py +++ b/src/sentry/tsdb/redis.py @@ -1,3 +1,4 @@ +import binascii import itertools import logging import random @@ -15,7 +16,6 @@ from redis.client import Script from sentry.tsdb.base import BaseTSDB, IncrMultiOptions, TSDBModel -from sentry.utils.compat import crc32 from sentry.utils.dates import to_datetime from sentry.utils.redis import ( check_cluster_versions, @@ -34,6 +34,12 @@ CountMinScript = load_redis_script("tsdb/cmsketch.lua") +def _crc32(data: bytes) -> int: + # python 2 equivalent crc32 to return signed + rv = binascii.crc32(data) + return rv - ((rv & 0x80000000) << 1) + + class SuppressionWrapper(Generic[T]): """\ Wraps a context manager and prevents any exceptions raised either during @@ -196,7 +202,7 @@ def make_counter_key( if isinstance(model_key, int): vnode = model_key % self.vnodes else: - vnode = crc32(force_bytes(model_key)) % self.vnodes + vnode = _crc32(force_bytes(model_key)) % self.vnodes return ( "{prefix}{model}:{epoch}:{vnode}".format( diff --git a/src/sentry/utils/compat/__init__.py b/src/sentry/utils/compat/__init__.py deleted file mode 100644 index 146041ef91a27b..00000000000000 --- a/src/sentry/utils/compat/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -from binascii import crc32 as _crc32 - - -# In python3 crc32 was changed to never return a signed value, which is -# different from the python2 implementation. As noted in -# https://docs.python.org/3/library/binascii.html#binascii.crc32 -# -# Note the documentation suggests the following: -# -# > Changed in version 3.0: The result is always unsigned. To generate the -# > same numeric value across all Python versions and platforms, use -# > crc32(data) & 0xffffffff. -# -# However this will not work when transitioning between versions, as the -# value MUST match what was generated in python 2. -# -# We can sign the return value using the following bit math to ensure we -# match the python2 output of crc32. -# -# XXX(BYK): This needs to stay as we transitioned from PY2 and still need to -# keep these compatible due to values stored in various places. -def crc32(*args): - rt = _crc32(*args) - return rt - ((rt & 0x80000000) << 1) From f904da640c8b27bfc0c71bcbfaeea4ab2cce8c42 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 6 May 2024 10:58:57 -0400 Subject: [PATCH 002/376] style: Module variable spacing (#70335) --- src/sentry/monitors/clock_dispatch.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/monitors/clock_dispatch.py b/src/sentry/monitors/clock_dispatch.py index 03cc1f2d93625d..28d2b436c67d91 100644 --- a/src/sentry/monitors/clock_dispatch.py +++ b/src/sentry/monitors/clock_dispatch.py @@ -20,6 +20,7 @@ from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition logger = logging.getLogger("sentry") + # This key is used to store the last timestamp that the tasks were triggered. 
MONITOR_TASKS_LAST_TRIGGERED_KEY = "sentry.monitors.last_tasks_ts" From 381b1a9bff2d3f038c76d1689339166d3f738d3e Mon Sep 17 00:00:00 2001 From: Mark Story Date: Mon, 6 May 2024 11:02:37 -0400 Subject: [PATCH 003/376] chore(actor) Add methods to RpcActor to match ActorTuple (#70168) As we remove the Actor model, I'd also like to streamline the Actor concept internally in the app. We currently have both `ActorTuple` and `RpcActor`. These changes add methods to `RpcActor` to ease callsites into using `RpcActor`. My plan is to replace usage of `ActorTuple` with `RpcActor` and then rename `RpcActor` to `Actor` so that we have fewer similarly named objects in the application. --- src/sentry/services/hybrid_cloud/actor.py | 78 +++++++++++++++++++- src/sentry/utils/actor.py | 8 ++- tests/sentry/hybridcloud/test_actor.py | 86 +++++++++++++++++++++++ 3 files changed, 170 insertions(+), 2 deletions(-) diff --git a/src/sentry/services/hybrid_cloud/actor.py b/src/sentry/services/hybrid_cloud/actor.py index 657aa2bb819ccf..d8f085dfc813c2 100644 --- a/src/sentry/services/hybrid_cloud/actor.py +++ b/src/sentry/services/hybrid_cloud/actor.py @@ -45,10 +45,36 @@ def __post_init__(self) -> None: def __hash__(self) -> int: return hash((self.id, self.actor_type)) + @classmethod + def resolve_many(cls, actors: Iterable["RpcActor"]) -> list["Team | RpcUser"]: + """ + Resolve a list of actors in a batch to the Team/User the Actor references. + + Will generate more efficient queries to load actors than calling + RpcActor.resolve() individually will. + """ + from sentry.models.team import Team + + if not actors: + return [] + actors_by_type: dict[ActorType, list[RpcActor]] = defaultdict(list) + for actor in actors: + actors_by_type[actor.actor_type].append(actor) + results: dict[tuple[ActorType, int], Team | RpcUser] = {} + for actor_type, actor_list in actors_by_type.items(): + if actor_type == ActorType.USER: + for user in user_service.get_many(filter={"user_ids": [u.id for u in actor_list]}): + results[(actor_type, user.id)] = user + if actor_type == ActorType.TEAM: + for team in Team.objects.filter(id__in=[t.id for t in actor_list]): + results[(actor_type, team.id)] = team + + return list(filter(None, [results.get((actor.actor_type, actor.id)) for actor in actors])) + @classmethod def many_from_object(cls, objects: Iterable[ActorTarget]) -> list["RpcActor"]: """ - Create a list of RpcActor instaces based on a collection of 'objects' + Create a list of RpcActor instances based on a collection of 'objects' Objects will be grouped by the kind of actor they would be related to. Queries for actors are batched to increase efficiency. 
Users that are @@ -129,6 +155,52 @@ def from_orm_team(cls, team: "Team") -> "RpcActor": def from_rpc_team(cls, team: RpcTeam) -> "RpcActor": return cls(id=team.id, actor_type=ActorType.TEAM, slug=team.slug) + @classmethod + def from_identifier(cls, id: str | int | None) -> "RpcActor | None": + """ + Parse an actor identifier into an RpcActor + + Forms `id` can take: + 1231 -> look up User by id + "1231" -> look up User by id + "user:1231" -> look up User by id + "team:1231" -> look up Team by id + "maiseythedog" -> look up User by username + "maisey@dogsrule.com" -> look up User by primary email + """ + if not id: + return None + # If we have an integer, fall back to assuming it's a User + if isinstance(id, int): + return cls(id=id, actor_type=ActorType.USER) + + # If the actor_identifier is a simple integer as a string, + # we're also a User + if id.isdigit(): + return cls(id=int(id), actor_type=ActorType.USER) + + if id.startswith("user:"): + return cls(id=int(id[5:]), actor_type=ActorType.USER) + + if id.startswith("team:"): + return cls(id=int(id[5:]), actor_type=ActorType.TEAM) + + try: + user = user_service.get_by_username(username=id)[0] + return cls(id=user.id, actor_type=ActorType.USER) + except IndexError as e: + raise ValueError(f"Unable to resolve actor identifier: {e}") + + @classmethod + def from_id(cls, user_id: int | None = None, team_id: int | None = None) -> "RpcActor": + if user_id and team_id: + raise ValueError("You can only provide one of user_id and team_id") + if user_id: + return cls(id=user_id, actor_type=ActorType.USER) + if team_id: + return cls(id=team_id, actor_type=ActorType.TEAM) + raise ValueError("You must provide one of user_id and team_id") + def __eq__(self, other: Any) -> bool: return ( isinstance(other, self.__class__) @@ -143,3 +215,7 @@ def resolve(self) -> Union["Team", "RpcUser"] | None: return Team.objects.filter(id=self.id).first() if self.actor_type == ActorType.USER: return user_service.get_user(user_id=self.id) + + @property + def identifier(self) -> str: + return f"{self.actor_type.lower()}:{self.id}" diff --git a/src/sentry/utils/actor.py b/src/sentry/utils/actor.py index a80f0358e46bb0..60c77997f497de 100644 --- a/src/sentry/utils/actor.py +++ b/src/sentry/utils/actor.py @@ -45,8 +45,9 @@ def from_actor_identifier(cls, actor_identifier: int | str | None) -> ActorTuple "team:1231" -> look up Team by id "maiseythedog" -> look up User by username "maisey@dogsrule.com" -> look up User by primary email - """ + Deprecated: Use RpcActor.from_identifier instead. + """ if not actor_identifier: return None @@ -73,6 +74,9 @@ def from_actor_identifier(cls, actor_identifier: int | str | None) -> ActorTuple @classmethod def from_id(cls, user_id: int | None, team_id: int | None) -> ActorTuple | None: + """ + Deprecated: Use RpcActor.from_id() instead. + """ from sentry.models.team import Team from sentry.models.user import User @@ -105,6 +109,8 @@ def resolve_many(cls, actors: Sequence[ActorTuple]) -> Sequence[Team | RpcUser]: as the input, minus any actors we couldn't resolve. 
         :param actors:
         :return:
+
+        Deprecated: Replace with RpcActor.many_from_object()
         """
         from sentry.models.user import User
         from sentry.services.hybrid_cloud.user.service import user_service
diff --git a/tests/sentry/hybridcloud/test_actor.py b/tests/sentry/hybridcloud/test_actor.py
index 72246c202cefa8..7ed45d9a8f3c13 100644
--- a/tests/sentry/hybridcloud/test_actor.py
+++ b/tests/sentry/hybridcloud/test_actor.py
@@ -1,4 +1,8 @@
+import pytest
+
+from sentry.models.team import Team
 from sentry.services.hybrid_cloud.actor import ActorType, RpcActor
+from sentry.services.hybrid_cloud.user.model import RpcUser
 from sentry.services.hybrid_cloud.user.service import user_service
 from sentry.testutils.factories import Factories
 from sentry.testutils.pytest.fixtures import django_db_all
@@ -17,6 +21,62 @@ def test_many_from_object_users():
     assert actors[1].actor_type == ActorType.USER


+@django_db_all(transaction=True)
+def test_from_identifier():
+    user = Factories.create_user()
+    org = Factories.create_organization(owner=user)
+    team = Factories.create_team(organization=org)
+
+    actor = RpcActor.from_identifier(user.id)
+    assert actor
+    assert actor.id == user.id
+    assert actor.actor_type == ActorType.USER
+
+    actor = RpcActor.from_identifier(str(user.id))
+    assert actor
+    assert actor.id == user.id
+    assert actor.actor_type == ActorType.USER
+
+    actor = RpcActor.from_identifier(f"user:{user.id}")
+    assert actor
+    assert actor.id == user.id
+    assert actor.actor_type == ActorType.USER
+
+    actor = RpcActor.from_identifier(user.username)
+    assert actor
+    assert actor.id == user.id
+    assert actor.actor_type == ActorType.USER
+
+    actor = RpcActor.from_identifier(user.email)
+    assert actor
+    assert actor.id == user.id
+    assert actor.actor_type == ActorType.USER
+    assert actor.identifier == f"user:{user.id}"
+
+    actor = RpcActor.from_identifier(f"team:{team.id}")
+    assert actor
+    assert actor.id == team.id
+    assert actor.actor_type == ActorType.TEAM
+    assert actor.identifier == f"team:{team.id}"
+
+
+def test_from_id():
+    actor = RpcActor.from_id(team_id=1)
+    assert actor
+    assert actor.id == 1
+    assert actor.actor_type == ActorType.TEAM
+
+    actor = RpcActor.from_id(user_id=11)
+    assert actor
+    assert actor.id == 11
+    assert actor.actor_type == ActorType.USER
+
+    with pytest.raises(ValueError):
+        RpcActor.from_id(user_id=11, team_id=99)
+    with pytest.raises(ValueError):
+        RpcActor.from_id(user_id=None)
+
+
 @django_db_all(transaction=True)
 def test_many_from_object_rpc_users():
     orm_users = [Factories.create_user(), Factories.create_user()]
@@ -71,3 +131,29 @@ def test_many_from_object_mixed():
     assert actors[1].id == teams[1].id
     assert actors[1].actor_type == ActorType.TEAM
     assert actors[1].slug
+
+
+@django_db_all(transaction=True)
+def test_resolve_many():
+    organization = Factories.create_organization()
+    team_one = Factories.create_team(organization=organization)
+    team_two = Factories.create_team(organization=organization)
+    user_one = Factories.create_user()
+    user_two = Factories.create_user()
+
+    members = [user_one, user_two, team_two, team_one]
+    actors = [RpcActor.from_object(m) for m in members]
+    resolved = RpcActor.resolve_many(actors)
+    assert len(resolved) == len(actors)
+
+    assert isinstance(resolved[0], RpcUser)
+    assert resolved[0].id == user_one.id
+
+    assert isinstance(resolved[1], RpcUser)
+    assert resolved[1].id == user_two.id
+
+    assert isinstance(resolved[2], Team)
+    assert resolved[2].id == team_two.id
+
+    assert isinstance(resolved[3], Team)
+    assert resolved[3].id == team_one.id
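
For reference, a minimal sketch of how the new RpcActor helpers in this patch
compose; the identifier values below are illustrative (not taken from the
patch), and the behavior follows the docstrings and tests above:

    # Hypothetical usage of RpcActor.from_identifier/from_id/resolve_many.
    from sentry.services.hybrid_cloud.actor import ActorType, RpcActor

    actor = RpcActor.from_identifier("team:42")
    assert actor.actor_type == ActorType.TEAM and actor.id == 42

    owner = RpcActor.from_id(user_id=7)
    assert owner.identifier == "user:7"

    # Resolves back to Team/RpcUser objects in input order, batching one
    # query per actor type instead of one query per actor.
    resolved = RpcActor.resolve_many([actor, owner])  # -> [Team, RpcUser]
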
From dab07edb6ae9e1f86a3a4e08f020431af87422e9 Mon Sep 17 00:00:00 2001
From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com>
Date: Mon, 6 May 2024 11:04:44 -0400
Subject: [PATCH 004/376] feat(new-trace): Fixed issue count mismatch. (#70291)

Co-authored-by: Abdullah Khan
---
 .../traceDrawer/details/issues/issues.tsx     | 77 ++++++++++++++-----
 .../newTraceDetails/traceModels/traceTree.tsx |  2 +-
 2 files changed, 57 insertions(+), 22 deletions(-)

diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/issues/issues.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/issues/issues.tsx
index e30e25b6c23bcb..aa572c91cc3283 100644
--- a/static/app/views/performance/newTraceDetails/traceDrawer/details/issues/issues.tsx
+++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/issues/issues.tsx
@@ -15,7 +15,11 @@ import {IconUser} from 'sentry/icons';
 import {t, tct, tn} from 'sentry/locale';
 import {space} from 'sentry/styles/space';
 import type {Group, Organization} from 'sentry/types';
-import type {TraceErrorOrIssue} from 'sentry/utils/performance/quickTrace/types';
+import type {
+  TraceError,
+  TraceErrorOrIssue,
+  TracePerformanceIssue,
+} from 'sentry/utils/performance/quickTrace/types';
 import {useApiQuery} from 'sentry/utils/queryClient';
 import type {
   TraceTree,
@@ -118,11 +122,12 @@ type IssueListProps = {
 };

 export function IssueList({issues, node, organization}: IssueListProps) {
-  const uniqueIssues = useMemo(() => {
-    const unique: TraceErrorOrIssue[] = [];
+  const uniqueErrorIssues = useMemo(() => {
+    const unique: TraceError[] = [];
+
     const seenIssues: Set<number> = new Set();

-    for (const issue of issues) {
+    for (const issue of node.errors) {
       if (seenIssues.has(issue.issue_id)) {
         continue;
       }
@@ -131,7 +136,26 @@ export function IssueList({issues, node, organization}: IssueListProps) {
     }

     return unique;
-  }, [issues]);
+  }, [node]);
+
+  const uniquePerformanceIssues = useMemo(() => {
+    const unique: TracePerformanceIssue[] = [];
+    const seenIssues: Set<number> = new Set();
+
+    for (const issue of node.performance_issues) {
+      if (seenIssues.has(issue.issue_id)) {
+        continue;
+      }
+      seenIssues.add(issue.issue_id);
+      unique.push(issue);
+    }
+
+    return unique;
+  }, [node]);
+
+  const uniqueIssues = useMemo(() => {
+    return [...uniqueErrorIssues, ...uniquePerformanceIssues];
+  }, [uniqueErrorIssues, uniquePerformanceIssues]);

   if (!issues.length) {
     return null;
   }

   return (
-
+
       {uniqueIssues.slice(0, MAX_DISPLAYED_ISSUES_COUNT).map((issue, index) => (
         
       ))}
@@ -147,50 +175,57 @@
   );
 }

-function IssueListHeader({node}: {node: TraceTreeNode}) {
-  const {errors, performance_issues} = node;
+function IssueListHeader({
+  node,
+  errorIssues,
+  performanceIssues,
+}: {
+  errorIssues: TraceError[];
+  node: TraceTreeNode;
+  performanceIssues: TracePerformanceIssue[];
+}) {
   const [singular, plural] = useMemo((): [string, string] => {
     const label = [t('Issue'), t('Issues')] as [string, string];

-    for (const event of errors) {
+    for (const event of errorIssues) {
       if (event.level === 'error' || event.level === 'fatal') {
         return [t('Error'), t('Errors')];
       }
     }

     return label;
-  }, [errors]);
+  }, [errorIssues]);

   return (

-      {errors.size + performance_issues.size > MAX_DISPLAYED_ISSUES_COUNT
+      {errorIssues.length + performanceIssues.length > MAX_DISPLAYED_ISSUES_COUNT
        ? tct(`[count]+ issues, [link]`, {
            count: MAX_DISPLAYED_ISSUES_COUNT,
            link: {t('View All')},
          })
        : errorIssues.length > 0 && performanceIssues.length === 0
          ? tct('[count] [text]', {
              count: errorIssues.length,
              text: errorIssues.length > 1 ? plural : singular,
            })
          : performanceIssues.length > 0 && errorIssues.length === 0
            ? tct('[count] [text]', {
                count: performanceIssues.length,
                text: tn(
                  'Performance issue',
                  'Performance Issues',
                  performanceIssues.length
                ),
              })
            : tct(
                '[errors] [errorsText] and [performance_issues] [performanceIssuesText]',
                {
                  errors: errorIssues.length,
                  performance_issues: performanceIssues.length,
                  errorsText: errorIssues.length > 1 ? plural : singular,
                  performanceIssuesText: tn(
                    'performance issue',
                    'performance issues',
                    performanceIssues.length
                  ),
                }
              )}

diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx
index 7768ecaa41a918..31a7b65c984f03 100644
--- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx
+++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx
@@ -2140,7 +2140,7 @@ function getRelatedPerformanceIssuesFromTransaction(
     return [];
   }

-  if (!node?.value?.performance_issues?.length && !node?.value?.errors?.length) {
+  if (!node?.value?.performance_issues?.length) {
     return [];
   }

From 5cee6c255e63b063cb212587da1d05a8bd77750d Mon Sep 17 00:00:00 2001
From: Francesco Novy
Date: Mon, 6 May 2024 17:14:24 +0200
Subject: [PATCH 005/376] feat: Bump SDK to 8.0.0-beta.6 (#70339)

This bumps the SDK to v8.0.0-beta.6:
https://github.com/getsentry/sentry-javascript/releases/tag/8.0.0-beta.6.

Notably, with this change, feedback screenshots are enabled (and lazy
loaded) by default.
--- package.json | 12 +-- yarn.lock | 258 +++++++++++++++++++++++++++++---------------------- 2 files changed, 152 insertions(+), 118 deletions(-) diff --git a/package.json b/package.json index 41d5d766e82045..6fcf18068931b0 100644 --- a/package.json +++ b/package.json @@ -57,13 +57,13 @@ "@sentry-internal/rrweb-player": "2.12.0", "@sentry-internal/rrweb-snapshot": "2.12.0", "@sentry/babel-plugin-component-annotate": "^2.16.0", - "@sentry/core": "^8.0.0-beta.5", - "@sentry/node": "^8.0.0-beta.5", - "@sentry/react": "^8.0.0-beta.5", + "@sentry/core": "^8.0.0-beta.6", + "@sentry/node": "^8.0.0-beta.6", + "@sentry/react": "^8.0.0-beta.6", "@sentry/release-parser": "^1.3.1", "@sentry/status-page-list": "^0.1.0", - "@sentry/types": "^8.0.0-beta.5", - "@sentry/utils": "^8.0.0-beta.5", + "@sentry/types": "^8.0.0-beta.6", + "@sentry/utils": "^8.0.0-beta.6", "@spotlightjs/spotlight": "^2.0.0-alpha.1", "@tanstack/react-query": "^4.29.7", "@tanstack/react-query-devtools": "^4.36.1", @@ -178,7 +178,7 @@ "@codecov/webpack-plugin": "^0.0.1-beta.6", "@pmmmwh/react-refresh-webpack-plugin": "0.5.11", "@sentry/jest-environment": "^4.0.0", - "@sentry/profiling-node": "^8.0.0-beta.5", + "@sentry/profiling-node": "^8.0.0-beta.6", "@styled/typescript-styled-plugin": "^1.0.1", "@testing-library/jest-dom": "^6.4.2", "@testing-library/react": "^14.2.1", diff --git a/yarn.lock b/yarn.lock index 025ee6293d2b69..06db0fa8381d4d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2081,6 +2081,13 @@ dependencies: "@opentelemetry/api" "^1.0.0" +"@opentelemetry/api-logs@0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.51.0.tgz#71f296661d2215167c748ca044ff184a65d9426b" + integrity sha512-m/jtfBPEIXS1asltl8fPQtO3Sb1qMpuL61unQajUmM8zIxeMF1AlqzWXM3QedcYgTTFiJCew5uJjyhpmqhc0+g== + dependencies: + "@opentelemetry/api" "^1.0.0" + "@opentelemetry/api@1.8.0", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.6.0", "@opentelemetry/api@^1.8.0": version "1.8.0" resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.8.0.tgz#5aa7abb48f23f693068ed2999ae627d2f7d902ec" @@ -2091,13 +2098,6 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/context-async-hooks/-/context-async-hooks-1.23.0.tgz#4c4627fe2857324459b0a78b5a83cbc64a415d14" integrity sha512-wazGJZDRevibOJ+VgyrT+9+8sybZAxpZx2G7vy30OAtk92OpZCg7HgNxT11NUx0VBDWcRx1dOatMYGOVplQ7QA== -"@opentelemetry/core@1.21.0": - version "1.21.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.21.0.tgz#8c16faf16edf861b073c03c9d45977b3f4003ee1" - integrity sha512-KP+OIweb3wYoP7qTYL/j5IpOlu52uxBv5M4+QhSmmUfLyTgu1OIS71msK3chFo1D6Y61BIH3wMiMYRCxJCQctA== - dependencies: - "@opentelemetry/semantic-conventions" "1.21.0" - "@opentelemetry/core@1.23.0", "@opentelemetry/core@^1.1.0", "@opentelemetry/core@^1.23.0", "@opentelemetry/core@^1.8.0": version "1.23.0" resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.23.0.tgz#f2e7ada7f35750f3c1674aef1e52c879005c0731" @@ -2105,6 +2105,13 @@ dependencies: "@opentelemetry/semantic-conventions" "1.23.0" +"@opentelemetry/core@1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.24.0.tgz#5568b6c1328a6b9c94a77f9b2c7f872b852bba40" + integrity sha512-FP2oN7mVPqcdxJDTTnKExj4mi91EH+DNuArKfHTjPuJWe2K1JfMIVXNfahw1h3onJxQnxS8K0stKkogX05s+Aw== + dependencies: + "@opentelemetry/semantic-conventions" "1.24.0" + "@opentelemetry/instrumentation-connect@0.35.0": version "0.35.0" resolved 
"https://registry.yarnpkg.com/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.35.0.tgz#d7c68f46ab04f10dc8792ef1fd023eb01748d8db" @@ -2150,16 +2157,25 @@ "@opentelemetry/semantic-conventions" "^1.0.0" "@types/hapi__hapi" "20.0.13" -"@opentelemetry/instrumentation-http@0.48.0": - version "0.48.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation-http/-/instrumentation-http-0.48.0.tgz#88266dfcd2dddb45f755a0f1fc882472e6e30a87" - integrity sha512-uXqOsLhW9WC3ZlGm6+PSX0xjSDTCfy4CMjfYj6TPWusOO8dtdx040trOriF24y+sZmS3M+5UQc6/3/ZxBJh4Mw== +"@opentelemetry/instrumentation-http@0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation-http/-/instrumentation-http-0.51.0.tgz#f23fb24e2eed551859a14486893fe68ba6449de2" + integrity sha512-6VsGPBnU6iVKWhVBnuRpwrmiHfxt8EYrqfnH2glfsMpsn4xy+O6U0yGlggPLhoYeOVafV3h70EEk5MU0tpsbiw== dependencies: - "@opentelemetry/core" "1.21.0" - "@opentelemetry/instrumentation" "0.48.0" - "@opentelemetry/semantic-conventions" "1.21.0" + "@opentelemetry/core" "1.24.0" + "@opentelemetry/instrumentation" "0.51.0" + "@opentelemetry/semantic-conventions" "1.24.0" semver "^7.5.2" +"@opentelemetry/instrumentation-ioredis@0.40.0": + version "0.40.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.40.0.tgz#3a747dc44c6244d7f4c8cc98a6b75b9856241eaf" + integrity sha512-Jv/fH7KhpWe4KBirsiqeUJIYrsdR2iu2l4nWhfOlRvaZ+zYIiLEzTQR6QhBbyRoAbU4OuYJzjWusOmmpGBnwng== + dependencies: + "@opentelemetry/instrumentation" "^0.51.0" + "@opentelemetry/redis-common" "^0.36.2" + "@opentelemetry/semantic-conventions" "^1.0.0" + "@opentelemetry/instrumentation-koa@0.39.0": version "0.39.0" resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.39.0.tgz#9c01d40a444e592a95b6e39ba0bbe94e096bfc31" @@ -2249,6 +2265,18 @@ semver "^7.5.2" shimmer "^1.2.1" +"@opentelemetry/instrumentation@0.51.0", "@opentelemetry/instrumentation@^0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.51.0.tgz#93dbe96c87da539081d0ccd07475cfc0b0c61233" + integrity sha512-Eg/+Od5bEvzpvZQGhvMyKIkrzB9S7jW+6z9LHEI2VXhl/GrqQ3oBqlzJt4tA6pGtxRmqQWKWGM1wAbwDdW/gUA== + dependencies: + "@opentelemetry/api-logs" "0.51.0" + "@types/shimmer" "^1.0.2" + import-in-the-middle "1.7.1" + require-in-the-middle "^7.1.1" + semver "^7.5.2" + shimmer "^1.2.1" + "@opentelemetry/instrumentation@^0.43.0": version "0.43.0" resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.43.0.tgz#749521415df03396f969bf42341fcb4acd2e9c7b" @@ -2260,6 +2288,11 @@ semver "^7.5.2" shimmer "^1.2.1" +"@opentelemetry/redis-common@^0.36.2": + version "0.36.2" + resolved "https://registry.yarnpkg.com/@opentelemetry/redis-common/-/redis-common-0.36.2.tgz#906ac8e4d804d4109f3ebd5c224ac988276fdc47" + integrity sha512-faYX1N0gpLhej/6nyp6bgRjzAKXn5GOEMYY7YhciSfCoITAktLUtQ36d24QEWNA1/WA1y6qQunCe0OhHRkVl9g== + "@opentelemetry/resources@1.23.0", "@opentelemetry/resources@^1.23.0": version "1.23.0" resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.23.0.tgz#4c71430f3e20c4d88b67ef5629759fae108485e5" @@ -2286,16 +2319,16 @@ "@opentelemetry/resources" "1.23.0" "@opentelemetry/semantic-conventions" "1.23.0" -"@opentelemetry/semantic-conventions@1.21.0": - version "1.21.0" - resolved 
"https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.21.0.tgz#83f7479c524ab523ac2df702ade30b9724476c72" - integrity sha512-lkC8kZYntxVKr7b8xmjCVUgE0a8xgDakPyDo9uSWavXPyYqLgYYGdEd2j8NxihRyb6UwpX3G/hFUF4/9q2V+/g== - "@opentelemetry/semantic-conventions@1.23.0", "@opentelemetry/semantic-conventions@^1.0.0", "@opentelemetry/semantic-conventions@^1.17.0", "@opentelemetry/semantic-conventions@^1.22.0", "@opentelemetry/semantic-conventions@^1.23.0": version "1.23.0" resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.23.0.tgz#627f2721b960fe586b7f72a07912cb7699f06eef" integrity sha512-MiqFvfOzfR31t8cc74CTP1OZfz7MbqpAnLCra8NqQoaHJX6ncIRTdYOQYBDQ2uFISDq0WY8Y9dDTWvsgzzBYRg== +"@opentelemetry/semantic-conventions@1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.24.0.tgz#f074db930a7feb4d64103a9a576c5fbad046fcac" + integrity sha512-yL0jI6Ltuz8R+Opj7jClGrul6pOoYrdfVmzQS4SITXRPH7I5IRZbrwe/6/v8v4WYMa6MYZG480S1+uc/IGfqsA== + "@opentelemetry/sql-common@^0.40.0": version "0.40.0" resolved "https://registry.yarnpkg.com/@opentelemetry/sql-common/-/sql-common-0.40.0.tgz#8cbed0722354d62997c3b9e1adf0e16257be6b15" @@ -3034,23 +3067,23 @@ fs-extra "^11.1.1" lodash "^4.17.21" -"@sentry-internal/browser-utils@8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry-internal/browser-utils/-/browser-utils-8.0.0-beta.5.tgz#1af061f5cbed854ebf2cda18d3da15bca08afc9b" - integrity sha512-3+XhQPnOBpoukUX+N+gs6Klon7oH8esHG/J9K37ho8hH63A5JIobGNV/dhtzZTKXsS31qGu4X2PM3aA9jjKR+w== +"@sentry-internal/browser-utils@8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry-internal/browser-utils/-/browser-utils-8.0.0-beta.6.tgz#a218ca0408c758b87741db783fb4ab88805ac991" + integrity sha512-uINoOFvOF5G9gpWKKfVIap46AZuUY6miOM1EHGd3p6ZCbQc0Ncg4O0NvvIepjfL5hkqJQrywbW+uFRnNTCgYow== dependencies: - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" -"@sentry-internal/feedback@8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-8.0.0-beta.5.tgz#08457cd7e8a83ac45ca3c36d173bda149ed789e4" - integrity sha512-mqHBiBufJDEv5mZ7alRJpNRO38QVzLAxIdx7aYskiXi57/tPr8AWNkTRQQoDjckQjzgn+lgadYD8nJscSAdyrw== +"@sentry-internal/feedback@8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-8.0.0-beta.6.tgz#828c9f28885d5f5287cb52ed5e8c6f9c666dde10" + integrity sha512-SxiXfqUPdcOR/PRCq8TsHbrRs7L3yuEGVB6uDVC1BSzYYkiFLpilu1k75UJM5FhwTC6VHXqRa9h4Qh7guSvBow== dependencies: - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" "@sentry-internal/global-search@^1.0.0": version "1.0.0" @@ -3071,25 +3104,25 @@ resolved "https://registry.yarnpkg.com/@sentry-internal/react-inspector/-/react-inspector-6.0.1-4.tgz#10758f3461cf2cf48df8c80f0514c55ca18872c5" integrity sha512-uL2RyvW8EqDEchnbo8Hu/c4IpBqM3LLxUpZPHs8o40kynerzPset6bC/m5SU124gEhy4PqjdvJ7DhTYR75NetQ== -"@sentry-internal/replay-canvas@8.0.0-beta.5": - version "8.0.0-beta.5" - resolved 
"https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.0.0-beta.5.tgz#98d1292f43d07289e85c81f3929a387405f03748" - integrity sha512-WBVptOKJUyrv5i39e9BBJ5XPIJLACtenR7PVgFsYm7XkDCaA6ctaK+JHkzWsEwjbEUctjQsxS7a/Bsu6lnBbSQ== +"@sentry-internal/replay-canvas@8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.0.0-beta.6.tgz#93576af9113b852deccf688b7dfcf05302f3419f" + integrity sha512-zwpj/5yoJ07rb/yNkRcC+YzURbJUuWPeKTFUVzicO+zBe4j4FNEOpTHPRdc2/J4rMDfCP45ih54ilF2mYB8obQ== dependencies: - "@sentry-internal/replay" "8.0.0-beta.5" - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry-internal/replay" "8.0.0-beta.6" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" -"@sentry-internal/replay@8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry-internal/replay/-/replay-8.0.0-beta.5.tgz#f044b3d14a5a1e48283fa10c3c30a1face23a730" - integrity sha512-cOq52IEKrIBlp0iF0eLPtfDHC+AWrQraeWAJ4UEFprBIJt5oSZiYP5LIDPR4cmFfcFQ0mHaUg9lSdEmdhITn3Q== +"@sentry-internal/replay@8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry-internal/replay/-/replay-8.0.0-beta.6.tgz#4341a54ddab5821b71b595990c44fe7040fdb747" + integrity sha512-vmcAr3SXbP1P0p6k1j6e2iD1/XOdhXVkw4ov/n1FHhtTWqO/Yy+4clTSj6Z0+S14NH8cDGUBSQcE+Y1IxGowvA== dependencies: - "@sentry-internal/browser-utils" "8.0.0-beta.5" - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry-internal/browser-utils" "8.0.0-beta.6" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" "@sentry-internal/rrdom@2.12.0": version "2.12.0" @@ -3137,36 +3170,36 @@ resolved "https://registry.yarnpkg.com/@sentry/babel-plugin-component-annotate/-/babel-plugin-component-annotate-2.16.0.tgz#c831713b85516fb3f9da2985836ddf444dc634e6" integrity sha512-+uy1qPkA5MSNgJ0L9ur/vNTydfdHwHnBX2RQ+0thsvkqf90fU788YjkkXwUiBBNuqNyI69JiOW6frixAWy7oUg== -"@sentry/browser@8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-8.0.0-beta.5.tgz#2cb93a56863c3fd5ab808bf624b7405b6aa24dd4" - integrity sha512-K0lHodd8HI/OLYec+N5cKLlsIu/MhR8R57OzNpU5gM1r+KlOrjhLaT1a49fqE+OLYSu5RnozwFLqxfqY6ePlww== +"@sentry/browser@8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-8.0.0-beta.6.tgz#83d01bc839e71b25b0284feb880ba0c4c8aa473b" + integrity sha512-mCAAOsmTefnOuf+emwuLm76o4Kqmkpj8t3p5UP4hZcL6nZJX7kawnDyUWnc+knvEhRpSSNxxwTSb+layhAV7Ww== dependencies: - "@sentry-internal/browser-utils" "8.0.0-beta.5" - "@sentry-internal/feedback" "8.0.0-beta.5" - "@sentry-internal/replay" "8.0.0-beta.5" - "@sentry-internal/replay-canvas" "8.0.0-beta.5" - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry-internal/browser-utils" "8.0.0-beta.6" + "@sentry-internal/feedback" "8.0.0-beta.6" + "@sentry-internal/replay" "8.0.0-beta.6" + "@sentry-internal/replay-canvas" "8.0.0-beta.6" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" -"@sentry/core@8.0.0-beta.5", "@sentry/core@^8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/core/-/core-8.0.0-beta.5.tgz#b68956a863c60af20a0a16ca83885124d6867cf5" - integrity 
sha512-DVPTRDMGAZXQY5AxQ3j24CnO0KKHQCZT7IP2ZtN4bVQd685inpSKNhVjNCOssI7zhV9QuW62mqbksJ2+JbBdPA== +"@sentry/core@8.0.0-beta.6", "@sentry/core@^8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/core/-/core-8.0.0-beta.6.tgz#18844386e8a194689d8e9a90097be10f629f79af" + integrity sha512-SW9WSJ4qPayRSCOhbrA0dzbs1Z0Q+Sh2MmjQuVz6m6IAYc9TaSMsThVVBwY840hS7FvEOBcb0BuVrQn4Ly3Mlg== dependencies: - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" "@sentry/jest-environment@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@sentry/jest-environment/-/jest-environment-4.0.0.tgz#037844bed70c8f13259ee01ab65ff8d36aef0209" integrity sha512-91jLBS8KbX2Ng0aDSP7kdE9sjiLc4qjp/jczTbmvOvuHxoaQ9hSLaEpsthnnUQ/zNeprZMkOC9xlS+zABw3Zmw== -"@sentry/node@8.0.0-beta.5", "@sentry/node@^8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/node/-/node-8.0.0-beta.5.tgz#a986672e23ab2694aecd562148d0b6fd6da2d520" - integrity sha512-eYEcQQy/ra7FOXYJRuUZJq+EddXY+E4mNqcAWY+VM45QFtzpfdlMOG8KjpN4Z+ptoeWj5cquao1JZSMWo4cOEw== +"@sentry/node@8.0.0-beta.6", "@sentry/node@^8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/node/-/node-8.0.0-beta.6.tgz#308ea61e31c20183b5ff6586ce1cc71518bfd049" + integrity sha512-mWb54Fq+JTDRCLelFcam6CanPlj6V5P9wzf842Qddrkq5iRWBCnR1pL/hR0vhzUSQ1A9PfBERPZtR3SRjPqAiA== dependencies: "@opentelemetry/api" "^1.8.0" "@opentelemetry/context-async-hooks" "^1.23.0" @@ -3177,7 +3210,8 @@ "@opentelemetry/instrumentation-fastify" "0.35.0" "@opentelemetry/instrumentation-graphql" "0.39.0" "@opentelemetry/instrumentation-hapi" "0.36.0" - "@opentelemetry/instrumentation-http" "0.48.0" + "@opentelemetry/instrumentation-http" "0.51.0" + "@opentelemetry/instrumentation-ioredis" "0.40.0" "@opentelemetry/instrumentation-koa" "0.39.0" "@opentelemetry/instrumentation-mongodb" "0.39.0" "@opentelemetry/instrumentation-mongoose" "0.37.0" @@ -3189,43 +3223,43 @@ "@opentelemetry/sdk-trace-base" "^1.23.0" "@opentelemetry/semantic-conventions" "^1.23.0" "@prisma/instrumentation" "5.13.0" - "@sentry/core" "8.0.0-beta.5" - "@sentry/opentelemetry" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/core" "8.0.0-beta.6" + "@sentry/opentelemetry" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" optionalDependencies: opentelemetry-instrumentation-fetch-node "1.2.0" -"@sentry/opentelemetry@8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/opentelemetry/-/opentelemetry-8.0.0-beta.5.tgz#01456904b7947848d6915c419ff088d85522d859" - integrity sha512-Xpwo+8jHSEXUCfTzUh7arvgaijW1+Icp2oZTCCEAlP9Egt+oc+IIeW4sTTur2yYkKSu2brxgF1Hd9ap0krws4Q== +"@sentry/opentelemetry@8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/opentelemetry/-/opentelemetry-8.0.0-beta.6.tgz#c3f3cdee39f6b151fa06a6884108c5e19b26bdc2" + integrity sha512-gEG91QhZKAHwGyUid+4V4fsloOUgBdj65hHzWl0CNkFnSXa/KuFVMWpXOaIMCIWEyJPZk3cVbZdm05QNCaGn2Q== dependencies: - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" -"@sentry/profiling-node@^8.0.0-beta.5": - version "8.0.0-beta.5" - resolved 
"https://registry.yarnpkg.com/@sentry/profiling-node/-/profiling-node-8.0.0-beta.5.tgz#2cbd5444b9cea33f37c25d0946c40b850801f57c" - integrity sha512-R99xCLArQLF2SrY9JYyZPUfsx02fTI9wXCGfCnutan3UAAG8SXIv9qnGkLWIaUE1QJ1EGjztX7HIgY+Ih0l52A== +"@sentry/profiling-node@^8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/profiling-node/-/profiling-node-8.0.0-beta.6.tgz#98f29cfa6444e4d9f7507d6788f151ff91e4220c" + integrity sha512-LwsW5jlCLAwnQ9A1+E7Sv8PHHhTjjseidx+/jKs5El5QeWD1GVfIQoDMfFJ6SCW9oDiLcqvfFluWKwTLZCshwQ== dependencies: - "@sentry/core" "8.0.0-beta.5" - "@sentry/node" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/core" "8.0.0-beta.6" + "@sentry/node" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" detect-libc "^2.0.2" - node-abi "^3.52.0" + node-abi "^3.61.0" -"@sentry/react@^8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/react/-/react-8.0.0-beta.5.tgz#21d1468d46338ddc7d613f114567b6378957df1e" - integrity sha512-wSoG0aVQUUGIEPTm3iqZoKyq8IclPduGY3ZqyMgyNPLNc3bm7RMtVrVF5gAS2oeYXrPf53AVPQmjatIWrg/mug== +"@sentry/react@^8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/react/-/react-8.0.0-beta.6.tgz#487fe30cb9695d97446eda1870ba81f0a8c0b6a0" + integrity sha512-2Kqn5YtAkPAj0E2hrjCd0KfkJ1DAQiqBENbupkRhbeKLjj43Ut3Ag6hOh+z9b914+3EDzfV4txxBL6D+NYDd7w== dependencies: - "@sentry/browser" "8.0.0-beta.5" - "@sentry/core" "8.0.0-beta.5" - "@sentry/types" "8.0.0-beta.5" - "@sentry/utils" "8.0.0-beta.5" + "@sentry/browser" "8.0.0-beta.6" + "@sentry/core" "8.0.0-beta.6" + "@sentry/types" "8.0.0-beta.6" + "@sentry/utils" "8.0.0-beta.6" hoist-non-react-statics "^3.3.2" "@sentry/release-parser@^1.3.1": @@ -3238,17 +3272,17 @@ resolved "https://registry.yarnpkg.com/@sentry/status-page-list/-/status-page-list-0.1.0.tgz#49e8683091de0531aba96fc95f19891970929701" integrity sha512-wXWu3IihxFO0l5WQkr6V138ZJKHpL8G7fw/9l0Dl6Nl1ggWcJZOaBN/o5sXasS1e0Atvy2dL9DiPsKmBq8D4MA== -"@sentry/types@8.0.0-beta.5", "@sentry/types@^8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/types/-/types-8.0.0-beta.5.tgz#1a877194f20ce867743bce514af2f8475f0a91c8" - integrity sha512-BRGvEDtPO5lj70ELF3gujH3z1evkMTOzzF7E+kG1pHzIARS6tSLZgaGsnVVKPA1AIbe7MQ2iWrtTlO1dZWQNpw== +"@sentry/types@8.0.0-beta.6", "@sentry/types@^8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/types/-/types-8.0.0-beta.6.tgz#9b194aa21f1a1b0e4b866ffcb20f015e48ca78bf" + integrity sha512-R8GyKBblGujXxTeT4hdxi5tp9P6tzG8saY1AuPnyJMaUYBznoGCPPuLpR19DFFPwvcxKElqFRcJfYThmrZXWGg== -"@sentry/utils@8.0.0-beta.5", "@sentry/utils@^8.0.0-beta.5": - version "8.0.0-beta.5" - resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-8.0.0-beta.5.tgz#2e12e1bd39bb0175bd279e40841e182657cc89d8" - integrity sha512-irInCFhFDejT57iKH6lRCGZMSTRXlcRoExt2QZ84cISC96SgoR4q4kHLzmqsXEtSYD8aKCxnnMpWsYsDrg7wug== +"@sentry/utils@8.0.0-beta.6", "@sentry/utils@^8.0.0-beta.6": + version "8.0.0-beta.6" + resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-8.0.0-beta.6.tgz#04a2845f2c239bb879471a29c103f0effcc5affb" + integrity sha512-ccwYA1xkgGpyS+q1AqcS7M9Y16xUd4fFlyNXLDTlt7LacmJdu+H3f4sHOiulKsL+1ewusZFUC2cV8oHzgGCgVw== dependencies: - "@sentry/types" "8.0.0-beta.5" + "@sentry/types" "8.0.0-beta.6" "@sideway/address@^4.1.5": version "4.1.5" @@ -9350,10 +9384,10 @@ no-case@^3.0.4: lower-case "^2.0.2" tslib "^2.0.3" 
-node-abi@^3.52.0: - version "3.54.0" - resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.54.0.tgz#f6386f7548817acac6434c6cba02999c9aebcc69" - integrity sha512-p7eGEiQil0YUV3ItH4/tBb781L5impVmmx2E9FRKF7d18XXzp4PGT2tdYMFY6wQqgxD0IwNZOiSJ0/K0fSi/OA== +node-abi@^3.61.0: + version "3.62.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.62.0.tgz#017958ed120f89a3a14a7253da810f5d724e3f36" + integrity sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g== dependencies: semver "^7.3.5" From 3f6b2a23e0c1c6e600b89b2edae7fbdbb8c5ca6e Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Mon, 6 May 2024 11:24:14 -0400 Subject: [PATCH 006/376] chore(perf): Add instrumentation around performance issue detection (#70334) Wrap every detector in a span, so we can see which specific detectors are slow on any given transaction. --- src/sentry/utils/performance_issues/performance_detection.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py index 8fe102d760bc7e..412745666df985 100644 --- a/src/sentry/utils/performance_issues/performance_detection.py +++ b/src/sentry/utils/performance_issues/performance_detection.py @@ -342,7 +342,10 @@ def _detect_performance_problems( ] for detector in detectors: - run_detector_on_data(detector, data) + with sentry_sdk.start_span( + op="function", description=f"run_detector_on_data.{detector.type.value}" + ): + run_detector_on_data(detector, data) # Metrics reporting only for detection, not created issues. report_metrics_for_detectors( From 85efb03f3d2fe81938157562e1094d55ee8714fc Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 6 May 2024 11:38:59 -0400 Subject: [PATCH 007/376] ref: also delete sentry-functions settings (#70253) --- src/sentry/conf/server.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 06046bdab211a2..055b0695156832 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3738,10 +3738,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: } SENTRY_POSTGRES_INDEXER_RETRY_COUNT = 2 -SENTRY_FUNCTIONS_PROJECT_NAME: str | None = None - -SENTRY_FUNCTIONS_REGION = "us-central1" - # Settings related to SiloMode FAIL_ON_UNAVAILABLE_API_CALL = False From 540e6b0adb9cb67a02730c94fc5a33842e04d89d Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 6 May 2024 11:44:07 -0400 Subject: [PATCH 008/376] fix(spans): Search on span op and status (#70331) A few more bugs, searching on span op should support INs and status should map to the corresponding integer. 
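
Concretely, the converters below translate search filters into Snuba
conditions along these lines (a sketch mirroring the new tests in this
patch; the integer status codes come from `translate_transaction_status`):

    # Sketch, mirroring tests/sentry/search/events/builder/test_spans_indexed.py.
    from snuba_sdk import Column, Condition, Op

    # span.op is lowercased and now supports IN lists:
    Condition(Column("op"), Op.IN, ["db", "http.client"])  # span.op:[db,http.client]

    # span.status names map to integer codes:
    Condition(Column("span_status"), Op.EQ, 0)       # span.status:ok
    Condition(Column("span_status"), Op.IN, [3, 5])  # span.status:[invalid_argument,not_found]
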
--- src/sentry/search/events/constants.py | 1 + src/sentry/search/events/datasets/discover.py | 19 ++---------- .../search/events/datasets/filter_aliases.py | 30 +++++++++++++++++-- .../search/events/datasets/spans_indexed.py | 3 ++ .../events/builder/test_spans_indexed.py | 20 +++++++++++++ 5 files changed, 54 insertions(+), 19 deletions(-) diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index 40f282ebaa0bb5..dfe2261a18ca14 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -51,6 +51,7 @@ SPAN_IS_SEGMENT_ALIAS = "span.is_segment" SPAN_OP = "span.op" SPAN_DESCRIPTION = "span.description" +SPAN_STATUS = "span.status" class ThresholdDict(TypedDict): diff --git a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py index 67030d18d6dfb0..dee268391a7792 100644 --- a/src/sentry/search/events/datasets/discover.py +++ b/src/sentry/search/events/datasets/discover.py @@ -91,7 +91,7 @@ normalize_percentile_alias, with_default, ) -from sentry.search.events.filter import to_list, translate_transaction_status +from sentry.search.events.filter import to_list from sentry.search.events.types import SelectType, WhereType from sentry.search.utils import DEVICE_CLASS from sentry.snuba.referrer import Referrer @@ -1933,22 +1933,7 @@ def _trace_parent_span_converter(self, search_filter: SearchFilter) -> WhereType return self.builder.default_filter_converter(search_filter) def _transaction_status_filter_converter(self, search_filter: SearchFilter) -> WhereType | None: - # Handle "has" queries - if search_filter.value.raw_value == "": - return Condition( - self.builder.resolve_field(search_filter.key.name), - Op.IS_NULL if search_filter.operator == "=" else Op.IS_NOT_NULL, - ) - internal_value = ( - [translate_transaction_status(val) for val in search_filter.value.raw_value] - if search_filter.is_in_filter - else translate_transaction_status(search_filter.value.raw_value) - ) - return Condition( - self.builder.resolve_field(search_filter.key.name), - Op(search_filter.operator), - internal_value, - ) + return filter_aliases.span_status_filter_converter(self.builder, search_filter) def _performance_issue_ids_filter_converter( self, search_filter: SearchFilter diff --git a/src/sentry/search/events/datasets/filter_aliases.py b/src/sentry/search/events/datasets/filter_aliases.py index 30203f013ef4f2..4e6121a9e2976a 100644 --- a/src/sentry/search/events/datasets/filter_aliases.py +++ b/src/sentry/search/events/datasets/filter_aliases.py @@ -15,6 +15,7 @@ handle_operator_negation, parse_semver, to_list, + translate_transaction_status, ) from sentry.search.events.types import WhereType from sentry.search.utils import DEVICE_CLASS, parse_release @@ -305,7 +306,32 @@ def lowercase_search( builder: builder.QueryBuilder, search_filter: SearchFilter ) -> WhereType | None: """Convert the search value to lower case""" - value = search_filter.value.value + raw_value = search_filter.value.raw_value + if isinstance(raw_value, list): + raw_value = [val.lower() for val in raw_value] + else: + raw_value = raw_value.lower() return builder.default_filter_converter( - SearchFilter(search_filter.key, search_filter.operator, SearchValue(value.lower())) + SearchFilter(search_filter.key, search_filter.operator, SearchValue(raw_value)) + ) + + +def span_status_filter_converter( + builder: builder.QueryBuilder, search_filter: SearchFilter +) -> WhereType | None: + # Handle "has" queries + if 
search_filter.value.raw_value == "": + return Condition( + builder.resolve_field(search_filter.key.name), + Op.IS_NULL if search_filter.operator == "=" else Op.IS_NOT_NULL, + ) + internal_value = ( + [translate_transaction_status(val) for val in search_filter.value.raw_value] + if search_filter.is_in_filter + else translate_transaction_status(search_filter.value.raw_value) + ) + return Condition( + builder.resolve_field(search_filter.key.name), + Op(search_filter.operator), + internal_value, ) diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py index 98873f27c30ec8..0007461f30f6c7 100644 --- a/src/sentry/search/events/datasets/spans_indexed.py +++ b/src/sentry/search/events/datasets/spans_indexed.py @@ -43,6 +43,9 @@ def search_filter_converter( constants.SPAN_OP: lambda search_filter: filter_aliases.lowercase_search( self.builder, search_filter ), + constants.SPAN_STATUS: lambda search_filter: filter_aliases.span_status_filter_converter( + self.builder, search_filter + ), } @property diff --git a/tests/sentry/search/events/builder/test_spans_indexed.py b/tests/sentry/search/events/builder/test_spans_indexed.py index e450bc65fa5e41..9f51ac3f310e1d 100644 --- a/tests/sentry/search/events/builder/test_spans_indexed.py +++ b/tests/sentry/search/events/builder/test_spans_indexed.py @@ -93,6 +93,26 @@ def test_field_alias(params, field, expected): pytest.param( "span.duration:<=1s", Condition(span_duration, Op.LTE, 1000), id="span.duration:<=1s" ), + pytest.param( + "span.op:db", + Condition(Column("op"), Op.EQ, "db"), + id="span.op:db", + ), + pytest.param( + "span.op:[db,http.client]", + Condition(Column("op"), Op.IN, ["db", "http.client"]), + id="span.op:[db,http.client]", + ), + pytest.param( + "span.status:ok", + Condition(Column("span_status"), Op.EQ, 0), + id="span.status:ok", + ), + pytest.param( + "span.status:[invalid_argument,not_found]", + Condition(Column("span_status"), Op.IN, [3, 5]), + id="span.status:[invalid_argument,not_found]", + ), ], ) @django_db_all From 5a7d45703a3cf7b649e79660a0766a01ee6abf36 Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Mon, 6 May 2024 17:48:19 +0200 Subject: [PATCH 009/376] Fix `ProjectOption` checks in `delete_raw_event` (#68170) This pretty much reverts https://github.com/getsentry/sentry/pull/66959, as that was buggy for multiple reasons: - It switched to an `exists` check, although we want to check the actual value. It thus had false-positives for projects that had reprocessing set to `False`. - It introduced a buggy `update_value` function which had an infinite recursion bug. Though the function was most likely never ever invoked. This now moves some of those checks into the `reprocessing.py` file, which hopefully will go away altogether soon with the removal of legacy reprocessing. I also added a code comment why I believe the previous `update_value` was never invoked. If reviewers agree, I will just remove all those checks altogether, as I believe that code to be unreachable. 
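
(For context, the recursion mentioned above is easy to see in the removal
further down: the deleted helper called itself rather than delegating to
`create_or_update`, roughly

    # The removed helper, reduced to its essence: the body re-enters
    # update_value, so any call would recurse until RecursionError.
    def update_value(self, project_id, key, value):
        self.update_value(project_id=project_id, key=key, value=value)

so it could never have worked if invoked.)
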
--------- Co-authored-by: Sebastian Zivota Co-authored-by: Sebastian Zivota --- .../api/endpoints/project_processingissues.py | 5 ++- src/sentry/models/options/project_option.py | 18 +++++---- src/sentry/receivers/reprocessing.py | 4 +- src/sentry/reprocessing.py | 30 ++++++++++---- src/sentry/tasks/store.py | 40 ++++++------------- tests/sentry/tasks/test_reprocessing2.py | 25 ++++++++++++ 6 files changed, 75 insertions(+), 47 deletions(-) diff --git a/src/sentry/api/endpoints/project_processingissues.py b/src/sentry/api/endpoints/project_processingissues.py index 93daa17bfc08d7..af3bb50fa95f48 100644 --- a/src/sentry/api/endpoints/project_processingissues.py +++ b/src/sentry/api/endpoints/project_processingissues.py @@ -8,7 +8,7 @@ from sentry.api.helpers.processing_issues import get_processing_issues from sentry.api.serializers import serialize from sentry.models.processingissue import ProcessingIssue -from sentry.reprocessing import trigger_reprocessing +from sentry.reprocessing import REPROCESSING_OPTION, trigger_reprocessing @region_silo_endpoint @@ -48,7 +48,8 @@ def delete(self, request: Request, project) -> Response: This deletes all open processing issues and triggers reprocessing if the user disabled the checkbox """ - reprocessing_active = bool(project.get_option("sentry:reprocessing_active", True)) + # XXX: Why does this default to `True` here? + reprocessing_active = bool(project.get_option(REPROCESSING_OPTION, True)) if not reprocessing_active: ProcessingIssue.objects.resolve_all_processing_issue(project=project) trigger_reprocessing(project) diff --git a/src/sentry/models/options/project_option.py b/src/sentry/models/options/project_option.py index 47e8d1f4678b44..ca55696f3c5716 100644 --- a/src/sentry/models/options/project_option.py +++ b/src/sentry/models/options/project_option.py @@ -103,16 +103,18 @@ def unset_value(self, project: Project, key: str) -> None: self.filter(project=project, key=key).delete() self.reload_cache(project.id, "projectoption.unset_value") - def set_value(self, project: Project, key: str, value: Value) -> bool: - inst, created = self.create_or_update(project=project, key=key, values={"value": value}) - self.reload_cache(project.id, "projectoption.set_value") + def set_value(self, project: int | Project, key: str, value: Value) -> bool: + if isinstance(project, models.Model): + project_id = project.id + else: + project_id = project - return created or inst > 0 + inst, created = self.create_or_update( + project_id=project_id, key=key, values={"value": value} + ) + self.reload_cache(project_id, "projectoption.set_value") - def update_value(self, project_id: int, key: str, value: Value): - # Updates a value with the assumption of the entry being existent. 
- self.update_value(project_id=project_id, key=key, value=value) - self.reload_cache(project_id, "projectoption.update_value") + return created or inst > 0 def get_all_values(self, project: Project | int) -> Mapping[str, Value]: if isinstance(project, models.Model): diff --git a/src/sentry/receivers/reprocessing.py b/src/sentry/receivers/reprocessing.py index 540ceec2e1aa2a..59a695aaa55113 100644 --- a/src/sentry/receivers/reprocessing.py +++ b/src/sentry/receivers/reprocessing.py @@ -7,9 +7,9 @@ sender=ProjectOption, dispatch_uid="bump_reprocessing_revision_receiver", weak=False ) def bump_reprocessing_revision_receiver(filters, **_): - from sentry.reprocessing import REPROCESSING_OPTION, bump_reprocessing_revision + from sentry.reprocessing import REPROCESSING_REVISION_OPTION, bump_reprocessing_revision - if filters.get("key") == REPROCESSING_OPTION: + if filters.get("key") == REPROCESSING_REVISION_OPTION: bump_reprocessing_revision( Project.objects.get_from_cache(id=filters["project_id"]), use_buffer=False ) diff --git a/src/sentry/reprocessing.py b/src/sentry/reprocessing.py index 298a40b7341972..4a89d8380a067d 100644 --- a/src/sentry/reprocessing.py +++ b/src/sentry/reprocessing.py @@ -3,7 +3,13 @@ import sentry_sdk -REPROCESSING_OPTION = "sentry:processing-rev" +from sentry.models.options.project_option import ProjectOption + +# Is reprocessing on or off by default? +REPROCESSING_DEFAULT = False +REPROCESSING_OPTION = "sentry:reprocessing_active" +REPROCESSING_REVISION_OPTION = "sentry:processing-rev" +SENT_NOTIFICATION_OPTION = "sentry:sent_failed_event_hint" logger = logging.getLogger("sentry.events") @@ -28,25 +34,35 @@ def event_supports_reprocessing(data): return False +def is_active(project_id: int) -> bool: + return ProjectOption.objects.get_value(project_id, REPROCESSING_OPTION, REPROCESSING_DEFAULT) + + +def did_send_notification(project_id: int) -> bool: + return ProjectOption.objects.get_value(project_id, SENT_NOTIFICATION_OPTION, False) + + +def mark_notification_sent(project_id: int, value: bool): + ProjectOption.objects.set_value(project_id, SENT_NOTIFICATION_OPTION, value) + + @sentry_sdk.tracing.trace def get_reprocessing_revision(project, cached=True): """Returns the current revision of the projects reprocessing config set.""" - from sentry.models.options.project_option import ProjectOption from sentry.models.project import Project if cached: - return ProjectOption.objects.get_value(project, REPROCESSING_OPTION) + return ProjectOption.objects.get_value(project, REPROCESSING_REVISION_OPTION) try: if isinstance(project, Project): project = project.id - return ProjectOption.objects.get(project=project, key=REPROCESSING_OPTION).value + return ProjectOption.objects.get(project=project, key=REPROCESSING_REVISION_OPTION).value except ProjectOption.DoesNotExist: pass def bump_reprocessing_revision(project, use_buffer=False): """Bumps the reprocessing revision.""" - from sentry.models.options.project_option import ProjectOption from sentry.tasks.process_buffer import buffer_incr rev = uuid.uuid4().hex @@ -54,11 +70,11 @@ def bump_reprocessing_revision(project, use_buffer=False): buffer_incr( ProjectOption, columns={}, - filters={"project_id": project.id, "key": REPROCESSING_OPTION}, + filters={"project_id": project.id, "key": REPROCESSING_REVISION_OPTION}, signal_only=True, ) else: - ProjectOption.objects.set_value(project, REPROCESSING_OPTION, rev) + ProjectOption.objects.set_value(project, REPROCESSING_REVISION_OPTION, rev) return rev diff --git a/src/sentry/tasks/store.py 
b/src/sentry/tasks/store.py index 980d1c3229c7fc..e02f62e347d7cf 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -20,7 +20,6 @@ from sentry.killswitches import killswitch_matches_context from sentry.lang.native.symbolicator import SymbolicatorTaskKind from sentry.models.activity import Activity -from sentry.models.options.project_option import ProjectOption from sentry.models.organization import Organization from sentry.models.project import Project from sentry.silo.base import SiloMode @@ -36,9 +35,6 @@ error_logger = logging.getLogger("sentry.errors.events") info_logger = logging.getLogger("sentry.store") -# Is reprocessing on or off by default? -REPROCESSING_DEFAULT = False - class RetryProcessing(Exception): pass @@ -578,7 +574,7 @@ def process_event_from_reprocessing( @sentry_sdk.tracing.trace -def delete_raw_event(project_id: int, event_id: str | None, allow_hint_clear: bool = False) -> None: +def delete_raw_event(project_id: int, event_id: str | None) -> None: set_current_event_project(project_id) if event_id is None: @@ -593,18 +589,12 @@ def delete_raw_event(project_id: int, event_id: str | None, allow_hint_clear: bo # Clear the sent notification if we reprocessed everything # successfully and reprocessing is enabled - reprocessing_active = ProjectOption.objects.filter( - project_id=project_id, key="sentry:reprocessing_active" - ).exists() - if reprocessing_active: - sent_notification = ProjectOption.objects.filter( - project_id=project_id, key="sentry:sent_failed_event_hint" - ).exists() - if sent_notification: - if ReprocessingReport.objects.filter(project_id=project_id, event_id=event_id).exists(): - ProjectOption.objects.update_value( - project_id=project_id, key="sentry:sent_failed_event_hint", value=False - ) + reprocessing_active = reprocessing.is_active(project_id) + if reprocessing_active and reprocessing.did_send_notification(project_id): + # XXX: We just `delete`d all the `ReprocessingReport`s a few lines above. + # The only way this can ever be true here is if we have a race? + if ReprocessingReport.objects.filter(project_id=project_id, event_id=event_id).exists(): + reprocessing.mark_notification_sent(project_id, False) @sentry_sdk.tracing.trace @@ -634,9 +624,7 @@ def create_failed_event( if reprocessing2.is_reprocessed_event(data): return False - reprocessing_active = ProjectOption.objects.get_value( - project_id, "sentry:reprocessing_active", REPROCESSING_DEFAULT - ) + reprocessing_active = reprocessing.is_active(project_id) # In case there is reprocessing active but the current reprocessing # revision is already different than when we started, we want to @@ -651,18 +639,14 @@ def create_failed_event( # The first time we encounter a failed event and the hint was cleared # we send a notification. 
-    sent_notification = ProjectOption.objects.get_value(
-        project_id, "sentry:sent_failed_event_hint", False
-    )
-    if not sent_notification:
-        project = Project.objects.get_from_cache(id=project_id)
+    if not reprocessing.did_send_notification(project_id):
         Activity.objects.create(
             type=ActivityType.NEW_PROCESSING_ISSUES.value,
-            project=project,
+            project_id=project_id,
             datetime=to_datetime(start_time),
             data={"reprocessing_active": reprocessing_active, "issues": issues},
         ).send_notification()
-        ProjectOption.objects.set_value(project, "sentry:sent_failed_event_hint", True)
+        reprocessing.mark_notification_sent(project_id, True)
 
     # If reprocessing is not active we bail now without creating the
     # processing issues
@@ -747,7 +731,7 @@ def _do_save_event(
         # reprocessing. If the data cannot be found we want to assume
         # that we need to delete the raw event.
         if not data or reprocessing.event_supports_reprocessing(data):
-            delete_raw_event(project_id, event_id, allow_hint_clear=True)
+            delete_raw_event(project_id, event_id)
 
         # This covers two cases: where data is None because we did not manage
         # to fetch it from the default cache or the empty dictionary was
diff --git a/tests/sentry/tasks/test_reprocessing2.py b/tests/sentry/tasks/test_reprocessing2.py
index 55cf8ec3909b5c..39dc4aca58b603 100644
--- a/tests/sentry/tasks/test_reprocessing2.py
+++ b/tests/sentry/tasks/test_reprocessing2.py
@@ -25,6 +25,7 @@
 from sentry.reprocessing2 import is_group_finished
 from sentry.tasks.reprocessing2 import finish_reprocessing, reprocess_group
 from sentry.tasks.store import preprocess_event
+from sentry.testutils.cases import TestCase
 from sentry.testutils.helpers import Feature
 from sentry.testutils.helpers.datetime import before_now, iso_format
 from sentry.testutils.pytest.fixtures import django_db_all
@@ -639,3 +640,27 @@ def test_finish_reprocessing(default_project):
     old_group.activity_set.create(project=default_project, type=ActivityType.NOTE.value)
 
     finish_reprocessing(old_group.project_id, old_group.id)
+
+
+class LegacyReprocessingTest(TestCase):
+    def setUp(self):
+        super().setUp()
+        self.owner = self.create_user(is_superuser=False)
+        self.organization = self.create_organization(owner=self.owner)
+        self.team = self.create_team(organization=self.organization)
+        self.project = self.create_project(organization=self.organization)
+
+    @django_db_all
+    def test_reprocessing_disabled(self):
+        # Asserts that reprocessing.is_active is not the
+        # same as checking for the existence of the option key.
+        # See https://github.com/getsentry/sentry/pull/68170.
+        from sentry.models.options.project_option import ProjectOption
+        from sentry.reprocessing import REPROCESSING_OPTION, is_active
+
+        ProjectOption.objects.set_value(self.project, REPROCESSING_OPTION, False)
+
+        assert ProjectOption.objects.filter(
+            project_id=self.project, key=REPROCESSING_OPTION
+        ).exists()
+        assert not is_active(self.project)

From 38d7536d088cc92fd216128f7510c78d7189757f Mon Sep 17 00:00:00 2001
From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com>
Date: Mon, 6 May 2024 09:03:01 -0700
Subject: [PATCH 010/376] chore(replay): add comment in types.tsx (#70349)

follow-up from
https://github.com/getsentry/sentry/pull/70294#discussion_r1589812163

We decided to not send `from` for now: [screenshot omitted: SCR-20240506-kqau]

---
 static/app/utils/replays/types.tsx | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/static/app/utils/replays/types.tsx b/static/app/utils/replays/types.tsx
index 865aef34aab942..c7a56ec347d347 100644
--- a/static/app/utils/replays/types.tsx
+++ b/static/app/utils/replays/types.tsx
@@ -49,8 +49,10 @@ type MobileBreadcrumbTypes =
     };
 
 /**
- * Extra breadcrumb types not included in `@sentry/replay`
- * Also includes mobile types
+ * Extra breadcrumb types not included in `@sentry/replay`.
+ * Also includes mobile types.
+ * The navigation breadcrumb has data['from'] marked as optional
+ * because the mobile SDK does not send that property currently.
  */
 type ExtraBreadcrumbTypes =
   | MobileBreadcrumbTypes

From e16222e7c7f4401a6cafc654a21a90ac4e8b5dfa Mon Sep 17 00:00:00 2001
From: Stephen Cefali
Date: Mon, 6 May 2024 09:10:21 -0700
Subject: [PATCH 011/376] fix(search): fixes short id lookup logic (#70247)

This PR fixes a bug where using a short id inside a query such as
`issue:SENTRY-38VM` fails the postgres direct-hit lookup, so the user is
not redirected even though the query narrows the results to that single
issue. However, just searching `SENTRY-38VM` works as expected.

The problem is the regex for the project slug matches `issue:SENTRY`
instead of `SENTRY`.
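As a standalone sketch of the parsing change (not part of the patch itself),
the only difference is the optional non-capturing `issue:` prefix:

    import re

    # Old pattern: the lazy first group swallows the `issue:` prefix, so the
    # parsed project slug becomes `issue:SENTRY` and the direct-hit lookup misses.
    old_short_id_re = re.compile(r"^(.*?)(?:[\s_-])([A-Za-z0-9]+)$")
    # New pattern: an optional `issue:` prefix is consumed before the slug is captured.
    new_short_id_re = re.compile(r"^(?:issue+:)?(.*?)(?:[\s_-])([A-Za-z0-9]+)$")

    query = "issue:SENTRY-38VM"
    assert old_short_id_re.match(query).groups() == ("issue:SENTRY", "38VM")
    assert new_short_id_re.match(query).groups() == ("SENTRY", "38VM")
    # A bare short id parses the same way under both patterns:
    assert new_short_id_re.match("SENTRY-38VM").groups() == ("SENTRY", "38VM")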
--- src/sentry/models/group.py | 3 +-- .../issues/endpoints/test_organization_group_index.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/sentry/models/group.py b/src/sentry/models/group.py index 4e060aeb87623a..4f8b07f57888ae 100644 --- a/src/sentry/models/group.py +++ b/src/sentry/models/group.py @@ -67,8 +67,7 @@ logger = logging.getLogger(__name__) -_short_id_re = re.compile(r"^(.*?)(?:[\s_-])([A-Za-z0-9]+)$") - +_short_id_re = re.compile(r"^(?:issue+:)?(.*?)(?:[\s_-])([A-Za-z0-9]+)$") ShortId = namedtuple("ShortId", ["project_slug", "short_id"]) diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py index f1bfa58e795e5e..325950232b8a7f 100644 --- a/tests/sentry/issues/endpoints/test_organization_group_index.py +++ b/tests/sentry/issues/endpoints/test_organization_group_index.py @@ -588,6 +588,7 @@ def test_lookup_by_short_id(self): self.login_as(user=self.user) response = self.get_success_response(query=short_id, shortIdLookup=1) assert len(response.data) == 1 + assert response["X-Sentry-Direct-Hit"] == "1" def test_lookup_by_short_id_alias(self): event_id = "f" * 32 @@ -598,8 +599,9 @@ def test_lookup_by_short_id_alias(self): short_id = group.qualified_short_id self.login_as(user=self.user) - response = self.get_success_response(query=f"issue:{short_id}") + response = self.get_success_response(query=f"issue:{short_id}", shortIdLookup=1) assert len(response.data) == 1 + assert response["X-Sentry-Direct-Hit"] == "1" def test_lookup_by_multiple_short_id_alias(self): self.login_as(self.user) @@ -615,9 +617,11 @@ def test_lookup_by_multiple_short_id_alias(self): ) with self.feature("organizations:global-views"): response = self.get_success_response( - query=f"issue:[{event.group.qualified_short_id},{event2.group.qualified_short_id}]" + query=f"issue:[{event.group.qualified_short_id},{event2.group.qualified_short_id}]", + shortIdLookup=1, ) assert len(response.data) == 2 + assert response.get("X-Sentry-Direct-Hit") != "1" def test_lookup_by_short_id_ignores_project_list(self): organization = self.create_organization() @@ -635,6 +639,7 @@ def test_lookup_by_short_id_ignores_project_list(self): organization.slug, project=project.id, query=short_id, shortIdLookup=1 ) assert len(response.data) == 1 + assert response.get("X-Sentry-Direct-Hit") == "1" def test_lookup_by_short_id_no_perms(self): organization = self.create_organization() @@ -649,6 +654,7 @@ def test_lookup_by_short_id_no_perms(self): response = self.get_success_response(organization.slug, query=short_id, shortIdLookup=1) assert len(response.data) == 0 + assert response.get("X-Sentry-Direct-Hit") != "1" def test_lookup_by_group_id(self): self.login_as(user=self.user) From 118c4d92c14db1c89226e69cc995c9cc7f1a7b91 Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Mon, 6 May 2024 12:13:46 -0400 Subject: [PATCH 012/376] feat(related_issues): Add analytics to Open Issues button (#70328) --- static/app/views/issueDetails/groupRelatedIssues/index.tsx | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/static/app/views/issueDetails/groupRelatedIssues/index.tsx b/static/app/views/issueDetails/groupRelatedIssues/index.tsx index 16c59402322a58..66728baea36b3f 100644 --- a/static/app/views/issueDetails/groupRelatedIssues/index.tsx +++ b/static/app/views/issueDetails/groupRelatedIssues/index.tsx @@ -87,6 +87,8 @@ function GroupRelatedIssues({params}: Props) { {t('Open in 
Issues')} @@ -124,6 +126,8 @@ function GroupRelatedIssues({params}: Props) { {t('Open in Issues')} From e856a701358659d7f8f9387a30dd5d60d16df356 Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Mon, 6 May 2024 12:14:13 -0400 Subject: [PATCH 013/376] ref(GroupList): Prefer queryParams over query (#70027) It can get confusing as to when to use `query` vs `queryParams`. This simplifies the logic. --------- Co-authored-by: Evan Purkhiser --- static/app/components/issues/groupList.tsx | 16 ++++++++++++---- .../issueDetails/groupRelatedIssues/index.tsx | 8 ++------ .../views/monitors/components/monitorIssues.tsx | 2 -- .../transactionOverview/relatedIssues.tsx | 9 +++------ static/app/views/projectDetail/projectIssues.tsx | 2 -- 5 files changed, 17 insertions(+), 20 deletions(-) diff --git a/static/app/components/issues/groupList.tsx b/static/app/components/issues/groupList.tsx index b5e5a68709dbad..45fa0540cc97c0 100644 --- a/static/app/components/issues/groupList.tsx +++ b/static/app/components/issues/groupList.tsx @@ -51,10 +51,13 @@ export type GroupListColumn = type Props = WithRouterProps & { api: Client; - endpointPath: string; orgSlug: string; - query: string; + queryParams: Record; customStatsPeriod?: TimePeriodType; + /** + * Defaults to `/organizations/${orgSlug}/issues/` + */ + endpointPath?: string; onFetchSuccess?: ( groupListState: State, onCursor: ( @@ -64,8 +67,11 @@ type Props = WithRouterProps & { pageDiff: number ) => void ) => void; + /** + * Use `query` within `queryParams` for passing the parameter to the endpoint + */ + query?: string; queryFilterDescription?: string; - queryParams?: Record; renderEmptyMessage?: () => React.ReactNode; renderErrorMessage?: (props: {detail: string}, retry: () => void) => React.ReactNode; // where the group list is rendered @@ -142,7 +148,9 @@ class GroupList extends Component { const endpoint = this.getGroupListEndpoint(); - const parsedQuery = parseSearch((queryParams ?? this.getQueryParams()).query); + const parsedQuery = parseSearch( + (queryParams ?? this.getQueryParams()).query as string + ); const hasLogicBoolean = parsedQuery ? 
treeResultLocator({ tree: parsedQuery, diff --git a/static/app/views/issueDetails/groupRelatedIssues/index.tsx b/static/app/views/issueDetails/groupRelatedIssues/index.tsx index 66728baea36b3f..31ffbce4bf2c67 100644 --- a/static/app/views/issueDetails/groupRelatedIssues/index.tsx +++ b/static/app/views/issueDetails/groupRelatedIssues/index.tsx @@ -94,11 +94,9 @@ function GroupRelatedIssues({params}: Props) { @@ -133,11 +131,9 @@ function GroupRelatedIssues({params}: Props) { diff --git a/static/app/views/monitors/components/monitorIssues.tsx b/static/app/views/monitors/components/monitorIssues.tsx index 065631d63a211b..86c850f0ce95f0 100644 --- a/static/app/views/monitors/components/monitorIssues.tsx +++ b/static/app/views/monitors/components/monitorIssues.tsx @@ -93,14 +93,12 @@ function MonitorIssues({orgSlug, monitor, monitorEnvs}: Props) { { - getIssuesEndpoint() { - const {transaction, organization, start, end, statsPeriod, location} = this.props; + getIssuesEndpointQueryParams() { + const {transaction, start, end, statsPeriod, location} = this.props; const queryParams = { start, @@ -48,7 +48,6 @@ class RelatedIssues extends Component { .setFilterValues('transaction', [transaction]); return { - path: `/organizations/${organization.slug}/issues/`, queryParams: { ...queryParams, query: currentFilter.formatString(), @@ -88,7 +87,7 @@ class RelatedIssues extends Component { render() { const {organization} = this.props; - const {path, queryParams} = this.getIssuesEndpoint(); + const {queryParams} = this.getIssuesEndpointQueryParams(); const issueSearch = { pathname: `/organizations/${organization.slug}/issues/`, query: {referrer: 'performance-related-issues', ...queryParams}, @@ -111,9 +110,7 @@ class RelatedIssues extends Component { Date: Mon, 6 May 2024 09:14:51 -0700 Subject: [PATCH 014/376] feat(api): Add `organization_id_or_slug` Support to Customer Domain Middleware (#70172) --- src/sentry/middleware/customer_domain.py | 19 +++-- .../sentry/middleware/test_customer_domain.py | 79 +++++++++++++++++++ 2 files changed, 93 insertions(+), 5 deletions(-) diff --git a/src/sentry/middleware/customer_domain.py b/src/sentry/middleware/customer_domain.py index ea63e2d70e5f5a..4b7e4b7d4967b8 100644 --- a/src/sentry/middleware/customer_domain.py +++ b/src/sentry/middleware/customer_domain.py @@ -52,16 +52,25 @@ def _resolve_redirect_url(request, activeorg): if redirect_subdomain: redirect_url = generate_organization_url(activeorg) result = resolve(request.path) - org_slug_path_mismatch = ( - result.kwargs - and "organization_slug" in result.kwargs - and result.kwargs["organization_slug"] != activeorg + org_slug_path_mismatch = result.kwargs and ( + ("organization_slug" in result.kwargs and result.kwargs["organization_slug"] != activeorg) + or ( + "organization_id_or_slug" in result.kwargs + and result.kwargs["organization_id_or_slug"] != activeorg + and not str(result.kwargs["organization_id_or_slug"]).isdecimal() + ) ) if not redirect_subdomain and not org_slug_path_mismatch: return None kwargs = {**result.kwargs} + + # Make sure if organization_id_or_slug is passed in, it is a slug if org_slug_path_mismatch: - kwargs["organization_slug"] = activeorg + if "organization_slug" in kwargs: + kwargs["organization_slug"] = activeorg + else: + kwargs["organization_id_or_slug"] = activeorg + path = reverse(result.url_name or result.func, kwargs=kwargs) qs = _query_string(request) redirect_url = f"{redirect_url}{path}{qs}" diff --git a/tests/sentry/middleware/test_customer_domain.py 
b/tests/sentry/middleware/test_customer_domain.py index fb31e4df4dda6f..e52e37c5409cc0 100644 --- a/tests/sentry/middleware/test_customer_domain.py +++ b/tests/sentry/middleware/test_customer_domain.py @@ -188,6 +188,19 @@ def post(self, request, organization_slug): ) +class OrganizationIdOrSlugTestEndpoint(Endpoint): + permission_classes = (AllowAny,) + + def get(self, request, organization_id_or_slug): + return Response( + { + "organization_id_or_slug": organization_id_or_slug, + "subdomain": request.subdomain, + "activeorg": request.session.get("activeorg", None), + } + ) + + urlpatterns = [ re_path( r"^api/0/(?P[^\/]+)/$", @@ -198,6 +211,11 @@ def post(self, request, organization_slug): r"^api/0/(?P[^\/]+)/nameless/$", OrganizationTestEndpoint.as_view(), ), + re_path( + r"^api/0/(?P[^\/]+)/idorslug/$", + OrganizationIdOrSlugTestEndpoint.as_view(), + name="org-events-endpoint-id-or-slug", + ), re_path(r"^logout/$", AuthLogoutView.as_view(), name="sentry-logout"), ] @@ -265,6 +283,36 @@ def test_with_middleware_no_customer_domain(self): assert "activeorg" in self.client.session assert self.client.session["activeorg"] == "test" + response = self.client.get( + reverse("org-events-endpoint-id-or-slug", kwargs={"organization_id_or_slug": 1234}), + follow=True, + ) + assert response.status_code == 200 + assert response.redirect_chain == [] + assert response.data == { + "organization_id_or_slug": "1234", + "subdomain": None, + "activeorg": "test", + } + assert "activeorg" in self.client.session + assert self.client.session["activeorg"] == "test" + + response = self.client.get( + reverse( + "org-events-endpoint-id-or-slug", kwargs={"organization_id_or_slug": "some-org"} + ), + follow=True, + ) + assert response.status_code == 200 + assert response.redirect_chain == [] + assert response.data == { + "organization_id_or_slug": "some-org", + "subdomain": None, + "activeorg": "test", + } + assert "activeorg" in self.client.session + assert self.client.session["activeorg"] == "test" + def test_with_middleware_and_customer_domain(self): self.create_organization(name="albertos-apples") @@ -343,6 +391,37 @@ def test_with_middleware_and_customer_domain(self): assert response.status_code == 200 assert response.redirect_chain == [] + # Redirect response for org id or slug path mismatch + response = self.client.get( + reverse( + "org-events-endpoint-id-or-slug", kwargs={"organization_id_or_slug": "some-org"} + ), + data={"querystring": "value"}, + HTTP_HOST="albertos-apples.testserver", + follow=True, + ) + assert response.status_code == 200 + assert response.redirect_chain == [ + ("/api/0/albertos-apples/idorslug/?querystring=value", 302) + ] + assert response.data == { + "organization_id_or_slug": "albertos-apples", + "subdomain": "albertos-apples", + "activeorg": "albertos-apples", + } + assert "activeorg" in self.client.session + assert self.client.session["activeorg"] == "albertos-apples" + + # No redirect for id + response = self.client.get( + reverse("org-events-endpoint-id-or-slug", kwargs={"organization_id_or_slug": 1234}), + data={"querystring": "value"}, + HTTP_HOST="albertos-apples.testserver", + follow=True, + ) + assert response.status_code == 200 + assert response.redirect_chain == [] + def test_with_middleware_and_non_staff(self): self.create_organization(name="albertos-apples") non_staff_user = self.create_user(is_staff=False) From ac308ce1ac0ca84bccd50acddc5477ada2f1b37c Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Mon, 6 May 2024 09:41:58 
-0700 Subject: [PATCH 015/376] fix(github-comments): use bigint for commitId (#70358) --- src/sentry/tasks/integrations/github/pr_comment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/tasks/integrations/github/pr_comment.py b/src/sentry/tasks/integrations/github/pr_comment.py index f0330ad2ba13bf..b6889944b649a3 100644 --- a/src/sentry/tasks/integrations/github/pr_comment.py +++ b/src/sentry/tasks/integrations/github/pr_comment.py @@ -83,7 +83,7 @@ def pr_to_issue_query(pr_id: int): pr.organization_id org_id, array_agg(go.group_id ORDER BY go.date_added) issues FROM sentry_groupowner go - JOIN sentry_pullrequest_commit c ON c.commit_id = (go.context::jsonb->>'commitId')::int + JOIN sentry_pullrequest_commit c ON c.commit_id = (go.context::jsonb->>'commitId')::bigint JOIN sentry_pull_request pr ON c.pull_request_id = pr.id WHERE go.type=0 AND pr.id={pr_id} From 312668af661c21130529152df131092eb65b593d Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 6 May 2024 12:58:23 -0400 Subject: [PATCH 016/376] ref: type a few more sentry.utils modules (#70357) --- pyproject.toml | 5 +++++ src/sentry/utils/celery.py | 3 ++- src/sentry/utils/event.py | 9 ++++++--- src/sentry/utils/files.py | 9 ++++++++- src/sentry/utils/javascript.py | 10 ++++++++-- src/sentry/utils/migrations.py | 4 ++-- 6 files changed, 31 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ab9b3422362960..2cefa9f7ba4e77 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -591,12 +591,17 @@ module = [ "sentry.utils.assets", "sentry.utils.audit", "sentry.utils.canonical", + "sentry.utils.celery", "sentry.utils.codeowners", "sentry.utils.colors", "sentry.utils.email.*", "sentry.utils.env", + "sentry.utils.event", + "sentry.utils.files", "sentry.utils.iterators", + "sentry.utils.javascript", "sentry.utils.locking.backends.redis", + "sentry.utils.migrations", "sentry.utils.otp", "sentry.utils.redis", "sentry.utils.redis_metrics", diff --git a/src/sentry/utils/celery.py b/src/sentry/utils/celery.py index 2ec98f6ed26cf4..05e27fec298bf7 100644 --- a/src/sentry/utils/celery.py +++ b/src/sentry/utils/celery.py @@ -1,8 +1,9 @@ from random import randint +from typing import Any from celery.schedules import crontab -def crontab_with_minute_jitter(*args, **kwargs): +def crontab_with_minute_jitter(*args: Any, **kwargs: Any) -> crontab: kwargs["minute"] = randint(0, 59) return crontab(*args, **kwargs) diff --git a/src/sentry/utils/event.py b/src/sentry/utils/event.py index 8b21a4559ad9a7..f950f4d1c706fa 100644 --- a/src/sentry/utils/event.py +++ b/src/sentry/utils/event.py @@ -1,10 +1,13 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from sentry.utils.safe import get_path +if TYPE_CHECKING: + from sentry.eventstore.models import Event + def has_stacktrace(event_data: Mapping[str, Any]) -> bool: """ @@ -55,7 +58,7 @@ def is_handled(event_data: Mapping[str, Any]) -> bool | None: # Check if an event contains a minified stack trace (source maps for javascript) -def has_event_minified_stack_trace(event): +def has_event_minified_stack_trace(event: Event) -> bool: exception_values = get_path(event.data, "exception", "values", filter=True) if exception_values: @@ -66,7 +69,7 @@ def has_event_minified_stack_trace(event): return False -def is_event_from_browser_javascript_sdk(event): +def is_event_from_browser_javascript_sdk(event: dict[str, Any]) 
-> bool: sdk_name = get_path(event, "sdk", "name") if sdk_name is None: return False diff --git a/src/sentry/utils/files.py b/src/sentry/utils/files.py index 9977ccc32d28cb..6aea61b4c365b7 100644 --- a/src/sentry/utils/files.py +++ b/src/sentry/utils/files.py @@ -1,8 +1,15 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from sentry import features, options from sentry.models.files.utils import MAX_FILE_SIZE +if TYPE_CHECKING: + from sentry.models.organization import Organization + -def get_max_file_size(organization): +def get_max_file_size(organization: Organization) -> int: """Returns the maximum allowed debug file size for this organization.""" if features.has("organizations:large-debug-files", organization): return MAX_FILE_SIZE diff --git a/src/sentry/utils/javascript.py b/src/sentry/utils/javascript.py index 79bd788999306d..62c37acba2fcdd 100644 --- a/src/sentry/utils/javascript.py +++ b/src/sentry/utils/javascript.py @@ -1,11 +1,17 @@ +from __future__ import annotations + import re +from typing import TYPE_CHECKING from sentry.utils.safe import get_path +if TYPE_CHECKING: + from sentry.eventstore.models import Event + SOURCE_MAPPING_URL_RE = re.compile(b"//# sourceMappingURL=(.*)$") -def has_sourcemap(event): +def has_sourcemap(event: Event) -> bool: if event.platform not in ("javascript", "node"): return False @@ -17,7 +23,7 @@ def has_sourcemap(event): return False -def find_sourcemap(sourcemap_header, body): +def find_sourcemap(sourcemap_header: bytes | None, body: bytes) -> bytes | None: sourcemap_url = sourcemap_header if not sourcemap_header: parsed_body = body.split(b"\n") diff --git a/src/sentry/utils/migrations.py b/src/sentry/utils/migrations.py index c39ec7f06d6ad9..e5a9bed7e57acc 100644 --- a/src/sentry/utils/migrations.py +++ b/src/sentry/utils/migrations.py @@ -1,11 +1,11 @@ from django.db import router -from django.db.models import F +from django.db.models import F, Model from sentry.silo.safety import unguarded_write from sentry.utils.query import RangeQuerySetWrapperWithProgressBar -def clear_flag(Model, flag_name, flag_attr_name="flags"): +def clear_flag(Model: type[Model], flag_name: str, flag_attr_name: str = "flags") -> None: """ This function is used to clear an existing flag value for all items in a given model """ From a7f88f7f380566477aa19eba8ab39e4cfd11f29c Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 6 May 2024 12:59:09 -0400 Subject: [PATCH 017/376] ref: fix typing by converting RunTaskWithMultiprocessing to a plain function (#70356) --- pyproject.toml | 1 + src/sentry/conf/server.py | 2 +- src/sentry/ingest/consumer/factory.py | 4 +- src/sentry/issues/run.py | 4 +- .../post_process_forwarder.py | 4 +- src/sentry/replays/consumers/recording.py | 4 +- src/sentry/sentry_metrics/configuration.py | 4 +- .../consumers/indexer/parallel.py | 4 +- src/sentry/snuba/query_subscriptions/run.py | 4 +- .../detect_performance_issues/factory.py | 4 +- src/sentry/spans/consumers/process/factory.py | 4 +- src/sentry/utils/arroyo.py | 56 +++++++++---------- 12 files changed, 45 insertions(+), 50 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2cefa9f7ba4e77..5cfa26bafeb9d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -588,6 +588,7 @@ module = [ "sentry.tasks.on_demand_metrics", "sentry.tasks.reprocessing2", "sentry.utils.actor", + "sentry.utils.arroyo", "sentry.utils.assets", "sentry.utils.audit", "sentry.utils.canonical", diff --git 
a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 055b0695156832..5b8615e16a1305 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3444,7 +3444,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: } -# If True, sentry.utils.arroyo.RunTaskWithMultiprocessing will actually be +# If True, sentry.utils.arroyo.run_task_with_multiprocessing will actually be # single-threaded under the hood for performance KAFKA_CONSUMER_FORCE_DISABLE_MULTIPROCESSING = False diff --git a/src/sentry/ingest/consumer/factory.py b/src/sentry/ingest/consumer/factory.py index f2c548c6075002..7e139c5b353da3 100644 --- a/src/sentry/ingest/consumer/factory.py +++ b/src/sentry/ingest/consumer/factory.py @@ -16,7 +16,7 @@ from sentry.ingest.types import ConsumerType from sentry.processing.backpressure.arroyo import HealthChecker, create_backpressure_step -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing from .attachment_event import decode_and_process_chunks, process_attachments_and_events from .simple_event import process_simple_event_message @@ -42,7 +42,7 @@ def maybe_multiprocess_step( ) -> ProcessingStrategy[FilteredPayload | TInput]: if mp is not None: assert pool is not None - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=function, next_step=next_step, max_batch_size=mp.max_batch_size, diff --git a/src/sentry/issues/run.py b/src/sentry/issues/run.py index 1e5fd0e4f42107..5d365dcd67ba7e 100644 --- a/src/sentry/issues/run.py +++ b/src/sentry/issues/run.py @@ -14,7 +14,7 @@ from arroyo.processing.strategies.run_task import RunTask from arroyo.types import Commit, Message, Partition -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing logger = logging.getLogger(__name__) @@ -52,7 +52,7 @@ def crate_parallel_worker( commit: Commit, ) -> ProcessingStrategy[KafkaPayload]: assert self.pool is not None - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=process_message, next_step=CommitOffsets(commit), max_batch_size=self.max_batch_size, diff --git a/src/sentry/post_process_forwarder/post_process_forwarder.py b/src/sentry/post_process_forwarder/post_process_forwarder.py index 1875a5cb3e6e60..97b6e729555b11 100644 --- a/src/sentry/post_process_forwarder/post_process_forwarder.py +++ b/src/sentry/post_process_forwarder/post_process_forwarder.py @@ -11,7 +11,7 @@ ) from arroyo.types import Commit, Message, Partition -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing logger = logging.getLogger(__name__) @@ -56,7 +56,7 @@ def create_with_partitions( ) elif self.mode == "multiprocess": logger.info("Starting multiprocess post process forwarder") - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=self._dispatch_function, next_step=CommitOffsets(commit), max_batch_size=self.max_batch_size, diff --git a/src/sentry/replays/consumers/recording.py b/src/sentry/replays/consumers/recording.py index 21028ae834eb01..abd5ac57d1b71f 100644 --- a/src/sentry/replays/consumers/recording.py +++ b/src/sentry/replays/consumers/recording.py @@ -17,7 +17,7 @@ from sentry_sdk.tracing import Span from sentry.replays.usecases.ingest import ingest_recording -from 
sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing logger = logging.getLogger(__name__) @@ -76,7 +76,7 @@ def create_with_partitions( ) elif self.use_processes: assert self.pool is not None - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=process_message, next_step=CommitOffsets(commit), max_batch_size=self.max_batch_size, diff --git a/src/sentry/sentry_metrics/configuration.py b/src/sentry/sentry_metrics/configuration.py index c715de60f444ca..fbc824ff4481b5 100644 --- a/src/sentry/sentry_metrics/configuration.py +++ b/src/sentry/sentry_metrics/configuration.py @@ -147,9 +147,9 @@ def initialize_subprocess_state(config: MetricsIngestConfiguration) -> None: This function should ideally be kept minimal and not contain too much logic. Commonly reusable bits should be added to - sentry.utils.arroyo.RunTaskWithMultiprocessing. + sentry.utils.arroyo.run_task_with_multiprocessing. - We already rely on sentry.utils.arroyo.RunTaskWithMultiprocessing to copy + We already rely on sentry.utils.arroyo.run_task_with_multiprocessing to copy statsd tags into the subprocess, eventually we should do the same for Sentry tags. """ diff --git a/src/sentry/sentry_metrics/consumers/indexer/parallel.py b/src/sentry/sentry_metrics/consumers/indexer/parallel.py index 36457c1869e20f..b86c63913fca08 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/parallel.py +++ b/src/sentry/sentry_metrics/consumers/indexer/parallel.py @@ -27,7 +27,7 @@ RoutingProducerStep, ) from sentry.sentry_metrics.consumers.indexer.slicing_router import SlicingRouter -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing from sentry.utils.kafka import delay_kafka_rebalance logger = logging.getLogger(__name__) @@ -170,7 +170,7 @@ def create_with_partitions( slicing_router=self.__slicing_router, ) - parallel_strategy = RunTaskWithMultiprocessing( + parallel_strategy = run_task_with_multiprocessing( function=MessageProcessor(self.config).process_messages, next_step=Unbatcher(next_step=producer), pool=self.__pool, diff --git a/src/sentry/snuba/query_subscriptions/run.py b/src/sentry/snuba/query_subscriptions/run.py index f33a6307bd3aa1..532d68e73cbb66 100644 --- a/src/sentry/snuba/query_subscriptions/run.py +++ b/src/sentry/snuba/query_subscriptions/run.py @@ -17,7 +17,7 @@ from sentry.features.rollout import in_random_rollout from sentry.snuba.dataset import Dataset from sentry.snuba.query_subscriptions.constants import dataset_to_logical_topic -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing from sentry.utils.kafka_config import get_topic_definition logger = logging.getLogger(__name__) @@ -51,7 +51,7 @@ def create_with_partitions( ) -> ProcessingStrategy[KafkaPayload]: callable = partial(process_message, self.dataset, self.topic, self.logical_topic) if self.multi_proc: - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=callable, next_step=CommitOffsets(commit), max_batch_size=self.max_batch_size, diff --git a/src/sentry/spans/consumers/detect_performance_issues/factory.py b/src/sentry/spans/consumers/detect_performance_issues/factory.py index 6a59f2b2fd2261..7efc00163b26e6 100644 --- 
a/src/sentry/spans/consumers/detect_performance_issues/factory.py +++ b/src/sentry/spans/consumers/detect_performance_issues/factory.py @@ -13,7 +13,7 @@ from sentry import options from sentry.spans.consumers.detect_performance_issues.message import process_segment -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing BUFFERED_SEGMENT_SCHEMA: Codec[BufferedSegment] = get_codec("buffered-segments") @@ -70,7 +70,7 @@ def create_with_partitions( commit: Commit, partitions: Mapping[Partition, int], ) -> ProcessingStrategy[KafkaPayload]: - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=_process_message, next_step=CommitOffsets(commit), max_batch_size=self.max_batch_size, diff --git a/src/sentry/spans/consumers/process/factory.py b/src/sentry/spans/consumers/process/factory.py index ca989cbc44930d..d8245f14426c95 100644 --- a/src/sentry/spans/consumers/process/factory.py +++ b/src/sentry/spans/consumers/process/factory.py @@ -25,7 +25,7 @@ from sentry.spans.buffer.redis import ProcessSegmentsContext, RedisSpansBuffer, SegmentKey from sentry.spans.consumers.process.strategy import CommitSpanOffsets, NoOp from sentry.utils import metrics -from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing +from sentry.utils.arroyo import MultiprocessingPool, run_task_with_multiprocessing from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition logger = logging.getLogger(__name__) @@ -309,7 +309,7 @@ def create_with_partitions( next_step=batch_processor, ) - return RunTaskWithMultiprocessing( + return run_task_with_multiprocessing( function=process_message, next_step=batch_step, max_batch_size=self.max_batch_size, diff --git a/src/sentry/utils/arroyo.py b/src/sentry/utils/arroyo.py index d78558c9065571..5c1ddeb3a12102 100644 --- a/src/sentry/utils/arroyo.py +++ b/src/sentry/utils/arroyo.py @@ -5,6 +5,7 @@ from functools import partial from typing import Any +from arroyo.processing.strategies.run_task import RunTask from arroyo.processing.strategies.run_task_with_multiprocessing import ( MultiprocessingPool as ArroyoMultiprocessingPool, ) @@ -12,7 +13,7 @@ RunTaskWithMultiprocessing as ArroyoRunTaskWithMultiprocessing, ) from arroyo.processing.strategies.run_task_with_multiprocessing import TResult -from arroyo.types import TStrategyPayload +from arroyo.types import Message, TStrategyPayload from arroyo.utils.metrics import Metrics from django.conf import settings @@ -157,43 +158,36 @@ def close(self) -> None: self.__pool.close() -class RunTaskWithMultiprocessing(ArroyoRunTaskWithMultiprocessing[TStrategyPayload, TResult]): +def run_task_with_multiprocessing( + *, + pool: MultiprocessingPool, + function: Callable[[Message[TStrategyPayload]], TResult], + **kwargs: Any, +) -> RunTask[TStrategyPayload, TResult] | ArroyoRunTaskWithMultiprocessing[ + TStrategyPayload, TResult +]: """ A variant of arroyo's RunTaskWithMultiprocessing that can switch between multiprocessing and non-multiprocessing mode based on the `KAFKA_CONSUMER_FORCE_DISABLE_MULTIPROCESSING` setting. 
""" - def __new__( - cls, - *, - pool: MultiprocessingPool, - **kwargs: Any, - ) -> RunTaskWithMultiprocessing: - if settings.KAFKA_CONSUMER_FORCE_DISABLE_MULTIPROCESSING: - from arroyo.processing.strategies.run_task import RunTask - - kwargs.pop("num_processes", None) - kwargs.pop("input_block_size", None) - kwargs.pop("output_block_size", None) - kwargs.pop("max_batch_size", None) - kwargs.pop("max_batch_time", None) - - if pool.initializer is not None: - pool.initializer() + if settings.KAFKA_CONSUMER_FORCE_DISABLE_MULTIPROCESSING: + kwargs.pop("num_processes", None) + kwargs.pop("input_block_size", None) + kwargs.pop("output_block_size", None) + kwargs.pop("max_batch_size", None) + kwargs.pop("max_batch_time", None) - # Assert that initializer can be pickled and loaded again from subprocesses. - pickle.loads(pickle.dumps(pool.initializer)) - pickle.loads(pickle.dumps(kwargs["function"])) + if pool.initializer is not None: + pool.initializer() - return RunTask(**kwargs) # type: ignore[return-value] - else: - from arroyo.processing.strategies.run_task_with_multiprocessing import ( - RunTaskWithMultiprocessing as ArroyoRunTaskWithMultiprocessing, - ) + # Assert that initializer can be pickled and loaded again from subprocesses. + pickle.loads(pickle.dumps(pool.initializer)) + pickle.loads(pickle.dumps(function)) - assert pool.pool is not None + return RunTask(function=function, **kwargs) + else: + assert pool.pool is not None - return ArroyoRunTaskWithMultiprocessing( # type: ignore[return-value] - pool=pool.pool, **kwargs - ) + return ArroyoRunTaskWithMultiprocessing(pool=pool.pool, function=function, **kwargs) From 2163d6ad9a0ba79ea037251c9a2acd8bd7548e45 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Mon, 6 May 2024 13:08:48 -0400 Subject: [PATCH 018/376] perf: move assemble status to redis (#70344) Dual writes to memcache and redis while transitioning assemble status to redis Ref: https://github.com/getsentry/sentry/issues/68598 --- src/sentry/conf/server.py | 1 + src/sentry/options/defaults.py | 4 +++ src/sentry/tasks/assemble.py | 36 ++++++++++++++++++++++++--- src/sentry/testutils/helpers/redis.py | 4 +++ tests/sentry/tasks/test_assemble.py | 25 +++++++++++++++++++ 5 files changed, 66 insertions(+), 4 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 5b8615e16a1305..023da91512f2a2 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -135,6 +135,7 @@ def env( SENTRY_METRIC_META_REDIS_CLUSTER = "default" SENTRY_ESCALATION_THRESHOLDS_REDIS_CLUSTER = "default" SENTRY_SPAN_BUFFER_CLUSTER = "default" +SENTRY_ASSEMBLE_CLUSTER = "default" # Hosts that are allowed to use system token authentication. 
# http://en.wikipedia.org/wiki/Reserved_IP_addresses diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index ac39b1a3a94c1e..0eb45b0b25ab23 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2298,6 +2298,10 @@ flags=FLAG_BOOL | FLAG_AUTOMATOR_MODIFIABLE, ) + +# Switch to read assemble status from Redis instead of memcache +register("assemble.read_from_redis", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE) + # Sampling rates for testing Rust-based grouping enhancers # Rate at which to run the Rust implementation of `assemble_stacktrace_component` diff --git a/src/sentry/tasks/assemble.py b/src/sentry/tasks/assemble.py index dcfdde8895f3bd..cce0b046e1394a 100644 --- a/src/sentry/tasks/assemble.py +++ b/src/sentry/tasks/assemble.py @@ -6,9 +6,11 @@ from abc import ABC, abstractmethod from datetime import datetime from os import path -from typing import IO, Generic, NamedTuple, Protocol, TypeVar +from typing import IO, TYPE_CHECKING, Generic, NamedTuple, Protocol, TypeVar +import orjson import sentry_sdk +from django.conf import settings from django.db import IntegrityError, router from django.db.models import Q from django.utils import timezone @@ -39,13 +41,16 @@ from sentry.models.releasefile import ReleaseArchive, ReleaseFile, update_artifact_index from sentry.silo.base import SiloMode from sentry.tasks.base import instrumented_task -from sentry.utils import metrics +from sentry.utils import metrics, redis from sentry.utils.db import atomic_transaction from sentry.utils.files import get_max_file_size from sentry.utils.sdk import bind_organization_context, configure_scope logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from rediscluster import RedisCluster + class ChunkFileState: OK = "ok" # File in database @@ -164,12 +169,18 @@ def _get_cache_key(task, scope, checksum): % ( str(scope).encode("ascii"), checksum.encode("ascii"), - str(task).encode("utf-8"), + str(task).encode(), ) ).hexdigest() ) +def _get_redis_cluster_for_assemble() -> RedisCluster: + cluster_key = settings.SENTRY_ASSEMBLE_CLUSTER + return redis.redis_clusters.get(cluster_key) # type: ignore[return-value] + + +@sentry_sdk.tracing.trace def get_assemble_status(task, scope, checksum): """ Checks the current status of an assembling task. @@ -179,26 +190,43 @@ def get_assemble_status(task, scope, checksum): notice or error message. """ cache_key = _get_cache_key(task, scope, checksum) - rv = default_cache.get(cache_key) + + if options.get("assemble.read_from_redis"): + client = _get_redis_cluster_for_assemble() + rv = client.get(cache_key) + + # It is stored as bytes with [state, detail] on Redis. + if rv: + [state, detail] = orjson.loads(rv) + rv = (state, detail) + else: + rv = default_cache.get(cache_key) + if rv is None: return None, None return tuple(rv) +@sentry_sdk.tracing.trace def set_assemble_status(task, scope, checksum, state, detail=None): """ Updates the status of an assembling task. It is cached for 10 minutes. """ cache_key = _get_cache_key(task, scope, checksum) default_cache.set(cache_key, (state, detail), 600) + redis_client = _get_redis_cluster_for_assemble() + redis_client.set(name=cache_key, value=orjson.dumps([state, detail]), ex=600) +@sentry_sdk.tracing.trace def delete_assemble_status(task, scope, checksum): """ Deletes the status of an assembling task. 
""" cache_key = _get_cache_key(task, scope, checksum) default_cache.delete(cache_key) + redis_client = _get_redis_cluster_for_assemble() + redis_client.delete(cache_key) @instrumented_task( diff --git a/src/sentry/testutils/helpers/redis.py b/src/sentry/testutils/helpers/redis.py index 1537f02ade42f9..88da4e737e106c 100644 --- a/src/sentry/testutils/helpers/redis.py +++ b/src/sentry/testutils/helpers/redis.py @@ -12,6 +12,7 @@ def use_redis_cluster( cluster_id: str = "cluster", high_watermark: int = 100, with_settings: dict[str, Any] | None = None, + with_options: dict[str, Any] | None = None, ) -> Generator[None, None, None]: # Cluster id needs to be different than "default" to distinguish redis instance with redis cluster. @@ -32,6 +33,9 @@ def use_redis_cluster( }, } + if with_options: + options.update(with_options) + settings = dict(with_settings or {}) settings["SENTRY_PROCESSING_SERVICES"] = {"redis": {"redis": cluster_id}} diff --git a/tests/sentry/tasks/test_assemble.py b/tests/sentry/tasks/test_assemble.py index 786d1acd838976..0393c40836664f 100644 --- a/tests/sentry/tasks/test_assemble.py +++ b/tests/sentry/tasks/test_assemble.py @@ -1,5 +1,6 @@ import io import os +import uuid from datetime import UTC, datetime, timedelta from hashlib import sha1 from unittest import mock @@ -28,10 +29,13 @@ assemble_artifacts, assemble_dif, assemble_file, + delete_assemble_status, get_assemble_status, + set_assemble_status, ) from sentry.testutils.cases import TestCase from sentry.testutils.helpers.datetime import freeze_time +from sentry.testutils.helpers.redis import use_redis_cluster class BaseAssembleTest(TestCase): @@ -1047,3 +1051,24 @@ def test_index_if_needed_with_newer_bundle_already_stored( organization_id=self.organization.id, artifact_bundles=[(artifact_bundle_1, mock.ANY)], ) + + +@use_redis_cluster(with_options={"assemble.read_from_redis": True}) +def test_redis_assemble_status(): + task = AssembleTask.DIF + project_id = uuid.uuid4().hex + checksum = uuid.uuid4().hex + + # If it doesn't exist, it should return correct values. + assert get_assemble_status(task=task, scope=project_id, checksum=checksum) == (None, None) + + # Test setter + set_assemble_status(task, project_id, checksum, ChunkFileState.CREATED, detail="cylons") + assert get_assemble_status(task=task, scope=project_id, checksum=checksum) == ( + "created", + "cylons", + ) + + # Deleting should actually delete it. + delete_assemble_status(task, project_id, checksum=checksum) + assert get_assemble_status(task=task, scope=project_id, checksum=checksum) == (None, None) From 72ee59a34711c2708225b36bd1e5545a7b0ef8fb Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Mon, 6 May 2024 13:10:22 -0400 Subject: [PATCH 019/376] fix(screenloads): Add sentry.dart to supported platforms (#70361) It looks like `sentry.dart` is a possible platform name from SDKs. Add it to the supported list. 
--- static/app/views/performance/mobile/screenload/index.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/static/app/views/performance/mobile/screenload/index.tsx b/static/app/views/performance/mobile/screenload/index.tsx index d133e66ff1edcd..1e0965a12c1502 100644 --- a/static/app/views/performance/mobile/screenload/index.tsx +++ b/static/app/views/performance/mobile/screenload/index.tsx @@ -92,6 +92,7 @@ export default function PageloadModule() { 'sentry.cocoa', 'sentry.javascript.react-native', 'sentry.dart.flutter', + 'sentry.dart', ]} docsUrl="https://docs.sentry.io/product/performance/mobile-vitals/screen-loads/#minimum-sdk-requirements" > From e95f9792ec712d6c3c87242fff75a3a598dfbc22 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Mon, 6 May 2024 13:21:10 -0400 Subject: [PATCH 020/376] fix(hybridcloud) Fix SentryApp external install flow not showing EU orgs (#70191) Update the organization loading to also fetch organizations from non-us regions. I also had to upgrade this component from an `AsyncComponent` to an FC one as fetching orgs from multiple regions would have been hard in the `AsyncComponent` implementation. --- .../index.spec.tsx | 36 ++ .../sentryAppExternalInstallation/index.tsx | 530 ++++++++++-------- 2 files changed, 320 insertions(+), 246 deletions(-) diff --git a/static/app/views/sentryAppExternalInstallation/index.spec.tsx b/static/app/views/sentryAppExternalInstallation/index.spec.tsx index b11aad58c937f3..c0f1c4556eb100 100644 --- a/static/app/views/sentryAppExternalInstallation/index.spec.tsx +++ b/static/app/views/sentryAppExternalInstallation/index.spec.tsx @@ -103,6 +103,7 @@ describe('SentryAppExternalInstallation', () => { params={{sentryAppSlug: sentryApp.slug}} /> ); + await waitFor(() => expect(getInstallationsMock).toHaveBeenCalled()); expect( await screen.findByText( @@ -136,6 +137,7 @@ describe('SentryAppExternalInstallation', () => { params={{sentryAppSlug: sentryApp.slug}} /> ); + await waitFor(() => expect(getInstallationsMock).toHaveBeenCalled()); await userEvent.click(await screen.findByTestId('install')); // failing currently @@ -194,6 +196,7 @@ describe('SentryAppExternalInstallation', () => { params={{sentryAppSlug: sentryApp.slug}} /> ); + await waitFor(() => expect(getInstallationsMock).toHaveBeenCalled()); expect(getAppMock).toHaveBeenCalled(); expect(getOrgsMock).toHaveBeenCalled(); @@ -203,6 +206,38 @@ describe('SentryAppExternalInstallation', () => { await waitFor(() => expect(screen.getByTestId('install')).toBeEnabled()); }); + it('loads orgs from multiple regions', async () => { + window.__initialData = { + ...window.__initialData, + memberRegions: [ + {name: 'us', url: 'https://us.example.org'}, + {name: 'de', url: 'https://de.example.org'}, + ], + }; + ConfigStore.loadInitialData(window.__initialData); + + const deorg = OrganizationFixture({slug: 'de-org'}); + const getDeOrgs = MockApiClient.addMockResponse({ + url: '/organizations/', + body: [deorg], + match: [ + function (_url: string, options: Record) { + return options.host === 'https://de.example.org'; + }, + ], + }); + + render( + + ); + await waitFor(() => expect(getInstallationsMock).toHaveBeenCalled()); + + expect(getDeOrgs).toHaveBeenCalled(); + }); + it('selecting org changes the url', async () => { const preselectedOrg = OrganizationFixture(); const {routerProps} = initializeOrg({organization: preselectedOrg}); @@ -235,6 +270,7 @@ describe('SentryAppExternalInstallation', () => { params={{sentryAppSlug: sentryApp.slug}} /> ); + await waitFor(() => 
expect(getInstallationsMock).toHaveBeenCalled()); await selectEvent.select(screen.getByRole('textbox'), 'org2'); expect(window.location.assign).toHaveBeenCalledWith(generateOrgSlugUrl('org2')); diff --git a/static/app/views/sentryAppExternalInstallation/index.tsx b/static/app/views/sentryAppExternalInstallation/index.tsx index e667c99ce48a27..a58223cdb44eac 100644 --- a/static/app/views/sentryAppExternalInstallation/index.tsx +++ b/static/app/views/sentryAppExternalInstallation/index.tsx @@ -1,115 +1,157 @@ +import {useCallback, useEffect, useState} from 'react'; import type {RouteComponentProps} from 'react-router'; import styled from '@emotion/styled'; import {addErrorMessage} from 'sentry/actionCreators/indicator'; +import {fetchOrganizations} from 'sentry/actionCreators/organizations'; import {installSentryApp} from 'sentry/actionCreators/sentryAppInstallations'; import {Alert} from 'sentry/components/alert'; import OrganizationAvatar from 'sentry/components/avatar/organizationAvatar'; import SelectControl from 'sentry/components/forms/controls/selectControl'; import FieldGroup from 'sentry/components/forms/fieldGroup'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; import SentryAppDetailsModal from 'sentry/components/modals/sentryAppDetailsModal'; import NarrowLayout from 'sentry/components/narrowLayout'; import {t, tct} from 'sentry/locale'; import ConfigStore from 'sentry/stores/configStore'; -import type {Organization, SentryApp, SentryAppInstallation} from 'sentry/types'; +import type { + Organization, + OrganizationSummary, + SentryApp, + SentryAppInstallation, +} from 'sentry/types'; import {generateOrgSlugUrl} from 'sentry/utils'; import {trackIntegrationAnalytics} from 'sentry/utils/integrationUtil'; +import {useApiQuery} from 'sentry/utils/queryClient'; import {addQueryParamsToExistingUrl} from 'sentry/utils/queryString'; -import DeprecatedAsyncView from 'sentry/views/deprecatedAsyncView'; +import useApi from 'sentry/utils/useApi'; import {OrganizationContext} from '../organizationContext'; type Props = RouteComponentProps<{sentryAppSlug: string}, {}>; -type State = DeprecatedAsyncView['state'] & { - organization: Organization | null; - organizations: Organization[]; - reloading: boolean; - selectedOrgSlug: string | null; - sentryApp: SentryApp; -}; - -export default class SentryAppExternalInstallation extends DeprecatedAsyncView< - Props, - State -> { - disableErrorReport = false; - - getDefaultState() { - const state = super.getDefaultState(); - return { - ...state, - selectedOrgSlug: null, - organization: null, - organizations: [], - reloading: false, - }; - } - - getEndpoints(): ReturnType { - return [ - ['organizations', '/organizations/'], - ['sentryApp', `/sentry-apps/${this.sentryAppSlug}/`], - ]; - } +// Page Layout +export default function SentryAppExternalInstallation(props: Props) { + return ( + + +

+    <NarrowLayout>
+      <Content>
+        <h3>{t('Finish integration installation')}</h3>
+        <SentryAppExternalInstallationContent {...props} />
+      </Content>
+    </NarrowLayout>
+ ); +} - onLoadAllEndpointsSuccess() { - // auto select the org if there is only one - const {organizations} = this.state; +// View Contents +function SentryAppExternalInstallationContent({params, ...props}: Props) { + const api = useApi(); + // The selected organization fetched from org details + const [organization, setOrganization] = useState(); + // The selected organization's slug. Should be removed as we have the selected organization as well. + const [selectedOrgSlug, setSelectedOrgSlug] = useState(); + + const [organizations, setOrganizations] = useState>([]); + const [orgsLoading, setOrgsLoading] = useState(true); + const [isInstalled, setIsInstalled] = useState(); + + // Load data on mount. + const {data: sentryApp, isLoading: sentryAppLoading} = useApiQuery( + [`/sentry-apps/${params.sentryAppSlug}/`], + { + staleTime: 0, + } + ); + + useEffect( + function () { + async function loadOrgs() { + try { + const orgs = await fetchOrganizations(api); + setOrganizations(orgs); + setOrgsLoading(false); + } catch (e) { + setOrgsLoading(false); + // Do nothing. + } + } + loadOrgs(); + }, + [api] + ); + + const onSelectOrg = useCallback( + async function (orgSlug: string) { + const customerDomain = ConfigStore.get('customerDomain'); + // redirect to the org if it's different than the org being selected + if (customerDomain?.subdomain && orgSlug !== customerDomain?.subdomain) { + const urlWithQuery = generateOrgSlugUrl(orgSlug) + props.location.search; + window.location.assign(urlWithQuery); + return; + } + // otherwise proceed as normal + setSelectedOrgSlug(orgSlug); + + try { + const [org, installations]: [Organization, SentryAppInstallation[]] = + await Promise.all([ + api.requestPromise(`/organizations/${orgSlug}/`), + api.requestPromise(`/organizations/${orgSlug}/sentry-app-installations/`), + ]); + const installed = installations + .map(install => install.app.slug) + .includes(params.sentryAppSlug); + + setOrganization(org); + setSelectedOrgSlug(org.slug); + setIsInstalled(installed); + } catch (err) { + addErrorMessage(t('Failed to retrieve organization or integration details')); + } + }, + [ + api, + params.sentryAppSlug, + props.location.search, + setOrganization, + setSelectedOrgSlug, + setIsInstalled, + ] + ); + + useEffect(function () { + // Skip if we have a selected org, or if there aren't any orgs loaded yet. 
+ if (organization || organizations.length < 1) { + return; + } if (organizations.length === 1) { - this.onSelectOrg(organizations[0].slug); + // auto select the org if there is only one + onSelectOrg(organizations[0].slug); } // now check the subomdain and use that org slug if it exists const customerDomain = ConfigStore.get('customerDomain'); if (customerDomain?.subdomain) { - this.onSelectOrg(customerDomain.subdomain); + onSelectOrg(customerDomain.subdomain); } - } - - getTitle() { - return t('Choose Installation Organization'); - } - - get sentryAppSlug() { - return this.props.params.sentryAppSlug; - } - - get isSingleOrg() { - return this.state.organizations.length === 1; - } - - get isSentryAppInternal() { - const {sentryApp} = this.state; - return sentryApp && sentryApp.status === 'internal'; - } - - get isSentryAppUnavailableForOrg() { - const {sentryApp, selectedOrgSlug} = this.state; - // if the app is unpublished for a different org - return ( - selectedOrgSlug && - sentryApp?.owner?.slug !== selectedOrgSlug && - sentryApp.status === 'unpublished' - ); - } - - get disableInstall() { - const {reloading, isInstalled} = this.state; - return isInstalled || reloading || this.isSentryAppUnavailableForOrg; - } - - hasAccess = (org: Organization) => org.access.includes('org:integrations'); + }); - onClose = () => { + const onClose = useCallback(() => { // if we came from somewhere, go back there. Otherwise, back to the integrations page - const {selectedOrgSlug} = this.state; const newUrl = document.referrer || `/settings/${selectedOrgSlug}/integrations/`; window.location.assign(newUrl); - }; - - onInstall = async (): Promise => { - const {organization, sentryApp} = this.state; + }, [selectedOrgSlug]); + + const disableInstall = useCallback( + function () { + if (!(sentryApp && selectedOrgSlug)) { + return false; + } + return isInstalled || isSentryAppUnavailableForOrg(sentryApp, selectedOrgSlug); + }, + [isInstalled, selectedOrgSlug, sentryApp] + ); + + const onInstall = useCallback(async (): Promise => { if (!organization || !sentryApp) { return undefined; } @@ -121,7 +163,7 @@ export default class SentryAppExternalInstallation extends DeprecatedAsyncView< organization, }); - const install = await installSentryApp(this.api, organization.slug, sentryApp); + const install = await installSentryApp(api, organization.slug, sentryApp); // installation is complete if the status is installed if (install.status === 'installed') { trackIntegrationAnalytics('integrations.installation_complete', { @@ -142,196 +184,192 @@ export default class SentryAppExternalInstallation extends DeprecatedAsyncView< const redirectUrl = addQueryParamsToExistingUrl(sentryApp.redirectUrl, queryParams); return window.location.assign(redirectUrl); } - return this.onClose(); - }; - - onSelectOrg = async (orgSlug: string) => { - const customerDomain = ConfigStore.get('customerDomain'); - // redirect to the org if it's different than the org being selected - if (customerDomain?.subdomain && orgSlug !== customerDomain?.subdomain) { - const urlWithQuery = generateOrgSlugUrl(orgSlug) + this.props.location.search; - window.location.assign(urlWithQuery); - return; - } - // otherwise proceed as normal - this.setState({selectedOrgSlug: orgSlug, reloading: true}); - - try { - const [organization, installations]: [Organization, SentryAppInstallation[]] = - await Promise.all([ - this.api.requestPromise(`/organizations/${orgSlug}/`), - this.api.requestPromise(`/organizations/${orgSlug}/sentry-app-installations/`), - ]); - const 
isInstalled = installations - .map(install => install.app.slug) - .includes(this.sentryAppSlug); - // all state fields should be set at the same time so analytics in SentryAppDetailsModal works properly - this.setState({organization, isInstalled, reloading: false}); - } catch (err) { - addErrorMessage(t('Failed to retrieve organization or integration details')); - this.setState({reloading: false}); - } - }; + return onClose(); + }, [api, organization, sentryApp, onClose]); - onRequestSuccess = ({stateKey, data}) => { - // if only one org, we can immediately update our selected org - if (stateKey === 'organizations' && data.length === 1) { - this.onSelectOrg(data[0].slug); - } - }; - - getOptions() { - return this.state.organizations.map(org => ({ - value: org.slug, - label: ( -
- - {org.slug} -
- ), - })); + if (sentryAppLoading || orgsLoading || !sentryApp) { + return ; } - renderInternalAppError() { - const {sentryApp} = this.state; - return ( - - {tct( - 'Integration [sentryAppName] is an internal integration. Internal integrations are automatically installed', - { - sentryAppName: {sentryApp.name}, - } + return ( +
+ + {isSingleOrg(organizations) ? ( + + ) : ( + )} - - ); - } - - checkAndRenderError() { - const {organization, selectedOrgSlug, isInstalled, sentryApp} = this.state; - if (selectedOrgSlug && organization && !this.hasAccess(organization)) { - return ( - -

- {tct( - `You do not have permission to install integrations in - [organization]. Ask an organization owner or manager to - visit this page to finish installing this integration.`, - {organization: {organization.slug}} - )} -

- {generateOrgSlugUrl(selectedOrgSlug)} -
- ); - } - if (isInstalled && organization) { - return ( - - {tct('Integration [sentryAppName] already installed for [organization]', { - organization: {organization.name}, - sentryAppName: {sentryApp.name}, - })} - - ); - } - - if (this.isSentryAppUnavailableForOrg) { - // use the slug of the owner if we have it, otherwise use 'another organization' - const ownerSlug = sentryApp?.owner?.slug ?? 'another organization'; - return ( - - {tct( - 'Integration [sentryAppName] is an unpublished integration for [otherOrg]. An unpublished integration can only be installed on the organization which created it.', - { - sentryAppName: {sentryApp.name}, - otherOrg: {ownerSlug}, - } - )} - - ); - } +
+ + {organization && ( + + + + )} +
+ ); +} - return null; - } +type CheckAndRenderProps = { + isInstalled: boolean | undefined; + organization: Organization | undefined; + selectedOrgSlug: string | undefined; + sentryApp: SentryApp; +}; - renderMultiOrgView() { - const {selectedOrgSlug, sentryApp} = this.state; +function CheckAndRenderError({ + organization, + selectedOrgSlug, + isInstalled, + sentryApp, +}: CheckAndRenderProps) { + if (selectedOrgSlug && organization && !hasAccess(organization)) { return ( -
+

{tct( - 'Please pick a specific [organization:organization] to install [sentryAppName]', - { - organization: , - sentryAppName: {sentryApp.name}, - } + `You do not have permission to install integrations in + [organization]. Ask an organization owner or manager to + visit this page to finish installing this integration.`, + {organization: {organization.slug}} )}

- - {() => ( - this.onSelectOrg(value)} - value={selectedOrgSlug} - placeholder={t('Select an organization')} - options={this.getOptions()} - data-test-id="org-select" - /> - )} - -
+ {generateOrgSlugUrl(selectedOrgSlug)} +
); } - renderSingleOrgView() { - const {organizations, sentryApp} = this.state; - // pull the name out of organizations since state.organization won't be loaded initially - const organizationName = organizations[0].name; + if (isInstalled && organization && sentryApp) { return ( -
-

- {tct('You are installing [sentryAppName] for organization [organization]', { - organization: {organizationName}, - sentryAppName: {sentryApp.name}, - })} -

-
+ + {tct('Integration [sentryAppName] already installed for [organization]', { + organization: {organization.name}, + sentryAppName: {sentryApp.name}, + })} + ); } - renderMainContent() { - const {organization, sentryApp} = this.state; + if (isSentryAppUnavailableForOrg(sentryApp, selectedOrgSlug)) { + // use the slug of the owner if we have it, otherwise use 'another organization' + const ownerSlug = sentryApp?.owner?.slug ?? 'another organization'; return ( -
- - {this.isSingleOrg ? this.renderSingleOrgView() : this.renderMultiOrgView()} - - {this.checkAndRenderError()} - {organization && ( - - - + + {tct( + 'Integration [sentryAppName] is an unpublished integration for [otherOrg]. An unpublished integration can only be installed on the organization which created it.', + { + sentryAppName: {sentryApp.name}, + otherOrg: {ownerSlug}, + } )} -
+ ); } - renderBody() { - return ( - - -

{t('Finish integration installation')}

- {this.isSentryAppInternal - ? this.renderInternalAppError() - : this.renderMainContent()} -
-
- ); + return null; +} + +type SingleOrgProps = { + organizations: Array; + sentryApp: SentryApp; +}; +function SingleOrgView({organizations, sentryApp}: SingleOrgProps) { + const organizationName = organizations[0].name; + return ( +
+

+ {tct('You are installing [sentryAppName] for organization [organization]', { + organization: {organizationName}, + sentryAppName: {sentryApp.name}, + })} +

+
+ ); +} + +type SelectOrgCallback = (slug: string) => void; + +type MultiOrgProps = { + onSelectOrg: SelectOrgCallback; + organizations: Array; + selectedOrgSlug: string | undefined; + sentryApp: SentryApp; +}; +function MultiOrgView({ + onSelectOrg, + organizations, + selectedOrgSlug, + sentryApp, +}: MultiOrgProps) { + return ( +
+

+ {tct( + 'Please pick a specific [organization:organization] to install [sentryAppName]', + { + organization: , + sentryAppName: {sentryApp.name}, + } + )} +

+ + {() => ( + onSelectOrg(value)} + value={selectedOrgSlug} + placeholder={t('Select an organization')} + options={getOrganizationOptions(organizations)} + data-test-id="org-select" + /> + )} + +
+ ); +} + +const hasAccess = (org: Organization) => org.access.includes('org:integrations'); + +function isSingleOrg(organizations: Array): boolean { + return organizations.length === 1; +} + +function getOrganizationOptions(organizations: Array) { + return organizations.map(org => ({ + value: org.slug, + label: ( +
+ + {org.slug} +
+      
+    ),
+  }));
+}
+
+function isSentryAppUnavailableForOrg(
+  sentryApp: SentryApp,
+  selectedOrgSlug: string | undefined
+): boolean {
+  if (!selectedOrgSlug) {
+    return false;
+  }
+  // if the app is unpublished for a different org
+  return sentryApp?.owner?.slug !== selectedOrgSlug && sentryApp.status === 'unpublished';
+}
 
 const InstallLink = styled('pre')`

From ef82c7002bc3c54749193eea21f5af58ee2c7c70 Mon Sep 17 00:00:00 2001
From: Leander Rodrigues 
Date: Mon, 6 May 2024 13:30:11 -0400
Subject: [PATCH 021/376] ref(highlights): Remove Replay ID from frontend
 default highlights (#70348)

Just the one change in a separate PR since it's backend; any other
default suggestions are welcome.

---
 src/sentry/issues/highlights.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/sentry/issues/highlights.py b/src/sentry/issues/highlights.py
index 22e7ee68ffde68..c9e50e66a5e9d1 100644
--- a/src/sentry/issues/highlights.py
+++ b/src/sentry/issues/highlights.py
@@ -39,14 +39,13 @@ class HighlightPreset(TypedDict):
     "context": {"trace": ["trace_id"], "runtime": ["name", "version"]},
 }
 FRONTEND_HIGHLIGHTS: HighlightPreset = {
-    "tags": SENTRY_TAGS + ["url", "transaction", "browser", "replayId", "user"],
+    "tags": SENTRY_TAGS + ["url", "transaction", "browser", "user"],
     "context": {"browser": ["name"], "user": ["email"]},
 }
 MOBILE_HIGHLIGHTS: HighlightPreset = {
     "tags": SENTRY_TAGS + ["mobile", "main_thread"],
     "context": {"profile": ["profile_id"], "app": ["name"], "device": ["family"]},
 }
-
 FALLBACK_HIGHLIGHTS: HighlightPreset = {
     "tags": SENTRY_TAGS,
     "context": {"user": ["email"], "trace": ["trace_id"]},

From a148aa4d82cb28934c04d1f1bb77f359269ff279 Mon Sep 17 00:00:00 2001
From: Seiji Chew <67301797+schew2381@users.noreply.github.com>
Date: Tue, 7 May 2024 02:31:50 +0900
Subject: [PATCH 022/376] fix(staff): Remove incorrect u2f error message
 (#70362)

Hopefully this fixes the random u2f error message popping up when it
shouldn't.

---------

Co-authored-by: Alberto Leal
---
 static/app/components/superuserStaffAccessForm.tsx | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/static/app/components/superuserStaffAccessForm.tsx b/static/app/components/superuserStaffAccessForm.tsx
index b2272041de2366..0e2d53d12598e6 100644
--- a/static/app/components/superuserStaffAccessForm.tsx
+++ b/static/app/components/superuserStaffAccessForm.tsx
@@ -59,9 +59,11 @@ class SuperuserStaffAccessForm extends Component {
       return;
     }
 
-    await this.getAuthenticators();
+    const authenticators = await this.getAuthenticators();
+    this.setState({authenticators: authenticators});
+
     // Set the error state if there are no authenticators and U2F is on
-    if (!this.state.authenticators.length && !disableU2FForSUForm) {
+    if (!authenticators.length && !disableU2FForSUForm) {
       this.handleError(ErrorCodes.NO_AUTHENTICATOR);
     }
     this.setState({isLoading: false});
@@ -183,10 +185,11 @@ class SuperuserStaffAccessForm extends Component {
 
     try {
       const authenticators = await api.requestPromise('/authenticators/');
-      this.setState({authenticators: authenticators ?? []});
+      return authenticators ?? 
[]; } catch { // ignore errors } + return []; } render() { From b3d2b74a869b7832a353d851839f73de97cc5dc4 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Mon, 6 May 2024 13:42:32 -0400 Subject: [PATCH 023/376] chore(code-mappings): Remove check for Go automatic code mapping feature flag (#69045) Title Order of operations: - This PR - [Remove feature from options automator](https://github.com/getsentry/sentry-options-automator/pull/1144) - [Unregister feature in getSentry](https://github.com/getsentry/getsentry/pull/13670) - [Unregister feature in sentry](https://github.com/getsentry/sentry/pull/69049) --- src/sentry/tasks/derive_code_mappings.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/sentry/tasks/derive_code_mappings.py b/src/sentry/tasks/derive_code_mappings.py index 48b33439e379a7..66204da9926573 100644 --- a/src/sentry/tasks/derive_code_mappings.py +++ b/src/sentry/tasks/derive_code_mappings.py @@ -102,11 +102,6 @@ def derive_code_mappings( logger.info("Event should not be processed.", extra=extra) return - if data["platform"].startswith("go") and not features.has( - "organizations:derive-code-mappings-go", org - ): - return - stacktrace_paths: list[str] = identify_stacktrace_paths(data) if not stacktrace_paths: logger.info("No stacktrace paths found.", extra=extra) From 462a4e89e220a05d3b1524b3c276e99e235b9269 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Mon, 6 May 2024 13:56:34 -0400 Subject: [PATCH 024/376] feat(mobile-ui): Render screen charts (#70337) Adds support for charts based off of top results in the screens table --- .../screens/averageComparisonChart.tsx | 16 +-- .../mobile/appStarts/screens/countChart.tsx | 16 +-- .../mobile/appStarts/screens/index.tsx | 19 +-- .../mobile/appStarts/screens/screensTable.tsx | 2 +- .../views/performance/mobile/constants.tsx | 2 + .../mobile/screenload/screens/index.tsx | 40 +++--- .../screenload/screens/screensTable.tsx | 2 +- .../views/performance/mobile/ui/referrers.tsx | 1 + .../mobile/ui/screens/index.spec.tsx | 72 +++++++++++ .../performance/mobile/ui/screens/index.tsx | 117 ++++++++++++++++-- .../performance/mobile/ui/screens/table.tsx | 2 +- .../mobile/ui/screens/topScreensChart.tsx | 62 ++++++++++ .../mobile/useTruncatedRelease.spec.tsx | 39 ++++++ .../mobile/useTruncatedRelease.tsx | 20 +++ .../views/starfish/utils/centerTruncate.ts | 4 +- 15 files changed, 349 insertions(+), 65 deletions(-) create mode 100644 static/app/views/performance/mobile/constants.tsx create mode 100644 static/app/views/performance/mobile/ui/screens/topScreensChart.tsx create mode 100644 static/app/views/performance/mobile/useTruncatedRelease.spec.tsx create mode 100644 static/app/views/performance/mobile/useTruncatedRelease.tsx diff --git a/static/app/views/performance/mobile/appStarts/screens/averageComparisonChart.tsx b/static/app/views/performance/mobile/appStarts/screens/averageComparisonChart.tsx index a0a30807352d99..37323076577e06 100644 --- a/static/app/views/performance/mobile/appStarts/screens/averageComparisonChart.tsx +++ b/static/app/views/performance/mobile/appStarts/screens/averageComparisonChart.tsx @@ -11,15 +11,14 @@ import {decodeScalar} from 'sentry/utils/queryString'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import {useLocation} from 'sentry/utils/useLocation'; import usePageFilters from 'sentry/utils/usePageFilters'; -import {MAX_CHART_RELEASE_CHARS} from 'sentry/views/performance/mobile/appStarts/screens'; import {COLD_START_TYPE} from 
'sentry/views/performance/mobile/appStarts/screenSummary/startTypeSelector'; import {YAxis, YAXIS_COLUMNS} from 'sentry/views/performance/mobile/screenload/screens'; import {ScreensBarChart} from 'sentry/views/performance/mobile/screenload/screens/screenBarChart'; import {useTableQuery} from 'sentry/views/performance/mobile/screenload/screens/screensTable'; +import useTruncatedReleaseNames from 'sentry/views/performance/mobile/useTruncatedRelease'; import {PRIMARY_RELEASE_COLOR} from 'sentry/views/starfish/colors'; import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; import {SpanMetricsField} from 'sentry/views/starfish/types'; -import {formatVersionAndCenterTruncate} from 'sentry/views/starfish/utils/centerTruncate'; import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseComparison'; interface Props { @@ -90,14 +89,7 @@ export function AverageComparisonChart({chartHeight}: Props) { return transformData(data?.data, appStartType); }, [data, appStartType]); - const truncatedPrimaryChart = formatVersionAndCenterTruncate( - primaryRelease ?? '', - MAX_CHART_RELEASE_CHARS - ); - const truncatedSecondaryChart = formatVersionAndCenterTruncate( - secondaryRelease ?? '', - MAX_CHART_RELEASE_CHARS - ); + const {truncatedPrimaryRelease, truncatedSecondaryRelease} = useTruncatedReleaseNames(); return ( > = { [YAxis.WARM_START]: 'avg(measurements.app_start_warm)', [YAxis.COLD_START]: 'avg(measurements.app_start_cold)', @@ -71,6 +72,11 @@ export const YAXIS_COLUMNS: Readonly> = { [YAxis.FROZEN_FRAME_RATE]: 'avg(measurements.frames_frozen_rate)', [YAxis.THROUGHPUT]: 'tpm()', [YAxis.COUNT]: 'count()', + + // Using span metrics + [YAxis.SLOW_FRAMES]: 'avg(mobile.slow_frames)', + [YAxis.FROZEN_FRAMES]: 'avg(mobile.frozen_frames)', + [YAxis.FRAMES_DELAY]: 'avg(mobile.frames_delay)', }; export const READABLE_YAXIS_LABELS: Readonly> = { @@ -82,6 +88,9 @@ export const READABLE_YAXIS_LABELS: Readonly> = { [YAxis.FROZEN_FRAME_RATE]: 'avg(frames_frozen_rate)', [YAxis.THROUGHPUT]: 'tpm()', [YAxis.COUNT]: 'count()', + [YAxis.SLOW_FRAMES]: 'avg(mobile.slow_frames)', + [YAxis.FROZEN_FRAMES]: 'avg(mobile.frozen_frames)', + [YAxis.FRAMES_DELAY]: 'avg(mobile.frames_delay)', }; export const CHART_TITLES: Readonly> = { @@ -93,6 +102,9 @@ export const CHART_TITLES: Readonly> = { [YAxis.FROZEN_FRAME_RATE]: t('Frozen Frame Rate'), [YAxis.THROUGHPUT]: t('Throughput'), [YAxis.COUNT]: t('Count'), + [YAxis.SLOW_FRAMES]: t('Slow Frames'), + [YAxis.FROZEN_FRAMES]: t('Frozen Frames'), + [YAxis.FRAMES_DELAY]: t('Frames Delay'), }; export const OUTPUT_TYPE: Readonly> = { @@ -104,6 +116,9 @@ export const OUTPUT_TYPE: Readonly> = { [YAxis.FROZEN_FRAME_RATE]: 'percentage', [YAxis.THROUGHPUT]: 'number', [YAxis.COUNT]: 'number', + [YAxis.SLOW_FRAMES]: 'number', + [YAxis.FROZEN_FRAMES]: 'number', + [YAxis.FRAMES_DELAY]: 'duration', }; type Props = { @@ -135,6 +150,7 @@ export function ScreensView({yAxes, additionalFilters, chartHeight, project}: Pr secondaryRelease, isLoading: isReleasesLoading, } = useReleaseSelection(); + const {truncatedPrimaryRelease, truncatedSecondaryRelease} = useTruncatedReleaseNames(); const router = useRouter(); @@ -277,14 +293,6 @@ export function ScreensView({yAxes, additionalFilters, chartHeight, project}: Pr topTransactions, }); - const truncatedPrimaryChart = formatVersionAndCenterTruncate( - primaryRelease ?? '', - MAX_CHART_RELEASE_CHARS - ); - const truncatedSecondaryChart = formatVersionAndCenterTruncate( - secondaryRelease ?? 
'', - MAX_CHART_RELEASE_CHARS - ); const derivedQuery = getTransactionSearchQuery(location, tableEventView.query); const tableSearchFilters = new MutableSearch(['transaction.op:ui.load']); @@ -313,8 +321,8 @@ export function ScreensView({yAxes, additionalFilters, chartHeight, project}: Pr subtitle: primaryRelease ? t( '%s v. %s', - truncatedPrimaryChart, - secondaryRelease ? truncatedSecondaryChart : '' + truncatedPrimaryRelease, + secondaryRelease ? truncatedSecondaryRelease : '' ) : '', }, @@ -345,8 +353,8 @@ export function ScreensView({yAxes, additionalFilters, chartHeight, project}: Pr subtitle: primaryRelease ? t( '%s v. %s', - truncatedPrimaryChart, - secondaryRelease ? truncatedSecondaryChart : '' + truncatedPrimaryRelease, + secondaryRelease ? truncatedSecondaryRelease : '' ) : '', }, diff --git a/static/app/views/performance/mobile/screenload/screens/screensTable.tsx b/static/app/views/performance/mobile/screenload/screens/screensTable.tsx index dc827e67ffd0ea..62d130ab6a59da 100644 --- a/static/app/views/performance/mobile/screenload/screens/screensTable.tsx +++ b/static/app/views/performance/mobile/screenload/screens/screensTable.tsx @@ -24,7 +24,7 @@ import usePageFilters from 'sentry/utils/usePageFilters'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import TopResultsIndicator from 'sentry/views/discover/table/topResultsIndicator'; import type {TableColumn} from 'sentry/views/discover/table/types'; -import {TOP_SCREENS} from 'sentry/views/performance/mobile/screenload/screens'; +import {TOP_SCREENS} from 'sentry/views/performance/mobile/constants'; import { PRIMARY_RELEASE_ALIAS, SECONDARY_RELEASE_ALIAS, diff --git a/static/app/views/performance/mobile/ui/referrers.tsx b/static/app/views/performance/mobile/ui/referrers.tsx index d064ab5c99a7e3..c0246f26efb6e7 100644 --- a/static/app/views/performance/mobile/ui/referrers.tsx +++ b/static/app/views/performance/mobile/ui/referrers.tsx @@ -1,3 +1,4 @@ export enum Referrer { OVERVIEW_SCREENS_TABLE = 'api.performance.module.ui.screen-table', + MOBILE_UI_BAR_CHART = 'api.performance.mobile.ui.bar-chart', } diff --git a/static/app/views/performance/mobile/ui/screens/index.spec.tsx b/static/app/views/performance/mobile/ui/screens/index.spec.tsx index 76ff78778ffc65..b21453a2e03758 100644 --- a/static/app/views/performance/mobile/ui/screens/index.spec.tsx +++ b/static/app/views/performance/mobile/ui/screens/index.spec.tsx @@ -2,6 +2,7 @@ import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen} from 'sentry-test/reactTestingLibrary'; +import type {Project} from 'sentry/types'; import usePageFilters from 'sentry/utils/usePageFilters'; import {Referrer} from 'sentry/views/performance/mobile/ui/referrers'; import {UIScreens} from 'sentry/views/performance/mobile/ui/screens'; @@ -16,6 +17,29 @@ jest.mocked(useReleaseSelection).mockReturnValue({ secondaryRelease: 'com.example.vu.android@2.10.3+42', }); +const createMockTablePayload = ({ + transaction, + project, +}: { + project: Project; + transaction: string; +}) => ({ + 'avg_compare(mobile.frames_delay,release,com.example.vu.android@2.10.5,com.example.vu.android@2.10.3+42)': + null, + 'avg_compare(mobile.frozen_frames,release,com.example.vu.android@2.10.5,com.example.vu.android@2.10.3+42)': + null, + 'avg_compare(mobile.slow_frames,release,com.example.vu.android@2.10.5,com.example.vu.android@2.10.3+42)': + null, + 'avg_if(mobile.frames_delay,release,com.example.vu.android@2.10.5)': 0, + 
'avg_if(mobile.frames_delay,release,com.example.vu.android@2.10.3+42)': 0.259326119, + 'avg_if(mobile.frozen_frames,release,com.example.vu.android@2.10.5)': 0, + 'avg_if(mobile.frozen_frames,release,com.example.vu.android@2.10.3+42)': 0, + 'avg_if(mobile.slow_frames,release,com.example.vu.android@2.10.5)': 0, + 'avg_if(mobile.slow_frames,release,com.example.vu.android@2.10.3+42)': 2, + 'project.id': project.id, + transaction, +}); + describe('Performance Mobile UI Screens', () => { const project = ProjectFixture({platform: 'apple-ios'}); @@ -83,4 +107,52 @@ describe('Performance Mobile UI Screens', () => { }) ); }); + + it('queries for the correct chart data using the top transactions', async () => { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/', + body: { + data: [ + createMockTablePayload({transaction: 'top 1', project}), + createMockTablePayload({transaction: 'top 2', project}), + createMockTablePayload({transaction: 'top 3', project}), + createMockTablePayload({transaction: 'top 4', project}), + createMockTablePayload({transaction: 'top 5', project}), + createMockTablePayload({transaction: 'top 6', project}), // excluded + ], + }, + match: [MockApiClient.matchQuery({referrer: Referrer.OVERVIEW_SCREENS_TABLE})], + }); + + const chartDataRequest = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/', + body: [], + match: [MockApiClient.matchQuery({referrer: Referrer.MOBILE_UI_BAR_CHART})], + }); + + render(); + + await screen.findByText('top 1'); + + screen.getByText('Top 5 Screen Slow Frames'); + screen.getByText('Top 5 Screen Frozen Frames'); + screen.getByText('Top 5 Screen Frames Delay'); + + expect(chartDataRequest).toHaveBeenCalledWith( + '/organizations/org-slug/events/', + expect.objectContaining({ + query: expect.objectContaining({ + field: [ + 'transaction', + 'release', + 'avg(mobile.slow_frames)', + 'avg(mobile.frozen_frames)', + 'avg(mobile.frames_delay)', + ], + query: + 'release:[com.example.vu.android@2.10.5,com.example.vu.android@2.10.3+42] transaction:["top 1","top 2","top 3","top 4","top 5"]', + }), + }) + ); + }); }); diff --git a/static/app/views/performance/mobile/ui/screens/index.tsx b/static/app/views/performance/mobile/ui/screens/index.tsx index 32a2298cef5f84..71f39d81ee8263 100644 --- a/static/app/views/performance/mobile/ui/screens/index.tsx +++ b/static/app/views/performance/mobile/ui/screens/index.tsx @@ -1,28 +1,46 @@ +import {useTheme} from '@emotion/react'; import styled from '@emotion/styled'; +import Alert from 'sentry/components/alert'; import SearchBar from 'sentry/components/performance/searchBar'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {NewQuery} from 'sentry/types'; +import {defined} from 'sentry/utils'; import EventView from 'sentry/utils/discover/eventView'; import {DiscoverDatasets} from 'sentry/utils/discover/types'; import {decodeScalar} from 'sentry/utils/queryString'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; +import {escapeFilterValue, MutableSearch} from 'sentry/utils/tokenizeSearch'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; import useRouter from 'sentry/utils/useRouter'; import {prepareQueryForLandingPage} from 'sentry/views/performance/data'; -import {getFreeTextFromQuery} from 'sentry/views/performance/mobile/screenload/screens'; +import {TOP_SCREENS} from 
'sentry/views/performance/mobile/constants'; +import { + getFreeTextFromQuery, + YAxis, + YAXIS_COLUMNS, +} from 'sentry/views/performance/mobile/screenload/screens'; import {useTableQuery} from 'sentry/views/performance/mobile/screenload/screens/screensTable'; +import {transformReleaseEvents} from 'sentry/views/performance/mobile/screenload/screens/utils'; import {Referrer} from 'sentry/views/performance/mobile/ui/referrers'; import {UIScreensTable} from 'sentry/views/performance/mobile/ui/screens/table'; +import {TopScreensChart} from 'sentry/views/performance/mobile/ui/screens/topScreensChart'; import {getTransactionSearchQuery} from 'sentry/views/performance/utils'; import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; import {SpanMetricsField} from 'sentry/views/starfish/types'; import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseComparison'; +const Y_AXES = [YAxis.SLOW_FRAMES, YAxis.FROZEN_FRAMES, YAxis.FRAMES_DELAY]; +const Y_AXIS_COLUMNS = [ + 'avg(mobile.slow_frames)', + 'avg(mobile.frozen_frames)', + 'avg(mobile.frames_delay)', +]; + export function UIScreens() { + const theme = useTheme(); const router = useRouter(); const {selection} = usePageFilters(); const location = useLocation(); @@ -80,14 +98,91 @@ export function UIScreens() { referrer: Referrer.OVERVIEW_SCREENS_TABLE, }); + const topTransactions = + topTransactionsData?.data?.slice(0, 5).map(datum => datum.transaction as string) ?? + []; + + // TODO: Fill with transaction.op filter + const topEventsQuery = new MutableSearch([]); + + const topEventsQueryString = `${appendReleaseFilters( + topEventsQuery, + primaryRelease, + secondaryRelease + )} ${ + topTransactions.length > 0 + ? escapeFilterValue( + `transaction:[${topTransactions.map(name => `"${name}"`).join()}]` + ) + : '' + }`.trim(); + + const {data: releaseEvents, isLoading: isReleaseEventsLoading} = useTableQuery({ + eventView: EventView.fromNewQueryWithPageFilters( + { + name: '', + fields: ['transaction', 'release', ...Y_AXIS_COLUMNS], + yAxis: Y_AXIS_COLUMNS, + query: topEventsQueryString, + dataset: DiscoverDatasets.SPANS_METRICS, + version: 2, + }, + selection + ), + enabled: !topTransactionsLoading, + referrer: Referrer.MOBILE_UI_BAR_CHART, + }); + + if (!defined(primaryRelease) && !isReleasesLoading) { + return ( + + {t( + 'No screens found on recent releases. Please try a single iOS or Android project, a single environment or a smaller date range.' + )} + + ); + } + // TODO: Add transaction.op:ui.load when collecting begins const tableSearchFilters = new MutableSearch([]); const derivedQuery = getTransactionSearchQuery(location, tableEventView.query); + const transformedReleaseEvents = transformReleaseEvents({ + yAxes: Y_AXES, + primaryRelease, + secondaryRelease, + colorPalette: theme.charts.getColorPalette(TOP_SCREENS - 2), + releaseEvents, + topTransactions, + }); + return ( -
- + + + + + + { router.push({ @@ -114,10 +209,18 @@ export function UIScreens() { isLoading={topTransactionsLoading} pageLinks={pageLinks} /> -
+ ); } -const StyledSearchBar = styled(SearchBar)` - margin-bottom: ${space(1)}; +const Layout = styled('div')` + display: flex; + flex-direction: column; + gap: ${space(1)}; +`; + +const ChartContainer = styled('div')` + display: grid; + grid-template-columns: 33% 33% 33%; + gap: ${space(1)}; `; diff --git a/static/app/views/performance/mobile/ui/screens/table.tsx b/static/app/views/performance/mobile/ui/screens/table.tsx index 21e5fdc1e216e6..f224c06215987f 100644 --- a/static/app/views/performance/mobile/ui/screens/table.tsx +++ b/static/app/views/performance/mobile/ui/screens/table.tsx @@ -12,7 +12,7 @@ import useOrganization from 'sentry/utils/useOrganization'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import TopResultsIndicator from 'sentry/views/discover/table/topResultsIndicator'; import {ScreensTable} from 'sentry/views/performance/mobile/components/screensTable'; -import {TOP_SCREENS} from 'sentry/views/performance/mobile/screenload/screens'; +import {TOP_SCREENS} from 'sentry/views/performance/mobile/constants'; import { PRIMARY_RELEASE_ALIAS, SECONDARY_RELEASE_ALIAS, diff --git a/static/app/views/performance/mobile/ui/screens/topScreensChart.tsx b/static/app/views/performance/mobile/ui/screens/topScreensChart.tsx new file mode 100644 index 00000000000000..7bc4331a118cfb --- /dev/null +++ b/static/app/views/performance/mobile/ui/screens/topScreensChart.tsx @@ -0,0 +1,62 @@ +import {t} from 'sentry/locale'; +import {TOP_SCREENS} from 'sentry/views/performance/mobile/constants'; +import {ScreensBarChart} from 'sentry/views/performance/mobile/screenload/screens/screenBarChart'; +import useTruncatedReleaseNames from 'sentry/views/performance/mobile/useTruncatedRelease'; +import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; + +function getChartTitle(yAxis: string, countTopScreens: number) { + const TITLES = { + ['avg(mobile.slow_frames)']: [ + t('Top Screen Slow Frames'), + t('Top %s Screen Slow Frames', countTopScreens), + ], + ['avg(mobile.frozen_frames)']: [ + t('Top Screen Frozen Frames'), + t('Top %s Screen Frozen Frames', countTopScreens), + ], + ['avg(mobile.frames_delay)']: [ + t('Top Screen Frames Delay'), + t('Top %s Screen Frames Delay', countTopScreens), + ], + }; + + const [singularTopScreenTitle, pluralTopScreenTitle] = TITLES[yAxis]; + + return countTopScreens > 1 ? 
pluralTopScreenTitle : singularTopScreenTitle; +} + +export function TopScreensChart({ + yAxis, + topTransactions, + transformedReleaseEvents, + chartHeight, + isLoading, +}) { + const {primaryRelease, secondaryRelease} = useReleaseSelection(); + const {truncatedPrimaryRelease, truncatedSecondaryRelease} = useTruncatedReleaseNames(); + + const countTopScreens = Math.min(TOP_SCREENS, topTransactions.length); + + return ( + + ); +} diff --git a/static/app/views/performance/mobile/useTruncatedRelease.spec.tsx b/static/app/views/performance/mobile/useTruncatedRelease.spec.tsx new file mode 100644 index 00000000000000..95e5057d98f5a1 --- /dev/null +++ b/static/app/views/performance/mobile/useTruncatedRelease.spec.tsx @@ -0,0 +1,39 @@ +import {renderHook} from 'sentry-test/reactTestingLibrary'; + +import useTruncatedReleaseNames from 'sentry/views/performance/mobile/useTruncatedRelease'; +import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; +import {ELLIPSIS} from 'sentry/views/starfish/utils/centerTruncate'; + +jest.mock('sentry/views/starfish/queries/useReleases'); + +jest.mocked(useReleaseSelection).mockReturnValue({ + primaryRelease: 'com.example.vu.android@2.10.5-alpha.1+42', + isLoading: false, + secondaryRelease: 'com.example.vu.android@2.10.3+42', +}); + +describe('useTruncatedRelease', () => { + it('truncates long release names to 12 characters by default', () => { + const {result} = renderHook(useTruncatedReleaseNames); + + expect( + [...result.current.truncatedPrimaryRelease].filter(char => char !== ELLIPSIS) + ).toHaveLength(12); + + expect(result.current.truncatedPrimaryRelease).toBe('2.10.5…1 (42)'); + expect(result.current.truncatedSecondaryRelease).toBe('2.10.3 (42)'); + }); + + it('truncates long release names to provided length limit', () => { + const {result} = renderHook(useTruncatedReleaseNames, { + initialProps: 5, + }); + + expect( + [...result.current.truncatedPrimaryRelease].filter(char => char !== ELLIPSIS).length + ).toBeLessThanOrEqual(5); + + expect(result.current.truncatedPrimaryRelease).toBe('2.…2)'); + expect(result.current.truncatedSecondaryRelease).toBe('2.…2)'); + }); +}); diff --git a/static/app/views/performance/mobile/useTruncatedRelease.tsx b/static/app/views/performance/mobile/useTruncatedRelease.tsx new file mode 100644 index 00000000000000..2f2af276b0713a --- /dev/null +++ b/static/app/views/performance/mobile/useTruncatedRelease.tsx @@ -0,0 +1,20 @@ +import {MAX_CHART_RELEASE_CHARS} from 'sentry/views/performance/mobile/constants'; +import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; +import {formatVersionAndCenterTruncate} from 'sentry/views/starfish/utils/centerTruncate'; + +function useTruncatedReleaseNames(truncation?: number) { + const {primaryRelease, secondaryRelease} = useReleaseSelection(); + + const truncatedPrimaryRelease = formatVersionAndCenterTruncate( + primaryRelease ?? '', + truncation ?? MAX_CHART_RELEASE_CHARS + ); + const truncatedSecondaryRelease = formatVersionAndCenterTruncate( + secondaryRelease ?? '', + truncation ?? 
MAX_CHART_RELEASE_CHARS
+  );
+
+  return {truncatedPrimaryRelease, truncatedSecondaryRelease};
+}
+
+export default useTruncatedReleaseNames;
diff --git a/static/app/views/starfish/utils/centerTruncate.ts b/static/app/views/starfish/utils/centerTruncate.ts
index c5f976b2331181..1959b17e60c102 100644
--- a/static/app/views/starfish/utils/centerTruncate.ts
+++ b/static/app/views/starfish/utils/centerTruncate.ts
@@ -1,9 +1,11 @@
 import {formatVersion} from 'sentry/utils/formatters';
 
+export const ELLIPSIS = '\u2026';
+
 export function centerTruncate(value: string, maxLength: number = 20) {
   const divider = Math.floor(maxLength / 2);
   if (value?.length > maxLength) {
-    return `${value.slice(0, divider)}\u2026${value.slice(value.length - divider)}`;
+    return `${value.slice(0, divider)}${ELLIPSIS}${value.slice(value.length - divider)}`;
   }
   return value;
 }

From ce1cb89a6b53a12ea65ba06d4acc8146dff30c46 Mon Sep 17 00:00:00 2001
From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com>
Date: Mon, 6 May 2024 11:09:19 -0700
Subject: [PATCH 025/376] chore(feedback): rm beta pill from feedback (#70350)

closes https://github.com/getsentry/sentry/issues/70345

The only spot I could find is the sidebar item in the diff below.

---
 static/app/components/sidebar/index.tsx | 1 -
 1 file changed, 1 deletion(-)

diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx
index 948144c5deab0c..9b0623c219b56c 100644
--- a/static/app/components/sidebar/index.tsx
+++ b/static/app/components/sidebar/index.tsx
@@ -440,7 +440,6 @@ function Sidebar() {
           {...sidebarItemProps}
           icon={}
           label={t('User Feedback')}
-          isBeta
           variant="short"
           to={`/organizations/${organization.slug}/feedback/`}
           id="feedback"

From 3609aff06c06beaad861fb185cf6f503349ffc57 Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Mon, 6 May 2024 14:12:27 -0400
Subject: [PATCH 026/376] fix(perf): Fix missing space in Vitals toolbar
 buttons (#70266)

This ensures spacing between the buttons.
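As a rough sketch of the pattern (the `ToolbarActions` wrapper below is illustrative, not the actual component), the fix groups the adjacent actions under one `ButtonBar` and lets its `gap` prop own the spacing instead of the buttons' own margins:

```tsx
import {LinkButton} from 'sentry/components/button';
import ButtonBar from 'sentry/components/buttonBar';
import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton';

// Illustrative wrapper: ButtonBar renders its children in a row and applies
// a consistent gap between them, so no button needs an ad-hoc margin.
function ToolbarActions({summaryUrl}: {summaryUrl?: string}) {
  return (
    <ButtonBar gap={1}>
      <FeedbackWidgetButton />
      {summaryUrl && (
        <LinkButton to={summaryUrl} size="sm">
          View Transaction Summary
        </LinkButton>
      )}
    </ButtonBar>
  );
}
```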
Closes JAVASCRIPT-2SYJ

---
 .../browser/webVitals/pageOverview.tsx        | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/static/app/views/performance/browser/webVitals/pageOverview.tsx b/static/app/views/performance/browser/webVitals/pageOverview.tsx
index 574f891583f284..9e9d0e579067af 100644
--- a/static/app/views/performance/browser/webVitals/pageOverview.tsx
+++ b/static/app/views/performance/browser/webVitals/pageOverview.tsx
@@ -6,6 +6,7 @@ import moment from 'moment';
 import ProjectAvatar from 'sentry/components/avatar/projectAvatar';
 import {Breadcrumbs} from 'sentry/components/breadcrumbs';
 import {LinkButton} from 'sentry/components/button';
+import ButtonBar from 'sentry/components/buttonBar';
 import {AggregateSpans} from 'sentry/components/events/interfaces/spans/aggregateSpans';
 import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton';
 import * as Layout from 'sentry/components/layouts/thirds';
@@ -179,12 +180,14 @@ export default function PageOverview() {
 
 
-          
-          {transactionSummaryTarget && (
-            
-              {t('View Transaction Summary')}
-            
-          )}
+          
+            
+            {transactionSummaryTarget && (
+              
+                {t('View Transaction Summary')}
+              
+            )}
+          
 
 
           {LANDING_DISPLAYS.map(({label, field}) => (

From b61c84350b71de9e36028a8e9df02960866b40c1 Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Mon, 6 May 2024 14:13:26 -0400
Subject: [PATCH 027/376] fix(perf): Do not render contents of closed panels
 (#70286)

Right now, when a `SlideOverPanel` is collapsed, its contents still
exist in the DOM. This has a whole bunch of downsides:

1. React has to render stuff inside, which is pointless since it's
   invisible
2. When writing mocks, we have to assert on elements that might exist in
   the panel even if it's closed

This is very annoying! This PR changes it up to use `AnimatePresence`,
which lets us use a `!collapsed &&` condition and _not_ render the
panel's contents in the DOM if it's collapsed.

Also clarified some variable names while I'm at it, and removed some
unused CSS.
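For reference, the conditional-rendering pattern looks roughly like this sketch (a simplified stand-in for the real `SlideOverPanel`; the style values are condensed from the actual constants):

```tsx
import type {ReactNode} from 'react';
import {AnimatePresence, motion} from 'framer-motion';

// Sketch: when `collapsed` is true the panel is not rendered at all, so none
// of its contents exist in the DOM. AnimatePresence notices the child being
// removed and still plays the `exit` animation before unmounting it.
function Panel({collapsed, children}: {collapsed: boolean; children: ReactNode}) {
  return (
    <AnimatePresence>
      {!collapsed && (
        <motion.div
          initial={{opacity: 0, x: '50vw'}}
          animate={{opacity: 1, x: 0}}
          exit={{opacity: 0, x: '50vw'}}
          transition={{type: 'spring', stiffness: 500, damping: 50}}
        >
          {children}
        </motion.div>
      )}
    </AnimatePresence>
  );
}
```

The old version instead kept `_SlideOverPanel` mounted and animated it to an invisible state, which is why tests could still find its contents.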
--- .../starfish/components/detailPanel.spec.tsx | 22 +++++++ .../starfish/components/slideOverPanel.tsx | 65 +++++++++---------- 2 files changed, 53 insertions(+), 34 deletions(-) create mode 100644 static/app/views/starfish/components/detailPanel.spec.tsx diff --git a/static/app/views/starfish/components/detailPanel.spec.tsx b/static/app/views/starfish/components/detailPanel.spec.tsx new file mode 100644 index 00000000000000..62944df628871c --- /dev/null +++ b/static/app/views/starfish/components/detailPanel.spec.tsx @@ -0,0 +1,22 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import DetailPanel from 'sentry/views/starfish/components/detailPanel'; + +describe('DetailPanel', function () { + it('renders toolbar and inner content', function () { + render(Content); + + expect(screen.getByRole('button', {name: 'Dock to the bottom'})).toBeInTheDocument(); + expect(screen.getByRole('button', {name: 'Dock to the right'})).toBeInTheDocument(); + expect(screen.getByRole('button', {name: 'Close Details'})).toBeInTheDocument(); + + expect(screen.getByText('Content')).toBeInTheDocument(); + }); + + it('does not render content when closed', function () { + render(Content); + + expect(screen.queryByRole('button', {name: 'Close Details'})).not.toBeInTheDocument(); + expect(screen.queryByText('Content')).not.toBeInTheDocument(); + }); +}); diff --git a/static/app/views/starfish/components/slideOverPanel.tsx b/static/app/views/starfish/components/slideOverPanel.tsx index ab8daae447ff1d..c62d15fba19511 100644 --- a/static/app/views/starfish/components/slideOverPanel.tsx +++ b/static/app/views/starfish/components/slideOverPanel.tsx @@ -3,21 +3,21 @@ import {forwardRef, useEffect} from 'react'; import isPropValid from '@emotion/is-prop-valid'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; -import {motion} from 'framer-motion'; +import {AnimatePresence, motion} from 'framer-motion'; import {space} from 'sentry/styles/space'; const PANEL_WIDTH = '50vw'; const PANEL_HEIGHT = '50vh'; -const INITIAL_STYLES = { - bottom: {opacity: 0, x: 0, y: 0}, - right: {opacity: 0, x: PANEL_WIDTH, y: 0}, +const OPEN_STYLES = { + bottom: {opacity: 1, x: 0, y: 0}, + right: {opacity: 1, x: 0, y: 0}, }; -const FINAL_STYLES = { +const COLLAPSED_STYLES = { bottom: {opacity: 0, x: 0, y: PANEL_HEIGHT}, - right: {opacity: 0, x: PANEL_WIDTH}, + right: {opacity: 0, x: PANEL_WIDTH, y: 0}, }; type SlideOverPanelProps = { @@ -38,33 +38,40 @@ function SlideOverPanel( onOpen(); } }, [collapsed, onOpen]); - const initial = slidePosition ? INITIAL_STYLES[slidePosition] : INITIAL_STYLES.right; - const final = slidePosition ? FINAL_STYLES[slidePosition] : FINAL_STYLES.right; + + const openStyle = slidePosition ? OPEN_STYLES[slidePosition] : OPEN_STYLES.right; + + const collapsedStyle = slidePosition + ? COLLAPSED_STYLES[slidePosition] + : COLLAPSED_STYLES.right; return ( - <_SlideOverPanel - ref={ref} - collapsed={collapsed} - initial={initial} - animate={!collapsed ? 
{opacity: 1, x: 0, y: 0} : final} - slidePosition={slidePosition} - transition={{ - type: 'spring', - stiffness: 500, - damping: 50, - }} - > - {children} - + + {!collapsed && ( + <_SlideOverPanel + ref={ref} + initial={collapsedStyle} + animate={openStyle} + exit={collapsedStyle} + slidePosition={slidePosition} + transition={{ + type: 'spring', + stiffness: 500, + damping: 50, + }} + > + {children} + + )} + ); } const _SlideOverPanel = styled(motion.div, { shouldForwardProp: prop => - ['animate', 'transition', 'initial'].includes(prop) || + ['initial', 'animate', 'exit', 'transition'].includes(prop) || (prop !== 'collapsed' && isPropValid(prop)), })<{ - collapsed: boolean; slidePosition?: 'right' | 'bottom'; }>` position: fixed; @@ -109,14 +116,4 @@ const _SlideOverPanel = styled(motion.div, { left: auto; `} } - - ${p => - p.collapsed - ? css` - overflow: hidden; - ` - : css` - overflow-x: hidden; - overflow-y: auto; - `} `; From 882f0bc54033b15459ca19e345321f347409c0fd Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 6 May 2024 11:13:54 -0700 Subject: [PATCH 028/376] ref(feedback): remove 'go to old uf' button (#64964) merge when we remove the beta flag for UF! no need to link to old UF anymore SCR-20240209-kjpl Relates to: * https://github.com/getsentry/sentry/issues/64671 --- .../components/feedback/oldFeedbackButton.tsx | 41 ------------------- .../app/views/feedback/feedbackListPage.tsx | 4 -- 2 files changed, 45 deletions(-) delete mode 100644 static/app/components/feedback/oldFeedbackButton.tsx diff --git a/static/app/components/feedback/oldFeedbackButton.tsx b/static/app/components/feedback/oldFeedbackButton.tsx deleted file mode 100644 index 9ecdc8f98c36dc..00000000000000 --- a/static/app/components/feedback/oldFeedbackButton.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import {LinkButton} from 'sentry/components/button'; -import ExternalLink from 'sentry/components/links/externalLink'; -import {Tooltip} from 'sentry/components/tooltip'; -import {t, tct} from 'sentry/locale'; -import {useLocation} from 'sentry/utils/useLocation'; -import useOrganization from 'sentry/utils/useOrganization'; -import {normalizeUrl} from 'sentry/utils/withDomainRequired'; - -export default function OldFeedbackButton() { - const location = useLocation(); - const organization = useOrganization(); - - return ( - - ), - })} - position="left" - isHoverable - > - - {t('Go to Old User Feedback')} - - - ); -} diff --git a/static/app/views/feedback/feedbackListPage.tsx b/static/app/views/feedback/feedbackListPage.tsx index e6807df6b06ad7..442d3080ef1f7c 100644 --- a/static/app/views/feedback/feedbackListPage.tsx +++ b/static/app/views/feedback/feedbackListPage.tsx @@ -10,7 +10,6 @@ import FeedbackSearch from 'sentry/components/feedback/feedbackSearch'; import FeedbackSetupPanel from 'sentry/components/feedback/feedbackSetupPanel'; import FeedbackWhatsNewBanner from 'sentry/components/feedback/feedbackWhatsNewBanner'; import FeedbackList from 'sentry/components/feedback/list/feedbackList'; -import OldFeedbackButton from 'sentry/components/feedback/oldFeedbackButton'; import useCurrentFeedbackId from 'sentry/components/feedback/useCurrentFeedbackId'; import useHaveSelectedProjectsSetupFeedback, { useHaveSelectedProjectsSetupNewFeedback, @@ -71,9 +70,6 @@ export default function FeedbackListPage({}: Props) { /> - - - From ffff777b4ea9f5e1b0193052375ccdafd08f468a Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Mon, 6 
May 2024 14:17:44 -0400 Subject: [PATCH 029/376] chore(perf): Remove more unused Starfish code (#70290) As far as I can tell, none of it is in use, and won't be coming into use! --- static/app/views/starfish/allowedProjects.ts | 22 -- .../views/starfish/components/datePicker.tsx | 23 -- .../samplesTable/useErrorSamples.tsx | 47 --- .../useSlowMedianFastSamplesQuery.tsx | 201 ---------- .../components/starfishProjectSelector.tsx | 74 ---- .../queries/useProjectSpanMetricsCounts.tsx | 37 -- .../utils/generatePerformanceEventView.tsx | 143 ------- .../views/mobileServiceView/index.tsx | 365 ------------------ .../views/mobileServiceView/utils.tsx | 15 - .../views/mobileServiceView/viewsList.tsx | 144 ------- 10 files changed, 1071 deletions(-) delete mode 100644 static/app/views/starfish/allowedProjects.ts delete mode 100644 static/app/views/starfish/components/datePicker.tsx delete mode 100644 static/app/views/starfish/components/samplesTable/useErrorSamples.tsx delete mode 100644 static/app/views/starfish/components/samplesTable/useSlowMedianFastSamplesQuery.tsx delete mode 100644 static/app/views/starfish/components/starfishProjectSelector.tsx delete mode 100644 static/app/views/starfish/queries/useProjectSpanMetricsCounts.tsx delete mode 100644 static/app/views/starfish/utils/generatePerformanceEventView.tsx delete mode 100644 static/app/views/starfish/views/mobileServiceView/index.tsx delete mode 100644 static/app/views/starfish/views/mobileServiceView/utils.tsx delete mode 100644 static/app/views/starfish/views/mobileServiceView/viewsList.tsx diff --git a/static/app/views/starfish/allowedProjects.ts b/static/app/views/starfish/allowedProjects.ts deleted file mode 100644 index 6b66414bec1ab2..00000000000000 --- a/static/app/views/starfish/allowedProjects.ts +++ /dev/null @@ -1,22 +0,0 @@ -import {StarfishType} from 'sentry/views/starfish/types'; - -export const ALLOWED_PROJECT_IDS_FOR_ORG_SLUG: { - [slug: string]: string[]; -} = { - sentry: [ - '1', // Sentry - '300688', // Snuba - '4505160011087872', // GibPotato PHP - '4505148785885184', // GibPotato Go - ], - codecov: ['5215654'], - peated: ['4505138082349056'], - 'sentry-sdks': ['5428557'], - demo: ['6249899'], - 'testorg-az': ['6249899'], -}; - -export const STARFISH_TYPE_FOR_PROJECT: {[project: string]: StarfishType} = { - 5428557: StarfishType.MOBILE, - 6249899: StarfishType.MOBILE, -}; diff --git a/static/app/views/starfish/components/datePicker.tsx b/static/app/views/starfish/components/datePicker.tsx deleted file mode 100644 index 5fbae332de6327..00000000000000 --- a/static/app/views/starfish/components/datePicker.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import useOrganization from 'sentry/utils/useOrganization'; - -function StarfishDatePicker() { - const organization = useOrganization(); - return ( - { - trackAnalytics('starfish.page_filter.data_change', { - organization, - start, - end, - relative, - }); - }} - /> - ); -} - -export default StarfishDatePicker; diff --git a/static/app/views/starfish/components/samplesTable/useErrorSamples.tsx b/static/app/views/starfish/components/samplesTable/useErrorSamples.tsx deleted file mode 100644 index e5e00a7ae0300c..00000000000000 --- a/static/app/views/starfish/components/samplesTable/useErrorSamples.tsx +++ /dev/null @@ -1,47 +0,0 @@ -import {useDiscoverQuery} from 'sentry/utils/discover/discoverQuery'; -import type EventView from 
'sentry/utils/discover/eventView'; -import type {QueryFieldValue} from 'sentry/utils/discover/fields'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; -import {useLocation} from 'sentry/utils/useLocation'; -import useOrganization from 'sentry/utils/useOrganization'; - -export default function useErrorSamples(eventView: EventView) { - const location = useLocation(); - const organization = useOrganization(); - - const columns: QueryFieldValue[] = [ - { - field: 'timestamp', - kind: 'field', - }, - { - field: 'http.status_code', - kind: 'field', - }, - { - field: 'transaction.status', - kind: 'field', - }, - ]; - - let errorSamplesEventView = eventView.clone(); - errorSamplesEventView.additionalConditions = new MutableSearch( - 'http.status_code:[500,501,502,503,504,505,506,507,508,510,511]' - ); - errorSamplesEventView = errorSamplesEventView.withColumns(columns).withSorts([ - { - field: 'timestamp', - kind: 'desc', - }, - ]); - - const {isLoading, data} = useDiscoverQuery({ - eventView: errorSamplesEventView, - referrer: 'starfish-transaction-summary-sample-events', - location, - orgSlug: organization.slug, - limit: 6, - }); - - return {isLoading, data: data ? data.data : []}; -} diff --git a/static/app/views/starfish/components/samplesTable/useSlowMedianFastSamplesQuery.tsx b/static/app/views/starfish/components/samplesTable/useSlowMedianFastSamplesQuery.tsx deleted file mode 100644 index 2d48448482eece..00000000000000 --- a/static/app/views/starfish/components/samplesTable/useSlowMedianFastSamplesQuery.tsx +++ /dev/null @@ -1,201 +0,0 @@ -import max from 'lodash/max'; - -import {useDiscoverQuery} from 'sentry/utils/discover/discoverQuery'; -import type EventView from 'sentry/utils/discover/eventView'; -import type {QueryFieldValue} from 'sentry/utils/discover/fields'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; -import {useLocation} from 'sentry/utils/useLocation'; -import useOrganization from 'sentry/utils/useOrganization'; - -type DataRow = { - 'http.status_code': number; - id: string; - profile_id: string; - project: string; - 'spans.browser': number; - 'spans.db': number; - 'spans.http': number; - 'spans.resource': number; - 'spans.ui': number; - timestamp: string; - trace: string; - 'transaction.duration': number; - 'transaction.status': string; -}; - -const LIMIT_PER_POPULATION = 2; - -/** - * This hook will fetch transaction events from 3 different types of populations and combine them in one set, then return them: - * - * - Slowest Events - * - Median / Baseline Events - * - Fastest Events - * - * It assumes that you are passing an eventView object with a query scoped to a specific transaction - * - * @param eventView An eventView containing query information, such as the transaction and other filters - */ -export default function useSlowMedianFastSamplesQuery( - eventView: EventView, - graphMax?: number -) { - const location = useLocation(); - const organization = useOrganization(); - - const commonColumns: QueryFieldValue[] = [ - { - field: 'trace', - kind: 'field', - }, - { - field: 'transaction.duration', - kind: 'field', - }, - { - field: 'profile_id', - kind: 'field', - }, - { - field: 'timestamp', - kind: 'field', - }, - { - field: 'spans.browser', - kind: 'field', - }, - { - field: 'spans.db', - kind: 'field', - }, - { - field: 'spans.http', - kind: 'field', - }, - { - field: 'spans.resource', - kind: 'field', - }, - { - field: 'spans.ui', - kind: 'field', - }, - ]; - - const eventViewAggregates = eventView - .clone() - .withColumns([ - {kind: 
'function', function: ['avg', 'transaction.duration', undefined, undefined]}, - ]); - - const {isLoading: isLoadingAgg, data: aggregatesData} = useDiscoverQuery({ - eventView: eventViewAggregates, - referrer: 'starfish-transaction-summary-sample-events', - location, - orgSlug: organization.slug, - options: { - refetchOnWindowFocus: false, - enabled: graphMax !== undefined, - }, - }); - const avg = aggregatesData - ? aggregatesData.data[0]['avg(transaction.duration)'] - : undefined; - const upperThird = graphMax ? max([graphMax * (2 / 3), avg]) : undefined; - const lowerThird = graphMax ? graphMax * (1 / 3) : undefined; - - const slowestSamplesEventView = eventView - .clone() - .withColumns(commonColumns) - .withSorts([ - { - field: 'id', - kind: 'desc', - }, - ]); - - slowestSamplesEventView.additionalConditions = new MutableSearch( - `transaction.duration:<${graphMax} transaction.duration:>${upperThird}` - ); - - const {isLoading: isLoadingSlowest, data: slowestSamplesData} = useDiscoverQuery({ - eventView: slowestSamplesEventView, - referrer: 'starfish-transaction-summary-sample-events', - location, - orgSlug: organization.slug, - - limit: LIMIT_PER_POPULATION, - options: { - refetchOnWindowFocus: false, - enabled: graphMax !== undefined && avg !== undefined, - }, - }); - - const medianSamplesEventView = eventView - .clone() - .withColumns(commonColumns) - .withSorts([ - { - field: 'transaction.duration', - kind: 'desc', - }, - ]); - - medianSamplesEventView.additionalConditions = new MutableSearch( - `transaction.duration:<=${upperThird} transaction.duration:>${lowerThird}` - ); - - const {isLoading: isLoadingMedian, data: medianSamplesData} = useDiscoverQuery({ - eventView: medianSamplesEventView, - referrer: 'starfish-transaction-summary-sample-events', - location, - orgSlug: organization.slug, - limit: LIMIT_PER_POPULATION, - options: { - refetchOnWindowFocus: false, - enabled: graphMax !== undefined && avg !== undefined, - }, - }); - - const fastestSamplesEventView = eventView - .clone() - .withColumns(commonColumns) - .withSorts([ - { - field: 'transaction.duration', - kind: 'asc', - }, - ]); - - fastestSamplesEventView.additionalConditions = new MutableSearch( - `transaction.duration:<=${lowerThird}` - ); - - const {isLoading: isLoadingFastest, data: fastestSamplesData} = useDiscoverQuery({ - eventView: fastestSamplesEventView, - referrer: 'starfish-transaction-summary-sample-events', - location, - orgSlug: organization.slug, - limit: LIMIT_PER_POPULATION, - options: { - refetchOnWindowFocus: false, - enabled: graphMax !== undefined, - }, - }); - - if (isLoadingAgg || isLoadingSlowest || isLoadingMedian || isLoadingFastest) { - return {isLoading: true, data: []}; - } - - const combinedData = [ - ...(slowestSamplesData?.data ?? []), - ...(medianSamplesData?.data ?? []), - ...(fastestSamplesData?.data ?? []), - ]; - - return { - isLoading: false, - data: combinedData as DataRow[], - aggregatesData: aggregatesData?.data[0] ?? 
[], - }; -} diff --git a/static/app/views/starfish/components/starfishProjectSelector.tsx b/static/app/views/starfish/components/starfishProjectSelector.tsx deleted file mode 100644 index 664d854f587e80..00000000000000 --- a/static/app/views/starfish/components/starfishProjectSelector.tsx +++ /dev/null @@ -1,74 +0,0 @@ -import {useMemo, useState} from 'react'; - -import {updateProjects} from 'sentry/actionCreators/pageFilters'; -import {CompactSelect} from 'sentry/components/compactSelect'; -import ProjectBadge from 'sentry/components/idBadge/projectBadge'; -import {t} from 'sentry/locale'; -import type {Project} from 'sentry/types/project'; -import useOrganization from 'sentry/utils/useOrganization'; -import usePageFilters from 'sentry/utils/usePageFilters'; -import useProjects from 'sentry/utils/useProjects'; -import useRouter from 'sentry/utils/useRouter'; -import {ALLOWED_PROJECT_IDS_FOR_ORG_SLUG} from 'sentry/views/starfish/allowedProjects'; - -export function StarfishProjectSelector() { - const {projects, initiallyLoaded: projectsLoaded, fetchError} = useProjects(); - const organization = useOrganization(); - const router = useRouter(); - const {selection} = usePageFilters(); - - const allowedProjectIDs: string[] = useMemo( - () => ALLOWED_PROJECT_IDS_FOR_ORG_SLUG[organization.slug] ?? [], - [organization.slug] - ); - - const [selectedProjectId, setSelectedProjectId] = useState( - selection.projects[0] ?? allowedProjectIDs[0] - ); - - const currentProject = selection.projects[0] ?? allowedProjectIDs[0]; - if (selectedProjectId !== currentProject) { - setSelectedProjectId(currentProject); - } - - if (!projectsLoaded) { - return ( - - ); - } - - if (fetchError) { - throw new Error('Failed to fetch projects'); - } - - const projectOptions = projects - .filter(project => allowedProjectIDs.includes(project.id)) - .map(project => ({ - label: , - value: project.id, - })) - .sort((projectA, projectB) => Number(projectA.value) - Number(projectB.value)); - - const handleProjectChange = option => - updateProjects([parseInt(option.value, 10)], router, { - storageNamespace: 'starfish', - save: true, - }); - - return ( - - ); -} - -function ProjectOptionLabel({project}: {project: Project}) { - return ; -} diff --git a/static/app/views/starfish/queries/useProjectSpanMetricsCounts.tsx b/static/app/views/starfish/queries/useProjectSpanMetricsCounts.tsx deleted file mode 100644 index f0c6fec7cc39ba..00000000000000 --- a/static/app/views/starfish/queries/useProjectSpanMetricsCounts.tsx +++ /dev/null @@ -1,37 +0,0 @@ -import EventView from 'sentry/utils/discover/eventView'; -import {DiscoverDatasets} from 'sentry/utils/discover/types'; -import {useSpansQuery} from 'sentry/views/starfish/utils/useSpansQuery'; - -interface Options { - enabled?: boolean; - projectId?: string[]; - query?: string; - statsPeriod?: string; -} - -export const useProjectSpanMetricCounts = ({ - projectId, - enabled, - query, - statsPeriod, -}: Options) => { - const eventView = EventView.fromSavedQuery({ - name: 'Has Any Span Metrics', - query, - fields: ['project.id', 'count()'], - projects: projectId?.map(id => parseInt(id, 10)), - dataset: DiscoverDatasets.SPANS_METRICS, - version: 2, - }); - - eventView.statsPeriod = statsPeriod; - - const result = useSpansQuery<{'count()': number; 'project.id': number}[]>({ - eventView, - initialData: [], - enabled, - referrer: 'span-metrics', - }); - - return result; -}; diff --git a/static/app/views/starfish/utils/generatePerformanceEventView.tsx 
b/static/app/views/starfish/utils/generatePerformanceEventView.tsx deleted file mode 100644 index b416f0fa266116..00000000000000 --- a/static/app/views/starfish/utils/generatePerformanceEventView.tsx +++ /dev/null @@ -1,143 +0,0 @@ -import type {Location} from 'history'; - -import {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; -import {wrapQueryInWildcards} from 'sentry/components/performance/searchBar'; -import {t} from 'sentry/locale'; -import type {PageFilters} from 'sentry/types/core'; -import type {NewQuery, Organization} from 'sentry/types/organization'; -import EventView from 'sentry/utils/discover/eventView'; -import {DiscoverDatasets} from 'sentry/utils/discover/types'; -import {decodeScalar} from 'sentry/utils/queryString'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; -import {STARFISH_TYPE_FOR_PROJECT} from 'sentry/views/starfish/allowedProjects'; -import {StarfishType} from 'sentry/views/starfish/types'; - -const DEFAULT_STATS_PERIOD = '7d'; - -const TOKEN_KEYS_SUPPORTED_IN_LIMITED_SEARCH = ['transaction']; -export const TIME_SPENT_IN_SERVICE = 'time_spent_percentage()'; - -const getDefaultStatsPeriod = (organization: Organization) => { - if (organization?.features?.includes('performance-landing-page-stats-period')) { - return '14d'; - } - return DEFAULT_STATS_PERIOD; -}; - -function prepareQueryForLandingPage(searchQuery, withStaticFilters) { - const conditions = new MutableSearch(searchQuery); - - // If there is a bare text search, we want to treat it as a search - // on the transaction name. - if (conditions.freeText.length > 0) { - const parsedFreeText = conditions.freeText.join(' '); - - // the query here is a user entered condition, no need to escape it - conditions.setFilterValues( - 'transaction', - [wrapQueryInWildcards(parsedFreeText)], - false - ); - conditions.freeText = []; - } - if (withStaticFilters) { - conditions.tokens = conditions.tokens.filter( - token => token.key && TOKEN_KEYS_SUPPORTED_IN_LIMITED_SEARCH.includes(token.key) - ); - } - return conditions.formatString(); -} - -export function generateWebServiceEventView( - location: Location, - {withStaticFilters = false} = {}, - organization: Organization, - selection: PageFilters -) { - const {query} = location; - const project = selection.projects[0]; - const starfishType = STARFISH_TYPE_FOR_PROJECT[project] || StarfishType.BACKEND; - - const getSavedQuery = () => { - switch (starfishType) { - case StarfishType.MOBILE: - return generateMobileServiceSavedQuery(location); - case StarfishType.BACKEND: - default: - return generateWebServiceSavedQuery(location); - } - }; - - const savedQuery = getSavedQuery(); - - const hasStartAndEnd = query.start && query.end; - - const widths = Array(savedQuery.fields.length).fill(COL_WIDTH_UNDEFINED); - widths[savedQuery.fields.length - 1] = '110'; - savedQuery.widths = widths; - - if (!query.statsPeriod && !hasStartAndEnd) { - savedQuery.range = getDefaultStatsPeriod(organization); - } - - const searchQuery = decodeScalar(query.query, ''); - savedQuery.query = `${savedQuery.query} ${prepareQueryForLandingPage( - searchQuery, - withStaticFilters - )}`; - - const eventView = EventView.fromNewQueryWithLocation(savedQuery, location); - - return eventView; -} - -export function generateMobileServiceSavedQuery(location: Location) { - const {query} = location; - const orderby = decodeScalar(query.sort, `-eps`); - - const fields = [ - 'transaction', - 'eps()', - 'p75(measurements.frames_slow_rate)', - 'p75(measurements.time_to_initial_display)', - ]; - 
- const savedQuery: NewQuery = { - id: undefined, - name: t('Performance'), - query: 'event.type:transaction transaction.op:ui.load', - fields, - version: 2, - dataset: DiscoverDatasets.METRICS, - }; - savedQuery.orderby = orderby; - - return savedQuery; -} - -function generateWebServiceSavedQuery(location: Location) { - const {query} = location; - const orderby = decodeScalar(query.sort, `-time_spent_percentage`); - - const fields = [ - 'transaction', - 'http.method', - 'tps()', - 'avg(transaction.duration)', - 'http_error_count()', - 'time_spent_percentage()', - 'sum(transaction.duration)', - ]; - - const savedQuery: NewQuery = { - id: undefined, - name: t('Performance'), - query: 'event.type:transaction has:http.method transaction.op:http.server', - fields, - version: 2, - dataset: DiscoverDatasets.METRICS, - }; - savedQuery.orderby = orderby; - - return savedQuery; -} diff --git a/static/app/views/starfish/views/mobileServiceView/index.tsx b/static/app/views/starfish/views/mobileServiceView/index.tsx deleted file mode 100644 index 95a203b30f690a..00000000000000 --- a/static/app/views/starfish/views/mobileServiceView/index.tsx +++ /dev/null @@ -1,365 +0,0 @@ -import {Fragment} from 'react'; -import styled from '@emotion/styled'; - -import _EventsRequest from 'sentry/components/charts/eventsRequest'; -import {getInterval} from 'sentry/components/charts/utils'; -import LoadingContainer from 'sentry/components/loading/loadingContainer'; -import {PerformanceLayoutBodyRow} from 'sentry/components/performance/layouts'; -import {CHART_PALETTE} from 'sentry/constants/chartPalette'; -import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import type {Series, SeriesDataUnit} from 'sentry/types/echarts'; -import {defined} from 'sentry/utils'; -import {tooltipFormatterUsingAggregateOutputType} from 'sentry/utils/discover/charts'; -import EventView from 'sentry/utils/discover/eventView'; -import {DiscoverDatasets} from 'sentry/utils/discover/types'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; -import usePageFilters from 'sentry/utils/usePageFilters'; -import Chart, { - ChartType, - useSynchronizeCharts, -} from 'sentry/views/starfish/components/chart'; -import MiniChartPanel from 'sentry/views/starfish/components/miniChartPanel'; -import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; -import {STARFISH_CHART_INTERVAL_FIDELITY} from 'sentry/views/starfish/utils/constants'; -import {useEventsStatsQuery} from 'sentry/views/starfish/utils/useEventsStatsQuery'; -import {ViewsList} from 'sentry/views/starfish/views/mobileServiceView/viewsList'; - -const READABLE_YAXIS_LABELS = { - 'avg(measurements.app_start_cold)': 'avg(app_start_cold)', - 'avg(measurements.app_start_warm)': 'avg(app_start_warm)', - 'avg(measurements.time_to_initial_display)': 'avg(time_to_initial_display)', - 'avg(measurements.time_to_full_display)': 'avg(time_to_full_display)', - 'avg(measurements.frames_slow_rate)': 'avg(frames_slow_rate)', - 'avg(measurements.frames_frozen_rate)': 'avg(frames_frozen_rate)', -}; - -export function MobileStarfishView() { - const pageFilter = usePageFilters(); - const { - primaryRelease, - secondaryRelease, - isLoading: isReleasesLoading, - } = useReleaseSelection(); - - const query = new MutableSearch(['event.type:transaction', 'transaction.op:ui.load']); - - useSynchronizeCharts(); - const { - isLoading: seriesIsLoading, - data: firstReleaseSeries, - error: seriesError, - } = useEventsStatsQuery({ - eventView: 
EventView.fromNewQueryWithPageFilters( - { - name: '', - fields: [], - yAxis: [ - 'avg(measurements.app_start_cold)', - 'avg(measurements.app_start_warm)', - 'avg(measurements.time_to_initial_display)', - 'avg(measurements.time_to_full_display)', - 'avg(measurements.frames_slow_rate)', - 'avg(measurements.frames_frozen_rate)', - ], - query: - defined(primaryRelease) && primaryRelease !== '' - ? query.copy().addStringFilter(`release:${primaryRelease}`).formatString() - : query.formatString(), - dataset: DiscoverDatasets.METRICS, - version: 2, - interval: getInterval( - pageFilter.selection.datetime, - STARFISH_CHART_INTERVAL_FIDELITY - ), - }, - pageFilter.selection - ), - enabled: !isReleasesLoading, - referrer: 'api.starfish-web-service.span-category-breakdown-timeseries', - initialData: {}, - }); - - const {data: secondReleaseSeries} = useEventsStatsQuery({ - eventView: EventView.fromNewQueryWithPageFilters( - { - name: '', - fields: [], - yAxis: [ - 'avg(measurements.app_start_cold)', - 'avg(measurements.app_start_warm)', - 'avg(measurements.time_to_initial_display)', - 'avg(measurements.time_to_full_display)', - 'avg(measurements.frames_slow_rate)', - 'avg(measurements.frames_frozen_rate)', - ], - query: - defined(secondaryRelease) && secondaryRelease !== '' - ? query.copy().addStringFilter(`release:${secondaryRelease}`).formatString() - : query.formatString(), - dataset: DiscoverDatasets.METRICS, - version: 2, - interval: getInterval( - pageFilter.selection.datetime, - STARFISH_CHART_INTERVAL_FIDELITY - ), - }, - pageFilter.selection - ), - enabled: !isReleasesLoading && primaryRelease !== secondaryRelease, - referrer: 'api.starfish-web-service.span-category-breakdown-timeseries', - initialData: {}, - }); - - if (isReleasesLoading) { - return ; - } - - function renderCharts() { - const transformedSeries: {[yAxisName: string]: Series[]} = { - 'avg(measurements.app_start_cold)': [], - 'avg(measurements.app_start_warm)': [], - 'avg(measurements.time_to_initial_display)': [], - 'avg(measurements.time_to_full_display)': [], - 'avg(measurements.frames_slow_rate)': [], - 'avg(measurements.frames_frozen_rate)': [], - }; - - if (defined(firstReleaseSeries)) { - Object.keys(firstReleaseSeries).forEach(yAxis => { - const label = `${primaryRelease}`; - if (yAxis in transformedSeries) { - transformedSeries[yAxis].push({ - seriesName: label, - color: CHART_PALETTE[1][0], - data: - firstReleaseSeries[yAxis]?.data.map(datum => { - return { - name: datum[0] * 1000, - value: datum[1][0].count, - } as SeriesDataUnit; - }) ?? [], - }); - } - }); - } - - if (defined(secondReleaseSeries)) { - Object.keys(secondReleaseSeries).forEach(yAxis => { - const label = `${secondaryRelease}`; - if (yAxis in transformedSeries) { - transformedSeries[yAxis].push({ - seriesName: label, - color: CHART_PALETTE[1][1], - data: - secondReleaseSeries[yAxis]?.data.map(datum => { - return { - name: datum[0] * 1000, - value: datum[1][0].count, - } as SeriesDataUnit; - }) ?? 
[], - }); - } - }); - } - - return ( - - - - - {READABLE_YAXIS_LABELS['avg(measurements.app_start_cold)']} - - - tooltipFormatterUsingAggregateOutputType(value, 'duration'), - }} - error={seriesError} - /> - - - - - {READABLE_YAXIS_LABELS['avg(measurements.app_start_warm)']} - - - tooltipFormatterUsingAggregateOutputType(value, 'duration'), - }} - error={seriesError} - /> - - - - - - {READABLE_YAXIS_LABELS['avg(measurements.time_to_initial_display)']} - - - tooltipFormatterUsingAggregateOutputType(value, 'duration'), - }} - error={seriesError} - /> - - - - {READABLE_YAXIS_LABELS['avg(measurements.time_to_full_display)']} - - - tooltipFormatterUsingAggregateOutputType(value, 'duration'), - }} - error={seriesError} - /> - - - - - - {READABLE_YAXIS_LABELS['avg(measurements.frames_slow_rate)']} - - - tooltipFormatterUsingAggregateOutputType(value, 'percentage'), - }} - error={seriesError} - /> - - - - {READABLE_YAXIS_LABELS['avg(measurements.frames_frozen_rate)']} - - - tooltipFormatterUsingAggregateOutputType(value, 'percentage'), - }} - error={seriesError} - /> - - - - ); - } - - return ( -
- - {renderCharts()} - - -
- ); -} - -const StyledRow = styled(PerformanceLayoutBodyRow)` - margin-bottom: ${space(2)}; -`; - -const ChartsContainer = styled('div')` - display: flex; - flex-direction: row; - flex-wrap: wrap; - gap: ${space(2)}; -`; - -const ChartsContainerItem = styled('div')` - flex: 1; -`; - -export const Spacer = styled('div')` - margin-top: ${space(3)}; -`; - -const SubTitle = styled('div')` - margin-bottom: ${space(1.5)}; - font-size: ${p => p.theme.fontSizeSmall}; - font-weight: bold; -`; diff --git a/static/app/views/starfish/views/mobileServiceView/utils.tsx b/static/app/views/starfish/views/mobileServiceView/utils.tsx deleted file mode 100644 index f70774e053ba8d..00000000000000 --- a/static/app/views/starfish/views/mobileServiceView/utils.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import type {Location} from 'history'; - -import {decodeScalar} from 'sentry/utils/queryString'; - -export function getPrimaryRelease(location: Location): string | undefined { - const {primaryRelease} = location.query; - - return decodeScalar(primaryRelease); -} - -export function getSecondaryRelease(location: Location): string | undefined { - const {secondaryRelease} = location.query; - - return decodeScalar(secondaryRelease); -} diff --git a/static/app/views/starfish/views/mobileServiceView/viewsList.tsx b/static/app/views/starfish/views/mobileServiceView/viewsList.tsx deleted file mode 100644 index 6fe0da085edc2a..00000000000000 --- a/static/app/views/starfish/views/mobileServiceView/viewsList.tsx +++ /dev/null @@ -1,144 +0,0 @@ -import {Fragment} from 'react'; -import type {LocationDescriptorObject} from 'history'; - -import type {GridColumnHeader, GridColumnOrder} from 'sentry/components/gridEditable'; -import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; -import SortLink from 'sentry/components/gridEditable/sortLink'; -import Pagination from 'sentry/components/pagination'; -import {t} from 'sentry/locale'; -import {defined} from 'sentry/utils'; -import EventView, {isFieldSortable} from 'sentry/utils/discover/eventView'; -import {getFieldRenderer} from 'sentry/utils/discover/fieldRenderers'; -import {VisuallyCompleteWithData} from 'sentry/utils/performanceForSentry'; -import {useLocation} from 'sentry/utils/useLocation'; -import useOrganization from 'sentry/utils/useOrganization'; -import {getAlignment} from 'sentry/views/starfish/components/tableCells/renderHeadCell'; -import {generateMobileServiceSavedQuery} from 'sentry/views/starfish/utils/generatePerformanceEventView'; -import {useWrappedDiscoverQuery} from 'sentry/views/starfish/utils/useSpansQuery'; -import {DataTitles} from 'sentry/views/starfish/views/spans/types'; - -type TableColumnKeys = - | 'transaction' - | 'eps()' - | 'p75(measurements.frames_slow_rate)' - | 'p75(measurements.time_to_initial_display)'; -type MobileViewsColumn = GridColumnOrder; -type Column = GridColumnHeader; -type MobileViewsRow = Record; - -const COLUMN_ORDER: MobileViewsColumn[] = [ - { - key: 'transaction', - name: t('Screen'), - width: COL_WIDTH_UNDEFINED, - }, - { - key: 'eps()', - name: DataTitles.throughput, - width: COL_WIDTH_UNDEFINED, - }, - { - key: 'p75(measurements.frames_slow_rate)', - name: DataTitles.slowFrames, - width: COL_WIDTH_UNDEFINED, - }, - { - key: 'p75(measurements.time_to_initial_display)', - name: DataTitles.ttid, - width: COL_WIDTH_UNDEFINED, - }, -]; - -export function ViewsList() { - const location = useLocation(); - const organization = useOrganization(); - const savedQuery = generateMobileServiceSavedQuery(location); - 
const eventView = EventView.fromNewQueryWithLocation(savedQuery, location); - const {isLoading, data, meta, pageLinks} = useViewsList(eventView); - - function renderHeadCell({column, tableMeta}) { - const {key} = column; - const alignment = getAlignment(key); - const field = { - field: column.key, - width: column.width, - }; - - function generateSortLink(): LocationDescriptorObject | undefined { - if (!tableMeta) { - return undefined; - } - - const nextEventView = eventView.sortOnField(field, tableMeta); - const queryStringObject = nextEventView.generateQueryStringObject(); - - return { - ...location, - query: {...location.query, sort: queryStringObject.sort}, - }; - } - - const currentSort = eventView.sortForField(field, tableMeta); - const currentSortKind = currentSort ? currentSort.kind : undefined; - const canSort = isFieldSortable(field, tableMeta); - - const sortLink = ( - - ); - return sortLink; - } - - function renderBodyCell(column: Column, row: MobileViewsRow): React.ReactNode { - if (!meta || !meta?.fields) { - return row[column.key]; - } - - const renderer = getFieldRenderer(column.key, meta.fields, false); - - const rendered = renderer(row, { - location, - organization, - unit: meta.units?.[column.key], - }); - - return rendered; - } - - return ( - - 0} - > - renderHeadCell({column, tableMeta: meta}), - renderBodyCell, - }} - location={location} - /> - - - - ); -} - -export const useViewsList = (eventView: EventView) => { - const {isLoading, data, meta, pageLinks} = useWrappedDiscoverQuery({ - eventView, - initialData: [], - limit: 50, - }); - - return {isLoading, data, meta, pageLinks}; -}; From 8410d2460c8c7ac4b29a374e4877322d2522f850 Mon Sep 17 00:00:00 2001 From: Catherine Lee <55311782+c298lee@users.noreply.github.com> Date: Mon, 6 May 2024 14:24:04 -0400 Subject: [PATCH 030/376] fix(replay): Remove View Sample Replay button (#70368) Removes view sample replay button from replay onboarding because we're not supposed to rely on Arcade inside Sentry. 
Relates to https://github.com/getsentry/team-replay/issues/433 and
https://github.com/getsentry/getsentry/pull/13875
---
 .../events/eventReplay/replayInlineOnboardingPanel.tsx | 7 -------
 .../feedback/feedbackItem/replayInlineCTAPanel.tsx     | 7 -------
 static/app/types/hooks.tsx                             | 1 -
 static/app/views/replays/list/replayOnboardingPanel.tsx | 5 -----
 4 files changed, 20 deletions(-)

diff --git a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx
index ec8224ef7d9069..6ad5223d2c19a8 100644
--- a/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx
+++ b/static/app/components/events/eventReplay/replayInlineOnboardingPanel.tsx
@@ -6,7 +6,6 @@ import {usePrompt} from 'sentry/actionCreators/prompts';
 import {Button} from 'sentry/components/button';
 import {DropdownMenu} from 'sentry/components/dropdownMenu';
 import {EventReplaySection} from 'sentry/components/events/eventReplay/eventReplaySection';
-import HookOrDefault from 'sentry/components/hookOrDefault';
 import platforms, {otherPlatform} from 'sentry/data/platforms';
 import {IconClose} from 'sentry/icons';
 import {t, tct} from 'sentry/locale';
@@ -23,11 +22,6 @@ type OnboardingCTAProps = {
   projectId: string;
 };

-const OnboardingCTAButton = HookOrDefault({
-  hookName: 'component:replay-onboarding-cta-button',
-  defaultComponent: null,
-});
-
 export default function ReplayInlineOnboardingPanel({
   platform,
   projectId,
@@ -64,7 +58,6 @@ export default function ReplayInlineOnboardingPanel({
         {t('Watch the errors and latency issues your users face')}
-        {!isScreenSmall && <OnboardingCTAButton />}
+  );
+}
+
 const HighlightContainer = styled(TreeContainer)<{columnCount: number}>`
   margin-top: 0;
   margin-bottom: ${space(2)};
diff --git a/static/app/components/events/highlights/highlightsSettingsForm.spec.tsx b/static/app/components/events/highlights/highlightsSettingsForm.spec.tsx
index a4f30b91627ebe..b3ddf8b249d291 100644
--- a/static/app/components/events/highlights/highlightsSettingsForm.spec.tsx
+++ b/static/app/components/events/highlights/highlightsSettingsForm.spec.tsx
@@ -4,6 +4,7 @@ import {ProjectFixture} from 'sentry-fixture/project';
 import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';

 import HighlightsSettingsForm from 'sentry/components/events/highlights/highlightsSettingsForm';
+import * as analytics from 'sentry/utils/analytics';

 describe('HighlightsSettingForm', function () {
   const organization = OrganizationFixture({features: ['event-tags-tree-ui']});
@@ -13,6 +14,7 @@ describe('HighlightsSettingForm', function () {
     browser: ['name', 'version'],
   };
   const project = ProjectFixture({highlightContext, highlightTags});
+  const analyticsSpy = jest.spyOn(analytics, 'trackAnalytics');

   beforeEach(async function () {
     MockApiClient.addMockResponse({
@@ -53,6 +55,10 @@ describe('HighlightsSettingForm', function () {
         data: {highlightTags: [...highlightTags, newTag]},
       })
     );
+    expect(analyticsSpy).toHaveBeenCalledWith(
+      'highlights.project_settings.updated_manually',
+      expect.anything()
+    );
   });

   it('should allow the Highlight Context field to mutate highlights', async function () {
diff --git a/static/app/components/events/highlights/highlightsSettingsForm.tsx b/static/app/components/events/highlights/highlightsSettingsForm.tsx
index f76203edc4121b..633bfb3d54ca0e 100644
--- a/static/app/components/events/highlights/highlightsSettingsForm.tsx
+++ b/static/app/components/events/highlights/highlightsSettingsForm.tsx
@@ -51,7 +51,7 @@
export default function HighlightsSettingsForm({ }), data => (updatedProject ? updatedProject : data) ); - trackAnalytics('project_settings.updated_highlights', {organization}); + trackAnalytics('highlights.project_settings.updated_manually', {organization}); addSuccessMessage(`Successfully updated highlights for '${project.name}'`); }, }; diff --git a/static/app/components/modals/navigateToExternalLinkModal.tsx b/static/app/components/modals/navigateToExternalLinkModal.tsx index d553fe1361c478..9a76f8678176fb 100644 --- a/static/app/components/modals/navigateToExternalLinkModal.tsx +++ b/static/app/components/modals/navigateToExternalLinkModal.tsx @@ -18,7 +18,7 @@ function NavigateToExternalLinkModal({Body, closeModal, Header, linkText}: Props

{t('Heads up')}

-

+

{t( "You're leaving Sentry and will be redirected to the following external website:" )} diff --git a/static/app/utils/analytics/issueAnalyticsEvents.tsx b/static/app/utils/analytics/issueAnalyticsEvents.tsx index fd7391a8269035..ebed6329810e4c 100644 --- a/static/app/utils/analytics/issueAnalyticsEvents.tsx +++ b/static/app/utils/analytics/issueAnalyticsEvents.tsx @@ -63,6 +63,16 @@ export type IssueEventParameters = { platform?: string; project_id?: string; }; + 'highlights.edit_modal.add_context_key': {}; + 'highlights.edit_modal.add_tag': {}; + 'highlights.edit_modal.cancel_clicked': {}; + 'highlights.edit_modal.remove_context_key': {}; + 'highlights.edit_modal.remove_tag': {}; + 'highlights.edit_modal.save_clicked': {}; + 'highlights.edit_modal.use_default_clicked': {}; + 'highlights.issue_details.edit_clicked': {}; + 'highlights.issue_details.view_all_clicked': {}; + 'highlights.project_settings.updated_manually': {}; 'integrations.integration_reinstall_clicked': { provider: string; }; @@ -275,6 +285,18 @@ export const issueEventMap: Record = { 'event_cause.docs_clicked': 'Event Cause Docs Clicked', 'event_cause.snoozed': 'Event Cause Snoozed', 'event_cause.dismissed': 'Event Cause Dismissed', + 'highlights.edit_modal.add_context_key': 'Highlights: Add Context in Edit Modal', + 'highlights.edit_modal.add_tag': 'Highlights: Add Tag in Edit Modal', + 'highlights.edit_modal.cancel_clicked': 'Highlights: Cancel from Edit Modal', + 'highlights.edit_modal.remove_context_key': 'Highlights: Remove Context in Edit Modal', + 'highlights.edit_modal.remove_tag': 'Highlights: Remove Tag in Edit Modal', + 'highlights.edit_modal.save_clicked': 'Highlights: Save from Edit Modal', + 'highlights.edit_modal.use_default_clicked': + 'Highlights: Defaults Applied from Edit Modal', + 'highlights.issue_details.edit_clicked': 'Highlights: Open Edit Modal', + 'highlights.issue_details.view_all_clicked': 'Highlights: View All Clicked', + 'highlights.project_settings.updated_manually': + 'Highlights: Updated Manually from Settings', 'issue_details.escalating_feedback_received': 'Issue Details: Escalating Feedback Received', 'issue_details.escalating_issues_banner_feedback_received': From 0c8f916ecd68e06ce8001b2ae1e86ec5d86c582c Mon Sep 17 00:00:00 2001 From: Mark Story Date: Tue, 7 May 2024 16:49:43 -0400 Subject: [PATCH 115/376] chore(database) Drop tables for project and team avatar (#68616) These tables no longer have django models, and can be deleted. 
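
As a quick local sanity check (a minimal sketch, not part of this change: the
table names come from the diff below, and `to_regclass` is the Postgres helper
that returns NULL for relations that no longer exist):

```python
# Minimal, illustrative check that both avatar tables are gone after
# running the migration against a local database.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute(
        "SELECT to_regclass('sentry_projectavatar'), to_regclass('sentry_teamavatar')"
    )
    assert cursor.fetchone() == (None, None)  # both tables dropped
```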
--- migrations_lockfile.txt | 2 +- src/sentry/db/router.py | 2 + .../0714_drop_project_team_avatar.py | 42 +++ .../test_0632_apitoken_backfill_last_chars.py | 36 --- ...est_0654_rename_priority_sort_to_trends.py | 35 --- ...et_query_rename_priority_sort_to_trends.py | 65 ----- ...test_0692_backfill_group_priority_again.py | 245 ------------------ ...ackfill_group_attributes_to_self_hosted.py | 58 ----- 8 files changed, 45 insertions(+), 440 deletions(-) create mode 100644 src/sentry/migrations/0714_drop_project_team_avatar.py delete mode 100644 tests/sentry/migrations/test_0632_apitoken_backfill_last_chars.py delete mode 100644 tests/sentry/migrations/test_0654_rename_priority_sort_to_trends.py delete mode 100644 tests/sentry/migrations/test_0675_dashboard_widget_query_rename_priority_sort_to_trends.py delete mode 100644 tests/sentry/migrations/test_0692_backfill_group_priority_again.py delete mode 100644 tests/sentry/migrations/test_0711_backfill_group_attributes_to_self_hosted.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 35539953164af1..bbb82d29c729c1 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0713_team_remove_actor_state +sentry: 0714_drop_project_team_avatar social_auth: 0002_default_auto_field diff --git a/src/sentry/db/router.py b/src/sentry/db/router.py index 670e216cf97ac8..fd82a5e058d1e7 100644 --- a/src/sentry/db/router.py +++ b/src/sentry/db/router.py @@ -68,6 +68,8 @@ class SiloRouter: """ historical_silo_assignments = { + "sentry_teamavatar": SiloMode.REGION, + "sentry_projectavatar": SiloMode.REGION, "sentry_pagerdutyservice": SiloMode.REGION, "sentry_notificationsetting": SiloMode.CONTROL, } diff --git a/src/sentry/migrations/0714_drop_project_team_avatar.py b/src/sentry/migrations/0714_drop_project_team_avatar.py new file mode 100644 index 00000000000000..73e0438f7c58bb --- /dev/null +++ b/src/sentry/migrations/0714_drop_project_team_avatar.py @@ -0,0 +1,42 @@ +# Generated by Django 5.0.3 on 2024-04-10 15:44 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+    # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+    is_post_deployment = False
+
+    dependencies = [
+        ("sentry", "0713_team_remove_actor_state"),
+    ]
+
+    operations = [
+        migrations.SeparateDatabaseAndState(
+            database_operations=[
+                migrations.RunSQL(
+                    sql='DROP TABLE IF EXISTS "sentry_projectavatar"',
+                    reverse_sql="CREATE TABLE sentry_projectavatar (id BIGSERIAL)",
+                    hints={"tables": ["sentry_projectavatar"]},
+                ),
+                migrations.RunSQL(
+                    sql='DROP TABLE IF EXISTS "sentry_teamavatar"',
+                    reverse_sql="CREATE TABLE sentry_teamavatar (id BIGSERIAL)",
+                    hints={"tables": ["sentry_teamavatar"]},
+                ),
+            ]
+        )
+    ]
diff --git a/tests/sentry/migrations/test_0632_apitoken_backfill_last_chars.py b/tests/sentry/migrations/test_0632_apitoken_backfill_last_chars.py
deleted file mode 100644
index 2c204a38d3bb80..00000000000000
--- a/tests/sentry/migrations/test_0632_apitoken_backfill_last_chars.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from django.db import router
-
-from sentry.silo.safety import unguarded_write
-from sentry.testutils.cases import TestMigrations
-from sentry.testutils.helpers import override_options
-from sentry.testutils.silo import no_silo_test
-
-
-@no_silo_test
-class LastCharsApiTokenMigrationTest(TestMigrations):
-    migrate_from = "0631_add_priority_columns_to_groupedmessage"
-    migrate_to = "0632_apitoken_backfill_last_chars"
-    connection = "control"
-
-    def setUp(self):
-        from sentry.models.apitoken import ApiToken
-
-        with unguarded_write(using=router.db_for_write(ApiToken)):
-            super().setUp()
-
-    @override_options({"apitoken.auto-add-last-chars": False})
-    def setup_before_migration(self, apps):
-        ApiToken = apps.get_model("sentry", "ApiToken")
-
-        self.api_token = ApiToken.objects.create(
-            user_id=self.user.id,
-            refresh_token=None,
-        )
-        self.api_token.save()
-
-        assert self.api_token.token_last_characters is None
-
-    def test(self):
-        self.api_token.refresh_from_db()
-        assert self.api_token.name is None
-        assert self.api_token.token_last_characters == self.api_token.token[-4:]
diff --git a/tests/sentry/migrations/test_0654_rename_priority_sort_to_trends.py b/tests/sentry/migrations/test_0654_rename_priority_sort_to_trends.py
deleted file mode 100644
index 88a8529f2ebba6..00000000000000
--- a/tests/sentry/migrations/test_0654_rename_priority_sort_to_trends.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import pytest
-
-from sentry.models.savedsearch import SavedSearch
-from sentry.testutils.cases import TestMigrations
-
-
-@pytest.mark.skip("Migration is no longer runnable.
Retain until migration is removed.") -class RenamePrioritySortToTrendsTest(TestMigrations): - migrate_from = "0653_apitoken_add_token_type" - migrate_to = "0654_rename_priority_sort_to_trends" - - def setup_before_migration(self, apps): - self.priority_searches = [] - for i in range(3): - self.priority_searches.append( - SavedSearch.objects.create( - organization=self.organization, query="is:unresolved", sort="priority" - ) - ) - - self.other_searches = [ - SavedSearch.objects.create(organization=self.organization, query="is:unresolved"), - SavedSearch.objects.create( - organization=self.organization, query="is:unresolved", sort="date" - ), - ] - - def test(self): - for search in self.priority_searches: - search.refresh_from_db() - assert search.sort == "trends" - - for search in self.other_searches: - search.refresh_from_db() - assert search.sort == "date" diff --git a/tests/sentry/migrations/test_0675_dashboard_widget_query_rename_priority_sort_to_trends.py b/tests/sentry/migrations/test_0675_dashboard_widget_query_rename_priority_sort_to_trends.py deleted file mode 100644 index 12c076e4c715e7..00000000000000 --- a/tests/sentry/migrations/test_0675_dashboard_widget_query_rename_priority_sort_to_trends.py +++ /dev/null @@ -1,65 +0,0 @@ -import pytest - -from sentry.models.dashboard_widget import DashboardWidgetDisplayTypes -from sentry.testutils.cases import TestMigrations - - -@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.") -class RenamePrioritySortToTrendsTest(TestMigrations): - migrate_from = "0674_monitor_clear_missed_timeout_as_error" - migrate_to = "0675_dashboard_widget_query_rename_priority_sort_to_trends" - - def setup_before_migration(self, apps): - Dashboard = apps.get_model("sentry", "Dashboard") - DashboardWidget = apps.get_model("sentry", "DashboardWidget") - DashboardWidgetQuery = apps.get_model("sentry", "DashboardWidgetQuery") - - self.dashboard = Dashboard.objects.create( - organization_id=self.organization.id, - title="Dashboard", - created_by_id=self.user.id, - ) - - self.queries_with_priority_sort = [] - self.other_queries = [] - - for i, title in enumerate(["Widget 1", "Widget 2", "Widget 3"]): - widget = DashboardWidget.objects.create( - dashboard=self.dashboard, - order=i, - title=title, - display_type=DashboardWidgetDisplayTypes.TABLE, - ) - widget_query = DashboardWidgetQuery.objects.create( - widget=widget, - name="query", - fields=["assignee", "issue", "title"], - order=1, - orderby="priority", - ) - self.queries_with_priority_sort.append(widget_query) - - for i, title in enumerate(["Widget 1", "Widget 2", "Widget 3"]): - widget = DashboardWidget.objects.create( - dashboard=self.dashboard, - order=i + 3, - title=title, - display_type=DashboardWidgetDisplayTypes.TABLE, - ) - widget_query = DashboardWidgetQuery.objects.create( - widget=widget, - name="query", - fields=["assignee", "issue", "title"], - order=1, - orderby="last_seen", - ) - self.other_queries.append(widget_query) - - def test(self): - for query in self.queries_with_priority_sort: - query.refresh_from_db() - assert query.orderby == "trends" - - for query in self.other_queries: - query.refresh_from_db() - assert query.orderby == "last_seen" diff --git a/tests/sentry/migrations/test_0692_backfill_group_priority_again.py b/tests/sentry/migrations/test_0692_backfill_group_priority_again.py deleted file mode 100644 index facbbd7b658420..00000000000000 --- a/tests/sentry/migrations/test_0692_backfill_group_priority_again.py +++ /dev/null @@ -1,245 +0,0 @@ -import 
logging - -import pytest -from django.conf import settings - -from sentry.issues.grouptype import ( - ErrorGroupType, - FeedbackGroup, - MonitorIncidentType, - PerformanceConsecutiveHTTPQueriesGroupType, - PerformanceP95EndpointRegressionGroupType, - ReplayDeadClickType, -) -from sentry.models.group import GroupStatus -from sentry.models.project import Project -from sentry.testutils.cases import TestMigrations -from sentry.types.group import GroupSubStatus -from sentry.utils import redis - - -class PriorityLevel: - LOW = 25 - MEDIUM = 50 - HIGH = 75 - - -@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.") -class BackfillGroupPriority(TestMigrations): - migrate_from = "0691_remove_project_team_avatar_model" - migrate_to = "0692_backfill_group_priority_again" - - def setup_initial_state(self): - self._create_groups_to_backfill(self.project) - redis_cluster = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) - redis_cluster.set("priority_backfill-2.last_processed_id", self.cache_group_id) - - def test(self): - for groups, expected_priority in ( - (self.high_priority_groups, PriorityLevel.HIGH), - (self.medium_priority_groups, PriorityLevel.MEDIUM), - (self.low_priority_groups, PriorityLevel.LOW), - ): - for desc, group in groups: - group.refresh_from_db() - if desc == "skip me": - # these groups should not have been backfilled because the group id is less than the redis cached ID - assert not group.priority - continue - - assert group.priority == expected_priority, desc - if not desc.startswith("existing"): - assert group.data.get("metadata")["initial_priority"] == expected_priority - - def _create_groups_to_backfill(self, project: Project) -> None: - skipped_group_count = 3 - data = [ - # three groups to skip to test the redis cache - ( - "skip me", - {"type": FeedbackGroup.type_id}, - PriorityLevel.MEDIUM, - ), - ( - "skip me", - {"type": FeedbackGroup.type_id}, - PriorityLevel.MEDIUM, - ), - ( - "skip me", - {"type": FeedbackGroup.type_id}, - PriorityLevel.MEDIUM, - ), - # groups with priority remain unchanged, even if escalating. 
- ( - "existing low priority", - { - "priority": PriorityLevel.LOW, - "data": {"metadata": {"initial_priority": PriorityLevel.LOW}}, - }, - PriorityLevel.LOW, - ), - ( - "existing low priority with escalation", - { - "priority": PriorityLevel.LOW, - "status": GroupStatus.UNRESOLVED, - "substatus": GroupSubStatus.ESCALATING, - "data": {"metadata": {"initial_priority": PriorityLevel.LOW}}, - }, - PriorityLevel.LOW, - ), - # escalating groups are high priority, except for Replay and Feedback issues - ( - "escalating error group", - { - "status": GroupStatus.UNRESOLVED, - "substatus": GroupSubStatus.ESCALATING, - "type": ErrorGroupType.type_id, - "level": logging.INFO, # this level should not matter - }, - PriorityLevel.HIGH, - ), - ( - "escalating performance group", - { - "status": GroupStatus.UNRESOLVED, - "substatus": GroupSubStatus.ESCALATING, - "type": PerformanceConsecutiveHTTPQueriesGroupType.type_id, - }, - PriorityLevel.HIGH, - ), - ( - "escalating cron group", - { - "status": GroupStatus.UNRESOLVED, - "substatus": GroupSubStatus.ESCALATING, - "type": MonitorIncidentType.type_id, - }, - PriorityLevel.HIGH, - ), - ( - "escalating replay group", - { - "status": GroupStatus.UNRESOLVED, - "substatus": GroupSubStatus.ESCALATING, - "type": ReplayDeadClickType.type_id, - }, - PriorityLevel.MEDIUM, - ), - ( - "escalating feedback group", - { - "status": GroupStatus.UNRESOLVED, - "substatus": GroupSubStatus.ESCALATING, - "type": FeedbackGroup.type_id, - }, - PriorityLevel.MEDIUM, - ), - # error groups respect log levels if present - ( - "error group with log level INFO", - { - "type": ErrorGroupType.type_id, - "level": logging.INFO, - }, - PriorityLevel.LOW, - ), - ( - "error group with log level DEBUG", - { - "type": ErrorGroupType.type_id, - "level": logging.DEBUG, - }, - PriorityLevel.LOW, - ), - ( - "error group with log level WARNING", - { - "type": ErrorGroupType.type_id, - "level": logging.WARNING, - }, - PriorityLevel.MEDIUM, - ), - ( - "error group with log level ERROR", - { - "type": ErrorGroupType.type_id, - "level": logging.ERROR, - }, - PriorityLevel.HIGH, - ), - ( - "error group with log level FATAL", - { - "type": ErrorGroupType.type_id, - "level": logging.FATAL, - }, - PriorityLevel.HIGH, - ), - # cron groups are medium priority if they are warnings, high priority otherwise - ( - "cron group with log level WARNING", - { - "type": MonitorIncidentType.type_id, - "level": logging.WARNING, - }, - PriorityLevel.MEDIUM, - ), - ( - "cron group with log level ERROR", - { - "substatus": GroupSubStatus.ONGOING, - "type": MonitorIncidentType.type_id, - "level": logging.ERROR, - }, - PriorityLevel.HIGH, - ), - ( - "cron group with log level DEBUG", - { - "type": MonitorIncidentType.type_id, - "level": logging.DEBUG, - }, - PriorityLevel.HIGH, - ), - # statistical detectors are medium priority - ( - "statistical detector group", - { - "level": logging.ERROR, - "type": PerformanceP95EndpointRegressionGroupType.type_id, - }, - PriorityLevel.MEDIUM, - ), - # performance issues are otherwise low priority - ( - "performance group", - { - "level": logging.ERROR, - "type": PerformanceConsecutiveHTTPQueriesGroupType.type_id, - }, - PriorityLevel.LOW, - ), - ] - - self.low_priority_groups = [] - self.medium_priority_groups = [] - self.high_priority_groups = [] - - for desc, group_data, expected_priority in data: - group = self.create_group(project, **group_data) # type: ignore[arg-type] - - if desc == "skip me": - skipped_group_count -= 1 - if skipped_group_count == 0: - self.cache_group_id = 
group.id - - if expected_priority == PriorityLevel.LOW: - self.low_priority_groups.append((desc, group)) - - elif expected_priority == PriorityLevel.MEDIUM: - self.medium_priority_groups.append((desc, group)) - - elif expected_priority == PriorityLevel.HIGH: - self.high_priority_groups.append((desc, group)) diff --git a/tests/sentry/migrations/test_0711_backfill_group_attributes_to_self_hosted.py b/tests/sentry/migrations/test_0711_backfill_group_attributes_to_self_hosted.py deleted file mode 100644 index 5e7ba55c847ecd..00000000000000 --- a/tests/sentry/migrations/test_0711_backfill_group_attributes_to_self_hosted.py +++ /dev/null @@ -1,58 +0,0 @@ -from sentry_sdk import Hub -from snuba_sdk.legacy import json_to_snql - -from sentry.testutils.cases import SnubaTestCase, TestMigrations -from sentry.utils import json, redis -from sentry.utils.snuba import _snql_query - - -def run_test(expected_groups): - project = expected_groups[0].project - json_body = { - "selected_columns": [ - "group_id", - ], - "offset": 0, - "limit": 100, - "project": [project.id], - "dataset": "group_attributes", - "order_by": ["group_id"], - "consistent": True, - "tenant_ids": { - "referrer": "group_attributes", - "organization_id": project.organization_id, - }, - } - request = json_to_snql(json_body, "group_attributes") - request.validate() - identity = lambda x: x - resp = _snql_query(((request, identity, identity), Hub(Hub.current), {}, "test_api"))[0] - assert resp.status == 200 - data = json.loads(resp.data)["data"] - assert {g.id for g in expected_groups} == {d["group_id"] for d in data} - - -class TestBackfillGroupAttributes(SnubaTestCase, TestMigrations): - migrate_from = "0710_grouphistory_remove_actor_state" - migrate_to = "0711_backfill_group_attributes_to_self_hosted" - - def setup_initial_state(self): - self.group = self.create_group() - self.group_2 = self.create_group() - - def test(self): - run_test([self.group, self.group_2]) - - -class TestBackfillGroupAttributesRetry(SnubaTestCase, TestMigrations): - migrate_from = "0710_grouphistory_remove_actor_state" - migrate_to = "0711_backfill_group_attributes_to_self_hosted" - - def setup_initial_state(self): - self.group = self.create_group() - self.group_2 = self.create_group() - redis_client = redis.redis_clusters.get("default") - redis_client.set("backfill_group_attributes_to_snuba_progress_again", self.group.id) - - def test_restart(self): - run_test([self.group_2]) From 2dbccba6a52303cc7dc0dfd7e71a42615756bf48 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Tue, 7 May 2024 17:13:26 -0400 Subject: [PATCH 116/376] ref: upgrade djangorestframework-stubs (#70461) upgrading this has no differences in ignored mypy errors --- requirements-dev-frozen.txt | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 464954c41bcbbf..f9f3c08de1d684 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -179,7 +179,7 @@ sentry-arroyo==2.16.5 sentry-cli==2.16.0 sentry-devenv==1.6.2 sentry-forked-django-stubs==5.0.0.post3 -sentry-forked-djangorestframework-stubs==3.14.5.post1 +sentry-forked-djangorestframework-stubs==3.15.0.post1 sentry-kafka-schemas==0.1.79 sentry-ophio==0.2.7 sentry-redis-tools==0.1.7 diff --git a/requirements-dev.txt b/requirements-dev.txt index 8936121bbf6673..da2ddb786e2802 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -35,7 +35,7 @@ packaging>=21.3 
 # for type checking
 sentry-forked-django-stubs>=5.0.0.post3
-sentry-forked-djangorestframework-stubs>=3.14.5.post1
+sentry-forked-djangorestframework-stubs>=3.15.0.post1
 lxml-stubs
 msgpack-types>=0.2.0
 mypy>=1.10

From 3a7e9b3de6936a8d66e01c7706b531df91ffefe9 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Tue, 7 May 2024 17:23:31 -0400
Subject: [PATCH 117/376] fix(crons): Teams -> Owners (#70464)

---
 static/app/views/monitors/components/ownerFilter.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/static/app/views/monitors/components/ownerFilter.tsx b/static/app/views/monitors/components/ownerFilter.tsx
index 701da7c30336fc..7b847ac203342a 100644
--- a/static/app/views/monitors/components/ownerFilter.tsx
+++ b/static/app/views/monitors/components/ownerFilter.tsx
@@ -36,7 +36,7 @@ export function OwnerFilter({selectedOwners, onChangeFilter}: OwnerFilterProps)
       clearable
       searchable
       loading={fetching}
-      menuTitle={t('Filter teams')}
+      menuTitle={t('Filter owners')}
       options={[{label: t('Suggested'), options: suggestedOptions}, ...options]}
       value={selectedOwners}
       onSearch={value => {

From 78d2f285e80b2f41122a77834f57471f42872456 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Tue, 7 May 2024 17:23:38 -0400
Subject: [PATCH 118/376] ref(routes): Prioritize customer domains route (#70451)

In a936fb00da922a79943d70d0272e1d6505d6ef95 we introduced a new withOrgPath
prop to the Route component. This caused the route to have the same behaviour
as doing

```tsx
{USING_CUSTOMER_DOMAIN && (
  <Route
    path="/path/"
    component={withDomainRequired(make(() => import('sentry/views/someView')))}
  />
)}
<Route
  path="/organizations/:orgId/path/"
  component={withDomainRedirect(make(() => import('sentry/views/someView')))}
/>
```

However the introduced logic generated the routes in the opposite order, where
the org slug version would get priority. For some future changes this becomes
important so I am bringing back this matching behaviour.

---
 static/app/components/route.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/static/app/components/route.tsx b/static/app/components/route.tsx
index 1826e459cc2307..fbd5d115664cc8 100644
--- a/static/app/components/route.tsx
+++ b/static/app/components/route.tsx
@@ -80,7 +80,7 @@ Route.createRouteFromReactElement = function (element: RouteElement): PlainRoute
   ];

   if (USING_CUSTOMER_DOMAIN) {
-    childRoutes.push({
+    childRoutes.unshift({
       ...createRouteFromReactElement(element),
       path,
       component: withDomainRequired(component ?? NoOp),

From 874db7eb9fc96b7f52862d926783dac025e402eb Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Tue, 7 May 2024 14:24:19 -0700
Subject: [PATCH 119/376] ref(seer-grouping): Send group hash to Seer (#70244)

This adds the group hash to the outgoing payload sent to Seer. Once this is
merged, it will be safe to remove the Seer logic handling an incoming group
id, since we'll now be able to rely on the group hash being present.

Taken together, this PR and the ones leading up to it (links below) will mean
that on the Sentry side we'll be sending in the request and handling in the
response both hash and group id (and transforming hashes in the response into
group ids if hashes are all that's sent). Once Seer is adjusted to only deal
in hashes, we can then remove the sending and handling of group ids from
Sentry and the transition will be complete.
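
For concreteness, a rough sketch of the transitional request body (the field
names mirror the diff below; the values are illustrative):

```python
# Illustrative shape of the transitional Seer similar-issues request:
# both identifiers are sent until Seer deals only in hashes, at which
# point `group_id` can be dropped.
similar_issues_params = {
    "group_id": group.id,
    "group_hash": latest_event.get_primary_hash(),  # added in this PR
    "project_id": group.project.id,
    "stacktrace": stacktrace_string,
    "message": group.message,
}
```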
Other PRs which are part of the group-id-to-group-hash switch: - https://github.com/getsentry/sentry/pull/70005, https://github.com/getsentry/sentry/pull/70236, and https://github.com/getsentry/sentry/pull/70237 - various small fixes and tweaks - https://github.com/getsentry/sentry/pull/70070 and https://github.com/getsentry/sentry/pull/70238 - updates to associated types - https://github.com/getsentry/sentry/pull/70240 - automatic conversion from hash to group id when handling Seer similar group data --- .../group_similar_issues_embeddings.py | 2 ++ .../test_group_similar_issues_embeddings.py | 20 +++++++++++++++++++ tests/sentry/seer/test_utils.py | 4 ++++ 3 files changed, 26 insertions(+) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index e197c55dd31de2..9e9cffd6960a5e 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -162,6 +162,7 @@ def get(self, request: Request, group) -> Response: similar_issues_params: SimilarIssuesEmbeddingsRequest = { "group_id": group.id, + "group_hash": latest_event.get_primary_hash(), "project_id": group.project.id, "stacktrace": stacktrace_string, "message": group.message, @@ -183,6 +184,7 @@ def get(self, request: Request, group) -> Response: organization_id=group.organization.id, project_id=group.project.id, group_id=group.id, + group_hash=latest_event.get_primary_hash(), count_over_threshold=len( [ result.stacktrace_distance diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 458274c1732e30..f5e2c374a5775c 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -662,12 +662,14 @@ def test_get_formatted_results(self): similar_issue_data_1 = SeerSimilarIssueData( message_distance=0.05, parent_group_id=NonNone(self.similar_event.group_id), + parent_group_hash=NonNone(self.similar_event.get_primary_hash()), should_group=True, stacktrace_distance=0.01, ) similar_issue_data_2 = SeerSimilarIssueData( message_distance=0.49, parent_group_id=NonNone(event_from_second_similar_group.group_id), + parent_group_hash=NonNone(event_from_second_similar_group.get_primary_hash()), should_group=False, stacktrace_distance=0.23, ) @@ -718,6 +720,7 @@ def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request): expected_seer_request_params = { "group_id": self.group.id, + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -764,6 +767,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request): expected_seer_request_params = { "group_id": self.group.id, + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -812,6 +816,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request) expected_seer_request_params = { "group_id": self.group.id, + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -843,18 +848,21 @@ def test_multiple(self, mock_seer_request, mock_record): { "message_distance": 0.05, "parent_group_id": 
NonNone(self.similar_event.group_id), + "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.002, # Over threshold }, { "message_distance": 0.05, "parent_group_id": NonNone(over_threshold_group_event.group_id), + "parent_group_hash": NonNone(over_threshold_group_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.002, # Over threshold }, { "message_distance": 0.05, "parent_group_id": NonNone(under_threshold_group_event.group_id), + "parent_group_hash": NonNone(under_threshold_group_event.get_primary_hash()), "should_group": False, "stacktrace_distance": 0.05, # Under threshold }, @@ -883,6 +891,7 @@ def test_multiple(self, mock_seer_request, mock_record): organization_id=self.org.id, project_id=self.project.id, group_id=self.group.id, + group_hash=NonNone(self.event.get_primary_hash()), count_over_threshold=2, user_id=self.user.id, ) @@ -898,6 +907,7 @@ def test_incomplete_return_data(self, mock_seer_request, mock_logger): { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), + "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, }, @@ -917,6 +927,7 @@ def test_incomplete_return_data(self, mock_seer_request, mock_logger): extra={ "request_params": { "group_id": NonNone(self.event.group_id), + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -947,12 +958,14 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), + "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, }, { "message_distance": 0.05, "parent_group_id": 1121201212312012, # too high to be real + "parent_group_hash": "not a real hash", "should_group": True, "stacktrace_distance": 0.01, }, @@ -966,6 +979,7 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): extra={ "request_params": { "group_id": NonNone(self.event.group_id), + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -973,6 +987,7 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): "raw_similar_issue_data": { "message_distance": 0.05, "parent_group_id": 1121201212312012, + "parent_group_hash": "not a real hash", "should_group": True, "stacktrace_distance": 0.01, }, @@ -995,6 +1010,7 @@ def test_empty_seer_return(self, mock_seer_request, mock_record): organization_id=self.org.id, project_id=self.project.id, group_id=self.group.id, + group_hash=NonNone(self.event.get_primary_hash()), count_over_threshold=0, user_id=self.user.id, ) @@ -1063,6 +1079,7 @@ def test_no_optional_params(self, mock_seer_request): { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), + "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, } @@ -1083,6 +1100,7 @@ def test_no_optional_params(self, mock_seer_request): body=json.dumps( { "group_id": self.group.id, + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -1106,6 +1124,7 @@ def test_no_optional_params(self, mock_seer_request): body=json.dumps( { 
"group_id": self.group.id, + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -1130,6 +1149,7 @@ def test_no_optional_params(self, mock_seer_request): body=json.dumps( { "group_id": self.group.id, + "group_hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, diff --git a/tests/sentry/seer/test_utils.py b/tests/sentry/seer/test_utils.py index cb6be084c25773..d0f98618b8cbb6 100644 --- a/tests/sentry/seer/test_utils.py +++ b/tests/sentry/seer/test_utils.py @@ -80,6 +80,7 @@ def test_simple_similar_issues_embeddings_only_group_id_returned( params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), + "group_hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", @@ -106,6 +107,7 @@ def test_simple_similar_issues_embeddings_only_hash_returned(mock_seer_request, params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), + "group_hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", @@ -142,6 +144,7 @@ def test_simple_similar_issues_embeddings_both_returned(mock_seer_request, defau params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), + "group_hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", @@ -160,6 +163,7 @@ def test_empty_similar_issues_embeddings(mock_seer_request, default_project): params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), + "group_hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", From 80ed536b9b06f01e1b1320d91aacd2dad61d593e Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Tue, 7 May 2024 17:25:18 -0400 Subject: [PATCH 120/376] fix(trace-explorer): Breakdown by project and sdk (#70463) For full stack projects that use a single project for all the data, we need to break it down further using sdk. --- .../api/endpoints/organization_traces.py | 9 ++ src/sentry/testutils/cases.py | 3 + .../api/endpoints/test_organization_traces.py | 108 +++++++++++++++++- 3 files changed, 118 insertions(+), 2 deletions(-) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index 2478c5f11436a4..0cf68ac1655a97 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -35,6 +35,7 @@ class TraceInterval(TypedDict): project: str | None + sdkName: str | None start: int end: int kind: Literal["project", "missing", "other"] @@ -637,6 +638,8 @@ def process_final_results( # mapping of trace id to a tuple of project slug + transaction name traces_names: MutableMapping[str, tuple[str, str]] = {} for row in traces_breakdown_projects_results["data"]: + if row["trace"] in traces_names: + continue # The underlying column is a Nullable(UInt64) but we write a default of 0 to it. # So make sure to handle both in case something changes. 
if not row["parent_span"] or int(row["parent_span"], 16) == 0: @@ -706,6 +709,7 @@ def get_traces_breakdown_projects_query( selected_columns=[ "trace", "project", + "sdk.name", "parent_span", "transaction", "precise.start_ts", @@ -748,6 +752,7 @@ def get_traces_breakdown_categories_query( "project", "transaction", "span.category", + "sdk.name", "precise.start_ts", "precise.finish_ts", ], @@ -986,6 +991,7 @@ def should_merge(interval_a, interval_b): return ( interval_a["end"] >= interval_b["start"] and interval_a["project"] == interval_b["project"] + and interval_a["sdkName"] == interval_b["sdkName"] and interval_a["opCategory"] == interval_b["opCategory"] ) @@ -1014,6 +1020,7 @@ def breakdown_push(trace, interval): { "kind": "missing", "project": None, + "sdkName": None, "opCategory": None, "start": last_interval["end"], "end": interval["start"], @@ -1069,6 +1076,7 @@ def stack_clear(trace, until=None): cur: TraceInterval = { "kind": "project", "project": row["project"], + "sdkName": row["sdk.name"], "opCategory": row.get("span.category"), "start": span_start, "end": span_end, @@ -1092,6 +1100,7 @@ def stack_clear(trace, until=None): other: TraceInterval = { "kind": "other", "project": None, + "sdkName": None, "opCategory": None, "start": trace_range["start"], "end": trace_range["end"], diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 3ab84e50879ff3..3727ffde44d30f 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -1487,6 +1487,7 @@ def store_segment( measurements: Mapping[str, int | float] | None = None, timestamp: datetime | None = None, store_metrics_summary: Mapping[str, Sequence[Mapping[str, Any]]] | None = None, + sdk_name: str | None = None, ): if span_id is None: span_id = self._random_span_id() @@ -1520,6 +1521,8 @@ def store_segment( payload["_metrics_summary"] = store_metrics_summary if parent_span_id: payload["parent_span_id"] = parent_span_id + if sdk_name is not None: + payload["sentry_tags"]["sdk.name"] = sdk_name self.store_span(payload) diff --git a/tests/sentry/api/endpoints/test_organization_traces.py b/tests/sentry/api/endpoints/test_organization_traces.py index 2478e99ded3f81..35b1e18af74942 100644 --- a/tests/sentry/api/endpoints/test_organization_traces.py +++ b/tests/sentry/api/endpoints/test_organization_traces.py @@ -76,7 +76,7 @@ def create_mock_traces(self): project_2 = self.create_project() # Hack: ensure that no span ids with leading 0s are generated for the test - span_ids = ["1" + uuid4().hex[:15] for _ in range(11)] + span_ids = ["1" + uuid4().hex[:15] for _ in range(12)] tags = ["", "bar", "bar", "baz", "", "bar", "baz"] timestamps = [] @@ -91,6 +91,7 @@ def create_mock_traces(self): transaction="foo", duration=60_100, exclusive_time=60_100, + sdk_name="sentry.javascript.node", ) for i in range(1, 4): timestamps.append(before_now(days=0, minutes=9, seconds=45 - i).replace(microsecond=0)) @@ -105,6 +106,7 @@ def create_mock_traces(self): duration=30_000 + i, exclusive_time=30_000 + i, tags={"foo": tags[i]}, + sdk_name="sentry.javascript.node", ) trace_id_2 = uuid4().hex @@ -119,6 +121,7 @@ def create_mock_traces(self): transaction="bar", duration=90_123, exclusive_time=90_123, + sdk_name="sentry.javascript.node", ) for i in range(5, 7): timestamps.append(before_now(days=0, minutes=19, seconds=55 - i).replace(microsecond=0)) @@ -133,6 +136,7 @@ def create_mock_traces(self): duration=20_000 + i, exclusive_time=20_000 + i, tags={"foo": tags[i]}, + sdk_name="sentry.javascript.node", ) 
timestamps.append(before_now(days=0, minutes=19, seconds=59).replace(microsecond=0)) @@ -215,6 +219,21 @@ def create_mock_traces(self): } ] }, + sdk_name="sentry.javascript.remix", + ) + + timestamps.append(before_now(days=0, minutes=19, seconds=50).replace(microsecond=0)) + self.double_write_segment( + project_id=project_1.id, + trace_id=trace_id_3, + transaction_id=uuid4().hex, + span_id=span_ids[11], + parent_span_id=span_ids[10], + timestamp=timestamps[-1], + transaction="quz", + duration=10_000, + tags={"foo": "quz"}, + sdk_name="sentry.javascript.node", ) error_data = load_data("javascript", timestamp=timestamps[0]) @@ -438,6 +457,7 @@ def test_matching_tag(self): { "project": project_1.slug, "opCategory": None, + "sdkName": "sentry.javascript.node", "start": int(timestamps[0].timestamp() * 1000), "end": int(timestamps[0].timestamp() * 1000) + 60_100, "kind": "project", @@ -445,6 +465,7 @@ def test_matching_tag(self): { "project": project_2.slug, "opCategory": None, + "sdkName": "sentry.javascript.node", "start": int(timestamps[1].timestamp() * 1000), "end": int(timestamps[3].timestamp() * 1000) + 30_003, "kind": "project", @@ -489,6 +510,7 @@ def test_matching_tag(self): { "project": project_1.slug, "opCategory": None, + "sdkName": "sentry.javascript.node", "start": int(timestamps[4].timestamp() * 1000), "end": int(timestamps[4].timestamp() * 1000) + 90_123, "kind": "project", @@ -496,6 +518,7 @@ def test_matching_tag(self): { "project": project_2.slug, "opCategory": None, + "sdkName": "sentry.javascript.node", "start": int(timestamps[5].timestamp() * 1000), "end": int(timestamps[6].timestamp() * 1000) + 20_006, "kind": "project", @@ -583,6 +606,7 @@ def test_matching_tag_breakdown_with_category(self): { "project": project_1.slug, "opCategory": None, + "sdkName": "sentry.javascript.node", "start": int(timestamps[4].timestamp() * 1000), "end": int(timestamps[4].timestamp() * 1000) + 90_123, "kind": "project", @@ -590,6 +614,7 @@ def test_matching_tag_breakdown_with_category(self): { "project": project_1.slug, "opCategory": "http", + "sdkName": "", "start": int(timestamps[7].timestamp() * 1000), "end": int(timestamps[7].timestamp() * 1000) + 1_000, "kind": "project", @@ -597,6 +622,7 @@ def test_matching_tag_breakdown_with_category(self): { "project": project_2.slug, "opCategory": None, + "sdkName": "sentry.javascript.node", "start": int(timestamps[5].timestamp() * 1000), "end": int(timestamps[6].timestamp() * 1000) + 20_006, "kind": "project", @@ -604,6 +630,7 @@ def test_matching_tag_breakdown_with_category(self): { "project": project_1.slug, "opCategory": "db", + "sdkName": "", "start": int(timestamps[8].timestamp() * 1000), "end": int(timestamps[8].timestamp() * 1000) + 3_000, "kind": "project", @@ -677,7 +704,7 @@ def test_matching_tag_metrics(self): "trace": trace_id_3, "numErrors": 0, "numOccurrences": 0, - "numSpans": 1, + "numSpans": 2, "project": project_1.slug, "name": "qux", "duration": 40_000, @@ -687,10 +714,19 @@ def test_matching_tag_metrics(self): { "project": project_1.slug, "opCategory": None, + "sdkName": "sentry.javascript.remix", "start": int(timestamps[10].timestamp() * 1000), "end": int(timestamps[10].timestamp() * 1000) + 40_000, "kind": "project", }, + { + "project": project_1.slug, + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": int(timestamps[11].timestamp() * 1000), + "end": int(timestamps[11].timestamp() * 1000) + 10_000, + "kind": "project", + }, ], "spans": [ { @@ -761,6 +797,7 @@ def 
test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -773,6 +810,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 100, "kind": "project", @@ -786,6 +824,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -793,6 +832,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.025, "precise.finish_ts": 0.075, @@ -805,6 +845,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 100, "kind": "project", @@ -812,6 +853,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 25, "end": 75, "kind": "project", @@ -825,6 +867,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -832,6 +875,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.025, "precise.finish_ts": 0.075, @@ -839,6 +883,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "baz", + "sdk.name": "sentry.javascript.node", "transaction": "baz1", "precise.start_ts": 0.05, "precise.finish_ts": 0.1, @@ -851,6 +896,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 50, "kind": "project", @@ -858,6 +904,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 25, "end": 75, "kind": "project", @@ -865,6 +912,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "baz", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 50, "end": 100, "kind": "project", @@ -878,6 +926,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.025, @@ -885,6 +934,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.05, "precise.finish_ts": 0.075, @@ -897,6 +947,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 25, "kind": "project", @@ -904,6 +955,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": None, "opCategory": None, + "sdkName": None, "start": 25, "end": 50, "kind": "missing", @@ -911,6 +963,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + 
"sdkName": "sentry.javascript.node", "start": 50, "end": 75, "kind": "project", @@ -924,6 +977,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -931,6 +985,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo2", "precise.start_ts": 0.025, "precise.finish_ts": 0.075, @@ -943,6 +998,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 100, "kind": "project", @@ -956,6 +1012,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.075, @@ -963,6 +1020,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo2", "precise.start_ts": 0.025, "precise.finish_ts": 0.1, @@ -975,6 +1033,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 100, "kind": "project", @@ -988,6 +1047,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.025, @@ -995,6 +1055,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo2", "precise.start_ts": 0.05, "precise.finish_ts": 0.075, @@ -1007,6 +1068,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 25, "kind": "project", @@ -1014,6 +1076,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": None, "opCategory": None, + "sdkName": None, "start": 25, "end": 50, "kind": "missing", @@ -1021,6 +1084,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 50, "end": 75, "kind": "project", @@ -1034,6 +1098,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1041,6 +1106,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.02, "precise.finish_ts": 0.08, @@ -1048,6 +1114,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "baz", + "sdk.name": "sentry.javascript.node", "transaction": "baz1", "precise.start_ts": 0.04, "precise.finish_ts": 0.06, @@ -1060,6 +1127,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 100, "kind": "project", @@ -1067,6 +1135,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 20, 
"end": 80, "kind": "project", @@ -1074,6 +1143,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "baz", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 40, "end": 60, "kind": "project", @@ -1087,6 +1157,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1094,12 +1165,14 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.025, "precise.finish_ts": 0.05, }, { "trace": "a" * 32, + "sdk.name": "sentry.javascript.node", "project": "baz", "transaction": "baz1", "precise.start_ts": 0.05, @@ -1113,6 +1186,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 100, "kind": "project", @@ -1120,6 +1194,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 25, "end": 50, "kind": "project", @@ -1127,6 +1202,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "baz", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 50, "end": 75, "kind": "project", @@ -1140,6 +1216,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1147,6 +1224,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.02, "precise.finish_ts": 0.03, @@ -1154,6 +1232,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "baz", + "sdk.name": "sentry.javascript.node", "transaction": "baz1", "precise.start_ts": 0.05, "precise.finish_ts": 0.075, @@ -1166,6 +1245,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 50, "kind": "project", @@ -1173,6 +1253,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 20, "end": 30, "kind": "project", @@ -1180,6 +1261,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "baz", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 50, "end": 75, "kind": "project", @@ -1193,6 +1275,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1200,6 +1283,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.02, "precise.finish_ts": 0.03, @@ -1207,6 +1291,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "baz", + "sdk.name": "sentry.javascript.node", "transaction": "baz1", "precise.start_ts": 0.04, "precise.finish_ts": 0.06, @@ -1219,6 +1304,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { 
"project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 50, "kind": "project", @@ -1226,6 +1312,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 20, "end": 30, "kind": "project", @@ -1233,6 +1320,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "baz", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 40, "end": 60, "kind": "project", @@ -1246,6 +1334,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1253,6 +1342,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "bar", + "sdk.name": "sentry.javascript.node", "transaction": "bar1", "precise.start_ts": 0.01, "precise.finish_ts": 0.02, @@ -1260,6 +1350,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0.03, "precise.finish_ts": 0.04, @@ -1272,6 +1363,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 50, "kind": "project", @@ -1279,6 +1371,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "bar", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 10, "end": 20, "kind": "project", @@ -1292,6 +1385,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1304,6 +1398,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 50, "kind": "project", @@ -1317,6 +1412,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1329,6 +1425,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 0, "end": 50, "kind": "project", @@ -1336,6 +1433,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": None, "opCategory": None, + "sdkName": None, "start": 50, "end": 100, "kind": "other", @@ -1349,6 +1447,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.012, @@ -1356,6 +1455,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0.013, "precise.finish_ts": 0.024, @@ -1363,6 +1463,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "project": "foo", + "sdk.name": "sentry.javascript.node", "transaction": "foo1", "precise.start_ts": 0.032, "precise.finish_ts": 0.040, @@ -1375,6 +1476,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", 
"start": 0, "end": 21, "kind": "project", @@ -1382,6 +1484,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": None, "opCategory": None, + "sdkName": None, "start": 21, "end": 30, "kind": "missing", @@ -1389,6 +1492,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "project": "foo", "opCategory": None, + "sdkName": "sentry.javascript.node", "start": 30, "end": 40, "kind": "project", From 45d2d2e3f00f39f914f899d92f68e0614d72a2fd Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Tue, 7 May 2024 14:46:55 -0700 Subject: [PATCH 121/376] feat(slack): EA :white_circle: for actions by adding to issue alert threads FF (#70468) --- src/sentry/integrations/slack/message_builder/issues.py | 6 ++++-- tests/sentry/integrations/slack/test_message_builder.py | 2 +- .../integrations/slack/webhooks/actions/test_status.py | 8 ++++---- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index 5e163ebf110ab9..457d4a611f8a57 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -387,7 +387,7 @@ def build_actions( """Having actions means a button will be shown on the Slack message e.g. ignore, resolve, assign.""" if actions and identity: text = get_action_text(actions, identity) - if features.has("organizations:slack-improvements", project.organization): + if features.has("organizations:slack-thread-issue-alert", project.organization): # if actions are taken, return True at the end to show the white circle emoji return [], text, True return [], text, False @@ -608,7 +608,9 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock: if self.actions and self.identity and not action_text: # this means somebody is interacting with the message action_text = get_action_text(self.actions, self.identity) - if features.has("organizations:slack-improvements", self.group.project.organization): + if features.has( + "organizations:slack-thread-issue-alert", self.group.project.organization + ): has_action = True blocks = [self.get_title_block(rule_id, notification_uuid, obj, has_action)] diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index 7da9cfc267173a..5bc7170cab4d32 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -1155,7 +1155,7 @@ def test_identity_and_action(self): group, self.project, "test txt", [MessageAction(name="TEST")], MOCKIDENTITY ) == ([], "", False) - @with_feature("organizations:slack-improvements") + @with_feature("organizations:slack-thread-issue-alert") def test_identity_and_action_has_action(self): # returns True to indicate to use the white circle emoji group = self.create_group(project=self.project) diff --git a/tests/sentry/integrations/slack/webhooks/actions/test_status.py b/tests/sentry/integrations/slack/webhooks/actions/test_status.py index cd0bc5babda783..b88573f1575e1f 100644 --- a/tests/sentry/integrations/slack/webhooks/actions/test_status.py +++ b/tests/sentry/integrations/slack/webhooks/actions/test_status.py @@ -347,7 +347,7 @@ def test_archive_issue_until_escalating_block_kit(self): assert ":red_circle:" in update_data["blocks"][0]["text"]["text"] @responses.activate - @with_feature("organizations:slack-improvements") + 
@with_feature("organizations:slack-thread-issue-alert") def test_archive_issue_until_escalating_block_kit_improvements(self): original_message = self.get_original_message_block_kit(self.group.id) self.archive_issue_block_kit(original_message, "ignored:archived_until_escalating") @@ -648,7 +648,7 @@ def test_assign_issue_block_kit(self): "integration": ActivityIntegration.SLACK.value, } - @with_feature("organizations:slack-improvements") + @with_feature("organizations:slack-thread-issue-alert") def test_assign_issue_block_kit_improvements(self): user2 = self.create_user(is_superuser=False) self.create_member(user=user2, organization=self.organization, teams=[self.team]) @@ -995,7 +995,7 @@ def test_resolve_issue_block_kit(self): assert ":red_circle:" in update_data["blocks"][0]["text"]["text"] @responses.activate - @with_feature("organizations:slack-improvements") + @with_feature("organizations:slack-thread-issue-alert") def test_resolve_issue_block_kit_improvements(self): original_message = self.get_original_message_block_kit(self.group.id) self.resolve_issue_block_kit(original_message, "resolved") @@ -1012,7 +1012,7 @@ def test_resolve_issue_block_kit_improvements(self): assert ":white_circle:" in update_data["blocks"][0]["text"]["text"] @responses.activate - @with_feature("organizations:slack-improvements") + @with_feature("organizations:slack-thread-issue-alert") def test_resolve_perf_issue_block_kit_improvements(self): group_fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-group1" From ef6c79a2d26a6692d3bfa68c5ff0692e9655c935 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Tue, 7 May 2024 17:50:52 -0400 Subject: [PATCH 122/376] Revert "ref(crons): Normalize crons incident issues (#70289)" (#70469) This reverts commit 9d56889699cd744e99305509d435509ad12e6ccd. 
--- src/sentry/issues/grouptype.py | 27 +- src/sentry/monitors/constants.py | 3 + src/sentry/monitors/logic/mark_failed.py | 54 +++- .../slack/notifications/test_issue_alert.py | 6 +- .../slack/test_message_builder.py | 4 +- tests/sentry/issues/test_ingest.py | 4 +- tests/sentry/mail/test_adapter.py | 10 +- .../sentry/monitors/logic/test_mark_failed.py | 237 +++++++++++++++++- tests/sentry/tasks/test_weekly_reports.py | 6 +- 9 files changed, 317 insertions(+), 34 deletions(-) diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index 58eef89f833470..6c2c0658a6f809 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -245,6 +245,10 @@ class PerformanceGroupTypeDefaults: noise_config = NoiseConfig() +class CronGroupTypeDefaults: + notification_config = NotificationConfig(context=[]) + + class ReplayGroupTypeDefaults: notification_config = NotificationConfig(context=[]) @@ -512,27 +516,36 @@ class ProfileFunctionRegressionType(GroupType): @dataclass(frozen=True) -class MonitorIncidentType(GroupType): +class MonitorCheckInFailure(CronGroupTypeDefaults, GroupType): type_id = 4001 slug = "monitor_check_in_failure" - description = "Crons Monitor Failed" + description = "Monitor Check In Failed" category = GroupCategory.CRON.value released = True creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds default_priority = PriorityLevel.HIGH - notification_config = NotificationConfig(context=[]) @dataclass(frozen=True) -class MonitorCheckInTimeoutDeprecated(MonitorIncidentType, GroupType): - # This is deprecated, only kept around for it's type_id +class MonitorCheckInTimeout(CronGroupTypeDefaults, GroupType): type_id = 4002 + slug = "monitor_check_in_timeout" + description = "Monitor Check In Timeout" + category = GroupCategory.CRON.value + released = True + creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) -class MonitorCheckInMissedDeprecated(MonitorIncidentType, GroupType): - # This is deprecated, only kept around for it's type_id +class MonitorCheckInMissed(CronGroupTypeDefaults, GroupType): type_id = 4003 + slug = "monitor_check_in_missed" + description = "Monitor Check In Missed" + category = GroupCategory.CRON.value + released = True + creation_quota = Quota(3600, 60, 60_000) # 60,000 per hour, sliding window of 60 seconds + default_priority = PriorityLevel.HIGH @dataclass(frozen=True) diff --git a/src/sentry/monitors/constants.py b/src/sentry/monitors/constants.py index 8f06d3589dff4a..ca6b15bb4a9b27 100644 --- a/src/sentry/monitors/constants.py +++ b/src/sentry/monitors/constants.py @@ -7,6 +7,9 @@ # current limit is 28 days MAX_TIMEOUT = 40_320 +# Format to use in the issue subtitle for the missed check-in timestamp +SUBTITLE_DATETIME_FORMAT = "%b %d, %I:%M %p %Z" + # maximum value for incident + recovery thresholds to be set # affects the performance of recent check-ins query # lowering this may invalidate monitors + block check-ins diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 73b20e3e69d4e1..f6df630b628bfd 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -7,8 +7,13 @@ from django.db.models import Q from sentry import features -from sentry.issues.grouptype import MonitorIncidentType +from sentry.issues.grouptype import ( + MonitorCheckInFailure, + MonitorCheckInMissed, + 
MonitorCheckInTimeout, +) from sentry.models.organization import Organization +from sentry.monitors.constants import SUBTITLE_DATETIME_FORMAT, TIMEOUT from sentry.monitors.models import ( CheckInStatus, MonitorCheckIn, @@ -238,8 +243,10 @@ def create_issue_platform_occurrence( monitor_env = failed_checkin.monitor_environment current_timestamp = datetime.now(timezone.utc) + occurrence_data = get_occurrence_data(failed_checkin) + # Get last successful check-in to show in evidence display - last_successful_checkin_timestamp = "Never" + last_successful_checkin_timestamp = "None" last_successful_checkin = monitor_env.get_last_successful_checkin() if last_successful_checkin: last_successful_checkin_timestamp = last_successful_checkin.date_added.isoformat() @@ -250,11 +257,11 @@ def create_issue_platform_occurrence( project_id=monitor_env.monitor.project_id, event_id=uuid.uuid4().hex, fingerprint=[incident.grouphash], - type=MonitorIncidentType, + type=occurrence_data["group_type"], issue_title=f"Monitor failure: {monitor_env.monitor.name}", - subtitle="Your monitor has reached its failure threshold.", + subtitle=occurrence_data["subtitle"], evidence_display=[ - IssueEvidence(name="Failure reason", value="incident", important=True), + IssueEvidence(name="Failure reason", value=occurrence_data["reason"], important=True), IssueEvidence( name="Environment", value=monitor_env.get_environment().name, important=False ), @@ -265,9 +272,9 @@ def create_issue_platform_occurrence( ), ], evidence_data={}, - culprit="incident", + culprit=occurrence_data["reason"], detection_time=current_timestamp, - level="error", + level=occurrence_data["level"], assignee=monitor_env.monitor.owner_actor, ) @@ -317,3 +324,36 @@ def get_monitor_environment_context(monitor_environment: MonitorEnvironment): "status": monitor_environment.get_status_display(), "type": monitor_environment.monitor.get_type_display(), } + + +def get_occurrence_data(checkin: MonitorCheckIn): + if checkin.status == CheckInStatus.MISSED: + expected_time = ( + checkin.expected_time.astimezone(checkin.monitor.timezone).strftime( + SUBTITLE_DATETIME_FORMAT + ) + if checkin.expected_time + else "the expected time" + ) + return { + "group_type": MonitorCheckInMissed, + "level": "warning", + "reason": "missed_checkin", + "subtitle": f"No check-in reported on {expected_time}.", + } + + if checkin.status == CheckInStatus.TIMEOUT: + duration = (checkin.monitor.config or {}).get("max_runtime") or TIMEOUT + return { + "group_type": MonitorCheckInTimeout, + "level": "error", + "reason": "duration", + "subtitle": f"Check-in exceeded maximum duration of {duration} minutes.", + } + + return { + "group_type": MonitorCheckInFailure, + "level": "error", + "reason": "error", + "subtitle": "An error occurred during the latest check-in.", + } diff --git a/tests/sentry/integrations/slack/notifications/test_issue_alert.py b/tests/sentry/integrations/slack/notifications/test_issue_alert.py index 55b50db848401b..4d56a3046f1901 100644 --- a/tests/sentry/integrations/slack/notifications/test_issue_alert.py +++ b/tests/sentry/integrations/slack/notifications/test_issue_alert.py @@ -11,7 +11,7 @@ from sentry.digests.backends.redis import RedisBackend from sentry.digests.notifications import event_to_record from sentry.integrations.slack.message_builder.issues import get_tags -from sentry.issues.grouptype import MonitorIncidentType +from sentry.issues.grouptype import MonitorCheckInFailure from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from 
sentry.models.identity import Identity, IdentityStatus from sentry.models.integrations.external_actor import ExternalActor @@ -157,7 +157,7 @@ def test_crons_issue_alert_user_block(self): IssueEvidence("Evidence 2", "Value 2", False), IssueEvidence("Evidence 3", "Value 3", False), ], - MonitorIncidentType, + MonitorCheckInFailure, datetime.now(UTC), "info", "/api/123", @@ -165,7 +165,7 @@ def test_crons_issue_alert_user_block(self): occurrence.save() event.occurrence = occurrence - event.group.type = MonitorIncidentType.type_id + event.group.type = MonitorCheckInFailure.type_id notification = AlertRuleNotification( Notification(event=event, rule=self.rule), ActionTargetType.MEMBER, self.user.id ) diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index 5bc7170cab4d32..cf207e84de7dad 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -26,7 +26,7 @@ from sentry.issues.grouptype import ( ErrorGroupType, FeedbackGroup, - MonitorIncidentType, + MonitorCheckInFailure, PerformanceP95EndpointRegressionGroupType, ProfileFileIOGroupType, ) @@ -1321,7 +1321,7 @@ def setUp(self): type=PerformanceP95EndpointRegressionGroupType.type_id ) - self.cron_issue = self.create_group(type=MonitorIncidentType.type_id) + self.cron_issue = self.create_group(type=MonitorCheckInFailure.type_id) self.feedback_issue = self.create_group( type=FeedbackGroup.type_id, substatus=GroupSubStatus.NEW ) diff --git a/tests/sentry/issues/test_ingest.py b/tests/sentry/issues/test_ingest.py index cb12627d233684..d1600b7f4a01e9 100644 --- a/tests/sentry/issues/test_ingest.py +++ b/tests/sentry/issues/test_ingest.py @@ -14,7 +14,7 @@ GroupCategory, GroupType, GroupTypeRegistry, - MonitorIncidentType, + MonitorCheckInFailure, NoiseConfig, ) from sentry.issues.ingest import ( @@ -248,7 +248,7 @@ def test_existing_group_different_category(self) -> None: new_event = self.store_event(data={}, project_id=self.project.id) new_occurrence = self.build_occurrence( - fingerprint=["some-fingerprint"], type=MonitorIncidentType.type_id + fingerprint=["some-fingerprint"], type=MonitorCheckInFailure.type_id ) with mock.patch("sentry.issues.ingest.logger") as logger: assert save_issue_from_occurrence(new_occurrence, new_event, None) is None diff --git a/tests/sentry/mail/test_adapter.py b/tests/sentry/mail/test_adapter.py index 94e3d21653528e..2e2d6d9e8d0226 100644 --- a/tests/sentry/mail/test_adapter.py +++ b/tests/sentry/mail/test_adapter.py @@ -17,7 +17,7 @@ from sentry.api.serializers.models.userreport import UserReportWithGroupSerializer from sentry.digests.notifications import build_digest, event_to_record from sentry.event_manager import EventManager, get_event_type -from sentry.issues.grouptype import MonitorIncidentType +from sentry.issues.grouptype import MonitorCheckInFailure from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.mail import build_subject_prefix, mail_adapter from sentry.models.activity import Activity @@ -328,7 +328,7 @@ def test_simple_notification_generic(self): IssueEvidence("Evidence 2", "Value 2", False), IssueEvidence("Evidence 3", "Value 3", False), ], - MonitorIncidentType, + MonitorCheckInFailure, timezone.now(), "info", "/api/123", @@ -336,7 +336,7 @@ def test_simple_notification_generic(self): occurrence.save() event.occurrence = occurrence - event.group.type = MonitorIncidentType.type_id + event.group.type = 
MonitorCheckInFailure.type_id rule = Rule.objects.create(project=self.project, label="my rule") ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) @@ -384,7 +384,7 @@ def test_simple_notification_generic_no_evidence(self): "1234", {"Test": 123}, [], # no evidence - MonitorIncidentType, + MonitorCheckInFailure, timezone.now(), "info", "/api/123", @@ -392,7 +392,7 @@ def test_simple_notification_generic_no_evidence(self): occurrence.save() event.occurrence = occurrence - event.group.type = MonitorIncidentType.type_id + event.group.type = MonitorCheckInFailure.type_id rule = Rule.objects.create(project=self.project, label="my rule") ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) diff --git a/tests/sentry/monitors/logic/test_mark_failed.py b/tests/sentry/monitors/logic/test_mark_failed.py index 2d13f9069f5ecb..df8f44dcb9b601 100644 --- a/tests/sentry/monitors/logic/test_mark_failed.py +++ b/tests/sentry/monitors/logic/test_mark_failed.py @@ -5,10 +5,15 @@ from django.utils import timezone -from sentry.issues.grouptype import MonitorIncidentType +from sentry.issues.grouptype import ( + MonitorCheckInFailure, + MonitorCheckInMissed, + MonitorCheckInTimeout, +) from sentry.issues.ingest import process_occurrence_data from sentry.models.groupassignee import GroupAssignee from sentry.models.grouphash import GroupHash +from sentry.monitors.constants import SUBTITLE_DATETIME_FORMAT from sentry.monitors.logic.mark_failed import mark_failed from sentry.monitors.models import ( CheckInStatus, @@ -278,11 +283,11 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence "project_id": self.project.id, "fingerprint": [monitor_incidents[0].grouphash], "issue_title": f"Monitor failure: {monitor.name}", - "subtitle": "Your monitor has reached its failure threshold.", + "subtitle": "An error occurred during the latest check-in.", "resource_id": None, "evidence_data": {}, "evidence_display": [ - {"name": "Failure reason", "value": "incident", "important": True}, + {"name": "Failure reason", "value": "error", "important": True}, { "name": "Environment", "value": monitor_environment.get_environment().name, @@ -294,9 +299,9 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence "important": False, }, ], - "type": MonitorIncidentType.type_id, + "type": MonitorCheckInFailure.type_id, "level": "error", - "culprit": "incident", + "culprit": "error", }, ) == dict(occurrence) @@ -336,6 +341,228 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence }, ) == dict(event) + @with_feature("organizations:issue-platform") + @patch("sentry.issues.producer.produce_occurrence_to_kafka") + def test_mark_failed_with_timeout_reason_issue_platform(self, mock_produce_occurrence_to_kafka): + monitor = Monitor.objects.create( + name="test monitor", + organization_id=self.organization.id, + project_id=self.project.id, + type=MonitorType.CRON_JOB, + config={ + "schedule": [1, "month"], + "schedule_type": ScheduleType.INTERVAL, + "max_runtime": 10, + "checkin_margin": None, + }, + ) + monitor_environment = MonitorEnvironment.objects.create( + monitor=monitor, + environment_id=self.environment.id, + status=monitor.status, + ) + successful_check_in = MonitorCheckIn.objects.create( + monitor=monitor, + monitor_environment=monitor_environment, + project_id=self.project.id, + status=CheckInStatus.OK, + ) + last_checkin = timezone.now() + + failed_checkin = MonitorCheckIn.objects.create( + monitor=monitor, 
+ monitor_environment=monitor_environment, + project_id=self.project.id, + status=CheckInStatus.TIMEOUT, + date_added=last_checkin, + duration=monitor.config.get("max_runtime"), + ) + assert mark_failed(failed_checkin, ts=failed_checkin.date_added) + + monitor_environment.refresh_from_db() + assert monitor_environment.status == MonitorStatus.ERROR + + monitor_incidents = MonitorIncident.objects.filter(monitor_environment=monitor_environment) + assert len(monitor_incidents) == 1 + + assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1 + + kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs + occurrence = kwargs["occurrence"] + event = kwargs["event_data"] + occurrence = occurrence.to_dict() + + assert dict( + occurrence, + **{ + "project_id": self.project.id, + "fingerprint": [monitor_incidents[0].grouphash], + "issue_title": f"Monitor failure: {monitor.name}", + "subtitle": "Check-in exceeded maximum duration of 10 minutes.", + "resource_id": None, + "evidence_data": {}, + "evidence_display": [ + {"name": "Failure reason", "value": "duration", "important": True}, + { + "name": "Environment", + "value": monitor_environment.get_environment().name, + "important": False, + }, + { + "name": "Last successful check-in", + "value": successful_check_in.date_added.isoformat(), + "important": False, + }, + ], + "type": MonitorCheckInTimeout.type_id, + "level": "error", + "culprit": "duration", + }, + ) == dict(occurrence) + + assert dict( + event, + **{ + "contexts": { + "monitor": { + "status": "error", + "type": "cron_job", + "config": { + "schedule_type": 2, + "schedule": [1, "month"], + "max_runtime": 10, + "checkin_margin": None, + }, + "id": str(monitor.guid), + "name": monitor.name, + "slug": str(monitor.slug), + } + }, + "environment": monitor_environment.get_environment().name, + "event_id": occurrence["event_id"], + "fingerprint": [monitor_incidents[0].grouphash], + "platform": "other", + "project_id": monitor.project_id, + "sdk": None, + "tags": { + "monitor.id": str(monitor.guid), + "monitor.slug": str(monitor.slug), + "monitor.incident": str(monitor_incidents[0].id), + }, + }, + ) == dict(event) + + @with_feature("organizations:issue-platform") + @patch("sentry.issues.producer.produce_occurrence_to_kafka") + def test_mark_failed_with_missed_reason_issue_platform(self, mock_produce_occurrence_to_kafka): + last_checkin = timezone.now().replace(second=0, microsecond=0) + next_checkin = last_checkin + timedelta(hours=1) + + monitor = Monitor.objects.create( + name="test monitor", + organization_id=self.organization.id, + project_id=self.project.id, + type=MonitorType.CRON_JOB, + config={ + "schedule": [1, "hour"], + "schedule_type": ScheduleType.INTERVAL, + "max_runtime": None, + "checkin_margin": None, + }, + ) + monitor_environment = MonitorEnvironment.objects.create( + monitor=monitor, + environment_id=self.environment.id, + last_checkin=last_checkin, + next_checkin=next_checkin, + next_checkin_latest=next_checkin + timedelta(minutes=1), + status=monitor.status, + ) + + failed_checkin = MonitorCheckIn.objects.create( + monitor=monitor, + monitor_environment=monitor_environment, + project_id=self.project.id, + status=CheckInStatus.MISSED, + expected_time=next_checkin, + date_added=next_checkin + timedelta(minutes=1), + ) + assert mark_failed(failed_checkin, ts=failed_checkin.date_added) + + monitor.refresh_from_db() + monitor_environment.refresh_from_db() + assert monitor_environment.status == MonitorStatus.ERROR + + monitor_incidents = 
MonitorIncident.objects.filter(monitor_environment=monitor_environment) + assert len(monitor_incidents) == 1 + + assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1 + + kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs + occurrence = kwargs["occurrence"] + event = kwargs["event_data"] + occurrence = occurrence.to_dict() + + assert dict( + occurrence, + **{ + "project_id": self.project.id, + "fingerprint": [monitor_incidents[0].grouphash], + "issue_title": f"Monitor failure: {monitor.name}", + "subtitle": f"No check-in reported on {next_checkin.strftime(SUBTITLE_DATETIME_FORMAT)}.", + "resource_id": None, + "evidence_data": {}, + "evidence_display": [ + {"name": "Failure reason", "value": "missed_checkin", "important": True}, + { + "name": "Environment", + "value": monitor_environment.get_environment().name, + "important": False, + }, + { + "name": "Last successful check-in", + "value": "None", + "important": False, + }, + ], + "type": MonitorCheckInMissed.type_id, + "level": "warning", + "culprit": "missed_checkin", + }, + ) == dict(occurrence) + + assert dict( + event, + **{ + "contexts": { + "monitor": { + "status": "error", + "type": "cron_job", + "config": { + "schedule_type": 2, + "schedule": [1, "hour"], + "max_runtime": None, + "checkin_margin": None, + }, + "id": str(monitor.guid), + "name": monitor.name, + "slug": str(monitor.slug), + } + }, + "environment": monitor_environment.get_environment().name, + "event_id": occurrence["event_id"], + "fingerprint": [monitor_incidents[0].grouphash], + "platform": "other", + "project_id": monitor.project_id, + "sdk": None, + "tags": { + "monitor.id": str(monitor.guid), + "monitor.slug": str(monitor.slug), + "monitor.incident": str(monitor_incidents[0].id), + }, + }, + ) == dict(event) + @with_feature("organizations:issue-platform") @patch("sentry.issues.producer.produce_occurrence_to_kafka") def test_mark_failed_muted(self, mock_produce_occurrence_to_kafka): diff --git a/tests/sentry/tasks/test_weekly_reports.py b/tests/sentry/tasks/test_weekly_reports.py index 971d100129a18a..83621236bd4163 100644 --- a/tests/sentry/tasks/test_weekly_reports.py +++ b/tests/sentry/tasks/test_weekly_reports.py @@ -11,7 +11,7 @@ from django.utils import timezone from sentry.constants import DataCategory -from sentry.issues.grouptype import MonitorIncidentType, PerformanceNPlusOneGroupType +from sentry.issues.grouptype import MonitorCheckInFailure, PerformanceNPlusOneGroupType from sentry.models.group import GroupStatus from sentry.models.grouphistory import GroupHistoryStatus from sentry.models.notificationsettingoption import NotificationSettingOption @@ -381,7 +381,7 @@ def test_message_builder_simple(self, message_builder, record): self.create_performance_issue(fingerprint=f"{PerformanceNPlusOneGroupType.type_id}-group2") # store a crons issue just to make sure it's not counted in key_performance_issues - self.create_group(type=MonitorIncidentType.type_id) + self.create_group(type=MonitorCheckInFailure.type_id) prepare_organization_report(self.now.timestamp(), ONE_DAY * 7, self.organization.id) for call_args in message_builder.call_args_list: @@ -458,7 +458,7 @@ def test_message_builder_filter_resolved(self, message_builder, record): self.create_performance_issue(fingerprint=f"{PerformanceNPlusOneGroupType.type_id}-group2") # store a crons issue just to make sure it's not counted in key_performance_issues - self.create_group(type=MonitorIncidentType.type_id) + self.create_group(type=MonitorCheckInFailure.type_id) 
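        # The call below aggregates the trailing 7-day window (ONE_DAY * 7)
        # anchored at self.now; the crons group created above must stay out
        # of key_performance_issues.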
prepare_organization_report(self.now.timestamp(), ONE_DAY * 7, self.organization.id) for call_args in message_builder.call_args_list: From 277f026f5446c88fc18598b305a19ed73a69d06b Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 7 May 2024 14:54:57 -0700 Subject: [PATCH 123/376] feat(metrics): Add metricSecond to allowed category (#70442) --- src/sentry/api/endpoints/organization_stats_v2.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/endpoints/organization_stats_v2.py b/src/sentry/api/endpoints/organization_stats_v2.py index c5a15fb37b6358..b7dd4002eb1695 100644 --- a/src/sentry/api/endpoints/organization_stats_v2.py +++ b/src/sentry/api/endpoints/organization_stats_v2.py @@ -168,7 +168,10 @@ def get(self, request: Request, organization) -> Response: with self.handle_query_errors(): if features.has("organizations:metrics-stats", organization): - if request.GET.get("category") == "metrics": + if ( + request.GET.get("category") == "metrics" + or request.GET.get("category") == "metricSecond" + ): # TODO(metrics): align project resolution result = run_metrics_outcomes_query( request.GET, From fadd0b55220226ad6b4eff69c5ca37f668930614 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Tue, 7 May 2024 18:00:45 -0400 Subject: [PATCH 124/376] fix(ui): Use new illustration for tracing keyboard shortcuts (#70474) clipboard.png Looks like the rest of the product now --- .../newTraceDetails/traceShortcuts.tsx | 4 +- .../spot/tracing-keyboard-shortcuts.svg | 132 ++++++++++++++++++ static/images/tracing/tracing-keyboard.jpg | Bin 49737 -> 0 bytes 3 files changed, 134 insertions(+), 2 deletions(-) create mode 100644 static/images/spot/tracing-keyboard-shortcuts.svg delete mode 100644 static/images/tracing/tracing-keyboard.jpg diff --git a/static/app/views/performance/newTraceDetails/traceShortcuts.tsx b/static/app/views/performance/newTraceDetails/traceShortcuts.tsx index f6409ca7a00baf..0e5b5393caab02 100644 --- a/static/app/views/performance/newTraceDetails/traceShortcuts.tsx +++ b/static/app/views/performance/newTraceDetails/traceShortcuts.tsx @@ -1,7 +1,7 @@ import {Fragment, useCallback} from 'react'; import styled from '@emotion/styled'; -import tracingKeyboardIllustration from 'sentry-images/tracing/tracing-keyboard.jpg'; +import tracingKeyboardShortcuts from 'sentry-images/spot/tracing-keyboard-shortcuts.svg'; import {type ModalRenderProps, openModal} from 'sentry/actionCreators/modal'; import {Button} from 'sentry/components/button'; @@ -60,7 +60,7 @@ function TraceShortcutsModal({Header, Body}: ModalRenderProps) {

- Sentry cant fix this
+ {t('Sentry
diff --git a/static/images/spot/tracing-keyboard-shortcuts.svg b/static/images/spot/tracing-keyboard-shortcuts.svg
new file mode 100644
index 00000000000000..7413527a8eb6bf
--- /dev/null
+++ b/static/images/spot/tracing-keyboard-shortcuts.svg
@@ -0,0 +1,132 @@
+[132 lines of SVG markup; the vector data did not survive this extract]
diff --git a/static/images/tracing/tracing-keyboard.jpg b/static/images/tracing/tracing-keyboard.jpg
deleted file mode 100644
index 44953744f7d8c288aa3bfeca64062394a2715d54..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 49737
[base85 payload of the deleted binary omitted]
z4BZ2chu;P@&s_r^ZvP^ zUqz}O2vNUhvWP$3>GODQ9PL{!{qP+8{jdj%M8IKx+p;h$T}Mo7L1u2TZeFFbtTg_1 zJl_Miylhz4COrny{8$TjZeNejmqVU`yW5UUqbb%2zdMH0E=q?_s|mhTTDy}t6Tjft z8VJt@B-Xew=Vn{dDk+XN7~E~G(ZF~-3ee|O-Tn`)!CFb{L1&_952#5b2E2!tPzz8I@ACjB-Uz3hJq>TWs;U#?S%S7IOpOk0BV z+j7Q#bC`})e%{ZqRljrK@nN?AX&c2k_!M=s!c|^ai-F!($D)C>UNs()An(&mSdOpy z4$gX48&BOTClK;`dikh&W9d3>>f7|rjZ_-gH=t<0EKdmtgKi({CYh_6w@RaX?R2L* zV7TvJi>wG`y7z;gje9@l=@1n|*eQN)*ybqBSx9d5Cy^U+aV1-^2n;;VM;cN0_bX%= z7R45m_)00u{6?^Zl>?I>JiZ47)%o0Pw%c$VoHQc~>}}(Ain97sGU{+|kAa!c#rJ2u zttrW|J^>+cH6?|_4ya_PVa2}U*N-<_KU)6&_Rj9xSCgb)!NMElK+8wtCb}Y`Fv#b5oQGzB?5W~r7*=9BIi0xEXYlLdrF=+7M z-B(|x9@Pr>K-)x>_cK-iLSSi>rTL4A&Goxx=truudyV7mbcM}Nk5u?2eqZYZYPvK$ z)s4D!-ywD2v}(vSG9X|g%PM~QDb!|AN;i69oBgwrC_&06hh-YZW0GObP`%JCduO5Z z30pAhyU$)yN4d8Lf1eB(4CX zdNx)`<~38^Bvcm>a`9f+k@S40b>&K^&_zQ0C0`2D#DSPNlc^ylI{Ur}Ps#qx&hxWQ z2rKD9kWgzaHoA{Dx7UofhOhm5Ba}nKlL}A64tWtSa;@`Z>@B^;wNwkYgSEBXYnG18 z+*EPDT5yu!>*i{q9a_rn-TlXyl!eDXssZxsRjEwaaUetzw;DL_4kwnIxIel zp&nt+ZnD>>m%U-%KH6sAv0%cczCwVj}<3I}A2Cvl&DVqGCaH83}D_ znB`lJ%+L7|m@D0$85(@n@SWN+Ri+0gu%@>rJt*g&C}EpQFLt*B*ve?s^1Vv$$5JR3 zLiB4W?U%!fzmDC5_!w>;mp`iA_W?pJsfk2y@Y51-F*Qcj99nAd>qAxltS{q(hm4$%cXb0zGbO{2GE_gJG!4=rg9@V-kYCYa( zZK<=gX$~ER26`{zgGC?MT|Rbk;Wj-n;X>?Pgl}JAfOoM-hPUpV8u4i~mufs3MB zqKsu@lk}(UcXv1LY8W$T`q&$=yP|9Sp&96>V5grHK$<=B7U!%_(NhI`)H4@JZ1DZG zJn9o0)aqFH8`ByuzvcSpA7C0ecEm{(m?81_-6KyVj6pO7iv$DnuxZO%%ROW{(a<>{ zCzs}_d8)jI1hoFY@v@=5A&I+83Q6e-wjKNg?ZS4ltJB6&wmsM!e%|n>hm2K_St$HM zwI9z#Ow7IsoIn{w%5|G)T1;6tgWD>-y?wd7PBX+`VioAr`-PsERf3Sa17K|TZZHMD zp75@6yH=7VGvV)2>hprbOO?;lDO$X4+a=cE8`xW+lcA8-R9(6{I?wleoF1k2J`at^ zsY_lgfPxp#bZ$e=wH#upvT+RBsrW+Nlf@F6qq=ONMfYj^4`uNTkv}rv6l;WO3*bp1 z)8Dh0N^cF^hctLl>OE(4R`*+)y)D?~n50k8ZPD%l{fuMkQU#Mjy{bmQPu zQZQ)>n;W%|?Th|*b*;B(>%#sr<;;DiM{q09|F^4zZD^>k2^AGtGkF*Dg4gkSnGm)t z&mF)?KC-JX_pTHSf=?vWEY~ha=?ezY<^u^;Rr8>or;_jT#xVU zj%I`af>bB}&Ym7|?}RXSwBU+kEKiT9`~r5R!vu>`7@h~KnipQ?yYNtUj8PA|u&P#y zkWt5Es3WktaX)J zol>k7caBnrjOEfFl34xFdq&k0;_Fcw>wly;viWgs(WcjFTV-OsRfU*Uz}F^(mIeFZ zo&>di*<1RBy{u(-zvJ9>G@qU2lc$5$?+DwgT)r~dDl31@;{f3hhXDn$D)MD`@7{a# z#r#IGmKP{Fk}$ERJ1@L88VQ3tQelgGT9+yJrY{hU(dHUWQDFBZ)(4Df27JJl|j5z_b9#8xAxX5jfa3tx)o;UXA-axIS zKo~$STrBuoIEKzR)P29yK6%m6nEDhc-VckW3ro?^Mb(4Dg#jjVzx{rMBF#EOZU_}& ztCtya(`z&eMgN&l*ehP*ti*-wO;LaCizlyo>0`S`Ja<$+8*lUbk6ui@d33^UGKDh6 z4TK(tJnZ^2U|nJsDcjdCWpfij<0&I0JG^MMUp237MqEVQUK`x1lW2PF`qEfa-}x5% z7P8y!gw1V;6z?b&X+;2dkD>EZmFLr^M3C6!b!8Qs;>~1%t|7F0QJ@QmQER zmk)-jmrMOSCnl8m{g$2M!!gY@>tBV8+___(^j1%1ZEA1P68*yToWH~g z9b~4HOO!|?lldt!n`s{Wx^5jQ)ah0iBGf`$0#-nGi z@F8mBQ7Q!wu=V{@(8bgVDAZoh$&=ldZjSY|FW3&Z`!M+OS`f50TcMW>w)$(x$>hij zzTFWo9acQ)gB)6>BAViIXG8qef!*!8oZd)$>vmjN zuYC^ZyG40EO@w*_T2p9j@nUH>WZ-0IMDT5`OAmug`DY9LsfOtWqC)aMgsTk(#lVDS zRDbB!?**iu`xxul{PLMk*t)DPHn!;(9z@x1I+E_0IUP#3r;RMua{v1CtLr0v#HTTb z8avZ1erp$WM*$LqZfM%sf;cwoRneXG1^r}E<~I1qYp)OGy$_dC)e*xffLF?$I<8Is1%{n9{7`;M-7)b! 
z;d*e@pCtKFoCZZuvD|q`+TZ)W*U>-Ct0B)KdEZmJhNC-zamYw>lSwZKCg(l$ytwam4y#KSIGWf7 zJ||NOkXGr6+#$9_p6I@{yCtb^>$Z`sygv8}sIr&cNvUusXt1NTMXc#j+9L2Hflq%p-#>P3iR z>Gbh=x4Qk%4{FXep2q9NiTzGIkj3!%S=?lDC(w+$yE)05t89T46hR!?+L=gH`ciQu z_vm48J2hOlyX9-e!5YnGO{wCu(5}+~XDlQR!nlIXLWU6^P#(n~3&g@Jq?2=QoN#(^ zrG`PL25W*;LH9Vtm}OE)9qVo{zI~sxwj(MNGRSuAbU84bD8QO)mhJmg`jrnZr)J2| zgEZvPiY9Lh2D!-j{&Ft25L@Oy%C4nqeOqeih+Jeb^WwOBixK%6xo?P@CP zG0%wjSV!e^jt8OlOUwj8vo;fz zAs>kM3CKJmj9`Zd(!w6&;P+Y^p=U>Fxpfjq15%NI*6C#%v|^ zx74MFFjc?9msGy#bJ5s%mvHTB|I7MUTA_H`_mm-5LHDMV*a)ER3*R1b+If`%(I!F4 zs{ig*4?pAaXwITcVmn;{9~US1El_G0^z@khIT%;W#n_DzbVSqSN)R~lF0Pz%Y+z^R zQJ=Q>npnNdnb~HHgH(FX@y`*uGK)|4e)*Si!Uwl(`wt2oA9xsS1L;M)65xHAQ^W%} zF!nG%$J~kJ$CJomQW>GeVds+_`LSPa$13@a#SrYEDBG|(fvus0H@i?4aQ(eUXy_>u zA>fw>U7(bxNI^!5Z_Rlw%fXhb`8hZ3+M@nh_w;i}r|yS0COagN8Q7*a_o8*Y`40XE z%sfF=%g*sk+1XY%S&0}Q)5#Y-2fD6ANf0MP8ttIi;jV6tw`%FDhntv23o9#&hRdwd zS(~TY4@EpaJO==nj<@z-<_A5UIcihGAr$l&;(|QNe@$Pc`omn%+I2cX+0w#-8tF^2 zYV=!FPEYq)9>*u4p>e;6!n-<+!-rJ|O&>cFo-`eJC2eYeh2E4EOsE=jn~fVe~L9WFX*0LrhYum29Q?=v2>r7dIY}) zQQ3{1A)KbF+Its_c6Ewi6;3a|z8KLMI-WMrQ*5(Ti4I4KCZ8gS_snh*gF$5YMv`u( zRwc_SLktguVL?6gMvH2a9bNqUR{*v!t)k`>mcbLo@c8S1E757G5nvDA{vW`+IdPhD z_wc{#)oqMqtR+zlE1j_dq%?hra15aQY;0_tzI|6X5bD^i#{LM)#P}Zoyn1vngF9N) z?ayU5A^lvg&AvbrQhTuq8D`O+`Qw!Ts2v`9vndO81ckPhOUjA!xT62`ufy|9LkuEH zl}xneFphmtnVrF5^ zB0Ja##(gZ>SlBn|02A*U-9P#^s99O>GW|BWO&pkH+1!BFRN%Vx2ap67MB8d^V z=hO+4csm8)4y1QUG+3;bo(BPWl%`Q}qfDM}m`yd_3!yFVoT903;Ff77W|#&4@dtx` zlKs3?%u!f;?-=y&&!y`H+p9;93HF^gu+L^rs~|cPyi-Kacg5<|8Q=c_ibq4!tn-R^ zW;3j>K;iX17i|?RAJo#7Zv^7TQ!x#>Ueg!Ru}2d&yS~6?gD_+Cg1?8?Z;I?=n?F(H zQyw}|8ddWC3{=)-`aY$mfn*JC{zB_8TB=nZIA=xWzKl7s)E7FD);ptKQ4#`{Qt_wx%Td%s+0PU=!$X-QwjRJDmFt& zf?Dsd-8emfq?3PDZ7>Xq+Ru@Y+Sll017&InI_MlUI{%MLV;_K=y`msDWFt~BGrgeO z59s*FY3{!_3n^ue-+7z`t);JgnRx{XW!k9_~-h%XPw6fP+g9J=*->&-r@NU+4sj|W;;1&O#{?~Vm#AOLUNIj`|WA?Q)EdwY0b)94P1e8E7;$jxT%nyjr&Zn41YcXu6gb&s~ge<5-cyMKu=94i`MB9($O! 
z)zbQjs%1)ScGDQJHT=Jd&N`~ezYXIc3MvAkBE9jGQX1*nRHRFgu8D{W2uP0{f^>s` zk|HVH%|?%q?$Is1kps5*KJWiKXXiQJzV7?_T%ic?;(yOpk)h-%{q*knpI>)CRAg*t z&Y@xTuI;%dvt}xrt!LS`pBU>{o|*krxOQn>e+`+Pu{vIApr^HrD1LH;8+pfaQDAQ7 za|nW~yiD;hd*y=^tr08{xNv%-x((B`oSU2fW9q^fV@&&9<{ud}*q>@m=syicW$*wi z5g-$GKR3PgV5`2hAwZq`PT_3_gY`RzbRR?hj=6-Ds;6I&j#L!jSkpa2>W^`zPH&%+ zttD@<1G!yJ;=5tF$3{Q>+C17$*ogeNLPH&8G1{p~h(=gPHAM9gtuqjj;~AQ=H>Cc7 zqj2SO4seFL7Hr@B=2XW+W&VNroWS1>5%pCGUD+styLLP|Te=5(x%l$a^B9c4+Hm>@ zIoFJajqlcZ-m?%hGjLeU{aYYgg_P4#meHIieJ>ubBLxULNbKw9D|B?f7VIx0-gCPz zu-C|t7o?2pvlY>`e3oW7qHJAd=U>T49~*u$ak`-OXI$e6=-rTa%U8c%w305G$<#Bv zMaddFt>bmT<)>mk$()y90y(K-bBqUGwoD`jCiZ}n3p4$RGN*&h$x{=}M0HQqttc$j z^R_lJa@ednX9xUK5cS^n)npzHIzC=5laXC+9R;y4>9lxYP%8by-=pkKM9*qHB?I-R z<UJbiwpnHecW%~5j5#JFv2dOtQ@(7y_jq59ehEjzxc>LL2X52 z#+?ST@2MnD%g*Gdf&fr@FQ=qnbU;0%I^UG@ufJ%Q>6fs3RujL>73GQoZ|mH<(C!Mz z7XC;nT~Dm7rKp4qbOEsYCJIZ?6f4D=C27HvW>&&;CLsvOztQLgaS{ ziqE^Vwi!}12fmfMJ`!#CbAPV#OsYF;cCHgeUmGUxx8kRK+>Vs2pxU$K9F6FP|Im}v zF3IOF^n06O;V1Hpb6Q^TN@vkmK~-MrN~=UxF|a~$^(MB-V(oKr1J+^FwJ-b|`OwrG zF-sfl?pk?hi`B+9gyUAu4xYSp{qx)SSOWIjvh-R$1)ciopf;4w-qa1FTp?JtFZ2p6 zAjY&PL*KyHKYJalX|xLBf}4HAY2T9;06HAD`mjT?`hldqCiGQRi+65XV~k0c^dE}# zEB*qV_!Dp!EF$w3-U5r($G5H)cg4OK`?Ppz_Em18D~I#_{DENGE7?R?6yS~;#`7UM z!8HD($S0FfyJF7VohXC>sAei9#a_JdcO48vhNUvZvF#qvH`GAl z!yCP*PdwAW^UGJl6*XH!4$iJ;nmBMa26FtidS9Yx>qIrcS@Hp7J2yvt5xJ_dQCoKf zY3q5W!pZ3<(+j|#E=7sdRf`M*&SQVzQ_&nMn!0D_hI;0Rab&v!=89 z0~z*A;0z~-5(`L+V5~uT5R_u57;Dz?%g-7ud5&@aGFDexG(gM|(Cr#okiB^N_&!&{ zC*NEZX=a8_0u3=8!s~kv|Dhb;!#^lyhVNe8zW-|KRAYssI`!l5K@Gm2?kPXNarGTm zqo5xq8e$)C)udafD+s|J2i<4QvG5;5QUL$oVTh!q2ZCrNZ3XDNZ*(!TC_a}|6{J7D z4m-j~U}(FkVdJywo2W?tG|+!<@E-ZBLDJrKxq>W78}NP!3GKnpy z;$>LlLvF5N8tA@&q$lNRR#l>}h4HdlWk+lZR~hBbKeETpKQw=p4`dTVgNEPkgKQAHSow(@p{Js$ z4vzC?Pn8{QSR-*qQE%3=!P@isV%sGMFV-g(jVC~6@wI$A~2Ace%JrEbx8a znt?~GzhyEI)A-FTxu^5 z@W0>l<$K9Jqk@QFDY_vbln967U6SA~5>qB=ZRu1TjZ-4*3pj{X9fRIf% zK4~#Z8}&s{!8LSsW5ih122mlm6cAne2k7H%hvA`NHvgW?Yj{zhUGQ>$U1Y(|lSOcC zQKC;+;g!R@X9IQ;-I7UtOtT2Kh zV(}4I^p6Y@&!IiatA27{PR`3Cko4P4!aYmO!}-2e4Oc?hZw3YHgSlV=Ud~DX$kgI8 zh1z;I5FS6)-6SsW6T`{%W6AiefflBM71)P05KmM!Hxn$CZ)jIX2)48E<3d68=y8wP z+`4Yn_&3d>h`Z$Id@6PTC=(cRXTO=jOaC$E$>as~hx=5VZyF9y7%uWwoLK5(#o-dgtyZr48cPT8)x4)P*ywUs^2 z4(idMl9l>K8mn(gAk_Tzk&(34j`ZE#^abODXb?KMjB$Ll=YZSX&v#eRH!t*so$J|x z7Hg3A@q#Tj0Mn2jnw!Gi>uOZVf5gh=amb@!2Ql*=3`CQ!1-BF39o%B`+9wN4PyZ`Y`9^@@ z$_ZSumDr&)o?U7lA`aFp&{@xJ2qjYg<^O?Y3;^2R3Prr`$of@+T+0m=aummPTO`tTMKcu5gkBbOi*8}uV@Veg( zfP52H2YMJNeA8C9lsK=(6gPOR!lcP?d~MB@y>SlM2TPF^TiSZb@qZomWfGmN&+NM( z+@tcGd4px}z$0pE)#rbvX*r|3?_{l0`rQC(fUj^CF|}*0W`^U}20lo1_U2>Ez6RZK z@vRgw>hZou=|d*M7aB=$TB8H+WX*vjCkiGGs20Se`oN!5m^ze2(fw5H_88O$RM1`g ziiu(SU2iU2kh_nVVuU+-vbq=|W2K0OvLC z*gFq`x#C2F-$L*4pDbwtUCt05)C}a-j8iS|dFuY9xVOuVWxou{=_ZAcKRi@R`^H)H zNEUkp<(~L(M19l3Vk>*eFjW|20_t`)ZDU{)as7hR>^*e=G$5mEy3a-?(;+N}51%d@ zley<~2*Jo<0Q9E@2z;hx(mn8_3{!pl3FcSDpQMKQcra5l>$hm-n0w?Uy=yR-*OI6( z%Ix+0&a4av79mSiY6d_KJ!gwPr5YWn+J#rlKVRSOOzC-cpW}w2KWCm1P_X*2G+VrO zT`!9c17zUo)xNFw5dAYUK;hGN71x6Ze93C;ChvUQp!1-@JSO*dA9rhl$5qDumOY5w zoZFzp4!XPP4@;^UcR+d8Js$ZiOAhlyw97)@&BexKLIC|^bS(gfck1+Xu5G%}*3uGo zVB9Y6rF-|z_aU7=^rL1iMbfRo75-T=9mbsj#&c8t_ndz{$e^6ClB^r@2p~fb`6HV^ zQP&it)Eu$om|^S8L(~7{BFeE{5#s%G-8n|o63hrs2`LS7d-_908@6CKP_Dh?v6Uq+ zP)IrwPK7DR5C1IphZ1!0MkNbCDXPX`n!p@4YWYz_&W`2NbAP9rnQJ{e673M1pT}hX z`36N#o~m>LGkoznKoi$+V)K=n+?Qy5gaz{)^cy7e@O>Ug`r8k;k+zY)O9L0?Tm0&u z)M*SCnrpwTD_$KAMts1Y)t4ms4211>#aXCk>njX4yfZQ93%+Objv{(y6*Umc2njb4 za@Sft;;NWQ8Yfw3Oqv`)rLR!)Q(M#T%zb3l^ty}7wjzD4t0KFWSb$LYK4+ev6#~u} zuWj%p8;^v^qdLCqmPYl9C|cw6^ffY<1lvI`ud`>N(2n2`f8C{DnIXt1EE-Kmy?f5P 
z61p$7@>iaaj{_%*@kOJ5<2P1f1F|T5$$fD4 ztJL$e#vfNCyB)hX8yj3V=FFZyl+tjPkE-mvceMBHT1!cJ*SretWCynHL-J&vg|hf| zVV?A`YVPv#gpDVmSi9^TT7j4Aj>@46{QCH{vzlY=)Ld(=(}3c?@k^fLGYHlri-j(f z^aN)aF3n4uT9O!IN%qI{niATIu$zig*$c%0iA^sXL-cpl0Yh^$1}saGw=Kqe%C*x_EHDDOQCqzrJ&(jL=qmH3uAh< z#5g>@^pzLiFPQZ=!;*er{h~Eh%VVS#8@lGc2jWj@gE8(Ky}&tCg*<(Edv+&Hn`N`_ zoIZ4CM0CDD`a;Q1s?e6vMPV>$(>J<3_FJ}zFA~l?7bnB3F#Hn;ekhj6N97Fsv_E?V z>)4|fC}LNDzwVV`zJB*PI?vY+=eZWz?KC^royj_mWVLs(^GFSEia&|A_nRycG5fm8 znm6anD&r93s#??%Jj`%mX|TKwM~fS?1@jr~SQI%eI}Q#cl?RKL{hCakp9AIdOaBf6 z*w`$2({)o&T`s))f3*oa9aIg+x;6oga}5j!v~q0z@G?5R5uW5B4t)J?(37hTxZE=~ z;xe>=xjfSs&?Ij_urwPrEHJ=FL9+v4qhrZEm!wC0#QY{ZVp$?`{IqWSs8U|*XN&Dz z1$XGgrtteL>j32dj_OLjh&YR4&F=&3qrPQPpKsmp;@D#9_V-&A9t{e20WWNt_^Apu zmckLsx=jRaLTFc+)1uGIF(9%?ryu|uw5U>#y4>kVWr-8vQP^vC&TRPz!j-8$kV7XOmGiqY_HV3q#{=6Qdw z>X5c8S_eI1uprivqUI8ruXzo_?|+^V%DKs!f+BO@M%x|z#tz!5329v+U|72MsE1>( z95Xb`F6Rp3H$?v4H?LN_exHF=K`V8Z%6<~91aU$Ne){Cn@K4|1a z{fSN>gw}b8Q@V3{#qMdIN%NC|`RHb!DLup1)o*2qqdDu+iQyFtPh?GkS`3>UGnWwa z-cm8O!x+|f%R6dgFdCToT$0+s!ok{?kj@_-Z?1DieH&)pc{b?p4If^OMZ?1WG#Ly> z>~B`Q4qqP^@b7krffz5Wdwo!-?t<@TCLO_|kWT@!R?C3>8qxA-S35+PZ}va>7vu z1o}jFY3wLd5EsAk1mcdzK`|1wTRTpz$ib|-uI`BRZtTD5AbTF+w-ZL}x z{M32}-6(NtWuqj3CRf&}U^uD|TL9$^KX#ZbsSkWR59hg8S}^~Y#L+zRe&Rzb(kxOx zd_8oZ(Ip8}Ror95PVoesdMt6JX9>c{oyLykgtN==up=YQphSe4Pbm%UE zN+j<%F_x74%XQ$3tI|I*Ar$Ad_6r;=99#zrBniO_x;(+`ppMn^c0KcX+d8SKoufIk z427>^Dv@+2snmox!JzcG+jc@sO z%tp6ZNG8JWZ~h*sF#!+@%7ZXlL8o)55D@oxtp!Z8B4I_4Gx5B?^LFsBH>J^gKXGVp zq9{J?$3&OzpQl9*uQ$}ycG)j$l4|N&g9FbdST3tPNG-8LM1UIG2D**4IE-j{Qn}B~ z^SpYK=Y?dh`O#0%HE{EwZ^B{mE~Z(vYr)R+YYvB}$6vBNKx{uNH=ZWl^?f_#>r-sJ zh^aBs>y!91F51OJ7BA@>0y~JiwfEV6NdiR&SL_7eb8|^Hyi^~|2JWKofza#hri7OZ z?y`MRYW$*PWg2{IQz(}{-xvL2MKu+(esKL3{Vc9;30js?^~d&3Tz+Rzq4MWF3@ZaZ zha2`pbgG?bJJzXPciU7$AayDqF};Qlk%CtB(B5?$HoX}Bcs)JYdO*y@_?+u%y4Kfz zq~$rN_WuohqZdu^=(mQNS-k9?F>P6^+vCz@)>>G}ENASTF77P*zaJ~n7^XzN-|O{% zbk4&mC@RXUdPaZCT#1*&cZPk<6S7F)BS7&EYr_$yKgN71@)fSK=eSTVi{Q&$(F;c6 zqS>hZsr?FKuh=%{qp3qycku>}d|aQ^L6-Z-t=XgRElCxP;TCWi(@M{L5pfyX$>JXi zR1k(bJI^XjOm4>Kggnq!y78RJfXr~;m$2PoIPSE&Zo^^=I3HiVtZ2)oCo?c0BLfJ; z%XC)y5~W}*9A&KgPkZ{3x|B-!`npz^tbxMufR;>kC$I>{eSXLyJg?3r8 zRseZGlOVMccUH;;Tfa|pLT`Al3YhplODWGPAMk~KA+h|;B&$J^22;H@CWN%Qju+In z)c<%>Iu=kwohl63mnJMUTw+xu2zeQs06W6&tqAQXG6PCex-oOnC@mvJ@f+)jCK`1b z86uhS^A&T%>G5ee>oc!v4TWcx_~VYaxL@+2lY3U|qiG~4y$Uvgjl9R(Xc0_H;)UN|tqTTbCu4y7 z<63HWIj&n>aY&h%k1-%A_Q?PskP}Q$!+Lh@-@o!{uYbGB?Ifhnkz1io#`DDblemkp z`=dt0T>{UO)jWI(w)u5hhb3QB+3qffd~@#WEWV?8(gA!};h9x>AdR!3ThZVsy8MH# zUol|mby7Knxyt)0(=xE<<;G5ASgVZ5E9RMhlqHrJ)k($Ih|LW}a_RMS-ie92$*02f zsO7kG>6V)XEt7ch?m(-(`nV%63%q%qew@8> z-%`jnQHSoaEKVEK{&(8G{bXFDSnlH$$Ur5mvp}nv_gw7X1W`~7{V4H%W&lXK9XvvE zf2bRS@9m|4G{@?0Pv@CW3OhJCZX(A^62>SooWWwXls*YAIw%TE5tm0-lKInhGS}!? 
zP-eOjK9s!-k-uv-usPp5Er)butw4VwF&snpZ(Kp`5D$OW;dH)AfcN%2Rbp&HhgtV| zB9v7`)Qc0+B5;q63JN*iBGdJQ>5`rj_q?6ki~3}=4#*Fq{A*9;8GC+&JpZ9L=+by( zP*A_{K6P&@SsHiYCt$$)qN>FNAG#Jx56@Wvm4ZV_f`4Dz8aduF3RFGdh*!ChW5cmF z6W~`oSO($n<-pMp7|$_?EKgRj;Nra3?=O?@t}{SZg8g)_w9;Z(e6^?DG%n`$&fZS3 zy6-hadJ)aiPutk}HxPJz%zw=$mLiNE+N|~YVLE!DEbT#bZxL(XEVe)|0nQiRV>|Cd1qJ2Y-i50uDVKiyV6r;E~VN>644I; zfyg>@audqn%bf$;_J3d&{elKU@8sne5dMyO!~CJuMvq0nMP*z7MURftY%GvnhOx+M zZxH@D2I{rFRG7E*!Ap>U5}Jb)>SSwIWM3Q zuAPhAtXl8_a|*1MKK$+4x{vCeuE2((+0B%-%zd434uhrZ>i$E5;>Ig8xTpuz1;4dr zt)&kKwd}Ewag&ONSquc5)p91Y>Nln^Sfr1MWMDRkM!_K`?oC{K=QZg^O<6SN^#o3g z*)^HkdTwYW(G--~P!ZvEz#jk04OKN|d zp{x}L?f^Z=6cOAnadN+xR=@xLV|Fa>b~AI2oLl*rT=;=dzI{rfs9Hj3Hq220b^C(q zECnyRIvi%?VJVNdbI6_CE5&s9$W?DdHam1E(f=W*?_>SI%xWghW%L)S>@U{-2EIj; zx`hu(%57YqQ`e^D|IwPuiinbbWX$q6aZDJ3$QsE6 ziJ(i#L)06aj+Sdn6iPgzhsG7rfCV>RJnHIcZe( z69|f{J^qMcugm7L&&33SJgX9~o14E$2vNEU`^jb%!@oj!iDTqnNk^K5*vcuHDV3yo zR;hVbS9^c-+>n=u>X~{~z$&ajSB-ej%1ofaQnfF#w!Eqr`8d_Pl^*>U%=AjZd$rX? zBa&Z*=icq#te$Rjb3ZSqr7KnS6+!d#bt06s3e8sD-u?{5XZ8bLs}&f{q4sMavP5bA z!~;ql9d+O}EuA8sY4v^j4t*7t&}csA{3b5snlU~q=gX5Aog0IC6FQ+GOyE>G^ljrnNol<>P*}%}XRqFOkr9RH#SoWRS{Q|2k zcfIbaa=omWt7}j1nZUF87cqFYWjD~|1q>_PMPkDaqn8-qCX8KjK6= z#RRTccwc}NmO(emPxw0=nO%B6OUp@j3~8&u7Bd$1NetKbM=jCED9~GrPeb~bDmraz z{*U271^@xmk&Yh!x(lBG+$TFQ4x~tz0nlG7wfvCFSyl3{{&t;D?r573MW}{RwHrO* z!lAyxE5me3wlwH)i2;6}6ypi)sVSfGEL+Ue#$bb1v2K?|Vf>m8E~gV#RlYA*_WTAa z!q>8?ET}k_?`@b8cbd7yqyIek>vqO?o_W4`=UE~g?HxUZ!Yy?mncsEif5|i7@d?$D zT`z5-KwlR+p3XiiANGCU=giDYFVLftl1L*{>nBtAdn~qe>t?fqhkx42=0k~!36U|F zgD@J_mSPaOFYEBfDJzqiF$VQCzWehcU(1Lf^g}sCR^%dm!-PEex2>pTcH!)Q&N=fv zmANmA-s>x0U;Q}Ts~@tT_41oja1tV16&zOXK};ww1(H!BJ7wShydHk0=u|-*vG!2b zGtNrn|AOW$M;(Wbro`@I4=f`8=zFcP)O^eFsQFx6{dclvwDg{Ban31>?WwHs&UbIu z(L8>M@fE1Byvd3aJwa%Eyi0Ogr3XUUXfbu!T4<7AANA#Efy_kTzmi6AbU0UVf za;k2NCHt(Yaa21%JNG%=U4d^Y1VS|rp99JS(EF#(Www$cif3}v=4fj!=b{TeZ4s7@ zhyZ_I4yZd7!^uKOP`+*5Z`%*IUxblSvjo5T|LvXJ62<`UR-|2EYmy(cunhNYBHhGC z8G14yO+WN4g7F&Gc{|Yn!SkW{;HsdH$26+NU_QcY_O|uA$E{>apo`V!;-n+NAMZM< z6BCTMVsYihlP8oBQcd;T8P(~u^KaM;X%6D5t|u>t^_O4Iq@=oFz&px8%K^qmw^l!F zY9SWnZl1C3<@U;afzl8V)OyjDo!AFK+LQ`k4C+ofIcJQ#h(TP%4|>}xeEgQ_m1>2qr#m`(3rfLjb!-hN6x02v^&=R0p%RoOqjKz zs^y@O>FF((&iXqL9>^h=z;rijx5I*$LX&Y*TvREu2Q2=Kwm$pgBG=gF2;sRs!+u7` zx8z%Kva*>s`RwCHeXVecXD*{=q`OwYVZGp>U4^@1|~J~wQL@w zJNB>(#_Y=`YP%?w>I8JG{Ia`=v>`^2#|bfbHH52x-qMMxEYb`ZO?mUwX9M8aAoCH& zx5$c?D0#7$?Q3pU6ZGA_}|+(wYKHV_ONaH&_W!1M2omQ{Yw74kP}QW*5fb`iYXf>eF)%si)QQ2eWx4Y* zgBkYoGih23FD*{)hF$(DEiT+5CT$;3wZiH4{sUr&Lyk$Ej9Hg=%YOo{dtgf_XIBIY z+m@)xFM_~&dT8nTUnx6Yo*|o1Y|Qo;fA!`RMPmgMY1>PB zjmsU)Fv&I7vA;qk%~U4U@sI3?&lYjI<@As2S8`J^;#Bc3Gy{=jg1(y6F}lc_{x*H^ zi;%Kz-zg9{H?$KzspE_;m@0@l@ZuGH!T3(2g_7-!>sS$>t8)}wB5>lnVEjcIsR2x+ zY}u2)pQ_W4hpoOC!YzhdbK`M$2Kj2S_bUz8#T#A7A{-6tC#Vih1d@@4n%!bol9@C)hTI6)4S_`9{M z`k}2&GV71!`GdJ3`Fcw+^rr|69#|5fdg!51A@4jGxa9~}m$SgD=cc;^I)0^ZP}lNH z_2=2RzfVB+5A+g+m_am;6=#dN^D{#e=Lj5Q;%PIdv9xxWo-98{S0~w}Vc~Cx*_ik6 zGr;p^bd;^OM&?+8v-(t;d7l?g;!Z55 z+`z|I1#x|?sm--0z)pHJLhy`OMWT$Rfms51Ld+141;OjC+QGLDI4dHb z;kC-`Fn^3**lI2ONlka5^Q&M5mS4#mG#1Rx;4AfJIjO@-xPJk00jw}BESB{axQi;5eiZ<~n64;SW ziS@0hg2D3zQs^$*4Rw(Wh-De#(ov<_SH7vC1q%xGfzNFucg!xtx>)P^@ncZ$wVt$y zVm;8RUh=1Ak+bCTcX=9Yui<7Dt+C~_ctGFa+Ij{|8255ggy|*!ata~QOy@igSOVRM zEH>$j$6Md5a{XErWIyiaorBA8o0aWq|H$|WKb@g9fWKB=1*h0fqU8H+<8WiM;jc8M zah+)g85=v&UP|-b9V6bt8O7hW)!T`M?=q#rg2ri^ZI_r3>npxM1Nhs?EmE?gk%}%% zc(v7btYf>syXw&dZt+!57Er=%P#V@C>0yy8<^T*n6GBo7-p zt~KO=v*orW^_4I{Yo@?15kDh{ET@MCq_Pj%_p2VgvGbs7 z%2`@z7Ki7h>td5F1?uryz@vRr^%MDSjAynL^} z-cvvF(%6glS+S=@pZ}2+I-T3A_Ra6flh)LqT8LaI-npx+>>@{EFHR)SZ~f@$u$=g1 
zE}wsNi3Cmii@K!ongAa?NF**1Q(ILBj;_OLLqF2()2ue@kr|&@hz`lEssSE;sQQ`L ze%H+~dgs`b#2)*;8t8@eKQbl&!A7kKK;6fut6;<#WY&bym@^{|7_;dn#N`XwT_>>8 z1vCA8$`|YJWR)2|%NM&{N`K)C+ESx3dGNaK4Bx6R5*N1XSy9yoLzEVQ(IbOjy5gh2 utIB0xpc7f=#g6~Th(^Tb!GB~wrzLfCui#@>Ppj;6EVCv9GtZ#^X8#9Qck|Z( From a97dccf54873aea0df3512ea1a3b7b39a71f64f0 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Tue, 7 May 2024 18:11:47 -0400 Subject: [PATCH 125/376] chore(issue-stream): Register feature flag for upcoming changes to issues stream events graph (#70471) [Project details](https://github.com/getsentry/sentry/issues/69691) * [SOA PR ](https://github.com/getsentry/sentry-options-automator/pull/1378) * ~~getSentry PR~~ (No longer necessary) --- src/sentry/conf/server.py | 2 ++ src/sentry/features/temporary.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 11431fef71a1a1..ea827d7a88bb24 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1637,6 +1637,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:issue-search-group-attributes-side-query": False, # Enable the updated empty state for issues "organizations:issue-stream-empty-state": False, + # Enable new events graph design for issue stream + "organizations:issue-stream-new-events-graph": False, # Enable issue stream performance improvements "organizations:issue-stream-performance": False, # Enabled latest adopted release filter for issue alerts diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 961ad5ab22a3c6..878487f44e3d39 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -95,6 +95,7 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:issue-search-allow-postgres-only-search", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:issue-search-group-attributes-side-query", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:issue-stream-empty-state", OrganizationFeature, FeatureHandlerStrategy.REMOTE) + manager.add("organizations:issue-stream-new-events-graph", OrganizationFeature, FeatureHandlerStrategy.OPTIONS) manager.add("organizations:issue-stream-performance", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:large-debug-files", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) manager.add("organizations:latest-adopted-release-filter", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From 7ea571e1c4ff6366499f2229dbcea141663edc7a Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 7 May 2024 15:17:08 -0700 Subject: [PATCH 126/376] chore(events): Move `PLACEHOLDER_EVENT_TITLES` to a neutral location (#70470) I tried to use `PLACEHOLDER_EVENT_TITLES` in an upcoming PR, and landed in a circular dependency with `event_manager.py`, where it currently lives. This moves it to `constants.py`, which solves the problem. 
---
 src/sentry/constants.py                     | 2 ++
 src/sentry/event_manager.py                 | 2 +-
 tests/sentry/event_manager/test_severity.py | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/sentry/constants.py b/src/sentry/constants.py
index d6b52b31058609..32b586a28cb53f 100644
--- a/src/sentry/constants.py
+++ b/src/sentry/constants.py
@@ -228,6 +228,8 @@ def get_all_languages() -> list[str]:
 DEFAULT_LOGGER_NAME = ""
 LOG_LEVELS_MAP = {v: k for k, v in LOG_LEVELS.items()}
 
+PLACEHOLDER_EVENT_TITLES = frozenset(["<untitled>", "<unknown>", "<unlabeled event>", "Error"])
+
 # Default alerting threshold values
 DEFAULT_ALERT_PROJECT_THRESHOLD = (500, 25)  # 500%, 25 events
 DEFAULT_ALERT_GROUP_THRESHOLD = (1000, 25)  # 1000%, 25 events
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 92874f4672ef27..f9c1e508cdd079 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -35,6 +35,7 @@
     DEFAULT_STORE_NORMALIZER_ARGS,
     LOG_LEVELS_MAP,
     MAX_TAG_VALUE_LENGTH,
+    PLACEHOLDER_EVENT_TITLES,
     DataCategory,
 )
 from sentry.culprit import generate_culprit
@@ -146,7 +147,6 @@
 # Timeout for cached group crash report counts
 CRASH_REPORT_TIMEOUT = 24 * 3600  # one day
 
-PLACEHOLDER_EVENT_TITLES = frozenset(["<untitled>", "<unknown>", "<unlabeled event>", "Error"])
 
 HIGH_SEVERITY_THRESHOLD = 0.1
 
diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py
index 6627026a62b4b3..0d2703a304be91 100644
--- a/tests/sentry/event_manager/test_severity.py
+++ b/tests/sentry/event_manager/test_severity.py
@@ -9,8 +9,8 @@
 from urllib3.exceptions import MaxRetryError
 
 from sentry import options
+from sentry.constants import PLACEHOLDER_EVENT_TITLES
 from sentry.event_manager import (
-    PLACEHOLDER_EVENT_TITLES,
     SEER_ERROR_COUNT_KEY,
     EventManager,
     _get_severity_score,

From 2489a4a1d76318be0ef8c33d749ebc80e928cc03 Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Tue, 7 May 2024 18:19:21 -0400
Subject: [PATCH 127/376] fix(cache): update docs link (#70475)

The link to the docs will be plural
---
 .../performance/landing/widgets/components/selectableList.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/static/app/views/performance/landing/widgets/components/selectableList.tsx b/static/app/views/performance/landing/widgets/components/selectableList.tsx
index 91ffd165970855..60ff231a34ff0f 100644
--- a/static/app/views/performance/landing/widgets/components/selectableList.tsx
+++ b/static/app/views/performance/landing/widgets/components/selectableList.tsx
@@ -132,7 +132,7 @@ export function HighestCacheMissRateTransactionsWidgetEmptyStateWarning() {
         'Transactions may be missing due to the filters above, a low sampling rate, or an error with instrumentation. Please see the [link] for more information.',
         {
           link: (
-            <ExternalLink href="https://docs.sentry.io/product/performance/cache/">
+            <ExternalLink href="https://docs.sentry.io/product/performance/caches/">
              {t('Cache module documentation')}
             </ExternalLink>
           ),

From 0b96de408f250aedc648bf0eb4544f85a45fc3bd Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Tue, 7 May 2024 15:53:42 -0700
Subject: [PATCH 128/376] chore(crons): Rename badly named api file (#70480)

I accidentally pluralised this when creating it, just fixing.
---
 src/sentry/api/urls.py                                          | 2 +-
 .../{project_monitors_details.py => project_monitor_details.py} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename src/sentry/monitors/endpoints/{project_monitors_details.py => project_monitor_details.py} (100%)

diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index 523e5921e4ea9f..1b2ce693f3bf7e 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -142,11 +142,11 @@
 from sentry.monitors.endpoints.project_monitor_checkin_index import (
     ProjectMonitorCheckInIndexEndpoint,
 )
+from sentry.monitors.endpoints.project_monitor_details import ProjectMonitorDetailsEndpoint
 from sentry.monitors.endpoints.project_monitor_environment_details import (
     ProjectMonitorEnvironmentDetailsEndpoint,
 )
 from sentry.monitors.endpoints.project_monitor_stats import ProjectMonitorStatsEndpoint
-from sentry.monitors.endpoints.project_monitors_details import ProjectMonitorDetailsEndpoint
 from sentry.replays.endpoints.organization_replay_count import OrganizationReplayCountEndpoint
 from sentry.replays.endpoints.organization_replay_details import OrganizationReplayDetailsEndpoint
 from sentry.replays.endpoints.organization_replay_events_meta import (
diff --git a/src/sentry/monitors/endpoints/project_monitors_details.py b/src/sentry/monitors/endpoints/project_monitor_details.py
similarity index 100%
rename from src/sentry/monitors/endpoints/project_monitors_details.py
rename to src/sentry/monitors/endpoints/project_monitor_details.py

From dbf524c7b6ce8e27ebb5d07c18a85cd12b76040e Mon Sep 17 00:00:00 2001
From: Seiji Chew <67301797+schew2381@users.noreply.github.com>
Date: Tue, 7 May 2024 15:58:22 -0700
Subject: [PATCH 129/376] fix(ui): Revert to using project release commit API
 (#70485)

In https://github.com/getsentry/sentry/pull/63860 we switched from a class
component to an FC and also switched from hitting
`ProjectReleaseCommitsEndpoint` to `OrganizationReleaseCommitsEndpoint`. The
latter doesn't respect the repo name or id in the query param, which makes it
so selecting a repo in the dropdown will show you commits from other repos
(and also messes up pagination, not showing you all commits).
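As a concrete sketch of the regression (the endpoint paths come from the diff below; the `repo_name` query parameter name is an assumption for illustration, and the FE URL-encodes the release version):

```python
release = "frontend@24.5.0"  # made-up release for illustration

# Org-scoped endpoint (what #63860 switched to): the repo filter in the query
# string is ignored, so commits from every repo come back interleaved and
# pagination cursors span all of them.
org_url = f"/organizations/org-slug/releases/{release}/commits/?repo_name=getsentry/sentry"

# Project-scoped endpoint (restored here): the repo filter is honored, so only
# the selected repo's commits are returned and paginated.
project_url = f"/projects/org-slug/project-slug/releases/{release}/commits/?repo_name=getsentry/sentry"
```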
Related to https://github.com/getsentry/sentry/issues/70411
---
 .../releases/detail/commitsAndFiles/commits.spec.tsx      | 8 ++++----
 .../app/views/releases/detail/commitsAndFiles/commits.tsx | 3 +--
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx b/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx
index bf0cd4e4cad276..c5da72862783be 100644
--- a/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx
+++ b/static/app/views/releases/detail/commitsAndFiles/commits.spec.tsx
@@ -68,7 +68,7 @@ describe('Commits', () => {
       body: repos,
     });
     MockApiClient.addMockResponse({
-      url: `/organizations/org-slug/releases/${encodeURIComponent(
+      url: `/projects/org-slug/project-slug/releases/${encodeURIComponent(
         release.version
       )}/commits/`,
       body: [],
@@ -85,7 +85,7 @@
       body: repos,
     });
     MockApiClient.addMockResponse({
-      url: `/organizations/org-slug/releases/${encodeURIComponent(
+      url: `/projects/org-slug/project-slug/releases/${encodeURIComponent(
         release.version
       )}/commits/`,
       body: [CommitFixture()],
@@ -112,7 +112,7 @@
       ],
     });
     MockApiClient.addMockResponse({
-      url: `/organizations/org-slug/releases/${encodeURIComponent(
+      url: `/projects/org-slug/project-slug/releases/${encodeURIComponent(
         release.version
       )}/commits/`,
       body: [CommitFixture()],
@@ -146,7 +146,7 @@
       body: [repos[0]!, otherRepo],
     });
     MockApiClient.addMockResponse({
-      url: `/organizations/org-slug/releases/${encodeURIComponent(
+      url: `/projects/org-slug/project-slug/releases/${encodeURIComponent(
         release.version
       )}/commits/`,
       body: [
diff --git a/static/app/views/releases/detail/commitsAndFiles/commits.tsx b/static/app/views/releases/detail/commitsAndFiles/commits.tsx
index 2377987dbd1c4f..c8c24dbae7d689 100644
--- a/static/app/views/releases/detail/commitsAndFiles/commits.tsx
+++ b/static/app/views/releases/detail/commitsAndFiles/commits.tsx
@@ -49,7 +49,7 @@ function Commits({activeReleaseRepo, releaseRepos, projectSlug}: CommitsProps) {
     getResponseHeader,
   } = useApiQuery<Commit[]>(
     [
-      `/organizations/${organization.slug}/releases/${encodeURIComponent(
+      `/projects/${organization.slug}/${projectSlug}/releases/${encodeURIComponent(
         params.release
      )}/commits/`,
       {query},
@@ -58,7 +58,6 @@ function Commits({activeReleaseRepo, releaseRepos, projectSlug}: CommitsProps) {
       staleTime: Infinity,
     }
   );
-
   const commitsByRepository = getCommitsByRepository(commitList);
   const reposToRender = getReposToRender(Object.keys(commitsByRepository));
   const activeRepoName: string | undefined = activeReleaseRepo

From e624ee9e7a5b59587a421770956245caed0d9f44 Mon Sep 17 00:00:00 2001
From: Matt Duncan <14761+mrduncan@users.noreply.github.com>
Date: Tue, 7 May 2024 16:17:45 -0700
Subject: [PATCH 130/376] chore(issues): Opt in already passing issues files to
 stronger typing (#69828)

`sentry.issues.*` and `tests.sentry.issues.*` are not close to passing but in
the meantime we can get incremental benefits and prevent regressions by opting
in modules which are already passing. #69374 includes a bit more detail and
outlines additional fixes we can follow up this change with.
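For context on the mechanism: the module list in the diff below feeds a per-module mypy override in `pyproject.toml`, roughly shaped like this sketch. The strictness flags shown are standard mypy options, but which ones Sentry's stricter block actually sets is assumed here, not copied from the file:

```toml
# Hypothetical sketch of the stronger-typing opt-in stanza.
[[tool.mypy.overrides]]
module = [
    "sentry.issues.ingest",            # modules opted in by this patch
    "tests.sentry.issues.test_ingest",
]
disallow_untyped_defs = true   # every function must be fully annotated
disallow_any_generics = true   # bare `list`/`dict` annotations are rejected
```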
---
 pyproject.toml | 39 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index c161eb50a76477..2cc68fc90ef298 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -567,7 +567,32 @@ module = [
     "sentry.buffer.base",
     "sentry.buffer.redis",
     "sentry.eventstore.reprocessing.redis",
+    "sentry.issues",
+    "sentry.issues.analytics",
+    "sentry.issues.apps",
+    "sentry.issues.constants",
+    "sentry.issues.endpoints",
+    "sentry.issues.endpoints.group_events",
+    "sentry.issues.endpoints.project_stacktrace_link",
+    "sentry.issues.escalating_group_forecast",
+    "sentry.issues.escalating_issues_alg",
+    "sentry.issues.forecasts",
+    "sentry.issues.ignored",
+    "sentry.issues.ingest",
+    "sentry.issues.issue_occurrence",
+    "sentry.issues.json_schemas",
+    "sentry.issues.merge",
+    "sentry.issues.ongoing",
+    "sentry.issues.priority",
+    "sentry.issues.producer",
+    "sentry.issues.query",
+    "sentry.issues.receivers",
     "sentry.issues.related.*",
+    "sentry.issues.run",
+    "sentry.issues.status_change",
+    "sentry.issues.status_change_consumer",
+    "sentry.issues.status_change_message",
+    "sentry.issues.update_inbox",
     "sentry.lang.java.processing",
     "sentry.llm.*",
     "sentry.migrations.*",
@@ -612,6 +637,20 @@ module = [
     "sentry_plugins.base",
     "tests.sentry.api.endpoints.issues.*",
     "tests.sentry.grouping.test_fingerprinting",
+    "tests.sentry.issues",
+    "tests.sentry.issues.endpoints",
+    "tests.sentry.issues.test_escalating_issues_alg",
+    "tests.sentry.issues.test_group_attributes_dataset",
+    "tests.sentry.issues.test_grouptype",
+    "tests.sentry.issues.test_ignored",
+    "tests.sentry.issues.test_ingest",
+    "tests.sentry.issues.test_issue_occurrence",
+    "tests.sentry.issues.test_json_schemas",
+    "tests.sentry.issues.test_merge",
+    "tests.sentry.issues.test_ongoing",
+    "tests.sentry.issues.test_search_issues_dataset",
+    "tests.sentry.issues.test_status_change",
+    "tests.sentry.issues.test_update_inbox",
     "tests.sentry.relay.config.test_metric_extraction",
     "tests.sentry.tasks.test_on_demand_metrics",
     "tools.*",

From 173b69012db4737336284ebb4c75adbf08704e70 Mon Sep 17 00:00:00 2001
From: Raj Joshi
Date: Tue, 7 May 2024 16:32:23 -0700
Subject: [PATCH 131/376] fix(chartcuterie): Added Visual Map Field for
 Endpoint Regression (#70477)

There is a mismatch in the way we build the EChart options object in our FE
code and how Chartcuterie handles it. In our FE
[code](https://github.com/getsentry/sentry/blob/master/static/app/components/events/eventStatisticalDetector/breakpointChartOptions.tsx#L89-L104),
we wrap the visualMap object in an extra `options` key, which allows us to
maintain the hierarchy for styling. However, Chartcuterie cannot handle the
wrapped object, so when we pass the service the options, we unwrap it.

I also created a modifier function to adjust chart options specifically for
Slack and removed the legend icon there.
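Concretely, the mismatch is about where `visualMap` lives in the options object. A simplified sketch of the two shapes (field contents are illustrative; the key names come from the diff below):

```typescript
// Shape the FE helper produces: visualMap nested under `options`, which the
// browser chart accepts and which preserves the styling hierarchy.
const feChartOptions = {
  grid: {left: '40', right: '40'},
  options: {visualMap: {show: false /* pieces, colors, etc. */}},
};

// Shape Chartcuterie can render: the same visualMap hoisted to the top level,
// which is what `visualMap: modifiedOptions.options?.visualMap` does below.
const chartcuterieOptions = {
  grid: {left: '40', right: '40'},
  visualMap: {show: false /* same object, unwrapped */},
};
```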
![example2](https://github.com/getsentry/sentry/assets/33237075/2ecdf0db-3abd-4245-a426-b371c6a2fd98)
---
 static/app/chartcuterie/performance.tsx | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/static/app/chartcuterie/performance.tsx b/static/app/chartcuterie/performance.tsx
index a4d01c31df2c2f..c17e86f12ac868 100644
--- a/static/app/chartcuterie/performance.tsx
+++ b/static/app/chartcuterie/performance.tsx
@@ -1,3 +1,4 @@
+import type {LineChartProps} from 'sentry/components/charts/lineChart';
 import {transformToLineSeries} from 'sentry/components/charts/lineChart';
 import getBreakpointChartOptionsFromData, {
   type EventBreakpointChartData,
@@ -10,17 +11,31 @@ import {ChartType} from './types';
 
 export const performanceCharts: RenderDescriptor<ChartType>[] = [];
 
+function modifyOptionsForSlack(options: Omit<LineChartProps, 'series'>) {
+  options.legend = options.legend || {};
+  options.legend.icon = 'none';
+
+  return {
+    ...options,
+    grid: slackChartDefaults.grid,
+    visualMap: options.options?.visualMap,
+  };
+}
+
 performanceCharts.push({
   key: ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION,
   getOption: (data: EventBreakpointChartData) => {
     const {chartOptions, series} = getBreakpointChartOptionsFromData(data, theme);
     const transformedSeries = transformToLineSeries({series});
+    const modifiedOptions = modifyOptionsForSlack(chartOptions);
 
     return {
-      ...chartOptions,
+      ...modifiedOptions,
+      backgroundColor: theme.background,
       series: transformedSeries,
       grid: slackChartDefaults.grid,
+      visualMap: modifiedOptions.options?.visualMap,
     };
   },
   ...slackChartSize,

From 01160aa3cac4b8a17be50a92fa6788d6ab76c3c7 Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Tue, 7 May 2024 16:41:32 -0700
Subject: [PATCH 132/376] ref(seer-grouping): Switch to using `hash` and
 `parent_hash` from `group_hash` and `parent_group_hash` (#70383)

As we've been thinking about the switch from sending and receiving group ids
when communicating with Seer to doing so with hashes, all along we've been
talking about those hashes as "group hashes." In truth, though, hash values
are based on the data in a particular event (not the group overall), and
indeed, that's how we're using them in Seer - pairing up hashes not with what
group they're in but with which event data they represent.

There _is_ a pairing of groups and hashes - on the Sentry side, in the form of
the `GroupHash` table - but entries from that table aren't what we're using
with Seer. With Seer, we only care about the "hash" part of `GroupHash`.

So, both for accuracy and so as to be able to differentiate in Seer-related
Sentry code between hashes (hex strings) and grouphashes (association table
records), we're switching from using `group_hash` and `parent_group_hash` to
using `hash` and `parent_hash`. This PR makes the change on the Sentry side.
Fortunately, nothing in Seer is yet relying on hashes, so as long as we wait
for this to go live, we can then add hash support on the Seer side using the
new names from the get-go.
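To make the hash/grouphash distinction concrete, a minimal sketch (the hash value and project id are made up; the lookup mirrors the `from_raw` change in the diff that follows):

```python
from sentry.models.grouphash import GroupHash

project_id = 1  # illustrative

# A "hash" is just the hex digest computed from one event's data -- the only
# thing exchanged with Seer under the new `hash`/`parent_hash` names.
parent_hash = "c0ffee254729296a45a3885639ac7e10"  # made-up value

# A "grouphash" is the Sentry-side association row pairing such a hash with a
# group; resolving a Seer-returned `parent_hash` back to a group goes through it.
parent_grouphash = (
    GroupHash.objects.filter(project_id=project_id, hash=parent_hash)
    .exclude(state=GroupHash.State.LOCKED_IN_MIGRATION)
    .first()
)
parent_group = parent_grouphash.group if parent_grouphash else None
```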
--- .../group_similar_issues_embeddings.py | 4 +- src/sentry/seer/utils.py | 14 +++--- .../test_group_similar_issues_embeddings.py | 48 +++++++++---------- tests/sentry/seer/test_utils.py | 20 ++++---- 4 files changed, 43 insertions(+), 43 deletions(-) diff --git a/src/sentry/api/endpoints/group_similar_issues_embeddings.py b/src/sentry/api/endpoints/group_similar_issues_embeddings.py index 9e9cffd6960a5e..aa00efb555a982 100644 --- a/src/sentry/api/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/api/endpoints/group_similar_issues_embeddings.py @@ -162,7 +162,7 @@ def get(self, request: Request, group) -> Response: similar_issues_params: SimilarIssuesEmbeddingsRequest = { "group_id": group.id, - "group_hash": latest_event.get_primary_hash(), + "hash": latest_event.get_primary_hash(), "project_id": group.project.id, "stacktrace": stacktrace_string, "message": group.message, @@ -184,7 +184,7 @@ def get(self, request: Request, group) -> Response: organization_id=group.organization.id, project_id=group.project.id, group_id=group.id, - group_hash=latest_event.get_primary_hash(), + hash=latest_event.get_primary_hash(), count_over_threshold=len( [ result.stacktrace_distance diff --git a/src/sentry/seer/utils.py b/src/sentry/seer/utils.py index a015328cf37188..55a957609d6a12 100644 --- a/src/sentry/seer/utils.py +++ b/src/sentry/seer/utils.py @@ -102,7 +102,7 @@ class SimilarIssuesEmbeddingsRequest(TypedDict): k: NotRequired[int] # how many neighbors to find threshold: NotRequired[float] group_id: NotRequired[int] # TODO: Remove this once we stop sending it to seer - group_hash: NotRequired[str] # TODO: Make this required once id -> hash change is done + hash: NotRequired[str] # TODO: Make this required once id -> hash change is done class RawSeerSimilarIssueData(TypedDict): @@ -110,7 +110,7 @@ class RawSeerSimilarIssueData(TypedDict): message_distance: float should_group: bool parent_group_id: NotRequired[int] # TODO: Remove this once seer stops sending it - parent_group_hash: NotRequired[str] # TODO: Make this required once id -> hash change is done + parent_hash: NotRequired[str] # TODO: Make this required once id -> hash change is done class SimilarIssuesEmbeddingsResponse(TypedDict): @@ -125,7 +125,7 @@ class SeerSimilarIssueData: should_group: bool parent_group_id: int # TODO: See if we end up needing the hash here - parent_group_hash: str | None = None + parent_hash: str | None = None @classmethod def from_raw(cls, project_id: int, raw_similar_issue_data: RawSeerSimilarIssueData) -> Self: @@ -141,12 +141,12 @@ def from_raw(cls, project_id: int, raw_similar_issue_data: RawSeerSimilarIssueDa """ similar_issue_data = raw_similar_issue_data - parent_group_hash = raw_similar_issue_data.get("parent_group_hash") + parent_hash = raw_similar_issue_data.get("parent_hash") parent_group_id = raw_similar_issue_data.get("parent_group_id") - if not parent_group_id and not parent_group_hash: + if not parent_group_id and not parent_hash: raise IncompleteSeerDataError( - "Seer similar issues response missing both `parent_group_id` and `parent_group_hash`" + "Seer similar issues response missing both `parent_group_id` and `parent_hash`" ) if parent_group_id: @@ -155,7 +155,7 @@ def from_raw(cls, project_id: int, raw_similar_issue_data: RawSeerSimilarIssueDa else: parent_grouphash = ( - GroupHash.objects.filter(project_id=project_id, hash=parent_group_hash) + GroupHash.objects.filter(project_id=project_id, hash=parent_hash) .exclude(state=GroupHash.State.LOCKED_IN_MIGRATION) .first() ) diff --git 
a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index f5e2c374a5775c..20062b81aac95e 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -662,14 +662,14 @@ def test_get_formatted_results(self): similar_issue_data_1 = SeerSimilarIssueData( message_distance=0.05, parent_group_id=NonNone(self.similar_event.group_id), - parent_group_hash=NonNone(self.similar_event.get_primary_hash()), + parent_hash=NonNone(self.similar_event.get_primary_hash()), should_group=True, stacktrace_distance=0.01, ) similar_issue_data_2 = SeerSimilarIssueData( message_distance=0.49, parent_group_id=NonNone(event_from_second_similar_group.group_id), - parent_group_hash=NonNone(event_from_second_similar_group.get_primary_hash()), + parent_hash=NonNone(event_from_second_similar_group.get_primary_hash()), should_group=False, stacktrace_distance=0.23, ) @@ -720,7 +720,7 @@ def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request): expected_seer_request_params = { "group_id": self.group.id, - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -748,7 +748,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request): "responses": [ { "message_distance": 0.05, - "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), + "parent_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, } @@ -767,7 +767,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request): expected_seer_request_params = { "group_id": self.group.id, - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -797,7 +797,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request) { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), - "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), + "parent_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, } @@ -816,7 +816,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request) expected_seer_request_params = { "group_id": self.group.id, - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -848,21 +848,21 @@ def test_multiple(self, mock_seer_request, mock_record): { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), - "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), + "parent_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.002, # Over threshold }, { "message_distance": 0.05, "parent_group_id": NonNone(over_threshold_group_event.group_id), - "parent_group_hash": NonNone(over_threshold_group_event.get_primary_hash()), + "parent_hash": NonNone(over_threshold_group_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.002, # Over threshold }, { "message_distance": 0.05, 
"parent_group_id": NonNone(under_threshold_group_event.group_id), - "parent_group_hash": NonNone(under_threshold_group_event.get_primary_hash()), + "parent_hash": NonNone(under_threshold_group_event.get_primary_hash()), "should_group": False, "stacktrace_distance": 0.05, # Under threshold }, @@ -891,7 +891,7 @@ def test_multiple(self, mock_seer_request, mock_record): organization_id=self.org.id, project_id=self.project.id, group_id=self.group.id, - group_hash=NonNone(self.event.get_primary_hash()), + hash=NonNone(self.event.get_primary_hash()), count_over_threshold=2, user_id=self.user.id, ) @@ -900,14 +900,14 @@ def test_multiple(self, mock_seer_request, mock_record): @mock.patch("sentry.seer.utils.logger") @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") def test_incomplete_return_data(self, mock_seer_request, mock_logger): - # Two suggested groups, one with valid data, one missing both parent group id and parent group hash. + # Two suggested groups, one with valid data, one missing both parent group id and parent hash. # We should log the second and return the first. seer_return_value: SimilarIssuesEmbeddingsResponse = { "responses": [ { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), - "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), + "parent_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, }, @@ -923,11 +923,11 @@ def test_incomplete_return_data(self, mock_seer_request, mock_logger): response = self.client.get(self.path) mock_logger.exception.assert_called_with( - "Seer similar issues response missing both `parent_group_id` and `parent_group_hash`", + "Seer similar issues response missing both `parent_group_id` and `parent_hash`", extra={ "request_params": { "group_id": NonNone(self.event.group_id), - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -958,14 +958,14 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), - "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), + "parent_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, }, { "message_distance": 0.05, "parent_group_id": 1121201212312012, # too high to be real - "parent_group_hash": "not a real hash", + "parent_hash": "not a real hash", "should_group": True, "stacktrace_distance": 0.01, }, @@ -979,7 +979,7 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): extra={ "request_params": { "group_id": NonNone(self.event.group_id), - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -987,7 +987,7 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): "raw_similar_issue_data": { "message_distance": 0.05, "parent_group_id": 1121201212312012, - "parent_group_hash": "not a real hash", + "parent_hash": "not a real hash", "should_group": True, "stacktrace_distance": 0.01, }, @@ -1010,7 +1010,7 @@ def test_empty_seer_return(self, mock_seer_request, mock_record): organization_id=self.org.id, project_id=self.project.id, group_id=self.group.id, - 
group_hash=NonNone(self.event.get_primary_hash()), + hash=NonNone(self.event.get_primary_hash()), count_over_threshold=0, user_id=self.user.id, ) @@ -1079,7 +1079,7 @@ def test_no_optional_params(self, mock_seer_request): { "message_distance": 0.05, "parent_group_id": NonNone(self.similar_event.group_id), - "parent_group_hash": NonNone(self.similar_event.get_primary_hash()), + "parent_hash": NonNone(self.similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, } @@ -1100,7 +1100,7 @@ def test_no_optional_params(self, mock_seer_request): body=json.dumps( { "group_id": self.group.id, - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -1124,7 +1124,7 @@ def test_no_optional_params(self, mock_seer_request): body=json.dumps( { "group_id": self.group.id, - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, @@ -1149,7 +1149,7 @@ def test_no_optional_params(self, mock_seer_request): body=json.dumps( { "group_id": self.group.id, - "group_hash": NonNone(self.event.get_primary_hash()), + "hash": NonNone(self.event.get_primary_hash()), "project_id": self.project.id, "stacktrace": EXPECTED_STACKTRACE_STRING, "message": self.group.message, diff --git a/tests/sentry/seer/test_utils.py b/tests/sentry/seer/test_utils.py index d0f98618b8cbb6..a6ef47c970e381 100644 --- a/tests/sentry/seer/test_utils.py +++ b/tests/sentry/seer/test_utils.py @@ -80,7 +80,7 @@ def test_simple_similar_issues_embeddings_only_group_id_returned( params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), - "group_hash": NonNone(event.get_primary_hash()), + "hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", @@ -97,7 +97,7 @@ def test_simple_similar_issues_embeddings_only_hash_returned(mock_seer_request, raw_similar_issue_data: RawSeerSimilarIssueData = { "message_distance": 0.05, - "parent_group_hash": NonNone(similar_event.get_primary_hash()), + "parent_hash": NonNone(similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, } @@ -107,7 +107,7 @@ def test_simple_similar_issues_embeddings_only_hash_returned(mock_seer_request, params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), - "group_hash": NonNone(event.get_primary_hash()), + "hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", @@ -134,7 +134,7 @@ def test_simple_similar_issues_embeddings_both_returned(mock_seer_request, defau raw_similar_issue_data: RawSeerSimilarIssueData = { "message_distance": 0.05, "parent_group_id": NonNone(similar_event.group_id), - "parent_group_hash": NonNone(similar_event.get_primary_hash()), + "parent_hash": NonNone(similar_event.get_primary_hash()), "should_group": True, "stacktrace_distance": 0.01, } @@ -144,7 +144,7 @@ def test_simple_similar_issues_embeddings_both_returned(mock_seer_request, defau params: SimilarIssuesEmbeddingsRequest = { "group_id": NonNone(event.group_id), - "group_hash": NonNone(event.get_primary_hash()), + "hash": NonNone(event.get_primary_hash()), "project_id": default_project.id, "stacktrace": "string", "message": "message", @@ -163,7 +163,7 @@ def 
test_empty_similar_issues_embeddings(mock_seer_request, default_project):
     params: SimilarIssuesEmbeddingsRequest = {
         "group_id": NonNone(event.group_id),
-        "group_hash": NonNone(event.get_primary_hash()),
+        "hash": NonNone(event.get_primary_hash()),
         "project_id": default_project.id,
         "stacktrace": "string",
         "message": "message",
@@ -192,7 +192,7 @@ def test_from_raw_only_parent_hash(default_project):
     similar_event = save_new_event({"message": "Dogs are great!"}, default_project)
     raw_similar_issue_data: RawSeerSimilarIssueData = {
         "message_distance": 0.05,
-        "parent_group_hash": NonNone(similar_event.get_primary_hash()),
+        "parent_hash": NonNone(similar_event.get_primary_hash()),
         "should_group": True,
         "stacktrace_distance": 0.01,
     }
@@ -216,7 +216,7 @@ def test_from_raw_parent_group_id_and_parent_hash(default_project):
     raw_similar_issue_data: RawSeerSimilarIssueData = {
         "message_distance": 0.05,
         "parent_group_id": NonNone(similar_event.group_id),
-        "parent_group_hash": NonNone(similar_event.get_primary_hash()),
+        "parent_hash": NonNone(similar_event.get_primary_hash()),
         "should_group": True,
         "stacktrace_distance": 0.01,
     }
@@ -230,7 +230,7 @@ def test_from_raw_missing_data(default_project):
     with pytest.raises(IncompleteSeerDataError):
         raw_similar_issue_data: RawSeerSimilarIssueData = {
-            # missing both `parent_group_id` and `parent_group_hash`
+            # missing both `parent_group_id` and `parent_hash`
             "message_distance": 0.05,
             "should_group": True,
             "stacktrace_distance": 0.01,
@@ -244,7 +244,7 @@ def test_from_raw_nonexistent_group(default_project):
     with pytest.raises(SimilarGroupNotFoundError):
         raw_similar_issue_data: RawSeerSimilarIssueData = {
             "parent_group_id": 1121201212312012,  # too high to be real
-            "parent_group_hash": "not a real hash",
+            "parent_hash": "not a real hash",
             "message_distance": 0.05,
             "should_group": True,
             "stacktrace_distance": 0.01,

From fc8b6665e6dc4ee848bd8187010971c71f23bf1d Mon Sep 17 00:00:00 2001
From: Ryan Hiebert
Date: Wed, 8 May 2024 02:33:09 -0500
Subject: [PATCH 133/376] Use last forwarded IP (#68884)

Well-behaved forwarders will append the IP they're forwarding for to an
existing list. In the most typical case, this means that only the last entry
can be trusted, since a spoofing request-maker controls the earlier ones. And
all this is assuming that the proxy itself is trusted.

https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-For#security_and_privacy_concerns

### Legal Boilerplate

Look, I get it. The entity doing business as "Sentry" was incorporated in the
State of Delaware in 2015 as Functional Software, Inc. and is gonna need some
rights from me in order to utilize my contributions in this here PR. So here's
the deal: I retain all rights, title and interest in and to my contributions,
and by keeping this boilerplate intact I confirm that Sentry can use, modify,
copy, and redistribute my contributions, under Sentry's choice of terms.

---
 src/sentry/middleware/proxy.py        | 4 ++--
 tests/sentry/middleware/test_proxy.py | 6 +++---
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/sentry/middleware/proxy.py b/src/sentry/middleware/proxy.py
index d3e1e254d7588a..1b423750826adb 100644
--- a/src/sentry/middleware/proxy.py
+++ b/src/sentry/middleware/proxy.py
@@ -29,7 +29,7 @@ def process_request(self, request: HttpRequest) -> None:
             pass
         else:
             # HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
-            # Take just the first one.
- real_ip = real_ip.split(",")[0].strip() + # Take the last one, from the last trusted forwarder. + real_ip = real_ip.split(",")[-1].strip() real_ip = self._remove_port_number(real_ip) request.META["REMOTE_ADDR"] = real_ip diff --git a/tests/sentry/middleware/test_proxy.py b/tests/sentry/middleware/test_proxy.py index c8c9db57e37608..314c90bb57b57a 100644 --- a/tests/sentry/middleware/test_proxy.py +++ b/tests/sentry/middleware/test_proxy.py @@ -18,9 +18,9 @@ class SetRemoteAddrFromForwardedForTestCase(TestCase): def test_ipv4(self): request = HttpRequest() - request.META["HTTP_X_FORWARDED_FOR"] = "8.8.8.8:80,8.8.4.4" + request.META["HTTP_X_FORWARDED_FOR"] = "8.8.8.8,8.8.4.4:80" self.middleware.process_request(request) - assert request.META["REMOTE_ADDR"] == "8.8.8.8" + assert request.META["REMOTE_ADDR"] == "8.8.4.4" def test_ipv4_whitespace(self): request = HttpRequest() @@ -32,7 +32,7 @@ def test_ipv6(self): request = HttpRequest() request.META["HTTP_X_FORWARDED_FOR"] = "2001:4860:4860::8888,2001:4860:4860::8844" self.middleware.process_request(request) - assert request.META["REMOTE_ADDR"] == "2001:4860:4860::8888" + assert request.META["REMOTE_ADDR"] == "2001:4860:4860::8844" test_region = Region( From 4623b5dc44cc4ca48e48f2c7811fdb3db5f1c536 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 8 May 2024 06:07:45 -0400 Subject: [PATCH 134/376] fix(trace-explorer): Date range narrowing condition is backwards (#70496) This was changing the end timestamp to be too narrow and missing some spans. --- src/sentry/api/endpoints/organization_traces.py | 7 ++++++- src/sentry/sentry_metrics/querying/samples_list.py | 6 +++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index 0cf68ac1655a97..c54be68c9228f9 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -357,12 +357,17 @@ def get_traces_matching_metric_conditions( return min_timestamp, max_timestamp, [], [] else: # No user queries so take the first N trace ids as our list + min_timestamp = snuba_params.end + max_timestamp = snuba_params.start + assert min_timestamp is not None + assert max_timestamp is not None + trace_ids = trace_ids[: self.limit] timestamps = timestamps[: self.limit] for timestamp in timestamps: if timestamp < min_timestamp: min_timestamp = timestamp - if timestamp < max_timestamp: + if timestamp > max_timestamp: max_timestamp = timestamp self.refine_params(min_timestamp, max_timestamp) diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py index d9d75bc266b2e7..2a12b2ddae4b4d 100644 --- a/src/sentry/sentry_metrics/querying/samples_list.py +++ b/src/sentry/sentry_metrics/querying/samples_list.py @@ -258,7 +258,7 @@ def get_matching_spans_from_traces( # This also means we cannot order by any columns or paginate. orderby=None, limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", 1), + limitby=("trace", max_spans_per_trace), ) trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) @@ -609,7 +609,7 @@ def get_matching_spans_from_traces( # This also means we cannot order by any columns or paginate. 
orderby=None, limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", 1), + limitby=("trace", max_spans_per_trace), ) trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) @@ -943,7 +943,7 @@ def get_matching_spans_from_traces( # This also means we cannot order by any columns or paginate. orderby=None, limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", 1), + limitby=("trace", max_spans_per_trace), ) trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) From 7512ed64a8abe1c1817731567475288b041f9fc9 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Wed, 8 May 2024 07:29:26 -0400 Subject: [PATCH 135/376] perf: use orjson in all middlewares (#70456) --- src/sentry/middleware/health.py | 4 ++-- src/sentry/middleware/integrations/parsers/github.py | 10 +++++----- src/sentry/middleware/integrations/parsers/gitlab.py | 7 ++++--- .../middleware/integrations/parsers/jira_server.py | 6 +++--- src/sentry/middleware/integrations/parsers/msteams.py | 6 +++--- src/sentry/middleware/integrations/parsers/slack.py | 6 +++--- src/sentry/middleware/integrations/parsers/vsts.py | 4 ++-- src/sentry/middleware/integrations/tasks.py | 8 +++----- src/sentry/middleware/ratelimit.py | 5 +++-- 9 files changed, 28 insertions(+), 28 deletions(-) diff --git a/src/sentry/middleware/health.py b/src/sentry/middleware/health.py index fe4ef7ea835ea1..6947b27104dbe9 100644 --- a/src/sentry/middleware/health.py +++ b/src/sentry/middleware/health.py @@ -1,5 +1,6 @@ import itertools +import orjson from django.http import HttpResponse from django.utils.deprecation import MiddlewareMixin from rest_framework.request import Request @@ -20,7 +21,6 @@ def process_request(self, request: Request): return HttpResponse("ok", content_type="text/plain") from sentry.status_checks import Problem, check_all - from sentry.utils import json threshold = Problem.threshold(Problem.SEVERITY_CRITICAL) results = { @@ -29,7 +29,7 @@ def process_request(self, request: Request): problems = list(itertools.chain.from_iterable(results.values())) return HttpResponse( - json.dumps( + orjson.dumps( { "problems": [str(p) for p in problems], "healthy": {type(check).__name__: not p for check, p in results.items()}, diff --git a/src/sentry/middleware/integrations/parsers/github.py b/src/sentry/middleware/integrations/parsers/github.py index 087eda9c8c99a4..baa14228f15be5 100644 --- a/src/sentry/middleware/integrations/parsers/github.py +++ b/src/sentry/middleware/integrations/parsers/github.py @@ -4,6 +4,7 @@ from collections.abc import Mapping from typing import Any +import orjson from django.http import HttpResponse from sentry.integrations.github.webhook import ( @@ -16,7 +17,6 @@ from sentry.models.outbox import WebhookProviderIdentifier from sentry.services.hybrid_cloud.util import control_silo_function from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders -from sentry.utils import json logger = logging.getLogger(__name__) @@ -36,8 +36,8 @@ def get_integration_from_request(self) -> Integration | None: if not self.is_json_request(): return None try: - event = json.loads(self.request.body.decode(encoding="utf-8")) - except json.JSONDecodeError: + event = orjson.loads(self.request.body) + except orjson.JSONDecodeError: return None external_id = self._get_external_id(event=event) if not external_id: @@ -49,8 +49,8 @@ def get_response(self): return self.get_response_from_control_silo() try: - event = json.loads(self.request.body.decode(encoding="utf-8")) - except json.JSONDecodeError: + 
event = orjson.loads(self.request.body) + except orjson.JSONDecodeError: return HttpResponse(status=400) if event.get("installation") and event.get("action") in {"created", "deleted"}: diff --git a/src/sentry/middleware/integrations/parsers/gitlab.py b/src/sentry/middleware/integrations/parsers/gitlab.py index 75ba93b39dfa38..9bd3169d64e2b2 100644 --- a/src/sentry/middleware/integrations/parsers/gitlab.py +++ b/src/sentry/middleware/integrations/parsers/gitlab.py @@ -4,6 +4,7 @@ from collections.abc import Mapping from typing import Any +import orjson from django.http.response import HttpResponseBase from django.urls import resolve @@ -16,7 +17,7 @@ from sentry.models.outbox import WebhookProviderIdentifier from sentry.services.hybrid_cloud.util import control_silo_function from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders -from sentry.utils import json, metrics +from sentry.utils import metrics logger = logging.getLogger(__name__) @@ -87,8 +88,8 @@ def get_response_from_gitlab_webhook(self): return self.get_default_missing_integration_response() try: - data = json.loads(self.request.body) - except ValueError: + data = orjson.loads(self.request.body) + except orjson.JSONDecodeError: data = {} return self.get_response_from_webhookpayload( diff --git a/src/sentry/middleware/integrations/parsers/jira_server.py b/src/sentry/middleware/integrations/parsers/jira_server.py index 9d54446814bf94..d2101165b3dbb5 100644 --- a/src/sentry/middleware/integrations/parsers/jira_server.py +++ b/src/sentry/middleware/integrations/parsers/jira_server.py @@ -4,6 +4,7 @@ from collections.abc import Mapping from typing import Any +import orjson from django.http import HttpResponse from sentry.integrations.jira_server.webhooks import ( @@ -12,7 +13,6 @@ ) from sentry.middleware.integrations.parsers.base import BaseRequestParser from sentry.models.outbox import WebhookProviderIdentifier -from sentry.utils import json logger = logging.getLogger(__name__) @@ -33,8 +33,8 @@ def get_response_from_issue_update_webhook(self): regions = self.get_regions_from_organizations(organizations=organizations) try: - data = json.loads(self.request.body) - except ValueError: + data = orjson.loads(self.request.body) + except orjson.JSONDecodeError: data = {} # We only process webhooks with changelogs diff --git a/src/sentry/middleware/integrations/parsers/msteams.py b/src/sentry/middleware/integrations/parsers/msteams.py index 4b98810893835f..27cd4469fa6dc5 100644 --- a/src/sentry/middleware/integrations/parsers/msteams.py +++ b/src/sentry/middleware/integrations/parsers/msteams.py @@ -5,6 +5,7 @@ from functools import cached_property from typing import Any +import orjson import sentry_sdk from django.http.response import HttpResponseBase @@ -20,7 +21,6 @@ from sentry.services.hybrid_cloud.util import control_silo_function from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders from sentry.types.region import Region, RegionResolutionError -from sentry.utils import json logger = logging.getLogger(__name__) @@ -37,8 +37,8 @@ class MsTeamsRequestParser(BaseRequestParser, MsTeamsWebhookMixin): def request_data(self): data = {} try: - data = json.loads(self.request.body.decode(encoding="utf-8")) - except Exception as err: + data = orjson.loads(self.request.body) + except orjson.JSONDecodeError as err: sentry_sdk.capture_exception(err) return data diff --git a/src/sentry/middleware/integrations/parsers/slack.py b/src/sentry/middleware/integrations/parsers/slack.py index 
dd598a1da93a82..93d79b57e8312f 100644 --- a/src/sentry/middleware/integrations/parsers/slack.py +++ b/src/sentry/middleware/integrations/parsers/slack.py @@ -3,6 +3,7 @@ import logging from collections.abc import Sequence +import orjson import sentry_sdk from django.http.response import HttpResponse, HttpResponseBase from rest_framework import status @@ -29,7 +30,6 @@ from sentry.models.outbox import WebhookProviderIdentifier from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders from sentry.types.region import Region -from sentry.utils import json from sentry.utils.signing import unsign from .base import BaseRequestParser, create_async_request_payload @@ -127,8 +127,8 @@ def get_response(self): # Handle event interactions challenge request data = None try: - data = json.loads(self.request.body.decode(encoding="utf-8")) - except Exception: + data = orjson.loads(self.request.body) + except orjson.JSONDecodeError: pass if data and is_event_challenge(data): return self.get_response_from_control_silo() diff --git a/src/sentry/middleware/integrations/parsers/vsts.py b/src/sentry/middleware/integrations/parsers/vsts.py index 94b904ddfaae21..1bd867cedf3836 100644 --- a/src/sentry/middleware/integrations/parsers/vsts.py +++ b/src/sentry/middleware/integrations/parsers/vsts.py @@ -2,6 +2,7 @@ import logging +import orjson import sentry_sdk from django.http.response import HttpResponseBase @@ -11,7 +12,6 @@ from sentry.models.integrations.organization_integration import OrganizationIntegration from sentry.models.outbox import WebhookProviderIdentifier from sentry.services.hybrid_cloud.util import control_silo_function -from sentry.utils import json logger = logging.getLogger(__name__) @@ -25,7 +25,7 @@ class VstsRequestParser(BaseRequestParser): @control_silo_function def get_integration_from_request(self) -> Integration | None: try: - data = json.loads(self.request.body.decode(encoding="utf-8")) + data = orjson.loads(self.request.body) external_id = get_vsts_external_id(data=data) except Exception as e: sentry_sdk.capture_exception(e) diff --git a/src/sentry/middleware/integrations/tasks.py b/src/sentry/middleware/integrations/tasks.py index 164e79e8e4b1d8..b6f80f107af650 100644 --- a/src/sentry/middleware/integrations/tasks.py +++ b/src/sentry/middleware/integrations/tasks.py @@ -2,6 +2,7 @@ from collections.abc import MutableMapping from typing import Any, cast +import orjson import requests import sentry_sdk from requests import Response @@ -11,7 +12,6 @@ from sentry.silo.client import RegionSiloClient from sentry.tasks.base import instrumented_task from sentry.types.region import get_region_by_name -from sentry.utils import json logger = logging.getLogger(__name__) @@ -73,7 +73,7 @@ def convert_to_async_slack_response( response_payload = {} try: - response_payload = json.loads(response_body.decode(encoding="utf-8")) + response_payload = orjson.loads(response_body) except Exception as exc: sentry_sdk.capture_exception(exc) @@ -145,9 +145,7 @@ def convert_to_async_discord_response( # handling the request asynchronously, we extract only the data, and post it to the webhook # that discord provides. 
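        # The region silo returns a full interaction-response envelope of the
        # shape {"type": ..., "data": ...}; only the inner "data" object is
        # reposted, since the followup webhook expects the bare message payload.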
# https://discord.com/developers/docs/interactions/receiving-and-responding#followup-messages - response_payload = json.loads(result["response"].content.decode(encoding="utf-8")).get( - "data" - ) + response_payload = orjson.loads(result["response"].content).get("data") except Exception as e: sentry_sdk.capture_exception(e) integration_response = requests.post(response_url, json=response_payload) diff --git a/src/sentry/middleware/ratelimit.py b/src/sentry/middleware/ratelimit.py index da11323431768e..d9b9675adf75da 100644 --- a/src/sentry/middleware/ratelimit.py +++ b/src/sentry/middleware/ratelimit.py @@ -4,6 +4,7 @@ import uuid from collections.abc import Callable +import orjson from django.conf import settings from django.http.request import HttpRequest from django.http.response import HttpResponse, HttpResponseBase @@ -18,7 +19,7 @@ ) from sentry.ratelimits.config import RateLimitConfig from sentry.types.ratelimit import RateLimitCategory, RateLimitMeta, RateLimitType -from sentry.utils import json, metrics +from sentry.utils import metrics DEFAULT_ERROR_MESSAGE = ( "You are attempting to use this endpoint too frequently. Limit is " @@ -106,7 +107,7 @@ def process_view( }, ) response = HttpResponse( - json.dumps( + orjson.dumps( DEFAULT_ERROR_MESSAGE.format( limit=request.rate_limit_metadata.limit, window=request.rate_limit_metadata.window, From 5bff93306e3d4bf328db2a81bd4bafb849e3eb2a Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 8 May 2024 08:34:04 -0400 Subject: [PATCH 136/376] ref: remove unused partition parameter from buffer (#70441) --- src/sentry/buffer/base.py | 6 ++---- src/sentry/buffer/redis.py | 6 ++---- src/sentry/tasks/process_buffer.py | 25 ++++++++--------------- tests/sentry/tasks/test_process_buffer.py | 2 +- 4 files changed, 14 insertions(+), 25 deletions(-) diff --git a/src/sentry/buffer/base.py b/src/sentry/buffer/base.py index 2627298a89a4bd..e80e18f977981a 100644 --- a/src/sentry/buffer/base.py +++ b/src/sentry/buffer/base.py @@ -69,12 +69,10 @@ def incr( } ) - # TODO: `partition` is unused, remove after a deploy - - def process_pending(self, partition: int | None = None) -> None: + def process_pending(self) -> None: return - def process_batch(self, partition: int | None = None) -> None: + def process_batch(self) -> None: return def process( diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py index 18405b2f54df3e..812c52d49fa3ef 100644 --- a/src/sentry/buffer/redis.py +++ b/src/sentry/buffer/redis.py @@ -303,7 +303,7 @@ def get_hash( return decoded_hash - def process_batch(self, partition: int | None = None) -> None: + def process_batch(self) -> None: client = get_cluster_routing_client(self.cluster, self.is_redis_cluster) lock_key = self._lock_key(client, self.pending_key, ex=10) if not lock_key: @@ -371,9 +371,7 @@ def incr( tags={"module": model.__module__, "model": model.__name__}, ) - # TODO: `partition` is unused, remove after a deploy - - def process_pending(self, partition: int | None = None) -> None: + def process_pending(self) -> None: client = get_cluster_routing_client(self.cluster, self.is_redis_cluster) lock_key = self._lock_key(client, self.pending_key, ex=60) if not lock_key: diff --git a/src/sentry/tasks/process_buffer.py b/src/sentry/tasks/process_buffer.py index 7a1def04b698f9..e4deedee5b5947 100644 --- a/src/sentry/tasks/process_buffer.py +++ b/src/sentry/tasks/process_buffer.py @@ -6,58 +6,51 @@ from sentry.tasks.base import instrumented_task from 
sentry.utils.locking import UnableToAcquireLock +from sentry.utils.locking.lock import Lock logger = logging.getLogger(__name__) -# TODO: `partition` is unused, remove after a deploy - - -def get_process_lock(lock_name: str, partition: str | None = None): +def get_process_lock(lock_name: str) -> Lock: from sentry.locks import locks - if partition is None: - lock_key = f"buffer:{lock_name}" - else: - lock_key = f"buffer:{lock_name}:{partition}" - - return locks.get(lock_key, duration=60, name=lock_name) + return locks.get(f"buffer:{lock_name}", duration=60, name=lock_name) @instrumented_task( name="sentry.tasks.process_buffer.process_pending", queue="buffers.process_pending" ) -def process_pending(partition=None): +def process_pending() -> None: """ Process pending buffers. """ from sentry import buffer - lock = get_process_lock("process_pending", partition) + lock = get_process_lock("process_pending") try: with lock.acquire(): buffer.process_pending() except UnableToAcquireLock as error: - logger.warning("process_pending.fail", extra={"error": error, "partition": partition}) + logger.warning("process_pending.fail", extra={"error": error}) @instrumented_task( name="sentry.tasks.process_buffer.process_pending_batch", queue="buffers.process_pending_batch" ) -def process_pending_batch(partition=None): +def process_pending_batch() -> None: """ Process pending buffers in a batch. """ from sentry import buffer - lock = get_process_lock("process_pending_batch", partition) + lock = get_process_lock("process_pending_batch") try: with lock.acquire(): buffer.process_batch() except UnableToAcquireLock as error: - logger.warning("process_pending_batch.fail", extra={"error": error, "partition": partition}) + logger.warning("process_pending_batch.fail", extra={"error": error}) @instrumented_task(name="sentry.tasks.process_buffer.process_incr", queue="counters-0") diff --git a/tests/sentry/tasks/test_process_buffer.py b/tests/sentry/tasks/test_process_buffer.py index 6688ba1d33d5a4..ff4035f4065897 100644 --- a/tests/sentry/tasks/test_process_buffer.py +++ b/tests/sentry/tasks/test_process_buffer.py @@ -42,7 +42,7 @@ def test_process_pending_batch(self, mock_process_pending_batch): @mock.patch("sentry.buffer.backend.process_batch") def test_process_pending_batch_locked_out(self, mock_process_pending_batch): with self.assertLogs("sentry.tasks.process_buffer", level="WARNING") as logger: - lock = get_process_lock("process_pending_batch", None) + lock = get_process_lock("process_pending_batch") with lock.acquire(): process_pending_batch() self.assertEqual(len(logger.output), 1) From d3d6bdf70621e8bfa79cc10b9f4ca3112e78468b Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 8 May 2024 10:20:37 -0400 Subject: [PATCH 137/376] chore(actor) Remove Actor model from django state (#70439) Remove the Actor model from django state. 
Refs HC-1183 --- .../backup/model_dependencies/detailed.json | 15 +--- fixtures/backup/model_dependencies/flat.json | 3 +- migrations_lockfile.txt | 2 +- src/sentry/backup/dependencies.py | 4 +- .../0715_remove_actormodel_constraints.py | 65 ++++++++++++++ src/sentry/models/actor.py | 84 ++----------------- src/sentry/models/outbox.py | 2 +- src/sentry/receivers/outbox/region.py | 6 +- src/sentry/services/hybrid_cloud/actor.py | 3 +- src/sentry/snuba/models.py | 2 +- .../test_default_comparators.pysnap | 3 +- .../test_704_backfill_rule_user_team.py | 55 ------------ ...test_706_grouphistory_userteam_backfill.py | 49 ----------- 13 files changed, 84 insertions(+), 209 deletions(-) create mode 100644 src/sentry/migrations/0715_remove_actormodel_constraints.py delete mode 100644 tests/sentry/migrations/test_704_backfill_rule_user_team.py delete mode 100644 tests/sentry/migrations/test_706_grouphistory_userteam_backfill.py diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json index 4d132468467dd3..252e8942ca192c 100644 --- a/fixtures/backup/model_dependencies/detailed.json +++ b/fixtures/backup/model_dependencies/detailed.json @@ -330,11 +330,6 @@ "kind": "FlexibleForeignKey", "model": "sentry.team", "nullable": true - }, - "user_id": { - "kind": "HybridCloudForeignKey", - "model": "sentry.user", - "nullable": true } }, "model": "sentry.actor", @@ -344,14 +339,7 @@ "Region" ], "table_name": "sentry_actor", - "uniques": [ - [ - "team" - ], - [ - "user_id" - ] - ] + "uniques": [] }, "sentry.alertrule": { "dangling": false, @@ -5803,7 +5791,6 @@ }, "model": "sentry.snubaquery", "relocation_dependencies": [ - "sentry.actor", "sentry.organization", "sentry.project" ], diff --git a/fixtures/backup/model_dependencies/flat.json b/fixtures/backup/model_dependencies/flat.json index ae8f8ba1a64d3c..2902738dbae41f 100644 --- a/fixtures/backup/model_dependencies/flat.json +++ b/fixtures/backup/model_dependencies/flat.json @@ -45,8 +45,7 @@ "sentry.user" ], "sentry.actor": [ - "sentry.team", - "sentry.user" + "sentry.team" ], "sentry.alertrule": [ "sentry.organization", diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index bbb82d29c729c1..5a64add4d4a8e5 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0714_drop_project_team_avatar +sentry: 0715_remove_actormodel_constraints social_auth: 0002_default_auto_field diff --git a/src/sentry/backup/dependencies.py b/src/sentry/backup/dependencies.py index df374aff6f4a26..ba4274b772bf2c 100644 --- a/src/sentry/backup/dependencies.py +++ b/src/sentry/backup/dependencies.py @@ -508,9 +508,7 @@ def dependencies() -> dict[NormalizedModelName, ModelRelations]: # TODO(getsentry/team-ospo#190): In practice, we can treat `AlertRule`'s dependency on # `Organization` as non-nullable, so mark it is non-dangling. This is a hack - we should figure - # out a more rigorous way to deduce this. The same applies to `Actor`, since each actor must - # reference at least one `User` or `Team`, neither of which are dangling. - model_dependencies_dict[NormalizedModelName("sentry.actor")].dangling = False + # out a more rigorous way to deduce this. 
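    # With the `sentry.actor` override removed, `sentry.alertrule` keeps its
    # manual non-dangling override, since each alert rule is in practice tied
    # to an organization.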
model_dependencies_dict[NormalizedModelName("sentry.alertrule")].dangling = False # TODO(getsentry/team-ospo#190): The same is basically true for the remaining models in this diff --git a/src/sentry/migrations/0715_remove_actormodel_constraints.py b/src/sentry/migrations/0715_remove_actormodel_constraints.py new file mode 100644 index 00000000000000..cd87515e7ed464 --- /dev/null +++ b/src/sentry/migrations/0715_remove_actormodel_constraints.py @@ -0,0 +1,65 @@ +# Generated by Django 5.0.4 on 2024-05-07 19:52 + +import django.db.models.deletion +from django.db import migrations, models + +import sentry.db.models.fields.foreignkey +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0714_drop_project_team_avatar"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + """ + -- drop team_id index & constraint + ALTER TABLE "sentry_actor" DROP CONSTRAINT IF EXISTS "sentry_actor_team_id_6ca8eba5_fk_sentry_team_id"; + ALTER TABLE "sentry_actor" DROP CONSTRAINT IF EXISTS "sentry_actor_team_id_6ca8eba5_uniq"; + DROP INDEX IF EXISTS "sentry_actor_team_id_6ca8eba5_like"; + -- drop user_id index + ALTER TABLE "sentry_actor" DROP CONSTRAINT IF EXISTS "sentry_actor_user_id_c832ff63_uniq"; + DROP INDEX IF EXISTS "sentry_actor_user_id_c832ff63_like"; + """, + reverse_sql="", + hints={"tables": ["sentry_actor"]}, + ), + ], + state_operations=[ + migrations.AlterField( + model_name="actor", + name="team", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + db_index=False, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.team", + ), + ), + migrations.AlterField( + model_name="actor", + name="user_id", + field=models.BigIntegerField(null=True), + ), + ], + ) + ] diff --git a/src/sentry/models/actor.py b/src/sentry/models/actor.py index 449b0754b1b050..64e359be3f9d97 100644 --- a/src/sentry/models/actor.py +++ b/src/sentry/models/actor.py @@ -1,90 +1,24 @@ from __future__ import annotations -from django.conf import settings -from django.db import models, router, transaction -from django.forms import model_to_dict +from django.db import models -from sentry.backup.dependencies import ImportKind, PrimaryKeyMap -from sentry.backup.helpers import ImportFlags -from sentry.backup.scopes import ImportScope, RelocationScope +from sentry.backup.scopes import RelocationScope from sentry.db.models import Model, region_silo_model from sentry.db.models.fields.foreignkey import 
FlexibleForeignKey -from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey -from sentry.models.outbox import OutboxCategory, OutboxScope, RegionOutbox, outbox_context - -ACTOR_TYPES = { - "team": 0, - "user": 1, -} @region_silo_model class Actor(Model): - # Temporary until Actor is removed + """ + XXX: This model is being removed. Do not use. + """ + __relocation_scope__ = RelocationScope.Excluded - type = models.PositiveSmallIntegerField( - choices=( - (ACTOR_TYPES["team"], "team"), - (ACTOR_TYPES["user"], "user"), - ) - ) - user_id = HybridCloudForeignKey( - settings.AUTH_USER_MODEL, on_delete="CASCADE", db_index=True, unique=True, null=True - ) - team = FlexibleForeignKey( - "sentry.Team", - related_name="actor_from_team", - db_constraint=True, - db_index=True, - unique=True, - null=True, - on_delete=models.CASCADE, - ) + type = models.PositiveSmallIntegerField() + user_id = models.BigIntegerField(null=True) + team = FlexibleForeignKey("sentry.Team", null=True, db_index=False, db_constraint=False) class Meta: app_label = "sentry" db_table = "sentry_actor" - - def outbox_for_update(self) -> RegionOutbox: - return RegionOutbox( - shard_scope=OutboxScope.ORGANIZATION_SCOPE, - shard_identifier=self.id, - object_identifier=self.id, - category=OutboxCategory.ACTOR_UPDATE, - ) - - def delete(self, **kwargs): - with outbox_context(transaction.atomic(router.db_for_write(Actor))): - self.outbox_for_update().save() - return super().delete(**kwargs) - - @classmethod - def query_for_relocation_export(cls, q: models.Q, pk_map: PrimaryKeyMap) -> models.Q: - # Actors that can have both their `user` and `team` value set to null. Exclude such actors # from the export. - q = super().query_for_relocation_export(q, pk_map) - - return q & ~models.Q(team__isnull=True, user_id__isnull=True) - - # TODO(hybrid-cloud): actor refactor. Remove this method when done. - def write_relocation_import( - self, scope: ImportScope, flags: ImportFlags - ) -> tuple[int, ImportKind] | None: - if self.team is None: - return super().write_relocation_import(scope, flags) - - # `Actor` and `Team` have a direct circular dependency between them for the time being due - # to an ongoing refactor (that is, `Actor` foreign keys directly into `Team`, and `Team` - # foreign keys directly into `Actor`). If we use `INSERT` database calls naively, they will - # always fail, because one half of the cycle will always be missing. - # - # Because `Team` ends up first in the dependency sorting (see: - # fixtures/backup/model_dependencies/sorted.json), a viable solution here is to always null - # out the `actor_id` field of the `Team` when we import it, then rely on that model's - # `post_save()` hook to fill in the `Actor` model. 
- (actor, _) = Actor.objects.get_or_create(team=self.team, defaults=model_to_dict(self)) - if actor: - self.pk = actor.pk - self.save() - - return (self.pk, ImportKind.Inserted) diff --git a/src/sentry/models/outbox.py b/src/sentry/models/outbox.py index 625ae16f833355..36bad01ab4e34e 100644 --- a/src/sentry/models/outbox.py +++ b/src/sentry/models/outbox.py @@ -91,7 +91,7 @@ class OutboxCategory(IntEnum): API_KEY_UPDATE = 28 PARTNER_ACCOUNT_UPDATE = 29 SENTRY_APP_UPDATE = 30 - ACTOR_UPDATE = 31 + ACTOR_UPDATE = 31 # Deprecated API_TOKEN_UPDATE = 32 ORG_AUTH_TOKEN_UPDATE = 33 ISSUE_COMMENT_UPDATE = 34 diff --git a/src/sentry/receivers/outbox/region.py b/src/sentry/receivers/outbox/region.py index d63527da643027..569e7bed45913a 100644 --- a/src/sentry/receivers/outbox/region.py +++ b/src/sentry/receivers/outbox/region.py @@ -11,7 +11,6 @@ from django.dispatch import receiver -from sentry.models.actor import Actor from sentry.models.authproviderreplica import AuthProviderReplica from sentry.models.organization import Organization from sentry.models.outbox import OutboxCategory, process_region_outbox @@ -54,9 +53,8 @@ def process_project_updates(object_identifier: int, **kwds: Any): @receiver(process_region_outbox, sender=OutboxCategory.ACTOR_UPDATE) def process_actor_updates(object_identifier: int, **kwds: Any): - if (actor := maybe_process_tombstone(Actor, object_identifier)) is None: - return - actor + # Retain until we have no ACTOR_UPDATE messages in flight. + pass @receiver(process_region_outbox, sender=OutboxCategory.ORGANIZATION_MAPPING_CUSTOMER_ID_UPDATE) diff --git a/src/sentry/services/hybrid_cloud/actor.py b/src/sentry/services/hybrid_cloud/actor.py index bde58f24e3f3a5..d0edb08568c9ae 100644 --- a/src/sentry/services/hybrid_cloud/actor.py +++ b/src/sentry/services/hybrid_cloud/actor.py @@ -14,7 +14,6 @@ from sentry.services.hybrid_cloud.user import RpcUser if TYPE_CHECKING: - from sentry.models.actor import Actor from sentry.models.team import Team from sentry.models.user import User from sentry.services.hybrid_cloud.organization import RpcTeam @@ -25,7 +24,7 @@ class ActorType(str, Enum): TEAM = "Team" -ActorTarget = Union["Actor", "RpcActor", "User", "RpcUser", "Team", "RpcTeam"] +ActorTarget = Union["RpcActor", "User", "RpcUser", "Team", "RpcTeam"] class RpcActor(RpcModel): diff --git a/src/sentry/snuba/models.py b/src/sentry/snuba/models.py index ae71ddbc4deee6..c8468eb52057f4 100644 --- a/src/sentry/snuba/models.py +++ b/src/sentry/snuba/models.py @@ -27,7 +27,7 @@ class QueryAggregations(Enum): @region_silo_model class SnubaQuery(Model): __relocation_scope__ = RelocationScope.Organization - __relocation_dependencies__ = {"sentry.Actor", "sentry.Organization", "sentry.Project"} + __relocation_dependencies__ = {"sentry.Organization", "sentry.Project"} class Type(Enum): ERROR = 0 diff --git a/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap b/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap index 76a5c93834b795..f0273917072eec 100644 --- a/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap +++ b/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap @@ -1,5 +1,5 @@ --- -created: '2024-05-01T20:30:02.246280+00:00' +created: '2024-05-07T20:04:53.383480+00:00' creator: sentry source: tests/sentry/backup/test_comparators.py --- @@ -80,7 +80,6 @@ source: tests/sentry/backup/test_comparators.py - class: ForeignKeyComparator fields: - team - - user_id model_name: 
sentry.actor - comparators: - class: DateUpdatedComparator diff --git a/tests/sentry/migrations/test_704_backfill_rule_user_team.py b/tests/sentry/migrations/test_704_backfill_rule_user_team.py deleted file mode 100644 index 28b86e29cca023..00000000000000 --- a/tests/sentry/migrations/test_704_backfill_rule_user_team.py +++ /dev/null @@ -1,55 +0,0 @@ -import pytest - -from sentry.models.actor import ACTOR_TYPES, Actor -from sentry.models.rule import Rule -from sentry.testutils.cases import TestMigrations - - -@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.") -class BackfillRuleUserTeamTest(TestMigrations): - migrate_from = "0703_add_team_user_to_rule" - migrate_to = "0704_backfill_rule_user_team" - - def setup_initial_state(self): - self.org = self.create_organization(owner=self.user) - self.team = self.create_team(organization=self.org, members=[self.user]) - self.project = self.create_project(organization=self.org) - - self.user_actor = Actor.objects.create(type=ACTOR_TYPES["user"], user_id=self.user.id) - self.team_actor = Actor.objects.get(type=ACTOR_TYPES["team"], team_id=self.team.id) - self.team_rule = Rule.objects.create( - project=self.project, - label="team rule", - owner_team=self.team, - ) - self.user_rule = Rule.objects.create( - project=self.project, - label="user rule", - owner_user_id=self.user.id, - ) - - other_user = self.create_user() - self.valid = Rule.objects.create( - project=self.project, - label="valid", - owner_user_id=other_user.id, - ) - - # Use QuerySet.update() to avoid validation in AlertRule - Rule.objects.filter(id__in=[self.team_rule.id, self.user_rule.id]).update( - owner_team_id=None, owner_user_id=None - ) - - def test(self): - self.user_rule.refresh_from_db() - self.team_rule.refresh_from_db() - self.valid.refresh_from_db() - - assert self.user_rule.owner_user_id == self.user.id - assert self.user_rule.owner_team_id is None - - assert self.team_rule.owner_team_id == self.team.id - assert self.team_rule.owner_user_id is None - - assert self.valid.owner_team_id is None - assert self.valid.owner_user_id diff --git a/tests/sentry/migrations/test_706_grouphistory_userteam_backfill.py b/tests/sentry/migrations/test_706_grouphistory_userteam_backfill.py deleted file mode 100644 index 698bfcc668ea3c..00000000000000 --- a/tests/sentry/migrations/test_706_grouphistory_userteam_backfill.py +++ /dev/null @@ -1,49 +0,0 @@ -import pytest - -from sentry.models.actor import ACTOR_TYPES, Actor -from sentry.models.grouphistory import GroupHistory, GroupHistoryStatus, record_group_history -from sentry.testutils.cases import TestMigrations - - -@pytest.mark.skip("Migration is no longer runnable. 
Retain until migration is removed.") -class BackfillGroupHistoryUserTeamTest(TestMigrations): - migrate_from = "0705_grouphistory_add_userteam" - migrate_to = "0706_grouphistory_userteam_backfill" - - def setup_initial_state(self): - self.org = self.create_organization(owner=self.user) - self.team = self.create_team(organization=self.org, members=[self.user]) - self.project = self.create_project(organization=self.org) - self.group = self.create_group(project=self.project) - other_user = self.create_user() - - self.user_actor = Actor.objects.create(type=ACTOR_TYPES["user"], user_id=self.user.id) - self.team_actor = Actor.objects.get(type=ACTOR_TYPES["team"], team_id=self.team.id) - self.team_history = record_group_history( - group=self.group, actor=self.team, status=GroupHistoryStatus.RESOLVED - ) - self.user_history = record_group_history( - group=self.group, actor=self.user, status=GroupHistoryStatus.ESCALATING - ) - self.valid = record_group_history( - group=self.group, actor=other_user, status=GroupHistoryStatus.ONGOING - ) - - # Use QuerySet.update() to avoid validation in GroupHistory - GroupHistory.objects.filter(id__in=[self.team_history.id, self.user_history.id]).update( - team_id=None, user_id=None - ) - - def test(self): - self.user_history.refresh_from_db() - self.team_history.refresh_from_db() - self.valid.refresh_from_db() - - assert self.user_history.user_id == self.user.id - assert self.user_history.team_id is None - - assert self.team_history.team_id == self.team.id - assert self.team_history.user_id is None - - assert self.valid.team_id is None - assert self.valid.user_id From eb4de59175e6c7604e3f3bdf3ddbac75181df6a4 Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Wed, 8 May 2024 10:25:30 -0400 Subject: [PATCH 138/376] feat(perf): Add backend referrers for span summary and span metrics (#70466) Adds backend referrers to be used on the new span summary and span metrics pages Relevant PR: https://github.com/getsentry/sentry/pull/69159 --- src/sentry/api/endpoints/organization_events.py | 2 ++ src/sentry/api/endpoints/organization_events_stats.py | 3 +++ src/sentry/snuba/referrer.py | 9 +++++++++ 3 files changed, 14 insertions(+) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 78f38a513a3ce3..791757a257446e 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -101,6 +101,8 @@ Referrer.API_PERFORMANCE_MOBILE_UI_SCREEN_TABLE.value, Referrer.API_PERFORMANCE_MOBILE_UI_SPAN_TABLE.value, Referrer.API_PERFORMANCE_MOBILE_UI_METRICS_RIBBON.value, + Referrer.API_PERFORMANCE_SPAN_SUMMARY_HEADER_DATA.value, + Referrer.API_PERFORMANCE_SPAN_SUMMARY_TABLE.value, } API_TOKEN_REFERRER = Referrer.API_AUTH_TOKEN_EVENTS.value diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index dc21da2a214d7d..4036b5c8341a07 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -100,6 +100,9 @@ Referrer.API_PERFORMANCE_HTTP_DOMAIN_SUMMARY_THROUGHPUT_CHART.value, Referrer.API_PERFORMANCE_HTTP_SAMPLES_PANEL_DURATION_CHART.value, Referrer.API_PERFORMANCE_HTTP_SAMPLES_PANEL_RESPONSE_CODE_CHART.value, + Referrer.API_PERFORMANCE_SPAN_SUMMARY_DURATION_CHART.value, + Referrer.API_PERFORMANCE_SPAN_SUMMARY_THROUGHPUT_CHART.value, + Referrer.API_PERFORMANCE_SPAN_SUMMARY_TRANSACTION_THROUGHPUT_CHART.value, } diff 
--git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py
index 827765f5391fe0..13d89e6d5f7376 100644
--- a/src/sentry/snuba/referrer.py
+++ b/src/sentry/snuba/referrer.py
@@ -489,6 +489,15 @@ class Referrer(Enum):
         "api.performance.http.samples-panel-response-code-samples"
     )

+    # Performance Span Summary Page and Span Metrics
+    API_PERFORMANCE_SPAN_SUMMARY_HEADER_DATA = "api.performance.span-summary-header-data"
+    API_PERFORMANCE_SPAN_SUMMARY_TABLE = "api.performance.span-summary-table"
+    API_PERFORMANCE_SPAN_SUMMARY_DURATION_CHART = "api.performance.span-summary-duration-chart"
+    API_PERFORMANCE_SPAN_SUMMARY_THROUGHPUT_CHART = "api.performance.span-summary-throughput-chart"
+    API_PERFORMANCE_SPAN_SUMMARY_TRANSACTION_THROUGHPUT_CHART = (
+        "api.performance.span-summary-transaction-throughput-chart"
+    )
+
     API_SPAN_SAMPLE_GET_BOUNDS = "api.spans.sample-get-bounds"
     API_SPAN_SAMPLE_GET_SPAN_IDS = "api.spans.sample-get-span-ids"
     API_SPAN_SAMPLE_GET_SPAN_DATA = "api.spans.sample-get-span-data"

From 11a80a383a6ed7d7f0af4a4fdf0344ce5d2fd7a3 Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Wed, 8 May 2024 10:36:32 -0400
Subject: [PATCH 139/376] feat(cache): add average transaction duration to sample sidebar (#70445)

New metric readout for avg transaction duration in sample sidebar

---
 .../cache/samplePanel/samplePanel.tsx | 25 ++++++++++++++++++-
 .../app/views/starfish/views/spans/types.tsx | 2 ++
 2 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/static/app/views/performance/cache/samplePanel/samplePanel.tsx b/static/app/views/performance/cache/samplePanel/samplePanel.tsx
index e86bb219955c1c..d0a1ae2b5388dc 100644
--- a/static/app/views/performance/cache/samplePanel/samplePanel.tsx
+++ b/static/app/views/performance/cache/samplePanel/samplePanel.tsx
@@ -25,10 +25,12 @@ import {MetricReadout} from 'sentry/views/performance/metricReadout';
 import * as ModuleLayout from 'sentry/views/performance/moduleLayout';
 import DetailPanel from 'sentry/views/starfish/components/detailPanel';
 import {getTimeSpentExplanation} from 'sentry/views/starfish/components/tableCells/timeSpentCell';
-import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover';
+import {useMetrics, useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover';
 import {useIndexedSpans} from 'sentry/views/starfish/queries/useIndexedSpans';
 import {useTransactions} from 'sentry/views/starfish/queries/useTransactions';
 import {
+  MetricsFields,
+  type MetricsQueryFilters,
   SpanFunction,
   SpanIndexedField,
   type SpanIndexedQueryFilters,
@@ -76,6 +78,15 @@ export function CacheSamplePanel() {
     referrer: Referrer.SAMPLES_CACHE_METRICS_RIBBON,
   });

+  const {data: transactionDurationData, isLoading: isTransactionDurationLoading} =
+    useMetrics({
+      search: MutableSearch.fromQueryObject({
+        transaction: query.transaction,
+      } satisfies MetricsQueryFilters),
+      fields: [`avg(${MetricsFields.TRANSACTION_DURATION})`],
+      enabled: isPanelOpen && Boolean(query.transaction),
+    });
+
   const sampleFilters: SpanIndexedQueryFilters = {
     ...BASE_FILTERS,
     transaction: query.transaction,
@@ -192,6 +203,18 @@ export function CacheSamplePanel() {
     isLoading={areCacheTransactionMetricsFetching}
     />
+
+

= {
 'avg(http.response_content_length)': t('Avg Encoded Size'),
 'avg(http.decoded_response_content_length)': t('Avg Decoded Size'),
 'avg(http.response_transfer_size)': t('Avg Transfer Size'),
+ 'avg(transaction.duration)': t('Avg Txn Duration'),
 'avg(cache.item_size)': t('Avg
Value Size'), unsuccessfulHTTPCodes: t('Response Codes (3XX, 4XX, 5XX)'), httpCodeBreakdown: t('Response Code Breakdown'), From 4d124bd69d0f6c4e641638e9bd89d067c2d3d7e8 Mon Sep 17 00:00:00 2001 From: Evan Hicks Date: Wed, 8 May 2024 11:19:07 -0400 Subject: [PATCH 140/376] fix: Add a metric for non-success Snuba requests (#70452) Tracking this on the Sentry side allows alerts to be created that are separate from the Snuba API itself, in case the API is in a broken state and can't accurately report what is happening. --- src/sentry/utils/snuba.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 5948b75f60af4a..33af7f9a936409 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1020,7 +1020,10 @@ def _bulk_snuba_query( if response.status != 200: _log_request_query(snuba_param_list[index][0]) - + metrics.incr( + "snuba.client.api.error", + tags={"status_code": response.status, "referrer": query_referrer}, + ) if body.get("error"): error = body["error"] if response.status == 429: From 50b4ed390b1cbf6570b843f3f3ae7672d8924276 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Wed, 8 May 2024 11:32:28 -0400 Subject: [PATCH 141/376] ref(routes): A few more routes using withOrgPath (#70449) --- static/app/routes.tsx | 47 +++++++++---------------------------------- 1 file changed, 9 insertions(+), 38 deletions(-) diff --git a/static/app/routes.tsx b/static/app/routes.tsx index 95e54b6efc1fb9..5799e637d535f3 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -209,38 +209,16 @@ function buildRoutes() { path="/organizations/new/" component={make(() => import('sentry/views/organizationCreate'))} /> - {USING_CUSTOMER_DOMAIN && ( - import('sentry/views/dataExport/dataDownload')) - )} - key="orgless-data-export-route" - /> - )} import('sentry/views/dataExport/dataDownload')) - )} - key="org-data-export" + path="/data-export/:dataExportId" + component={make(() => import('sentry/views/dataExport/dataDownload'))} + withOrgPath /> - {USING_CUSTOMER_DOMAIN && ( - import('sentry/views/disabledMember')) - )} - key="orgless-disabled-member-route" - /> - )} import('sentry/views/disabledMember')) - )} - key="org-disabled-member" + path="/disabled-member/" + component={make(() => import('sentry/views/disabledMember'))} + withOrgPath /> {USING_CUSTOMER_DOMAIN && ( @@ -298,17 +276,10 @@ function buildRoutes() { import('sentry/views/onboarding'))} /> - {USING_CUSTOMER_DOMAIN && ( - import('sentry/views/stories/index'))} - key="orgless-stories" - /> - )} import('sentry/views/stories/index')))} - key="org-stories" + path="/stories/" + component={make(() => import('sentry/views/stories/index'))} + withOrgPath /> ); From 6b530ffab46d0b50512f0f4c9df8d084c1feb831 Mon Sep 17 00:00:00 2001 From: colin-sentry <161344340+colin-sentry@users.noreply.github.com> Date: Wed, 8 May 2024 11:50:48 -0400 Subject: [PATCH 142/376] chore(ai-monitoring): Add a unit to total cost (#70484) --- .../app/views/aiMonitoring/PipelinesTable.tsx | 18 +++++++++--------- .../aiMonitoring/aiMonitoringDetailsPage.tsx | 7 ++----- static/app/views/starfish/types.tsx | 2 +- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/static/app/views/aiMonitoring/PipelinesTable.tsx b/static/app/views/aiMonitoring/PipelinesTable.tsx index 203761b5fc4153..03d1039a2f8f56 100644 --- a/static/app/views/aiMonitoring/PipelinesTable.tsx +++ b/static/app/views/aiMonitoring/PipelinesTable.tsx @@ -39,7 +39,7 @@ type Row = Pick< | 
'avg(span.duration)' | 'sum(span.duration)' | 'ai_total_tokens_used()' - | 'ai_total_tokens_used(c:spans/ai.total_cost@none)' + | 'ai_total_tokens_used(c:spans/ai.total_cost@usd)' >; type Column = GridColumnHeader< @@ -47,7 +47,7 @@ type Column = GridColumnHeader< | 'spm()' | 'avg(span.duration)' | 'ai_total_tokens_used()' - | 'ai_total_tokens_used(c:spans/ai.total_cost@none)' + | 'ai_total_tokens_used(c:spans/ai.total_cost@usd)' >; const COLUMN_ORDER: Column[] = [ @@ -62,7 +62,7 @@ const COLUMN_ORDER: Column[] = [ width: 180, }, { - key: 'ai_total_tokens_used(c:spans/ai.total_cost@none)', + key: 'ai_total_tokens_used(c:spans/ai.total_cost@usd)', name: t('Total cost'), width: 180, }, @@ -130,7 +130,7 @@ export function PipelinesTable() { fields: [ 'span.ai.pipeline.group', 'ai_total_tokens_used()', - 'ai_total_tokens_used(c:spans/ai.total_cost@none)', + 'ai_total_tokens_used(c:spans/ai.total_cost@usd)', ], }); @@ -138,7 +138,7 @@ export function PipelinesTable() { const row: Row = { ...baseRow, 'ai_total_tokens_used()': 0, - 'ai_total_tokens_used(c:spans/ai.total_cost@none)': 0, + 'ai_total_tokens_used(c:spans/ai.total_cost@usd)': 0, }; if (!tokensUsedLoading) { const tokenUsedDataPoint = tokensUsedData.find( @@ -146,8 +146,8 @@ export function PipelinesTable() { ); if (tokenUsedDataPoint) { row['ai_total_tokens_used()'] = tokenUsedDataPoint['ai_total_tokens_used()']; - row['ai_total_tokens_used(c:spans/ai.total_cost@none)'] = - tokenUsedDataPoint['ai_total_tokens_used(c:spans/ai.total_cost@none)']; + row['ai_total_tokens_used(c:spans/ai.total_cost@usd)'] = + tokenUsedDataPoint['ai_total_tokens_used(c:spans/ai.total_cost@usd)']; } } return row; @@ -237,8 +237,8 @@ function renderBodyCell( ); } - if (column.key === 'ai_total_tokens_used(c:spans/ai.total_cost@none)') { - const cost = row['ai_total_tokens_used(c:spans/ai.total_cost@none)']; + if (column.key === 'ai_total_tokens_used(c:spans/ai.total_cost@usd)') { + const cost = row['ai_total_tokens_used(c:spans/ai.total_cost@usd)']; if (cost) { if (cost < 0.01) { return US {cost * 100}¢; diff --git a/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx b/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx index 3635ca22f8ac58..948872a12ac1bc 100644 --- a/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx +++ b/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx @@ -73,10 +73,7 @@ export default function AiMonitoringPage({params}: Props) { 'span.category': 'ai', 'span.ai.pipeline.group': groupId, }), - fields: [ - 'ai_total_tokens_used()', - 'ai_total_tokens_used(c:spans/ai.total_cost@none)', - ], + fields: ['ai_total_tokens_used()', 'ai_total_tokens_used(c:spans/ai.total_cost@usd)'], enabled: Boolean(groupId), referrer: 'api.ai-pipelines.view', }); @@ -137,7 +134,7 @@ export default function AiMonitoringPage({params}: Props) { title={t('Total Cost')} value={ tokenUsedMetric[ - 'ai_total_tokens_used(c:spans/ai.total_cost@none)' + 'ai_total_tokens_used(c:spans/ai.total_cost@usd)' ] } unit={CurrencyUnit.USD} diff --git a/static/app/views/starfish/types.tsx b/static/app/views/starfish/types.tsx index 4ebe02181adc55..eace04518f2d10 100644 --- a/static/app/views/starfish/types.tsx +++ b/static/app/views/starfish/types.tsx @@ -132,7 +132,7 @@ export type SpanMetricsResponse = { 'http_response_rate(4)': number; 'http_response_rate(5)': number; } & { - 'ai_total_tokens_used(c:spans/ai.total_cost@none)': number; + 'ai_total_tokens_used(c:spans/ai.total_cost@usd)': number; } & { ['project']: string; ['project.id']: number; From 
7d1cf85647197eb53e97ad412f00b918867a73a4 Mon Sep 17 00:00:00 2001
From: Matt Duncan <14761+mrduncan@users.noreply.github.com>
Date: Wed, 8 May 2024 08:58:45 -0700
Subject: [PATCH 143/376] chore(issues): Enable stronger typing on occurrence_consumer (#70487)

This is a quick follow up to #69828 since these two type errors are trivial
to resolve.

---
 pyproject.toml | 1 +
 src/sentry/issues/occurrence_consumer.py | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 2cc68fc90ef298..c27789ea0709e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -582,6 +582,7 @@ module = [
     "sentry.issues.issue_occurrence",
     "sentry.issues.json_schemas",
     "sentry.issues.merge",
+    "sentry.issues.occurrence_consumer",
     "sentry.issues.ongoing",
     "sentry.issues.priority",
     "sentry.issues.producer",
diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py
index a9d57b5380d45b..b3cd00eff9c47a 100644
--- a/src/sentry/issues/occurrence_consumer.py
+++ b/src/sentry/issues/occurrence_consumer.py
@@ -364,7 +364,7 @@ def _process_message(
     return None

-def _process_batch(worker: ThreadPoolExecutor, message: Message[ValuesBatch[KafkaPayload]]):
+def _process_batch(worker: ThreadPoolExecutor, message: Message[ValuesBatch[KafkaPayload]]) -> None:
     """
     Receives batches of occurrences. This function
     will take the batch and group them together by
     fingerprint (ensuring order is preserved) and
@@ -402,7 +402,7 @@ def _process_batch(worker: ThreadPoolExecutor, message: Message[ValuesBatch[Kafk
     wait(futures)

-def process_occurrence_group(items: list[Mapping[str, Any]]):
+def process_occurrence_group(items: list[Mapping[str, Any]]) -> None:
     """
     Process a group of related occurrences (all part of the same group)
     completely serially.

From f161ebb335207d5f6f8f5d9f16cb97ee9fc53013 Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Wed, 8 May 2024 09:16:58 -0700
Subject: [PATCH 144/376] ref(daily summary): Disable notification (#70295)

The daily summary has been sending multiple times, and I don't currently
have time to dedicate to bug fixing, so it is disabled for now. We'll
likely have to keep track of which users per org have already received the
notification, check that before sending, and clear the record out every
hour to avoid duplicate sends.
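A minimal sketch of that guard, assuming a Django cache backend (the helper
name, key format, and TTL here are illustrative assumptions, not shipped
code):

    from django.core.cache import cache

    def should_send_daily_summary(org_id: int, user_id: int) -> bool:
        # cache.add() only writes when the key is absent, so it doubles as an
        # atomic "already sent?" check on backends like Redis or memcached.
        # The one-hour timeout clears the record each hour, matching the
        # hourly schedule, so the next run can send again.
        key = f"daily-summary-sent:{org_id}:{user_id}"
        return cache.add(key, 1, timeout=60 * 60)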
---
 src/sentry/conf/server.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index ea827d7a88bb24..68ae96ec0cf7f2 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -1110,12 +1110,12 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
         "schedule": crontab(minute="0", hour="12", day_of_week="saturday"),
         "options": {"expires": 60 * 60 * 3},
     },
-    "schedule-daily-organization-reports": {
-        "task": "sentry.tasks.summaries.daily_summary.schedule_organizations",
-        # Run every 1 hour on business days
-        "schedule": crontab(minute=0, hour="*/1", day_of_week="mon-fri"),
-        "options": {"expires": 60 * 60 * 3},
-    },
+    # "schedule-daily-organization-reports": {
+    #     "task": "sentry.tasks.summaries.daily_summary.schedule_organizations",
+    #     # Run every 1 hour on business days
+    #     "schedule": crontab(minute=0, hour="*/1", day_of_week="mon-fri"),
+    #     "options": {"expires": 60 * 60 * 3},
+    # },
     "schedule-hybrid-cloud-foreign-key-jobs": {
         "task": "sentry.tasks.deletion.hybrid_cloud.schedule_hybrid_cloud_foreign_key_jobs",
         # Run every 15 minutes

From 9a90b575090e5e3f3d9232f70b471ad16a65fbf2 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 8 May 2024 12:18:49 -0400
Subject: [PATCH 145/376] ref(crons): Use constant for DEFAULT_CHECKIN_MARGIN (#70510)

---
 static/app/views/monitors/components/detailsSidebar.tsx | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/static/app/views/monitors/components/detailsSidebar.tsx b/static/app/views/monitors/components/detailsSidebar.tsx
index 5f154607eaadc7..1ee247365a4364 100644
--- a/static/app/views/monitors/components/detailsSidebar.tsx
+++ b/static/app/views/monitors/components/detailsSidebar.tsx
@@ -12,7 +12,10 @@ import {t, tn} from 'sentry/locale';
 import {space} from 'sentry/styles/space';
 import {getFormattedDate} from 'sentry/utils/dates';
 import useCopyToClipboard from 'sentry/utils/useCopyToClipboard';
-import {DEFAULT_MAX_RUNTIME} from 'sentry/views/monitors/components/monitorForm';
+import {
+  DEFAULT_CHECKIN_MARGIN,
+  DEFAULT_MAX_RUNTIME,
+} from 'sentry/views/monitors/components/monitorForm';
 import {MonitorIndicator} from 'sentry/views/monitors/components/monitorIndicator';
 import type {Monitor, MonitorEnvironment} from 'sentry/views/monitors/types';
 import {ScheduleType} from 'sentry/views/monitors/types';
@@ -78,7 +81,7 @@ export default function DetailsSidebar({monitorEnv, monitor}: Props) {
 {tn(
 'Check-ins missed after %s min',
 'Check-ins missed after %s mins',
- checkin_margin ?? 1
+ checkin_margin ?? DEFAULT_CHECKIN_MARGIN
 )}

From dbc926a9150ef7acc8ecfb7aa7535e110edb1d2c Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 8 May 2024 12:22:14 -0400
Subject: [PATCH 146/376] ref(crons): Move timezone to schedule text (#70511)

It is now next to the schedule.
This is more logical --- static/app/views/monitors/components/detailsSidebar.tsx | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/static/app/views/monitors/components/detailsSidebar.tsx b/static/app/views/monitors/components/detailsSidebar.tsx index 1ee247365a4364..a64c8df7676f5b 100644 --- a/static/app/views/monitors/components/detailsSidebar.tsx +++ b/static/app/views/monitors/components/detailsSidebar.tsx @@ -69,7 +69,10 @@ export default function DetailsSidebar({monitorEnv, monitor}: Props) { {t('Schedule')} - {scheduleAsText(monitor.config)} + + {scheduleAsText(monitor.config)}{' '} + {schedule_type === ScheduleType.CRONTAB && `(${timezone})`} + {schedule_type === ScheduleType.CRONTAB && ( ({schedule}) )} @@ -96,9 +99,6 @@ export default function DetailsSidebar({monitorEnv, monitor}: Props) { {t('Cron Details')} - {schedule_type === ScheduleType.CRONTAB && ( - - )} Date: Wed, 8 May 2024 09:23:28 -0700 Subject: [PATCH 147/376] chore(issues): Enable stronger typing on two endpoints (#70488) This is another quick follow up to #69828 since these two type errors are trivial to resolve. --- pyproject.toml | 2 ++ src/sentry/issues/endpoints/organization_activity.py | 6 +++++- .../endpoints/organization_release_previous_commits.py | 3 ++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c27789ea0709e1..7ffeb82610f8b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -573,6 +573,8 @@ module = [ "sentry.issues.constants", "sentry.issues.endpoints", "sentry.issues.endpoints.group_events", + "sentry.issues.endpoints.organization_activity", + "sentry.issues.endpoints.organization_release_previous_commits", "sentry.issues.endpoints.project_stacktrace_link", "sentry.issues.escalating_group_forecast", "sentry.issues.escalating_issues_alg", diff --git a/src/sentry/issues/endpoints/organization_activity.py b/src/sentry/issues/endpoints/organization_activity.py index 8f1d5bd2e0a49a..df13ded4e97778 100644 --- a/src/sentry/issues/endpoints/organization_activity.py +++ b/src/sentry/issues/endpoints/organization_activity.py @@ -11,6 +11,8 @@ from sentry.api.paginator import DateTimePaginator from sentry.api.serializers import OrganizationActivitySerializer, serialize from sentry.models.activity import Activity +from sentry.models.organization import Organization +from sentry.models.organizationmember import OrganizationMember from sentry.models.organizationmemberteam import OrganizationMemberTeam from sentry.models.project import Project from sentry.types.activity import ActivityType @@ -28,7 +30,9 @@ class OrganizationActivityEndpoint(OrganizationMemberEndpoint, EnvironmentMixin) "Activities for each issue at 'GET /api/0/organizations/{organization_slug}/issues/{issue_id}/activities/'", "api.organization-activity.brownout", ) - def get(self, request: Request, organization, member) -> Response: + def get( + self, request: Request, organization: Organization, member: OrganizationMember + ) -> Response: # There is an activity record created for both sides of the unmerge # operation, so we only need to include one of them here to avoid # showing the same entry twice. 
diff --git a/src/sentry/issues/endpoints/organization_release_previous_commits.py b/src/sentry/issues/endpoints/organization_release_previous_commits.py
index 790bb99e1683e4..41ef12e0840fcf 100644
--- a/src/sentry/issues/endpoints/organization_release_previous_commits.py
+++ b/src/sentry/issues/endpoints/organization_release_previous_commits.py
@@ -8,6 +8,7 @@
 from sentry.api.bases.organization import OrganizationReleasesBaseEndpoint
 from sentry.api.exceptions import ResourceDoesNotExist
 from sentry.api.serializers import serialize
+from sentry.models.organization import Organization
 from sentry.models.release import Release
 from sentry.ratelimits.config import RateLimitConfig

@@ -20,7 +21,7 @@ class OrganizationReleasePreviousCommitsEndpoint(OrganizationReleasesBaseEndpoin
     owner = ApiOwner.ISSUES
     rate_limits = RateLimitConfig(group="CLI")

-    def get(self, request: Request, organization, version) -> Response:
+    def get(self, request: Request, organization: Organization, version: str) -> Response:
         """
         Retrieve an Organization's Most Recent Release with Commits
         ````````````````````````````````````````````````````````````

From a96f1ff4e272456e5852eb2318396ba7ce925ab1 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 8 May 2024 12:34:37 -0400
Subject: [PATCH 148/376] ref(crons): Improve details legend (#70515)

The legend now differentiates between timeout and failed

---------

Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com>
---
 .../monitors/components/detailsSidebar.tsx | 18 ++++----
 .../monitors/components/monitorIndicator.tsx | 7 ++-
 .../components/timeline/checkInTimeline.tsx | 44 ++-----------------
 static/app/views/monitors/utils.tsx | 38 ++++++++++++++++
 4 files changed, 57 insertions(+), 50 deletions(-)

diff --git a/static/app/views/monitors/components/detailsSidebar.tsx b/static/app/views/monitors/components/detailsSidebar.tsx
index a64c8df7676f5b..095880c500d9ac 100644
--- a/static/app/views/monitors/components/detailsSidebar.tsx
+++ b/static/app/views/monitors/components/detailsSidebar.tsx
@@ -18,7 +18,7 @@ import {
 } from 'sentry/views/monitors/components/monitorForm';
 import {MonitorIndicator} from 'sentry/views/monitors/components/monitorIndicator';
 import type {Monitor, MonitorEnvironment} from 'sentry/views/monitors/types';
-import {ScheduleType} from 'sentry/views/monitors/types';
+import {CheckInStatus, ScheduleType} from 'sentry/views/monitors/types';
 import {scheduleAsText} from 'sentry/views/monitors/utils/scheduleAsText';

 interface Props {
@@ -77,21 +77,23 @@ export default function DetailsSidebar({monitorEnv, monitor}: Props) {
 ({schedule})
 )}
- {t('Margins')}
+ {t('Legend')}
-
+
 {tn(
- 'Check-ins missed after %s min',
- 'Check-ins missed after %s mins',
+ 'Check-in missed after %s min',
+ 'Check-in missed after %s mins',
 checkin_margin ?? DEFAULT_CHECKIN_MARGIN
 )}
-
+
+ {t('Check-in reported as failed')}
+
 {tn(
- 'Check-ins longer than %s min or errors',
- 'Check-ins longer than %s mins or errors',
+ 'Check-in timed out after %s min',
+ 'Check-in timed out after %s mins',
 max_runtime ??
DEFAULT_MAX_RUNTIME )} diff --git a/static/app/views/monitors/components/monitorIndicator.tsx b/static/app/views/monitors/components/monitorIndicator.tsx index b8f9e2cee0354c..d32b70561ca348 100644 --- a/static/app/views/monitors/components/monitorIndicator.tsx +++ b/static/app/views/monitors/components/monitorIndicator.tsx @@ -1,15 +1,18 @@ import styled from '@emotion/styled'; +import type {CheckInStatus} from 'sentry/views/monitors/types'; +import {getTickStyle} from 'sentry/views/monitors/utils'; + const MonitorIndicator = styled('div')<{ size: number; - status: 'success' | 'warning' | 'error' | 'disabled'; + status: CheckInStatus; }>` display: inline-block; position: relative; border-radius: 50%; height: ${p => p.size}px; width: ${p => p.size}px; - background: ${p => p.theme[p.status]}; + ${p => getTickStyle(p.status, p.theme)} `; export {MonitorIndicator}; diff --git a/static/app/views/monitors/components/timeline/checkInTimeline.tsx b/static/app/views/monitors/components/timeline/checkInTimeline.tsx index fcca51e0f686b3..e58efa15304ec4 100644 --- a/static/app/views/monitors/components/timeline/checkInTimeline.tsx +++ b/static/app/views/monitors/components/timeline/checkInTimeline.tsx @@ -1,10 +1,9 @@ -import {css} from '@emotion/react'; import styled from '@emotion/styled'; import {DateTime} from 'sentry/components/dateTime'; import {Tooltip} from 'sentry/components/tooltip'; import {CheckInStatus} from 'sentry/views/monitors/types'; -import {tickStyle} from 'sentry/views/monitors/utils'; +import {getTickStyle} from 'sentry/views/monitors/utils'; import {getAggregateStatus} from './utils/getAggregateStatus'; import {mergeBuckets} from './utils/mergeBuckets'; @@ -131,44 +130,7 @@ const JobTick = styled('div')<{ transform: translateY(-50%); opacity: 0.7; - ${p => { - const style = tickStyle[p.status]; - - if (style.hatchTick === undefined) { - return css` - background: ${p.theme[style.tickColor]}; - `; - } - - return css` - border: 1px solid ${p.theme[style.tickColor]}; - ${!p.roundedLeft && 'border-left-width: 0'}; - ${!p.roundedRight && 'border-right-width: 0'}; - - background-size: 3px 3px; - opacity: 0.5; - background-image: linear-gradient( - -45deg, - ${p.theme[style.hatchTick]} 25%, - transparent 25%, - transparent 50%, - ${p.theme[style.hatchTick]} 50%, - ${p.theme[style.hatchTick]} 75%, - transparent 75%, - transparent - ), - linear-gradient( - 45deg, - ${p.theme[style.hatchTick]} 25%, - transparent 25%, - transparent 50%, - ${p.theme[style.hatchTick]} 50%, - ${p.theme[style.hatchTick]} 75%, - transparent 75%, - transparent - ); - `; - }}; + ${p => getTickStyle(p.status, p.theme)}; ${p => p.roundedLeft && @@ -182,4 +144,6 @@ const JobTick = styled('div')<{ border-top-right-radius: 2px; border-bottom-right-radius: 2px; `} + ${p => !p.roundedLeft && 'border-left-width: 0'}; + ${p => !p.roundedRight && 'border-right-width: 0'}; `; diff --git a/static/app/views/monitors/utils.tsx b/static/app/views/monitors/utils.tsx index f84d26af56fd97..3b2b634cd11611 100644 --- a/static/app/views/monitors/utils.tsx +++ b/static/app/views/monitors/utils.tsx @@ -1,3 +1,5 @@ +import {css, type Theme} from '@emotion/react'; + import {t, tn} from 'sentry/locale'; import type {Organization, SelectValue} from 'sentry/types'; import type {ColorOrAlias} from 'sentry/utils/theme'; @@ -95,3 +97,39 @@ export const getScheduleIntervals = (n: number): SelectValue[] => [ {value: 'month', label: tn('month', 'months', n)}, {value: 'year', label: tn('year', 'years', n)}, ]; + +export function 
getTickStyle(status: CheckInStatus, theme: Theme) {
+ const style = tickStyle[status];
+
+ if (style.hatchTick === undefined) {
+ return css`
+ background: ${theme[style.tickColor]};
+ `;
+ }
+
+ return css`
+ border: 1px solid ${theme[style.tickColor]};
+ background-size: 3px 3px;
+ opacity: 0.5;
+ background-image: linear-gradient(
+ -45deg,
+ ${theme[style.hatchTick]} 25%,
+ transparent 25%,
+ transparent 50%,
+ ${theme[style.hatchTick]} 50%,
+ ${theme[style.hatchTick]} 75%,
+ transparent 75%,
+ transparent
+ ),
+ linear-gradient(
+ 45deg,
+ ${theme[style.hatchTick]} 25%,
+ transparent 25%,
+ transparent 50%,
+ ${theme[style.hatchTick]} 50%,
+ ${theme[style.hatchTick]} 75%,
+ transparent 75%,
+ transparent
+ );
+ `;
+}
From 35b38d9f218fcf172da360414b772b0fbbbded1e Mon Sep 17 00:00:00 2001
From: Steven Eubank <47563310+smeubank@users.noreply.github.com>
Date: Wed, 8 May 2024 18:38:57 +0200
Subject: [PATCH 149/376] Add Deno Runtime Icon (#69100)

QoL, this has been bugging me. Should show the Deno icon for events from Deno runtimes hosted on Supabase and others.

After:
![image](https://github.com/getsentry/sentry/assets/47563310/a1c63d17-3a89-4639-93d1-bca250f0bc31)
![image](https://github.com/getsentry/sentry/assets/47563310/ca66d7cd-8315-4366-a828-8a93936c1a07)

---
 src/sentry/static/sentry/images/logos/logo-deno.svg | 1 +
 static/app/components/events/contextSummary/contextIcon.tsx | 2 ++
 2 files changed, 3 insertions(+)
 create mode 100644 src/sentry/static/sentry/images/logos/logo-deno.svg

diff --git a/src/sentry/static/sentry/images/logos/logo-deno.svg b/src/sentry/static/sentry/images/logos/logo-deno.svg
new file mode 100644
index 00000000000000..dd1e8d1a36ed0e
--- /dev/null
+++ b/src/sentry/static/sentry/images/logos/logo-deno.svg
@@ -0,0 +1 @@
+
diff --git a/static/app/components/events/contextSummary/contextIcon.tsx b/static/app/components/events/contextSummary/contextIcon.tsx
index 9983a03bcec60c..5f71db987a3f9e 100644
--- a/static/app/components/events/contextSummary/contextIcon.tsx
+++ b/static/app/components/events/contextSummary/contextIcon.tsx
@@ -13,6 +13,7 @@ import logoArm from 'sentry-logos/logo-arm.svg';
 import logoChrome from 'sentry-logos/logo-chrome.svg';
 import logoChromium from 'sentry-logos/logo-chromium.svg';
 import logoCrystal from 'sentry-logos/logo-crystal.svg';
+import logoDeno from 'sentry-logos/logo-deno.svg';
 import logoDotnet from 'sentry-logos/logo-dotnet.svg';
 import logoEdgeNew from 'sentry-logos/logo-edge-new.svg';
 import logoEdgeOld from 'sentry-logos/logo-edge-old.svg';
@@ -72,6 +73,7 @@ const LOGO_MAPPING = {
 cpython: logoPython,
 crystal: logoCrystal,
 darwin: logoApple,
+ deno: logoDeno,
 edge: logoEdgeNew,
 electron: logoElectron,
 firefox: logoFirefox,
From 56514bc045185de88d5bb8611e35d2fa6252d106 Mon Sep 17 00:00:00 2001
From: Catherine Lee <55311782+c298lee@users.noreply.github.com>
Date: Wed, 8 May 2024 12:47:19 -0400
Subject: [PATCH 150/376] ref(replay): Rage click clicked element name (#70493)

Since we now use React component names in the selector path, we should modify the clicked element to provide more specific info such as class, role, and other attributes, as sketched below.
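A quick illustration of what the new helper produces (the helper `_make_clicked_element` is added in `issue_creation.py` below; the node values here are made up for the example):

```python
from sentry.replays.usecases.ingest.issue_creation import _make_clicked_element

# Illustrative rrweb-style node; only a few of the recognized attributes
# (id, class, role, alt, title, test ids, data-sentry-component) are shown.
node = {
    "tagName": "a",
    "attributes": {"id": "id", "class": "class1 class2", "role": "button"},
}

# The helper appends CSS-selector-style fragments to the tag name.
assert _make_clicked_element(node) == 'a#id.class1.class2[role="button"]'
```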
This prevents repeating the React component name and gives more useful information for debugging, since React component names alone aren't very specific.

---
 .../replays/usecases/ingest/issue_creation.py | 26 ++++++++++-
 .../replays/unit/test_issue_creation.py | 46 +++++++++++++++----
 2 files changed, 63 insertions(+), 9 deletions(-)

diff --git a/src/sentry/replays/usecases/ingest/issue_creation.py b/src/sentry/replays/usecases/ingest/issue_creation.py
index d2efaa0d0dfeb6..0d210db826c9fc 100644
--- a/src/sentry/replays/usecases/ingest/issue_creation.py
+++ b/src/sentry/replays/usecases/ingest/issue_creation.py
@@ -32,7 +32,7 @@ def report_rage_click_issue_with_replay_event(
 timestamp_utc = date.replace(tzinfo=datetime.UTC)
 selector = selector
- clicked_element = selector.split(" > ")[-1]
+ clicked_element = _make_clicked_element(node)
 component_name = component_name
 evidence = [
 IssueEvidence(name="Clicked Element", value=clicked_element, important=False),
@@ -110,3 +110,27 @@ def _make_tags(replay_id, url, replay_event):
 tags.update(replay_event["tags"])
 return tags
+
+
+def _make_clicked_element(node):
+ element = node.get("tagName", "")
+ if "attributes" in node:
+ for key, value in node["attributes"].items():
+ if key == "id":
+ element += f"#{value}"
+ elif key == "class":
+ element = element + "." + ".".join(value.split(" "))
+ elif key == "role":
+ element += f'[role="{value}"]'
+ elif key == "alt":
+ element += f'[alt="{value}"]'
+ elif key == "data-test-id" or key == "data-testid":
+ element += f'[data-test-id="{value}"]'
+ elif key == "aria-label":
+ element += f'[aria="{value}"]'
+ elif key == "title":
+ element += f'[title="{value}"]'
+ elif key == "data-sentry-component":
+ element += f'[data-sentry-component="{value}"]'
+
+ return element
diff --git a/tests/sentry/replays/unit/test_issue_creation.py b/tests/sentry/replays/unit/test_issue_creation.py
index 510d78b49fd5b5..85125317840fef 100644
--- a/tests/sentry/replays/unit/test_issue_creation.py
+++ b/tests/sentry/replays/unit/test_issue_creation.py
@@ -23,36 +23,66 @@ def test_report_rage_click_issue_with_replay_event(mock_new_issue_occurrence, de
 report_rage_click_issue_with_replay_event(
 project_id=default_project.id,
 replay_id=replay_id,
- selector="div.xyz > a",
+ selector="div.xyz > SmartSearchBar",
 timestamp=seq1_timestamp.timestamp(),
 url="https://www.sentry.io",
- node={"tagName": "a"},
+ node={
+ "tagName": "a",
+ "attributes": {
+ "id": "id",
+ "class": "class1 class2",
+ "role": "button",
+ "aria-label": "test",
+ "alt": "1",
+ "data-testid": "2",
+ "title": "3",
+ "data-sentry-component": "SignUpForm",
+ },
+ },
 component_name="SmartSearchBar",
 replay_event=mock_replay_event(),
 )
 issue_occurence_call = mock_new_issue_occurrence.call_args[1]
 assert issue_occurence_call["culprit"] == "https://www.sentry.io"
 assert issue_occurence_call["environment"] == "production"
- assert issue_occurence_call["fingerprint"] == ["div.xyz > a"]
+ assert issue_occurence_call["fingerprint"] == ["div.xyz > SmartSearchBar"]
 assert issue_occurence_call["issue_type"].type_id == 5002
 assert issue_occurence_call["level"] == "error"
 assert issue_occurence_call["platform"] == "javascript"
 assert issue_occurence_call["project_id"] == default_project.id
- assert issue_occurence_call["subtitle"] == "div.xyz > a"
+ assert issue_occurence_call["subtitle"] == "div.xyz > SmartSearchBar"
 assert issue_occurence_call["title"] == "Rage Click"
 assert issue_occurence_call["evidence_data"] == {
- "node": {"tagName": "a"},
- "selector": "div.xyz > a",
+ "node": {
+ "tagName": "a",
+ "attributes": {
+ "id": "id",
+ "class": "class1 class2",
+ "role": "button",
+ "aria-label": "test",
+ "alt": "1",
+ "data-testid": "2",
+ "title": "3",
+ "data-sentry-component": "SignUpForm",
+ },
+ },
+ "selector": "div.xyz > SmartSearchBar",
 "component_name": "SmartSearchBar",
 }
 assert (
 issue_occurence_call["evidence_display"][0].to_dict()
- == IssueEvidence(name="Clicked Element", value="a", important=False).to_dict()
+ == IssueEvidence(
+ name="Clicked Element",
+ value='a#id.class1.class2[role="button"][aria="test"][alt="1"][data-test-id="2"][title="3"][data-sentry-component="SignUpForm"]',
+ important=False,
+ ).to_dict()
 )
 assert (
 issue_occurence_call["evidence_display"][1].to_dict()
- == IssueEvidence(name="Selector Path", value="div.xyz > a", important=False).to_dict()
+ == IssueEvidence(
+ name="Selector Path", value="div.xyz > SmartSearchBar", important=False
+ ).to_dict()
 )
 assert (
 issue_occurence_call["evidence_display"][2].to_dict()
From 1354478967d580699107222d42a95373b5a4dcaa Mon Sep 17 00:00:00 2001
From: Raj Joshi
Date: Wed, 8 May 2024 09:56:24 -0700
Subject: [PATCH 151/376] feat(chartcuterie): Change Chart Stylings (#70489)

Two quick updates:

1. Changes the label for regressed to a darker color so we can read it better
![image](https://github.com/getsentry/sentry/assets/33237075/80cd09dd-3619-4d58-abd3-65795de248c0)

2. Updated some chart styling for Slack so the legend is on the left
![image](https://github.com/getsentry/sentry/assets/33237075/de5a048a-cad5-4e9a-b152-15cbf9bc254e)

---
 static/app/chartcuterie/performance.tsx | 2 ++
 static/app/views/performance/utils/getIntervalLine.tsx | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/static/app/chartcuterie/performance.tsx b/static/app/chartcuterie/performance.tsx
index c17e86f12ac868..452d440eecddcf 100644
--- a/static/app/chartcuterie/performance.tsx
+++ b/static/app/chartcuterie/performance.tsx
@@ -14,6 +14,8 @@ export const performanceCharts: RenderDescriptor[] = [];
 function modifyOptionsForSlack(options: Omit) {
 options.legend = options.legend || {};
 options.legend.icon = 'none';
+ options.legend.left = '25';
+ options.legend.top = '20';
 return {
 ...options,
diff --git a/static/app/views/performance/utils/getIntervalLine.tsx b/static/app/views/performance/utils/getIntervalLine.tsx
index 42d15371f55a4b..5fe71c8c04e4cf 100644
--- a/static/app/views/performance/utils/getIntervalLine.tsx
+++ b/static/app/views/performance/utils/getIntervalLine.tsx
@@ -180,7 +180,7 @@ export function getIntervalLine(
 transformedTransaction.aggregate_range_2
 )}`,
 position: 'insideEndBottom',
- color: theme.red300,
+ color: theme.gray400,
 };
 additionalLineSeries.push({
From 0b1e18c9da05c66dd593a83381d85c3f60728a13 Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Wed, 8 May 2024 13:03:26 -0400
Subject: [PATCH 152/376] feat(traces): More improvements, use sdk names now (#70514)

### Summary
This uses SDK names as well as the project to differentiate services in multi-service, single-project traces. This also adds some small UI tweaks.
--- .../app/views/performance/traces/content.tsx | 49 +++++++---- static/app/views/performance/traces/data.tsx | 1 + .../performance/traces/fieldRenderers.tsx | 81 +++++++++++++------ static/app/views/performance/traces/utils.tsx | 19 ++++- 4 files changed, 110 insertions(+), 40 deletions(-) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index ac0441177a8071..e531eda772e811 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -33,13 +33,14 @@ import { ProjectRenderer, SpanBreakdownSliceRenderer, SpanIdRenderer, + SpanTimeRenderer, TraceBreakdownContainer, TraceBreakdownRenderer, TraceIdRenderer, TraceIssuesRenderer, } from './fieldRenderers'; import {TracesSearchBar} from './tracesSearchBar'; -import {normalizeTraces} from './utils'; +import {getSecondaryNameFromSpan, getStylingSliceName, normalizeTraces} from './utils'; const DEFAULT_PER_PAGE = 20; @@ -135,15 +136,17 @@ export function Content() { {t('Total Spans')}
- {t('Breakdown')} + {t('Timeline')} - {t('Trace Duration')} + {t('Duration')} + + + {t('Timestamp')} {t('Issues')} - {isLoading && ( @@ -177,20 +180,22 @@ function TraceRow({trace}: {trace: TraceResult}) { }), [_setHighlightedSliceName] ); + + const onClickExpand = useCallback(() => setExpanded(e => !e), [setExpanded]); + return ( - + - )} - - - - ); -} - -const getColumnOrder = ( - span: Pick< - SpanIndexedFieldTypes, - SpanIndexedField.SPAN_GROUP | SpanIndexedField.SPAN_OP - > -): TableColumnHeader[] => [ - { - key: 'transaction', - name: t('Found In'), - width: COL_WIDTH_UNDEFINED, - }, - { - key: 'spm()', - name: getThroughputTitle(span[SpanIndexedField.SPAN_OP]), - width: COL_WIDTH_UNDEFINED, - }, - { - key: `avg(${SpanMetricsField.SPAN_SELF_TIME})`, - name: DataTitles.avg, - width: COL_WIDTH_UNDEFINED, - }, - ...(span?.['span.op']?.startsWith('http') - ? ([ - { - key: `http_error_count()`, - name: DataTitles.errorCount, - width: COL_WIDTH_UNDEFINED, - }, - ] as TableColumnHeader[]) - : []), - { - key: 'time_spent_percentage()', - name: DataTitles.timeSpent, - width: COL_WIDTH_UNDEFINED, - }, -]; - -const Footer = styled('div')` - display: flex; - justify-content: space-between; -`; - -const StyledPagination = styled(Pagination)` - margin-top: 0; - margin-left: auto; -`; - -export function isAValidSort(sort: Sort): sort is ValidSort { - return SORTABLE_FIELDS.has(sort.field); -} From f810375236c59b8d2849acae0d95fc79f2386b8e Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 8 May 2024 15:42:45 -0400 Subject: [PATCH 168/376] chore(trace-explorer): Use end timestamp (#70526) Historically we use end timestamp for everything. And the searching is based on the end timestamp so rendering start timestamp here can result in cases where the user selects last hour but the trace started 2 hours ago. 
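A concrete example of the mismatch (hypothetical timestamps, not taken from the PR):

```python
from datetime import datetime

# A trace that started at 10:00 and finished at 11:30.
trace_start = datetime(2024, 5, 8, 10, 0)
trace_end = datetime(2024, 5, 8, 11, 30)

# At 12:00 the user selects "last hour": the search window is 11:00-12:00.
window_start = datetime(2024, 5, 8, 11, 0)
window_end = datetime(2024, 5, 8, 12, 0)

# The search matches because the *end* timestamp is inside the window...
assert window_start <= trace_end <= window_end
# ...while the start timestamp falls outside it, which is why rendering the
# start timestamp would look wrong next to the selected time range.
assert not (window_start <= trace_start <= window_end)
```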
--- static/app/views/performance/traces/content.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index e531eda772e811..7e94eb8ff4cd6f 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -224,7 +224,7 @@ function TraceRow({trace}: {trace: TraceResult}) { - + @@ -333,7 +333,7 @@ function SpanRow({ From ff36ac168899974a6cf2ce45a2b81b4d24b0991b Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Wed, 8 May 2024 12:47:02 -0700 Subject: [PATCH 169/376] ref(api-idorslug): `api-docs` changes for renaming `organization_slug` to `organization_id_or_slug` (#70378) This PR contains the api-docs changes for https://github.com/getsentry/sentry/pull/70081/ --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- api-docs/openapi.json | 72 +++++++++---------- api-docs/paths/events/issue-details.json | 12 ++-- api-docs/paths/events/issue-events.json | 4 +- api-docs/paths/events/issue-hashes.json | 39 +++------- api-docs/paths/events/latest-event.json | 34 +++------ api-docs/paths/events/oldest-event.json | 34 +++------ .../paths/events/project-event-details.json | 4 +- api-docs/paths/events/project-events.json | 4 +- api-docs/paths/events/project-issues.json | 12 ++-- api-docs/paths/events/tag-details.json | 4 +- api-docs/paths/events/tag-values.json | 4 +- .../sentry-app-installations.json | 2 +- api-docs/paths/organizations/details.json | 8 +-- .../paths/organizations/event-id-lookup.json | 4 +- .../paths/organizations/repo-commits.json | 2 +- api-docs/paths/organizations/repos.json | 2 +- api-docs/paths/organizations/shortid.json | 4 +- api-docs/paths/projects/dsyms.json | 12 ++-- .../paths/projects/service-hook-details.json | 12 ++-- api-docs/paths/projects/service-hooks.json | 8 +-- api-docs/paths/projects/spike-protection.json | 8 +-- api-docs/paths/projects/stats.json | 4 +- api-docs/paths/projects/tag-values.json | 4 +- api-docs/paths/projects/user-feedback.json | 8 +-- api-docs/paths/projects/users.json | 4 +- api-docs/paths/releases/deploys.json | 8 +-- .../organization-release-commit-files.json | 4 +- .../organization-release-commits.json | 4 +- .../paths/releases/organization-release.json | 12 ++-- .../paths/releases/organization-releases.json | 8 +-- .../releases/project-release-commits.json | 4 +- .../paths/releases/project-release-file.json | 12 ++-- .../paths/releases/project-release-files.json | 8 +-- api-docs/paths/releases/release-file.json | 12 ++-- api-docs/paths/releases/release-files.json | 8 +-- api-docs/paths/teams/by-slug.json | 12 ++-- api-docs/paths/teams/stats.json | 4 +- 37 files changed, 172 insertions(+), 229 deletions(-) diff --git a/api-docs/openapi.json b/api-docs/openapi.json index 958c48a3765018..1aa21bbb140a26 100644 --- a/api-docs/openapi.json +++ b/api-docs/openapi.json @@ -87,115 +87,115 @@ } ], "paths": { - "/api/0/teams/{organization_slug}/{team_id_or_slug}/": { + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/": { "$ref": "paths/teams/by-slug.json" }, - "/api/0/teams/{organization_slug}/{team_id_or_slug}/stats/": { + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/stats/": { "$ref": "paths/teams/stats.json" }, "/api/0/organizations/": { "$ref": "paths/organizations/index.json" }, - "/api/0/organizations/{organization_slug}/eventids/{event_id}/": { + "/api/0/organizations/{organization_id_or_slug}/eventids/{event_id}/": { 
"$ref": "paths/organizations/event-id-lookup.json" }, - "/api/0/organizations/{organization_slug}/": { + "/api/0/organizations/{organization_id_or_slug}/": { "$ref": "paths/organizations/details.json" }, - "/api/0/organizations/{organization_slug}/repos/": { + "/api/0/organizations/{organization_id_or_slug}/repos/": { "$ref": "paths/organizations/repos.json" }, - "/api/0/organizations/{organization_slug}/repos/{repo_id}/commits/": { + "/api/0/organizations/{organization_id_or_slug}/repos/{repo_id}/commits/": { "$ref": "paths/organizations/repo-commits.json" }, - "/api/0/organizations/{organization_slug}/shortids/{short_id}/": { + "/api/0/organizations/{organization_id_or_slug}/shortids/{short_id}/": { "$ref": "paths/organizations/shortid.json" }, "/api/0/projects/": { "$ref": "paths/projects/index.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/files/dsyms/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/": { "$ref": "paths/projects/dsyms.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/users/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/users/": { "$ref": "paths/projects/users.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/tags/{key}/values/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/{key}/values/": { "$ref": "paths/projects/tag-values.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/stats/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/stats/": { "$ref": "paths/projects/stats.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/user-feedback/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/user-feedback/": { "$ref": "paths/projects/user-feedback.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/hooks/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/": { "$ref": "paths/projects/service-hooks.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/hooks/{hook_id}/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/{hook_id}/": { "$ref": "paths/projects/service-hook-details.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/events/{event_id}/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/": { "$ref": "paths/events/project-event-details.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/events/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/": { "$ref": "paths/events/project-events.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/issues/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/issues/": { "$ref": "paths/events/project-issues.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/tags/{key}/values/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/values/": { "$ref": "paths/events/tag-values.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/tags/{key}/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/": { "$ref": "paths/events/tag-details.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/hashes/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/hashes/": { "$ref": "paths/events/issue-hashes.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/events/oldest/": { 
+ "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/oldest/": { "$ref": "paths/events/oldest-event.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/events/latest/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/latest/": { "$ref": "paths/events/latest-event.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/events/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/": { "$ref": "paths/events/issue-events.json" }, - "/api/0/organizations/{organization_slug}/issues/{issue_id}/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/": { "$ref": "paths/events/issue-details.json" }, - "/api/0/organizations/{organization_slug}/releases/": { + "/api/0/organizations/{organization_id_or_slug}/releases/": { "$ref": "paths/releases/organization-releases.json" }, - "/api/0/organizations/{organization_slug}/releases/{version}/": { + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/": { "$ref": "paths/releases/organization-release.json" }, - "/api/0/organizations/{organization_slug}/releases/{version}/files/": { + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/files/": { "$ref": "paths/releases/release-files.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/releases/{version}/files/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/files/": { "$ref": "paths/releases/project-release-files.json" }, - "/api/0/organizations/{organization_slug}/releases/{version}/files/{file_id}/": { + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/files/{file_id}/": { "$ref": "paths/releases/release-file.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/releases/{version}/files/{file_id}/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/files/{file_id}/": { "$ref": "paths/releases/project-release-file.json" }, - "/api/0/organizations/{organization_slug}/releases/{version}/commits/": { + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/commits/": { "$ref": "paths/releases/organization-release-commits.json" }, - "/api/0/projects/{organization_slug}/{project_id_or_slug}/releases/{version}/commits/": { + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/commits/": { "$ref": "paths/releases/project-release-commits.json" }, - "/api/0/organizations/{organization_slug}/releases/{version}/commitfiles/": { + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/commitfiles/": { "$ref": "paths/releases/organization-release-commit-files.json" }, - "/api/0/organizations/{organization_slug}/releases/{version}/deploys/": { + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/deploys/": { "$ref": "paths/releases/deploys.json" }, - "/api/0/organizations/{organization_slug}/sentry-app-installations/": { + "/api/0/organizations/{organization_id_or_slug}/sentry-app-installations/": { "$ref": "paths/integration-platform/sentry-app-installations.json" }, "/api/0/sentry-app-installations/{uuid}/external-issues/": { @@ -204,7 +204,7 @@ "/api/0/sentry-app-installations/{uuid}/external-issues/{external_issue_id}/": { "$ref": "paths/integration-platform/sentry-app-external-issue-details.json" }, - "/api/0/organizations/{organization_slug}/spike-protections/": { + "/api/0/organizations/{organization_id_or_slug}/spike-protections/": { "$ref": 
"paths/projects/spike-protection.json" } }, diff --git a/api-docs/paths/events/issue-details.json b/api-docs/paths/events/issue-details.json index 9fccc2c9e9c674..d73c5db6ce42fa 100644 --- a/api-docs/paths/events/issue-details.json +++ b/api-docs/paths/events/issue-details.json @@ -5,9 +5,9 @@ "operationId": "Retrieve an Issue", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belongs to.", + "description": "The id or slug of the organization the issue belongs to.", "required": true, "schema": { "type": "string" @@ -186,9 +186,9 @@ "operationId": "Update an Issue", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belongs to.", + "description": "The id or slug of the organization the issue belongs to.", "required": true, "schema": { "type": "string" @@ -306,9 +306,9 @@ "operationId": "Remove an Issue", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belongs to.", + "description": "The id or slug of the organization the issue belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/events/issue-events.json b/api-docs/paths/events/issue-events.json index a42b3d440cbb12..ea2b6ec41ad713 100644 --- a/api-docs/paths/events/issue-events.json +++ b/api-docs/paths/events/issue-events.json @@ -5,9 +5,9 @@ "operationId": "List an Issue's Events", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issues belongs to.", + "description": "The id or slug of the organization the issues belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/events/issue-hashes.json b/api-docs/paths/events/issue-hashes.json index a20e1fca3e15df..9f4d1c4e2cae89 100644 --- a/api-docs/paths/events/issue-hashes.json +++ b/api-docs/paths/events/issue-hashes.json @@ -5,9 +5,9 @@ "operationId": "List an Issue's Hashes", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belong to.", + "description": "The id or slug of the organization the issue belong to.", "required": true, "schema": { "type": "string" @@ -159,17 +159,11 @@ 71, " return fn.apply(this, wrappedArguments);" ], - [ - 72, - " // tslint:enable:no-unsafe-any" - ], + [72, " // tslint:enable:no-unsafe-any"], [73, " }"], [74, " catch (ex) {"], [75, " ignoreNextOnError();"], - [ - 76, - " withScope(function (scope) {" - ] + [76, " withScope(function (scope) {"] ], "symbolAddr": null, "trust": null, @@ -190,17 +184,11 @@ "instructionAddr": null, "context": [ [69, " */"], - [ - 70, - " triggerAsync: function triggerAsync() {" - ], + [70, " triggerAsync: function triggerAsync() {"], [71, " var args = arguments,"], [72, " me = this;"], [73, " _.nextTick(function () {"], - [ - 74, - " me.trigger.apply(me, args);" - ], + [74, " me.trigger.apply(me, args);"], [75, " });"], [76, " },"], [77, ""], @@ -238,14 +226,8 @@ "context": [ [76, "/*!"], [77, " Copyright (c) 2018 Jed Watson."], - [ - 78, - " Licensed under the MIT License (MIT), see" - ], - [ - 79, - " http://jedwatson.github.io/react-select" - ], + [78, " Licensed under the MIT License (MIT), see"], + [79, " 
http://jedwatson.github.io/react-select"], [80, "*/"], [ 81, @@ -253,10 +235,7 @@ ], [82, "/*!"], [83, " * JavaScript Cookie v2.2.1"], - [ - 84, - " * https://github.com/js-cookie/js-cookie" - ], + [84, " * https://github.com/js-cookie/js-cookie"], [85, " *"], [ 86, diff --git a/api-docs/paths/events/latest-event.json b/api-docs/paths/events/latest-event.json index c27b30c2003f20..9b30d273243300 100644 --- a/api-docs/paths/events/latest-event.json +++ b/api-docs/paths/events/latest-event.json @@ -5,9 +5,9 @@ "operationId": "Retrieve the Latest Event for an Issue", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belong to.", + "description": "The id or slug of the organization the issue belong to.", "required": true, "schema": { "type": "string" @@ -145,10 +145,7 @@ 71, " return fn.apply(this, wrappedArguments);" ], - [ - 72, - " // tslint:enable:no-unsafe-any" - ], + [72, " // tslint:enable:no-unsafe-any"], [73, " }"], [74, " catch (ex) {"], [75, " ignoreNextOnError();"], @@ -173,10 +170,7 @@ "instructionAddr": null, "context": [ [69, " */"], - [ - 70, - " triggerAsync: function triggerAsync() {" - ], + [70, " triggerAsync: function triggerAsync() {"], [71, " var args = arguments,"], [72, " me = this;"], [73, " _.nextTick(function () {"], @@ -218,14 +212,8 @@ "context": [ [76, "/*!"], [77, " Copyright (c) 2018 Jed Watson."], - [ - 78, - " Licensed under the MIT License (MIT), see" - ], - [ - 79, - " http://jedwatson.github.io/react-select" - ], + [78, " Licensed under the MIT License (MIT), see"], + [79, " http://jedwatson.github.io/react-select"], [80, "*/"], [ 81, @@ -233,15 +221,9 @@ ], [82, "/*!"], [83, " * JavaScript Cookie v2.2.1"], - [ - 84, - " * https://github.com/js-cookie/js-cookie" - ], + [84, " * https://github.com/js-cookie/js-cookie"], [85, " *"], - [ - 86, - " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack" - ] + [86, " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack"] ], "symbolAddr": null, "trust": null, diff --git a/api-docs/paths/events/oldest-event.json b/api-docs/paths/events/oldest-event.json index c543817903998b..2b8648974434b1 100644 --- a/api-docs/paths/events/oldest-event.json +++ b/api-docs/paths/events/oldest-event.json @@ -5,9 +5,9 @@ "operationId": "Retrieve the Oldest Event for an Issue", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belong to.", + "description": "The id or slug of the organization the issue belong to.", "required": true, "schema": { "type": "string" @@ -145,10 +145,7 @@ 71, " return fn.apply(this, wrappedArguments);" ], - [ - 72, - " // tslint:enable:no-unsafe-any" - ], + [72, " // tslint:enable:no-unsafe-any"], [73, " }"], [74, " catch (ex) {"], [75, " ignoreNextOnError();"], @@ -173,10 +170,7 @@ "instructionAddr": null, "context": [ [69, " */"], - [ - 70, - " triggerAsync: function triggerAsync() {" - ], + [70, " triggerAsync: function triggerAsync() {"], [71, " var args = arguments,"], [72, " me = this;"], [73, " _.nextTick(function () {"], @@ -218,14 +212,8 @@ "context": [ [76, "/*!"], [77, " Copyright (c) 2018 Jed Watson."], - [ - 78, - " Licensed under the MIT License (MIT), see" - ], - [ - 79, - " http://jedwatson.github.io/react-select" - ], + [78, " Licensed under the MIT License (MIT), see"], + [79, " http://jedwatson.github.io/react-select"], [80, "*/"], [ 81, @@ -233,15 +221,9 @@ ], [82, "/*!"], [83, " * 
JavaScript Cookie v2.2.1"], - [ - 84, - " * https://github.com/js-cookie/js-cookie" - ], + [84, " * https://github.com/js-cookie/js-cookie"], [85, " *"], - [ - 86, - " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack" - ] + [86, " * Copyright 2006, 2015 Klaus Hartl & Fagner Brack"] ], "symbolAddr": null, "trust": null, diff --git a/api-docs/paths/events/project-event-details.json b/api-docs/paths/events/project-event-details.json index a6db66e7e9b23b..a70ae01239f225 100644 --- a/api-docs/paths/events/project-event-details.json +++ b/api-docs/paths/events/project-event-details.json @@ -5,9 +5,9 @@ "operationId": "Retrieve an Event for a Project", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the event belongs to.", + "description": "The id or slug of the organization the event belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/events/project-events.json b/api-docs/paths/events/project-events.json index 4e08ac2cc6d901..3019011f0d46c8 100644 --- a/api-docs/paths/events/project-events.json +++ b/api-docs/paths/events/project-events.json @@ -5,9 +5,9 @@ "operationId": "List a Project's Error Events", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the events belong to.", + "description": "The id or slug of the organization the events belong to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/events/project-issues.json b/api-docs/paths/events/project-issues.json index b6de7c4ddb39c5..8c824e90893ea8 100644 --- a/api-docs/paths/events/project-issues.json +++ b/api-docs/paths/events/project-issues.json @@ -5,9 +5,9 @@ "operationId": "List a Project's Issues", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issues belong to.", + "description": "The id or slug of the organization the issues belong to.", "required": true, "schema": { "type": "string" @@ -143,9 +143,9 @@ "operationId": "Bulk Mutate a List of Issues", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issues belong to.", + "description": "The id or slug of the organization the issues belong to.", "required": true, "schema": { "type": "string" @@ -304,9 +304,9 @@ "operationId": "Bulk Remove a List of Issues", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issues belong to.", + "description": "The id or slug of the organization the issues belong to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/events/tag-details.json b/api-docs/paths/events/tag-details.json index 6fd6b6448b5e59..9084913e7e9201 100644 --- a/api-docs/paths/events/tag-details.json +++ b/api-docs/paths/events/tag-details.json @@ -5,9 +5,9 @@ "operationId": "Retrieve Tag Details", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belongs to.", + "description": "The id or slug of the organization the issue belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/events/tag-values.json b/api-docs/paths/events/tag-values.json index 
b2c1bda64cbd4e..c8d604eea8f6f2 100644 --- a/api-docs/paths/events/tag-values.json +++ b/api-docs/paths/events/tag-values.json @@ -5,9 +5,9 @@ "operationId": "List a Tag's Values Related to an Issue", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the issue belongs to.", + "description": "The id or slug of the organization the issue belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/integration-platform/sentry-app-installations.json b/api-docs/paths/integration-platform/sentry-app-installations.json index 5da073f871eeb4..a2416a7a69f88e 100644 --- a/api-docs/paths/integration-platform/sentry-app-installations.json +++ b/api-docs/paths/integration-platform/sentry-app-installations.json @@ -5,7 +5,7 @@ "operationId": "List an Organization's Integration Platform Installations", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", "description": "The organization short name.", "required": true, diff --git a/api-docs/paths/organizations/details.json b/api-docs/paths/organizations/details.json index 1d8641fb0bbac4..2bd77703c4cde8 100644 --- a/api-docs/paths/organizations/details.json +++ b/api-docs/paths/organizations/details.json @@ -5,9 +5,9 @@ "operationId": "Retrieve an Organization", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization to look up.", + "description": "The id or slug of the organization to look up.", "required": true, "schema": { "type": "string" @@ -230,9 +230,9 @@ "operationId": "Update an Organization", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization to update.", + "description": "The id or slug of the organization to update.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/organizations/event-id-lookup.json b/api-docs/paths/organizations/event-id-lookup.json index da598bdfab26a7..d0ebbcfb0f58c3 100644 --- a/api-docs/paths/organizations/event-id-lookup.json +++ b/api-docs/paths/organizations/event-id-lookup.json @@ -5,9 +5,9 @@ "operationId": "Resolve an Event ID", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the event ID should be looked up in.", + "description": "The id or slug of the organization the event ID should be looked up in.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/organizations/repo-commits.json b/api-docs/paths/organizations/repo-commits.json index 0bf765d6d5a344..083a9389aea7fd 100644 --- a/api-docs/paths/organizations/repo-commits.json +++ b/api-docs/paths/organizations/repo-commits.json @@ -5,7 +5,7 @@ "operationId": "List a Repository's Commits", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", "description": "The organization short name.", "required": true, diff --git a/api-docs/paths/organizations/repos.json b/api-docs/paths/organizations/repos.json index b133239de82b2d..5bfb690c03d9cc 100644 --- a/api-docs/paths/organizations/repos.json +++ b/api-docs/paths/organizations/repos.json @@ -5,7 +5,7 @@ "operationId": "List an Organization's Repositories", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", "description": "The 
organization short name.", "required": true, diff --git a/api-docs/paths/organizations/shortid.json b/api-docs/paths/organizations/shortid.json index e4b62c23cfce0c..9d30ba46f57178 100644 --- a/api-docs/paths/organizations/shortid.json +++ b/api-docs/paths/organizations/shortid.json @@ -5,9 +5,9 @@ "operationId": "Resolve a Short ID", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the short ID should be looked up in.", + "description": "The id or slug of the organization the short ID should be looked up in.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/dsyms.json b/api-docs/paths/projects/dsyms.json index 64f9dd5e5295f1..f4844c4b8ad7a4 100644 --- a/api-docs/paths/projects/dsyms.json +++ b/api-docs/paths/projects/dsyms.json @@ -5,9 +5,9 @@ "operationId": "List a Project's Debug Information Files", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the file belongs to.", + "description": "The id or slug of the organization the file belongs to.", "required": true, "schema": { "type": "string" @@ -49,9 +49,9 @@ "operationId": "Upload a New File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the project belongs to.", + "description": "The id or slug of the organization the project belongs to.", "required": true, "schema": { "type": "string" @@ -117,9 +117,9 @@ "operationId": "Delete a Specific Project's Debug Information File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the file belongs to.", + "description": "The id or slug of the organization the file belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/service-hook-details.json b/api-docs/paths/projects/service-hook-details.json index 8fa341253efc39..b6e6fef144ad4d 100644 --- a/api-docs/paths/projects/service-hook-details.json +++ b/api-docs/paths/projects/service-hook-details.json @@ -5,9 +5,9 @@ "operationId": "Retrieve a Service Hook", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the client keys belong to.", + "description": "The id or slug of the organization the client keys belong to.", "required": true, "schema": { "type": "string" @@ -70,9 +70,9 @@ "operationId": "Update a Service Hook", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the client keys belong to.", + "description": "The id or slug of the organization the client keys belong to.", "required": true, "schema": { "type": "string" @@ -166,9 +166,9 @@ "operationId": "Remove a Service Hook", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the client keys belong to.", + "description": "The id or slug of the organization the client keys belong to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/service-hooks.json b/api-docs/paths/projects/service-hooks.json index 887cb044c75dc8..30b6bb9eed3018 100644 --- a/api-docs/paths/projects/service-hooks.json +++ 
b/api-docs/paths/projects/service-hooks.json @@ -5,9 +5,9 @@ "operationId": "List a Project's Service Hooks", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the client keys belong to.", + "description": "The id or slug of the organization the client keys belong to.", "required": true, "schema": { "type": "string" @@ -66,9 +66,9 @@ "operationId": "Register a New Service Hook", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the client keys belong to.", + "description": "The id or slug of the organization the client keys belong to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/spike-protection.json b/api-docs/paths/projects/spike-protection.json index d658e6c38106ab..185b57373862c8 100644 --- a/api-docs/paths/projects/spike-protection.json +++ b/api-docs/paths/projects/spike-protection.json @@ -5,9 +5,9 @@ "operationId": "Enable Spike Protection", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the projects belong to", + "description": "The id or slug of the organization the projects belong to", "required": true, "schema": { "type": "string" @@ -58,9 +58,9 @@ "operationId": "Disable Spike Protection", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the projects belong to", + "description": "The id or slug of the organization the projects belong to", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/stats.json b/api-docs/paths/projects/stats.json index 61d19e9371f641..c47e3005da17e7 100644 --- a/api-docs/paths/projects/stats.json +++ b/api-docs/paths/projects/stats.json @@ -6,9 +6,9 @@ "operationId": "Retrieve Event Counts for a Project", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/tag-values.json b/api-docs/paths/projects/tag-values.json index 87595cf07e716c..e96797c31002ca 100644 --- a/api-docs/paths/projects/tag-values.json +++ b/api-docs/paths/projects/tag-values.json @@ -5,9 +5,9 @@ "operationId": "List a Tag's Values", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/user-feedback.json b/api-docs/paths/projects/user-feedback.json index 71100576b51178..2913a4590520e2 100644 --- a/api-docs/paths/projects/user-feedback.json +++ b/api-docs/paths/projects/user-feedback.json @@ -5,9 +5,9 @@ "operationId": "List a Project's User Feedback", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -72,9 +72,9 @@ "operationId": "Submit User Feedback", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - 
"description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/projects/users.json b/api-docs/paths/projects/users.json index 7895d7ac2a05de..4364bc5e55090a 100644 --- a/api-docs/paths/projects/users.json +++ b/api-docs/paths/projects/users.json @@ -5,9 +5,9 @@ "operationId": "List a Project's Users", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/deploys.json b/api-docs/paths/releases/deploys.json index 8b6dae586df0a6..4e80268882d0a4 100644 --- a/api-docs/paths/releases/deploys.json +++ b/api-docs/paths/releases/deploys.json @@ -5,9 +5,9 @@ "operationId": "List a Release's Deploys", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -66,9 +66,9 @@ "operationId": "Create a New Deploy for an Organization", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/organization-release-commit-files.json b/api-docs/paths/releases/organization-release-commit-files.json index 1a77caf322e671..0c35b227e1fd11 100644 --- a/api-docs/paths/releases/organization-release-commit-files.json +++ b/api-docs/paths/releases/organization-release-commit-files.json @@ -5,9 +5,9 @@ "operationId": "Retrieve Files Changed in a Release's Commits", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/organization-release-commits.json b/api-docs/paths/releases/organization-release-commits.json index 9287c6a0da477b..1e11cfeec65526 100644 --- a/api-docs/paths/releases/organization-release-commits.json +++ b/api-docs/paths/releases/organization-release-commits.json @@ -5,9 +5,9 @@ "operationId": "List an Organization Release's Commits", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/organization-release.json b/api-docs/paths/releases/organization-release.json index 1f1039936885d7..779eb7ff11e4cd 100644 --- a/api-docs/paths/releases/organization-release.json +++ b/api-docs/paths/releases/organization-release.json @@ -5,9 +5,9 @@ "operationId": "Retrieve an Organization's Releases", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" @@ 
-78,9 +78,9 @@ "operationId": "Update an Organization's Release", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" @@ -193,9 +193,9 @@ "operationId": "Delete an Organization's Release", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/organization-releases.json b/api-docs/paths/releases/organization-releases.json index 8ad643c0b58535..e91b6783791801 100644 --- a/api-docs/paths/releases/organization-releases.json +++ b/api-docs/paths/releases/organization-releases.json @@ -5,9 +5,9 @@ "operationId": "List an Organization's Releases", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -110,9 +110,9 @@ "operationId": "Create a New Release for an Organization", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/project-release-commits.json b/api-docs/paths/releases/project-release-commits.json index dd898977bfa769..7bbc8df3a35689 100644 --- a/api-docs/paths/releases/project-release-commits.json +++ b/api-docs/paths/releases/project-release-commits.json @@ -5,9 +5,9 @@ "operationId": "List a Project Release's Commits", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/project-release-file.json b/api-docs/paths/releases/project-release-file.json index 0f71b0f3afbb08..b887db60aeadc6 100644 --- a/api-docs/paths/releases/project-release-file.json +++ b/api-docs/paths/releases/project-release-file.json @@ -5,9 +5,9 @@ "operationId": "Retrieve a Project Release's File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -90,9 +90,9 @@ "operationId": "Update a Project Release File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -189,9 +189,9 @@ "operationId": "Delete a Project Release's File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": 
true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/project-release-files.json b/api-docs/paths/releases/project-release-files.json index a987aa51437339..c7e693232d4e45 100644 --- a/api-docs/paths/releases/project-release-files.json +++ b/api-docs/paths/releases/project-release-files.json @@ -5,9 +5,9 @@ "operationId": "List a Project's Release Files", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -78,9 +78,9 @@ "operationId": "Upload a New Project Release File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/release-file.json b/api-docs/paths/releases/release-file.json index afd6e2e0476417..78444df864390b 100644 --- a/api-docs/paths/releases/release-file.json +++ b/api-docs/paths/releases/release-file.json @@ -5,9 +5,9 @@ "operationId": "Retrieve an Organization Release's File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -81,9 +81,9 @@ "operationId": "Update an Organization Release File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -171,9 +171,9 @@ "operationId": "Delete an Organization Release's File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the release belongs to.", + "description": "The id or slug of the organization the release belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/releases/release-files.json b/api-docs/paths/releases/release-files.json index 87b9bf28d5af64..cdbe81db55410c 100644 --- a/api-docs/paths/releases/release-files.json +++ b/api-docs/paths/releases/release-files.json @@ -5,9 +5,9 @@ "operationId": "List an Organization's Release Files", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" @@ -69,9 +69,9 @@ "operationId": "Upload a New Organization Release File", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization.", + "description": "The id or slug of the organization.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/teams/by-slug.json b/api-docs/paths/teams/by-slug.json index 7aa2d3b788064e..bb6b264d4c6823 100644 --- a/api-docs/paths/teams/by-slug.json +++ b/api-docs/paths/teams/by-slug.json @@ -5,9 +5,9 @@ "operationId": "Retrieve a Team", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the team belongs to.", + 
"description": "The id or slug of the organization the team belongs to.", "required": true, "schema": { "type": "string" @@ -83,9 +83,9 @@ "operationId": "Update a Team", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the team belongs to.", + "description": "The id or slug of the organization the team belongs to.", "required": true, "schema": { "type": "string" @@ -172,9 +172,9 @@ "operationId": "Delete a Team", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the team belongs to.", + "description": "The id or slug of the organization the team belongs to.", "required": true, "schema": { "type": "string" diff --git a/api-docs/paths/teams/stats.json b/api-docs/paths/teams/stats.json index ac59190d9b8f58..c0dfc6077450a4 100644 --- a/api-docs/paths/teams/stats.json +++ b/api-docs/paths/teams/stats.json @@ -6,9 +6,9 @@ "operationId": "Retrieve Event Counts for a Team", "parameters": [ { - "name": "organization_slug", + "name": "organization_id_or_slug", "in": "path", - "description": "The slug of the organization the team belongs to.", + "description": "The id or slug of the organization the team belongs to.", "required": true, "schema": { "type": "string" From a0be07eb808888d184a44ee93ba67e24c94dee5e Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 8 May 2024 12:52:47 -0700 Subject: [PATCH 170/376] fix(grouping): Store variant dict rather than items on `CalculatedHashes` (#70479) In https://github.com/getsentry/sentry/pull/68360, I added `variants` data to the `CalculatedHashes` dataclass, so that we'd have access to them later in the ingest process. In https://github.com/getsentry/sentry/pull/68579, I split up the function getting grouping info for an event so that the part of the work getting variant data was separate from the part acting on that variant data, with the specific intention of being able to use the latter on the variant data now being stored on `CalculatedHashes`. All good, except that I mistakenly ended up with data stored as dict items containing dictified variant data, when the function I wanted to pass that data to wants a dict itself, filled with legit variant objects. Whoops. This fixes the problem, by switching to storing the `variants` dictionary itself on `CalculatedHashes`. In order to do this, I had to pull the contents of a helper function, `get_sorted_grouping_variants`, up into the one place it's called, because inside of that function is currently the only place the variants data exists in dictionary form. Also, for ease of doing recursive checks in tests now that we're not pre-dictifying `CalculatedHashes.variants`, I added a `__eq__` method to the `BaseVariant` class which does the dictifying automatically before comparison. 
--- src/sentry/eventstore/models.py | 16 +++------ src/sentry/grouping/result.py | 12 +++++-- src/sentry/grouping/variants.py | 5 +++ tests/sentry/eventstore/test_models.py | 47 ++++++++------------------ 4 files changed, 34 insertions(+), 46 deletions(-) diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py index 98deeab45987f9..36f4fb99040c3d 100644 --- a/src/sentry/eventstore/models.py +++ b/src/sentry/eventstore/models.py @@ -368,7 +368,10 @@ def get_hashes(self, force_config: StrategyConfiguration | None = None) -> Calcu return rv # Create fresh hashes - flat_variants, hierarchical_variants = self.get_sorted_grouping_variants(force_config) + from sentry.grouping.api import sort_grouping_variants + + variants = self.get_grouping_variants(force_config) + flat_variants, hierarchical_variants = sort_grouping_variants(variants) flat_hashes, _ = self._hashes_from_sorted_grouping_variants(flat_variants) hierarchical_hashes, tree_labels = self._hashes_from_sorted_grouping_variants( hierarchical_variants @@ -386,18 +389,9 @@ def get_hashes(self, force_config: StrategyConfiguration | None = None) -> Calcu hashes=flat_hashes, hierarchical_hashes=hierarchical_hashes, tree_labels=tree_labels, - variants=[*flat_variants, *hierarchical_variants], + variants=variants, ) - def get_sorted_grouping_variants( - self, force_config: StrategyConfiguration | None = None - ) -> tuple[KeyedVariants, KeyedVariants]: - """Get grouping variants sorted into flat and hierarchical variants""" - from sentry.grouping.api import sort_grouping_variants - - variants = self.get_grouping_variants(force_config) - return sort_grouping_variants(variants) - @staticmethod def _hashes_from_sorted_grouping_variants( variants: KeyedVariants, diff --git a/src/sentry/grouping/result.py b/src/sentry/grouping/result.py index 844d36119385a6..7a000885196890 100644 --- a/src/sentry/grouping/result.py +++ b/src/sentry/grouping/result.py @@ -3,7 +3,7 @@ from typing import Any, Optional, TypedDict from sentry.db.models import NodeData -from sentry.grouping.variants import KeyedVariants +from sentry.grouping.variants import BaseVariant from sentry.utils.safe import get_path, safe_execute, set_path EventMetadata = dict[str, Any] @@ -100,7 +100,15 @@ class CalculatedHashes: hashes: list[str] hierarchical_hashes: list[str] tree_labels: list[TreeLabel | None] - variants: KeyedVariants | None = None + # `variants` will never be `None` when the `CalculatedHashes` instance is created as part of + # event grouping, but it has to be typed including `None` because we use the `CalculatedHashes` + # container in other places where we don't have the variants data + # + # TODO: Once we get rid of hierarchical hashing, those other places will just be using + # `CalculatedHashes` to wrap `hashes` - meaning we don't need a wrapper at all, and can save use + # of `CalculatedHashes` for times when we know the variants are there (so we can make them + # required in the type) + variants: dict[str, BaseVariant] | None = None def write_to_event(self, event_data: NodeData) -> None: event_data["hashes"] = self.hashes diff --git a/src/sentry/grouping/variants.py b/src/sentry/grouping/variants.py index 95ff6b19a9364e..7f289154f15a54 100644 --- a/src/sentry/grouping/variants.py +++ b/src/sentry/grouping/variants.py @@ -29,6 +29,11 @@ def as_dict(self): def __repr__(self): return f"<{self.__class__.__name__} {self.get_hash()!r} ({self.type})>" + def __eq__(self, other: object) -> bool: + if not isinstance(other, BaseVariant): + return 
NotImplemented + return self.as_dict() == other.as_dict() + KeyedVariants = KeyedList[BaseVariant] diff --git a/tests/sentry/eventstore/test_models.py b/tests/sentry/eventstore/test_models.py index e16ac0722c35d8..e68b57d2b93dde 100644 --- a/tests/sentry/eventstore/test_models.py +++ b/tests/sentry/eventstore/test_models.py @@ -10,6 +10,7 @@ from sentry.grouping.enhancer import Enhancements from sentry.grouping.result import CalculatedHashes from sentry.grouping.utils import hash_from_values +from sentry.grouping.variants import ComponentVariant from sentry.interfaces.user import User from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.environment import Environment @@ -421,41 +422,21 @@ def test_get_hashes_gets_hashes_and_variants_if_none_on_event(self): calculated_hashes = event.get_hashes() expected_hash_values = [hash_from_values(["Dogs are great!"])] - expected_variants = list(get_grouping_variants_for_event(event).items()) + expected_variants = get_grouping_variants_for_event(event) assert calculated_hashes.hashes == expected_hash_values - assert ( - calculated_hashes.variants is not None - and len(calculated_hashes.variants) == len(expected_variants) == 1 - ) - - variant_key, variant = calculated_hashes.variants[0] - expected_variant_key, expected_variant = expected_variants[0] - variant_dict = variant._get_metadata_as_dict() - expected_variant_dict = expected_variant._get_metadata_as_dict() - - assert variant_key == expected_variant_key == "default" - assert ( - variant_dict["config"]["id"] - == expected_variant_dict["config"]["id"] - == NEWSTYLE_GROUPING_CONFIG - ) - assert ( - variant_dict["component"]["id"] == expected_variant_dict["component"]["id"] == "default" - ) - assert ( - len(variant_dict["component"]["values"]) - == len(expected_variant_dict["component"]["values"]) - == 1 - ) - - component_value = variant_dict["component"]["values"][0] - expected_component_value = expected_variant_dict["component"]["values"][0] - - assert component_value["id"] == expected_component_value["id"] == "message" - assert ( - component_value["values"] == expected_component_value["values"] == ["Dogs are great!"] - ) + assert calculated_hashes.variants == expected_variants + + # Since the `variants` dictionaries are equal, it suffices to only check the values in one + assert "default" in calculated_hashes.variants + default_variant = calculated_hashes.variants["default"] + + assert isinstance(default_variant, ComponentVariant) + assert default_variant.config.id == NEWSTYLE_GROUPING_CONFIG + assert default_variant.component.id == "default" + assert len(default_variant.component.values) == 1 + assert default_variant.component.values[0].id == "message" + assert default_variant.component.values[0].values == ["Dogs are great!"] class EventGroupsTest(TestCase): From f5ec992c3358077079120f556ab2142878880928 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Wed, 8 May 2024 15:54:45 -0400 Subject: [PATCH 171/376] fix(cache): right align value size header (#70524) Right align value size in cache samples drawer and do a little bit of code consistency changes image --- .../starfish/components/tableCells/renderHeadCell.tsx | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/static/app/views/starfish/components/tableCells/renderHeadCell.tsx b/static/app/views/starfish/components/tableCells/renderHeadCell.tsx index f6c78f09ca6845..46a77ca39ed083 100644 --- 
a/static/app/views/starfish/components/tableCells/renderHeadCell.tsx +++ b/static/app/views/starfish/components/tableCells/renderHeadCell.tsx @@ -26,7 +26,6 @@ type Options = { const DEFAULT_SORT_PARAMETER_NAME = 'sort'; const {SPAN_SELF_TIME, HTTP_RESPONSE_CONTENT_LENGTH, CACHE_ITEM_SIZE} = SpanMetricsField; -const {RESPONSE_CODE, MESSAGING_MESSAGE_BODY_SIZE} = SpanIndexedField; const { TIME_SPENT_PERCENTAGE, SPS, @@ -58,11 +57,12 @@ export const SORTABLE_FIELDS = new Set([ ]); const NUMERIC_FIELDS = new Set([ - `${RESPONSE_CODE}`, - CACHE_ITEM_SIZE, 'transaction.duration', + SpanMetricsField.CACHE_ITEM_SIZE, + SpanIndexedField.RESPONSE_CODE, SpanIndexedField.SPAN_SELF_TIME, - MESSAGING_MESSAGE_BODY_SIZE, + SpanIndexedField.CACHE_ITEM_SIZE, + SpanIndexedField.MESSAGING_MESSAGE_BODY_SIZE, ]); export const renderHeadCell = ({column, location, sort, sortParameterName}: Options) => { From 81f3a59a8f46a07308f7dfc6f4075542841bded1 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Wed, 8 May 2024 15:59:14 -0400 Subject: [PATCH 172/376] ref(issues-stream-assignees): Upgrade assignee dropdown to use <CompactSelect /> (#69639) This PR upgrades the current assignee dropdown menu by replacing it with a `<CompactSelect />` implementation. To reduce the scope of this absolutely freaking massive PR, this change only applies to the assignee dropdown on the Issues Stream page and not in the Issue Details page or User Feedback page. Before: image After: image It also makes the following design tweaks: * Suggested Assignees, Users, and Teams are now separated into different sections in the assignee list, in that order * The current assignee, if one exists, is now bubbled to the top of the list, and has a check mark next to its name * ~~The user's name is no longer bubbled to the top of the list~~ Open Design Questions: * ~~Do we want to bubble the user's name to the top of the assignee list?~~ TODOS: - [x] Add test file for NewAssigneeSelectorDropdown (placeholder name) - [x] Rename NewAssigneeSelectorDropdown to something acceptable - [x] Open invite/request new member modal when `(+) Invite Member` is clicked --------- Co-authored-by: Scott Cooper --- .../assigneeSelectorDropdown.spec.tsx | 600 ++++++++++++++++++ .../components/assigneeSelectorDropdown.tsx | 561 ++++++++++++++++ .../components/deprecatedAssigneeSelector.tsx | 2 +- static/app/components/stream/group.spec.tsx | 13 +- static/app/components/stream/group.tsx | 74 ++- 5 files changed, 1226 insertions(+), 24 deletions(-) create mode 100644 static/app/components/assigneeSelectorDropdown.spec.tsx create mode 100644 static/app/components/assigneeSelectorDropdown.tsx diff --git a/static/app/components/assigneeSelectorDropdown.spec.tsx b/static/app/components/assigneeSelectorDropdown.spec.tsx new file mode 100644 index 00000000000000..436537260fd8ab --- /dev/null +++ b/static/app/components/assigneeSelectorDropdown.spec.tsx @@ -0,0 +1,600 @@ +import {GroupFixture} from 'sentry-fixture/group'; +import {MemberFixture} from 'sentry-fixture/member'; +import {ProjectFixture} from 'sentry-fixture/project'; +import {RouterContextFixture} from 'sentry-fixture/routerContextFixture'; +import {TeamFixture} from 'sentry-fixture/team'; +import {UserFixture} from 'sentry-fixture/user'; + +import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; + +import {assignToActor, clearAssignment} from 'sentry/actionCreators/group'; +import {openInviteMembersModal} from 'sentry/actionCreators/modal'; +import
AssigneeSelectorDropdown, { + type AssignableEntity, +} from 'sentry/components/assigneeSelectorDropdown'; +import ConfigStore from 'sentry/stores/configStore'; +import GroupStore from 'sentry/stores/groupStore'; +import MemberListStore from 'sentry/stores/memberListStore'; +import ProjectsStore from 'sentry/stores/projectsStore'; +import TeamStore from 'sentry/stores/teamStore'; +import type {Group} from 'sentry/types'; + +jest.mock('sentry/actionCreators/modal', () => ({ + openInviteMembersModal: jest.fn(), +})); + +describe('AssigneeSelectorDropdown', () => { + let USER_1, USER_2, USER_3, USER_4; + let TEAM_1, TEAM_2; + let PROJECT_1; + let GROUP_1; + let GROUP_2; + + beforeEach(() => { + USER_1 = UserFixture({ + id: '1', + name: 'Apple Bees', + email: 'applebees@example.com', + }); + USER_2 = UserFixture({ + id: '2', + name: 'Cert Depo', + email: 'cd@example.com', + }); + USER_3 = UserFixture({ + id: '3', + name: 'Epic Fail', + email: 'epicf@example.com', + }); + USER_4 = MemberFixture({ + id: '4', + name: 'Git Hub', + email: 'github@example.com', + }); + + TEAM_1 = TeamFixture({ + id: '3', + name: 'COOL TEAM', + slug: 'cool-team', + }); + + TEAM_2 = TeamFixture({ + id: '4', + name: 'LAME TEAM', + slug: 'lame-team', + }); + + PROJECT_1 = ProjectFixture({ + teams: [TEAM_1, TEAM_2], + }); + + GROUP_1 = GroupFixture({ + id: '1337', + project: PROJECT_1, + }); + + GROUP_2 = GroupFixture({ + id: '1338', + project: PROJECT_1, + owners: [ + { + type: 'suspectCommit', + owner: `user:${USER_1.id}`, + date_added: '', + }, + ], + }); + + TeamStore.reset(); + TeamStore.setTeams([TEAM_1, TEAM_2]); + GroupStore.reset(); + GroupStore.loadInitialData([GROUP_1, GROUP_2]); + + jest.spyOn(MemberListStore, 'getAll').mockImplementation(() => []); + jest.spyOn(GroupStore, 'get').mockImplementation(() => GROUP_1); + + MemberListStore.reset(); + }); + + beforeEach(() => { + ProjectsStore.loadInitialData([PROJECT_1]); + }); + + afterEach(() => { + ProjectsStore.reset(); + GroupStore.reset(); + MockApiClient.clearMockResponses(); + }); + + // Doesn't need to always be async, but it was easier to prevent flakes this way + const openMenu = async () => { + await userEvent.click(await screen.findByTestId('assignee-selector'), undefined); + }; + + const updateGroupSpy = jest.fn(); + + const updateGroup = async (group: Group, newAssignee: AssignableEntity | null) => { + updateGroupSpy(group, newAssignee); + if (newAssignee) { + await assignToActor({ + id: group.id, + orgSlug: 'org-slug', + actor: {id: newAssignee.id, type: newAssignee.type}, + assignedBy: 'assignee_selector', + }); + } else { + await clearAssignment(group.id, 'org-slug', 'assignee_selector'); + } + }; + + describe('render with props', () => { + it('renders members from the prop when present', async () => { + MemberListStore.loadInitialData([USER_1]); + render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + await openMenu(); + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + + // 3 total items + expect(screen.getAllByRole('option')).toHaveLength(4); + // 1 team + expect(screen.getByText(`#${TEAM_1.slug}`)).toBeInTheDocument(); + // 2 Users + expect(screen.getByText(USER_2.name)).toBeInTheDocument(); + expect(screen.getByText(USER_3.name)).toBeInTheDocument(); + }); + }); + + it('shows all user and team assignees in the correct order', async () => { + render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + act(() => MemberListStore.loadInitialData([USER_1, USER_2, USER_3, USER_4])); + await openMenu(); + 
expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + + // 3 total items + const options = screen.getAllByRole('option'); + // 4 Users + 2 Teams = 6 total options + expect(options).toHaveLength(6); + // Expect users to be in alphabetical order + expect(options[0]).toHaveTextContent(`${USER_1.name} (You)`); + expect(options[1]).toHaveTextContent(USER_2.name); + expect(options[2]).toHaveTextContent(USER_3.name); + expect(options[3]).toHaveTextContent(USER_4.name); + // Expect team to be at the bottom of the list + expect(options[4]).toHaveTextContent(TEAM_1.slug); + expect(options[5]).toHaveTextContent(TEAM_2.slug); + }); + + it('successfully assigns users', async () => { + // This is necessary in addition to passing in the same member list into the component + // because the avatar component uses the member list store to get the user's avatar + MemberListStore.loadInitialData([USER_1, USER_2, USER_3, USER_4]); + const assignedGroup: Group = { + ...GROUP_1, + assignedTo: {...USER_1, type: 'user'}, + }; + + const assignMock = MockApiClient.addMockResponse({ + method: 'PUT', + url: `/organizations/org-slug/issues/${GROUP_1.id}/`, + body: assignedGroup, + }); + + const {rerender} = render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + + await openMenu(); + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + + await userEvent.click(screen.getByText(`${USER_1.name} (You)`)); + + await waitFor(() => + expect(assignMock).toHaveBeenLastCalledWith( + `/organizations/org-slug/issues/${GROUP_1.id}/`, + expect.objectContaining({ + data: {assignedTo: `user:${USER_1.id}`, assignedBy: 'assignee_selector'}, + }) + ) + ); + expect(updateGroupSpy).toHaveBeenCalledWith(GROUP_1, { + assignee: USER_1, + id: `${USER_1.id}`, + type: 'user', + suggestedAssignee: undefined, + }); + rerender( + updateGroup(assignedGroup, newAssignee)} + /> + ); + + expect(await screen.findByTestId('letter_avatar-avatar')).toBeInTheDocument(); + // USER_1 initials + expect(screen.getByTestId('assignee-selector')).toHaveTextContent('AB'); + }); + + it('successfully assigns teams', async () => { + const assignedGroup: Group = { + ...GROUP_1, + assignedTo: {...TEAM_1, type: 'team'}, + }; + + const assignMock = MockApiClient.addMockResponse({ + method: 'PUT', + url: `/organizations/org-slug/issues/${GROUP_1.id}/`, + body: assignedGroup, + }); + + const {rerender} = render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + await openMenu(); + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + + const team1slug = `#${TEAM_1.slug}`; + await userEvent.click(screen.getByText(team1slug)); + + await waitFor(() => + expect(assignMock).toHaveBeenCalledWith( + `/organizations/org-slug/issues/${GROUP_1.id}/`, + expect.objectContaining({ + data: {assignedTo: 'team:3', assignedBy: 'assignee_selector'}, + }) + ) + ); + expect(updateGroupSpy).toHaveBeenCalledWith(GROUP_1, { + assignee: { + id: `team:${TEAM_1.id}`, + name: TEAM_1.slug, + type: 'team', + }, + id: `${TEAM_1.id}`, + type: 'team', + suggestedAssignee: undefined, + }); + + rerender( + updateGroup(assignedGroup, newAssignee)} + /> + ); + + expect(await screen.findByTestId('letter_avatar-avatar')).toBeInTheDocument(); + expect(screen.getByTestId('assignee-selector')).toHaveTextContent('CT'); + }); + + it('successfully switches an assignee', async () => { + MemberListStore.loadInitialData([USER_1, USER_2, USER_3, USER_4]); + const assignedGroupUser1: Group = { + ...GROUP_1, + assignedTo: {...USER_1, type: 'user'}, + }; + 
const assignedGroupUser2: Group = { + ...GROUP_1, + assignedTo: {...USER_2, type: 'user'}, + }; + + const assignMock = MockApiClient.addMockResponse({ + method: 'PUT', + url: `/organizations/org-slug/issues/${GROUP_1.id}/`, + body: assignedGroupUser1, + }); + + const {rerender} = render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + await openMenu(); + + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + await userEvent.click(screen.getByText(`${USER_1.name} (You)`)); + + await waitFor(() => + expect(assignMock).toHaveBeenLastCalledWith( + `/organizations/org-slug/issues/${GROUP_1.id}/`, + expect.objectContaining({ + data: {assignedTo: `user:${USER_1.id}`, assignedBy: 'assignee_selector'}, + }) + ) + ); + + expect(updateGroupSpy).toHaveBeenCalledWith(GROUP_1, { + assignee: USER_1, + id: `${USER_1.id}`, + type: 'user', + suggestedAssignee: undefined, + }); + + rerender( + updateGroup(assignedGroupUser1, newAssignee)} + /> + ); + + expect(await screen.findByTestId('letter_avatar-avatar')).toBeInTheDocument(); + expect(screen.getByTestId('assignee-selector')).toHaveTextContent('AB'); + + await openMenu(); + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + await userEvent.click(screen.getByText(USER_2.name)); + + await waitFor(() => + expect(assignMock).toHaveBeenLastCalledWith( + `/organizations/org-slug/issues/${GROUP_1.id}/`, + expect.objectContaining({ + data: {assignedTo: `user:${USER_2.id}`, assignedBy: 'assignee_selector'}, + }) + ) + ); + expect(updateGroupSpy).toHaveBeenCalledWith(GROUP_1, { + assignee: USER_1, + id: `${USER_1.id}`, + type: 'user', + suggestedAssignee: undefined, + }); + rerender( + updateGroup(assignedGroupUser2, newAssignee)} + /> + ); + expect(screen.getByTestId('assignee-selector')).toHaveTextContent('CD'); + }); + + it('successfully clears assignment', async () => { + const assignedGroup: Group = { + ...GROUP_1, + assignedTo: {...USER_2, type: 'user'}, + }; + + const assignMock = MockApiClient.addMockResponse({ + method: 'PUT', + url: `/organizations/org-slug/issues/${GROUP_1.id}/`, + body: assignedGroup, + }); + + const {rerender} = render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + await openMenu(); + + await userEvent.click(screen.getByText(USER_2.name)); + + await waitFor(() => + expect(assignMock).toHaveBeenCalledWith( + `/organizations/org-slug/issues/${GROUP_1.id}/`, + expect.objectContaining({ + data: {assignedTo: 'user:2', assignedBy: 'assignee_selector'}, + }) + ) + ); + rerender( + updateGroup(assignedGroup, newAssignee)} + onClear={() => updateGroup(assignedGroup, null)} + /> + ); + + await openMenu(); + await userEvent.click(screen.getByRole('button', {name: 'Clear'})); + + // api was called with empty string, clearing assignment + await waitFor(() => + expect(assignMock).toHaveBeenLastCalledWith( + '/organizations/org-slug/issues/1337/', + expect.objectContaining({ + data: {assignedTo: '', assignedBy: 'assignee_selector'}, + }) + ) + ); + }); + + it('filters user by email and selects with keyboard', async () => { + MemberListStore.loadInitialData([USER_1, USER_2, USER_3, USER_4]); + const assignedGroup: Group = { + ...GROUP_2, + assignedTo: {...USER_2, type: 'user'}, + }; + + const assignMock = MockApiClient.addMockResponse({ + method: 'PUT', + url: `/organizations/org-slug/issues/${GROUP_2.id}/`, + body: assignedGroup, + }); + + const {rerender} = render( + updateGroup(GROUP_2, newAssignee)} + /> + ); + await openMenu(); + 
expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + + await userEvent.type(screen.getByRole('textbox'), 'Cert'); + + // 1 total item + await waitFor(() => { + expect(screen.getAllByRole('option')).toHaveLength(1); + }); + + expect(await screen.findByText(`${USER_2.name}`)).toBeInTheDocument(); + + await userEvent.click(await screen.findByText(`${USER_2.name}`)); + + await waitFor(() => + expect(assignMock).toHaveBeenLastCalledWith( + `/organizations/org-slug/issues/${GROUP_2.id}/`, + expect.objectContaining({ + data: {assignedTo: `user:${USER_2.id}`, assignedBy: 'assignee_selector'}, + }) + ) + ); + rerender( + updateGroup(assignedGroup, newAssignee)} + /> + ); + expect(await screen.findByTestId('letter_avatar-avatar')).toBeInTheDocument(); + // USER_2 initials + expect(screen.getByTestId('assignee-selector')).toHaveTextContent('CD'); + }); + + it('successfully shows suggested assignees and suggestion reason', async () => { + jest.spyOn(GroupStore, 'get').mockImplementation(() => GROUP_2); + + MemberListStore.loadInitialData([USER_1, USER_2, USER_3]); + + const assignedGroup: Group = { + ...GROUP_2, + assignedTo: {...USER_1, type: 'user'}, + }; + + const assignGroup2Mock = MockApiClient.addMockResponse({ + method: 'PUT', + url: `/organizations/org-slug/issues/${GROUP_2.id}/`, + body: { + ...GROUP_2, + assignedBy: 'assignee_selector', + assignedTo: {assignedTo: USER_1, type: 'user'}, + }, + }); + + const {rerender} = render( + updateGroup(GROUP_2, newAssignee)} + /> + ); + + expect(screen.getByTestId('suggested-avatar-stack')).toBeInTheDocument(); + // Hover over avatar + await userEvent.hover(await screen.findByTestId('letter_avatar-avatar')); + expect(await screen.findByText('Suggestion: Apple Bees')).toBeInTheDocument(); + expect(await screen.findByText('commit data')).toBeInTheDocument(); + + await openMenu(); + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + expect(await screen.findByText('Suggested')).toBeInTheDocument(); + + const options = await screen.findAllByRole('option'); + + // Suggested assignee initials + expect(options[0]).toHaveTextContent('AB'); + await userEvent.click(options[0]); + + await waitFor(() => + expect(assignGroup2Mock).toHaveBeenCalledWith( + '/organizations/org-slug/issues/1338/', + expect.objectContaining({ + data: {assignedTo: `user:${USER_1.id}`, assignedBy: 'assignee_selector'}, + }) + ) + ); + + rerender( + updateGroup(assignedGroup, newAssignee)} + /> + ); + + // Suggested assignees shouldn't show anymore because we assigned to the suggested actor + expect(screen.queryByTestId('suggested-avatar-stack')).not.toBeInTheDocument(); + + expect(updateGroupSpy).toHaveBeenCalledWith(GROUP_2, { + assignee: USER_1, + id: `${USER_1.id}`, + type: 'user', + suggestedAssignee: expect.objectContaining({id: USER_1.id}), + }); + }); + + it('shows invite member button', async () => { + MemberListStore.loadInitialData([USER_1, USER_2]); + render( + updateGroup(GROUP_1, newAssignee)} + />, + { + context: RouterContextFixture(), + } + ); + jest.spyOn(ConfigStore, 'get').mockImplementation(() => true); + + await openMenu(); + expect(screen.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + + await userEvent.click(await screen.findByRole('button', {name: 'Invite Member'})); + expect(openInviteMembersModal).toHaveBeenCalled(); + (ConfigStore.get as jest.Mock).mockRestore(); + }); + + it('renders unassigned', async () => { + render( + updateGroup(GROUP_1, newAssignee)} + /> + ); + + await
userEvent.hover(screen.getByTestId('unassigned')); + expect(await screen.findByText('Unassigned')).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/assigneeSelectorDropdown.tsx b/static/app/components/assigneeSelectorDropdown.tsx new file mode 100644 index 00000000000000..2be4a6d0281d9d --- /dev/null +++ b/static/app/components/assigneeSelectorDropdown.tsx @@ -0,0 +1,561 @@ +import {Fragment} from 'react'; +import styled from '@emotion/styled'; +import uniqBy from 'lodash/uniqBy'; + +import {openInviteMembersModal} from 'sentry/actionCreators/modal'; +import ActorAvatar from 'sentry/components/avatar/actorAvatar'; +import SuggestedAvatarStack from 'sentry/components/avatar/suggestedAvatarStack'; +import {Button} from 'sentry/components/button'; +import {Chevron} from 'sentry/components/chevron'; +import { + CompactSelect, + type SelectOption, + type SelectOptionOrSection, +} from 'sentry/components/compactSelect'; +import IdBadge from 'sentry/components/idBadge'; +import ExternalLink from 'sentry/components/links/externalLink'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import {Tooltip} from 'sentry/components/tooltip'; +import {IconAdd, IconUser} from 'sentry/icons'; +import {t, tct, tn} from 'sentry/locale'; +import ConfigStore from 'sentry/stores/configStore'; +import MemberListStore from 'sentry/stores/memberListStore'; +import ProjectsStore from 'sentry/stores/projectsStore'; +import {useLegacyStore} from 'sentry/stores/useLegacyStore'; +import {space} from 'sentry/styles/space'; +import type {Actor, Group, SuggestedOwnerReason, Team, User} from 'sentry/types'; +import {buildTeamId} from 'sentry/utils'; + +const suggestedReasonTable: Record = { + suspectCommit: t('Suspect Commit'), + ownershipRule: t('Ownership Rule'), + projectOwnership: t('Ownership Rule'), + // TODO: codeowners may no longer exist + codeowners: t('Codeowners'), +}; + +export type AssignableEntity = { + assignee: User | Actor; + id: string; + type: Actor['type']; + suggestedAssignee?: SuggestedAssignee; +}; + +export type SuggestedAssignee = Actor & { + assignee: AssignableTeam | User; + suggestedReason: SuggestedOwnerReason; + suggestedReasonText?: React.ReactNode; +}; + +type AssignableTeam = { + display: string; + email: string; + id: string; + team: Team; +}; + +export interface AssigneeSelectorDropdownProps { + group: Group; + loading: boolean; + memberList?: User[]; + noDropdown?: boolean; + onAssign?: (assignedActor: AssignableEntity | null) => void; + onClear?: (clearedAssignee: User | Actor) => void; + owners?: Omit[]; +} + +export function AssigneeAvatar({ + assignedTo, + suggestedActors = [], +}: { + assignedTo?: Actor | null; + suggestedActors?: SuggestedAssignee[]; +}) { + const suggestedReasons: Record = { + suspectCommit: tct('Based on [commit:commit data]', { + commit: ( + + ), + }), + ownershipRule: t('Matching Issue Owners Rule'), + projectOwnership: t('Matching Issue Owners Rule'), + codeowners: t('Matching Codeowners Rule'), + }; + const assignedToSuggestion = suggestedActors.find(actor => actor.id === assignedTo?.id); + + if (assignedTo) { + return ( + + {tct('Assigned to [name]', { + name: assignedTo.type === 'team' ? `#${assignedTo.name}` : assignedTo.name, + })} + {assignedToSuggestion && + suggestedReasons[assignedToSuggestion.suggestedReason] && ( + + {suggestedReasons[assignedToSuggestion.suggestedReason]} + + )} + + } + /> + ); + } + + if (suggestedActors.length > 0) { + return ( + +
+ {tct('Suggestion: [name]', { + name: + suggestedActors[0].type === 'team' + ? `#${suggestedActors[0].name}` + : suggestedActors[0].name, + })} + {suggestedActors.length > 1 && + tn(' + %s other', ' + %s others', suggestedActors.length - 1)} +
+ + {suggestedReasons[suggestedActors[0].suggestedReason]} + + + } + /> + ); + } + + return ( + +
{t('Unassigned')}
+ + {tct( + 'You can auto-assign issues by adding [issueOwners:Issue Owner rules].', + { + issueOwners: ( + + ), + } + )} + + + } + > + +
+ ); +} + +export default function AssigneeSelectorDropdown({ + group, + loading, + memberList, + noDropdown = false, + onAssign, + onClear, + owners, +}: AssigneeSelectorDropdownProps) { + const memberLists = useLegacyStore(MemberListStore); + const sessionUser = ConfigStore.get('user'); + + const currentMemberList = (): User[] | undefined => { + return memberList ?? memberLists?.members; + }; + + const getSuggestedAssignees = (): SuggestedAssignee[] => { + const currAssignableTeams = getAssignableTeams(); + const currMembers = currentMemberList() ?? []; + + if (owners !== undefined) { + // Add team or user from store + return owners + .map(owner => { + if (owner.type === 'user') { + const member = currMembers.find(user => user.id === owner.id); + if (member) { + return { + ...owner, + assignee: member, + }; + } + } + if (owner.type === 'team') { + const matchingTeam = currAssignableTeams.find( + assignableTeam => assignableTeam.team.id === owner.id + ); + if (matchingTeam) { + return { + ...owner, + assignee: matchingTeam, + }; + } + } + + return null; + }) + .filter((owner): owner is SuggestedAssignee => !!owner); + } + + const suggestedOwners = group.owners ?? []; + if (!suggestedOwners) { + return []; + } + + const uniqueSuggestions = uniqBy(suggestedOwners, owner => owner.owner); + return uniqueSuggestions + .map(suggestion => { + const [suggestionType, suggestionId] = suggestion.owner.split(':'); + const suggestedReasonText = suggestedReasonTable[suggestion.type]; + if (suggestionType === 'user') { + const member = currMembers.find(user => user.id === suggestionId); + if (member) { + return { + id: suggestionId, + type: 'user', + name: member.name, + suggestedReason: suggestion.type, + suggestedReasonText, + assignee: member, + }; + } + } else if (suggestionType === 'team') { + const matchingTeam = currAssignableTeams.find( + assignableTeam => assignableTeam.id === suggestion.owner + ); + if (matchingTeam) { + return { + id: suggestionId, + type: 'team', + name: matchingTeam.team.name, + suggestedReason: suggestion.type, + suggestedReasonText, + assignee: matchingTeam, + }; + } + } + + return null; + }) + .filter((owner): owner is SuggestedAssignee => !!owner); + }; + + const getAssignableTeams = (): AssignableTeam[] => { + const teams = ProjectsStore.getBySlug(group?.project.slug)?.teams ?? []; + return teams + .sort((a, b) => a.slug.localeCompare(b.slug)) + .map(team => ({ + id: buildTeamId(team.id), + display: `#${team.slug}`, + email: team.id, + team, + })); + }; + + const handleSelect = (selectedOption: SelectOption | null) => { + // selectedOption is falsey when the option selected is already selected, or when the clear button is clicked + if (!selectedOption) { + if (onClear && group.assignedTo) { + onClear(group.assignedTo); + } + return; + } + // See makeMemberOption and makeTeamOption for how the value is formatted + const type = selectedOption.value.startsWith('user:') ? 
'user' : 'team'; + const assigneeId = selectedOption.value.split(':')[1]; + let assignee: User | Actor; + + if (type === 'user') { + assignee = currentMemberList()?.find(member => member.id === assigneeId) as User; + } else { + const assignedTeam = getAssignableTeams().find( + assignableTeam => assignableTeam.team.id === assigneeId + ) as AssignableTeam; + // Convert AssingableTeam to Actor + assignee = { + id: assignedTeam.id, + name: assignedTeam.team.slug, + type: 'team', + }; + } + // Assignee is guaranteed to exist here, but we check to satisfy the type-checker + if (assignee && onAssign) { + const suggestedAssignee = getSuggestedAssignees().find( + actor => actor.type === type && actor.id === assignee.id + ); + onAssign({ + assignee: assignee, + id: assigneeId, + type: type, + suggestedAssignee: suggestedAssignee, + }); + } + }; + + const makeMemberOption = ( + userId: string, + userDisplay: string + ): SelectOption => { + const isCurrentUser = userId === sessionUser?.id; + + return { + label: ( + + ), + // Jank way to pass assignee type (team or user) into each row + value: `user:${userId}`, + textValue: userDisplay, + }; + }; + + const makeTeamOption = (assignableTeam: AssignableTeam): SelectOption => ({ + label: , + value: `team:${assignableTeam.team.id}`, + textValue: assignableTeam.team.slug, + }); + + const makeSuggestedAssigneeOption = ( + assignee: SuggestedAssignee + ): SelectOption => { + if (assignee.type === 'user') { + const isCurrentUser = assignee.id === sessionUser?.id; + return { + label: ( + + ), + value: `user:${assignee.id}`, + textValue: assignee.name, + }; + } + const assignedTeam = assignee.assignee as AssignableTeam; + return { + label: ( + + ), + value: `team:${assignee.id}`, + textValue: assignedTeam.team.slug, + }; + }; + + const makeAllOptions = (): SelectOptionOrSection[] => { + const options: SelectOptionOrSection[] = []; + + let memList = currentMemberList(); + let assignableTeamList = getAssignableTeams(); + let suggestedAssignees = getSuggestedAssignees(); + let assignedUser: User | undefined; + + // If the group is already assigned, extract the assigned user/team + // from the member-list/assignedTeam-list and add to the top of the menu + if (group.assignedTo) { + if (group.assignedTo.type === 'team') { + const assignedTeam = assignableTeamList.find( + assignableTeam => assignableTeam.team.id === group.assignedTo?.id + ); + if (assignedTeam) { + options.push(makeTeamOption(assignedTeam)); + assignableTeamList = assignableTeamList?.filter( + assignableTeam => assignableTeam.team.id !== group.assignedTo?.id + ); + suggestedAssignees = suggestedAssignees?.filter(suggestedAssignee => { + return suggestedAssignee.id !== group.assignedTo?.id; + }); + } + } else { + assignedUser = memList?.find(user => user.id === group.assignedTo?.id); + if (assignedUser) { + options.push( + makeMemberOption(assignedUser.id, assignedUser.name || assignedUser.email) + ); + memList = memList?.filter(member => member.id !== group.assignedTo?.id); + suggestedAssignees = suggestedAssignees?.filter(suggestedAssignee => { + return suggestedAssignee.id !== group.assignedTo?.id; + }); + } + } + } + + // Only bubble the current user to the top if they are not already assigned or suggested + const isUserAssignedOrSuggested = + assignedUser?.id === sessionUser.id || + !!getSuggestedAssignees()?.find( + suggestedAssignee => suggestedAssignee.id === sessionUser.id + ); + if (!isUserAssignedOrSuggested) { + const currentUser = memList?.find(user => user.id === sessionUser.id); + if 
(currentUser) { + memList = memList?.filter(user => user.id !== sessionUser.id); + // This can't be sessionUser even though they're the same thing + // because it would bork the tests + memList?.unshift(currentUser); + } + } + + const memberOptions = { + value: '_members', + label: t('Members'), + options: + memList?.map(member => + makeMemberOption(member.id, member.name || member.email) + ) ?? [], + }; + + const teamOptions = { + value: '_teams', + label: t('Teams'), + options: assignableTeamList?.map(makeTeamOption) ?? [], + }; + + const suggestedUsers = suggestedAssignees?.filter( + assignee => assignee.type === 'user' + ); + const suggestedTeams = suggestedAssignees?.filter( + assignee => assignee.type === 'team' + ); + + const suggestedOptions = { + value: '_suggested_assignees', + label: t('Suggested'), + options: + suggestedUsers + .map(makeSuggestedAssigneeOption) + .concat(suggestedTeams?.map(makeSuggestedAssigneeOption)) ?? [], + }; + + options.push(suggestedOptions, memberOptions, teamOptions); + + return options; + }; + + const makeTrigger = ( + props: Omit, 'children'>, + isOpen: boolean + ) => { + const avatarElement = ( + + ); + return ( + + {loading && ( + + )} + {!loading && !noDropdown && ( + + {avatarElement} + + + )} + {!loading && noDropdown && avatarElement} + + ); + }; + + const makeFooterInviteButton = () => { + return ( + + ); + }; + + return ( + + e.stopPropagation()} + value={ + group.assignedTo + ? `${group.assignedTo?.type === 'user' ? 'user:' : 'team:'}${group.assignedTo.id}` + : '' + } + onClear={() => handleSelect(null)} + menuTitle={t('Assignee')} + searchPlaceholder="Search users or teams..." + size="sm" + onChange={handleSelect} + options={makeAllOptions()} + trigger={makeTrigger} + menuFooter={makeFooterInviteButton()} + /> + + ); +} + +const AssigneeWrapper = styled('div')` + display: flex; + justify-content: flex-end; +`; + +const DropdownButton = styled('button')` + appearance: none; + border: 0; + background: transparent; + display: flex; + align-items: center; + font-size: 20px; + gap: ${space(0.5)}; +`; + +const StyledIconUser = styled(IconUser)` + margin-right: 2px; +`; + +const TooltipWrapper = styled('div')` + text-align: left; +`; + +const TooltipSubExternalLink = styled(ExternalLink)` + color: ${p => p.theme.subText}; + text-decoration: underline; + + :hover { + color: ${p => p.theme.subText}; + } +`; + +const TooltipSubtext = styled('div')` + color: ${p => p.theme.subText}; +`; diff --git a/static/app/components/deprecatedAssigneeSelector.tsx b/static/app/components/deprecatedAssigneeSelector.tsx index 7ea7bdf1786009..4a9f9427693b60 100644 --- a/static/app/components/deprecatedAssigneeSelector.tsx +++ b/static/app/components/deprecatedAssigneeSelector.tsx @@ -29,7 +29,7 @@ interface DeprecatedAssigneeSelectorProps noDropdown?: boolean; } -function AssigneeAvatar({ +export function AssigneeAvatar({ assignedTo, suggestedActors = [], }: { diff --git a/static/app/components/stream/group.spec.tsx b/static/app/components/stream/group.spec.tsx index 6f8c5cacb7fb7e..c81621863d5975 100644 --- a/static/app/components/stream/group.spec.tsx +++ b/static/app/components/stream/group.spec.tsx @@ -43,17 +43,18 @@ describe('StreamGroup', function () { GroupStore.reset(); }); - it('renders with anchors', function () { + it('renders with anchors', async function () { const {routerContext, organization} = initializeOrg(); render(, { context: routerContext, organization, }); + expect(await screen.findByTestId('group')).toBeInTheDocument(); 
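+ // findByTestId above waits for the row's asynchronous render to settle before the anchor assertions run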
expect(GuideStore.state.anchors).toEqual(new Set(['dynamic_counts', 'issue_stream'])); }); - it('marks as reviewed', function () { + it('marks as reviewed', async function () { const {routerContext, organization} = initializeOrg(); render( GroupStore.onUpdate('1337', undefined, data)); act(() => GroupStore.onUpdateSuccess('1337', undefined, data)); @@ -73,13 +77,14 @@ describe('StreamGroup', function () { expect(screen.getByTestId('group')).toHaveAttribute('data-test-reviewed', 'true'); }); - it('marks as resolved', function () { + it('marks as resolved', async function () { const {routerContext, organization} = initializeOrg(); render(, { context: routerContext, organization, }); + expect(await screen.findByTestId('group')).toBeInTheDocument(); expect(screen.queryByTestId('resolved-issue')).not.toBeInTheDocument(); const data: GroupStatusResolution = { status: GroupStatus.RESOLVED, diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx index 20369fc7417a62..0412522fb7ce03 100644 --- a/static/app/components/stream/group.tsx +++ b/static/app/components/stream/group.tsx @@ -1,13 +1,17 @@ -import {Fragment, useCallback, useMemo, useRef} from 'react'; +import {Fragment, useCallback, useMemo, useRef, useState} from 'react'; import type {Theme} from '@emotion/react'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; import type {LocationDescriptor} from 'history'; +import {assignToActor, clearAssignment} from 'sentry/actionCreators/group'; +import {addErrorMessage} from 'sentry/actionCreators/indicator'; +import AssigneeSelectorDropdown, { + type AssignableEntity, +} from 'sentry/components/assigneeSelectorDropdown'; import GuideAnchor from 'sentry/components/assistant/guideAnchor'; import Checkbox from 'sentry/components/checkbox'; import Count from 'sentry/components/count'; -import DeprecatedAssigneeSelector from 'sentry/components/deprecatedAssigneeSelector'; import EventOrGroupExtraDetails from 'sentry/components/eventOrGroupExtraDetails'; import EventOrGroupHeader from 'sentry/components/eventOrGroupHeader'; import type {GroupListColumn} from 'sentry/components/issues/groupList'; @@ -42,6 +46,8 @@ import {trackAnalytics} from 'sentry/utils/analytics'; import {isDemoWalkthrough} from 'sentry/utils/demoMode'; import EventView from 'sentry/utils/discover/eventView'; import {getConfigForIssueType} from 'sentry/utils/issueTypeConfig'; +import {useMutation} from 'sentry/utils/queryClient'; +import type RequestError from 'sentry/utils/requestError/requestError'; import usePageFilters from 'sentry/utils/usePageFilters'; import withOrganization from 'sentry/utils/withOrganization'; import type {TimePeriodType} from 'sentry/views/alerts/rules/metric/details/constants'; @@ -106,6 +112,8 @@ function BaseGroupRow({ const {selection} = usePageFilters(); + const [assigneeLoading, setAssigneeLoading] = useState(false); + const originalInboxState = useRef(group.inbox as InboxDetails | null); const referrer = source ? 
`${source}-issue-stream` : 'issue-stream'; @@ -136,20 +144,44 @@ }; }, [organization, group.id, group.owners, query]); - const trackAssign: React.ComponentProps['onAssign'] = - useCallback( - (type, _assignee, suggestedAssignee) => { - if (query !== undefined) { - trackAnalytics('issues_stream.issue_assigned', { - ...sharedAnalytics, - did_assign_suggestion: !!suggestedAssignee, - assigned_suggestion_reason: suggestedAssignee?.suggestedReason, - assigned_type: type, - }); - } - }, - [query, sharedAnalytics] - ); + const {mutate: handleAssigneeChange} = useMutation< + AssignableEntity | null, + RequestError, + AssignableEntity | null + >({ + mutationFn: async ( + newAssignee: AssignableEntity | null + ): Promise => { + setAssigneeLoading(true); + if (newAssignee) { + await assignToActor({ + id: group.id, + orgSlug: organization.slug, + actor: {id: newAssignee.id, type: newAssignee.type}, + assignedBy: 'assignee_selector', + }); + return Promise.resolve(newAssignee); + } + + await clearAssignment(group.id, organization.slug, 'assignee_selector'); + return Promise.resolve(null); + }, + onSuccess: (newAssignee: AssignableEntity | null) => { + if (query !== undefined && newAssignee) { + trackAnalytics('issues_stream.issue_assigned', { + ...sharedAnalytics, + did_assign_suggestion: !!newAssignee.suggestedAssignee, + assigned_suggestion_reason: newAssignee.suggestedAssignee?.suggestedReason, + assigned_type: newAssignee.type, + }); + } + setAssigneeLoading(false); + }, + onError: () => { + addErrorMessage('Failed to update assignee'); + setAssigneeLoading(false); + }, + }); const wrapperToggle = useCallback( (evt: React.MouseEvent) => { @@ -467,10 +499,14 @@ ) : null} {withColumns.includes('assignee') && ( - + handleAssigneeChange(assignedActor) + } + onClear={() => handleAssigneeChange(null)} /> )} From 541ed7bc7112f64da61ac0272b9a816d7fcbd1c4 Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Wed, 8 May 2024 15:59:55 -0400 Subject: [PATCH 173/376] feat(perf): Update Span Summary page with new design and span metrics dataset (#69159) Upgrades the old 'Suspect Spans' / 'Similar Spans' view to a new design fit for a span-centric world. All API requests have been changed to query from the span metrics and indexed spans datasets, so a summary page will be available for every unique span hash. ### Note This is the initial PR and there is still some polishing that needs to be done, which will be addressed in follow-up PRs. This page is currently hidden behind a feature flag that's only available to certain internal users, and will slowly be rolled out to GA after it is complete.
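For illustration, here is a condensed sketch of the querying pattern the new page relies on, adapted from the header query in this diff; `groupId`, `spanOp`, and `transactionName` stand in for values parsed from the route, and imports are shown for context:

```tsx
import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover';
import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types';

// Every widget on the page narrows the span metrics dataset down to a single
// span group hash within the current transaction, which is why any unique
// span hash now gets a summary page.
const filters: SpanMetricsQueryFilters = {
  'span.group': groupId,
  'span.op': spanOp,
  transaction: transactionName,
};

const {data} = useSpanMetrics({
  search: MutableSearch.fromQueryObject(filters),
  fields: ['span.description', 'avg(span.self_time)', 'sum(span.self_time)', 'count()'],
  enabled: Boolean(groupId),
  referrer: 'api.performance.span-summary-header-data',
});
```

The charts and table follow the same shape, swapping in their own fields and referrers.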
To be done in future PRs: - [ ] Unit tests - [ ] Transaction throughput chart is bugged - [ ] Chart cursors should be synchronized - [ ] Pagination cursor should be reset when `Reset View` is clicked ### Before ![image](https://github.com/getsentry/sentry/assets/16740047/74224e42-3ed8-4cdc-9bf6-1104f318cd22) ### After ![image](https://github.com/getsentry/sentry/assets/16740047/6b725837-d1b4-41cd-91af-22e50f0fff77) --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> Co-authored-by: Shruthi --- .../spans/newTraceDetailsSpanDetails.tsx | 15 +- .../details/span/sections/description.tsx | 9 +- .../transactionSpans/spanDetails/content.tsx | 8 + .../transactionSpans/spanDetails/utils.tsx | 2 + .../transactionSpans/spanSummary/content.tsx | 139 +++++++++ .../spanSummary/referrers.tsx | 7 + .../spanSummary/spanSummaryCharts.tsx | 179 +++++++++++ .../spanSummary/spanSummaryControls.tsx | 33 ++ .../spanSummary/spanSummaryHeader.tsx | 108 +++++++ .../spanSummary/spanSummaryTable.tsx | 282 ++++++++++++++++++ .../spanSummary/useSpanSummarySort.tsx | 42 +++ static/app/views/starfish/colors.tsx | 1 + .../components/tableCells/renderHeadCell.tsx | 2 + 13 files changed, 823 insertions(+), 4 deletions(-) create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/referrers.tsx create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryControls.tsx create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryHeader.tsx create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryTable.tsx create mode 100644 static/app/views/performance/transactionSummary/transactionSpans/spanSummary/useSpanSummarySort.tsx diff --git a/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx b/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx index 8648960d3fb89f..633aab6a78d63c 100644 --- a/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx +++ b/static/app/components/events/interfaces/spans/newTraceDetailsSpanDetails.tsx @@ -229,6 +229,17 @@ function NewTraceDetailsSpanDetail(props: SpanDetailProps) { } const transactionName = event.title; + const hasNewSpansUIFlag = organization.features.includes('performance-spans-new-ui'); + + // The new spans UI relies on the group hash assigned by Relay, which is different from the hash available on the span itself + const groupHash = hasNewSpansUIFlag + ? props.node.value.sentry_tags?.group ?? '' + : props.node.value.hash; + + // Do not render a button if there is no group hash, since this can result in broken links + if (hasNewSpansUIFlag && !groupHash) { + return null; + } return ( @@ -243,11 +254,11 @@ function NewTraceDetailsSpanDetail(props: SpanDetailProps) { orgSlug: organization.slug, transaction: transactionName, query: location.query, - spanSlug: {op: props.node.value.op, group: props.node.value.hash}, + spanSlug: {op: props.node.value.op, group: groupHash}, projectID: event.projectID, })} > - {t('View Similar Spans')} + {hasNewSpansUIFlag ? 
t('View Span Summary') : t('View Similar Spans')} ); diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx index 3c1c228ccc3f92..384e280c590787 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx @@ -40,6 +40,11 @@ export function SpanDescription({ return null; } + const hasNewSpansUIFlag = organization.features.includes('performance-spans-new-ui'); + + // The new spans UI relies on the group hash assigned by Relay, which is different from the hash available on the span itself + const groupHash = hasNewSpansUIFlag ? span.sentry_tags?.group ?? '' : span.hash ?? ''; + const actions = !span.op || !span.hash ? null : ( @@ -50,11 +55,11 @@ export function SpanDescription({ orgSlug: organization.slug, transaction: event.title, query: location.query, - spanSlug: {op: span.op, group: span.hash}, + spanSlug: {op: span.op, group: groupHash}, projectID: event.projectID, })} > - {t('View Similar Spans')} + {hasNewSpansUIFlag ? t('View Span Summary') : t('View Similar Spans')} ); diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/content.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/content.tsx index 2c6d92f340f5e6..7ac474986f906d 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/content.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/content.tsx @@ -18,6 +18,7 @@ import {decodeScalar} from 'sentry/utils/queryString'; import useRouteAnalyticsEventNames from 'sentry/utils/routeAnalytics/useRouteAnalyticsEventNames'; import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; import Breadcrumb from 'sentry/views/performance/breadcrumb'; +import SpanSummary from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/content'; import {getSelectedProjectPlatforms} from 'sentry/views/performance/utils'; import Tab from '../../tabs'; @@ -53,6 +54,13 @@ export default function SpanDetailsContentWrapper(props: Props) { project_platforms: project ? 
getSelectedProjectPlatforms(location, [project]) : '', }); + const hasNewSpansUIFlag = organization.features.includes('performance-spans-new-ui'); + + // TODO: When this feature is rolled out to GA, we will no longer need the entire `spanDetails` directory and can switch to `spanSummary` + if (hasNewSpansUIFlag) { + return ; + } + return ( diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/utils.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/utils.tsx index 15eb5cb10e88cf..aa727ebc701f70 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/utils.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/utils.tsx @@ -124,4 +124,6 @@ export function resourceSummaryRouteWithQuery({ export enum ZoomKeys { MIN = 'min', MAX = 'max', + START = 'start', + END = 'end', } diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx new file mode 100644 index 00000000000000..e7b9aa7bc0af61 --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx @@ -0,0 +1,139 @@ +import {Fragment} from 'react'; +import type {Location} from 'history'; + +import IdBadge from 'sentry/components/idBadge'; +import * as Layout from 'sentry/components/layouts/thirds'; +import {t} from 'sentry/locale'; +import type {Organization, Project} from 'sentry/types'; +import type EventView from 'sentry/utils/discover/eventView'; +import type {SpanSlug} from 'sentry/utils/performance/suspectSpans/types'; +import useRouteAnalyticsEventNames from 'sentry/utils/routeAnalytics/useRouteAnalyticsEventNames'; +import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; +import {MutableSearch} from 'sentry/utils/tokenizeSearch'; +import {useParams} from 'sentry/utils/useParams'; +import Breadcrumb from 'sentry/views/performance/breadcrumb'; +import {SpanSummaryReferrer} from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/referrers'; +import SpanSummaryCharts from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts'; +import SpanSummaryTable from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryTable'; +import {getSelectedProjectPlatforms} from 'sentry/views/performance/utils'; +import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover'; +import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; + +import Tab from '../../tabs'; + +import SpanSummaryControls from './spanSummaryControls'; +import SpanSummaryHeader from './spanSummaryHeader'; + +type Props = { + eventView: EventView; + location: Location; + organization: Organization; + project: Project | undefined; + spanSlug: SpanSlug; + transactionName: string; +}; + +export default function SpanSummary(props: Props) { + const {location, organization, eventView, project, transactionName, spanSlug} = props; + + // customize the route analytics event we send + useRouteAnalyticsEventNames( + 'performance_views.span_summary.view', + 'Performance Views: Span Summary page viewed' + ); + useRouteAnalyticsParams({ + project_platforms: project ? 
getSelectedProjectPlatforms(location, [project]) : '', + }); + + return ( + + + + + + {project && ( + + )} + {transactionName} + + + + + + + + + + ); +} + +type ContentProps = { + eventView: EventView; + location: Location; + organization: Organization; + project: Project | undefined; + spanSlug: SpanSlug; + transactionName: string; +}; + +function SpanSummaryContent(props: ContentProps) { + const {transactionName, project} = props; + + const {spanSlug: spanParam} = useParams(); + const [spanOp, groupId] = spanParam.split(':'); + + const filters: SpanMetricsQueryFilters = { + 'span.group': groupId, + 'span.op': spanOp, + transaction: transactionName, + }; + + const {data: spanHeaderData} = useSpanMetrics({ + search: MutableSearch.fromQueryObject(filters), + // TODO: query average duration instead of self time before releasing this + fields: ['span.description', 'avg(span.self_time)', 'sum(span.self_time)', 'count()'], + enabled: Boolean(groupId), + referrer: SpanSummaryReferrer.SPAN_SUMMARY_HEADER_DATA, + }); + + const description = spanHeaderData[0]?.['span.description'] ?? t('unknown'); + const timeSpent = spanHeaderData[0]?.['sum(span.self_time)']; + const avgDuration = spanHeaderData[0]?.['avg(span.self_time)']; + const spanCount = spanHeaderData[0]?.['count()']; + + return ( + + + + + + + ); +} diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/referrers.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/referrers.tsx new file mode 100644 index 00000000000000..e5ffc6e6332dc2 --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/referrers.tsx @@ -0,0 +1,7 @@ +export enum SpanSummaryReferrer { + SPAN_SUMMARY_HEADER_DATA = 'api.performance.span-summary-header-data', + SPAN_SUMMARY_TABLE = 'api.performance.span-summary-table', + SPAN_SUMMARY_DURATION_CHART = 'api.performance.span-summary-duration-chart', + SPAN_SUMMARY_THROUGHPUT_CHART = 'api.performance.span-summary-throughput-chart', + SPAN_SUMMARY_TRANSACTION_THROUGHPUT_CHART = 'api.performance.span-summary-transaction-throughput-chart', +} diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx new file mode 100644 index 00000000000000..55e595f223ad74 --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx @@ -0,0 +1,179 @@ +import {t} from 'sentry/locale'; +import type {Series} from 'sentry/types/echarts'; +import EventView, {type MetaType} from 'sentry/utils/discover/eventView'; +import {RateUnit} from 'sentry/utils/discover/fields'; +import { + type DiscoverQueryProps, + useGenericDiscoverQuery, +} from 'sentry/utils/discover/genericDiscoverQuery'; +import {formatRate} from 'sentry/utils/formatters'; +import {MutableSearch} from 'sentry/utils/tokenizeSearch'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {useParams} from 'sentry/utils/useParams'; +import {SpanSummaryReferrer} from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/referrers'; +import { + AVG_COLOR, + THROUGHPUT_COLOR, + TXN_THROUGHPUT_COLOR, +} from 'sentry/views/starfish/colors'; +import Chart, {ChartType} from 'sentry/views/starfish/components/chart'; +import ChartPanel from 'sentry/views/starfish/components/chartPanel'; +import 
{useSpanMetricsSeries} from 'sentry/views/starfish/queries/useDiscoverSeries'; +import { + SpanMetricsField, + type SpanMetricsQueryFilters, +} from 'sentry/views/starfish/types'; +import {Block, BlockContainer} from 'sentry/views/starfish/views/spanSummaryPage/block'; + +function SpanSummaryCharts() { + const organization = useOrganization(); + const {spanSlug} = useParams(); + const [spanOp, groupId] = spanSlug.split(':'); + + const location = useLocation(); + const {transaction} = location.query; + + const filters: SpanMetricsQueryFilters = { + 'span.group': groupId, + 'span.op': spanOp, + transaction: transaction as string, + }; + + const { + isLoading: isThroughputDataLoading, + data: throughputData, + error: throughputError, + } = useSpanMetricsSeries({ + search: MutableSearch.fromQueryObject(filters), + yAxis: ['spm()'], + enabled: Boolean(groupId), + referrer: SpanSummaryReferrer.SPAN_SUMMARY_THROUGHPUT_CHART, + }); + + const { + isLoading: isAvgDurationDataLoading, + data: avgDurationData, + error: avgDurationError, + } = useSpanMetricsSeries({ + search: MutableSearch.fromQueryObject(filters), + // TODO: Switch this to SPAN_DURATION before release + yAxis: [`avg(${SpanMetricsField.SPAN_SELF_TIME})`], + enabled: Boolean(groupId), + referrer: SpanSummaryReferrer.SPAN_SUMMARY_DURATION_CHART, + }); + + const eventView = EventView.fromNewQueryWithLocation( + { + yAxis: ['tpm()'], + name: 'Transaction Throughput', + query: MutableSearch.fromQueryObject({ + transaction: transaction as string, + }).formatString(), + fields: [], + version: 2, + }, + location + ); + + const { + isLoading: isTxnThroughputDataLoading, + data: txnThroughputData, + error: txnThroughputError, + } = useGenericDiscoverQuery< + { + data: any[]; + meta: MetaType; + }, + DiscoverQueryProps + >({ + route: 'events-stats', + eventView, + location, + orgSlug: organization.slug, + getRequestPayload: () => ({ + ...eventView.getEventsAPIPayload(location), + yAxis: eventView.yAxis, + topEvents: eventView.topEvents, + excludeOther: 0, + partial: 1, + orderby: undefined, + interval: eventView.interval, + }), + options: { + refetchOnWindowFocus: false, + }, + referrer: SpanSummaryReferrer.SPAN_SUMMARY_TRANSACTION_THROUGHPUT_CHART, + }); + + const transactionSeries: Series = { + seriesName: 'tpm()', + data: + txnThroughputData?.data.map(datum => ({ + value: datum[1][0].count, + name: datum[0], + })) ?? 
[], + }; + + return ( + + + + + + + + + + formatRate(value, RateUnit.PER_MINUTE), + }} + /> + + + + + + formatRate(value, RateUnit.PER_MINUTE), + }} + /> + + + + ); +} + +export default SpanSummaryCharts; diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryControls.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryControls.tsx new file mode 100644 index 00000000000000..924c6fb9520ee2 --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryControls.tsx @@ -0,0 +1,33 @@ +import styled from '@emotion/styled'; + +import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; +import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; +import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; +import {space} from 'sentry/styles/space'; + +import {SPAN_RELATIVE_PERIODS, SPAN_RETENTION_DAYS} from '../utils'; + +export default function SpanDetailsControls() { + return ( + + + + + + + ); +} + +const FilterActions = styled('div')` + display: flex; + justify-content: space-between; + margin-bottom: ${space(2)}; + flex-direction: column; + + @media (min-width: ${p => p.theme.breakpoints.small}) { + flex-direction: row; + } +`; diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryHeader.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryHeader.tsx new file mode 100644 index 00000000000000..e843c1b8bac09d --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryHeader.tsx @@ -0,0 +1,108 @@ +import styled from '@emotion/styled'; + +import {SectionHeading} from 'sentry/components/charts/styles'; +import Count from 'sentry/components/count'; +import PerformanceDuration from 'sentry/components/performanceDuration'; +import {t, tct} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {defined} from 'sentry/utils'; +import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; +import {DataTitles} from 'sentry/views/starfish/views/spans/types'; + +type Props = { + avgDuration: number; + spanCount: number; + spanDescription: string; + spanOp: string; + timeSpent: number; +}; + +export default function SpanSummaryHeader(props: Props) { + const {spanOp, spanDescription, avgDuration, timeSpent, spanCount} = props; + + return ( + + + {t('Span')} + + {spanDescription ?? emptyValue} + + {spanOp} + + + + {DataTitles.avg} + + + {defined(avgDuration) + ? formatMetricUsingUnit(avgDuration, 'milliseconds') + : '\u2014'} + + + + + + {DataTitles.timeSpent} + + + {defined(timeSpent) ? ( + + ) : ( + '\u2014' + )} + + + {defined(spanCount) + ? 
tct('[spanCount] spans', {spanCount: }) + : '\u2014'} + + + + + ); +} + +const ContentHeader = styled('div')` + display: grid; + grid-template-columns: 1fr; + gap: ${space(4)}; + margin-bottom: ${space(2)}; + + @media (min-width: ${p => p.theme.breakpoints.medium}) { + grid-template-columns: 1fr repeat(3, max-content); + } +`; + +const HeaderInfo = styled('div')` + ${p => p.theme.overflowEllipsis}; + height: 78px; +`; + +const StyledSectionHeading = styled(SectionHeading)` + margin: 0; +`; + +const NumericSectionWrapper = styled('div')` + text-align: right; +`; + +const SectionBody = styled('div')<{overflowEllipsis?: boolean}>` + font-size: ${p => p.theme.fontSizeExtraLarge}; + padding: ${space(0.5)} 0; + max-height: 32px; +`; + +const SectionSubtext = styled('div')` + color: ${p => p.theme.subText}; + font-size: ${p => p.theme.fontSizeMedium}; +`; + +export const SpanLabelContainer = styled('div')` + ${p => p.theme.overflowEllipsis}; +`; + +const EmptyValueContainer = styled('span')` + color: ${p => p.theme.gray300}; +`; + +const emptyValue = {t('(unnamed span)')}; diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryTable.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryTable.tsx new file mode 100644 index 00000000000000..f96e64ae169aaf --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryTable.tsx @@ -0,0 +1,282 @@ +import {Fragment} from 'react'; +import {browserHistory} from 'react-router'; +import styled from '@emotion/styled'; +import type {Location} from 'history'; + +import type {GridColumnHeader} from 'sentry/components/gridEditable'; +import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; +import Pagination, {type CursorHandler} from 'sentry/components/pagination'; +import {ROW_HEIGHT, ROW_PADDING} from 'sentry/components/performance/waterfall/constants'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Organization, Project} from 'sentry/types'; +import EventView, {type MetaType} from 'sentry/utils/discover/eventView'; +import {getFieldRenderer} from 'sentry/utils/discover/fieldRenderers'; +import type {ColumnType} from 'sentry/utils/discover/fields'; +import { + type DiscoverQueryProps, + useGenericDiscoverQuery, +} from 'sentry/utils/discover/genericDiscoverQuery'; +import {VisuallyCompleteWithData} from 'sentry/utils/performanceForSentry'; +import {decodeScalar} from 'sentry/utils/queryString'; +import {MutableSearch} from 'sentry/utils/tokenizeSearch'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {useParams} from 'sentry/utils/useParams'; +import {SpanDurationBar} from 'sentry/views/performance/transactionSummary/transactionSpans/spanDetails/spanDetailsTable'; +import {SpanSummaryReferrer} from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/referrers'; +import {useSpanSummarySort} from 'sentry/views/performance/transactionSummary/transactionSpans/spanSummary/useSpanSummarySort'; +import {renderHeadCell} from 'sentry/views/starfish/components/tableCells/renderHeadCell'; +import {SpanIdCell} from 'sentry/views/starfish/components/tableCells/spanIdCell'; +import {useIndexedSpans} from 'sentry/views/starfish/queries/useIndexedSpans'; +import { + type IndexedResponse, + SpanIndexedField, + type SpanMetricsQueryFilters, +} from 'sentry/views/starfish/types'; 
+import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; + +type DataRowKeys = + | SpanIndexedField.ID + | SpanIndexedField.TIMESTAMP + | SpanIndexedField.SPAN_DURATION + | SpanIndexedField.TRANSACTION_ID + | SpanIndexedField.TRACE + | SpanIndexedField.PROJECT; + +type ColumnKeys = + | SpanIndexedField.ID + | SpanIndexedField.TIMESTAMP + | SpanIndexedField.SPAN_DURATION; + +type DataRow = Pick & {'transaction.duration': number}; + +type Column = GridColumnHeader; + +const COLUMN_ORDER: Column[] = [ + { + key: SpanIndexedField.ID, + name: t('Span ID'), + width: COL_WIDTH_UNDEFINED, + }, + { + key: SpanIndexedField.TIMESTAMP, + name: t('Timestamp'), + width: COL_WIDTH_UNDEFINED, + }, + { + key: SpanIndexedField.SPAN_DURATION, + name: t('Span Duration'), + width: COL_WIDTH_UNDEFINED, + }, +]; + +const COLUMN_TYPE: Omit< + Record, + 'spans' | 'transactionDuration' +> = { + span_id: 'string', + timestamp: 'date', + 'span.duration': 'duration', +}; + +const LIMIT = 8; + +type Props = { + project: Project | undefined; +}; + +export default function SpanSummaryTable(props: Props) { + const {project} = props; + const organization = useOrganization(); + const {spanSlug} = useParams(); + const [spanOp, groupId] = spanSlug.split(':'); + + const location = useLocation(); + const {transaction} = location.query; + const spansCursor = decodeScalar(location.query?.[QueryParameterNames.SPANS_CURSOR]); + + const filters: SpanMetricsQueryFilters = { + 'span.group': groupId, + 'span.op': spanOp, + transaction: transaction as string, + }; + + const sort = useSpanSummarySort(); + + const { + data: rowData, + pageLinks, + isLoading: isRowDataLoading, + } = useIndexedSpans({ + fields: [ + SpanIndexedField.ID, + SpanIndexedField.TRANSACTION_ID, + SpanIndexedField.TIMESTAMP, + SpanIndexedField.SPAN_DURATION, + SpanIndexedField.TRACE, + ], + search: MutableSearch.fromQueryObject(filters), + limit: LIMIT, + referrer: SpanSummaryReferrer.SPAN_SUMMARY_TABLE, + sorts: [sort], + cursor: spansCursor, + }); + + const transactionIds = rowData?.map(row => row[SpanIndexedField.TRANSACTION_ID]); + + const eventView = EventView.fromNewQueryWithLocation( + { + name: 'Transaction Durations', + query: MutableSearch.fromQueryObject({ + project: project?.slug, + id: `[${transactionIds?.join() ?? ''}]`, + }).formatString(), + fields: ['id', 'transaction.duration'], + version: 2, + }, + location + ); + + const { + isLoading: isTxnDurationDataLoading, + data: txnDurationData, + isError: isTxnDurationError, + } = useGenericDiscoverQuery< + { + data: any[]; + meta: MetaType; + }, + DiscoverQueryProps + >({ + route: 'events', + eventView, + location, + orgSlug: organization.slug, + getRequestPayload: () => ({ + ...eventView.getEventsAPIPayload(location), + interval: eventView.interval, + }), + limit: LIMIT, + options: { + refetchOnWindowFocus: false, + enabled: Boolean(rowData), + }, + referrer: SpanSummaryReferrer.SPAN_SUMMARY_TABLE, + }); + + // Restructure the transaction durations into a map for faster lookup + const transactionDurationMap = {}; + txnDurationData?.data.forEach(datum => { + transactionDurationMap[datum.id] = datum['transaction.duration']; + }); + + const mergedData: DataRow[] = + rowData?.map((row: Pick) => { + const transactionId = row[SpanIndexedField.TRANSACTION_ID]; + const newRow = { + ...row, + 'transaction.duration': transactionDurationMap[transactionId], + }; + return newRow; + }) ?? 
[]; + + const handleCursor: CursorHandler = (cursor, pathname, query) => { + browserHistory.push({ + pathname, + query: {...query, [QueryParameterNames.SPANS_CURSOR]: cursor}, + }); + }; + + return ( + + + + renderHeadCell({ + column, + location, + sort, + }), + renderBodyCell: renderBodyCell( + location, + organization, + spanOp, + isTxnDurationDataLoading || isTxnDurationError + ), + }} + location={location} + /> + + + + ); +} + +function renderBodyCell( + location: Location, + organization: Organization, + spanOp: string = '', + isTxnDurationDataLoading: boolean +) { + return function (column: Column, dataRow: DataRow): React.ReactNode { + const {timestamp, span_id, trace, project} = dataRow; + const spanDuration = dataRow[SpanIndexedField.SPAN_DURATION]; + const transactionId = dataRow[SpanIndexedField.TRANSACTION_ID]; + const transactionDuration = dataRow['transaction.duration']; + + if (column.key === SpanIndexedField.SPAN_DURATION) { + if (isTxnDurationDataLoading) { + return ; + } + + return ( + + ); + } + + if (column.key === SpanIndexedField.ID) { + return ( + + ); + } + + const fieldRenderer = getFieldRenderer(column.key, COLUMN_TYPE); + const rendered = fieldRenderer(dataRow, {location, organization}); + + return rendered; + }; +} + +const SpanDurationBarLoading = styled('div')` + height: ${ROW_HEIGHT - 2 * ROW_PADDING}px; + width: 100%; + position: relative; + display: flex; + top: ${space(0.5)}; + background-color: ${p => p.theme.gray100}; +`; diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/useSpanSummarySort.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/useSpanSummarySort.tsx new file mode 100644 index 00000000000000..87395016db4d9a --- /dev/null +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/useSpanSummarySort.tsx @@ -0,0 +1,42 @@ +import type {Sort} from 'sentry/utils/discover/fields'; +import {decodeSorts} from 'sentry/utils/queryString'; +import {useLocation} from 'sentry/utils/useLocation'; +import {SpanIndexedField} from 'sentry/views/starfish/types'; +import type {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; + +type Query = { + sort?: string; +}; + +const SORTABLE_FIELDS = [ + SpanIndexedField.TIMESTAMP, + SpanIndexedField.SPAN_DURATION, +] as const; + +export type ValidSort = Sort & { + field: (typeof SORTABLE_FIELDS)[number]; +}; + +/** + * Parses a `Sort` object from the URL. In case of multiple specified sorts + * picks the first one, since span module UIs only support one sort at a time. + */ +export function useSpanSummarySort( + sortParameterName: QueryParameterNames | 'sort' = 'sort', + fallback: Sort = DEFAULT_SORT +) { + const location = useLocation(); + + return ( + decodeSorts(location.query[sortParameterName]).filter(isAValidSort)[0] ?? 
fallback
+  );
+}
+
+const DEFAULT_SORT: Sort = {
+  kind: 'desc',
+  field: SORTABLE_FIELDS[0],
+};
+
+function isAValidSort(sort: Sort): sort is ValidSort {
+  return (SORTABLE_FIELDS as unknown as string[]).includes(sort.field);
+}
diff --git a/static/app/views/starfish/colors.tsx b/static/app/views/starfish/colors.tsx
index 6546d9c067633c..104a5098132908 100644
--- a/static/app/views/starfish/colors.tsx
+++ b/static/app/views/starfish/colors.tsx
@@ -4,6 +4,7 @@ import {CHART_PALETTE} from 'sentry/constants/chartPalette';
 
 export const COUNT_COLOR = CHART_PALETTE[0][0];
 export const THROUGHPUT_COLOR = CHART_PALETTE[3][3];
+export const TXN_THROUGHPUT_COLOR = CHART_PALETTE[3][2];
 export const P50_COLOR = CHART_PALETTE[3][1];
 export const P95_COLOR = CHART_PALETTE[0][0];
 export const AVG_COLOR = CHART_PALETTE[0][0];
diff --git a/static/app/views/starfish/components/tableCells/renderHeadCell.tsx b/static/app/views/starfish/components/tableCells/renderHeadCell.tsx
index 46a77ca39ed083..7046ee1b847302 100644
--- a/static/app/views/starfish/components/tableCells/renderHeadCell.tsx
+++ b/static/app/views/starfish/components/tableCells/renderHeadCell.tsx
@@ -53,6 +53,8 @@ export const SORTABLE_FIELDS = new Set([
   `avg(${HTTP_RESPONSE_CONTENT_LENGTH})`,
   `${CACHE_HIT_RATE}()`,
   `${CACHE_MISS_RATE}()`,
+  SpanIndexedField.TIMESTAMP,
+  SpanIndexedField.SPAN_DURATION,
   `avg(${CACHE_ITEM_SIZE})`,
 ]);

From 6bf4781c3e1c656aade0ea2d850ad153386058c2 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 8 May 2024 16:01:09 -0400
Subject: [PATCH 174/376] ref(crons): Don't error on missed check-ins (#70506)

Fixes SENTRY-38Z6

---
 src/sentry/monitors/clock_tasks/check_timeout.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/src/sentry/monitors/clock_tasks/check_timeout.py b/src/sentry/monitors/clock_tasks/check_timeout.py
index 8ed1c9f2247859..9cf672519144a3 100644
--- a/src/sentry/monitors/clock_tasks/check_timeout.py
+++ b/src/sentry/monitors/clock_tasks/check_timeout.py
@@ -56,11 +56,17 @@ def dispatch_check_timeout(ts: datetime):
 def mark_checkin_timeout(checkin_id: int, ts: datetime):
     logger.info("checkin_timeout", extra={"checkin_id": checkin_id})
 
-    checkin = (
-        MonitorCheckIn.objects.select_related("monitor_environment")
-        .select_related("monitor_environment__monitor")
-        .get(id=checkin_id)
-    )
+    try:
+        checkin = (
+            MonitorCheckIn.objects.select_related("monitor_environment")
+            .select_related("monitor_environment__monitor")
+            .get(id=checkin_id)
+        )
+    except MonitorCheckIn.DoesNotExist:
+        # The monitor may have been deleted or the timeout may have reached
+        # its retention period (less likely)
+        metrics.incr("sentry.monitors.tasks.check_timeout.not_found")
+        return
 
     monitor_environment = checkin.monitor_environment
     monitor = monitor_environment.monitor
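
No regression test accompanies the new `DoesNotExist` guard above. A minimal sketch of what one could look like, assuming pytest with Sentry's `TestCase` base class and that `metrics` is the module-level name `mark_checkin_timeout` calls into; the test class and method names are hypothetical:

```python
from unittest.mock import patch

from django.utils import timezone

from sentry.monitors.clock_tasks.check_timeout import mark_checkin_timeout
from sentry.testutils.cases import TestCase


class MarkCheckinTimeoutMissingTest(TestCase):
    @patch("sentry.monitors.clock_tasks.check_timeout.metrics")
    def test_missing_checkin_does_not_error(self, mock_metrics):
        # A check-in id that no longer exists should increment the
        # not_found metric instead of raising MonitorCheckIn.DoesNotExist.
        mark_checkin_timeout(checkin_id=-1, ts=timezone.now())
        mock_metrics.incr.assert_called_once_with(
            "sentry.monitors.tasks.check_timeout.not_found"
        )
```

Patching `metrics` keeps the assertion on the not-found counter independent of whichever metrics backend is configured.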
From 8e9fccfbed95cace9f4331ea3a1a365fc1892ae1 Mon Sep 17 00:00:00 2001
From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com>
Date: Wed, 8 May 2024 13:02:26 -0700
Subject: [PATCH 175/376] add timebox start/end to query subscription (#70518)

Model migrations for https://github.com/getsentry/sentry/pull/70486

---
 migrations_lockfile.txt                       |  2 +-
 .../0717_query_subscription_timebox.py        | 38 +++++++++++++++++++
 src/sentry/snuba/models.py                    |  3 ++
 .../test_default_comparators.pysnap           |  4 +-
 4 files changed, 45 insertions(+), 2 deletions(-)
 create mode 100644 src/sentry/migrations/0717_query_subscription_timebox.py

diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index ba2673163e960c..a311f852ab3cb8 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -9,5 +9,5 @@ feedback: 0004_index_together
 hybridcloud: 0016_add_control_cacheversion
 nodestore: 0002_nodestore_no_dictfield
 replays: 0004_index_together
-sentry: 0716_remove_actormodel
+sentry: 0717_query_subscription_timebox
 social_auth: 0002_default_auto_field
diff --git a/src/sentry/migrations/0717_query_subscription_timebox.py b/src/sentry/migrations/0717_query_subscription_timebox.py
new file mode 100644
index 00000000000000..2fbd3e74834bf0
--- /dev/null
+++ b/src/sentry/migrations/0717_query_subscription_timebox.py
@@ -0,0 +1,38 @@
+# Generated by Django 5.0.4 on 2024-05-08 18:47
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+    # This flag is used to mark that a migration shouldn't be automatically run in production.
+    # This should only be used for operations where it's safe to run the migration after your
+    # code has deployed. So this should not be used for most operations that alter the schema
+    # of a table.
+    # Here are some things that make sense to mark as post deployment:
+    # - Large data migrations. Typically we want these to be run manually so that they can be
+    #   monitored and not block the deploy for a long period of time while they run.
+    # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+    #   run this outside deployments so that we don't block them. Note that while adding an index
+    #   is a schema change, it's completely safe to run the operation after the code has deployed.
+    # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+    is_post_deployment = False
+
+    dependencies = [
+        ("sentry", "0716_remove_actormodel"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="querysubscription",
+            name="timebox_end",
+            field=models.DateTimeField(null=True),
+        ),
+        migrations.AddField(
+            model_name="querysubscription",
+            name="timebox_start",
+            field=models.DateTimeField(null=True),
+        ),
+    ]
diff --git a/src/sentry/snuba/models.py b/src/sentry/snuba/models.py
index c8468eb52057f4..65057ceea604bf 100644
--- a/src/sentry/snuba/models.py
+++ b/src/sentry/snuba/models.py
@@ -117,6 +117,9 @@ class Status(Enum):
     query_extra = models.TextField(
         null=True
     )  # additional query filters to attach to the query created in Snuba such as datetime filters, or release/deploy tags
+    # timebox_start/end are optional timebox restrictions to apply to the snuba query
+    timebox_start = models.DateTimeField(null=True)
+    timebox_end = models.DateTimeField(null=True)
 
     objects: ClassVar[BaseManager[Self]] = BaseManager(
         cache_fields=("pk", "subscription_id"), cache_ttl=int(timedelta(hours=1).total_seconds())
diff --git a/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap b/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap
index b164c3d3ca368b..644876c001ccfe 100644
--- a/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap
+++ b/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap
@@ -1,5 +1,5 @@
 ---
-created: '2024-05-08T14:31:35.472856+00:00'
+created: '2024-05-08T18:49:05.727788+00:00'
 creator: sentry
 source: tests/sentry/backup/test_comparators.py
@@ -1150,6 +1150,8 @@ source: tests/sentry/backup/test_comparators.py
 - class: DatetimeEqualityComparator
   fields:
   - date_added
+  - timebox_end
+  - timebox_start
 - class: ForeignKeyComparator
   fields:
   - project

From 025d52dede7ee87915894c1e2b05ed85ade48264 Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Wed, 8 May 2024 16:12:03 -0400
Subject: [PATCH 176/376] chore(sdk): Turn off interactions experiment (#70530)

We think this is interacting poorly with the trace IDs and causing a lot
of `http.client` transactions with incorrect durations.

---
 static/app/bootstrap/initializeSdk.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/static/app/bootstrap/initializeSdk.tsx b/static/app/bootstrap/initializeSdk.tsx
index d254da62ea6cfd..51811f819ef059 100644
--- a/static/app/bootstrap/initializeSdk.tsx
+++ b/static/app/bootstrap/initializeSdk.tsx
@@ -59,7 +59,7 @@ function getSentryIntegrations(routes?: Function) {
       routes: typeof routes === 'function' ? createRoutes(routes()) : [],
       match,
       _experiments: {
-        enableInteractions: true,
+        enableInteractions: false,
       },
     }),
     Sentry.browserProfilingIntegration(),

From b0ee032bae0704b40a1a9a96b4c3f7cca82373ea Mon Sep 17 00:00:00 2001
From: Isabella Enriquez
Date: Wed, 8 May 2024 16:26:03 -0400
Subject: [PATCH 177/376] feat(spend-visibility): Update user notification
 settings with spend (#69933)

Awaiting new feature flag

Closes https://getsentry.atlassian.net/browse/RV-1591

![image](https://github.com/getsentry/sentry/assets/45607721/cb392ae0-3400-4db5-a2c7-e8a37ebc1457)
![image](https://github.com/getsentry/sentry/assets/45607721/49495e41-714c-4b6c-8478-dea5b8f5d2d6)

---
 .../account/accountNotificationFineTuning.tsx | 10 +++
 .../settings/account/notifications/fields.tsx |  1 +
 .../account/notifications/fields2.tsx         | 23 ++++++-
 .../notificationSettings.spec.tsx             | 23 +++++++
 .../notifications/notificationSettings.tsx    |  5 ++
 .../notificationSettingsByType.spec.tsx       | 16 +++++
 .../notificationSettingsByType.tsx            | 68 ++++++++++++++-----
 7 files changed, 127 insertions(+), 19 deletions(-)

diff --git a/static/app/views/settings/account/accountNotificationFineTuning.tsx b/static/app/views/settings/account/accountNotificationFineTuning.tsx
index 78460c1d9da323..204ddb746c7559 100644
--- a/static/app/views/settings/account/accountNotificationFineTuning.tsx
+++ b/static/app/views/settings/account/accountNotificationFineTuning.tsx
@@ -215,6 +215,16 @@ class AccountNotificationFineTuning extends DeprecatedAsyncView {
     const isProject = isGroupedByProject(fineTuneType) && organizations.length > 0;
     const field = ACCOUNT_NOTIFICATION_FIELDS[fineTuneType];
 
+    // TODO(isabella): once GA, remove this
+    if (
+      fineTuneType === 'quota' &&
+      organizations.some(org => org.features?.includes('spend-visibility-notifications'))
+    ) {
+      field.title = t('Spend Notifications');
+      field.description = t(
+        'Control the notifications you receive for organization spend.'
+      );
+    }
     const {title, description} = field;
     const [stateKey] = isProject ?
this.getEndpoints()[2] : []; diff --git a/static/app/views/settings/account/notifications/fields.tsx b/static/app/views/settings/account/notifications/fields.tsx index 7ca27c1edd6d7b..826f8856903175 100644 --- a/static/app/views/settings/account/notifications/fields.tsx +++ b/static/app/views/settings/account/notifications/fields.tsx @@ -66,6 +66,7 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record = { // No choices here because it's going to have dynamic content // Component will create choices, }, + // TODO(isabella): Once GA, replace the following with Spend Notifications quota: { title: t('Quota Notifications'), description: t( diff --git a/static/app/views/settings/account/notifications/fields2.tsx b/static/app/views/settings/account/notifications/fields2.tsx index 0ce87913eaa5e9..cd63d0bacfc355 100644 --- a/static/app/views/settings/account/notifications/fields2.tsx +++ b/static/app/views/settings/account/notifications/fields2.tsx @@ -76,7 +76,7 @@ export const NOTIFICATION_SETTING_FIELDS: Record = { ['always', t('On')], ['never', t('Off')], ], - help: t('Error, transaction, and attachment quota limits.'), + help: t('Error, transaction, replay, attachment, and cron monitor quota limits.'), }, reports: { name: 'reports', @@ -127,6 +127,7 @@ export const NOTIFICATION_SETTING_FIELDS: Record = { }, }; +// TODO(isabella): Once spend vis notifs are GA, remove this // partial field definition for quota sub-categories export const QUOTA_FIELDS = [ { @@ -192,7 +193,7 @@ export const QUOTA_FIELDS = [ name: 'quotaMonitorSeats', label: t('Cron Monitors'), help: tct( - 'Receive notifications about your cron monitors quotas. [learnMore:Learn more]', + 'Receive notifications about your cron monitor quotas. [learnMore:Learn more]', { learnMore: , } @@ -217,3 +218,21 @@ export const QUOTA_FIELDS = [ ] as const, }, ]; + +export const SPEND_FIELDS = [ + { + name: 'quotaWarnings', + label: t('Spend Notifications'), + help: tct( + 'Receive notifications when your spend crosses predefined or custom thresholds. 
[learnMore:Learn more]', + { + learnMore: , // TODO(isabella): replace with proper link + } + ), + choices: [ + ['always', t('On')], + ['never', t('Off')], + ] as const, + }, + ...QUOTA_FIELDS.slice(1), +]; diff --git a/static/app/views/settings/account/notifications/notificationSettings.spec.tsx b/static/app/views/settings/account/notifications/notificationSettings.spec.tsx index eff42511ea1874..d76b5a2033cfba 100644 --- a/static/app/views/settings/account/notifications/notificationSettings.spec.tsx +++ b/static/app/views/settings/account/notifications/notificationSettings.spec.tsx @@ -1,3 +1,5 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; + import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen} from 'sentry-test/reactTestingLibrary'; @@ -74,4 +76,25 @@ describe('NotificationSettings', function () { } expect(screen.getByText('Issue Alerts')).toBeInTheDocument(); }); + + it('renders spend section instead of quota section with feature flag', async function () { + const {routerContext, organization} = initializeOrg({ + organization: { + features: ['slack-overage-notifications', 'spend-visibility-notifications'], + }, + }); + + const organizationNoFlag = OrganizationFixture(); + organizationNoFlag.features.push('slack-overage-notifications'); + + renderMockRequests({}); + + render(, { + context: routerContext, + }); + + expect(await screen.findByText('Spend')).toBeInTheDocument(); + + expect(screen.queryByText('Quota')).not.toBeInTheDocument(); + }); }); diff --git a/static/app/views/settings/account/notifications/notificationSettings.tsx b/static/app/views/settings/account/notifications/notificationSettings.tsx index 6afa585339e926..4e14230b0d9239 100644 --- a/static/app/views/settings/account/notifications/notificationSettings.tsx +++ b/static/app/views/settings/account/notifications/notificationSettings.tsx @@ -45,7 +45,12 @@ function NotificationSettings({organizations}: NotificationSettingsProps) { }); const renderOneSetting = (type: string) => { + // TODO(isabella): Once GA, remove this const field = NOTIFICATION_SETTING_FIELDS[type]; + if (type === 'quota' && checkFeatureFlag('spend-visibility-notifications')) { + field.label = t('Spend'); + field.help = t('Notifications that help avoid surprise invoices.'); + } return (
diff --git a/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx b/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx index 9704b79e0763dd..1641c8c06ee2f2 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByType.spec.tsx @@ -259,4 +259,20 @@ describe('NotificationSettingsByType', function () { await selectEvent.select(multiSelect, ['Email']); expect(changeProvidersMock).toHaveBeenCalledTimes(1); }); + + it('renders spend notifications page instead of quota notifications with flag', async function () { + const organizationWithFlag = OrganizationFixture(); + organizationWithFlag.features.push('spend-visibility-notifications'); + const organizationNoFlag = OrganizationFixture(); + renderComponent({ + notificationType: 'quota', + organizations: [organizationWithFlag, organizationNoFlag], + }); + + expect(await screen.getAllByText('Spend Notifications').length).toEqual(2); + expect(screen.queryByText('Quota Notifications')).not.toBeInTheDocument(); + expect( + screen.getByText('Control the notifications you receive for organization spend.') + ).toBeInTheDocument(); + }); }); diff --git a/static/app/views/settings/account/notifications/notificationSettingsByType.tsx b/static/app/views/settings/account/notifications/notificationSettingsByType.tsx index 99e6b15ea1d9d8..afc78f5f083c97 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByType.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByType.tsx @@ -27,7 +27,7 @@ import type { } from './constants'; import {SUPPORTED_PROVIDERS} from './constants'; import {ACCOUNT_NOTIFICATION_FIELDS} from './fields'; -import {NOTIFICATION_SETTING_FIELDS, QUOTA_FIELDS} from './fields2'; +import {NOTIFICATION_SETTING_FIELDS, QUOTA_FIELDS, SPEND_FIELDS} from './fields2'; import NotificationSettingsByEntity from './notificationSettingsByEntity'; import type {Identity} from './types'; import UnlinkedAlert from './unlinkedAlert'; @@ -183,7 +183,7 @@ class NotificationSettingsByTypeV2 extends DeprecatedAsyncComponent ({ - ...field, - type: 'select' as const, - getData: data => { - return { - type: field.name, - scopeType: 'user', - scopeIdentifier: ConfigStore.get('user').id, - value: data[field.name], - }; - }, - })) - ); + if ( + organizations.some(organization => + organization.features?.includes('spend-visibility-notifications') + ) + ) { + fields.push( + ...SPEND_FIELDS.map(field => ({ + ...field, + type: 'select' as const, + getData: data => { + return { + type: field.name, + scopeType: 'user', + scopeIdentifier: ConfigStore.get('user').id, + value: data[field.name], + }; + }, + })) + ); + } else { + // TODO(isabella): Once GA, remove this case + fields.push( + ...QUOTA_FIELDS.map(field => ({ + ...field, + type: 'select' as const, + getData: data => { + return { + type: field.name, + scopeType: 'user', + scopeIdentifier: ConfigStore.get('user').id, + value: data[field.name], + }; + }, + })) + ); + } } else { const defaultField: Field = Object.assign( {}, @@ -342,10 +364,22 @@ class NotificationSettingsByTypeV2 extends DeprecatedAsyncComponent org.features?.includes('spend-visibility-notifications')) + ) { + notificationDetails.title = t('Spend Notifications'); + notificationDetails.description = t( + 'Control the notifications you receive for organization spend.' 
+      );
+    }
 
     const {title, description} = ACCOUNT_NOTIFICATION_FIELDS[notificationType];
+
     const entityType = isGroupedByProject(notificationType) ? 'project' : 'organization';
 
     return (

From 8686c9859424ebfa90799f3d983b25607383f152 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 8 May 2024 16:46:10 -0400
Subject: [PATCH 178/376] ref(crons): Normalize crons incident issues (#70481)

Prior to incidents we created issues for each type of failure (error,
timeout, missed). This is because only one failed check-in was needed to
create an issue.

With incidents you can configure how many failures are needed, meaning
there could be 2 missed, 1 timeout, and 1 error. This removes the various
issue occurrence types and replaces them with a single MonitorIncidentType.

---
 src/sentry/issues/grouptype.py                |  32 +--
 src/sentry/monitors/constants.py              |   3 -
 src/sentry/monitors/logic/mark_failed.py      |  54 +---
 .../slack/notifications/test_issue_alert.py   |   6 +-
 .../slack/test_message_builder.py             |   4 +-
 tests/sentry/issues/test_ingest.py            |   4 +-
 tests/sentry/mail/test_adapter.py             |  10 +-
 .../sentry/monitors/logic/test_mark_failed.py | 237 +-----------------
 tests/sentry/tasks/test_weekly_reports.py     |   6 +-
 9 files changed, 39 insertions(+), 317 deletions(-)

diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py
index 6c2c0658a6f809..ce0d22ee469403 100644
--- a/src/sentry/issues/grouptype.py
+++ b/src/sentry/issues/grouptype.py
@@ -245,10 +245,6 @@ class PerformanceGroupTypeDefaults:
     noise_config = NoiseConfig()
 
 
-class CronGroupTypeDefaults:
-    notification_config = NotificationConfig(context=[])
-
-
 class ReplayGroupTypeDefaults:
     notification_config = NotificationConfig(context=[])
 
@@ -516,36 +512,32 @@ class ProfileFunctionRegressionType(GroupType):
 
 
 @dataclass(frozen=True)
-class MonitorCheckInFailure(CronGroupTypeDefaults, GroupType):
+class MonitorIncidentType(GroupType):
     type_id = 4001
     slug = "monitor_check_in_failure"
-    description = "Monitor Check In Failed"
+    description = "Crons Monitor Failed"
     category = GroupCategory.CRON.value
     released = True
     creation_quota = Quota(3600, 60, 60_000)  # 60,000 per hour, sliding window of 60 seconds
    default_priority = PriorityLevel.HIGH
+    notification_config = NotificationConfig(context=[])
+
+
+# XXX(epurkhiser): We renamed this group type but we keep the alias since we
+# store group type in pickles
+MonitorCheckInFailure = MonitorIncidentType
 
 
 @dataclass(frozen=True)
-class MonitorCheckInTimeout(CronGroupTypeDefaults, GroupType):
+class MonitorCheckInTimeout(MonitorIncidentType):
+    # This is deprecated, only kept around for its type_id
     type_id = 4002
-    slug = "monitor_check_in_timeout"
-    description = "Monitor Check In Timeout"
-    category = GroupCategory.CRON.value
-    released = True
-    creation_quota = Quota(3600, 60, 60_000)  # 60,000 per hour, sliding window of 60 seconds
-    default_priority = PriorityLevel.HIGH
 
 
 @dataclass(frozen=True)
-class MonitorCheckInMissed(CronGroupTypeDefaults, GroupType):
+class MonitorCheckInMissed(MonitorIncidentType):
+    # This is deprecated, only kept around for its type_id
    type_id = 4003
-    slug = "monitor_check_in_missed"
-    description = "Monitor Check In Missed"
-    category = GroupCategory.CRON.value
-    released = True
-    creation_quota = Quota(3600, 60, 60_000)  # 60,000 per hour, sliding window of 60 seconds
-    default_priority = PriorityLevel.HIGH
 
 
 @dataclass(frozen=True)
diff --git a/src/sentry/monitors/constants.py b/src/sentry/monitors/constants.py
index ca6b15bb4a9b27..8f06d3589dff4a 100644
---
a/src/sentry/monitors/constants.py +++ b/src/sentry/monitors/constants.py @@ -7,9 +7,6 @@ # current limit is 28 days MAX_TIMEOUT = 40_320 -# Format to use in the issue subtitle for the missed check-in timestamp -SUBTITLE_DATETIME_FORMAT = "%b %d, %I:%M %p %Z" - # maximum value for incident + recovery thresholds to be set # affects the performance of recent check-ins query # lowering this may invalidate monitors + block check-ins diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index f6df630b628bfd..73b20e3e69d4e1 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -7,13 +7,8 @@ from django.db.models import Q from sentry import features -from sentry.issues.grouptype import ( - MonitorCheckInFailure, - MonitorCheckInMissed, - MonitorCheckInTimeout, -) +from sentry.issues.grouptype import MonitorIncidentType from sentry.models.organization import Organization -from sentry.monitors.constants import SUBTITLE_DATETIME_FORMAT, TIMEOUT from sentry.monitors.models import ( CheckInStatus, MonitorCheckIn, @@ -243,10 +238,8 @@ def create_issue_platform_occurrence( monitor_env = failed_checkin.monitor_environment current_timestamp = datetime.now(timezone.utc) - occurrence_data = get_occurrence_data(failed_checkin) - # Get last successful check-in to show in evidence display - last_successful_checkin_timestamp = "None" + last_successful_checkin_timestamp = "Never" last_successful_checkin = monitor_env.get_last_successful_checkin() if last_successful_checkin: last_successful_checkin_timestamp = last_successful_checkin.date_added.isoformat() @@ -257,11 +250,11 @@ def create_issue_platform_occurrence( project_id=monitor_env.monitor.project_id, event_id=uuid.uuid4().hex, fingerprint=[incident.grouphash], - type=occurrence_data["group_type"], + type=MonitorIncidentType, issue_title=f"Monitor failure: {monitor_env.monitor.name}", - subtitle=occurrence_data["subtitle"], + subtitle="Your monitor has reached its failure threshold.", evidence_display=[ - IssueEvidence(name="Failure reason", value=occurrence_data["reason"], important=True), + IssueEvidence(name="Failure reason", value="incident", important=True), IssueEvidence( name="Environment", value=monitor_env.get_environment().name, important=False ), @@ -272,9 +265,9 @@ def create_issue_platform_occurrence( ), ], evidence_data={}, - culprit=occurrence_data["reason"], + culprit="incident", detection_time=current_timestamp, - level=occurrence_data["level"], + level="error", assignee=monitor_env.monitor.owner_actor, ) @@ -324,36 +317,3 @@ def get_monitor_environment_context(monitor_environment: MonitorEnvironment): "status": monitor_environment.get_status_display(), "type": monitor_environment.monitor.get_type_display(), } - - -def get_occurrence_data(checkin: MonitorCheckIn): - if checkin.status == CheckInStatus.MISSED: - expected_time = ( - checkin.expected_time.astimezone(checkin.monitor.timezone).strftime( - SUBTITLE_DATETIME_FORMAT - ) - if checkin.expected_time - else "the expected time" - ) - return { - "group_type": MonitorCheckInMissed, - "level": "warning", - "reason": "missed_checkin", - "subtitle": f"No check-in reported on {expected_time}.", - } - - if checkin.status == CheckInStatus.TIMEOUT: - duration = (checkin.monitor.config or {}).get("max_runtime") or TIMEOUT - return { - "group_type": MonitorCheckInTimeout, - "level": "error", - "reason": "duration", - "subtitle": f"Check-in exceeded maximum duration of {duration} minutes.", - } - - return { 
- "group_type": MonitorCheckInFailure, - "level": "error", - "reason": "error", - "subtitle": "An error occurred during the latest check-in.", - } diff --git a/tests/sentry/integrations/slack/notifications/test_issue_alert.py b/tests/sentry/integrations/slack/notifications/test_issue_alert.py index ce1724e196f7d6..feba6bf1ffd3a9 100644 --- a/tests/sentry/integrations/slack/notifications/test_issue_alert.py +++ b/tests/sentry/integrations/slack/notifications/test_issue_alert.py @@ -12,7 +12,7 @@ from sentry.digests.backends.redis import RedisBackend from sentry.digests.notifications import event_to_record from sentry.integrations.slack.message_builder.issues import get_tags -from sentry.issues.grouptype import MonitorCheckInFailure +from sentry.issues.grouptype import MonitorIncidentType from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.models.identity import Identity, IdentityStatus from sentry.models.integrations.external_actor import ExternalActor @@ -157,7 +157,7 @@ def test_crons_issue_alert_user_block(self): IssueEvidence("Evidence 2", "Value 2", False), IssueEvidence("Evidence 3", "Value 3", False), ], - MonitorCheckInFailure, + MonitorIncidentType, datetime.now(UTC), "info", "/api/123", @@ -165,7 +165,7 @@ def test_crons_issue_alert_user_block(self): occurrence.save() event.occurrence = occurrence - event.group.type = MonitorCheckInFailure.type_id + event.group.type = MonitorIncidentType.type_id notification = AlertRuleNotification( Notification(event=event, rule=self.rule), ActionTargetType.MEMBER, self.user.id ) diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index cf207e84de7dad..5bc7170cab4d32 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -26,7 +26,7 @@ from sentry.issues.grouptype import ( ErrorGroupType, FeedbackGroup, - MonitorCheckInFailure, + MonitorIncidentType, PerformanceP95EndpointRegressionGroupType, ProfileFileIOGroupType, ) @@ -1321,7 +1321,7 @@ def setUp(self): type=PerformanceP95EndpointRegressionGroupType.type_id ) - self.cron_issue = self.create_group(type=MonitorCheckInFailure.type_id) + self.cron_issue = self.create_group(type=MonitorIncidentType.type_id) self.feedback_issue = self.create_group( type=FeedbackGroup.type_id, substatus=GroupSubStatus.NEW ) diff --git a/tests/sentry/issues/test_ingest.py b/tests/sentry/issues/test_ingest.py index d1600b7f4a01e9..cb12627d233684 100644 --- a/tests/sentry/issues/test_ingest.py +++ b/tests/sentry/issues/test_ingest.py @@ -14,7 +14,7 @@ GroupCategory, GroupType, GroupTypeRegistry, - MonitorCheckInFailure, + MonitorIncidentType, NoiseConfig, ) from sentry.issues.ingest import ( @@ -248,7 +248,7 @@ def test_existing_group_different_category(self) -> None: new_event = self.store_event(data={}, project_id=self.project.id) new_occurrence = self.build_occurrence( - fingerprint=["some-fingerprint"], type=MonitorCheckInFailure.type_id + fingerprint=["some-fingerprint"], type=MonitorIncidentType.type_id ) with mock.patch("sentry.issues.ingest.logger") as logger: assert save_issue_from_occurrence(new_occurrence, new_event, None) is None diff --git a/tests/sentry/mail/test_adapter.py b/tests/sentry/mail/test_adapter.py index 2e2d6d9e8d0226..94e3d21653528e 100644 --- a/tests/sentry/mail/test_adapter.py +++ b/tests/sentry/mail/test_adapter.py @@ -17,7 +17,7 @@ from sentry.api.serializers.models.userreport import 
UserReportWithGroupSerializer from sentry.digests.notifications import build_digest, event_to_record from sentry.event_manager import EventManager, get_event_type -from sentry.issues.grouptype import MonitorCheckInFailure +from sentry.issues.grouptype import MonitorIncidentType from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.mail import build_subject_prefix, mail_adapter from sentry.models.activity import Activity @@ -328,7 +328,7 @@ def test_simple_notification_generic(self): IssueEvidence("Evidence 2", "Value 2", False), IssueEvidence("Evidence 3", "Value 3", False), ], - MonitorCheckInFailure, + MonitorIncidentType, timezone.now(), "info", "/api/123", @@ -336,7 +336,7 @@ def test_simple_notification_generic(self): occurrence.save() event.occurrence = occurrence - event.group.type = MonitorCheckInFailure.type_id + event.group.type = MonitorIncidentType.type_id rule = Rule.objects.create(project=self.project, label="my rule") ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) @@ -384,7 +384,7 @@ def test_simple_notification_generic_no_evidence(self): "1234", {"Test": 123}, [], # no evidence - MonitorCheckInFailure, + MonitorIncidentType, timezone.now(), "info", "/api/123", @@ -392,7 +392,7 @@ def test_simple_notification_generic_no_evidence(self): occurrence.save() event.occurrence = occurrence - event.group.type = MonitorCheckInFailure.type_id + event.group.type = MonitorIncidentType.type_id rule = Rule.objects.create(project=self.project, label="my rule") ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True) diff --git a/tests/sentry/monitors/logic/test_mark_failed.py b/tests/sentry/monitors/logic/test_mark_failed.py index df8f44dcb9b601..2d13f9069f5ecb 100644 --- a/tests/sentry/monitors/logic/test_mark_failed.py +++ b/tests/sentry/monitors/logic/test_mark_failed.py @@ -5,15 +5,10 @@ from django.utils import timezone -from sentry.issues.grouptype import ( - MonitorCheckInFailure, - MonitorCheckInMissed, - MonitorCheckInTimeout, -) +from sentry.issues.grouptype import MonitorIncidentType from sentry.issues.ingest import process_occurrence_data from sentry.models.groupassignee import GroupAssignee from sentry.models.grouphash import GroupHash -from sentry.monitors.constants import SUBTITLE_DATETIME_FORMAT from sentry.monitors.logic.mark_failed import mark_failed from sentry.monitors.models import ( CheckInStatus, @@ -283,11 +278,11 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence "project_id": self.project.id, "fingerprint": [monitor_incidents[0].grouphash], "issue_title": f"Monitor failure: {monitor.name}", - "subtitle": "An error occurred during the latest check-in.", + "subtitle": "Your monitor has reached its failure threshold.", "resource_id": None, "evidence_data": {}, "evidence_display": [ - {"name": "Failure reason", "value": "error", "important": True}, + {"name": "Failure reason", "value": "incident", "important": True}, { "name": "Environment", "value": monitor_environment.get_environment().name, @@ -299,9 +294,9 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence "important": False, }, ], - "type": MonitorCheckInFailure.type_id, + "type": MonitorIncidentType.type_id, "level": "error", - "culprit": "error", + "culprit": "incident", }, ) == dict(occurrence) @@ -341,228 +336,6 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence }, ) == dict(event) - @with_feature("organizations:issue-platform") 
- @patch("sentry.issues.producer.produce_occurrence_to_kafka") - def test_mark_failed_with_timeout_reason_issue_platform(self, mock_produce_occurrence_to_kafka): - monitor = Monitor.objects.create( - name="test monitor", - organization_id=self.organization.id, - project_id=self.project.id, - type=MonitorType.CRON_JOB, - config={ - "schedule": [1, "month"], - "schedule_type": ScheduleType.INTERVAL, - "max_runtime": 10, - "checkin_margin": None, - }, - ) - monitor_environment = MonitorEnvironment.objects.create( - monitor=monitor, - environment_id=self.environment.id, - status=monitor.status, - ) - successful_check_in = MonitorCheckIn.objects.create( - monitor=monitor, - monitor_environment=monitor_environment, - project_id=self.project.id, - status=CheckInStatus.OK, - ) - last_checkin = timezone.now() - - failed_checkin = MonitorCheckIn.objects.create( - monitor=monitor, - monitor_environment=monitor_environment, - project_id=self.project.id, - status=CheckInStatus.TIMEOUT, - date_added=last_checkin, - duration=monitor.config.get("max_runtime"), - ) - assert mark_failed(failed_checkin, ts=failed_checkin.date_added) - - monitor_environment.refresh_from_db() - assert monitor_environment.status == MonitorStatus.ERROR - - monitor_incidents = MonitorIncident.objects.filter(monitor_environment=monitor_environment) - assert len(monitor_incidents) == 1 - - assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1 - - kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs - occurrence = kwargs["occurrence"] - event = kwargs["event_data"] - occurrence = occurrence.to_dict() - - assert dict( - occurrence, - **{ - "project_id": self.project.id, - "fingerprint": [monitor_incidents[0].grouphash], - "issue_title": f"Monitor failure: {monitor.name}", - "subtitle": "Check-in exceeded maximum duration of 10 minutes.", - "resource_id": None, - "evidence_data": {}, - "evidence_display": [ - {"name": "Failure reason", "value": "duration", "important": True}, - { - "name": "Environment", - "value": monitor_environment.get_environment().name, - "important": False, - }, - { - "name": "Last successful check-in", - "value": successful_check_in.date_added.isoformat(), - "important": False, - }, - ], - "type": MonitorCheckInTimeout.type_id, - "level": "error", - "culprit": "duration", - }, - ) == dict(occurrence) - - assert dict( - event, - **{ - "contexts": { - "monitor": { - "status": "error", - "type": "cron_job", - "config": { - "schedule_type": 2, - "schedule": [1, "month"], - "max_runtime": 10, - "checkin_margin": None, - }, - "id": str(monitor.guid), - "name": monitor.name, - "slug": str(monitor.slug), - } - }, - "environment": monitor_environment.get_environment().name, - "event_id": occurrence["event_id"], - "fingerprint": [monitor_incidents[0].grouphash], - "platform": "other", - "project_id": monitor.project_id, - "sdk": None, - "tags": { - "monitor.id": str(monitor.guid), - "monitor.slug": str(monitor.slug), - "monitor.incident": str(monitor_incidents[0].id), - }, - }, - ) == dict(event) - - @with_feature("organizations:issue-platform") - @patch("sentry.issues.producer.produce_occurrence_to_kafka") - def test_mark_failed_with_missed_reason_issue_platform(self, mock_produce_occurrence_to_kafka): - last_checkin = timezone.now().replace(second=0, microsecond=0) - next_checkin = last_checkin + timedelta(hours=1) - - monitor = Monitor.objects.create( - name="test monitor", - organization_id=self.organization.id, - project_id=self.project.id, - type=MonitorType.CRON_JOB, - config={ - "schedule": [1, 
"hour"], - "schedule_type": ScheduleType.INTERVAL, - "max_runtime": None, - "checkin_margin": None, - }, - ) - monitor_environment = MonitorEnvironment.objects.create( - monitor=monitor, - environment_id=self.environment.id, - last_checkin=last_checkin, - next_checkin=next_checkin, - next_checkin_latest=next_checkin + timedelta(minutes=1), - status=monitor.status, - ) - - failed_checkin = MonitorCheckIn.objects.create( - monitor=monitor, - monitor_environment=monitor_environment, - project_id=self.project.id, - status=CheckInStatus.MISSED, - expected_time=next_checkin, - date_added=next_checkin + timedelta(minutes=1), - ) - assert mark_failed(failed_checkin, ts=failed_checkin.date_added) - - monitor.refresh_from_db() - monitor_environment.refresh_from_db() - assert monitor_environment.status == MonitorStatus.ERROR - - monitor_incidents = MonitorIncident.objects.filter(monitor_environment=monitor_environment) - assert len(monitor_incidents) == 1 - - assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1 - - kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs - occurrence = kwargs["occurrence"] - event = kwargs["event_data"] - occurrence = occurrence.to_dict() - - assert dict( - occurrence, - **{ - "project_id": self.project.id, - "fingerprint": [monitor_incidents[0].grouphash], - "issue_title": f"Monitor failure: {monitor.name}", - "subtitle": f"No check-in reported on {next_checkin.strftime(SUBTITLE_DATETIME_FORMAT)}.", - "resource_id": None, - "evidence_data": {}, - "evidence_display": [ - {"name": "Failure reason", "value": "missed_checkin", "important": True}, - { - "name": "Environment", - "value": monitor_environment.get_environment().name, - "important": False, - }, - { - "name": "Last successful check-in", - "value": "None", - "important": False, - }, - ], - "type": MonitorCheckInMissed.type_id, - "level": "warning", - "culprit": "missed_checkin", - }, - ) == dict(occurrence) - - assert dict( - event, - **{ - "contexts": { - "monitor": { - "status": "error", - "type": "cron_job", - "config": { - "schedule_type": 2, - "schedule": [1, "hour"], - "max_runtime": None, - "checkin_margin": None, - }, - "id": str(monitor.guid), - "name": monitor.name, - "slug": str(monitor.slug), - } - }, - "environment": monitor_environment.get_environment().name, - "event_id": occurrence["event_id"], - "fingerprint": [monitor_incidents[0].grouphash], - "platform": "other", - "project_id": monitor.project_id, - "sdk": None, - "tags": { - "monitor.id": str(monitor.guid), - "monitor.slug": str(monitor.slug), - "monitor.incident": str(monitor_incidents[0].id), - }, - }, - ) == dict(event) - @with_feature("organizations:issue-platform") @patch("sentry.issues.producer.produce_occurrence_to_kafka") def test_mark_failed_muted(self, mock_produce_occurrence_to_kafka): diff --git a/tests/sentry/tasks/test_weekly_reports.py b/tests/sentry/tasks/test_weekly_reports.py index 83621236bd4163..971d100129a18a 100644 --- a/tests/sentry/tasks/test_weekly_reports.py +++ b/tests/sentry/tasks/test_weekly_reports.py @@ -11,7 +11,7 @@ from django.utils import timezone from sentry.constants import DataCategory -from sentry.issues.grouptype import MonitorCheckInFailure, PerformanceNPlusOneGroupType +from sentry.issues.grouptype import MonitorIncidentType, PerformanceNPlusOneGroupType from sentry.models.group import GroupStatus from sentry.models.grouphistory import GroupHistoryStatus from sentry.models.notificationsettingoption import NotificationSettingOption @@ -381,7 +381,7 @@ def test_message_builder_simple(self, 
message_builder, record): self.create_performance_issue(fingerprint=f"{PerformanceNPlusOneGroupType.type_id}-group2") # store a crons issue just to make sure it's not counted in key_performance_issues - self.create_group(type=MonitorCheckInFailure.type_id) + self.create_group(type=MonitorIncidentType.type_id) prepare_organization_report(self.now.timestamp(), ONE_DAY * 7, self.organization.id) for call_args in message_builder.call_args_list: @@ -458,7 +458,7 @@ def test_message_builder_filter_resolved(self, message_builder, record): self.create_performance_issue(fingerprint=f"{PerformanceNPlusOneGroupType.type_id}-group2") # store a crons issue just to make sure it's not counted in key_performance_issues - self.create_group(type=MonitorCheckInFailure.type_id) + self.create_group(type=MonitorIncidentType.type_id) prepare_organization_report(self.now.timestamp(), ONE_DAY * 7, self.organization.id) for call_args in message_builder.call_args_list: From 9fc6f1dac4b69aa3b9daf752744c9834d242d92e Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 8 May 2024 16:55:40 -0400 Subject: [PATCH 179/376] ref: clean up some dead code in sentry.utils.services (#70534) --- src/sentry/utils/services.py | 146 ----------------------------------- 1 file changed, 146 deletions(-) diff --git a/src/sentry/utils/services.py b/src/sentry/utils/services.py index 871a8b6f50813a..6659a612a905af 100644 --- a/src/sentry/utils/services.py +++ b/src/sentry/utils/services.py @@ -7,7 +7,6 @@ import logging import threading from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence -from concurrent import futures from typing import TYPE_CHECKING, Any, TypeVar from django.utils.functional import LazyObject, empty @@ -22,8 +21,6 @@ logger = logging.getLogger(__name__) -STATUS_SUCCESS = "success" - class Service: __all__: Iterable[str] = () @@ -529,146 +526,3 @@ def selector(context: Context, method: str, callargs: Mapping[str, Any]) -> list return [move_from, move_to] return selector - - -def get_invalid_timing_reason(timing: tuple[float | None, float | None]) -> str: - start, stop = timing - if start is None and stop is None: - return "no_data" - elif start is None: - return "no_start" - elif stop is None: - return "no_stop" - else: - raise Exception("unexpected value for timing") - - -def get_future_status(future: TimedFuture) -> str: - try: - future.result(timeout=0) - return STATUS_SUCCESS - except futures.CancelledError: - return "cancelled" # neither succeeded nor failed - except futures.TimeoutError: - raise # tried to check before ready - except Exception: - return "failure" - - -def callback_timing( - context: Context, - method_name: str, - callargs: Mapping[str, Any], - backend_names: Sequence[str], - results: Sequence[TimedFuture], - metric_name: str, - result_comparator: Callable[[str, str, str, Any, Any], Mapping[str, str]] | None = None, - sample_rate: float | None = None, -) -> None: - """ - Collects timing stats on results returned to the callback method of a `ServiceDelegator`. Either - partial this and pass it directly as the `callback_func` or - :param metric_name: Prefix to use when writing these timing metrics to Datadog - :param method_name: method_name passed to callback - :param backend_names: backend_names passed to callback - :param results: results passed to callback - :param result_comparator: An optional comparator to compare the primary result to each secondary - result. 
Should return a dict represents the result of the comparison. This will be merged into - tags to be stored in the metrics backend. - :return: - """ - if not len(backend_names) > 1: - return - primary_backend_name = backend_names[0] - primary_future = results[0] - primary_status = get_future_status(primary_future) - primary_timing = primary_future.get_timing() - - # If either endpoint of the timing data is not set, just ignore this call. - # This really shouldn't happen on the primary backend, but playing it safe - # here out of an abundance of caution. - if not all(primary_timing): - logger.warning( - "Received timing with unexpected endpoint: %r, primary_backend_name: %r, future_status: %r", - primary_timing, - primary_backend_name, - primary_status, - ) - return - - primary_duration_ms = (primary_timing[1] - primary_timing[0]) * 1000 - - metric_kwargs = {} - if sample_rate is not None: - metric_kwargs["sample_rate"] = sample_rate - - metrics.timing( - f"{metric_name}.timing_ms", - primary_duration_ms, - tags={ - "method": method_name, - "backend": primary_backend_name, - "status": primary_status, - "primary": "true", - }, - **metric_kwargs, # type: ignore[arg-type] - ) - - for i, secondary_backend_name in enumerate(backend_names[1:], 1): - secondary_future = results[i] - secondary_timing = secondary_future.get_timing() - secondary_status = get_future_status(secondary_future) - - tags = { - "method": method_name, - "primary_backend": primary_backend_name, - "primary_status": primary_status, - "secondary_backend": secondary_backend_name, - "secondary_status": secondary_status, - } - - if result_comparator: - comparator_result = result_comparator( - method_name, - primary_status, - secondary_status, - primary_future.result(), - secondary_future.result(), - ) - tags.update(comparator_result) - - # If either endpoint of the timing data is not set, this means - # something weird happened (more than likely a cancellation.) 
-        if not all(secondary_timing):
-            metrics.incr(
-                f"{metric_name}.timing_invalid",
-                tags={**tags, "reason": get_invalid_timing_reason(secondary_timing)},
-            )
-        else:
-            secondary_duration_ms = (secondary_timing[1] - secondary_timing[0]) * 1000
-            metrics.distribution(
-                f"{metric_name}.timing_ms",
-                secondary_duration_ms,
-                tags={
-                    "method": method_name,
-                    "backend": secondary_backend_name,
-                    "status": secondary_status,
-                    "primary": "false",
-                },
-                unit="millisecond",
-                **metric_kwargs,  # type: ignore[arg-type]
-            )
-            metrics.distribution(
-                f"{metric_name}.timing_delta_ms",
-                secondary_duration_ms - primary_duration_ms,
-                tags=tags,
-                unit="millisecond",
-                **metric_kwargs,  # type: ignore[arg-type]
-            )
-            metrics.distribution(
-                f"{metric_name}.timing_relative_delta",
-                secondary_duration_ms / primary_duration_ms,
-                tags=tags,
-                unit="millisecond",
-                **metric_kwargs,  # type: ignore[arg-type]
-            )

From 999c143968d22e00c9900095e2c8fe26c40d36c6 Mon Sep 17 00:00:00 2001
From: William Mak
Date: Wed, 8 May 2024 17:12:11 -0400
Subject: [PATCH 180/376] fix(metrics): Don't allow filtering by org id
 (#70537)

- Prevent queries that filter by org_id on the metrics dataset
- This didn't expose any additional data, because project_id is still a
  required param

---
 src/sentry/search/events/builder/metrics.py  |  2 ++
 .../endpoints/test_organization_events_mep.py | 19 +++++++++++++++++++
 2 files changed, 21 insertions(+)

diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py
index 38fe3f369e9554..61b3f54513ac0b 100644
--- a/src/sentry/search/events/builder/metrics.py
+++ b/src/sentry/search/events/builder/metrics.py
@@ -678,6 +678,8 @@ def default_filter_converter(self, search_filter: SearchFilter) -> WhereType | None:
                 1,
             )
 
+        if name in ["organization_id", "org_id"]:
+            raise IncompatibleMetricsQuery(f"{name} isn't compatible with metrics queries")
         lhs = self.resolve_column(name)
         # If this is an aliasedexpression, we don't need the alias here, just the expression
         if isinstance(lhs, AliasedExpression):
diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py
index e949a2eb4368ed..30e45c0226c5e6 100644
--- a/tests/snuba/api/endpoints/test_organization_events_mep.py
+++ b/tests/snuba/api/endpoints/test_organization_events_mep.py
@@ -3241,6 +3241,25 @@ def test_on_demand_with_mep(self):
         assert data[0]["faketag"] == "foo"
         assert not meta["isMetricsData"]
 
+    def test_filtering_by_org_id_is_not_compatible(self):
+        """Filtering by org_id should be rejected as incompatible with metrics queries"""
+        self.store_transaction_metric(
+            1,
+            tags={"transaction": "foo_transaction"},
+            timestamp=self.min_ago,
+        )
+
+        response = self.do_request(
+            {
+                "field": ["title", "p50()", "count()"],
+                "query": "event.type:transaction organization_id:2",
+                "dataset": "metrics",
+                "project": self.project.id,
+                "per_page": 50,
+            }
+        )
+        assert response.status_code == 400, response.content
+
 
 class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithOnDemandMetrics(
     MetricsEnhancedPerformanceTestCase

From 3cc51f23c0e3b3ff4119c191ef1d8f8c02155d32 Mon Sep 17 00:00:00 2001
From: Evan Hicks
Date: Wed, 8 May 2024 17:13:35 -0400
Subject: [PATCH 181/376] fix: Tag tests with missing db fixture (#70541)

These tests fail when run on their own (in the Snuba CI) with errors
about the missing db fixture.
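
As a sketch of the pattern the diff below applies (the test body here is a
hypothetical placeholder, not one of the affected tests): `django_db_all`
marks a test so pytest-django grants database access even when the file is
collected on its own.

    from sentry.event_manager import EventManager
    from sentry.testutils.pytest.fixtures import django_db_all


    # Without the marker, pytest-django refuses database access when this
    # file runs in isolation, and the shared normalization paths that touch
    # the db fail with a "database access not allowed" style error.
    @django_db_all
    def test_normalize_keeps_platform():
        manager = EventManager({"platform": "javascript"})
        manager.normalize()
        assert manager.get_data()["platform"] == "javascript"

The affected files below gain exactly this marker plus the corresponding
import.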
--- tests/sentry/event_manager/interfaces/test_breadcrumbs.py | 3 +++ tests/sentry/event_manager/interfaces/test_frame.py | 3 +++ tests/sentry/event_manager/interfaces/test_http.py | 7 +++++++ .../event_manager/interfaces/test_single_exception.py | 3 +++ tests/sentry/event_manager/test_ensure_has_ip.py | 2 ++ 5 files changed, 18 insertions(+) diff --git a/tests/sentry/event_manager/interfaces/test_breadcrumbs.py b/tests/sentry/event_manager/interfaces/test_breadcrumbs.py index 26b84911b88fc2..a9c94a913a443b 100644 --- a/tests/sentry/event_manager/interfaces/test_breadcrumbs.py +++ b/tests/sentry/event_manager/interfaces/test_breadcrumbs.py @@ -2,6 +2,7 @@ from sentry import eventstore from sentry.event_manager import EventManager +from sentry.testutils.pytest.fixtures import django_db_all @pytest.fixture @@ -33,6 +34,7 @@ def test_simple(make_breadcrumbs_snapshot): ) +@django_db_all @pytest.mark.parametrize( "input", [ @@ -48,6 +50,7 @@ def test_null_values(make_breadcrumbs_snapshot, input): make_breadcrumbs_snapshot(input) +@django_db_all def test_non_string_keys(make_breadcrumbs_snapshot): make_breadcrumbs_snapshot( dict( diff --git a/tests/sentry/event_manager/interfaces/test_frame.py b/tests/sentry/event_manager/interfaces/test_frame.py index 2ede39db1fb42c..c32fe2a166924f 100644 --- a/tests/sentry/event_manager/interfaces/test_frame.py +++ b/tests/sentry/event_manager/interfaces/test_frame.py @@ -2,6 +2,7 @@ from sentry import eventstore from sentry.event_manager import EventManager +from sentry.testutils.pytest.fixtures import django_db_all @pytest.fixture @@ -17,6 +18,7 @@ def inner(data): return inner +@django_db_all @pytest.mark.parametrize( "input", [ @@ -31,6 +33,7 @@ def test_bad_input(make_frames_snapshot, input): make_frames_snapshot(input) +@django_db_all @pytest.mark.parametrize( "x", [float("inf"), float("-inf"), float("nan")], ids=["inf", "neginf", "nan"] ) diff --git a/tests/sentry/event_manager/interfaces/test_http.py b/tests/sentry/event_manager/interfaces/test_http.py index 84c64753ed0170..ed874a08d22687 100644 --- a/tests/sentry/event_manager/interfaces/test_http.py +++ b/tests/sentry/event_manager/interfaces/test_http.py @@ -2,6 +2,7 @@ from sentry import eventstore from sentry.event_manager import EventManager +from sentry.testutils.pytest.fixtures import django_db_all @pytest.fixture @@ -22,6 +23,7 @@ def test_basic(make_http_snapshot): make_http_snapshot(dict(url="http://example.com")) +@django_db_all def test_full(make_http_snapshot): make_http_snapshot( dict( @@ -50,6 +52,7 @@ def test_data_as_dict(make_http_snapshot): make_http_snapshot(dict(url="http://example.com", data={"foo": "bar"})) +@django_db_all def test_urlencoded_data(make_http_snapshot): make_http_snapshot( dict( @@ -78,6 +81,7 @@ def test_infer_json_content_type(make_http_snapshot): make_http_snapshot(dict(url="http://example.com", data='{"foo":"bar"}')) +@django_db_all def test_cookies_as_string(make_http_snapshot): make_http_snapshot(dict(url="http://example.com", cookies="a=b;c=d")) make_http_snapshot(dict(url="http://example.com", cookies="a=b;c=d")) @@ -99,6 +103,7 @@ def test_query_string_and_fragment_as_params(make_http_snapshot): ) +@django_db_all def test_query_string_and_fragment_in_url(make_http_snapshot): make_http_snapshot(dict(url="http://example.com?foo\ufffd=bar#fragment\u2026")) @@ -115,10 +120,12 @@ def test_invalid_method(make_http_snapshot): make_http_snapshot(dict(url="http://example.com", method="1234")) +@django_db_all def test_invalid_method2(make_http_snapshot): 
     make_http_snapshot(dict(url="http://example.com", method="A" * 33))
 
 
+@django_db_all
 def test_invalid_method3(make_http_snapshot):
     make_http_snapshot(dict(url="http://example.com", method="A"))
 
diff --git a/tests/sentry/event_manager/interfaces/test_single_exception.py b/tests/sentry/event_manager/interfaces/test_single_exception.py
index 090bd3cb0d6a77..3acd3101f416d1 100644
--- a/tests/sentry/event_manager/interfaces/test_single_exception.py
+++ b/tests/sentry/event_manager/interfaces/test_single_exception.py
@@ -2,6 +2,7 @@
 
 from sentry import eventstore
 from sentry.event_manager import EventManager
+from sentry.testutils.pytest.fixtures import django_db_all
 
 
 @pytest.fixture
@@ -22,6 +23,7 @@ def inner(data):
     return inner
 
 
+@django_db_all
 def test_basic(make_single_exception_snapshot):
     make_single_exception_snapshot(dict(type="ValueError", value="hello world", module="foo.bar"))
 
@@ -38,6 +40,7 @@ def test_coerces_object_value_to_string(make_single_exception_snapshot):
     make_single_exception_snapshot({"type": "ValueError", "value": {"unauthorized": True}})
 
 
+@django_db_all
 def test_handles_type_in_value(make_single_exception_snapshot):
     make_single_exception_snapshot(dict(value="ValueError: unauthorized"))
 
diff --git a/tests/sentry/event_manager/test_ensure_has_ip.py b/tests/sentry/event_manager/test_ensure_has_ip.py
index 8a92672be244d2..0d1023fa0d41b7 100644
--- a/tests/sentry/event_manager/test_ensure_has_ip.py
+++ b/tests/sentry/event_manager/test_ensure_has_ip.py
@@ -1,4 +1,5 @@
 from sentry.event_manager import EventManager
+from sentry.testutils.pytest.fixtures import django_db_all
 
 
 def validate_and_normalize(report, client_ip=None):
@@ -29,6 +30,7 @@ def test_with_user_auto_ip():
     assert out["user"]["ip_address"] == "127.0.0.1"
 
 
+@django_db_all
 def test_without_ip_values():
     inp = {
         "platform": "javascript",

From 3ae84e05830e90436404908a1f215b6f8769c5ef Mon Sep 17 00:00:00 2001
From: Raj Joshi
Date: Wed, 8 May 2024 14:16:03 -0700
Subject: [PATCH 182/376] chore(api): Temporarily Update Allowlist (#70542)

In my mega [pr](https://github.com/getsentry/sentry/pull/70081), I updated
the allowlist straight to _id_or_slug, but now that I am breaking that PR
up, it is very annoying to update the allowlist entries individually. So I
am mass updating them now and keeping two versions of each path, one with
`organization_slug` and one with `organization_id_or_slug`. I'll delete
the duplicates once the rollout is complete.
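
To illustrate why each path is kept in both spellings (a hypothetical
miniature, not code from this PR; the real checks live in the apidocs
tooling and may differ in detail): the allowlists are consulted by
url-pattern string, so while endpoints migrate from `{organization_slug}`
to `{organization_id_or_slug}` both entries must be present for the check
to keep passing.

    # Hypothetical miniature of the duplicated allowlist.
    API_OWNERSHIP_ALLOWLIST = {
        # old spelling, registered by endpoints that have not migrated yet
        "/api/0/organizations/{organization_slug}/repos/",
        # new spelling, registered by endpoints that already have
        "/api/0/organizations/{organization_id_or_slug}/repos/",
    }


    def is_allowlisted(url_pattern: str) -> bool:
        # Exact string membership: renaming a placeholder changes the key,
        # so one entry per spelling is needed until the rollout finishes.
        return url_pattern in API_OWNERSHIP_ALLOWLIST

Once every endpoint registers the `organization_id_or_slug` form, the
`organization_slug` entries become dead keys, which is the cleanup
promised above.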
--- .../api_ownership_allowlist_dont_modify.py | 318 ++++++ ...pi_publish_status_allowlist_dont_modify.py | 949 ++++++++++++++++++ 2 files changed, 1267 insertions(+) diff --git a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py index 86c9d5fefb329e..b5e847ab38d1d3 100644 --- a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py @@ -320,3 +320,321 @@ "/api/0/projects/{organization_slug}/{project_id_or_slug}/filters/", "/api/0/teams/{organization_slug}/{team_id_or_slug}/alerts-triggered/", ] + + +API_OWNERSHIP_ALLOWLIST_DONT_MODIFY += [ + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rules/{alert_rule_id}/", + "/api/0/organizations/{organization_id_or_slug}/relay_usage/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/owners/", + "/api/0/organizations/{organization_id_or_slug}/data-export/{data_export_id}/", + "/api/0/users/{user_id}/social-identities/{identity_id}/", + "/api/0/users/{user_id}/identities/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/external-teams/{external_team_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/artifact-lookup/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/environments/{environment}/", + "/extensions/github-enterprise/webhook/", + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/comments/{activity_id}/", + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/recording-segments/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/", + "/api/0/users/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/alerts-triggered-index/", + "/api/0/users/{user_id}/notification-settings/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rules/{rule_id}/snooze/", + "/api/0/users/{user_id}/password/", + "/extensions/jira/search/{organization_id_or_slug}/{integration_id}/", + "/api/0/organizations/{organization_id_or_slug}/external-users/", + "/api/0/accept-invite/{member_id}/{token}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/create-sample-transaction/", + "/api/0/organizations/{organization_id_or_slug}/members/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tombstones/{tombstone_id}/", + "/api/0/sentry-app-installations/{uuid}/external-requests/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/commits/", + "/api/0/organizations/{organization_id_or_slug}/config/integrations/", + "/api/0/organizations/{organization_id_or_slug}/data-export/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/commits/", + "/extensions/vercel/delete/", + "/api/0/userroles/{role_name}/", + "/api/0/organizations/{organization_id_or_slug}/alert-rules/", + "/api/0/organizations/{organization_id_or_slug}/onboarding-tasks/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tombstones/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/features/", + "/api/0/organizations/{organization_id_or_slug}/monitors/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/filters/{filter_id}/", + "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/unreleased-commits/", + 
"/api/0/sentry-apps/{sentry_app_id_or_slug}/api-tokens/", + "/api/0/internal/quotas/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/stats/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/teams/", + "/extensions/bitbucket/uninstalled/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/files/{file_id}/", + "/api/0/sentry-app-installations/{uuid}/authorizations/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/combined-rules/", + "/api/0/internal/stats/", + "/api/0/organizations/{organization_id_or_slug}/shortids/{short_id}/", + "/api/0/organizations/{organization_id_or_slug}/missing-members/", + "/api/0/organizations/{organization_id_or_slug}/alert-rules/{alert_rule_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/keys/{key_id}/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/issues/old/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/clicks/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rule-task/{task_uuid}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/recording-segments/{segment_id}/", + "/api/0/organizations/{organization_id_or_slug}/monitors/{monitor_slug}/", + "/api/0/organizations/{organization_id_or_slug}/monitors/{monitor_id_or_slug}/", + "/api/0/organizations/{organization_id_or_slug}/derive-code-mappings/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/unresolved-issue-age/", + "/api/0/organizations/{organization_id_or_slug}/alert-rules/available-actions/", + "/api/0/organizations/{organization_id_or_slug}/minimal-projects/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/token/", + "/api/0/wizard/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/processingissues/", + "/api/0/sentry-app-installations/{uuid}/external-issues/{external_issue_id}/", + "/extensions/github/webhook/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/{key}/values/", + "/api/0/users/{user_id}/avatar/", + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/issues/", + "/api/0/organizations/{organization_id_or_slug}/sentry-apps/", + "/api/0/organizations/{organization_id_or_slug}/repos/{repo_id}/commits/", + "/api/0/organizations/{organization_id_or_slug}/pinned-searches/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/create-sample/", + "/api/0/organizations/{organization_id_or_slug}/slugs/", + "/api/0/organizations/{organization_id_or_slug}/sentry-app-installations/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/artifact-bundles/{bundle_id}/files/", + "/api/0/organizations/{organization_id_or_slug}/projects/", + "/api/0/doc-integrations/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/members/", + "/api/0/organizations/{organization_id_or_slug}/code-mappings/{config_id}/", + "/api/0/users/{user_id}/organization-integrations/", + "/api/0/users/{user_id}/emails/confirm/", + "/api/0/sentry-apps-stats/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/teams/{team_id_or_slug}/", + "/api/0/monitors/{monitor_id_or_slug}/checkins/", + "/api/0/users/{user_id}/social-identities/", + "/api/0/organizations/{organization_id_or_slug}/discover/saved/", + "/api/0/organizations/{organization_id_or_slug}/integration-requests/", + "/extensions/vsts/issue-updated/", + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/ai-fix-suggest/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/artifact-bundles/", + "/api/0/organizations/{organization_id_or_slug}/broadcasts/", + "/api/0/organizations/{organization_id_or_slug}/users/{user_id}/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/time-to-resolution/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/reprocessing/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/processingissues/discard/", + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/subscriptions/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/transfer/", + "/oauth/userinfo/", + "/api/0/organizations/{organization_id_or_slug}/sentry-app-components/", + "/api/0/users/{user_id}/identities/{identity_id}/", + "/api/0/monitors/{monitor_id_or_slug}/checkins/{checkin_id}/", + "/api/0/organizations/{organization_id_or_slug}/invite-requests/{member_id}/", + "/api/0/organizations/{organization_id_or_slug}/users/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/reprocessable/", + "/api/0/users/{user_id}/organizations/", + "/api/0/organizations/{organization_id_or_slug}/notifications/actions/", + "/api/0/sentry-app-installations/{uuid}/external-issue-actions/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/repo-path-parsing/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rules/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/{rule_id}/snooze/", + "/api/0/organizations/{organization_id_or_slug}/user-feedback/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/", + "/api/0/organizations/{organization_id_or_slug}/codeowners-associations/", + "/api/0/organizations/{organization_id_or_slug}/monitors-stats/", + "/api/0/organizations/{organization_id_or_slug}/searches/{search_id}/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/api-tokens/{api_token}/", + "/api/0/organizations/{organization_id_or_slug}/incidents/", + "/api/0/builtin-symbol-sources/", + "/api/0/organizations/{organization_id_or_slug}/builtin-symbol-sources/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/assemble/", + "/api/0/users/{user_id}/notifications/", + "/extensions/bitbucket/organizations/{organization_id}/webhook/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/external-teams/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/transaction-threshold/configure/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/meta/", + "/extensions/jira/installed/", + "/extensions/bitbucket/search/{organization_id_or_slug}/{integration_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/ownership/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/stats/", + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/seen/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/performance-issues/configure/", + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/", + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/json/", + "/api/0/broadcasts/", + "/api/0/organizations/{organization_id_or_slug}/access-requests/{request_id}/", + "/api/0/organizations/{organization_id_or_slug}/combined-rules/", + "/api/0/assistant/", + "/api/0/organizations/{organization_id_or_slug}/code-mappings/{config_id}/codeowners/", + "/api/0/organizations/{organization_id_or_slug}/join-request/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/configuration/", + "/api/0/users/{user_id}/user-identities/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/files/", + "/api/0/organizations/{organization_id_or_slug}/notifications/available-actions/", + "/api/0/sentry-app-installations/{uuid}/external-issues/", + "/api/0/internal/packages/", + "/api/0/users/{user_id}/notifications/{notification_type}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/users/{user_hash}/", + "/api/0/organizations/{organization_id_or_slug}/functions/{function_id_or_slug}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/{var}/stats/", + "/api/0/internal/queue/tasks/", + "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/", + "/api/0/users/{user_id}/user-identities/{category}/{identity_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/stats/", + "/extensions/bitbucket/descriptor/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/deploys/", + "/api/0/organizations/{organization_id_or_slug}/integrations/", + "/api/0/issue-occurrence/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/avatar/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/members/", + "/extensions/vsts/search/{organization_id_or_slug}/{integration_id}/", + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/comments/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/unknown/", + "/extensions/github/search/{organization_id_or_slug}/{integration_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/docs/{platform}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/associate/", + "/api/0/organizations/{organization_id_or_slug}/access-requests/", + "/api/0/", + "/extensions/slack/event/", + "/extensions/jira/descriptor/", + "/api/0/organizations/{organization_id_or_slug}/avatar/", + "/api/0/internal/health/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/stats/", + "/api/0/broadcasts/{broadcast_id}/", + "/extensions/jira/issue-updated/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/codeowners/{codeowners_id}/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/resolved/", + "/api/0/organizations/{organization_id_or_slug}/notifications/actions/{action_id}/", + "/api/0/organizations/{organization_id_or_slug}/shared/{var}/{share_id}/", + "/api/0/organizations/{organization_id_or_slug}/functions/", + "/api/0/organizations/{organization_id_or_slug}/replays/", + "/api/0/organizations/{organization_id_or_slug}/monitors/{monitor_id_or_slug}/stats/", + "/extensions/gitlab/webhook/", + "/api/0/internal/mail/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/release-count/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/projects/", + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/keys/{key_id}/stats/", + "/api/0/doc-integrations/{doc_integration_id_or_slug}/avatar/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/grouping-configs/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/publish-request/", + "/api/0/projects/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/grouping-info/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/components/", + "/api/0/organizations/{organization_id_or_slug}/processingissues/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/notification-settings/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/platforms/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/user-stats/", + "/api/0/accept-invite/{organization_id_or_slug}/{member_id}/{token}/", + "/api/0/internal/beacon/", + "/api/0/organizations/{organization_id_or_slug}/eventids/{event_id}/", + "/api/0/organizations/{organization_id_or_slug}/releases/stats/", + "/extensions/jira-server/issue-updated/{token}/", + "/extensions/discord/interactions/", + "/api/0/organizations/{organization_id_or_slug}/user-teams/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/resolved/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/apple-crash-report", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/files/{file_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/plugins/{plugin_id}/", + "/extensions/jira-server/search/{organization_id_or_slug}/{integration_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/committers/", + "/extensions/msteams/webhook/", + "/api/0/organizations/{organization_id_or_slug}/discover/saved/{query_id}/", + "/api/0/internal/project-config/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/proguard-artifact-releases", + "/api/0/organizations/{organization_id_or_slug}/projects-count/", + "/api/0/organizations/{organization_id_or_slug}/repos/{repo_id}/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/issue-breakdown/", + "/api/0/organizations/{organization_id_or_slug}/sent-first-event/", + "/api/0/organizations/{organization_id_or_slug}/environments/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/keys/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/users/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/artifact-bundles/{bundle_id}/files/{file_id}/", + "/api/0/users/{user_id}/roles/{role_name}/", + "/api/0/wizard/{wizard_hash}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/attachments/{attachment_id}/", + "/api/0/organizations/{organization_id_or_slug}/discover/homepage/", + "/api/0/api-applications/", + "/extensions/slack/commands/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/", + "/api/0/api-applications/{app_id}/rotate-secret/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/{var}/", + "/api/0/users/{user_id}/roles/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/avatar/", + "/api/0/organizations/{organization_id_or_slug}/config/repos/", + "/api/0/organizations/{organization_id_or_slug}/api-keys/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/commits/", + "/extensions/gitlab/search/{organization_id_or_slug}/{integration_id}/", + 
"/api/0/organizations/{organization_id_or_slug}/replays/{replay_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/{hook_id}/", + "/api/0/userroles/", + "/api/0/projects/{organization_id_or_slug}/rule-conditions/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/codeowners/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/difs/assemble/", + "/extensions/vercel/webhook/", + "/api/0/organizations/{organization_id_or_slug}/request-project-creation/", + "/api/0/organizations/{organization_id_or_slug}/recent-searches/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/environments/", + "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/teams/{team_id_or_slug}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/{hook_id}/stats/", + "/api/0/internal/warnings/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/files/", + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/serverless-functions/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/attachments/", + "/api/0/api-applications/{app_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/cluster-transaction-names/", + "/api/0/organizations/{organization_id_or_slug}/plugins/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/{key}/", + "/api/0/sentry-app-installations/{uuid}/", + "/api/0/users/{user_id}/ips/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/plugins/", + "/api/0/users/{user_id}/", + "/api/0/organizations/{organization_id_or_slug}/artifactbundle/assemble/", + "/api/0/organizations/{organization_id_or_slug}/releases/", + "/extensions/slack/action/", + "/api/0/organizations/{organization_id_or_slug}/discover/saved/{query_id}/visit/", + "/api/0/prompts-activity/", + "/api/0/organizations/{organization_id_or_slug}/prompts-activity/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/avatar/", + "/api/0/organizations/{organization_id_or_slug}/api-keys/{api_key_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/completion/", + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/activity/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/interaction/", + "/api/0/internal/check-am2-compatibility/", + "/api/0/organizations/{organization_id_or_slug}/", + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/commitfiles/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/", + "/api/0/sentry-apps/{sentry_app_id_or_slug}/requests/", + "/api/0/organizations/{organization_id_or_slug}/code-mappings/", + "/api/0/organizations/{organization_id_or_slug}/data-scrubbing-selector-suggestions/", + "/api/0/grouping-configs/", + "/api/0/organizations/{organization_id_or_slug}/grouping-configs/", + "/api/0/organizations/{organization_id_or_slug}/plugins/configs/", + "/api/0/accept-transfer/", + "/api/0/organizations/", + "/extensions/bitbucket/installed/", + "/api/0/integration-features/", + "/api/0/organizations/{organization_id_or_slug}/invite-requests/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/source-maps/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/", + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/preview/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rule-task/{task_uuid}/", + "/api/0/users/{user_id}/emails/", + "/api/0/organizations/{organization_id_or_slug}/teams/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/", + "/api/0/api-tokens/", + "/api/0/users/{user_id}/subscriptions/", + "/api/0/internal/options/", + "/api/0/shared/{var}/{share_id}/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/repositories/", + "/api/0/organizations/{organization_id_or_slug}/external-users/{external_user_id}/", + "/api/0/doc-integrations/{doc_integration_id_or_slug}/", + "/api/0/organizations/{organization_id_or_slug}/repos/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/all-unresolved-issues/", + "/api/0/internal/environment/", + "/extensions/jira/uninstalled/", + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/filters/", + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/alerts-triggered/", +] diff --git a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py index 0f9e36b246003a..a7eec0e7f854d9 100644 --- a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py @@ -803,3 +803,952 @@ "/extensions/msteams/webhook/": {"POST"}, "/extensions/discord/interactions/": {"POST"}, } + +API_PUBLISH_STATUS_ALLOWLIST_DONT_MODIFY.update( + { + "/api/0/relays/": {"GET"}, + "/api/0/relays/register/challenge/": {"POST"}, + "/api/0/relays/register/response/": {"POST"}, + "/api/0/relays/projectconfigs/": {"POST"}, + "/api/0/relays/projectids/": {"POST"}, + "/api/0/relays/publickeys/": {"POST"}, + "/api/0/relays/live/": {"GET"}, + "/api/0/relays/{relay_id}/": {"DELETE"}, + "/api/0/{var}/{issue_id}/": {"DELETE", "GET", "PUT"}, + "/api/0/{var}/{issue_id}/activities/": {"GET"}, + "/api/0/{var}/{issue_id}/events/": {"GET"}, + "/api/0/{var}/{issue_id}/events/{event_id}/": {"GET"}, + "/api/0/{var}/{issue_id}/{var}/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/{var}/{note_id}/": {"DELETE", "PUT"}, + "/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE"}, + "/api/0/{var}/{issue_id}/hashes/split/": {"DELETE", "GET", "PUT"}, + "/api/0/{var}/{issue_id}/reprocessing/": {"POST"}, + "/api/0/{var}/{issue_id}/stats/": {"GET"}, + "/api/0/{var}/{issue_id}/tags/": {"GET"}, + "/api/0/{var}/{issue_id}/tags/{key}/": {"GET"}, + "/api/0/{var}/{issue_id}/tags/{key}/values/": {"GET"}, + "/api/0/{var}/{issue_id}/attachments/": {"GET"}, + "/api/0/{var}/{issue_id}/similar/": {"GET"}, + "/api/0/{var}/{issue_id}/external-issues/": {"GET"}, + "/api/0/{var}/{issue_id}/external-issues/{external_issue_id}/": {"DELETE"}, + "/api/0/{var}/{issue_id}/integrations/": {"GET"}, + "/api/0/{var}/{issue_id}/integrations/{integration_id}/": {"DELETE", "GET", "PUT", "POST"}, + "/api/0/{var}/{issue_id}/current-release/": {"GET"}, + "/api/0/{var}/{issue_id}/first-last-release/": {"GET"}, + "/api/0/{var}/{issue_id}/plugins/asana/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/asana/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/asana/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/asana/autocomplete": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/bitbucket/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/bitbucket/link/": {"GET", "POST"}, + 
"/api/0/{var}/{issue_id}/plugins/bitbucket/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/bitbucket/autocomplete": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/github/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/github/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/github/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/github/autocomplete": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/gitlab/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/gitlab/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/gitlab/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/jira/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/jira/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/jira/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/jira/autocomplete": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/phabricator/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/phabricator/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/phabricator/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/phabricator/autocomplete": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/pivotal/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/pivotal/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/pivotal/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/pivotal/autocomplete": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/trello/create/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/trello/link/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/trello/unlink/": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/trello/options": {"GET", "POST"}, + "/api/0/{var}/{issue_id}/plugins/trello/autocomplete": {"GET", "POST"}, + "/api/0/organizations/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/": {"DELETE", "GET", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/activities/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/events/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/events/{event_id}/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/{var}/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/{var}/{note_id}/": { + "DELETE", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/hashes/": { + "GET", + "DELETE", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/hashes/split/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/reprocessing/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/tags/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/tags/{key}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/tags/{key}/values/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/attachments/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/similar/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/external-issues/": {"GET"}, + 
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/external-issues/{external_issue_id}/": { + "DELETE" + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/integrations/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/integrations/{integration_id}/": { + "DELETE", + "GET", + "PUT", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/current-release/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/first-last-release/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/asana/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/asana/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/asana/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/asana/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/bitbucket/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/bitbucket/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/bitbucket/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/bitbucket/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/github/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/github/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/github/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/github/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/gitlab/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/gitlab/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/gitlab/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/jira/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/jira/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/jira/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/jira/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/phabricator/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/phabricator/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/phabricator/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/phabricator/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/pivotal/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/pivotal/link/": { + "GET", + "POST", + }, + 
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/pivotal/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/pivotal/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/trello/create/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/trello/link/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/trello/unlink/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/trello/options": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/plugins/trello/autocomplete": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/alert-rules/available-actions/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/alert-rules/{alert_rule_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/alert-rules/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/combined-rules/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/data-export/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/data-export/{data_export_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/activity/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/comments/": { + "POST" + }, + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/comments/{activity_id}/": { + "DELETE", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/": { + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/incidents/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/seen/": { + "POST" + }, + "/api/0/organizations/{organization_id_or_slug}/incidents/{incident_identifier}/subscriptions/": { + "DELETE", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/chunk-upload/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/code-mappings/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/derive-code-mappings/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/code-mappings/{config_id}/": { + "DELETE", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/code-mappings/{config_id}/codeowners/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/codeowners-associations/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/discover/homepage/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/discover/saved/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/discover/saved/{query_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/discover/saved/{query_id}/visit/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/key-transactions/": { + "DELETE", + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/key-transactions-list/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/related-issues/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/project-transaction-threshold-override/": { + "DELETE", + "GET", + "POST", + }, + 
"/api/0/organizations/{organization_id_or_slug}/dashboards/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/dashboards/widgets/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/dashboards/{dashboard_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/dashboards/{dashboard_id}/visit/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/shortids/{short_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/eventids/{event_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/data-scrubbing-selector-suggestions/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/slugs/": {"PUT"}, + "/api/0/organizations/{organization_id_or_slug}/access-requests/": {"GET", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/access-requests/{request_id}/": { + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/activity/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/api-keys/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/api-keys/{api_key_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/audit-logs/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/auth-provider/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/auth-providers/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/auth-provider/send-reminders/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/avatar/": {"GET", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/artifactbundle/assemble/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/config/integrations/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/config/repos/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/sdk-updates/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/sdks/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events/{project_id_or_slug}:{event_id}/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/events-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics-estimation-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-facets/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-facets-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-starfish/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-facets-performance/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-facets-performance-histogram/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/events-span-ops/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-spans/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-spans-performance/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-spans-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-root-cause-analysis/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-meta/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/spans-samples/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics-compatibility/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics-compatibility-sums/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/missing-members/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-histogram/": {"GET"}, + 
"/api/0/organizations/{organization_id_or_slug}/events-spans-histogram/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-trends/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-vitals/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-has-measurements/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-trends-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-trends-statsv2/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-trace-light/{trace_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-trace/{trace_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/events-trace-meta/{trace_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/measurements-meta/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/issues/": {"DELETE", "GET", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/issues-count/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/issues-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/integrations/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/": { + "DELETE", + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/repos/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/issues/": { + "PUT" + }, + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/migrate-opsgenie/": { + "PUT" + }, + "/api/0/organizations/{organization_id_or_slug}/integrations/{integration_id}/serverless-functions/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/members/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/external-users/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/external-users/{external_user_id}/": { + "DELETE", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/integration-requests/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/invite-requests/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/invite-requests/{member_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/notifications/actions/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/notifications/actions/{action_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/notifications/available-actions/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/monitors-stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/monitors/{monitor_id_or_slug}/stats/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/pinned-searches/": {"DELETE", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/recent-searches/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/searches/{search_id}/": {"DELETE", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/searches/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/sessions/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/resolved/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/request-project-creation/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/unreleased-commits/": { + "GET" + }, + 
"/api/0/organizations/{organization_id_or_slug}/members/{member_id}/teams/{team_id_or_slug}/": { + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/onboarding-continuation-email/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/processingissues/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/experimental/projects/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/projects-count/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/sent-first-event/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/repos/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/repos/{repo_id}/": {"DELETE", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/repos/{repo_id}/commits/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/plugins/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/plugins/configs/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/releases/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/releases/stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/meta/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/assemble/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/files/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/files/{file_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/commitfiles/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/deploys/": { + "GET", + "POST", + }, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/commits/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/releases/{version}/previous-with-commits/": { + "GET" + }, + "/api/0/organizations/{organization_id_or_slug}/user-feedback/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/user-teams/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/users/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/users/{user_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/sentry-app-installations/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/sentry-apps/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/sentry-app-components/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/org-auth-tokens/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/org-auth-tokens/{token_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/stats/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/tags/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/tags/{key}/values/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/onboarding-tasks/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/environments/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/broadcasts/": {"GET", "PUT", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/join-request/": {"POST"}, + "/api/0/organizations/{organization_id_or_slug}/transaction-anomaly-detection/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/relay_usage/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/replay-selectors/": {"GET"}, + 
"/api/0/organizations/{organization_id_or_slug}/replay-count/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/replays-events-meta/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/functions/": {"GET", "POST"}, + "/api/0/organizations/{organization_id_or_slug}/functions/{function_id_or_slug}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/organizations/{organization_id_or_slug}/scim/v2/Users/{member_id}": {"PUT"}, + "/api/0/organizations/{organization_id_or_slug}/scim/v2/Groups/{team_id}": {"PUT"}, + "/api/0/organizations/{organization_id_or_slug}/scim/v2/Schemas": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics/meta/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics/meta/{metric_name}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics/data/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics/tags/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/metrics/tags/{tag_name}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/profiling/filters/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/profiling/flamegraph/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/profiling/function-trends/": {"GET"}, + "/api/0/projects/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/rule-conditions/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rules/{alert_rule_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rules/": { + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rule-task/{task_uuid}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/combined-rules/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/avatar/": {"GET", "PUT"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/create-sample/": {"POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/create-sample-transaction/": { + "POST" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/docs/{platform}/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/environments/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/environments/{environment}/": { + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/platforms/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/grouping-info/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/apple-crash-report": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/attachments/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/reprocessable/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/attachments/{attachment_id}/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/committers/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/json/": { + "GET" + }, + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/owners/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/events/{event_id}/actionable-items/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/": { + "DELETE", + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/source-maps/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/artifact-bundles/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/proguard-artifact-releases": { + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/difs/assemble/": { + "POST" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/unknown/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/files/dsyms/associate/": { + "POST" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/filters/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/": {"GET", "POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/{hook_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/hooks/{hook_id}/stats/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/{var}/": { + "POST", + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/{var}/stats/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/keys/{key_id}/stats/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/members/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/": {"GET", "POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/commits/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/token/": { + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/completion/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/commits/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/repositories/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/resolved/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/stats/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/artifact-bundles/{bundle_id}/files/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/artifact-bundles/{bundle_id}/files/{file_id}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/files/": { + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/releases/{version}/files/{file_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/artifact-lookup/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/": {"GET", "POST"}, + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/clicks/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/recording-segments/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/replays/{replay_id}/recording-segments/{segment_id}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/configuration/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/{rule_id}/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/{rule_id}/enable/": { + "PUT" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/{rule_id}/snooze/": { + "DELETE", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/alert-rules/{rule_id}/snooze/": { + "DELETE", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/preview/": {"POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rule-actions/": {"POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/{rule_id}/group-history/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rules/{rule_id}/stats/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/rule-task/{task_uuid}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/stats/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/{key}/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tags/{key}/values/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/teams/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/transfer/": {"POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/users/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/users/{user_hash}/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/user-stats/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/processingissues/": { + "GET", + "DELETE", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/reprocessing/": {"POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/processingissues/discard/": { + "DELETE" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/ownership/": {"GET", "PUT"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/codeowners/": { + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/codeowners/{codeowners_id}/": { + "DELETE", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/transaction-threshold/configure/": { + "DELETE", + "GET", + "POST", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/performance-issues/configure/": { + "DELETE", + "GET", + "PUT", + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/plugins/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/plugins/{plugin_id}/": { + "DELETE", + "GET", + "PUT", + "POST", + }, + 
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/cluster-transaction-names/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tombstones/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/tombstones/{tombstone_id}/": { + "DELETE" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/stacktrace-link/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/grouping-configs/": {"GET"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/appstoreconnect/": {"POST"}, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/appstoreconnect/apps/": { + "POST" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/appstoreconnect/status/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/appstoreconnect/{credentials_id}/": { + "POST" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/appstoreconnect/{credentials_id}/refresh/": { + "POST" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/profiling/functions/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/profiling/profiles/{profile_id}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/profiling/raw_profiles/{profile_id}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/profiling/flamegraph/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/profiling/transactions/{transaction_id}/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/dynamic-sampling/rate/": { + "GET" + }, + "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/repo-path-parsing/": { + "POST" + }, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/": {"DELETE", "GET", "PUT"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/issues/old/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/release-count/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/time-to-resolution/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/unresolved-issue-age/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/alerts-triggered/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/alerts-triggered-index/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/issue-breakdown/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/all-unresolved-issues/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/notification-settings/": { + "GET", + "PUT", + }, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/members/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/stats/": {"GET"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/avatar/": {"GET", "PUT"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/external-teams/": {"POST"}, + "/api/0/teams/{organization_id_or_slug}/{team_id_or_slug}/external-teams/{external_team_id}/": { + "DELETE", + "PUT", + }, + "/api/0/users/": {"GET"}, + "/api/0/users/{user_id}/": {"DELETE", "GET", "PUT"}, + "/api/0/users/{user_id}/avatar/": {"GET", "PUT"}, + "/api/0/users/{user_id}/authenticators/": {"GET"}, + "/api/0/users/{user_id}/authenticators/{interface_id}/enroll/": {"GET", "POST"}, + "/api/0/users/{user_id}/authenticators/{auth_id}/{interface_device_id}/": { + 
"DELETE", + "GET", + "PUT", + }, + "/api/0/users/{user_id}/authenticators/{auth_id}/": {"DELETE", "GET", "PUT"}, + "/api/0/users/{user_id}/emails/": {"DELETE", "GET", "PUT", "POST"}, + "/api/0/users/{user_id}/emails/confirm/": {"POST"}, + "/api/0/users/{user_id}/identities/{identity_id}/": {"DELETE"}, + "/api/0/users/{user_id}/identities/": {"GET"}, + "/api/0/users/{user_id}/ips/": {"GET"}, + "/api/0/users/{user_id}/organizations/": {"GET"}, + "/api/0/users/{user_id}/notification-settings/": {"GET", "PUT"}, + "/api/0/users/{user_id}/notifications/": {"GET", "PUT"}, + "/api/0/users/{user_id}/notifications/{notification_type}/": {"GET", "PUT"}, + "/api/0/users/{user_id}/password/": {"PUT"}, + "/api/0/users/{user_id}/permissions/": {"GET"}, + "/api/0/users/{user_id}/permissions/config/": {"GET"}, + "/api/0/users/{user_id}/permissions/{permission_name}/": {"DELETE", "GET", "POST"}, + "/api/0/users/{user_id}/roles/": {"GET"}, + "/api/0/users/{user_id}/roles/{role_name}/": {"DELETE", "GET", "POST"}, + "/api/0/users/{user_id}/social-identities/": {"GET"}, + "/api/0/users/{user_id}/social-identities/{identity_id}/": {"DELETE"}, + "/api/0/users/{user_id}/subscriptions/": {"GET", "PUT", "POST"}, + "/api/0/users/{user_id}/organization-integrations/": {"GET"}, + "/api/0/users/{user_id}/user-identities/": {"GET"}, + "/api/0/users/{user_id}/user-identities/{category}/{identity_id}/": {"GET", "DELETE"}, + "/api/0/userroles/": {"GET", "POST"}, + "/api/0/userroles/{role_name}/": {"DELETE", "GET", "PUT"}, + "/api/0/sentry-apps/": {"GET", "POST"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/": {"DELETE", "GET", "PUT"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/features/": {"GET"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/components/": {"GET"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/avatar/": {"GET", "PUT"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/api-tokens/": {"GET", "POST"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/api-tokens/{api_token}/": {"DELETE"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/stats/": {"GET"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/requests/": {"GET"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/interaction/": {"GET", "POST"}, + "/api/0/sentry-apps/{sentry_app_id_or_slug}/publish-request/": {"POST"}, + "/api/0/sentry-app-installations/{uuid}/": {"DELETE", "GET", "PUT"}, + "/api/0/sentry-app-installations/{uuid}/authorizations/": {"POST"}, + "/api/0/sentry-app-installations/{uuid}/external-requests/": {"GET"}, + "/api/0/sentry-app-installations/{uuid}/external-issue-actions/": {"POST"}, + "/api/0/sentry-app-installations/{uuid}/external-issues/": {"POST"}, + "/api/0/sentry-app-installations/{uuid}/external-issues/{external_issue_id}/": {"DELETE"}, + "/api/0/auth/": {"DELETE", "GET", "PUT", "POST"}, + "/api/0/auth/config/": {"GET"}, + "/api/0/auth/login/": {"POST"}, + "/api/0/broadcasts/": {"GET", "PUT", "POST"}, + "/api/0/broadcasts/{broadcast_id}/": {"GET", "PUT"}, + "/api/0/assistant/": {"GET", "PUT"}, + "/api/0/api-applications/": {"GET", "POST"}, + "/api/0/api-applications/{app_id}/": {"DELETE", "GET", "PUT"}, + "/api/0/api-applications/{app_id}/rotate-secret/": {"POST"}, + "/api/0/api-authorizations/": {"GET", "DELETE"}, + "/api/0/api-tokens/": {"DELETE", "GET", "POST"}, + "/api/0/prompts-activity/": {"GET", "PUT"}, + "/api/0/organizations/{organization_id_or_slug}/prompts-activity/": {"GET", "PUT"}, + "/api/0/authenticators/": {"GET"}, + "/api/0/accept-transfer/": {"GET", "POST"}, + 
"/api/0/accept-invite/{organization_id_or_slug}/{member_id}/{token}/": {"GET", "POST"}, + "/api/0/accept-invite/{member_id}/{token}/": {"GET", "POST"}, + "/api/0/profiling/projects/{project_id}/profile/{profile_id}/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/participants/": {"GET"}, + "/api/0/{var}/{issue_id}/participants/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/shared/{var}/{share_id}/": {"GET"}, + "/api/0/shared/{var}/{share_id}/": {"GET"}, + "/api/0/sentry-apps-stats/": {"GET"}, + "/api/0/doc-integrations/": {"GET", "POST"}, + "/api/0/doc-integrations/{doc_integration_id_or_slug}/": {"DELETE", "GET", "PUT"}, + "/api/0/doc-integrations/{doc_integration_id_or_slug}/avatar/": {"GET", "PUT"}, + "/api/0/integration-features/": {"GET"}, + "/api/0/issue-occurrence/": {"POST"}, + "/api/0/grouping-configs/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/grouping-configs/": {"GET"}, + "/api/0/builtin-symbol-sources/": {"GET"}, + "/api/0/organizations/{organization_id_or_slug}/builtin-symbol-sources/": {"GET"}, + "/api/0/wizard/": {"GET", "DELETE"}, + "/api/0/wizard/{wizard_hash}/": {"GET", "DELETE"}, + "/api/0/internal/health/": {"GET"}, + "/api/0/internal/options/": {"GET", "PUT"}, + "/api/0/internal/beacon/": {"POST"}, + "/api/0/internal/quotas/": {"GET"}, + "/api/0/internal/queue/tasks/": {"GET"}, + "/api/0/internal/stats/": {"GET"}, + "/api/0/internal/warnings/": {"GET"}, + "/api/0/internal/packages/": {"GET"}, + "/api/0/internal/environment/": {"GET"}, + "/api/0/internal/mail/": {"GET", "POST"}, + "/api/0/internal/project-config/": {"GET"}, + "/api/0/internal/rpc/{service_name}/{method_name}/": {"POST"}, + "/api/0/internal/check-am2-compatibility/": {"GET"}, + "/api/0/": {"GET"}, + "/oauth/userinfo/": {"GET"}, + "/extensions/jira/descriptor/": {"GET"}, + "/extensions/jira/installed/": {"POST"}, + "/extensions/jira/uninstalled/": {"POST"}, + "/extensions/jira/issue-updated/": {"POST"}, + "/extensions/jira/search/{organization_id_or_slug}/{integration_id}/": {"GET"}, + "/extensions/jira-server/issue-updated/{token}/": {"POST"}, + "/extensions/jira-server/search/{organization_id_or_slug}/{integration_id}/": {"GET"}, + "/extensions/slack/action/": {"POST"}, + "/extensions/slack/commands/": {"POST"}, + "/extensions/slack/event/": {"POST"}, + "/extensions/github/webhook/": {"POST"}, + "/extensions/github/search/{organization_id_or_slug}/{integration_id}/": {"GET"}, + "/extensions/github-enterprise/webhook/": {"POST"}, + "/extensions/gitlab/search/{organization_id_or_slug}/{integration_id}/": {"GET"}, + "/extensions/gitlab/webhook/": {"POST"}, + "/extensions/vsts/issue-updated/": {"POST"}, + "/extensions/vsts/search/{organization_id_or_slug}/{integration_id}/": {"GET"}, + "/extensions/bitbucket/descriptor/": {"GET"}, + "/extensions/bitbucket/installed/": {"POST"}, + "/extensions/bitbucket/uninstalled/": {"POST"}, + "/extensions/bitbucket/organizations/{organization_id}/webhook/": {"POST"}, + "/extensions/bitbucket/search/{organization_id_or_slug}/{integration_id}/": {"GET"}, + "/extensions/vercel/delete/": {"DELETE", "POST"}, + "/extensions/vercel/webhook/": {"DELETE", "POST"}, + "/extensions/msteams/webhook/": {"POST"}, + "/extensions/discord/interactions/": {"POST"}, + } +) From c1f2d1ecdff4e346f9695d5cf011991cf15acbb8 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 8 May 2024 14:19:46 -0700 Subject: [PATCH 183/376] test(ui): Remove `babel-plugin-dynamic-import-node` (#70460) --- babel.config.ts | 2 -- package.json | 1 - 
 yarn.lock                           | 9 +--------
 3 files changed, 1 insertion(+), 11 deletions(-)

diff --git a/babel.config.ts b/babel.config.ts
index 0359a78348769b..19214a68434db6 100644
--- a/babel.config.ts
+++ b/babel.config.ts
@@ -41,8 +41,6 @@ const config: TransformOptions = {
     test: {
       sourceMaps: process.env.CI ? false : true,
       plugins: [
-        // Required, see https://github.com/facebook/jest/issues/9430
-        'dynamic-import-node',
         // Disable emotion sourcemaps in tests
         // Since emotion spends lots of time parsing and inserting sourcemaps
         [
diff --git a/package.json b/package.json
index daf7e9e0d68d72..e9cf79b2efd25b 100644
--- a/package.json
+++ b/package.json
@@ -186,7 +186,6 @@
     "@types/node": "^20.11.7",
     "babel-gettext-extractor": "^4.1.3",
     "babel-jest": "^29.6.2",
-    "babel-plugin-dynamic-import-node": "^2.3.3",
     "benchmark": "^2.1.4",
     "eslint": "8.57.0",
     "eslint-config-sentry-app": "2.7.0",
diff --git a/yarn.lock b/yarn.lock
index 2ae2628f45103c..bd279e51f1db56 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4863,13 +4863,6 @@ babel-plugin-add-react-displayname@^0.0.5:
   resolved "https://registry.yarnpkg.com/babel-plugin-add-react-displayname/-/babel-plugin-add-react-displayname-0.0.5.tgz#339d4cddb7b65fd62d1df9db9fe04de134122bd5"
   integrity sha1-M51M3be2X9YtHfnbn+BN4TQSK9U=

-babel-plugin-dynamic-import-node@^2.3.3:
-  version "2.3.3"
-  resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3"
-  integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==
-  dependencies:
-    object.assign "^4.1.0"
-
 babel-plugin-istanbul@^6.1.1:
   version "6.1.1"
   resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73"
@@ -9398,7 +9391,7 @@ object-keys@^1.1.1:
   resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
   integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==

-object.assign@^4.1.0, object.assign@^4.1.2, object.assign@^4.1.4, object.assign@^4.1.5:
+object.assign@^4.1.2, object.assign@^4.1.4, object.assign@^4.1.5:
   version "4.1.5"
   resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0"
   integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==

From 6ed338e0d8a571c5ab3c160c75caed18980fb79b Mon Sep 17 00:00:00 2001
From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com>
Date: Wed, 8 May 2024 17:30:26 -0400
Subject: [PATCH 184/376] feat(replay): create a wrapper class to init rrweb
 player alongside video replayer (#69927)

Fixes https://github.com/getsentry/sentry/issues/69817

We have a `videoReplayer`, which uses video events to create the replay
playback for mobile replays. However, in order to see the gestures (clicks,
mouse movements, etc) we need to initialize an rrweb player too (the one that
web replay uses). This PR introduces a `videoReplayerWithInteractions` class
which initializes both, so that mobile replays can utilize both players at
once.

![image](https://github.com/getsentry/sentry/assets/56095982/8a81da2d-2f8c-4bac-acf0-988c04be08ec)

Another key change we had to make was introducing a fake full snapshot event
after every meta event in order to trick the rrweb player into thinking we
had a node to map the mouse movement to.
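For reference, here's a minimal sketch of the injected event (a hypothetical
illustration; the real payload lives in `videoReplayerWithInteractions.tsx`
in this PR, and the `type` values follow rrweb's event enum, where `4` is a
meta event and `2` is a full snapshot):

```ts
// Sketch: for every meta event (type 4) we push a synthetic full snapshot (type 2).
// `metaEvent` is illustrative; see videoReplayerWithInteractions.tsx for the real payload.
const fakeFullSnapshot = {
  type: 2, // EventType.FullSnapshot
  data: {
    node: {
      type: 0, // minimal document node stub
      childNodes: [],
      id: 0, // matches the id the SDK sends in its `positions` payloads
    },
  },
  timestamp: metaEvent.timestamp, // align with the meta event it follows
};
```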
The rrweb player essentially fails to render any gesture if it doesn't find
an element whose `id` matches the `id` inside the `positions` array, so we
hardcoded the event to have `id: 0` (which is what the SDK is sending for the
mobile rrweb events). This workaround should be safe to do since the full
snapshot event doesn't affect the video playback at all.

Adding a snapshot event after every meta event also fixes the scrubbing bugs
we were experiencing.

Fixing mousetails not showing up involved absolutely positioning the
`replayer-wrapper`.

https://github.com/getsentry/sentry/assets/56095982/4a33cae4-ae1d-43b6-91a3-bf81fb36cf8c

---
 .../app/components/replays/replayContext.tsx  |  18 ++-
 .../app/components/replays/replayPlayer.tsx   |  14 ++
 .../app/components/replays/videoReplayer.tsx  |   2 +-
 .../replays/videoReplayerWithInteractions.tsx | 152 ++++++++++++++++++
 4 files changed, 181 insertions(+), 5 deletions(-)
 create mode 100644 static/app/components/replays/videoReplayerWithInteractions.tsx

diff --git a/static/app/components/replays/replayContext.tsx b/static/app/components/replays/replayContext.tsx
index 43625c93a771c9..84ce7ccdae62f7 100644
--- a/static/app/components/replays/replayContext.tsx
+++ b/static/app/components/replays/replayContext.tsx
@@ -15,6 +15,7 @@ import type {
   ReplayPrefs,
 } from 'sentry/components/replays/preferences/replayPreferences';
 import useReplayHighlighting from 'sentry/components/replays/useReplayHighlighting';
+import {VideoReplayerWithInteractions} from 'sentry/components/replays/videoReplayerWithInteractions';
 import {trackAnalytics} from 'sentry/utils/analytics';
 import clamp from 'sentry/utils/number/clamp';
 import type useInitialOffsetMs from 'sentry/utils/replays/hooks/useInitialTimeOffsetMs';
@@ -26,7 +27,6 @@ import useProjectFromId from 'sentry/utils/useProjectFromId';
 import {useUser} from 'sentry/utils/useUser';
 
 import {CanvasReplayerPlugin} from './canvasReplayerPlugin';
-import {VideoReplayer} from './videoReplayer';
 
 type Dimensions = {height: number; width: number};
 type RootElem = null | HTMLDivElement;
@@ -474,16 +474,19 @@ function ProviderNonMemo({
         return null;
       }
 
-      // check if this is a video replay and if we can use the video replayer
+      // check if this is a video replay and if we can use the video (wrapper) replayer
       if (!isVideoReplay || !videoEvents || !startTimestampMs) {
         return null;
       }
 
-      const inst = new VideoReplayer(videoEvents, {
+      // This is a wrapper class that wraps both the VideoReplayer
+      // and the rrweb Replayer
+      const inst = new VideoReplayerWithInteractions({
+        // video specific
+        videoEvents,
         videoApiPrefix: `/api/0/projects/${
           organization.slug
         }/${projectSlug}/replays/${replay?.getReplay().id}/videos/`,
-        root,
         start: startTimestampMs,
         onFinished: setReplayFinished,
         onLoaded: event => {
@@ -501,6 +504,11 @@ function ProviderNonMemo({
         },
         clipWindow,
         durationMs,
+        // rrweb specific
+        theme,
+        events: events ?? [],
+        // common to both
+        root,
       });
       // `.current` is marked as readonly, but it's safe to set the value from
      // inside a `useEffect` hook.
@@ -520,6 +528,7 @@ function ProviderNonMemo({ isFetching, isVideoReplay, videoEvents, + events, organization.slug, projectSlug, replay, @@ -528,6 +537,7 @@ function ProviderNonMemo({ startTimeOffsetMs, clipWindow, durationMs, + theme, ] ); diff --git a/static/app/components/replays/replayPlayer.tsx b/static/app/components/replays/replayPlayer.tsx index 673811ea5db542..e64ff967fa6935 100644 --- a/static/app/components/replays/replayPlayer.tsx +++ b/static/app/components/replays/replayPlayer.tsx @@ -282,6 +282,20 @@ const SentryPlayerRoot = styled(PlayerRoot)` height: 10px; } } + + /* Correctly positions the canvas for video replays and shows the purple "mousetails" */ + &.video-replayer { + .replayer-wrapper { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + } + .replayer-wrapper > iframe { + opacity: 0; + } + } `; const Overlay = styled('div')` diff --git a/static/app/components/replays/videoReplayer.tsx b/static/app/components/replays/videoReplayer.tsx index 3103ccd6db9e3b..6b146518599b61 100644 --- a/static/app/components/replays/videoReplayer.tsx +++ b/static/app/components/replays/videoReplayer.tsx @@ -24,7 +24,7 @@ interface VideoReplayerOptions { clipWindow?: ClipWindow; } -interface VideoReplayerConfig { +export interface VideoReplayerConfig { /** * Not supported, only here to maintain compat w/ rrweb player */ diff --git a/static/app/components/replays/videoReplayerWithInteractions.tsx b/static/app/components/replays/videoReplayerWithInteractions.tsx new file mode 100644 index 00000000000000..259e4bfa43f0c0 --- /dev/null +++ b/static/app/components/replays/videoReplayerWithInteractions.tsx @@ -0,0 +1,152 @@ +import type {Theme} from '@emotion/react'; +import {type eventWithTime, Replayer} from '@sentry-internal/rrweb'; + +import { + VideoReplayer, + type VideoReplayerConfig, +} from 'sentry/components/replays/videoReplayer'; +import type {ClipWindow, VideoEvent} from 'sentry/utils/replays/types'; + +type RootElem = HTMLDivElement | null; + +interface VideoReplayerWithInteractionsOptions { + durationMs: number; + events: eventWithTime[]; + onBuffer: (isBuffering: boolean) => void; + onFinished: () => void; + onLoaded: (event: any) => void; + root: RootElem; + start: number; + theme: Theme; + videoApiPrefix: string; + videoEvents: VideoEvent[]; + clipWindow?: ClipWindow; +} + +/** + * A wrapper replayer that wraps both VideoReplayer and the rrweb Replayer. + * We need both instances in order to render the video playback alongside gestures. 
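+ * The VideoReplayer instance drives the mobile video playback, while the rrweb
+ * Replayer renders the recorded gestures (clicks, mouse movements) on top of it.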
+ */
+export class VideoReplayerWithInteractions {
+  public config: VideoReplayerConfig = {
+    skipInactive: false,
+    speed: 1.0,
+  };
+  private videoReplayer: VideoReplayer;
+  private replayer: Replayer;
+
+  constructor({
+    videoEvents,
+    events,
+    root,
+    start,
+    videoApiPrefix,
+    onBuffer,
+    onFinished,
+    onLoaded,
+    clipWindow,
+    durationMs,
+    theme,
+  }: VideoReplayerWithInteractionsOptions) {
+    this.videoReplayer = new VideoReplayer(videoEvents, {
+      videoApiPrefix,
+      root,
+      start,
+      onFinished,
+      onLoaded,
+      onBuffer,
+      clipWindow,
+      durationMs,
+    });
+
+    root?.classList.add('video-replayer');
+
+    const eventsWithSnapshots: eventWithTime[] = [];
+    events.forEach(e => {
+      eventsWithSnapshots.push(e);
+      if (e.type === 4) {
+        // Create a mock full snapshot event, in order to render rrweb gestures properly
+        // Need to add one for every meta event we see
+        // The hardcoded data.node.id here should match the ID of the data being sent
+        // in the `positions` arrays
+        const fullSnapshotEvent = {
+          type: 2,
+          data: {
+            node: {
+              type: 0,
+              childNodes: [
+                {
+                  type: 1,
+                  name: 'html',
+                  publicId: '',
+                  systemId: '',
+                },
+                {
+                  type: 2,
+                  tagName: 'html',
+                  attributes: {
+                    lang: 'en',
+                  },
+                  childNodes: [],
+                },
+              ],
+              id: 0,
+            },
+          },
+          timestamp: e.timestamp,
+        };
+        eventsWithSnapshots.push(fullSnapshotEvent);
+      }
+    });
+
+    this.replayer = new Replayer(eventsWithSnapshots, {
+      root: root as Element,
+      blockClass: 'sentry-block',
+      mouseTail: {
+        duration: 0.75 * 1000,
+        lineCap: 'round',
+        lineWidth: 2,
+        strokeStyle: theme.purple200,
+      },
+      plugins: [],
+      skipInactive: false,
+      speed: this.config.speed,
+    });
+  }
+
+  public destroy() {
+    this.videoReplayer.destroy();
+    this.replayer.destroy();
+  }
+
+  /**
+   * Returns the current video time, using the video's external timer.
+   */
+  public getCurrentTime() {
+    return this.videoReplayer.getCurrentTime();
+  }
+
+  /**
+   * Play both the rrweb and video player.
+   */
+  public play(videoOffsetMs: number) {
+    this.videoReplayer.play(videoOffsetMs);
+    this.replayer.play(videoOffsetMs);
+  }
+
+  /**
+   * Pause both the rrweb and video player.
+   */
+  public pause(videoOffsetMs: number) {
+    this.videoReplayer.pause(videoOffsetMs);
+    this.replayer.pause(videoOffsetMs);
+  }
+
+  /**
+   * Equivalent to rrweb's `setConfig()`, but here we only support the `speed` configuration.
+   */
+  public setConfig(config: Partial<VideoReplayerConfig>): void {
+    this.videoReplayer.setConfig(config);
+    this.replayer.setConfig(config);
+  }
+}

From edcb35272abe005432c35eaa83df9cb4cbdcac1b Mon Sep 17 00:00:00 2001
From: Josh Ferge
Date: Wed, 8 May 2024 14:31:55 -0700
Subject: [PATCH 185/376] feat(feedback): update spam detection prompt
 (#70544)

From my testing ([colab notebook](https://colab.research.google.com/drive/1d_FPW27gghcCvX0Bw40xx7k5GGAcRb6n?usp=sharing)),
this prompt does a lot better. It also sets temperature to 0, which should
have been done before.

---
 .../feedback/usecases/spam_detection.py       | 42 +++++++++++++++++--
 .../feedback/usecases/test_create_feedback.py | 10 ++---
 2 files changed, 43 insertions(+), 9 deletions(-)

diff --git a/src/sentry/feedback/usecases/spam_detection.py b/src/sentry/feedback/usecases/spam_detection.py
index aeac34c22be3b0..3fa6967e3611f7 100644
--- a/src/sentry/feedback/usecases/spam_detection.py
+++ b/src/sentry/feedback/usecases/spam_detection.py
@@ -5,15 +5,33 @@
 logger = logging.getLogger(__name__)
 
-PROMPT = """Classify the text into one of the following two classes: [Junk, Not Junk]. Choose Junk only if you are confident.
Text: """ +PROMPT = """ +Please analyze the following input and output `spam` if the input is not coherent, and `not spam` if it is coherent. +Some example responses: + asdfasdf,spam + It doesn't work,not spam + es funktioniert nicht, not spam + لا يعمل,not spam, + Nothing,spam + ..,spam + hey,spam +Complete the following: +""" @metrics.wraps("feedback.spam_detection", sample_rate=1.0) def is_spam(message): is_spam = False - response = complete_prompt(usecase=LLMUseCase.SPAM_DETECTION, prompt=PROMPT, message=message) - if response and response.strip().lower() in ("junk", "[junk]"): - is_spam = True + trimmed_response = "" + response = complete_prompt( + usecase=LLMUseCase.SPAM_DETECTION, + prompt=PROMPT, + message=message + ",", # add a comma so it knows to complete the csv + temperature=0, + max_output_tokens=20, + ) + if response: + is_spam, trimmed_response = trim_response(response) logger.info( "Spam detection", @@ -21,7 +39,23 @@ def is_spam(message): "feedback_message": message, "is_spam": is_spam, "response": response, + "trimmed_response": trimmed_response, }, ) metrics.incr("spam-detection", tags={"is_spam": is_spam}, sample_rate=1.0) return is_spam + + +def trim_response(text): + trimmed_text = text.strip().lower() + + trimmed_text.replace("`", "") + + import re + + trimmed_text = re.sub(r"\W+", "", trimmed_text) + + if trimmed_text in ("spam", "[spam]"): + return True, trimmed_text + else: + return False, trimmed_text diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index b541dc23d5ac64..1e213d037f7deb 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -504,9 +504,9 @@ def dummy_response(*args, **kwargs): index=0, message=ChatCompletionMessage( content=( - "Junk" - if kwargs["messages"][1]["content"] == "This is definitely spam" - else "Not Junk" + "spam" + if kwargs["messages"][1]["content"] == "This is definitely spam," + else "not spam" ), role="assistant", ), @@ -602,9 +602,9 @@ def dummy_response(*args, **kwargs): index=0, message=ChatCompletionMessage( content=( - "Junk" + "spam" if kwargs["messages"][1]["content"] == "This is definitely spam" - else "Not Junk" + else "not spam" ), role="assistant", ), From 7609ac7a712d385c55b43f01a566c0b9b909dbf2 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Wed, 8 May 2024 17:34:37 -0400 Subject: [PATCH 186/376] fix(issue-priority): Add Chevron-up state for when group priority dropdown is open (#70529) Currently, the chevron for the issue priority tag does not animate or flip up when the dropdown is toggled. This PR fixes that. 
Before:

![Mov to Gif conversion (1)](https://github.com/getsentry/sentry/assets/55160142/c892fc0e-d042-4e95-898f-e30f26c6feaf)

After:

![Mov to Gif conversion](https://github.com/getsentry/sentry/assets/55160142/729ce25c-de21-43a5-8d15-c969f93c5144)

---
 static/app/components/badge/groupPriority.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/static/app/components/badge/groupPriority.tsx b/static/app/components/badge/groupPriority.tsx
index cac5207a2efd98..8ad42816c4ff9b 100644
--- a/static/app/components/badge/groupPriority.tsx
+++ b/static/app/components/badge/groupPriority.tsx
@@ -222,14 +222,14 @@ export function GroupPriorityDropdown({
         }
         minMenuWidth={230}
-        trigger={triggerProps => (
+        trigger={(triggerProps, isOpen) => (
           <GroupPriorityBadge priority={value}>
-            <Chevron light direction="down" size="small" />
+            <Chevron light direction={isOpen ? 'up' : 'down'} size="small" />
           </GroupPriorityBadge>
         )}

From abaea0d16109a9f796959566491fa2ae8b20fb3c Mon Sep 17 00:00:00 2001
From: Stephen Cefali
Date: Wed, 8 May 2024 14:37:48 -0700
Subject: [PATCH 187/376] feat(analytics): adds analytics for issue search on
 the backend (#70538)

We want to have a better understanding of what queries are being used on the
backend for issue search. We have analytics on the FE for issue stream
loading, but it doesn't cover a lot of places that make searches, such as
the releases tab.

---
 src/sentry/analytics/events/__init__.py          |  1 +
 .../events/issue_search_endpoint_queried.py      | 16 ++++++++++++++++
 .../issues/endpoints/organization_group_index.py | 14 +++++++++++++-
 3 files changed, 30 insertions(+), 1 deletion(-)
 create mode 100644 src/sentry/analytics/events/issue_search_endpoint_queried.py

diff --git a/src/sentry/analytics/events/__init__.py b/src/sentry/analytics/events/__init__.py
index cddf1bb0cf87aa..cefa3ec215db57 100644
--- a/src/sentry/analytics/events/__init__.py
+++ b/src/sentry/analytics/events/__init__.py
@@ -43,6 +43,7 @@
 from .issue_mark_reviewed import *  # noqa: F401,F403
 from .issue_priority import *  # noqa: F401,F403
 from .issue_resolved import *  # noqa: F401,F403
+from .issue_search_endpoint_queried import *  # noqa: F401,F403
 from .issue_tracker_used import *  # noqa: F401,F403
 from .issue_unignored import *  # noqa: F401,F403
 from .issue_unresolved import *  # noqa: F401,F403
diff --git a/src/sentry/analytics/events/issue_search_endpoint_queried.py b/src/sentry/analytics/events/issue_search_endpoint_queried.py
new file mode 100644
index 00000000000000..f2ace304bd6037
--- /dev/null
+++ b/src/sentry/analytics/events/issue_search_endpoint_queried.py
@@ -0,0 +1,16 @@
+from sentry import analytics
+
+
+class IssueSearchEndpointQueriedEvent(analytics.Event):
+    type = "issue_search.endpoint_queried"
+
+    attributes = (
+        analytics.Attribute("user_id"),
+        analytics.Attribute("organization_id"),
+        analytics.Attribute("project_ids"),  # This is a list of project ids
+        analytics.Attribute("full_query_params"),
+        analytics.Attribute("query"),
+    )
+
+
+analytics.register(IssueSearchEndpointQueriedEvent)
diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py
index b9773df5190a7e..204770007b83d8 100644
--- a/src/sentry/issues/endpoints/organization_group_index.py
+++ b/src/sentry/issues/endpoints/organization_group_index.py
@@ -10,7 +10,7 @@
 from rest_framework.response import Response
 from sentry_sdk import start_span
 
-from sentry import features, search
+from sentry import analytics, features, search
 from sentry.api.api_owners import ApiOwner
 from sentry.api.api_publish_status import ApiPublishStatus
 from sentry.api.base import region_silo_endpoint
@@ -306,6 +306,18 @@ def get(self,
request: Request, organization) -> Response:
         # we ignore date range for both short id and event ids
         query = request.GET.get("query", "").strip()
+
+        # record analytics for search query
+        if request.user:
+            analytics.record(
+                "issue_search.endpoint_queried",
+                user_id=request.user.id,
+                organization_id=organization.id,
+                project_ids=",".join(map(str, project_ids)),
+                full_query_params=",".join(f"{key}={value}" for key, value in request.GET.items()),
+                query=query,
+            )
+
         if query:
             # check to see if we've got an event ID
             event_id = normalize_event_id(query)

From 97486c9ba1d6d9786f35ac921fb6d1b9f31b75ea Mon Sep 17 00:00:00 2001
From: Jenn Mueng <30991498+jennmueng@users.noreply.github.com>
Date: Wed, 8 May 2024 14:55:18 -0700
Subject: [PATCH 188/376] fix(autofix): Allow project:read permission to make
 a codebase index (#70550)

Unlock autofix codebase creation for now for internal testing; we should
reconsider after LA whether we should actually lock it to accounts with
project write access.

---
 .../project_autofix_create_codebase_index.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py
index 2ad63193077d94..54f488530514b9 100644
--- a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py
+++ b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py
@@ -9,7 +9,7 @@
 from sentry.api.api_owners import ApiOwner
 from sentry.api.api_publish_status import ApiPublishStatus
 from sentry.api.base import region_silo_endpoint
-from sentry.api.bases.project import ProjectEndpoint
+from sentry.api.bases.project import ProjectEndpoint, ProjectPermission
 from sentry.api.helpers.repos import get_repos_from_project_code_mappings
 from sentry.models.project import Project
 from sentry.utils import json
@@ -19,6 +19,13 @@
 from rest_framework.request import Request
 
 
+class ProjectAutofixCreateCodebaseIndexPermission(ProjectPermission):
+    scope_map = {
+        # We might want to re-evaluate for LA/EA whether a user needs write access to the project to create a codebase index (probably yes?)
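+        # For now, project:read is included so that any project member can trigger indexing during internal testing.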
+ "POST": ["project:read", "project:write", "project:admin"], + } + + @region_silo_endpoint class ProjectAutofixCreateCodebaseIndexEndpoint(ProjectEndpoint): publish_status = { @@ -27,6 +34,8 @@ class ProjectAutofixCreateCodebaseIndexEndpoint(ProjectEndpoint): owner = ApiOwner.ML_AI private = True + permission_classes = (ProjectAutofixCreateCodebaseIndexPermission,) + def post(self, request: Request, project: Project) -> Response: """ Create a codebase index for for a project's repositories, uses the code mapping to determine which repositories to index From fb72953419e56b78527d2cf1d27788c696f4b52f Mon Sep 17 00:00:00 2001 From: John Date: Wed, 8 May 2024 15:03:26 -0700 Subject: [PATCH 189/376] ref(test): Remove b64 padding from send_metrics.py (#70551) ### Overview Remove b64 padding so send_metrics.py is on parity with relay --- bin/send_metrics.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bin/send_metrics.py b/bin/send_metrics.py index 6e833660d0a7fa..d01d0ccc55ebab 100644 --- a/bin/send_metrics.py +++ b/bin/send_metrics.py @@ -49,7 +49,9 @@ def make_dist_payload(use_case, org_id, rand_str, value_len, b64_encode): "value": ( { "format": "base64", - "data": base64.b64encode(struct.pack(f"<{len(nums)}d", *nums)).decode("ascii"), + "data": base64.b64encode(struct.pack(f"<{len(nums)}d", *nums)) + .replace(b"=", b"") + .decode("ascii"), } if b64_encode else { @@ -80,7 +82,9 @@ def make_set_payload(use_case, org_id, rand_str, value_len, b64_encode): "format": "base64", "data": base64.b64encode( b"".join([num.to_bytes(INT_WIDTH, byteorder="little") for num in nums]) - ).decode("ascii"), + ) + .replace(b"=", b"") + .decode("ascii"), } if b64_encode else { From e91f75f15a9f8cecc474c607920cf49f1fd15a6f Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Wed, 8 May 2024 15:13:57 -0700 Subject: [PATCH 190/376] feat(api-idorslug): Update Team Endpoints to use `organization_id_or_slug` (#70536) A subset of changes from https://github.com/getsentry/sentry/pull/70081/! --- src/sentry/api/bases/team.py | 10 +++++-- .../codeowners/external_actor/team_details.py | 6 ++-- .../codeowners/external_actor/team_index.py | 2 +- src/sentry/api/endpoints/team_projects.py | 4 +-- src/sentry/api/endpoints/team_stats.py | 2 +- src/sentry/api/urls.py | 28 +++++++++---------- src/sentry/apidocs/parameters.py | 7 +++++ tests/apidocs/endpoints/teams/test_by_slug.py | 5 +++- .../apidocs/endpoints/teams/test_projects.py | 5 +++- tests/apidocs/endpoints/teams/test_stats.py | 5 +++- tests/sentry/api/endpoints/test_team_stats.py | 5 +++- 11 files changed, 51 insertions(+), 28 deletions(-) diff --git a/src/sentry/api/bases/team.py b/src/sentry/api/bases/team.py index 9563a571607bf9..51573b0ebf3354 100644 --- a/src/sentry/api/bases/team.py +++ b/src/sentry/api/bases/team.py @@ -36,12 +36,14 @@ def has_object_permission(self, request: Request, view, team): class TeamEndpoint(Endpoint): permission_classes: tuple[type[BasePermission], ...] 
= (TeamPermission,) - def convert_args(self, request: Request, organization_slug, team_id_or_slug, *args, **kwargs): + def convert_args( + self, request: Request, organization_id_or_slug, team_id_or_slug, *args, **kwargs + ): try: if id_or_slug_path_params_enabled(self.convert_args.__qualname__): team = ( Team.objects.filter( - organization__slug__id_or_slug=organization_slug, + organization__slug__id_or_slug=organization_id_or_slug, slug__id_or_slug=team_id_or_slug, ) .select_related("organization") @@ -49,7 +51,9 @@ def convert_args(self, request: Request, organization_slug, team_id_or_slug, *ar ) else: team = ( - Team.objects.filter(organization__slug=organization_slug, slug=team_id_or_slug) + Team.objects.filter( + organization__slug=organization_id_or_slug, slug=team_id_or_slug + ) .select_related("organization") .get() ) diff --git a/src/sentry/api/endpoints/codeowners/external_actor/team_details.py b/src/sentry/api/endpoints/codeowners/external_actor/team_details.py index e06285000816e1..1da8a63ce8e7d1 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/team_details.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/team_details.py @@ -28,14 +28,14 @@ class ExternalTeamDetailsEndpoint(TeamEndpoint, ExternalActorEndpointMixin): def convert_args( self, request: Request, - organization_slug: str, + organization_id_or_slug: int | str, team_id_or_slug: int | str, external_team_id: int, *args: Any, **kwargs: Any, ) -> tuple[Any, Any]: args, kwargs = super().convert_args( - request, organization_slug, team_id_or_slug, *args, **kwargs + request, organization_id_or_slug, team_id_or_slug, *args, **kwargs ) kwargs["external_team"] = self.get_external_actor_or_404( external_team_id, kwargs["team"].organization @@ -47,7 +47,7 @@ def put(self, request: Request, team: Team, external_team: ExternalActor) -> Res Update an External Team ````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team belongs to. :pparam string team_id_or_slug: the id or slug of the team to get. :pparam string external_team_id: id of external_team object diff --git a/src/sentry/api/endpoints/codeowners/external_actor/team_index.py b/src/sentry/api/endpoints/codeowners/external_actor/team_index.py index b3efb832d38e1c..13078fbf086301 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/team_index.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/team_index.py @@ -27,7 +27,7 @@ def post(self, request: Request, team: Team) -> Response: Create an External Team ````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team belongs to. :pparam string team_id_or_slug: the team_id_or_slug of the team to get. 
:param required string provider: enum("github", "gitlab")
diff --git a/src/sentry/api/endpoints/team_projects.py b/src/sentry/api/endpoints/team_projects.py
index cbef15a7fc15ee..97a7932149ab63 100644
--- a/src/sentry/api/endpoints/team_projects.py
+++ b/src/sentry/api/endpoints/team_projects.py
@@ -87,7 +87,7 @@ class TeamProjectsEndpoint(TeamEndpoint, EnvironmentMixin):
     @extend_schema(
         operation_id="List a Team's Projects",
         parameters=[
-            GlobalParams.ORG_SLUG,
+            GlobalParams.ORG_ID_OR_SLUG,
             GlobalParams.TEAM_ID_OR_SLUG,
             CursorQueryParam,
         ],
@@ -142,7 +142,7 @@ def get(self, request: Request, team) -> Response:
         tags=["Projects"],
         operation_id="Create a New Project",
         parameters=[
-            GlobalParams.ORG_SLUG,
+            GlobalParams.ORG_ID_OR_SLUG,
             GlobalParams.TEAM_ID_OR_SLUG,
         ],
         request=ProjectPostSerializer,
diff --git a/src/sentry/api/endpoints/team_stats.py b/src/sentry/api/endpoints/team_stats.py
index 5121cdcde1a065..ab7f261058e7a1 100644
--- a/src/sentry/api/endpoints/team_stats.py
+++ b/src/sentry/api/endpoints/team_stats.py
@@ -33,7 +33,7 @@ def get(self, request: Request, team) -> Response:
         Query ranges are limited to Sentry's configured time-series resolutions.
 
-        :pparam string organization_slug: the slug of the organization.
+        :pparam string organization_id_or_slug: the id or slug of the organization.
         :pparam string team_id_or_slug: the id or slug of the team.
         :qparam string stat: the name of the stat to query (``"received"``,
                              ``"rejected"``)
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index 1b2ce693f3bf7e..b5f49725675a76 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -2766,72 +2766,72 @@
 TEAM_URLS = [
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/$",
         TeamDetailsEndpoint.as_view(),
         name="sentry-api-0-team-details",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/issues/old/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/issues/old/$",
         TeamGroupsOldEndpoint.as_view(),
         name="sentry-api-0-team-oldest-issues",
    ),
    re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/release-count/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/release-count/$",
         TeamReleaseCountEndpoint.as_view(),
         name="sentry-api-0-team-release-count",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/time-to-resolution/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/time-to-resolution/$",
         TeamTimeToResolutionEndpoint.as_view(),
         name="sentry-api-0-team-time-to-resolution",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/unresolved-issue-age/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/unresolved-issue-age/$",
         TeamUnresolvedIssueAgeEndpoint.as_view(),
         name="sentry-api-0-team-unresolved-issue-age",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/alerts-triggered/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/alerts-triggered/$",
         TeamAlertsTriggeredTotalsEndpoint.as_view(),
         name="sentry-api-0-team-alerts-triggered",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/alerts-triggered-index/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/alerts-triggered-index/$",
         TeamAlertsTriggeredIndexEndpoint.as_view(),
         name="sentry-api-0-team-alerts-triggered-index",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/issue-breakdown/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/issue-breakdown/$",
         TeamIssueBreakdownEndpoint.as_view(),
         name="sentry-api-0-team-issue-breakdown",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/all-unresolved-issues/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/all-unresolved-issues/$",
         TeamAllUnresolvedIssuesEndpoint.as_view(),
         name="sentry-api-0-team-all-unresolved-issues",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/members/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?P<team_id_or_slug>[^\/]+)/members/$",
         TeamMembersEndpoint.as_view(),
name="sentry-api-0-team-members", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/projects/$", + r"^(?P[^\/]+)/(?P[^\/]+)/projects/$", TeamProjectsEndpoint.as_view(), name="sentry-api-0-team-project-index", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/stats/$", + r"^(?P[^\/]+)/(?P[^\/]+)/stats/$", TeamStatsEndpoint.as_view(), name="sentry-api-0-team-stats", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/external-teams/$", + r"^(?P[^\/]+)/(?P[^\/]+)/external-teams/$", ExternalTeamEndpoint.as_view(), name="sentry-api-0-external-team", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/external-teams/(?P[^\/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/external-teams/(?P[^\/]+)/$", ExternalTeamDetailsEndpoint.as_view(), name="sentry-api-0-external-team-details", ), diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py index ce3c6353e82e22..e9d553324d64e1 100644 --- a/src/sentry/apidocs/parameters.py +++ b/src/sentry/apidocs/parameters.py @@ -25,6 +25,13 @@ class GlobalParams: type=str, location="path", ) + ORG_ID_OR_SLUG = OpenApiParameter( + name="organization_id_or_slug", + description="The id or slug of the organization the resource belongs to.", + required=True, + type=str, + location="path", + ) PROJECT_ID_OR_SLUG = OpenApiParameter( name="project_id_or_slug", description="The id or slug of the project the resource belongs to.", diff --git a/tests/apidocs/endpoints/teams/test_by_slug.py b/tests/apidocs/endpoints/teams/test_by_slug.py index ef2f8db8fbcb01..bb90362b5f12cb 100644 --- a/tests/apidocs/endpoints/teams/test_by_slug.py +++ b/tests/apidocs/endpoints/teams/test_by_slug.py @@ -10,7 +10,10 @@ def setUp(self): self.url = reverse( "sentry-api-0-team-details", - kwargs={"organization_slug": self.organization.slug, "team_id_or_slug": team.slug}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "team_id_or_slug": team.slug, + }, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/teams/test_projects.py b/tests/apidocs/endpoints/teams/test_projects.py index 6622e00a99ccd0..b99e20f058ce62 100644 --- a/tests/apidocs/endpoints/teams/test_projects.py +++ b/tests/apidocs/endpoints/teams/test_projects.py @@ -11,7 +11,10 @@ def setUp(self): self.url = reverse( "sentry-api-0-team-project-index", - kwargs={"organization_slug": self.organization.slug, "team_id_or_slug": team.slug}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "team_id_or_slug": team.slug, + }, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/teams/test_stats.py b/tests/apidocs/endpoints/teams/test_stats.py index f6842afd1f15f7..2958f775b10b76 100644 --- a/tests/apidocs/endpoints/teams/test_stats.py +++ b/tests/apidocs/endpoints/teams/test_stats.py @@ -15,7 +15,10 @@ def setUp(self): self.url = reverse( "sentry-api-0-team-stats", - kwargs={"organization_slug": self.organization.slug, "team_id_or_slug": self.team.slug}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "team_id_or_slug": self.team.slug, + }, ) self.login_as(user=self.user) diff --git a/tests/sentry/api/endpoints/test_team_stats.py b/tests/sentry/api/endpoints/test_team_stats.py index 9b925967d3c2fe..ccb62db4746c66 100644 --- a/tests/sentry/api/endpoints/test_team_stats.py +++ b/tests/sentry/api/endpoints/test_team_stats.py @@ -29,7 +29,10 @@ def test_simple(self): url = reverse( "sentry-api-0-team-stats", - kwargs={"organization_slug": team.organization.slug, "team_id_or_slug": team.slug}, + kwargs={ + "organization_id_or_slug": team.organization.slug, + "team_id_or_slug": team.slug, + }, ) 
response = self.client.get(url) assert response.status_code == 200, response.content From e5cfbd8d285f8fdb9c662e00fe06a2bff1b85c19 Mon Sep 17 00:00:00 2001 From: Stephen Cefali Date: Wed, 8 May 2024 15:38:27 -0700 Subject: [PATCH 191/376] feat(snub-heavy-search): adds exceptions to pre-fetch groupids from postgres under certain conditions (#70527) There are certain filters such as `bookmarked_by`, `linked`, `subscribed_by` that require us to do joins in Postgres to other tables. These tables do not exist in Snuba and likely never will because these queries are somewhat uncommon. As a result, this PR adds pre-fetching group_ids when users are trying to filter with one of these conditions with the new `GroupAttributesPostgresSnubaQueryExecutor`. The long term goal is to remove `UNSUPPORTED_SNUBA_FILTERS` so we can have all queries use `GroupAttributesPostgresSnubaQueryExecutor` instead of `PostgresSnubaQueryExecutor`. --- .../endpoints/organization_group_index.py | 3 - src/sentry/search/snuba/backend.py | 34 +++-- src/sentry/search/snuba/executors.py | 32 +++++ .../test_organization_group_index.py | 131 +++++++++++++++++- 4 files changed, 183 insertions(+), 17 deletions(-) diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py index 204770007b83d8..0055cac2b7237c 100644 --- a/src/sentry/issues/endpoints/organization_group_index.py +++ b/src/sentry/issues/endpoints/organization_group_index.py @@ -51,9 +51,6 @@ # these filters are currently not supported in the snuba only search # and will use PostgresSnubaQueryExecutor instead of GroupAttributesPostgresSnubaQueryExecutor UNSUPPORTED_SNUBA_FILTERS = [ - "bookmarked_by", - "linked", - "subscribed_by", "regressed_in_release", "issue.priority", "firstRelease", diff --git a/src/sentry/search/snuba/backend.py b/src/sentry/search/snuba/backend.py index a0b91edc42a0bb..3315f26d0bb1b7 100644 --- a/src/sentry/search/snuba/backend.py +++ b/src/sentry/search/snuba/backend.py @@ -35,6 +35,7 @@ from sentry.search.base import SearchBackend from sentry.search.events.constants import EQUALITY_OPERATORS, OPERATOR_TO_DJANGO from sentry.search.snuba.executors import ( + POSTGRES_ONLY_SEARCH_FIELDS, AbstractQueryExecutor, InvalidQueryForExecutor, PostgresSnubaQueryExecutor, @@ -493,16 +494,11 @@ def query( retention_window_start = None if use_group_snuba_dataset: - # just use the basic group initialization query which prevents us from - # returning groups that are pending deletion or merge - # this query is only used after we query snuba to filter out groups we don't want - group_queryset = Group.objects.filter(project__in=projects).exclude( - status__in=[ - GroupStatus.PENDING_DELETION, - GroupStatus.DELETION_IN_PROGRESS, - GroupStatus.PENDING_MERGE, - ] - ) + # we need to handle two cases fo the group queryset: + # 1. Limit results to groups that are not pending deletion or merge + # 2. 
Handle queries snuba doesn't support such as bookmarked_by, linked, subscribed_by + # For the second case, we hit postgres before Snuba to get the group ids + group_queryset = self._build_limited_group_queryset(projects, search_filters) else: group_queryset = self._build_group_queryset( @@ -593,6 +589,24 @@ def query( return query_results + def _build_limited_group_queryset( + self, projects: Sequence[Project], search_filters: Sequence[SearchFilter] + ) -> QuerySet: + """ + Builds a group queryset to handle joins for data that doesn't exist in Clickhouse on the group_attributes dataset + """ + # Filter search_filters to only include 'bookmarked_by', 'linked', 'subscribed_by' + filtered_search_filters = [ + sf for sf in search_filters if sf.key.name in POSTGRES_ONLY_SEARCH_FIELDS + ] + # Use the filtered search filters for further processing + return self._build_group_queryset( + projects=projects, + environments=None, + search_filters=filtered_search_filters, + retention_window_start=None, + ) + def _build_group_queryset( self, projects: Sequence[Project], diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py index 1ce875f6ea06c5..8f94f3dcb957f0 100644 --- a/src/sentry/search/snuba/executors.py +++ b/src/sentry/search/snuba/executors.py @@ -93,6 +93,11 @@ class Clauses(Enum): WHERE = auto() +# we cannot use snuba for these fields because they require a join with tables that don't exist there +# if we ever see these fields, we will use postgres to get the group_ids before sending back to ClickHouse +POSTGRES_ONLY_SEARCH_FIELDS = ["bookmarked_by", "linked", "subscribed_by"] + + @dataclass class TrendsParams: # (event or issue age_hours) / (event or issue halflife hours) @@ -1538,6 +1543,17 @@ def query( if len(projects) == 0: return self.empty_result + # Check if any search filters are in POSTGRES_ONLY_SEARCH_FIELDS + search_filters = search_filters or () + group_ids_to_pass_to_snuba = None + if any(sf.key.name in POSTGRES_ONLY_SEARCH_FIELDS for sf in search_filters): + group_ids_to_pass_to_snuba = list(group_queryset.values_list("id", flat=True)) + + # remove the search filters that are only for postgres + search_filters = [ + sf for sf in search_filters if sf.key.name not in POSTGRES_ONLY_SEARCH_FIELDS + ] + organization = projects[0].organization event_entity = self.entities["event"] @@ -1564,6 +1580,22 @@ def query( Condition(Column("timestamp", joined_entity), Op.LT, end), ] having = [] + # if we need to prefetch from postgres, we add filter by the group ids + if group_ids_to_pass_to_snuba is not None: + # will not find any matches, we can return early + if len(group_ids_to_pass_to_snuba) == 0: + return self.empty_result + + # limit groups and events to the group ids + for entity_with_group_id in [attr_entity, joined_entity]: + where_conditions.append( + Condition( + Column("group_id", entity_with_group_id), + Op.IN, + group_ids_to_pass_to_snuba, + ) + ) + for search_filter in search_filters or (): # use the stored function if it exists in our mapping, otherwise use the basic lookup lookup = self.group_conditions_lookup.get(search_filter.key.name) diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py index 2f114e7d8c43de..9c029cec859c1a 100644 --- a/tests/sentry/issues/endpoints/test_organization_group_index.py +++ b/tests/sentry/issues/endpoints/test_organization_group_index.py @@ -2777,10 +2777,6 @@ def test_snuba_assignee_filter(self): def 
test_snuba_unsupported_filters(self):
         self.login_as(user=self.user)
         for query in [
-            "bookmarks:me",
-            "is:linked",
-            "is:unlinked",
-            "subscribed:me",
             "regressed_in_release:latest",
             "issue.priority:high",
         ]:
@@ -3132,6 +3128,133 @@ def test_first_seen_and_last_seen_filters(self):
         )
         assert len(response.data) == 0

+    @override_options({"issues.group_attributes.send_kafka": True})
+    def test_filter_by_bookmarked_by(self):
+        self.login_as(user=self.user)
+        project = self.project
+        user2 = self.create_user(email="user2@example.com")
+
+        # Create two issues, one bookmarked by each user
+        event1 = self.store_event(
+            data={
+                "timestamp": iso_format(before_now(minutes=1)),
+                "message": "Error 1",
+                "fingerprint": ["group-1"],
+            },
+            project_id=project.id,
+        )
+        group1 = event1.group
+        GroupBookmark.objects.create(user_id=self.user.id, group=group1, project_id=project.id)
+
+        event2 = self.store_event(
+            data={
+                "timestamp": iso_format(before_now(minutes=1)),
+                "message": "Error 2",
+                "fingerprint": ["group-2"],
+            },
+            project_id=project.id,
+        )
+        group2 = event2.group
+        GroupBookmark.objects.create(user_id=user2.id, group=group2, project_id=project.id)
+
+        # Filter by bookmarked_by the first user
+        response = self.get_success_response(
+            query=f"bookmarked_by:{self.user.email}", useGroupSnubaDataset=1
+        )
+        assert len(response.data) == 1
+        assert int(response.data[0]["id"]) == group1.id
+
+        # Filter by bookmarked_by the second user
+        response = self.get_success_response(
+            query=f"bookmarked_by:{user2.email}", useGroupSnubaDataset=1
+        )
+        assert len(response.data) == 1
+        assert int(response.data[0]["id"]) == group2.id
+
+    @override_options({"issues.group_attributes.send_kafka": True})
+    def test_filter_by_linked(self):
+        self.login_as(user=self.user)
+        project = self.project
+
+        # Create two issues, one linked and one not linked
+        event1 = self.store_event(
+            data={
+                "timestamp": iso_format(before_now(minutes=1)),
+                "message": "Error 1",
+                "fingerprint": ["group-1"],
+            },
+            project_id=project.id,
+        )
+        group1 = event1.group
+        GroupLink.objects.create(
+            group_id=group1.id,
+            project=project,
+            linked_type=GroupLink.LinkedType.issue,
+            linked_id=1,
+        )
+        event2 = self.store_event(
+            data={
+                "timestamp": iso_format(before_now(minutes=1)),
+                "message": "Error 2",
+                "fingerprint": ["group-2"],
+            },
+            project_id=project.id,
+        )
+        group2 = event2.group
+
+        # Filter by linked issues
+        response = self.get_success_response(query="is:linked", useGroupSnubaDataset=1)
+        assert len(response.data) == 1
+        assert int(response.data[0]["id"]) == group1.id
+
+        # Ensure the linked issue is not returned for is:unlinked
+        response = self.get_success_response(query="is:unlinked", useGroupSnubaDataset=1)
+        assert len(response.data) == 1
+        assert int(response.data[0]["id"]) == group2.id
+
+    @override_options({"issues.group_attributes.send_kafka": True})
+    def test_filter_by_subscribed_by(self):
+        self.login_as(user=self.user)
+        project = self.project
+
+        # Create two issues, one subscribed by user1 and one not subscribed
+        event1 = self.store_event(
+            data={
+                "timestamp": iso_format(before_now(minutes=1)),
+                "message": "Error 1",
+                "fingerprint": ["group-1"],
+            },
+            project_id=project.id,
+        )
+        group1 = event1.group
+        GroupSubscription.objects.create(
+            user_id=self.user.id,
+            group=group1,
+            project=project,
+            is_active=True,
+        )
+        self.store_event(
+            data={
+                "timestamp": iso_format(before_now(minutes=1)),
+                "message": "Error 2",
+                "fingerprint": ["group-2"],
+            },
+            project_id=project.id,
+        )
+
+        # Filter by subscriptions
+        response = self.get_success_response(
+            query=f"subscribed:{self.user.email}", useGroupSnubaDataset=1
+        )
+        assert len(response.data) == 1
+        assert int(response.data[0]["id"]) == group1.id
+
+        # ensure we don't return any results
+        response = self.get_success_response(
+            query="subscribed:fake@fake.com", useGroupSnubaDataset=1
+        )
+        assert len(response.data) == 0
+

 class GroupUpdateTest(APITestCase, SnubaTestCase):
     endpoint = "sentry-api-0-organization-group-index"
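Taken together, the executor change in this patch amounts to a two-phase query: Postgres first resolves the join-heavy filters to a concrete set of group ids, and only then is Snuba queried, with that id list ANDed in as a `Condition(Column("group_id", ...), Op.IN, ...)` as the diff above shows. A minimal, self-contained sketch of the filter split (the `FakeFilter` class and `split_filters` helper are illustrative stand-ins, not the real `SearchFilter` machinery):

```python
from dataclasses import dataclass

POSTGRES_ONLY_SEARCH_FIELDS = ["bookmarked_by", "linked", "subscribed_by"]

@dataclass
class FakeFilter:
    """Illustrative stand-in for sentry's SearchFilter, with key.name flattened."""
    name: str

def split_filters(search_filters):
    # Phase 1 inputs: filters that need Postgres joins (bookmarks, links,
    # subscriptions). Phase 2 inputs: everything Snuba can evaluate itself.
    postgres_only = [f for f in search_filters if f.name in POSTGRES_ONLY_SEARCH_FIELDS]
    snuba_safe = [f for f in search_filters if f.name not in POSTGRES_ONLY_SEARCH_FIELDS]
    return postgres_only, snuba_safe

filters = [FakeFilter("bookmarked_by"), FakeFilter("issue.priority")]
pg, snuba = split_filters(filters)
assert [f.name for f in pg] == ["bookmarked_by"]
assert [f.name for f in snuba] == ["issue.priority"]
```

Note the early-return in the real code: if the Postgres phase yields an empty id list, the Snuba query is skipped entirely and an empty result is returned.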
From ee37da492a4a050390ce297f299e386e196b372a Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 8 May 2024 16:07:37 -0700
Subject: [PATCH 192/376] fix(billing): Remove explicit reserved legend (#70548)

---
 .../views/organizationStats/usageChart/index.tsx | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx
index ff627642c31794..6d8f7614d1c012 100644
--- a/static/app/views/organizationStats/usageChart/index.tsx
+++ b/static/app/views/organizationStats/usageChart/index.tsx
@@ -118,7 +118,6 @@ const enum SeriesTypes {
   ACCEPTED = 'Accepted',
   DROPPED = 'Dropped',
   PROJECTED = 'Projected',
-  RESERVED = 'Reserved',
   FILTERED = 'Filtered',
 }

@@ -413,13 +412,13 @@ function UsageChartBody({

   function chartLegendData() {
     const legend: LegendComponentOption['data'] = [
-      chartData.reserved && chartData.reserved.length > 0
-        ? {
-            name: SeriesTypes.RESERVED,
-          }
-        : {
-            name: SeriesTypes.ACCEPTED,
-          },
+      ...(chartData.reserved && chartData.reserved.length > 0
+        ? []
+        : [
+            {
+              name: SeriesTypes.ACCEPTED,
+            },
+          ]),
     ];

     if (chartData.filtered && chartData.filtered.length > 0) {
From 8ff17a99a1ed3c4b7d84daccfa5a3fd3077cf935 Mon Sep 17 00:00:00 2001
From: Raj Joshi
Date: Wed, 8 May 2024 16:59:54 -0700
Subject: [PATCH 193/376] chore(chartcuterie): Revert Style Changes (#70558)

This reverts 1354478967d580699107222d42a95373b5a4dcaa and
173b69012db4737336284ebb4c75adbf08704e70

---
 static/app/chartcuterie/performance.tsx | 19 +------------------
 1 file changed, 1 insertion(+), 18 deletions(-)

diff --git a/static/app/chartcuterie/performance.tsx b/static/app/chartcuterie/performance.tsx
index 452d440eecddcf..a4d01c31df2c2f 100644
--- a/static/app/chartcuterie/performance.tsx
+++ b/static/app/chartcuterie/performance.tsx
@@ -1,4 +1,3 @@
-import type {LineChartProps} from 'sentry/components/charts/lineChart';
 import {transformToLineSeries} from 'sentry/components/charts/lineChart';
 import getBreakpointChartOptionsFromData, {
   type EventBreakpointChartData,
@@ -11,33 +10,17 @@ import {ChartType} from './types';

 export const performanceCharts: RenderDescriptor<ChartType>[] = [];

-function modifyOptionsForSlack(options: Omit<LineChartProps, 'series'>) {
-  options.legend = options.legend || {};
-  options.legend.icon = 'none';
-  options.legend.left = '25';
-  options.legend.top = '20';
-
-  return {
-    ...options,
-    grid: slackChartDefaults.grid,
-    visualMap: options.options?.visualMap,
-  };
-}
-
 performanceCharts.push({
   key: ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION,
   getOption: (data: EventBreakpointChartData) => {
     const {chartOptions, series} = getBreakpointChartOptionsFromData(data, theme);
     const transformedSeries = transformToLineSeries({series});
-    const modifiedOptions = modifyOptionsForSlack(chartOptions);

     return {
-      ...modifiedOptions,
-
+      ...chartOptions,
       backgroundColor: theme.background,
       series: transformedSeries,
       grid: slackChartDefaults.grid,
-      visualMap: modifiedOptions.options?.visualMap,
     };
   },
   ...slackChartSize,
From
4fd036557e9cec85260f7fd1d7e507c8d93590f4 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Wed, 8 May 2024 17:07:13 -0700 Subject: [PATCH 194/376] cogs(access-logs): dont log relay endpoint requests (#70560) these logs are noisy, and aren't used to monitor api usage. these requests are also mainly internal. --- src/sentry/conf/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 6791d2fc6ed3ba..e8a466e2bd4ef8 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3707,7 +3707,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # We should not run access logging middleware on some endpoints as # it is very noisy, and these views are hit by internal services. -ACCESS_LOGS_EXCLUDE_PATHS = ("/api/0/internal/",) +ACCESS_LOGS_EXCLUDE_PATHS = ("/api/0/internal/", "/api/0/relays/") VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON = True DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL = False From 4ad0dfcd8567eb0628b237b228da31c37cad755b Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Wed, 8 May 2024 18:11:54 -0700 Subject: [PATCH 195/376] fix(feedback): more prompt engineering (#70565) was having issues with commas escaping the prompt --- .../feedback/usecases/spam_detection.py | 34 +++++++++++-------- .../feedback/usecases/test_create_feedback.py | 4 +-- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/src/sentry/feedback/usecases/spam_detection.py b/src/sentry/feedback/usecases/spam_detection.py index 3fa6967e3611f7..8ee08fe691c67e 100644 --- a/src/sentry/feedback/usecases/spam_detection.py +++ b/src/sentry/feedback/usecases/spam_detection.py @@ -5,18 +5,25 @@ logger = logging.getLogger(__name__) -PROMPT = """ -Please analyze the following input and output `spam` if the input is not coherent, and `not spam` if it is coherent. -Some example responses: - asdfasdf,spam - It doesn't work,not spam - es funktioniert nicht, not spam - لا يعمل,not spam, - Nothing,spam - ..,spam - hey,spam -Complete the following: -""" + +def make_input_prompt(input): + return f"""**Classification Task** +**Instructions: Please analyze the following input and output `spam` if the input is not coherent, and `notspam` if it is coherent.** +**Label Options:** spam, notspam + +**Few-shot Examples:** +* **Example 1:** "asdasdfasd" -> spam +* **Example 2:** "It doesn't work," -> notspam +* **Example 3:** "es funktioniert nicht" -> notspam +* **Example 4:** "is there another way to do payment?" -> notspam +* **Example 5:** "this thing does not function how it should" -> notspam +* **Example 6:** "i was playing a great game now it crashed" -> notspam +* **Example 7:** "i can't login to my account wtf??!" 
-> notspam +* **Example 8:** "ฉันไม่สามารถเข้าสู่ระบบและไม่มีอะไรทำงาน " -> notspam + +**Input Text:** "{input}" + +**Classify:** """ @metrics.wraps("feedback.spam_detection", sample_rate=1.0) @@ -25,8 +32,7 @@ def is_spam(message): trimmed_response = "" response = complete_prompt( usecase=LLMUseCase.SPAM_DETECTION, - prompt=PROMPT, - message=message + ",", # add a comma so it knows to complete the csv + message=make_input_prompt(message), temperature=0, max_output_tokens=20, ) diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index 1e213d037f7deb..16067738a07dcf 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -505,7 +505,7 @@ def dummy_response(*args, **kwargs): message=ChatCompletionMessage( content=( "spam" - if kwargs["messages"][1]["content"] == "This is definitely spam," + if "This is definitely spam" in kwargs["messages"][0]["content"] else "not spam" ), role="assistant", @@ -603,7 +603,7 @@ def dummy_response(*args, **kwargs): message=ChatCompletionMessage( content=( "spam" - if kwargs["messages"][1]["content"] == "This is definitely spam" + if kwargs["messages"][0]["content"] == "This is definitely spam" else "not spam" ), role="assistant", From 4d8451c96121a4f5fec26e5db6c616000e5b83ba Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Wed, 8 May 2024 19:02:11 -0700 Subject: [PATCH 196/376] feat(api-idorslug): Update Some Endpoints use `organization_id_or_slug` (#70556) A subset of changes from https://github.com/getsentry/sentry/pull/70081! --- src/sentry/api/bases/group.py | 14 ++++++++------ src/sentry/api/bases/sentryapps.py | 12 +++++++----- src/sentry/api/urls.py | 4 ++-- .../test_sentry_app_installations.py | 2 +- 4 files changed, 18 insertions(+), 14 deletions(-) diff --git a/src/sentry/api/bases/group.py b/src/sentry/api/bases/group.py index d4f4974c8536bc..0391d5de481b2d 100644 --- a/src/sentry/api/bases/group.py +++ b/src/sentry/api/bases/group.py @@ -40,7 +40,9 @@ class GroupEndpoint(Endpoint): owner = ApiOwner.ISSUES permission_classes = (GroupPermission,) - def convert_args(self, request: Request, issue_id, organization_slug=None, *args, **kwargs): + def convert_args( + self, request: Request, issue_id, organization_id_or_slug=None, *args, **kwargs + ): # TODO(tkaemming): Ideally, this would return a 302 response, rather # than just returning the data that is bound to the new group. (It # technically shouldn't be a 301, since the response could change again @@ -51,17 +53,17 @@ def convert_args(self, request: Request, issue_id, organization_slug=None, *args # string replacement, or making the endpoint aware of the URL pattern # that caused it to be dispatched, and reversing it with the correct # `issue_id` keyword argument. 
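The hunk that follows switches `GroupEndpoint.convert_args` to the shared resolution rule used throughout this migration: an all-digit path segment is treated as a primary-key id, anything else as a slug. A condensed sketch of that rule (simplified; the real code additionally gates on `id_or_slug_path_params_enabled` per callsite and goes through Sentry's cache-aware managers):

```python
def resolve_org_kwarg(id_or_slug) -> dict:
    """Illustrative: map a path segment to the ORM lookup kwarg."""
    if str(id_or_slug).isdecimal():
        return {"id": int(id_or_slug)}  # numeric segment -> primary-key lookup
    return {"slug": str(id_or_slug)}    # anything else -> slug lookup

assert resolve_org_kwarg("42") == {"id": 42}
assert resolve_org_kwarg("my-org") == {"slug": "my-org"}
```

One consequence of this convention is that a purely numeric slug can no longer be addressed by slug on these routes, which is presumably why the rollout is option-gated callsite by callsite.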
-        if organization_id_or_slug:
+        if organization_id_or_slug:
             try:
                 if (
                     id_or_slug_path_params_enabled(
-                        self.convert_args.__qualname__, str(organization_slug)
+                        self.convert_args.__qualname__, str(organization_id_or_slug)
                     )
-                    and str(organization_slug).isdecimal()
+                    and str(organization_id_or_slug).isdecimal()
                 ):
-                    organization = Organization.objects.get_from_cache(id=organization_slug)
+                    organization = Organization.objects.get_from_cache(id=organization_id_or_slug)
                 else:
-                    organization = Organization.objects.get_from_cache(slug=organization_slug)
+                    organization = Organization.objects.get_from_cache(slug=organization_id_or_slug)
             except Organization.DoesNotExist:
                 raise ResourceDoesNotExist
diff --git a/src/sentry/api/bases/sentryapps.py b/src/sentry/api/bases/sentryapps.py
index ca9c87b2b6ff2a..2d4428612bd4e3 100644
--- a/src/sentry/api/bases/sentryapps.py
+++ b/src/sentry/api/bases/sentryapps.py
@@ -325,22 +325,24 @@ def has_object_permission(self, request: Request, view, organization):
 class SentryAppInstallationsBaseEndpoint(IntegrationPlatformEndpoint):
     permission_classes = (SentryAppInstallationsPermission,)

-    def convert_args(self, request: Request, organization_slug, *args, **kwargs):
+    def convert_args(self, request: Request, organization_id_or_slug, *args, **kwargs):
         extra_args = {}
         # We need to pass user_id if the user is not a superuser
         if not is_active_superuser(request):
             extra_args["user_id"] = request.user.id

         if (
-            id_or_slug_path_params_enabled(self.convert_args.__qualname__, str(organization_slug))
-            and str(organization_slug).isdecimal()
+            id_or_slug_path_params_enabled(
+                self.convert_args.__qualname__, str(organization_id_or_slug)
+            )
+            and str(organization_id_or_slug).isdecimal()
         ):
             organization = organization_service.get_org_by_id(
-                id=int(organization_slug), **extra_args
+                id=int(organization_id_or_slug), **extra_args
             )
         else:
             organization = organization_service.get_org_by_slug(
-                slug=organization_slug, **extra_args
+                slug=str(organization_id_or_slug), **extra_args
             )

         if organization is None:
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index b5f49725675a76..95be8f266235b8 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -1113,7 +1113,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
         name="sentry-api-0-organization-details",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/(?:issues|groups)/",
+        r"^(?P<organization_id_or_slug>[^\/]+)/(?:issues|groups)/",
         include(create_group_urls("sentry-api-0-organization-group")),
     ),
     # Alert Rules
@@ -1870,7 +1870,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
         name="sentry-api-0-organization-user-details",
     ),
     re_path(
-        r"^(?P<organization_slug>[^\/]+)/sentry-app-installations/$",
+        r"^(?P<organization_id_or_slug>[^\/]+)/sentry-app-installations/$",
         SentryAppInstallationsEndpoint.as_view(),
         name="sentry-api-0-sentry-app-installations",
     ),
diff --git a/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py b/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py
index ede99a9fd3d959..141a12b9c6f524 100644
--- a/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py
+++ b/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py
@@ -24,7 +24,7 @@ def setUp(self):
         self.login_as(user=self.user)
         self.url = reverse(
             "sentry-api-0-sentry-app-installations",
-            kwargs={"organization_slug": self.org.slug},
+            kwargs={"organization_id_or_slug": self.org.slug},
         )

     def test_get(self):
From afbae7062b2ebd88913e3ded2bee70bca77e01ec Mon Sep 17 00:00:00 2001
From: Raj Joshi Date: Wed, 8 May 2024 20:02:39 -0700 Subject: [PATCH 197/376] chore(api-idorslug): Updating Comments to Reflect Support for `organization_id_or_slug` (#70568) I am breaking apart a large PR I had to rollout `organization_id_or_slug` changes, this pr just changes some comments en-masse. --- src/sentry/api/endpoints/artifact_bundles.py | 4 ++-- src/sentry/api/endpoints/artifact_lookup.py | 2 +- src/sentry/api/endpoints/codeowners/details.py | 2 +- .../codeowners/external_actor/user_details.py | 2 +- .../codeowners/external_actor/user_index.py | 2 +- src/sentry/api/endpoints/codeowners/index.py | 2 +- src/sentry/api/endpoints/debug_files.py | 12 ++++++------ src/sentry/api/endpoints/event_attachment_details.py | 2 +- src/sentry/api/endpoints/event_attachments.py | 2 +- src/sentry/api/endpoints/event_reprocessable.py | 2 +- src/sentry/api/endpoints/filechange.py | 2 +- src/sentry/api/endpoints/group_tombstone_details.py | 2 +- .../api/endpoints/organization_api_key_details.py | 6 +++--- .../endpoints/organization_code_mapping_details.py | 2 +- .../api/endpoints/organization_code_mappings.py | 4 ++-- src/sentry/api/endpoints/organization_dashboards.py | 4 ++-- src/sentry/api/endpoints/organization_details.py | 6 +++--- src/sentry/api/endpoints/organization_eventid.py | 2 +- .../organization_issues_resolved_in_release.py | 2 +- .../organization_member/requests/invite/details.py | 4 ++-- .../organization_member/requests/invite/index.py | 2 +- .../api/endpoints/organization_processingissues.py | 2 +- .../endpoints/organization_projects_experiment.py | 2 +- .../organization_projects_sent_first_event.py | 2 +- .../api/endpoints/organization_release_commits.py | 2 +- .../api/endpoints/organization_release_details.py | 6 +++--- .../endpoints/organization_release_file_details.py | 6 +++--- .../api/endpoints/organization_release_files.py | 4 ++-- .../api/endpoints/organization_release_meta.py | 2 +- src/sentry/api/endpoints/organization_releases.py | 2 +- src/sentry/api/endpoints/organization_shortid.py | 2 +- src/sentry/api/endpoints/organization_slugs.py | 2 +- src/sentry/api/endpoints/organization_stats.py | 2 +- .../api/endpoints/organization_user_reports.py | 2 +- src/sentry/api/endpoints/organization_users.py | 2 +- .../project_artifact_bundle_file_details.py | 2 +- .../api/endpoints/project_artifact_bundle_files.py | 2 +- src/sentry/api/endpoints/project_commits.py | 2 +- src/sentry/api/endpoints/project_environments.py | 2 +- src/sentry/api/endpoints/project_event_details.py | 2 +- src/sentry/api/endpoints/project_events.py | 2 +- src/sentry/api/endpoints/project_group_index.py | 6 +++--- .../endpoints/project_issues_resolved_in_release.py | 2 +- .../endpoints/project_performance_issue_settings.py | 2 +- src/sentry/api/endpoints/project_release_commits.py | 2 +- src/sentry/api/endpoints/project_release_details.py | 6 +++--- .../api/endpoints/project_release_file_details.py | 6 +++--- src/sentry/api/endpoints/project_release_files.py | 4 ++-- .../api/endpoints/project_release_repositories.py | 2 +- src/sentry/api/endpoints/project_release_stats.py | 2 +- src/sentry/api/endpoints/project_releases.py | 4 ++-- .../api/endpoints/project_servicehook_details.py | 6 +++--- src/sentry/api/endpoints/project_servicehooks.py | 4 ++-- src/sentry/api/endpoints/project_stats.py | 2 +- src/sentry/api/endpoints/project_tagkey_values.py | 2 +- src/sentry/api/endpoints/project_teams.py | 2 +- src/sentry/api/endpoints/project_transfer.py | 2 +- src/sentry/api/endpoints/project_user_reports.py | 4 
++-- src/sentry/api/endpoints/project_users.py | 2 +- src/sentry/api/endpoints/team_details.py | 4 ++-- .../issues/endpoints/organization_group_index.py | 6 +++--- .../organization_release_previous_commits.py | 2 +- 62 files changed, 95 insertions(+), 95 deletions(-) diff --git a/src/sentry/api/endpoints/artifact_bundles.py b/src/sentry/api/endpoints/artifact_bundles.py index acfbcd65da0888..0978afd672334f 100644 --- a/src/sentry/api/endpoints/artifact_bundles.py +++ b/src/sentry/api/endpoints/artifact_bundles.py @@ -67,7 +67,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of artifact bundles for a given project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the artifact bundle belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the artifact bundles of. @@ -121,7 +121,7 @@ def delete(self, request: Request, project) -> Response: Delete all artifacts inside given archive. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the archive belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the archive of. diff --git a/src/sentry/api/endpoints/artifact_lookup.py b/src/sentry/api/endpoints/artifact_lookup.py index c088f3d877d10e..62dbbb3f4a6ed2 100644 --- a/src/sentry/api/endpoints/artifact_lookup.py +++ b/src/sentry/api/endpoints/artifact_lookup.py @@ -104,7 +104,7 @@ def get(self, request: Request, project: Project) -> Response: Retrieve a list of individual artifacts or artifact bundles for a given project. - :pparam string organization_slug: the slug of the organization to query. + :pparam string organization_slug: the id or slug of the organization to query. :pparam string project_id_or_slug: the id or slug of the project to query. :qparam string debug_id: if set, will query and return the artifact bundle that matches the given `debug_id`. diff --git a/src/sentry/api/endpoints/codeowners/details.py b/src/sentry/api/endpoints/codeowners/details.py index e34240051abdb7..3fb5093be8870b 100644 --- a/src/sentry/api/endpoints/codeowners/details.py +++ b/src/sentry/api/endpoints/codeowners/details.py @@ -58,7 +58,7 @@ def put(self, request: Request, project: Project, codeowners: ProjectCodeOwners) Update a CodeOwners ````````````` - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project to get. :pparam string codeowners_id: id of codeowners object :param string raw: the raw CODEOWNERS text diff --git a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py index 4d94716b9b544d..4192f120a6eeb4 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py @@ -48,7 +48,7 @@ def put( Update an External User ````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the user belongs to. :pparam int user_id: the User id. 
:pparam string external_user_id: id of external_user object diff --git a/src/sentry/api/endpoints/codeowners/external_actor/user_index.py b/src/sentry/api/endpoints/codeowners/external_actor/user_index.py index e1bf71156b00c9..737eb9794216bb 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/user_index.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/user_index.py @@ -27,7 +27,7 @@ def post(self, request: Request, organization: Organization) -> Response: Create an External User ````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the user belongs to. :param required string provider: enum("github", "gitlab", "slack") :param required string external_name: the associated username for this provider. diff --git a/src/sentry/api/endpoints/codeowners/index.py b/src/sentry/api/endpoints/codeowners/index.py index 333c52e1dd7176..6159398dc03f0a 100644 --- a/src/sentry/api/endpoints/codeowners/index.py +++ b/src/sentry/api/endpoints/codeowners/index.py @@ -88,7 +88,7 @@ def post(self, request: Request, project: Project) -> Response: Upload a CODEOWNERS for project ````````````` - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project to get. :param string raw: the raw CODEOWNERS text :param string codeMappingId: id of the RepositoryProjectPathConfig object diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py index 74fb4ce861ecfb..fcd82d05a2c92d 100644 --- a/src/sentry/api/endpoints/debug_files.py +++ b/src/sentry/api/endpoints/debug_files.py @@ -157,7 +157,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of associated releases for a given Proguard File. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the file belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the DIFs of. @@ -227,7 +227,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of debug information files for a given project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the file belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the DIFs of. @@ -312,7 +312,7 @@ def delete(self, request: Request, project: Project) -> Response: Delete a debug information file for a given project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the file belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the DIF. @@ -346,7 +346,7 @@ def post(self, request: Request, project) -> Response: contains the individual debug images. Uploading through this endpoint will create different files for the contained images. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to change the release of. @@ -521,7 +521,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of source map archives (releases, later bundles) for a given project. 
- :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the source map archive belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the source map archives of. @@ -585,7 +585,7 @@ def delete(self, request: Request, project) -> Response: Delete all artifacts inside given archive. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the archive belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the archive of. diff --git a/src/sentry/api/endpoints/event_attachment_details.py b/src/sentry/api/endpoints/event_attachment_details.py index 312c5f913fd43b..1711b0b8a9aeb2 100644 --- a/src/sentry/api/endpoints/event_attachment_details.py +++ b/src/sentry/api/endpoints/event_attachment_details.py @@ -78,7 +78,7 @@ def get(self, request: Request, project, event_id, attachment_id) -> Response: Retrieve an Attachment `````````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. diff --git a/src/sentry/api/endpoints/event_attachments.py b/src/sentry/api/endpoints/event_attachments.py index 2b36c145737050..ef979e59ba311b 100644 --- a/src/sentry/api/endpoints/event_attachments.py +++ b/src/sentry/api/endpoints/event_attachments.py @@ -24,7 +24,7 @@ def get(self, request: Request, project, event_id) -> Response: Retrieve attachments for an event ````````````````````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. diff --git a/src/sentry/api/endpoints/event_reprocessable.py b/src/sentry/api/endpoints/event_reprocessable.py index 78810bab26d288..39c5a8f3df15dd 100644 --- a/src/sentry/api/endpoints/event_reprocessable.py +++ b/src/sentry/api/endpoints/event_reprocessable.py @@ -48,7 +48,7 @@ def get(self, request: Request, project, event_id) -> Response: * `attachment.not_found`: A required attachment, such as the original minidump, is missing. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. diff --git a/src/sentry/api/endpoints/filechange.py b/src/sentry/api/endpoints/filechange.py index a5a23690e9a886..cbc7617f6a228e 100644 --- a/src/sentry/api/endpoints/filechange.py +++ b/src/sentry/api/endpoints/filechange.py @@ -29,7 +29,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of files that were changed in a given release's commits. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. 
diff --git a/src/sentry/api/endpoints/group_tombstone_details.py b/src/sentry/api/endpoints/group_tombstone_details.py index 06b51314e9601d..27020170d88da1 100644 --- a/src/sentry/api/endpoints/group_tombstone_details.py +++ b/src/sentry/api/endpoints/group_tombstone_details.py @@ -25,7 +25,7 @@ def delete(self, request: Request, project, tombstone_id) -> Response: Undiscards a group such that new events in that group will be captured. This does not restore any previous data. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project to which this tombstone belongs. :pparam string tombstone_id: the ID of the tombstone to remove. :auth: required diff --git a/src/sentry/api/endpoints/organization_api_key_details.py b/src/sentry/api/endpoints/organization_api_key_details.py index 59afc5f603ef3e..cef671fe023d7c 100644 --- a/src/sentry/api/endpoints/organization_api_key_details.py +++ b/src/sentry/api/endpoints/organization_api_key_details.py @@ -36,7 +36,7 @@ def get(self, request: Request, organization_context, organization, api_key_id) Retrieves API Key details ````````````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team belongs to. :pparam string api_key_id: the ID of the api key to delete :auth: required @@ -53,7 +53,7 @@ def put(self, request: Request, organization_context, organization, api_key_id) Update an API Key ````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team belongs to. :pparam string api_key_id: the ID of the api key to delete :param string label: the new label for the api key @@ -89,7 +89,7 @@ def delete(self, request: Request, organization_context, organization, api_key_i Deletes an API Key `````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team belongs to. :pparam string api_key_id: the ID of the api key to delete :auth: required diff --git a/src/sentry/api/endpoints/organization_code_mapping_details.py b/src/sentry/api/endpoints/organization_code_mapping_details.py index 2f231a4b19df2c..296ca265b939fa 100644 --- a/src/sentry/api/endpoints/organization_code_mapping_details.py +++ b/src/sentry/api/endpoints/organization_code_mapping_details.py @@ -50,7 +50,7 @@ def put(self, request: Request, config_id, organization, config) -> Response: Update a repository project path config `````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. :param int repository_id: :param int project_id: diff --git a/src/sentry/api/endpoints/organization_code_mappings.py b/src/sentry/api/endpoints/organization_code_mappings.py index f3f7fbd118aea3..e2be4bb2bf1c75 100644 --- a/src/sentry/api/endpoints/organization_code_mappings.py +++ b/src/sentry/api/endpoints/organization_code_mappings.py @@ -141,7 +141,7 @@ def get(self, request: Request, organization) -> Response: """ Get the list of repository project path configs - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. 
:qparam int integrationId: the optional integration id. :qparam int project: Optional. Pass "-1" to filter to 'all projects user has access to'. Omit to filter for 'all projects user is a member of'. @@ -175,7 +175,7 @@ def post(self, request: Request, organization) -> Response: Create a new repository project path config `````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. :param int repositoryId: :param int projectId: diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py index 85896e0afd08d5..b3bc3ab64343cc 100644 --- a/src/sentry/api/endpoints/organization_dashboards.py +++ b/src/sentry/api/endpoints/organization_dashboards.py @@ -49,7 +49,7 @@ def get(self, request: Request, organization) -> Response: If on the first page, this endpoint will also include any pre-built dashboards that haven't been replaced or removed. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the dashboards belongs to. :qparam string query: the title of the dashboard being searched for. :auth: required @@ -142,7 +142,7 @@ def post(self, request: Request, organization, retry=0) -> Response: `````````````````````````````````````````` Create a new dashboard for the given Organization - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the dashboards belongs to. """ if not features.has("organizations:dashboards-edit", organization, actor=request.user): diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py index 77326595f59103..2eea3a854706b1 100644 --- a/src/sentry/api/endpoints/organization_details.py +++ b/src/sentry/api/endpoints/organization_details.py @@ -555,7 +555,7 @@ def get(self, request: Request, organization) -> Response: Return details on an individual organization including various details such as membership access, features, and teams. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. :param string detailed: Specify '0' to retrieve details without projects and teams. :auth: required @@ -581,7 +581,7 @@ def put(self, request: Request, organization) -> Response: Update various attributes and configurable settings for the given organization. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. :param string name: an optional new name for the organization. :param string slug: an optional new slug for the organization. Needs @@ -714,7 +714,7 @@ def delete(self, request: Request, organization) -> Response: However once deletion has begun the state of an organization changes and will be hidden from most public views. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. 
:auth: required, user-context-needed """ diff --git a/src/sentry/api/endpoints/organization_eventid.py b/src/sentry/api/endpoints/organization_eventid.py index 9f6e497effacee..a0f5cf74db89a3 100644 --- a/src/sentry/api/endpoints/organization_eventid.py +++ b/src/sentry/api/endpoints/organization_eventid.py @@ -35,7 +35,7 @@ def get(self, request: Request, organization, event_id) -> Response: This resolves an event ID to the project slug and internal issue ID and internal event ID. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the event ID should be looked up in. :param string event_id: the event ID to look up. validated by a regex in the URL. diff --git a/src/sentry/api/endpoints/organization_issues_resolved_in_release.py b/src/sentry/api/endpoints/organization_issues_resolved_in_release.py index d415b5b20fbca5..753a27e7352ce6 100644 --- a/src/sentry/api/endpoints/organization_issues_resolved_in_release.py +++ b/src/sentry/api/endpoints/organization_issues_resolved_in_release.py @@ -26,7 +26,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of issues to be resolved in a given release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/details.py b/src/sentry/api/endpoints/organization_member/requests/invite/details.py index a05f9457ae3829..03b5ed54426662 100644 --- a/src/sentry/api/endpoints/organization_member/requests/invite/details.py +++ b/src/sentry/api/endpoints/organization_member/requests/invite/details.py @@ -90,7 +90,7 @@ def put( Update and/or approve an invite request to an organization. - :pparam string organization_slug: the slug of the organization the member will belong to + :pparam string organization_slug: the id or slug of the organization the member will belong to :param string member_id: the member ID :param boolean approve: allows the member to be invited :param string role: the suggested role of the new member @@ -171,7 +171,7 @@ def delete( Delete an invite request to an organization. - :pparam string organization_slug: the slug of the organization the member would belong to + :pparam string organization_slug: the id or slug of the organization the member would belong to :param string member_id: the member ID :auth: required diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/index.py b/src/sentry/api/endpoints/organization_member/requests/invite/index.py index d6acb9b0cc810b..7f90b4bbcee38d 100644 --- a/src/sentry/api/endpoints/organization_member/requests/invite/index.py +++ b/src/sentry/api/endpoints/organization_member/requests/invite/index.py @@ -61,7 +61,7 @@ def post(self, request: Request, organization) -> Response: Creates an invite request given an email and suggested role / teams. 
- :pparam string organization_slug: the slug of the organization the member will belong to + :pparam string organization_slug: the id or slug of the organization the member will belong to :param string email: the email address to invite :param string role: the suggested role of the new member :param string orgRole: the suggested org-role of the new member diff --git a/src/sentry/api/endpoints/organization_processingissues.py b/src/sentry/api/endpoints/organization_processingissues.py index 4b70f0be1d40b9..eb4b3f350a8ec9 100644 --- a/src/sentry/api/endpoints/organization_processingissues.py +++ b/src/sentry/api/endpoints/organization_processingissues.py @@ -21,7 +21,7 @@ def get(self, request: Request, organization) -> Response: For each Project in an Organization, list its processing issues. Can be passed `project` to filter down to specific projects. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :qparam array[string] project: An optional list of project ids to filter to within the organization :auth: required diff --git a/src/sentry/api/endpoints/organization_projects_experiment.py b/src/sentry/api/endpoints/organization_projects_experiment.py index eab986537d3982..8ebff6b033f68e 100644 --- a/src/sentry/api/endpoints/organization_projects_experiment.py +++ b/src/sentry/api/endpoints/organization_projects_experiment.py @@ -71,7 +71,7 @@ def post(self, request: Request, organization: Organization) -> Response: If this is taken, a random three letter suffix is added as needed (eg: ...-gnm, ...-zls). Then create a new project bound to this team - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team should be created for. :param string name: the name for the new project. :param string platform: the optional platform that this project is for. diff --git a/src/sentry/api/endpoints/organization_projects_sent_first_event.py b/src/sentry/api/endpoints/organization_projects_sent_first_event.py index 20c94045d0cfff..dab0e3f054e17c 100644 --- a/src/sentry/api/endpoints/organization_projects_sent_first_event.py +++ b/src/sentry/api/endpoints/organization_projects_sent_first_event.py @@ -23,7 +23,7 @@ def get(self, request: Request, organization) -> Response: Returns true if any projects within the organization have received a first event, false otherwise. - :pparam string organization_slug: the slug of the organization + :pparam string organization_slug: the id or slug of the organization containing the projects to check for a first event from. :qparam array[string] project: An optional list of project ids to filter diff --git a/src/sentry/api/endpoints/organization_release_commits.py b/src/sentry/api/endpoints/organization_release_commits.py index e1ddb2ae8f0f0b..3533619bd83c03 100644 --- a/src/sentry/api/endpoints/organization_release_commits.py +++ b/src/sentry/api/endpoints/organization_release_commits.py @@ -23,7 +23,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of commits for a given release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. 
:auth: required diff --git a/src/sentry/api/endpoints/organization_release_details.py b/src/sentry/api/endpoints/organization_release_details.py index eaa1672694c34a..e2b9f65ae9881e 100644 --- a/src/sentry/api/endpoints/organization_release_details.py +++ b/src/sentry/api/endpoints/organization_release_details.py @@ -288,7 +288,7 @@ def get(self, request: Request, organization, version) -> Response: Return details on an individual release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required @@ -383,7 +383,7 @@ def put(self, request: Request, organization, version) -> Response: Update a release. This can change some metadata associated with the release (the ref, url, and dates). - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :param string ref: an optional commit reference. This is useful if @@ -510,7 +510,7 @@ def delete(self, request: Request, organization, version) -> Response: Permanently remove a release and all of its files. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_release_file_details.py b/src/sentry/api/endpoints/organization_release_file_details.py index d3f3a83a0a60d6..2b190e4fb068ad 100644 --- a/src/sentry/api/endpoints/organization_release_file_details.py +++ b/src/sentry/api/endpoints/organization_release_file_details.py @@ -33,7 +33,7 @@ def get(self, request: Request, organization, version, file_id) -> Response: not actually return the contents of the file, just the associated metadata. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :pparam string file_id: the ID of the file to retrieve. @@ -62,7 +62,7 @@ def put(self, request: Request, organization, version, file_id) -> Response: Update metadata of an existing file. Currently only the name of the file can be changed. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :pparam string file_id: the ID of the file to update. @@ -89,7 +89,7 @@ def delete(self, request: Request, organization, version, file_id) -> Response: This will also remove the physical file from storage. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :pparam string file_id: the ID of the file to delete. 
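Every `:pparam` rewritten in this patch describes the same caller-facing contract: the leading path segment now accepts either form. For example, a hypothetical API client could fetch release files either way (placeholder host, token, org values, and version; the endpoint shape follows the docstrings above):

```python
import requests

BASE = "https://sentry.example.com/api/0"          # placeholder host
HEADERS = {"Authorization": "Bearer <api-token>"}  # placeholder token

# By slug ("acme") and by numeric id ("42") -- both name the same organization.
for org in ("acme", "42"):
    resp = requests.get(
        f"{BASE}/organizations/{org}/releases/1.0.0/files/",
        headers=HEADERS,
    )
    resp.raise_for_status()
```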
diff --git a/src/sentry/api/endpoints/organization_release_files.py b/src/sentry/api/endpoints/organization_release_files.py index 1394426206ad06..91abde8ceac0c2 100644 --- a/src/sentry/api/endpoints/organization_release_files.py +++ b/src/sentry/api/endpoints/organization_release_files.py @@ -25,7 +25,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of files for a given release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :qparam string query: If set, only files with these partial names will be returned. @@ -56,7 +56,7 @@ def post(self, request: Request, organization, version) -> Response: that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :param string name: the name (full path) of the file. diff --git a/src/sentry/api/endpoints/organization_release_meta.py b/src/sentry/api/endpoints/organization_release_meta.py index 3c4348768c67cc..987629564165a5 100644 --- a/src/sentry/api/endpoints/organization_release_meta.py +++ b/src/sentry/api/endpoints/organization_release_meta.py @@ -28,7 +28,7 @@ def get(self, request: Request, organization, version) -> Response: The data returned from here is auxiliary meta data that the UI uses. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_releases.py b/src/sentry/api/endpoints/organization_releases.py index 06365aa69609f6..23f2d92376d665 100644 --- a/src/sentry/api/endpoints/organization_releases.py +++ b/src/sentry/api/endpoints/organization_releases.py @@ -421,7 +421,7 @@ def post(self, request: Request, organization) -> Response: Releases are also necessary for sourcemaps and other debug features that require manual upload for functioning well. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :param string version: a version identifier for this release. Can be a version number, a commit hash etc. diff --git a/src/sentry/api/endpoints/organization_shortid.py b/src/sentry/api/endpoints/organization_shortid.py index 0d85881c8ebd92..8af3609c66d1f3 100644 --- a/src/sentry/api/endpoints/organization_shortid.py +++ b/src/sentry/api/endpoints/organization_shortid.py @@ -24,7 +24,7 @@ def get(self, request: Request, organization, short_id) -> Response: This resolves a short ID to the project slug and internal issue ID. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the short ID should be looked up in. :pparam string short_id: the short ID to look up. 
:auth: required diff --git a/src/sentry/api/endpoints/organization_slugs.py b/src/sentry/api/endpoints/organization_slugs.py index 84f188e72962f7..94bd9ef07df280 100644 --- a/src/sentry/api/endpoints/organization_slugs.py +++ b/src/sentry/api/endpoints/organization_slugs.py @@ -25,7 +25,7 @@ def put(self, request: Request, organization) -> Response: Updates the slugs of projects within the organization. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the short ID should be looked up in. :param slugs: a dictionary of project IDs to their intended slugs. :auth: required diff --git a/src/sentry/api/endpoints/organization_stats.py b/src/sentry/api/endpoints/organization_stats.py index 54cf9c99f4903b..45a1de7c05ad58 100644 --- a/src/sentry/api/endpoints/organization_stats.py +++ b/src/sentry/api/endpoints/organization_stats.py @@ -32,7 +32,7 @@ def get(self, request: Request, organization) -> Response: Return a set of points representing a normalized timestamp and the number of events seen in the period. - :pparam string organization_slug: the slug of the organization for + :pparam string organization_slug: the id or slug of the organization for which the stats should be retrieved. :qparam string stat: the name of the stat to query (``"received"``, diff --git a/src/sentry/api/endpoints/organization_user_reports.py b/src/sentry/api/endpoints/organization_user_reports.py index 4d54db36cd49a8..9550595381670c 100644 --- a/src/sentry/api/endpoints/organization_user_reports.py +++ b/src/sentry/api/endpoints/organization_user_reports.py @@ -35,7 +35,7 @@ def get(self, request: Request, organization) -> Response: Return a list of user feedback items within this organization. Can be filtered by projects/environments/creation date. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required """ diff --git a/src/sentry/api/endpoints/organization_users.py b/src/sentry/api/endpoints/organization_users.py index 6b2dfcbd989b03..242677487a93dc 100644 --- a/src/sentry/api/endpoints/organization_users.py +++ b/src/sentry/api/endpoints/organization_users.py @@ -27,7 +27,7 @@ def get(self, request: Request, organization) -> Response: Return a list of users that belong to a given organization and are part of a project. :qparam string project: restrict results to users who have access to a given project ID - :pparam string organization_slug: the slug of the organization for which the users + :pparam string organization_slug: the id or slug of the organization for which the users should be listed. :auth: required """ diff --git a/src/sentry/api/endpoints/project_artifact_bundle_file_details.py b/src/sentry/api/endpoints/project_artifact_bundle_file_details.py index df47a8083252a6..16530c4ee9f64d 100644 --- a/src/sentry/api/endpoints/project_artifact_bundle_file_details.py +++ b/src/sentry/api/endpoints/project_artifact_bundle_file_details.py @@ -63,7 +63,7 @@ def get(self, request: Request, project, bundle_id, file_id) -> Response: not actually return the contents of the file, just the associated metadata. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the file of. 
diff --git a/src/sentry/api/endpoints/project_artifact_bundle_files.py b/src/sentry/api/endpoints/project_artifact_bundle_files.py index b39efaa45ad381..2ae9b585ebfa53 100644 --- a/src/sentry/api/endpoints/project_artifact_bundle_files.py +++ b/src/sentry/api/endpoints/project_artifact_bundle_files.py @@ -69,7 +69,7 @@ def get(self, request: Request, project, bundle_id) -> Response: Retrieve a list of files for a given artifact bundle. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the artifact bundle belongs to. :pparam string project_id_or_slug: the id or slug of the project the artifact bundle belongs to. diff --git a/src/sentry/api/endpoints/project_commits.py b/src/sentry/api/endpoints/project_commits.py index 68a5292c564b83..bfcbf574032535 100644 --- a/src/sentry/api/endpoints/project_commits.py +++ b/src/sentry/api/endpoints/project_commits.py @@ -26,7 +26,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of commits for a given project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the commit belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the commits of. diff --git a/src/sentry/api/endpoints/project_environments.py b/src/sentry/api/endpoints/project_environments.py index 908860f531e2df..668f2f19212e27 100644 --- a/src/sentry/api/endpoints/project_environments.py +++ b/src/sentry/api/endpoints/project_environments.py @@ -27,7 +27,7 @@ def get(self, request: Request, project) -> Response: environments, or ``"all"`` for both hidden and visible environments. - :pparam string organization_slug: the slug of the organization the project + :pparam string organization_slug: the id or slug of the organization the project belongs to. :pparam string project_id_or_slug: the id or slug of the project. diff --git a/src/sentry/api/endpoints/project_event_details.py b/src/sentry/api/endpoints/project_event_details.py index ebbab831c6cb31..4d584f978e9c24 100644 --- a/src/sentry/api/endpoints/project_event_details.py +++ b/src/sentry/api/endpoints/project_event_details.py @@ -64,7 +64,7 @@ def get(self, request: Request, project, event_id) -> Response: Return details on an individual event. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the event belongs to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. diff --git a/src/sentry/api/endpoints/project_events.py b/src/sentry/api/endpoints/project_events.py index 917b0da47bc198..c0a91ccce6ccab 100644 --- a/src/sentry/api/endpoints/project_events.py +++ b/src/sentry/api/endpoints/project_events.py @@ -46,7 +46,7 @@ def get(self, request: Request, project) -> Response: :qparam bool sample: return events in pseudo-random order. This is deterministic, same query will return the same events in the same order. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the groups belong to. :pparam string project_id_or_slug: the id or slug of the project the groups belong to. 
diff --git a/src/sentry/api/endpoints/project_group_index.py b/src/sentry/api/endpoints/project_group_index.py index a09db76380971a..f4a24bc3d30935 100644 --- a/src/sentry/api/endpoints/project_group_index.py +++ b/src/sentry/api/endpoints/project_group_index.py @@ -77,7 +77,7 @@ def get(self, request: Request, project) -> Response: ``"is:unresolved"`` is assumed.) :qparam string environment: this restricts the issues to ones containing events from this environment - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the issues belong to. @@ -214,7 +214,7 @@ def put(self, request: Request, project) -> Response: specified status. Valid values are ``"resolved"``, ``"unresolved"`` and ``"ignored"``. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the issues belong to. @@ -271,7 +271,7 @@ def delete(self, request: Request, project) -> Response: :qparam int id: a list of IDs of the issues to be removed. This parameter shall be repeated for each issue. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the issues belong to. diff --git a/src/sentry/api/endpoints/project_issues_resolved_in_release.py b/src/sentry/api/endpoints/project_issues_resolved_in_release.py index 98e889d6a8c789..760e51e5f5083a 100644 --- a/src/sentry/api/endpoints/project_issues_resolved_in_release.py +++ b/src/sentry/api/endpoints/project_issues_resolved_in_release.py @@ -26,7 +26,7 @@ def get(self, request: Request, project, version) -> Response: Retrieve a list of issues to be resolved in a given release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project associated with the release. :pparam string version: the version identifier of the release. diff --git a/src/sentry/api/endpoints/project_performance_issue_settings.py b/src/sentry/api/endpoints/project_performance_issue_settings.py index c5d6ce5eb4540f..9e02b05511c575 100644 --- a/src/sentry/api/endpoints/project_performance_issue_settings.py +++ b/src/sentry/api/endpoints/project_performance_issue_settings.py @@ -184,7 +184,7 @@ def get(self, request: Request, project) -> Response: Return settings for performance issues - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the project belongs to. :pparam string project_id_or_slug: the id or slug of the project to configure. :auth: required diff --git a/src/sentry/api/endpoints/project_release_commits.py b/src/sentry/api/endpoints/project_release_commits.py index b331cf0efc9af7..1e692c01fc0378 100644 --- a/src/sentry/api/endpoints/project_release_commits.py +++ b/src/sentry/api/endpoints/project_release_commits.py @@ -26,7 +26,7 @@ def get(self, request: Request, project, version) -> Response: Retrieve a list of commits for a given release. 
- :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the release files of. diff --git a/src/sentry/api/endpoints/project_release_details.py b/src/sentry/api/endpoints/project_release_details.py index cb7e65c5463720..7eeba16e5eaa59 100644 --- a/src/sentry/api/endpoints/project_release_details.py +++ b/src/sentry/api/endpoints/project_release_details.py @@ -34,7 +34,7 @@ def get(self, request: Request, project, version) -> Response: Return details on an individual release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the release of. @@ -78,7 +78,7 @@ def put(self, request: Request, project, version) -> Response: Update a release. This can change some metadata associated with the release (the ref, url, and dates). - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to change the release of. @@ -154,7 +154,7 @@ def delete(self, request: Request, project, version) -> Response: Permanently remove a release and all of its files. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the release of. diff --git a/src/sentry/api/endpoints/project_release_file_details.py b/src/sentry/api/endpoints/project_release_file_details.py index 802e0b109dae8f..43ef2174994ce8 100644 --- a/src/sentry/api/endpoints/project_release_file_details.py +++ b/src/sentry/api/endpoints/project_release_file_details.py @@ -217,7 +217,7 @@ def get(self, request: Request, project, version, file_id) -> Response: not actually return the contents of the file, just the associated metadata. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the file of. @@ -247,7 +247,7 @@ def put(self, request: Request, project, version, file_id) -> Response: Update metadata of an existing file. Currently only the name of the file can be changed. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to update the file of. @@ -276,7 +276,7 @@ def delete(self, request: Request, project, version, file_id) -> Response: This will also remove the physical file from storage, except if it is stored as part of an artifact bundle. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the file of. 
diff --git a/src/sentry/api/endpoints/project_release_files.py b/src/sentry/api/endpoints/project_release_files.py index 1dd5dad5eb9ed7..b5873e23258323 100644 --- a/src/sentry/api/endpoints/project_release_files.py +++ b/src/sentry/api/endpoints/project_release_files.py @@ -244,7 +244,7 @@ def get(self, request: Request, project, version) -> Response: Retrieve a list of files for a given release. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the release files of. @@ -276,7 +276,7 @@ def post(self, request: Request, project, version) -> Response: that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to change the release of. diff --git a/src/sentry/api/endpoints/project_release_repositories.py b/src/sentry/api/endpoints/project_release_repositories.py index d7b584555de69e..6e6e7e9f94c76f 100644 --- a/src/sentry/api/endpoints/project_release_repositories.py +++ b/src/sentry/api/endpoints/project_release_repositories.py @@ -26,7 +26,7 @@ def get(self, request: Request, project, version) -> Response: This endpoint is used in the commits and changed files tab of the release details page - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the release of. diff --git a/src/sentry/api/endpoints/project_release_stats.py b/src/sentry/api/endpoints/project_release_stats.py index 9cd53508887253..d0860b5650df88 100644 --- a/src/sentry/api/endpoints/project_release_stats.py +++ b/src/sentry/api/endpoints/project_release_stats.py @@ -39,7 +39,7 @@ def get(self, request: Request, project, version) -> Response: Returns the stats of a given release under a project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the release files of. diff --git a/src/sentry/api/endpoints/project_releases.py b/src/sentry/api/endpoints/project_releases.py index 03ca984b212f51..6c530a95a81f24 100644 --- a/src/sentry/api/endpoints/project_releases.py +++ b/src/sentry/api/endpoints/project_releases.py @@ -42,7 +42,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of releases for a given project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the releases of. @@ -97,7 +97,7 @@ def post(self, request: Request, project) -> Response: Releases are also necessary for sourcemaps and other debug features that require manual upload for functioning well. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the release belongs to. 
:pparam string project_id_or_slug: the id or slug of the project to create a release for. diff --git a/src/sentry/api/endpoints/project_servicehook_details.py b/src/sentry/api/endpoints/project_servicehook_details.py index 49210842780913..e06d6a344450ae 100644 --- a/src/sentry/api/endpoints/project_servicehook_details.py +++ b/src/sentry/api/endpoints/project_servicehook_details.py @@ -31,7 +31,7 @@ def get(self, request: Request, project, hook_id) -> Response: Return a service hook bound to a project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. @@ -49,7 +49,7 @@ def put(self, request: Request, project, hook_id) -> Response: Update a Service Hook ````````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. @@ -102,7 +102,7 @@ def delete(self, request: Request, project, hook_id) -> Response: Remove a Service Hook ````````````````````` - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. diff --git a/src/sentry/api/endpoints/project_servicehooks.py b/src/sentry/api/endpoints/project_servicehooks.py index e182a06eb0a437..8b5c1d2938c054 100644 --- a/src/sentry/api/endpoints/project_servicehooks.py +++ b/src/sentry/api/endpoints/project_servicehooks.py @@ -37,7 +37,7 @@ def get(self, request: Request, project) -> Response: This endpoint requires the 'servicehooks' feature to be enabled for your project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. @@ -83,7 +83,7 @@ def post(self, request: Request, project) -> Response: This endpoint requires the 'servicehooks' feature to be enabled for your project. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. diff --git a/src/sentry/api/endpoints/project_stats.py b/src/sentry/api/endpoints/project_stats.py index 4b3b3d1d09a717..b0e0e3583bca13 100644 --- a/src/sentry/api/endpoints/project_stats.py +++ b/src/sentry/api/endpoints/project_stats.py @@ -31,7 +31,7 @@ def get(self, request: Request, project) -> Response: Query ranges are limited to Sentry's configured time-series resolutions. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. 
:qparam string stat: the name of the stat to query (``"received"``, ``"rejected"``, ``"blacklisted"``, ``generated``) diff --git a/src/sentry/api/endpoints/project_tagkey_values.py b/src/sentry/api/endpoints/project_tagkey_values.py index 2038ad7f07680b..9f4cd23c0ff588 100644 --- a/src/sentry/api/endpoints/project_tagkey_values.py +++ b/src/sentry/api/endpoints/project_tagkey_values.py @@ -29,7 +29,7 @@ def get(self, request: Request, project, key) -> Response: values. When paginated can return at most 1000 values. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :pparam string key: the tag key to look up. :auth: required diff --git a/src/sentry/api/endpoints/project_teams.py b/src/sentry/api/endpoints/project_teams.py index 17e015be670f5e..261eb1272515f5 100644 --- a/src/sentry/api/endpoints/project_teams.py +++ b/src/sentry/api/endpoints/project_teams.py @@ -24,7 +24,7 @@ def get(self, request: Request, project) -> Response: Return a list of teams that have access to this project. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required """ diff --git a/src/sentry/api/endpoints/project_transfer.py b/src/sentry/api/endpoints/project_transfer.py index b2e120f93cc4b8..2112635baecf8e 100644 --- a/src/sentry/api/endpoints/project_transfer.py +++ b/src/sentry/api/endpoints/project_transfer.py @@ -39,7 +39,7 @@ def post(self, request: Request, project) -> Response: Schedules a project for transfer to a new organization. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the project belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete. :param string email: email of new owner. must be an organization owner diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py index 76cdd7347ad7c8..3359d8b455dd7d 100644 --- a/src/sentry/api/endpoints/project_user_reports.py +++ b/src/sentry/api/endpoints/project_user_reports.py @@ -45,7 +45,7 @@ def get(self, request: Request, project) -> Response: Return a list of user feedback items within this project. - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required """ @@ -100,7 +100,7 @@ def post(self, request: Request, project) -> Response: Note: Feedback may be submitted with DSN authentication (see auth documentation). - :pparam string organization_slug: the slug of the organization. + :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required :param string event_id: the event ID diff --git a/src/sentry/api/endpoints/project_users.py b/src/sentry/api/endpoints/project_users.py index 8261a2d54d03f5..943404972aa732 100644 --- a/src/sentry/api/endpoints/project_users.py +++ b/src/sentry/api/endpoints/project_users.py @@ -31,7 +31,7 @@ def get(self, request: Request, project) -> Response: Return a list of users seen within this project. - :pparam string organization_slug: the slug of the organization. 
+ :pparam string organization_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :pparam string key: the tag key to look up. :auth: required diff --git a/src/sentry/api/endpoints/team_details.py b/src/sentry/api/endpoints/team_details.py index ab8f6c7290e17b..d09435aa351818 100644 --- a/src/sentry/api/endpoints/team_details.py +++ b/src/sentry/api/endpoints/team_details.py @@ -51,7 +51,7 @@ def get(self, request: Request, team) -> Response: Return details on an individual team. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team belongs to. :pparam string team_id_or_slug: the id or slug of the team to get. :qparam list expand: an optional list of strings to opt in to additional @@ -81,7 +81,7 @@ def put(self, request: Request, team) -> Response: Update various attributes and configurable settings for the given team. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the team belongs to. :pparam string team_id_or_slug: the id or slug of the team to get. :param string name: the new name for the team. diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py index 0055cac2b7237c..a5766858316355 100644 --- a/src/sentry/issues/endpoints/organization_group_index.py +++ b/src/sentry/issues/endpoints/organization_group_index.py @@ -251,7 +251,7 @@ def get(self, request: Request, organization) -> Response: :qparam bool savedSearch: if this is set to False, then we are making the request without a saved search and will look for the default search from this endpoint. :qparam string searchId: if passed in, this is the selected search - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :auth: required :qparam list expand: an optional list of strings to opt in to additional data. Supports `inbox` @@ -445,7 +445,7 @@ def put(self, request: Request, organization) -> Response: specified status. Valid values are ``"resolved"``, ``"unresolved"`` and ``"ignored"``. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. :param string status: the new status for the issues. Valid values are ``"resolved"``, ``"resolvedInNextRelease"``, @@ -521,7 +521,7 @@ def delete(self, request: Request, organization) -> Response: parameter shall be repeated for each issue, e.g. `?id=1&id=2&id=3`. If this parameter is not provided, it will attempt to remove the first 1000 issues. - :pparam string organization_slug: the slug of the organization the + :pparam string organization_slug: the id or slug of the organization the issues belong to. 
:auth: required
        """
diff --git a/src/sentry/issues/endpoints/organization_release_previous_commits.py b/src/sentry/issues/endpoints/organization_release_previous_commits.py
index 41ef12e0840fcf..3559400c0a49b1 100644
--- a/src/sentry/issues/endpoints/organization_release_previous_commits.py
+++ b/src/sentry/issues/endpoints/organization_release_previous_commits.py
@@ -26,7 +26,7 @@ def get(self, request: Request, organization: Organization, version: str) -> Res
         Retrieve an Organization's Most Recent Release with Commits
         ````````````````````````````````````````````````````````````

-        :pparam string organization_slug: the slug of the organization the
+        :pparam string organization_id_or_slug: the id or slug of the organization the
                                           release belongs to.
         :pparam string version: the version identifier of the release.
         :auth: required

From e9071c235eb171e65857bf759c0d76574451d36b Mon Sep 17 00:00:00 2001
From: Raj Joshi
Date: Wed, 8 May 2024 20:38:35 -0700
Subject: [PATCH 198/376] chore(api-idorslug): Updating More Comments to
 Reflect Support for organization_id_or_slug (#70569)

Some more comment changes en masse. Should be the last of them.

---
 src/sentry/api/endpoints/artifact_bundles.py | 4 ++--
 src/sentry/api/endpoints/artifact_lookup.py | 2 +-
 src/sentry/api/endpoints/codeowners/details.py | 2 +-
 .../codeowners/external_actor/user_details.py | 2 +-
 .../codeowners/external_actor/user_index.py | 2 +-
 src/sentry/api/endpoints/codeowners/index.py | 2 +-
 src/sentry/api/endpoints/debug_files.py | 12 ++++++------
 src/sentry/api/endpoints/event_attachment_details.py | 2 +-
 src/sentry/api/endpoints/event_attachments.py | 2 +-
 src/sentry/api/endpoints/event_reprocessable.py | 2 +-
 src/sentry/api/endpoints/filechange.py | 2 +-
 src/sentry/api/endpoints/group_details.py | 2 +-
 src/sentry/api/endpoints/group_tombstone_details.py | 2 +-
 .../api/endpoints/organization_api_key_details.py | 6 +++---
 .../api/endpoints/organization_api_key_index.py | 4 ++--
 .../endpoints/organization_auth_provider_details.py | 2 +-
 .../api/endpoints/organization_auth_providers.py | 2 +-
 .../endpoints/organization_code_mapping_details.py | 2 +-
 .../api/endpoints/organization_code_mappings.py | 4 ++--
 src/sentry/api/endpoints/organization_dashboards.py | 4 ++--
 src/sentry/api/endpoints/organization_details.py | 6 +++---
 src/sentry/api/endpoints/organization_eventid.py | 2 +-
 .../organization_issues_resolved_in_release.py | 2 +-
 .../organization_member/requests/invite/details.py | 4 ++--
 .../organization_member/requests/invite/index.py | 2 +-
 .../api/endpoints/organization_processingissues.py | 2 +-
 .../endpoints/organization_projects_experiment.py | 2 +-
 .../organization_projects_sent_first_event.py | 2 +-
 .../api/endpoints/organization_release_commits.py | 2 +-
 .../api/endpoints/organization_release_details.py | 6 +++---
 .../endpoints/organization_release_file_details.py | 6 +++---
 .../api/endpoints/organization_release_files.py | 4 ++--
 .../api/endpoints/organization_release_meta.py | 2 +-
 src/sentry/api/endpoints/organization_releases.py | 6 +++---
 .../api/endpoints/organization_repositories.py | 2 +-
 .../api/endpoints/organization_repository_commits.py | 2 +-
 src/sentry/api/endpoints/organization_shortid.py | 2 +-
 src/sentry/api/endpoints/organization_slugs.py | 2 +-
 src/sentry/api/endpoints/organization_stats.py | 2 +-
 .../api/endpoints/organization_user_reports.py | 2 +-
 src/sentry/api/endpoints/organization_users.py | 2 +-
 .../project_artifact_bundle_file_details.py | 2 +-
 .../api/endpoints/project_artifact_bundle_files.py | 2 +-
src/sentry/api/endpoints/project_commits.py | 2 +- src/sentry/api/endpoints/project_environments.py | 2 +- src/sentry/api/endpoints/project_event_details.py | 2 +- src/sentry/api/endpoints/project_events.py | 2 +- src/sentry/api/endpoints/project_group_index.py | 6 +++--- .../endpoints/project_issues_resolved_in_release.py | 2 +- .../endpoints/project_performance_issue_settings.py | 2 +- src/sentry/api/endpoints/project_release_commits.py | 2 +- src/sentry/api/endpoints/project_release_details.py | 6 +++--- .../api/endpoints/project_release_file_details.py | 6 +++--- src/sentry/api/endpoints/project_release_files.py | 4 ++-- .../api/endpoints/project_release_repositories.py | 2 +- src/sentry/api/endpoints/project_release_stats.py | 2 +- src/sentry/api/endpoints/project_releases.py | 4 ++-- .../api/endpoints/project_servicehook_details.py | 6 +++--- src/sentry/api/endpoints/project_servicehooks.py | 4 ++-- src/sentry/api/endpoints/project_stats.py | 2 +- src/sentry/api/endpoints/project_tagkey_values.py | 2 +- src/sentry/api/endpoints/project_teams.py | 2 +- src/sentry/api/endpoints/project_transfer.py | 2 +- src/sentry/api/endpoints/project_user_reports.py | 4 ++-- src/sentry/api/endpoints/project_users.py | 2 +- src/sentry/api/endpoints/release_deploys.py | 4 ++-- src/sentry/api/endpoints/team_details.py | 4 ++-- .../issues/endpoints/organization_group_index.py | 6 +++--- .../organization_release_previous_commits.py | 2 +- 69 files changed, 106 insertions(+), 106 deletions(-) diff --git a/src/sentry/api/endpoints/artifact_bundles.py b/src/sentry/api/endpoints/artifact_bundles.py index 0978afd672334f..702aad5dc1133f 100644 --- a/src/sentry/api/endpoints/artifact_bundles.py +++ b/src/sentry/api/endpoints/artifact_bundles.py @@ -67,7 +67,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of artifact bundles for a given project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the artifact bundle belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the artifact bundles of. @@ -121,7 +121,7 @@ def delete(self, request: Request, project) -> Response: Delete all artifacts inside given archive. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the archive belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the archive of. diff --git a/src/sentry/api/endpoints/artifact_lookup.py b/src/sentry/api/endpoints/artifact_lookup.py index 62dbbb3f4a6ed2..7395eda362ea71 100644 --- a/src/sentry/api/endpoints/artifact_lookup.py +++ b/src/sentry/api/endpoints/artifact_lookup.py @@ -104,7 +104,7 @@ def get(self, request: Request, project: Project) -> Response: Retrieve a list of individual artifacts or artifact bundles for a given project. - :pparam string organization_slug: the id or slug of the organization to query. + :pparam string organization_id_or_slug: the id or slug of the organization to query. :pparam string project_id_or_slug: the id or slug of the project to query. :qparam string debug_id: if set, will query and return the artifact bundle that matches the given `debug_id`. 
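The diffstat above shows this rename applied across 69 endpoint files, and the hunks that follow all have the same shape: the parameter is renamed for what it actually carries. As a rough sketch of the lookup such a parameter implies (illustrative only, not the resolution code this patch touches; the helper name is invented, and treating a numeric segment as an ID presumes slugs are never purely numeric):

    from sentry.models.organization import Organization  # model path as used in this repo

    def resolve_organization(organization_id_or_slug: str) -> Organization:
        # A purely numeric segment is treated as an organization ID;
        # anything else is treated as a slug.
        if organization_id_or_slug.isdigit():
            return Organization.objects.get(id=int(organization_id_or_slug))
        return Organization.objects.get(slug=organization_id_or_slug)
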
diff --git a/src/sentry/api/endpoints/codeowners/details.py b/src/sentry/api/endpoints/codeowners/details.py index 3fb5093be8870b..e37b370da45191 100644 --- a/src/sentry/api/endpoints/codeowners/details.py +++ b/src/sentry/api/endpoints/codeowners/details.py @@ -58,7 +58,7 @@ def put(self, request: Request, project: Project, codeowners: ProjectCodeOwners) Update a CodeOwners ````````````` - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project to get. :pparam string codeowners_id: id of codeowners object :param string raw: the raw CODEOWNERS text diff --git a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py index 4192f120a6eeb4..929101bda7eeb5 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py @@ -48,7 +48,7 @@ def put( Update an External User ````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the user belongs to. :pparam int user_id: the User id. :pparam string external_user_id: id of external_user object diff --git a/src/sentry/api/endpoints/codeowners/external_actor/user_index.py b/src/sentry/api/endpoints/codeowners/external_actor/user_index.py index 737eb9794216bb..e6c7f801dc5772 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/user_index.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/user_index.py @@ -27,7 +27,7 @@ def post(self, request: Request, organization: Organization) -> Response: Create an External User ````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the user belongs to. :param required string provider: enum("github", "gitlab", "slack") :param required string external_name: the associated username for this provider. diff --git a/src/sentry/api/endpoints/codeowners/index.py b/src/sentry/api/endpoints/codeowners/index.py index 6159398dc03f0a..28116d3505aae5 100644 --- a/src/sentry/api/endpoints/codeowners/index.py +++ b/src/sentry/api/endpoints/codeowners/index.py @@ -88,7 +88,7 @@ def post(self, request: Request, project: Project) -> Response: Upload a CODEOWNERS for project ````````````` - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project to get. :param string raw: the raw CODEOWNERS text :param string codeMappingId: id of the RepositoryProjectPathConfig object diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py index fcd82d05a2c92d..88cc895261e567 100644 --- a/src/sentry/api/endpoints/debug_files.py +++ b/src/sentry/api/endpoints/debug_files.py @@ -157,7 +157,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of associated releases for a given Proguard File. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the file belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the DIFs of. 
@@ -227,7 +227,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of debug information files for a given project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the file belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the DIFs of. @@ -312,7 +312,7 @@ def delete(self, request: Request, project: Project) -> Response: Delete a debug information file for a given project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the file belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the DIF. @@ -346,7 +346,7 @@ def post(self, request: Request, project) -> Response: contains the individual debug images. Uploading through this endpoint will create different files for the contained images. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to change the release of. @@ -521,7 +521,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of source map archives (releases, later bundles) for a given project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the source map archive belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the source map archives of. @@ -585,7 +585,7 @@ def delete(self, request: Request, project) -> Response: Delete all artifacts inside given archive. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the archive belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the archive of. diff --git a/src/sentry/api/endpoints/event_attachment_details.py b/src/sentry/api/endpoints/event_attachment_details.py index 1711b0b8a9aeb2..e1366da8081f3f 100644 --- a/src/sentry/api/endpoints/event_attachment_details.py +++ b/src/sentry/api/endpoints/event_attachment_details.py @@ -78,7 +78,7 @@ def get(self, request: Request, project, event_id, attachment_id) -> Response: Retrieve an Attachment `````````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. diff --git a/src/sentry/api/endpoints/event_attachments.py b/src/sentry/api/endpoints/event_attachments.py index ef979e59ba311b..64ae882475abb2 100644 --- a/src/sentry/api/endpoints/event_attachments.py +++ b/src/sentry/api/endpoints/event_attachments.py @@ -24,7 +24,7 @@ def get(self, request: Request, project, event_id) -> Response: Retrieve attachments for an event ````````````````````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. 
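For an endpoint to receive either form at all, its route has to capture the raw segment and defer interpretation to the view. A hedged Django sketch of such a route (the pattern, view, and URL here are illustrative and not taken from this patch):

    from django.http import JsonResponse
    from django.urls import re_path

    def organization_stats(request, organization_id_or_slug):
        # A real endpoint would resolve the segment as in the lookup sketch above.
        return JsonResponse({"organization": organization_id_or_slug})

    urlpatterns = [
        # One pattern matches both /organizations/1234/... and /organizations/my-org/...
        re_path(
            r"^api/0/organizations/(?P<organization_id_or_slug>[^/]+)/stats/$",
            organization_stats,
        ),
    ]
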
diff --git a/src/sentry/api/endpoints/event_reprocessable.py b/src/sentry/api/endpoints/event_reprocessable.py
index 39c5a8f3df15dd..9358add9776c5f 100644
--- a/src/sentry/api/endpoints/event_reprocessable.py
+++ b/src/sentry/api/endpoints/event_reprocessable.py
@@ -48,7 +48,7 @@ def get(self, request: Request, project, event_id) -> Response:
         * `attachment.not_found`: A required attachment, such as the original
           minidump, is missing.

-        :pparam string organization_slug: the id or slug of the organization the
+        :pparam string organization_id_or_slug: the id or slug of the organization the
                                           issues belong to.
         :pparam string project_id_or_slug: the id or slug of the project the event
                                            belongs to.
diff --git a/src/sentry/api/endpoints/filechange.py b/src/sentry/api/endpoints/filechange.py
index cbc7617f6a228e..15818c993087ac 100644
--- a/src/sentry/api/endpoints/filechange.py
+++ b/src/sentry/api/endpoints/filechange.py
@@ -29,7 +29,7 @@ def get(self, request: Request, organization, version) -> Response:
         Retrieve a list of files that were changed in a given release's commits.

-        :pparam string organization_slug: the id or slug of the organization the
+        :pparam string organization_id_or_slug: the id or slug of the organization the
                                           release belongs to.

         :pparam string version: the version identifier of the release.
diff --git a/src/sentry/api/endpoints/group_details.py b/src/sentry/api/endpoints/group_details.py
index 25f3b89540466f..46f409d0da3cbf 100644
--- a/src/sentry/api/endpoints/group_details.py
+++ b/src/sentry/api/endpoints/group_details.py
@@ -136,7 +136,7 @@ def get(self, request: Request, group) -> Response:
         the issue (title, last seen, first seen), some overall numbers
         (number of comments, user reports) as well as the summarized
         event data.

-        :pparam string organization_slug: The slug of the organization.
+        :pparam string organization_id_or_slug: The id or slug of the organization.
         :pparam string issue_id: the ID of the issue to retrieve.
         :auth: required
         """
diff --git a/src/sentry/api/endpoints/group_tombstone_details.py b/src/sentry/api/endpoints/group_tombstone_details.py
index 27020170d88da1..addd62e9489545 100644
--- a/src/sentry/api/endpoints/group_tombstone_details.py
+++ b/src/sentry/api/endpoints/group_tombstone_details.py
@@ -25,7 +25,7 @@ def delete(self, request: Request, project, tombstone_id) -> Response:
         Undiscards a group such that new events in that group will be
         captured. This does not restore any previous data.

-        :pparam string organization_slug: the id or slug of the organization.
+        :pparam string organization_id_or_slug: the id or slug of the organization.
         :pparam string project_id_or_slug: the id or slug of the project to which this tombstone belongs.
         :pparam string tombstone_id: the ID of the tombstone to remove.
         :auth: required
diff --git a/src/sentry/api/endpoints/organization_api_key_details.py b/src/sentry/api/endpoints/organization_api_key_details.py
index cef671fe023d7c..66e59a3316a76e 100644
--- a/src/sentry/api/endpoints/organization_api_key_details.py
+++ b/src/sentry/api/endpoints/organization_api_key_details.py
@@ -36,7 +36,7 @@ def get(self, request: Request, organization_context, organization, api_key_id)
         Retrieves API Key details
         `````````````````````````

-        :pparam string organization_slug: the id or slug of the organization the
+        :pparam string organization_id_or_slug: the id or slug of the organization the
                                           team belongs to.
:pparam string api_key_id: the ID of the api key to delete :auth: required @@ -53,7 +53,7 @@ def put(self, request: Request, organization_context, organization, api_key_id) Update an API Key ````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team belongs to. :pparam string api_key_id: the ID of the api key to delete :param string label: the new label for the api key @@ -89,7 +89,7 @@ def delete(self, request: Request, organization_context, organization, api_key_i Deletes an API Key `````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team belongs to. :pparam string api_key_id: the ID of the api key to delete :auth: required diff --git a/src/sentry/api/endpoints/organization_api_key_index.py b/src/sentry/api/endpoints/organization_api_key_index.py index 45438b30ad5fd2..06713f169f6d26 100644 --- a/src/sentry/api/endpoints/organization_api_key_index.py +++ b/src/sentry/api/endpoints/organization_api_key_index.py @@ -30,7 +30,7 @@ def get(self, request: Request, organization_context, organization) -> Response: List an Organization's API Keys ``````````````````````````````````` - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :auth: required """ queryset = sorted( @@ -44,7 +44,7 @@ def post(self, request: Request, organization_context, organization) -> Response Create an Organization API Key ``````````````````````````````````` - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :auth: required """ key = ApiKey.objects.create(organization_id=organization.id, scope_list=DEFAULT_SCOPES) diff --git a/src/sentry/api/endpoints/organization_auth_provider_details.py b/src/sentry/api/endpoints/organization_auth_provider_details.py index 7ecb1e113e266e..144c4b22004bf0 100644 --- a/src/sentry/api/endpoints/organization_auth_provider_details.py +++ b/src/sentry/api/endpoints/organization_auth_provider_details.py @@ -26,7 +26,7 @@ def get(self, request: Request, organization: Organization) -> Response: currently installed auth_provider `````````````````````````````````````````````````````` - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :auth: required """ auth_provider = auth_service.get_auth_provider(organization_id=organization.id) diff --git a/src/sentry/api/endpoints/organization_auth_providers.py b/src/sentry/api/endpoints/organization_auth_providers.py index c844e8e5950349..cac490e3167fb0 100644 --- a/src/sentry/api/endpoints/organization_auth_providers.py +++ b/src/sentry/api/endpoints/organization_auth_providers.py @@ -22,7 +22,7 @@ def get(self, request: Request, organization) -> Response: List available auth providers that are available to use for an Organization ``````````````````````````````````````````````````````````````````````````` - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :auth: required """ provider_list = [] diff --git a/src/sentry/api/endpoints/organization_code_mapping_details.py b/src/sentry/api/endpoints/organization_code_mapping_details.py index 296ca265b939fa..6395e678de9967 
100644 --- a/src/sentry/api/endpoints/organization_code_mapping_details.py +++ b/src/sentry/api/endpoints/organization_code_mapping_details.py @@ -50,7 +50,7 @@ def put(self, request: Request, config_id, organization, config) -> Response: Update a repository project path config `````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. :param int repository_id: :param int project_id: diff --git a/src/sentry/api/endpoints/organization_code_mappings.py b/src/sentry/api/endpoints/organization_code_mappings.py index e2be4bb2bf1c75..e8f544e2aa935b 100644 --- a/src/sentry/api/endpoints/organization_code_mappings.py +++ b/src/sentry/api/endpoints/organization_code_mappings.py @@ -141,7 +141,7 @@ def get(self, request: Request, organization) -> Response: """ Get the list of repository project path configs - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. :qparam int integrationId: the optional integration id. :qparam int project: Optional. Pass "-1" to filter to 'all projects user has access to'. Omit to filter for 'all projects user is a member of'. @@ -175,7 +175,7 @@ def post(self, request: Request, organization) -> Response: Create a new repository project path config `````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. :param int repositoryId: :param int projectId: diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py index b3bc3ab64343cc..bcc6e754c61f7a 100644 --- a/src/sentry/api/endpoints/organization_dashboards.py +++ b/src/sentry/api/endpoints/organization_dashboards.py @@ -49,7 +49,7 @@ def get(self, request: Request, organization) -> Response: If on the first page, this endpoint will also include any pre-built dashboards that haven't been replaced or removed. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the dashboards belongs to. :qparam string query: the title of the dashboard being searched for. :auth: required @@ -142,7 +142,7 @@ def post(self, request: Request, organization, retry=0) -> Response: `````````````````````````````````````````` Create a new dashboard for the given Organization - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the dashboards belongs to. """ if not features.has("organizations:dashboards-edit", organization, actor=request.user): diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py index 2eea3a854706b1..5fad6ab51224a2 100644 --- a/src/sentry/api/endpoints/organization_details.py +++ b/src/sentry/api/endpoints/organization_details.py @@ -555,7 +555,7 @@ def get(self, request: Request, organization) -> Response: Return details on an individual organization including various details such as membership access, features, and teams. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. 
:param string detailed: Specify '0' to retrieve details without projects and teams. :auth: required @@ -581,7 +581,7 @@ def put(self, request: Request, organization) -> Response: Update various attributes and configurable settings for the given organization. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. :param string name: an optional new name for the organization. :param string slug: an optional new slug for the organization. Needs @@ -714,7 +714,7 @@ def delete(self, request: Request, organization) -> Response: However once deletion has begun the state of an organization changes and will be hidden from most public views. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. :auth: required, user-context-needed """ diff --git a/src/sentry/api/endpoints/organization_eventid.py b/src/sentry/api/endpoints/organization_eventid.py index a0f5cf74db89a3..3fddf7346d45c5 100644 --- a/src/sentry/api/endpoints/organization_eventid.py +++ b/src/sentry/api/endpoints/organization_eventid.py @@ -35,7 +35,7 @@ def get(self, request: Request, organization, event_id) -> Response: This resolves an event ID to the project slug and internal issue ID and internal event ID. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the event ID should be looked up in. :param string event_id: the event ID to look up. validated by a regex in the URL. diff --git a/src/sentry/api/endpoints/organization_issues_resolved_in_release.py b/src/sentry/api/endpoints/organization_issues_resolved_in_release.py index 753a27e7352ce6..7eeec5edfd219c 100644 --- a/src/sentry/api/endpoints/organization_issues_resolved_in_release.py +++ b/src/sentry/api/endpoints/organization_issues_resolved_in_release.py @@ -26,7 +26,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of issues to be resolved in a given release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/details.py b/src/sentry/api/endpoints/organization_member/requests/invite/details.py index 03b5ed54426662..35a7e36ca545d1 100644 --- a/src/sentry/api/endpoints/organization_member/requests/invite/details.py +++ b/src/sentry/api/endpoints/organization_member/requests/invite/details.py @@ -90,7 +90,7 @@ def put( Update and/or approve an invite request to an organization. - :pparam string organization_slug: the id or slug of the organization the member will belong to + :pparam string organization_id_or_slug: the id or slug of the organization the member will belong to :param string member_id: the member ID :param boolean approve: allows the member to be invited :param string role: the suggested role of the new member @@ -171,7 +171,7 @@ def delete( Delete an invite request to an organization. 
- :pparam string organization_slug: the id or slug of the organization the member would belong to + :pparam string organization_id_or_slug: the id or slug of the organization the member would belong to :param string member_id: the member ID :auth: required diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/index.py b/src/sentry/api/endpoints/organization_member/requests/invite/index.py index 7f90b4bbcee38d..1f335a57698deb 100644 --- a/src/sentry/api/endpoints/organization_member/requests/invite/index.py +++ b/src/sentry/api/endpoints/organization_member/requests/invite/index.py @@ -61,7 +61,7 @@ def post(self, request: Request, organization) -> Response: Creates an invite request given an email and suggested role / teams. - :pparam string organization_slug: the id or slug of the organization the member will belong to + :pparam string organization_id_or_slug: the id or slug of the organization the member will belong to :param string email: the email address to invite :param string role: the suggested role of the new member :param string orgRole: the suggested org-role of the new member diff --git a/src/sentry/api/endpoints/organization_processingissues.py b/src/sentry/api/endpoints/organization_processingissues.py index eb4b3f350a8ec9..27836dde854cdd 100644 --- a/src/sentry/api/endpoints/organization_processingissues.py +++ b/src/sentry/api/endpoints/organization_processingissues.py @@ -21,7 +21,7 @@ def get(self, request: Request, organization) -> Response: For each Project in an Organization, list its processing issues. Can be passed `project` to filter down to specific projects. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :qparam array[string] project: An optional list of project ids to filter to within the organization :auth: required diff --git a/src/sentry/api/endpoints/organization_projects_experiment.py b/src/sentry/api/endpoints/organization_projects_experiment.py index 8ebff6b033f68e..8c3b614204d2d2 100644 --- a/src/sentry/api/endpoints/organization_projects_experiment.py +++ b/src/sentry/api/endpoints/organization_projects_experiment.py @@ -71,7 +71,7 @@ def post(self, request: Request, organization: Organization) -> Response: If this is taken, a random three letter suffix is added as needed (eg: ...-gnm, ...-zls). Then create a new project bound to this team - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team should be created for. :param string name: the name for the new project. :param string platform: the optional platform that this project is for. diff --git a/src/sentry/api/endpoints/organization_projects_sent_first_event.py b/src/sentry/api/endpoints/organization_projects_sent_first_event.py index dab0e3f054e17c..9d2d087e0b2be3 100644 --- a/src/sentry/api/endpoints/organization_projects_sent_first_event.py +++ b/src/sentry/api/endpoints/organization_projects_sent_first_event.py @@ -23,7 +23,7 @@ def get(self, request: Request, organization) -> Response: Returns true if any projects within the organization have received a first event, false otherwise. - :pparam string organization_slug: the id or slug of the organization + :pparam string organization_id_or_slug: the id or slug of the organization containing the projects to check for a first event from. 
:qparam array[string] project: An optional list of project ids to filter diff --git a/src/sentry/api/endpoints/organization_release_commits.py b/src/sentry/api/endpoints/organization_release_commits.py index 3533619bd83c03..fc93f9b5dc6353 100644 --- a/src/sentry/api/endpoints/organization_release_commits.py +++ b/src/sentry/api/endpoints/organization_release_commits.py @@ -23,7 +23,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of commits for a given release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_release_details.py b/src/sentry/api/endpoints/organization_release_details.py index e2b9f65ae9881e..1d32a1af27f45a 100644 --- a/src/sentry/api/endpoints/organization_release_details.py +++ b/src/sentry/api/endpoints/organization_release_details.py @@ -288,7 +288,7 @@ def get(self, request: Request, organization, version) -> Response: Return details on an individual release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required @@ -383,7 +383,7 @@ def put(self, request: Request, organization, version) -> Response: Update a release. This can change some metadata associated with the release (the ref, url, and dates). - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :param string ref: an optional commit reference. This is useful if @@ -510,7 +510,7 @@ def delete(self, request: Request, organization, version) -> Response: Permanently remove a release and all of its files. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_release_file_details.py b/src/sentry/api/endpoints/organization_release_file_details.py index 2b190e4fb068ad..f24233bc8da3ca 100644 --- a/src/sentry/api/endpoints/organization_release_file_details.py +++ b/src/sentry/api/endpoints/organization_release_file_details.py @@ -33,7 +33,7 @@ def get(self, request: Request, organization, version, file_id) -> Response: not actually return the contents of the file, just the associated metadata. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :pparam string file_id: the ID of the file to retrieve. @@ -62,7 +62,7 @@ def put(self, request: Request, organization, version, file_id) -> Response: Update metadata of an existing file. Currently only the name of the file can be changed. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. 
:pparam string file_id: the ID of the file to update. @@ -89,7 +89,7 @@ def delete(self, request: Request, organization, version, file_id) -> Response: This will also remove the physical file from storage. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :pparam string file_id: the ID of the file to delete. diff --git a/src/sentry/api/endpoints/organization_release_files.py b/src/sentry/api/endpoints/organization_release_files.py index 91abde8ceac0c2..4e057726f2c47f 100644 --- a/src/sentry/api/endpoints/organization_release_files.py +++ b/src/sentry/api/endpoints/organization_release_files.py @@ -25,7 +25,7 @@ def get(self, request: Request, organization, version) -> Response: Retrieve a list of files for a given release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :qparam string query: If set, only files with these partial names will be returned. @@ -56,7 +56,7 @@ def post(self, request: Request, organization, version) -> Response: that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :param string name: the name (full path) of the file. diff --git a/src/sentry/api/endpoints/organization_release_meta.py b/src/sentry/api/endpoints/organization_release_meta.py index 987629564165a5..89b85033600bab 100644 --- a/src/sentry/api/endpoints/organization_release_meta.py +++ b/src/sentry/api/endpoints/organization_release_meta.py @@ -28,7 +28,7 @@ def get(self, request: Request, organization, version) -> Response: The data returned from here is auxiliary meta data that the UI uses. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required diff --git a/src/sentry/api/endpoints/organization_releases.py b/src/sentry/api/endpoints/organization_releases.py index 23f2d92376d665..1a9e1653bb1b7b 100644 --- a/src/sentry/api/endpoints/organization_releases.py +++ b/src/sentry/api/endpoints/organization_releases.py @@ -243,7 +243,7 @@ def get(self, request: Request, organization) -> Response: ``````````````````````````````` Return a list of releases for a given organization. - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :qparam string query: this parameter can be used to create a "starts with" filter for the version. """ @@ -421,7 +421,7 @@ def post(self, request: Request, organization) -> Response: Releases are also necessary for sourcemaps and other debug features that require manual upload for functioning well. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :param string version: a version identifier for this release. 
Can be a version number, a commit hash etc. @@ -595,7 +595,7 @@ def get(self, request: Request, organization) -> Response: ``````````````````````````````` Return a list of releases for a given organization, sorted for most recent releases. - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization """ query = request.GET.get("query") diff --git a/src/sentry/api/endpoints/organization_repositories.py b/src/sentry/api/endpoints/organization_repositories.py index eaff3ab3350bd6..63af7088763af2 100644 --- a/src/sentry/api/endpoints/organization_repositories.py +++ b/src/sentry/api/endpoints/organization_repositories.py @@ -40,7 +40,7 @@ def get(self, request: Request, organization) -> Response: Return a list of version control repositories for a given organization. - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :qparam string query: optional filter by repository name :auth: required """ diff --git a/src/sentry/api/endpoints/organization_repository_commits.py b/src/sentry/api/endpoints/organization_repository_commits.py index 97997b42769d61..ac4211ef3e2bc0 100644 --- a/src/sentry/api/endpoints/organization_repository_commits.py +++ b/src/sentry/api/endpoints/organization_repository_commits.py @@ -26,7 +26,7 @@ def get(self, request: Request, organization, repo_id) -> Response: Return a list of commits for a given repository. - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :pparam string repo_id: the repository ID :auth: required """ diff --git a/src/sentry/api/endpoints/organization_shortid.py b/src/sentry/api/endpoints/organization_shortid.py index 8af3609c66d1f3..c7c50db957f054 100644 --- a/src/sentry/api/endpoints/organization_shortid.py +++ b/src/sentry/api/endpoints/organization_shortid.py @@ -24,7 +24,7 @@ def get(self, request: Request, organization, short_id) -> Response: This resolves a short ID to the project slug and internal issue ID. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the short ID should be looked up in. :pparam string short_id: the short ID to look up. :auth: required diff --git a/src/sentry/api/endpoints/organization_slugs.py b/src/sentry/api/endpoints/organization_slugs.py index 94bd9ef07df280..f1151059bd809e 100644 --- a/src/sentry/api/endpoints/organization_slugs.py +++ b/src/sentry/api/endpoints/organization_slugs.py @@ -25,7 +25,7 @@ def put(self, request: Request, organization) -> Response: Updates the slugs of projects within the organization. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the short ID should be looked up in. :param slugs: a dictionary of project IDs to their intended slugs. :auth: required diff --git a/src/sentry/api/endpoints/organization_stats.py b/src/sentry/api/endpoints/organization_stats.py index 45a1de7c05ad58..927cd779822aef 100644 --- a/src/sentry/api/endpoints/organization_stats.py +++ b/src/sentry/api/endpoints/organization_stats.py @@ -32,7 +32,7 @@ def get(self, request: Request, organization) -> Response: Return a set of points representing a normalized timestamp and the number of events seen in the period. 
- :pparam string organization_slug: the id or slug of the organization for + :pparam string organization_id_or_slug: the id or slug of the organization for which the stats should be retrieved. :qparam string stat: the name of the stat to query (``"received"``, diff --git a/src/sentry/api/endpoints/organization_user_reports.py b/src/sentry/api/endpoints/organization_user_reports.py index 9550595381670c..a083d32db550c7 100644 --- a/src/sentry/api/endpoints/organization_user_reports.py +++ b/src/sentry/api/endpoints/organization_user_reports.py @@ -35,7 +35,7 @@ def get(self, request: Request, organization) -> Response: Return a list of user feedback items within this organization. Can be filtered by projects/environments/creation date. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required """ diff --git a/src/sentry/api/endpoints/organization_users.py b/src/sentry/api/endpoints/organization_users.py index 242677487a93dc..7be07bf57876b5 100644 --- a/src/sentry/api/endpoints/organization_users.py +++ b/src/sentry/api/endpoints/organization_users.py @@ -27,7 +27,7 @@ def get(self, request: Request, organization) -> Response: Return a list of users that belong to a given organization and are part of a project. :qparam string project: restrict results to users who have access to a given project ID - :pparam string organization_slug: the id or slug of the organization for which the users + :pparam string organization_id_or_slug: the id or slug of the organization for which the users should be listed. :auth: required """ diff --git a/src/sentry/api/endpoints/project_artifact_bundle_file_details.py b/src/sentry/api/endpoints/project_artifact_bundle_file_details.py index 16530c4ee9f64d..6b941dc2f91fc2 100644 --- a/src/sentry/api/endpoints/project_artifact_bundle_file_details.py +++ b/src/sentry/api/endpoints/project_artifact_bundle_file_details.py @@ -63,7 +63,7 @@ def get(self, request: Request, project, bundle_id, file_id) -> Response: not actually return the contents of the file, just the associated metadata. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the file of. diff --git a/src/sentry/api/endpoints/project_artifact_bundle_files.py b/src/sentry/api/endpoints/project_artifact_bundle_files.py index 2ae9b585ebfa53..423b2e4cb2ee64 100644 --- a/src/sentry/api/endpoints/project_artifact_bundle_files.py +++ b/src/sentry/api/endpoints/project_artifact_bundle_files.py @@ -69,7 +69,7 @@ def get(self, request: Request, project, bundle_id) -> Response: Retrieve a list of files for a given artifact bundle. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the artifact bundle belongs to. :pparam string project_id_or_slug: the id or slug of the project the artifact bundle belongs to. 
diff --git a/src/sentry/api/endpoints/project_commits.py b/src/sentry/api/endpoints/project_commits.py index bfcbf574032535..b280df59dbeece 100644 --- a/src/sentry/api/endpoints/project_commits.py +++ b/src/sentry/api/endpoints/project_commits.py @@ -26,7 +26,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of commits for a given project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the commit belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the commits of. diff --git a/src/sentry/api/endpoints/project_environments.py b/src/sentry/api/endpoints/project_environments.py index 668f2f19212e27..80536ec9de6f9b 100644 --- a/src/sentry/api/endpoints/project_environments.py +++ b/src/sentry/api/endpoints/project_environments.py @@ -27,7 +27,7 @@ def get(self, request: Request, project) -> Response: environments, or ``"all"`` for both hidden and visible environments. - :pparam string organization_slug: the id or slug of the organization the project + :pparam string organization_id_or_slug: the id or slug of the organization the project belongs to. :pparam string project_id_or_slug: the id or slug of the project. diff --git a/src/sentry/api/endpoints/project_event_details.py b/src/sentry/api/endpoints/project_event_details.py index 4d584f978e9c24..b99258f2ce80fa 100644 --- a/src/sentry/api/endpoints/project_event_details.py +++ b/src/sentry/api/endpoints/project_event_details.py @@ -64,7 +64,7 @@ def get(self, request: Request, project, event_id) -> Response: Return details on an individual event. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the event belongs to. :pparam string project_id_or_slug: the id or slug of the project the event belongs to. diff --git a/src/sentry/api/endpoints/project_events.py b/src/sentry/api/endpoints/project_events.py index c0a91ccce6ccab..ee39b55e471c7f 100644 --- a/src/sentry/api/endpoints/project_events.py +++ b/src/sentry/api/endpoints/project_events.py @@ -46,7 +46,7 @@ def get(self, request: Request, project) -> Response: :qparam bool sample: return events in pseudo-random order. This is deterministic, same query will return the same events in the same order. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the groups belong to. :pparam string project_id_or_slug: the id or slug of the project the groups belong to. diff --git a/src/sentry/api/endpoints/project_group_index.py b/src/sentry/api/endpoints/project_group_index.py index f4a24bc3d30935..637a5fd6fea6e6 100644 --- a/src/sentry/api/endpoints/project_group_index.py +++ b/src/sentry/api/endpoints/project_group_index.py @@ -77,7 +77,7 @@ def get(self, request: Request, project) -> Response: ``"is:unresolved"`` is assumed.) :qparam string environment: this restricts the issues to ones containing events from this environment - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the issues belong to. @@ -214,7 +214,7 @@ def put(self, request: Request, project) -> Response: specified status. Valid values are ``"resolved"``, ``"unresolved"`` and ``"ignored"``. 
- :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the issues belong to. @@ -271,7 +271,7 @@ def delete(self, request: Request, project) -> Response: :qparam int id: a list of IDs of the issues to be removed. This parameter shall be repeated for each issue. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :pparam string project_id_or_slug: the id or slug of the project the issues belong to. diff --git a/src/sentry/api/endpoints/project_issues_resolved_in_release.py b/src/sentry/api/endpoints/project_issues_resolved_in_release.py index 760e51e5f5083a..5d9d97dcc8090f 100644 --- a/src/sentry/api/endpoints/project_issues_resolved_in_release.py +++ b/src/sentry/api/endpoints/project_issues_resolved_in_release.py @@ -26,7 +26,7 @@ def get(self, request: Request, project, version) -> Response: Retrieve a list of issues to be resolved in a given release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project associated with the release. :pparam string version: the version identifier of the release. diff --git a/src/sentry/api/endpoints/project_performance_issue_settings.py b/src/sentry/api/endpoints/project_performance_issue_settings.py index 9e02b05511c575..1d2aaea6e3bd8f 100644 --- a/src/sentry/api/endpoints/project_performance_issue_settings.py +++ b/src/sentry/api/endpoints/project_performance_issue_settings.py @@ -184,7 +184,7 @@ def get(self, request: Request, project) -> Response: Return settings for performance issues - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the project belongs to. :pparam string project_id_or_slug: the id or slug of the project to configure. :auth: required diff --git a/src/sentry/api/endpoints/project_release_commits.py b/src/sentry/api/endpoints/project_release_commits.py index 1e692c01fc0378..624d669d91cf8f 100644 --- a/src/sentry/api/endpoints/project_release_commits.py +++ b/src/sentry/api/endpoints/project_release_commits.py @@ -26,7 +26,7 @@ def get(self, request: Request, project, version) -> Response: Retrieve a list of commits for a given release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the release files of. diff --git a/src/sentry/api/endpoints/project_release_details.py b/src/sentry/api/endpoints/project_release_details.py index 7eeba16e5eaa59..75a68e1b3e7492 100644 --- a/src/sentry/api/endpoints/project_release_details.py +++ b/src/sentry/api/endpoints/project_release_details.py @@ -34,7 +34,7 @@ def get(self, request: Request, project, version) -> Response: Return details on an individual release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. 
:pparam string project_id_or_slug: the id or slug of the project to retrieve the release of. @@ -78,7 +78,7 @@ def put(self, request: Request, project, version) -> Response: Update a release. This can change some metadata associated with the release (the ref, url, and dates). - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to change the release of. @@ -154,7 +154,7 @@ def delete(self, request: Request, project, version) -> Response: Permanently remove a release and all of its files. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the release of. diff --git a/src/sentry/api/endpoints/project_release_file_details.py b/src/sentry/api/endpoints/project_release_file_details.py index 43ef2174994ce8..b72a1c2b1a03a9 100644 --- a/src/sentry/api/endpoints/project_release_file_details.py +++ b/src/sentry/api/endpoints/project_release_file_details.py @@ -217,7 +217,7 @@ def get(self, request: Request, project, version, file_id) -> Response: not actually return the contents of the file, just the associated metadata. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the file of. @@ -247,7 +247,7 @@ def put(self, request: Request, project, version, file_id) -> Response: Update metadata of an existing file. Currently only the name of the file can be changed. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to update the file of. @@ -276,7 +276,7 @@ def delete(self, request: Request, project, version, file_id) -> Response: This will also remove the physical file from storage, except if it is stored as part of an artifact bundle. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete the file of. diff --git a/src/sentry/api/endpoints/project_release_files.py b/src/sentry/api/endpoints/project_release_files.py index b5873e23258323..5674787662a85d 100644 --- a/src/sentry/api/endpoints/project_release_files.py +++ b/src/sentry/api/endpoints/project_release_files.py @@ -244,7 +244,7 @@ def get(self, request: Request, project, version) -> Response: Retrieve a list of files for a given release. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the release files of. @@ -276,7 +276,7 @@ def post(self, request: Request, project, version) -> Response: that this file will be referenced as. For example, in the case of JavaScript you might specify the full web URI. 
- :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to change the release of. diff --git a/src/sentry/api/endpoints/project_release_repositories.py b/src/sentry/api/endpoints/project_release_repositories.py index 6e6e7e9f94c76f..95b57ce4e0dc46 100644 --- a/src/sentry/api/endpoints/project_release_repositories.py +++ b/src/sentry/api/endpoints/project_release_repositories.py @@ -26,7 +26,7 @@ def get(self, request: Request, project, version) -> Response: This endpoint is used in the commits and changed files tab of the release details page - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to retrieve the release of. diff --git a/src/sentry/api/endpoints/project_release_stats.py b/src/sentry/api/endpoints/project_release_stats.py index d0860b5650df88..f769298539d52a 100644 --- a/src/sentry/api/endpoints/project_release_stats.py +++ b/src/sentry/api/endpoints/project_release_stats.py @@ -39,7 +39,7 @@ def get(self, request: Request, project, version) -> Response: Returns the stats of a given release under a project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the release files of. diff --git a/src/sentry/api/endpoints/project_releases.py b/src/sentry/api/endpoints/project_releases.py index 6c530a95a81f24..c69b315e7b3bb5 100644 --- a/src/sentry/api/endpoints/project_releases.py +++ b/src/sentry/api/endpoints/project_releases.py @@ -42,7 +42,7 @@ def get(self, request: Request, project) -> Response: Retrieve a list of releases for a given project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to list the releases of. @@ -97,7 +97,7 @@ def post(self, request: Request, project) -> Response: Releases are also necessary for sourcemaps and other debug features that require manual upload for functioning well. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string project_id_or_slug: the id or slug of the project to create a release for. diff --git a/src/sentry/api/endpoints/project_servicehook_details.py b/src/sentry/api/endpoints/project_servicehook_details.py index e06d6a344450ae..5908675291dcf6 100644 --- a/src/sentry/api/endpoints/project_servicehook_details.py +++ b/src/sentry/api/endpoints/project_servicehook_details.py @@ -31,7 +31,7 @@ def get(self, request: Request, project, hook_id) -> Response: Return a service hook bound to a project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. 
@@ -49,7 +49,7 @@ def put(self, request: Request, project, hook_id) -> Response: Update a Service Hook ````````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. @@ -102,7 +102,7 @@ def delete(self, request: Request, project, hook_id) -> Response: Remove a Service Hook ````````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. diff --git a/src/sentry/api/endpoints/project_servicehooks.py b/src/sentry/api/endpoints/project_servicehooks.py index 8b5c1d2938c054..6199cb96ba0249 100644 --- a/src/sentry/api/endpoints/project_servicehooks.py +++ b/src/sentry/api/endpoints/project_servicehooks.py @@ -37,7 +37,7 @@ def get(self, request: Request, project) -> Response: This endpoint requires the 'servicehooks' feature to be enabled for your project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. @@ -83,7 +83,7 @@ def post(self, request: Request, project) -> Response: This endpoint requires the 'servicehooks' feature to be enabled for your project. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the client keys belong to. :pparam string project_id_or_slug: the id or slug of the project the client keys belong to. diff --git a/src/sentry/api/endpoints/project_stats.py b/src/sentry/api/endpoints/project_stats.py index b0e0e3583bca13..f0ae58a5590815 100644 --- a/src/sentry/api/endpoints/project_stats.py +++ b/src/sentry/api/endpoints/project_stats.py @@ -31,7 +31,7 @@ def get(self, request: Request, project) -> Response: Query ranges are limited to Sentry's configured time-series resolutions. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :qparam string stat: the name of the stat to query (``"received"``, ``"rejected"``, ``"blacklisted"``, ``generated``) diff --git a/src/sentry/api/endpoints/project_tagkey_values.py b/src/sentry/api/endpoints/project_tagkey_values.py index 9f4cd23c0ff588..83bed72538978c 100644 --- a/src/sentry/api/endpoints/project_tagkey_values.py +++ b/src/sentry/api/endpoints/project_tagkey_values.py @@ -29,7 +29,7 @@ def get(self, request: Request, project, key) -> Response: values. When paginated can return at most 1000 values. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :pparam string key: the tag key to look up. 
:auth: required diff --git a/src/sentry/api/endpoints/project_teams.py b/src/sentry/api/endpoints/project_teams.py index 261eb1272515f5..20faec5e946dab 100644 --- a/src/sentry/api/endpoints/project_teams.py +++ b/src/sentry/api/endpoints/project_teams.py @@ -24,7 +24,7 @@ def get(self, request: Request, project) -> Response: Return a list of teams that have access to this project. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required """ diff --git a/src/sentry/api/endpoints/project_transfer.py b/src/sentry/api/endpoints/project_transfer.py index 2112635baecf8e..3a429d12c27d42 100644 --- a/src/sentry/api/endpoints/project_transfer.py +++ b/src/sentry/api/endpoints/project_transfer.py @@ -39,7 +39,7 @@ def post(self, request: Request, project) -> Response: Schedules a project for transfer to a new organization. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the project belongs to. :pparam string project_id_or_slug: the id or slug of the project to delete. :param string email: email of new owner. must be an organization owner diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py index 3359d8b455dd7d..36dda69e763ff7 100644 --- a/src/sentry/api/endpoints/project_user_reports.py +++ b/src/sentry/api/endpoints/project_user_reports.py @@ -45,7 +45,7 @@ def get(self, request: Request, project) -> Response: Return a list of user feedback items within this project. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required """ @@ -100,7 +100,7 @@ def post(self, request: Request, project) -> Response: Note: Feedback may be submitted with DSN authentication (see auth documentation). - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :auth: required :param string event_id: the event ID diff --git a/src/sentry/api/endpoints/project_users.py b/src/sentry/api/endpoints/project_users.py index 943404972aa732..fffd5df8ac17b0 100644 --- a/src/sentry/api/endpoints/project_users.py +++ b/src/sentry/api/endpoints/project_users.py @@ -31,7 +31,7 @@ def get(self, request: Request, project) -> Response: Return a list of users seen within this project. - :pparam string organization_slug: the id or slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string project_id_or_slug: the id or slug of the project. :pparam string key: the tag key to look up. :auth: required diff --git a/src/sentry/api/endpoints/release_deploys.py b/src/sentry/api/endpoints/release_deploys.py index 8aba55033e8e3b..65aa46ed31eaee 100644 --- a/src/sentry/api/endpoints/release_deploys.py +++ b/src/sentry/api/endpoints/release_deploys.py @@ -52,7 +52,7 @@ def get(self, request: Request, organization, version) -> Response: Returns a list of deploys based on the organization, version, and project. 
- :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :pparam string version: the version identifier of the release. """ try: @@ -92,7 +92,7 @@ def post(self, request: Request, organization, version) -> Response: Create a deploy for a given release. - :pparam string organization_slug: the organization short name + :pparam string organization_id_or_slug: the id or slug of the organization :pparam string version: the version identifier of the release. :param string environment: the environment you're deploying to :param string name: the optional name of the deploy diff --git a/src/sentry/api/endpoints/team_details.py b/src/sentry/api/endpoints/team_details.py index d09435aa351818..148556db671577 100644 --- a/src/sentry/api/endpoints/team_details.py +++ b/src/sentry/api/endpoints/team_details.py @@ -51,7 +51,7 @@ def get(self, request: Request, team) -> Response: Return details on an individual team. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team belongs to. :pparam string team_id_or_slug: the id or slug of the team to get. :qparam list expand: an optional list of strings to opt in to additional @@ -81,7 +81,7 @@ def put(self, request: Request, team) -> Response: Update various attributes and configurable settings for the given team. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the team belongs to. :pparam string team_id_or_slug: the id or slug of the team to get. :param string name: the new name for the team. diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py index a5766858316355..57f85ca63456c7 100644 --- a/src/sentry/issues/endpoints/organization_group_index.py +++ b/src/sentry/issues/endpoints/organization_group_index.py @@ -251,7 +251,7 @@ def get(self, request: Request, organization) -> Response: :qparam bool savedSearch: if this is set to False, then we are making the request without a saved search and will look for the default search from this endpoint. :qparam string searchId: if passed in, this is the selected search - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :auth: required :qparam list expand: an optional list of strings to opt in to additional data. Supports `inbox` @@ -445,7 +445,7 @@ def put(self, request: Request, organization) -> Response: specified status. Valid values are ``"resolved"``, ``"unresolved"`` and ``"ignored"``. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. :param string status: the new status for the issues. Valid values are ``"resolved"``, ``"resolvedInNextRelease"``, @@ -521,7 +521,7 @@ def delete(self, request: Request, organization) -> Response: parameter shall be repeated for each issue, e.g. `?id=1&id=2&id=3`. If this parameter is not provided, it will attempt to remove the first 1000 issues. - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the issues belong to. 
:auth: required """ diff --git a/src/sentry/issues/endpoints/organization_release_previous_commits.py b/src/sentry/issues/endpoints/organization_release_previous_commits.py index 3559400c0a49b1..edc49300f09610 100644 --- a/src/sentry/issues/endpoints/organization_release_previous_commits.py +++ b/src/sentry/issues/endpoints/organization_release_previous_commits.py @@ -26,7 +26,7 @@ def get(self, request: Request, organization: Organization, version: str) -> Res Retrieve an Organization's Most Recent Release with Commits ```````````````````````````````````````````````````````````` - :pparam string organization_slug: the id or slug of the organization the + :pparam string organization_id_or_slug: the id or slug of the organization the release belongs to. :pparam string version: the version identifier of the release. :auth: required From d325ca9d585fbea87d12e0b0d878a4a712fb32ca Mon Sep 17 00:00:00 2001 From: Mark Story Date: Thu, 9 May 2024 09:59:45 -0400 Subject: [PATCH 199/376] chore(actor) Rename RpcActor -> Actor (#70545) With the Actor Django model removed we can use the term `Actor` for what is now the only actor in the application. Once this lands I'll update all the usage in both sentry & getsentry. --- src/sentry/services/hybrid_cloud/actor.py | 306 +--------------------- src/sentry/types/actor.py | 289 ++++++++++++++++++++ 2 files changed, 301 insertions(+), 294 deletions(-) create mode 100644 src/sentry/types/actor.py diff --git a/src/sentry/services/hybrid_cloud/actor.py b/src/sentry/services/hybrid_cloud/actor.py index c7c5a7a4a7cb7b..f5729723be6645 100644 --- a/src/sentry/services/hybrid_cloud/actor.py +++ b/src/sentry/services/hybrid_cloud/actor.py @@ -1,294 +1,12 @@ -# Please do not use -# from __future__ import annotations -# in modules such as this one where hybrid cloud data models or service classes are -# defined, because we want to reflect on type annotations and avoid forward references. - -from collections import defaultdict -from collections.abc import Iterable, MutableMapping, Sequence -from enum import Enum -from typing import TYPE_CHECKING, Any, Union, overload - -from django.core.exceptions import ObjectDoesNotExist -from rest_framework import serializers - -from sentry.services.hybrid_cloud import RpcModel -from sentry.services.hybrid_cloud.user import RpcUser - -if TYPE_CHECKING: - from sentry.models.team import Team - from sentry.models.user import User - from sentry.services.hybrid_cloud.organization import RpcTeam - - -class ActorType(str, Enum): - USER = "User" - TEAM = "Team" - - -ActorTarget = Union["RpcActor", "User", "RpcUser", "Team", "RpcTeam"] - - -class RpcActor(RpcModel): - """Can represent any model object with a foreign key to Actor.""" - - id: int - """The id of the user/team this actor represents""" - - actor_type: ActorType - """Whether this actor is a User or Team""" - - slug: str | None = None - - class InvalidActor(ObjectDoesNotExist): - """Raised when an Actor fails to resolve or be found""" - - pass - - @classmethod - def resolve_many(cls, actors: Sequence["RpcActor"]) -> list["Team | RpcUser"]: - """ - Resolve a list of actors in a batch to the Team/User the Actor references. - - Will generate more efficient queries to load actors than calling - RpcActor.resolve() individually will. 
- """ - from sentry.models.team import Team - from sentry.services.hybrid_cloud.user.service import user_service - - if not actors: - return [] - actors_by_type: dict[ActorType, list[RpcActor]] = defaultdict(list) - for actor in actors: - actors_by_type[actor.actor_type].append(actor) - results: dict[tuple[ActorType, int], Team | RpcUser] = {} - for actor_type, actor_list in actors_by_type.items(): - if actor_type == ActorType.USER: - for user in user_service.get_many(filter={"user_ids": [u.id for u in actor_list]}): - results[(actor_type, user.id)] = user - if actor_type == ActorType.TEAM: - for team in Team.objects.filter(id__in=[t.id for t in actor_list]): - results[(actor_type, team.id)] = team - - return list(filter(None, [results.get((actor.actor_type, actor.id)) for actor in actors])) - - @classmethod - def many_from_object(cls, objects: Iterable[ActorTarget]) -> list["RpcActor"]: - """ - Create a list of RpcActor instances based on a collection of 'objects' - - Objects will be grouped by the kind of actor they would be related to. - Queries for actors are batched to increase efficiency. Users that are - missing actors will have actors generated. - """ - from sentry.models.team import Team - from sentry.models.user import User - from sentry.services.hybrid_cloud.organization import RpcTeam - - result: list["RpcActor"] = [] - grouped_by_type: MutableMapping[str, list[int]] = defaultdict(list) - team_slugs: MutableMapping[int, str] = {} - for obj in objects: - if isinstance(obj, cls): - result.append(obj) - if isinstance(obj, (User, RpcUser)): - grouped_by_type[ActorType.USER].append(obj.id) - if isinstance(obj, (Team, RpcTeam)): - team_slugs[obj.id] = obj.slug - grouped_by_type[ActorType.TEAM].append(obj.id) - - if grouped_by_type[ActorType.TEAM]: - team_ids = grouped_by_type[ActorType.TEAM] - for team_id in team_ids: - result.append( - RpcActor( - id=team_id, - actor_type=ActorType.TEAM, - slug=team_slugs.get(team_id), - ) - ) - - if grouped_by_type[ActorType.USER]: - user_ids = grouped_by_type[ActorType.USER] - for user_id in user_ids: - result.append(RpcActor(id=user_id, actor_type=ActorType.USER)) - return result - - @classmethod - def from_object(cls, obj: ActorTarget) -> "RpcActor": - """ - fetch_actor: whether to make an extra query or call to fetch the actor id - Without the actor_id the RpcActor acts as a tuple of id and type. - """ - from sentry.models.team import Team - from sentry.models.user import User - from sentry.services.hybrid_cloud.organization import RpcTeam - - if isinstance(obj, cls): - return obj - if isinstance(obj, User): - return cls.from_orm_user(obj) - if isinstance(obj, Team): - return cls.from_orm_team(obj) - if isinstance(obj, RpcUser): - return cls.from_rpc_user(obj) - if isinstance(obj, RpcTeam): - return cls.from_rpc_team(obj) - raise TypeError(f"Cannot build RpcActor from {type(obj)}") - - @classmethod - def from_orm_user(cls, user: "User") -> "RpcActor": - return cls( - id=user.id, - actor_type=ActorType.USER, - ) - - @classmethod - def from_rpc_user(cls, user: RpcUser) -> "RpcActor": - return cls( - id=user.id, - actor_type=ActorType.USER, - ) - - @classmethod - def from_orm_team(cls, team: "Team") -> "RpcActor": - return cls(id=team.id, actor_type=ActorType.TEAM, slug=team.slug) - - @classmethod - def from_rpc_team(cls, team: "RpcTeam") -> "RpcActor": - return cls(id=team.id, actor_type=ActorType.TEAM, slug=team.slug) - - @overload - @classmethod - def from_identifier(cls, id: None) -> None: - ... 
- - @overload - @classmethod - def from_identifier(cls, id: int | str) -> "RpcActor": - ... - - @classmethod - def from_identifier(cls, id: str | int | None) -> "RpcActor | None": - """ - Parse an actor identifier into an RpcActor - - Forms `id` can take: - 1231 -> look up User by id - "1231" -> look up User by id - "user:1231" -> look up User by id - "team:1231" -> look up Team by id - "maiseythedog" -> look up User by username - "maisey@dogsrule.com" -> look up User by primary email - """ - from sentry.services.hybrid_cloud.user.service import user_service - - if not id: - return None - # If we have an integer, fall back to assuming it's a User - if isinstance(id, int): - return cls(id=id, actor_type=ActorType.USER) - - # If the actor_identifier is a simple integer as a string, - # we're also a User - if id.isdigit(): - return cls(id=int(id), actor_type=ActorType.USER) - - if id.startswith("user:"): - return cls(id=int(id[5:]), actor_type=ActorType.USER) - - if id.startswith("team:"): - return cls(id=int(id[5:]), actor_type=ActorType.TEAM) - - try: - user = user_service.get_by_username(username=id)[0] - return cls(id=user.id, actor_type=ActorType.USER) - except IndexError as e: - raise cls.InvalidActor(f"Unable to resolve actor identifier: {e}") - - @classmethod - def from_id(cls, user_id: int | None = None, team_id: int | None = None) -> "RpcActor": - if user_id and team_id: - raise cls.InvalidActor("You can only provide one of user_id and team_id") - if user_id: - return cls(id=user_id, actor_type=ActorType.USER) - if team_id: - return cls(id=team_id, actor_type=ActorType.TEAM) - raise cls.InvalidActor("You must provide one of user_id and team_id") - - def __post_init__(self) -> None: - if not self.is_team and self.slug is not None: - raise ValueError("Slugs are expected for teams only") - - def __hash__(self) -> int: - return hash((self.id, self.actor_type)) - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.id == other.id - and self.actor_type == other.actor_type - ) - - def resolve(self) -> "Team | RpcUser": - """ - Resolve an Actor into the Team or RpcUser it represents. - - Will raise Team.DoesNotExist or User.DoesNotExist when the actor is invalid - """ - from sentry.models.team import Team - from sentry.services.hybrid_cloud.user.service import user_service - - if self.is_team: - team = Team.objects.filter(id=self.id).first() - if team: - return team - raise RpcActor.InvalidActor(f"Cannot find a team with id={self.id}") - if self.is_user: - user = user_service.get_user(user_id=self.id) - if user: - return user - raise RpcActor.InvalidActor(f"Cannot find a User with id={self.id}") - # This should be un-reachable - raise RpcActor.InvalidActor("Cannot resolve an actor with an unknown type") - - @property - def identifier(self) -> str: - return f"{self.actor_type.lower()}:{self.id}" - - @property - def is_team(self) -> bool: - return self.actor_type == ActorType.TEAM - - @property - def is_user(self) -> bool: - return self.actor_type == ActorType.USER - - -def parse_and_validate_actor(actor_identifier: str | None, organization_id: int) -> RpcActor | None: - from sentry.models.organizationmember import OrganizationMember - from sentry.models.team import Team - - if not actor_identifier: - return None - - try: - actor = RpcActor.from_identifier(actor_identifier) - except Exception: - raise serializers.ValidationError( - "Could not parse actor. Format should be `type:id` where type is `team` or `user`." 
- ) - try: - obj = actor.resolve() - except RpcActor.InvalidActor: - raise serializers.ValidationError(f"{actor.actor_type} does not exist") - - if isinstance(obj, Team): - if obj.organization_id != organization_id: - raise serializers.ValidationError("Team is not a member of this organization") - elif isinstance(obj, RpcUser): - if not OrganizationMember.objects.filter( - organization_id=organization_id, user_id=obj.id - ).exists(): - raise serializers.ValidationError("User is not a member of this organization") - - return actor +# Deprecated module for actor imports +# Use sentry.types.actor instead. +from sentry.types.actor import Actor, ActorTarget, ActorType, parse_and_validate_actor + +RpcActor = Actor + +__all__ = ( + "RpcActor", + "ActorType", + "ActorTarget", + "parse_and_validate_actor", +) diff --git a/src/sentry/types/actor.py b/src/sentry/types/actor.py new file mode 100644 index 00000000000000..b029e0f3784739 --- /dev/null +++ b/src/sentry/types/actor.py @@ -0,0 +1,289 @@ +from collections import defaultdict +from collections.abc import Iterable, MutableMapping, Sequence +from enum import Enum +from typing import TYPE_CHECKING, Any, Union, overload + +from django.core.exceptions import ObjectDoesNotExist +from rest_framework import serializers + +from sentry.services.hybrid_cloud import RpcModel +from sentry.services.hybrid_cloud.user import RpcUser + +if TYPE_CHECKING: + from sentry.models.team import Team + from sentry.models.user import User + from sentry.services.hybrid_cloud.organization import RpcTeam + + +class ActorType(str, Enum): + USER = "User" + TEAM = "Team" + + +ActorTarget = Union["Actor", "User", "RpcUser", "Team", "RpcTeam"] + + +class Actor(RpcModel): + """Can represent any model object with a foreign key to Actor.""" + + id: int + """The id of the user/team this actor represents""" + + actor_type: ActorType + """Whether this actor is a User or Team""" + + slug: str | None = None + + class InvalidActor(ObjectDoesNotExist): + """Raised when an Actor fails to resolve or be found""" + + pass + + @classmethod + def resolve_many(cls, actors: Sequence["Actor"]) -> list["Team | RpcUser"]: + """ + Resolve a list of actors in a batch to the Team/User the Actor references. + + Will generate more efficient queries to load actors than calling + Actor.resolve() individually will. + """ + from sentry.models.team import Team + from sentry.services.hybrid_cloud.user.service import user_service + + if not actors: + return [] + actors_by_type: dict[ActorType, list[Actor]] = defaultdict(list) + for actor in actors: + actors_by_type[actor.actor_type].append(actor) + results: dict[tuple[ActorType, int], Team | RpcUser] = {} + for actor_type, actor_list in actors_by_type.items(): + if actor_type == ActorType.USER: + for user in user_service.get_many(filter={"user_ids": [u.id for u in actor_list]}): + results[(actor_type, user.id)] = user + if actor_type == ActorType.TEAM: + for team in Team.objects.filter(id__in=[t.id for t in actor_list]): + results[(actor_type, team.id)] = team + + return list(filter(None, [results.get((actor.actor_type, actor.id)) for actor in actors])) + + @classmethod + def many_from_object(cls, objects: Iterable[ActorTarget]) -> list["Actor"]: + """ + Create a list of Actor instances based on a collection of 'objects' + + Objects will be grouped by the kind of actor they would be related to. + Queries for actors are batched to increase efficiency. Users that are + missing actors will have actors generated. 
+ """ + from sentry.models.team import Team + from sentry.models.user import User + from sentry.services.hybrid_cloud.organization import RpcTeam + + result: list["Actor"] = [] + grouped_by_type: MutableMapping[str, list[int]] = defaultdict(list) + team_slugs: MutableMapping[int, str] = {} + for obj in objects: + if isinstance(obj, cls): + result.append(obj) + if isinstance(obj, (User, RpcUser)): + grouped_by_type[ActorType.USER].append(obj.id) + if isinstance(obj, (Team, RpcTeam)): + team_slugs[obj.id] = obj.slug + grouped_by_type[ActorType.TEAM].append(obj.id) + + if grouped_by_type[ActorType.TEAM]: + team_ids = grouped_by_type[ActorType.TEAM] + for team_id in team_ids: + result.append( + Actor( + id=team_id, + actor_type=ActorType.TEAM, + slug=team_slugs.get(team_id), + ) + ) + + if grouped_by_type[ActorType.USER]: + user_ids = grouped_by_type[ActorType.USER] + for user_id in user_ids: + result.append(Actor(id=user_id, actor_type=ActorType.USER)) + return result + + @classmethod + def from_object(cls, obj: ActorTarget) -> "Actor": + """ + fetch_actor: whether to make an extra query or call to fetch the actor id + Without the actor_id the Actor acts as a tuple of id and type. + """ + from sentry.models.team import Team + from sentry.models.user import User + from sentry.services.hybrid_cloud.organization import RpcTeam + + if isinstance(obj, cls): + return obj + if isinstance(obj, User): + return cls.from_orm_user(obj) + if isinstance(obj, Team): + return cls.from_orm_team(obj) + if isinstance(obj, RpcUser): + return cls.from_rpc_user(obj) + if isinstance(obj, RpcTeam): + return cls.from_rpc_team(obj) + raise TypeError(f"Cannot build Actor from {type(obj)}") + + @classmethod + def from_orm_user(cls, user: "User") -> "Actor": + return cls( + id=user.id, + actor_type=ActorType.USER, + ) + + @classmethod + def from_rpc_user(cls, user: RpcUser) -> "Actor": + return cls( + id=user.id, + actor_type=ActorType.USER, + ) + + @classmethod + def from_orm_team(cls, team: "Team") -> "Actor": + return cls(id=team.id, actor_type=ActorType.TEAM, slug=team.slug) + + @classmethod + def from_rpc_team(cls, team: "RpcTeam") -> "Actor": + return cls(id=team.id, actor_type=ActorType.TEAM, slug=team.slug) + + @overload + @classmethod + def from_identifier(cls, id: None) -> None: + ... + + @overload + @classmethod + def from_identifier(cls, id: int | str) -> "Actor": + ... 
+ + @classmethod + def from_identifier(cls, id: str | int | None) -> "Actor | None": + """ + Parse an actor identifier into an Actor + + Forms `id` can take: + 1231 -> look up User by id + "1231" -> look up User by id + "user:1231" -> look up User by id + "team:1231" -> look up Team by id + "maiseythedog" -> look up User by username + "maisey@dogsrule.com" -> look up User by primary email + """ + from sentry.services.hybrid_cloud.user.service import user_service + + if not id: + return None + # If we have an integer, fall back to assuming it's a User + if isinstance(id, int): + return cls(id=id, actor_type=ActorType.USER) + + # If the actor_identifier is a simple integer as a string, + # we're also a User + if id.isdigit(): + return cls(id=int(id), actor_type=ActorType.USER) + + if id.startswith("user:"): + return cls(id=int(id[5:]), actor_type=ActorType.USER) + + if id.startswith("team:"): + return cls(id=int(id[5:]), actor_type=ActorType.TEAM) + + try: + user = user_service.get_by_username(username=id)[0] + return cls(id=user.id, actor_type=ActorType.USER) + except IndexError as e: + raise cls.InvalidActor(f"Unable to resolve actor identifier: {e}") + + @classmethod + def from_id(cls, user_id: int | None = None, team_id: int | None = None) -> "Actor": + if user_id and team_id: + raise cls.InvalidActor("You can only provide one of user_id and team_id") + if user_id: + return cls(id=user_id, actor_type=ActorType.USER) + if team_id: + return cls(id=team_id, actor_type=ActorType.TEAM) + raise cls.InvalidActor("You must provide one of user_id and team_id") + + def __post_init__(self) -> None: + if not self.is_team and self.slug is not None: + raise ValueError("Slugs are expected for teams only") + + def __hash__(self) -> int: + return hash((self.id, self.actor_type)) + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.id == other.id + and self.actor_type == other.actor_type + ) + + def resolve(self) -> "Team | RpcUser": + """ + Resolve an Actor into the Team or RpcUser it represents. + + Will raise Team.DoesNotExist or User.DoesNotExist when the actor is invalid + """ + from sentry.models.team import Team + from sentry.services.hybrid_cloud.user.service import user_service + + if self.is_team: + team = Team.objects.filter(id=self.id).first() + if team: + return team + raise Actor.InvalidActor(f"Cannot find a team with id={self.id}") + if self.is_user: + user = user_service.get_user(user_id=self.id) + if user: + return user + raise Actor.InvalidActor(f"Cannot find a User with id={self.id}") + # This should be un-reachable + raise Actor.InvalidActor("Cannot resolve an actor with an unknown type") + + @property + def identifier(self) -> str: + return f"{self.actor_type.lower()}:{self.id}" + + @property + def is_team(self) -> bool: + return self.actor_type == ActorType.TEAM + + @property + def is_user(self) -> bool: + return self.actor_type == ActorType.USER + + +def parse_and_validate_actor(actor_identifier: str | None, organization_id: int) -> Actor | None: + from sentry.models.organizationmember import OrganizationMember + from sentry.models.team import Team + + if not actor_identifier: + return None + + try: + actor = Actor.from_identifier(actor_identifier) + except Exception: + raise serializers.ValidationError( + "Could not parse actor. Format should be `type:id` where type is `team` or `user`." 
+ ) + try: + obj = actor.resolve() + except Actor.InvalidActor: + raise serializers.ValidationError(f"{actor.actor_type} does not exist") + + if isinstance(obj, Team): + if obj.organization_id != organization_id: + raise serializers.ValidationError("Team is not a member of this organization") + elif isinstance(obj, RpcUser): + if not OrganizationMember.objects.filter( + organization_id=organization_id, user_id=obj.id + ).exists(): + raise serializers.ValidationError("User is not a member of this organization") + + return actor From 1e5349e5fc04d08792bf3ac1edbf1dcdaa60da65 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Thu, 9 May 2024 10:04:47 -0400 Subject: [PATCH 200/376] chore(actor) Remove old actor columns (#70549) Remove now unused actor columns from rule, alertrule, team, grouphistory. These columns were removed from django state in #69784, #69873, #69976, #70090 Refs HC-1178 --- migrations_lockfile.txt | 2 +- .../migrations/0718_remove_actor_columns.py | 53 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 src/sentry/migrations/0718_remove_actor_columns.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index a311f852ab3cb8..3900ddc9dcebb1 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0717_query_subscription_timebox +sentry: 0718_remove_actor_columns social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0718_remove_actor_columns.py b/src/sentry/migrations/0718_remove_actor_columns.py new file mode 100644 index 00000000000000..c3a19b0ca71c03 --- /dev/null +++ b/src/sentry/migrations/0718_remove_actor_columns.py @@ -0,0 +1,53 @@ +# Generated by Django 5.0.4 on 2024-05-08 21:10 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0717_query_subscription_timebox"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[], + database_operations=[ + migrations.RunSQL( + sql="ALTER TABLE sentry_team DROP COLUMN actor_id", + reverse_sql="ALTER TABLE sentry_team ADD COLUMN actor_id BIGINT NULL", + hints={"tables": ["sentry_team"]}, + ), + migrations.RunSQL( + sql="ALTER TABLE sentry_rule DROP COLUMN owner_id", + reverse_sql="ALTER TABLE sentry_rule ADD COLUMN owner_id BIGINT NULL", + hints={"tables": ["sentry_rule"]}, + ), + migrations.RunSQL( + sql="ALTER TABLE sentry_alertrule DROP COLUMN owner_id", + reverse_sql="ALTER TABLE sentry_alertrule ADD COLUMN owner_id BIGINT NULL", + hints={"tables": ["sentry_alertrule"]}, + ), + migrations.RunSQL( + sql="ALTER TABLE sentry_grouphistory DROP COLUMN actor_id", + reverse_sql="ALTER TABLE sentry_grouphistory ADD COLUMN actor_id BIGINT NULL", + hints={"tables": ["sentry_grouphistory"]}, + ), + ], + ) + ] From 4003ba275b7f8273a0cb6de386bb01606b1c3a8c Mon Sep 17 00:00:00 2001 From: colin-sentry <161344340+colin-sentry@users.noreply.github.com> Date: Thu, 9 May 2024 10:05:52 -0400 Subject: [PATCH 201/376] feat(ai-monitoring): Show table even if there is no cost data (#70555) If costs or token usage fail to load, still show the table with zero values. --- static/app/views/aiMonitoring/PipelinesTable.tsx | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/static/app/views/aiMonitoring/PipelinesTable.tsx b/static/app/views/aiMonitoring/PipelinesTable.tsx index 03d1039a2f8f56..34346bef401797 100644 --- a/static/app/views/aiMonitoring/PipelinesTable.tsx +++ b/static/app/views/aiMonitoring/PipelinesTable.tsx @@ -119,11 +119,7 @@ export function PipelinesTable() { referrer: 'api.ai-pipelines.view', }); - const { - data: tokensUsedData, - error: tokensUsedError, - isLoading: tokensUsedLoading, - } = useSpanMetrics({ + const {data: tokensUsedData, isLoading: tokensUsedLoading} = useSpanMetrics({ search: new MutableSearch( `span.category:ai span.ai.pipeline.group:[${(data as Row[])?.map(x => x['span.group']).join(',')}]` ), @@ -185,7 +181,7 @@ export function PipelinesTable() { /> Date: Thu, 9 May 2024 14:24:57 +0000 Subject: [PATCH 202/376] Revert "chore(actor) Remove old actor columns (#70549)" This reverts commit 1e5349e5fc04d08792bf3ac1edbf1dcdaa60da65. 
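For context, the drop being reverted was the second half of a two-phase removal: the PRs referenced in PATCH 200 (#69784, #69873, #69976, #70090) first removed the fields from Django's migration state while leaving the physical columns untouched, which is what lets the RunSQL statements above drop them safely later. A minimal sketch of that first, state-only phase; the migration name and dependency are hypothetical, and a plain Migration base class stands in for Sentry's CheckedMigration:

    from django.db import migrations


    class Migration(migrations.Migration):
        dependencies = [
            ("sentry", "0699_some_previous_migration"),  # hypothetical dependency
        ]

        operations = [
            migrations.SeparateDatabaseAndState(
                # Phase 1: remove the field from Django's model state only.
                # The Postgres column stays in place until a later RunSQL
                # migration (phase 2) issues the actual DROP COLUMN.
                state_operations=[
                    migrations.RemoveField(model_name="team", name="actor"),
                ],
                database_operations=[],
            ),
        ]

Splitting the two phases keeps deploys safe: application code stops referencing the column before the irreversible DROP COLUMN ever runs.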
Co-authored-by: markstory <24086+markstory@users.noreply.github.com> --- migrations_lockfile.txt | 2 +- .../migrations/0718_remove_actor_columns.py | 53 ------------------- 2 files changed, 1 insertion(+), 54 deletions(-) delete mode 100644 src/sentry/migrations/0718_remove_actor_columns.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 3900ddc9dcebb1..a311f852ab3cb8 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0718_remove_actor_columns +sentry: 0717_query_subscription_timebox social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0718_remove_actor_columns.py b/src/sentry/migrations/0718_remove_actor_columns.py deleted file mode 100644 index c3a19b0ca71c03..00000000000000 --- a/src/sentry/migrations/0718_remove_actor_columns.py +++ /dev/null @@ -1,53 +0,0 @@ -# Generated by Django 5.0.4 on 2024-05-08 21:10 - -from django.db import migrations - -from sentry.new_migrations.migrations import CheckedMigration - - -class Migration(CheckedMigration): - # This flag is used to mark that a migration shouldn't be automatically run in production. - # This should only be used for operations where it's safe to run the migration after your - # code has deployed. So this should not be used for most operations that alter the schema - # of a table. - # Here are some things that make sense to mark as post deployment: - # - Large data migrations. Typically we want these to be run manually so that they can be - # monitored and not block the deploy for a long period of time while they run. - # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to - # run this outside deployments so that we don't block them. Note that while adding an index - # is a schema change, it's completely safe to run the operation after the code has deployed. - # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment - - is_post_deployment = False - - dependencies = [ - ("sentry", "0717_query_subscription_timebox"), - ] - - operations = [ - migrations.SeparateDatabaseAndState( - state_operations=[], - database_operations=[ - migrations.RunSQL( - sql="ALTER TABLE sentry_team DROP COLUMN actor_id", - reverse_sql="ALTER TABLE sentry_team ADD COLUMN actor_id BIGINT NULL", - hints={"tables": ["sentry_team"]}, - ), - migrations.RunSQL( - sql="ALTER TABLE sentry_rule DROP COLUMN owner_id", - reverse_sql="ALTER TABLE sentry_rule ADD COLUMN owner_id BIGINT NULL", - hints={"tables": ["sentry_rule"]}, - ), - migrations.RunSQL( - sql="ALTER TABLE sentry_alertrule DROP COLUMN owner_id", - reverse_sql="ALTER TABLE sentry_alertrule ADD COLUMN owner_id BIGINT NULL", - hints={"tables": ["sentry_alertrule"]}, - ), - migrations.RunSQL( - sql="ALTER TABLE sentry_grouphistory DROP COLUMN actor_id", - reverse_sql="ALTER TABLE sentry_grouphistory ADD COLUMN actor_id BIGINT NULL", - hints={"tables": ["sentry_grouphistory"]}, - ), - ], - ) - ] From 36980c87426b62c07e236e36670ce4ce5ccc1428 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Thu, 9 May 2024 11:25:48 -0400 Subject: [PATCH 203/376] feat(cache): plot and highlight transaction samples (#70521) 1. plots the transaction samples on the cache sidebar 2. 
Allows samples to be highlighted (similar to other starfish modules) --- .../charts/transactionDurationChart.tsx | 46 ++++++++++++++++++- .../cache/samplePanel/samplePanel.tsx | 36 ++++++++++++++- .../cache/tables/spanSamplesTable.tsx | 4 +- .../performance/http/data/useSpanSamples.tsx | 1 + .../performance/http/httpSamplesPanel.tsx | 16 ++++--- .../starfish/utils/chart/findDataPoint.ts | 9 ++++ .../useSampleScatterPlotSeries.tsx | 9 ++-- 7 files changed, 107 insertions(+), 14 deletions(-) create mode 100644 static/app/views/starfish/utils/chart/findDataPoint.ts diff --git a/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx b/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx index 077b87fb7be17d..71d89532146e21 100644 --- a/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx +++ b/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx @@ -1,16 +1,31 @@ +import type {EChartHighlightHandler} from 'sentry/types/echarts'; import {decodeScalar} from 'sentry/utils/queryString'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import useLocationQuery from 'sentry/utils/url/useLocationQuery'; import {Referrer} from 'sentry/views/performance/cache/referrers'; import {CHART_HEIGHT} from 'sentry/views/performance/cache/settings'; +import type {DataRow} from 'sentry/views/performance/cache/tables/spanSamplesTable'; import {AVG_COLOR} from 'sentry/views/starfish/colors'; import Chart, {ChartType} from 'sentry/views/starfish/components/chart'; import ChartPanel from 'sentry/views/starfish/components/chartPanel'; import {useMetricsSeries} from 'sentry/views/starfish/queries/useDiscoverSeries'; import type {MetricsQueryFilters} from 'sentry/views/starfish/types'; import {DataTitles} from 'sentry/views/starfish/views/spans/types'; +import {useSampleScatterPlotSeries} from 'sentry/views/starfish/views/spanSummaryPage/sampleList/durationChart/useSampleScatterPlotSeries'; -export function TransactionDurationChart() { +type Props = { + averageTransactionDuration: number; + onHighlight: EChartHighlightHandler; + samples: DataRow[]; + highlightedSpanId?: string; +}; + +export function TransactionDurationChart({ + samples, + averageTransactionDuration, + onHighlight, + highlightedSpanId, +}: Props) { const {transaction} = useLocationQuery({ fields: { project: decodeScalar, @@ -28,6 +43,33 @@ export function TransactionDurationChart() { referrer: Referrer.SAMPLES_CACHE_TRANSACTION_DURATION, }); + const sampledSpanDataSeries = useSampleScatterPlotSeries( + samples, + averageTransactionDuration, + highlightedSpanId, + 'transaction.duration' + ); + + // TODO: This is duplicated from `DurationChart` in `SampleList`. Resolve the duplication + const handleChartHighlight: EChartHighlightHandler = function (event) { + // TODO: Gross hack. Even though `scatterPlot` is a separate prop, it's just an array of `Series` that gets appended to the main series. To find the point that was hovered, we re-construct the correct series order. It would have been cleaner to just pass the scatter plot as its own, single series + const allSeries = [ + data['avg(transaction.duration)'], + ...(sampledSpanDataSeries ??
[]), + ]; + + const highlightedDataPoints = event.batch.map(batch => { + const {seriesIndex, dataIndex} = batch; + + const highlightedSeries = allSeries?.[seriesIndex]; + const highlightedDataPoint = highlightedSeries.data?.[dataIndex]; + + return {series: highlightedSeries, dataPoint: highlightedDataPoint}; + }); + + onHighlight?.(highlightedDataPoints, event); + }; + return ( diff --git a/static/app/views/performance/cache/samplePanel/samplePanel.tsx b/static/app/views/performance/cache/samplePanel/samplePanel.tsx index d0a1ae2b5388dc..27130737e38f12 100644 --- a/static/app/views/performance/cache/samplePanel/samplePanel.tsx +++ b/static/app/views/performance/cache/samplePanel/samplePanel.tsx @@ -21,6 +21,7 @@ import {Referrer} from 'sentry/views/performance/cache/referrers'; import {TransactionDurationChart} from 'sentry/views/performance/cache/samplePanel/charts/transactionDurationChart'; import {BASE_FILTERS} from 'sentry/views/performance/cache/settings'; import {SpanSamplesTable} from 'sentry/views/performance/cache/tables/spanSamplesTable'; +import {useDebouncedState} from 'sentry/views/performance/http/useDebouncedState'; import {MetricReadout} from 'sentry/views/performance/metricReadout'; import * as ModuleLayout from 'sentry/views/performance/moduleLayout'; import DetailPanel from 'sentry/views/starfish/components/detailPanel'; @@ -37,6 +38,7 @@ import { SpanMetricsField, type SpanMetricsQueryFilters, } from 'sentry/views/starfish/types'; +import {findSampleFromDataPoint} from 'sentry/views/starfish/utils/chart/findDataPoint'; import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; // This is similar to http sample table, it's difficult to use the generic span samples sidebar as we require a bunch of custom things. @@ -51,6 +53,12 @@ export function CacheSamplePanel() { }, }); + const [highlightedSpanId, setHighlightedSpanId] = useDebouncedState( + undefined, + [], + 10 + ); + // `detailKey` controls whether the panel is open. If all required properties are available, concat them to make a key, otherwise set to `undefined` and hide the panel const detailKey = query.transaction ?
[query.transaction].filter(Boolean).join(':') @@ -240,7 +248,30 @@ export function CacheSamplePanel() { - + { + const firstHighlight = highlights[0]; + + if (!firstHighlight) { + setHighlightedSpanId(undefined); + return; + } + + const sample = findSampleFromDataPoint<(typeof spansWithDuration)[0]>( + firstHighlight.dataPoint, + spansWithDuration, + 'transaction.duration' + ); + setHighlightedSpanId(sample?.span_id); + }} + /> @@ -255,6 +286,9 @@ export function CacheSamplePanel() { units: {[SpanIndexedField.CACHE_ITEM_SIZE]: 'byte'}, }} isLoading={isCacheSpanSamplesFetching || isFetchingTransactions} + highlightedSpanId={highlightedSpanId} + onSampleMouseOver={sample => setHighlightedSpanId(sample.span_id)} + onSampleMouseOut={() => setHighlightedSpanId(undefined)} error={transactionError} /> diff --git a/static/app/views/performance/cache/tables/spanSamplesTable.tsx b/static/app/views/performance/cache/tables/spanSamplesTable.tsx index f575e78764128b..f739c49dfd4c70 100644 --- a/static/app/views/performance/cache/tables/spanSamplesTable.tsx +++ b/static/app/views/performance/cache/tables/spanSamplesTable.tsx @@ -35,7 +35,9 @@ type ColumnKeys = | SpanIndexedField.CACHE_ITEM_SIZE | 'transaction.duration'; -type DataRow = Pick & {'transaction.duration': number}; +export type DataRow = Pick & { + 'transaction.duration': number; +}; type Column = GridColumnHeader; diff --git a/static/app/views/performance/http/data/useSpanSamples.tsx b/static/app/views/performance/http/data/useSpanSamples.tsx index 5399ea0cbdc3a9..49565ec5517a6b 100644 --- a/static/app/views/performance/http/data/useSpanSamples.tsx +++ b/static/app/views/performance/http/data/useSpanSamples.tsx @@ -58,6 +58,7 @@ export const useSpanSamples = ( | SpanIndexedField.TIMESTAMP | SpanIndexedField.ID | SpanIndexedField.PROFILE_ID + | SpanIndexedField.SPAN_SELF_TIME >[] // This type is a little awkward but it explicitly states that the response could be empty. 
This doesn't enable unchecked access errors, but it at least indicates that it's possible that there's no data // eslint-disable-next-line @typescript-eslint/ban-types diff --git a/static/app/views/performance/http/httpSamplesPanel.tsx b/static/app/views/performance/http/httpSamplesPanel.tsx index 985419f5668982..e85ce946df6e90 100644 --- a/static/app/views/performance/http/httpSamplesPanel.tsx +++ b/static/app/views/performance/http/httpSamplesPanel.tsx @@ -49,6 +49,7 @@ import { SpanMetricsField, type SpanMetricsQueryFilters, } from 'sentry/views/starfish/types'; +import {findSampleFromDataPoint} from 'sentry/views/starfish/utils/chart/findDataPoint'; import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; import {useSampleScatterPlotSeries} from 'sentry/views/starfish/views/spanSummaryPage/sampleList/durationChart/useSampleScatterPlotSeries'; @@ -242,12 +243,6 @@ export function HTTPSamplesPanel() { highlightedSpanId ); - const findSampleFromDataPoint = (dataPoint: {name: string | number; value: number}) => { - return durationSamplesData.find( - s => s.timestamp === dataPoint.name && s['span.self_time'] === dataPoint.value - ); - }; - const handleClose = () => { router.replace({ pathname: router.location.pathname, @@ -402,7 +397,14 @@ export function HTTPSamplesPanel() { return; } - const sample = findSampleFromDataPoint(firstHighlight.dataPoint); + const sample = findSampleFromDataPoint< + (typeof durationSamplesData)[0] + >( + firstHighlight.dataPoint, + durationSamplesData, + SpanIndexedField.SPAN_SELF_TIME + ); + setHighlightedSpanId(sample?.span_id); }} isLoading={isDurationDataFetching} diff --git a/static/app/views/starfish/utils/chart/findDataPoint.ts b/static/app/views/starfish/utils/chart/findDataPoint.ts new file mode 100644 index 00000000000000..fc7f52f28b77e7 --- /dev/null +++ b/static/app/views/starfish/utils/chart/findDataPoint.ts @@ -0,0 +1,9 @@ +export function findSampleFromDataPoint( + dataPoint: {name: string | number; value: number}, + data: T[], + matchKey: keyof T +) { + return data?.find( + s => s.timestamp === dataPoint.name && s[matchKey] === dataPoint.value + ); +} diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/useSampleScatterPlotSeries.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/useSampleScatterPlotSeries.tsx index 697b79d677197f..1d1a0f9bbf82c2 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/useSampleScatterPlotSeries.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/useSampleScatterPlotSeries.tsx @@ -12,15 +12,16 @@ import {getSampleChartSymbol} from 'sentry/views/starfish/views/spanSummaryPage/ export function useSampleScatterPlotSeries( spans: Partial[], average?: number, - highlightedSpanId?: string + highlightedSpanId?: string, + key: string = 'span.self_time' ): Series[] { const theme = useTheme(); return spans.map(span => { let symbol, color; - if (span['span.self_time'] && defined(average)) { - ({symbol, color} = getSampleChartSymbol(span['span.self_time'], average, theme)); + if (span[key] && defined(average)) { + ({symbol, color} = getSampleChartSymbol(span[key], average, theme)); } else { symbol = 'circle'; color = AVG_COLOR; @@ -30,7 +31,7 @@ export function useSampleScatterPlotSeries( data: [ { name: span?.timestamp ?? span.span_id ?? t('Span'), - value: span?.['span.self_time'] ?? 0, + value: span?.[key] ?? 
0, }, ], symbol, From c979c6ba080954fa70021e37a0a502247de23241 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 11:31:16 -0400 Subject: [PATCH 204/376] perf: replace memcache with redis on assemble (#70490) Once https://github.com/getsentry/sentry-options-automator/pull/1358 lands, we can land this pull-request since we're right now officially writing to both Memcache and Redis for assemble status. --- src/sentry/tasks/assemble.py | 19 +++++-------------- tests/sentry/tasks/test_assemble.py | 2 +- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/src/sentry/tasks/assemble.py b/src/sentry/tasks/assemble.py index e7abce54ed699b..c3867437aaa12a 100644 --- a/src/sentry/tasks/assemble.py +++ b/src/sentry/tasks/assemble.py @@ -17,7 +17,6 @@ from sentry import options from sentry.api.serializers import serialize -from sentry.cache import default_cache from sentry.constants import ObjectStatus from sentry.debug_files.artifact_bundles import ( INDEXING_THRESHOLD, @@ -190,20 +189,14 @@ def get_assemble_status(task, scope, checksum): notice or error message. """ cache_key = _get_cache_key(task, scope, checksum) - - if options.get("assemble.read_from_redis"): - client = _get_redis_cluster_for_assemble() - rv = client.get(cache_key) - - # It is stored as bytes with [state, detail] on Redis. - if rv: - rv = orjson.loads(rv) - else: - rv = default_cache.get(cache_key) + client = _get_redis_cluster_for_assemble() + rv = client.get(cache_key) if rv is None: return None, None - return tuple(rv) + + # It is stored as bytes with [state, detail] on Redis. + return tuple(orjson.loads(rv)) @sentry_sdk.tracing.trace @@ -212,7 +205,6 @@ def set_assemble_status(task, scope, checksum, state, detail=None): Updates the status of an assembling task. It is cached for 10 minutes. """ cache_key = _get_cache_key(task, scope, checksum) - default_cache.set(cache_key, (state, detail), 600) redis_client = _get_redis_cluster_for_assemble() redis_client.set(name=cache_key, value=orjson.dumps([state, detail]), ex=600) @@ -223,7 +215,6 @@ def delete_assemble_status(task, scope, checksum): Deletes the status of an assembling task. 
""" cache_key = _get_cache_key(task, scope, checksum) - default_cache.delete(cache_key) redis_client = _get_redis_cluster_for_assemble() redis_client.delete(cache_key) diff --git a/tests/sentry/tasks/test_assemble.py b/tests/sentry/tasks/test_assemble.py index 0393c40836664f..466fb9013668bf 100644 --- a/tests/sentry/tasks/test_assemble.py +++ b/tests/sentry/tasks/test_assemble.py @@ -1053,7 +1053,7 @@ def test_index_if_needed_with_newer_bundle_already_stored( ) -@use_redis_cluster(with_options={"assemble.read_from_redis": True}) +@use_redis_cluster() def test_redis_assemble_status(): task = AssembleTask.DIF project_id = uuid.uuid4().hex From f3a1601f83008f141d990f60a4902abe0fae273a Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Thu, 9 May 2024 11:39:26 -0400 Subject: [PATCH 205/376] ref(replay): update details tab title to be display name (#70576) fixes https://github.com/getsentry/sentry/issues/70275 replaces the tab title (previously the replay ID) to be the display name instead https://github.com/getsentry/sentry/assets/56095982/92edbe3e-fdd2-4d73-b6c6-d2ddeb7e561b --- static/app/views/replays/detail/page.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/views/replays/detail/page.tsx b/static/app/views/replays/detail/page.tsx index 5409f378ac0294..5f2a533fc223bc 100644 --- a/static/app/views/replays/detail/page.tsx +++ b/static/app/views/replays/detail/page.tsx @@ -40,7 +40,7 @@ export default function Page({ isVideoReplay, }: Props) { const title = replayRecord - ? `${replayRecord.id} — Session Replay — ${orgSlug}` + ? `${replayRecord.user.display_name ?? t('Anonymous User')} — Session Replay — ${orgSlug}` : `Session Replay — ${orgSlug}`; const onShareReplay = useShareReplayAtTimestamp(); From fee059fd0b91363eddacc1b849de1fc05e7bbfaa Mon Sep 17 00:00:00 2001 From: Florian Ellis <79283128+floels@users.noreply.github.com> Date: Thu, 9 May 2024 17:42:31 +0200 Subject: [PATCH 206/376] fix(replays): Fix 'clearQueryCache' method in 'useReplayData' hook (#70539) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # Goal The goal of this PR is to fix a bug currently causing the `clearQueryCache` method defined in `static/app/utils/replays/hooks/useReplayData.tsx` to have no effect (extra arrow function declaration). # Approach Remove the duplicated arrow function declaration syntax (`() => { ... }`). We also add a unit test to cover this behavior of the custom hook. You can see that the test fails if you revert the fix in `useReplayData.tsx`. # Legal Boilerplate Look, I get it. The entity doing business as "Sentry" was incorporated in the State of Delaware in 2015 as Functional Software, Inc. and is gonna need some rights from me in order to utilize my contributions in this here PR. So here's the deal: I retain all rights, title and interest in and to my contributions, and by keeping this boilerplate intact I confirm that Sentry can use, modify, copy, and redistribute my contributions, under Sentry's choice of terms. 
--- .../replays/hooks/useReplayData.spec.tsx | 66 ++++++++++++++++++- .../app/utils/replays/hooks/useReplayData.tsx | 28 ++++---- 2 files changed, 76 insertions(+), 18 deletions(-) diff --git a/static/app/utils/replays/hooks/useReplayData.spec.tsx b/static/app/utils/replays/hooks/useReplayData.spec.tsx index 2547427106a2fe..6b1f57f319eef7 100644 --- a/static/app/utils/replays/hooks/useReplayData.spec.tsx +++ b/static/app/utils/replays/hooks/useReplayData.spec.tsx @@ -32,10 +32,14 @@ jest.mocked(useProjects).mockReturnValue({ placeholders: [], }); +const mockInvalidateQueries = jest.fn(); + function wrapper({children}: {children?: ReactNode}) { - return ( - {children} - ); + const queryClient = makeTestQueryClient(); + + queryClient.invalidateQueries = mockInvalidateQueries; + + return {children}; } function getMockReplayRecord(replayRecord?: Partial) { @@ -59,6 +63,7 @@ function getMockReplayRecord(replayRecord?: Partial) { describe('useReplayData', () => { beforeEach(() => { MockApiClient.clearMockResponses(); + mockInvalidateQueries.mockClear(); }); it('should hydrate the replayRecord', async () => { @@ -505,4 +510,59 @@ describe('useReplayData', () => { ) ); }); + + it("should invalidate queries when result's 'onRetry' function is called", async () => { + const {mockReplayResponse} = getMockReplayRecord({ + count_errors: 0, + count_segments: 0, + error_ids: [], + }); + + const replayId = mockReplayResponse.id; + + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/replays/${replayId}/`, + body: {data: mockReplayResponse}, + }); + + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/replays-events-meta/`, + body: { + data: [], + }, + headers: { + Link: [ + '; rel="previous"; results="false"; cursor="0:1:0"', + '; rel="next"; results="false"; cursor="0:1:0"', + ].join(','), + }, + }); + + const {result} = renderHook(useReplayData, { + wrapper, + initialProps: { + replayId, + orgSlug: organization.slug, + }, + }); + + // We need this 'await waitFor()' for the following assertions to pass: + await waitFor(() => { + expect(result.current).toBeTruthy(); + }); + + result.current.onRetry(); + + expect(mockInvalidateQueries).toHaveBeenCalledWith({ + queryKey: [`/organizations/${organization.slug}/replays/${replayId}/`], + }); + expect(mockInvalidateQueries).toHaveBeenCalledWith({ + queryKey: [ + `/projects/${organization.slug}/${project.slug}/replays/${replayId}/recording-segments/`, + ], + }); + expect(mockInvalidateQueries).toHaveBeenCalledWith({ + queryKey: [`/organizations/${organization.slug}/replays-events-meta/`], + }); + }); }); diff --git a/static/app/utils/replays/hooks/useReplayData.tsx b/static/app/utils/replays/hooks/useReplayData.tsx index ba6d13168fe9ca..d89ce43ede58cc 100644 --- a/static/app/utils/replays/hooks/useReplayData.tsx +++ b/static/app/utils/replays/hooks/useReplayData.tsx @@ -214,21 +214,19 @@ function useReplayData({ }); const clearQueryCache = useCallback(() => { - () => { - queryClient.invalidateQueries({ - queryKey: [`/organizations/${orgSlug}/replays/${replayId}/`], - }); - queryClient.invalidateQueries({ - queryKey: [ - `/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/`, - ], - }); - // The next one isn't optimized - // This statement will invalidate the cache of fetched error events for all replayIds - queryClient.invalidateQueries({ - queryKey: [`/organizations/${orgSlug}/replays-events-meta/`], - }); - }; + queryClient.invalidateQueries({ + queryKey: 
[`/organizations/${orgSlug}/replays/${replayId}/`], + }); + queryClient.invalidateQueries({ + queryKey: [ + `/projects/${orgSlug}/${projectSlug}/replays/${replayId}/recording-segments/`, + ], + }); + // The next one isn't optimized + // This statement will invalidate the cache of fetched error events for all replayIds + queryClient.invalidateQueries({ + queryKey: [`/organizations/${orgSlug}/replays-events-meta/`], + }); }, [orgSlug, replayId, projectSlug, queryClient]); return useMemo(() => { From ae3ccb1b5d93aed9cac8ef2ac2341d010d5fb347 Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Thu, 9 May 2024 11:46:55 -0400 Subject: [PATCH 207/376] fix(perf): Span summary transaction throughput chart (#70241) Fixes an issue with the timestamps in the transaction throughput chart on the new span summary page. This was caused by the timestamp values not being converted to milliseconds, which resulted in the x-axis of the chart and the timestamps shown in the tooltip being inaccurate. This also allows the cursor in the charts to be synchronized --- .../transactionSpans/spanSummary/spanSummaryCharts.tsx | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx index 55e595f223ad74..bdbba2d135132e 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx @@ -94,10 +94,6 @@ function SpanSummaryCharts() { getRequestPayload: () => ({ ...eventView.getEventsAPIPayload(location), yAxis: eventView.yAxis, - topEvents: eventView.topEvents, - excludeOther: 0, - partial: 1, - orderby: undefined, interval: eventView.interval, }), options: { @@ -111,7 +107,7 @@ function SpanSummaryCharts() { data: txnThroughputData?.data.map(datum => ({ value: datum[1][0].count, - name: datum[0], + name: datum[0] * 1000, })) ?? [], }; @@ -126,7 +122,6 @@ function SpanSummaryCharts() { type={ChartType.LINE} definedAxisTicks={4} aggregateOutputFormat="duration" - stacked error={avgDurationError} chartColors={[AVG_COLOR]} /> @@ -143,7 +138,6 @@ function SpanSummaryCharts() { definedAxisTicks={4} aggregateOutputFormat="rate" rateUnit={RateUnit.PER_MINUTE} - stacked error={throughputError} chartColors={[THROUGHPUT_COLOR]} tooltipFormatterOptions={{ @@ -163,7 +157,6 @@ function SpanSummaryCharts() { definedAxisTicks={4} aggregateOutputFormat="rate" rateUnit={RateUnit.PER_MINUTE} - stacked error={txnThroughputError} chartColors={[TXN_THROUGHPUT_COLOR]} tooltipFormatterOptions={{ From 5be07017e765dce13f1c395bd2f2964d8b6e17af Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 11:47:26 -0400 Subject: [PATCH 208/376] perf: use orjson in billing metrics (#70579) We have enough confidence to say that `orjson` doesn't cause any errors/issues. We can now safely get rid of `json` and `rapidjson` in our repository.
Ref: https://github.com/getsentry/sentry/issues/68903 --- src/sentry/ingest/billing_metrics_consumer.py | 6 ++--- .../test_billing_metrics_consumer_kafka.py | 4 ++-- .../sentry/ingest/ingest_consumer/test_dlq.py | 4 ++-- .../test_ingest_consumer_kafka.py | 4 ++-- .../test_ingest_consumer_processing.py | 24 +++++++++---------- 5 files changed, 20 insertions(+), 22 deletions(-) diff --git a/src/sentry/ingest/billing_metrics_consumer.py b/src/sentry/ingest/billing_metrics_consumer.py index ed8f64f7f102d8..430f012084add5 100644 --- a/src/sentry/ingest/billing_metrics_consumer.py +++ b/src/sentry/ingest/billing_metrics_consumer.py @@ -3,6 +3,7 @@ from datetime import datetime, timezone from typing import Any, cast +import orjson import sentry_sdk from arroyo.backends.kafka import KafkaPayload from arroyo.processing.strategies import ( @@ -27,7 +28,6 @@ from sentry.sentry_metrics.utils import reverse_resolve_tag_value from sentry.snuba.metrics import parse_mri from sentry.snuba.metrics.naming_layer.mri import is_custom_metric -from sentry.utils import json from sentry.utils.outcomes import Outcome, track_outcome logger = logging.getLogger(__name__) @@ -87,9 +87,7 @@ def submit(self, message: Message[KafkaPayload]) -> None: self.__next_step.submit(message) def _get_payload(self, message: Message[KafkaPayload]) -> GenericMetric: - payload = json.loads( - message.payload.value.decode("utf-8"), use_rapid_json=True, skip_trace=True - ) + payload = orjson.loads(message.payload.value) return cast(GenericMetric, payload) def _count_processed_items(self, generic_metric: GenericMetric) -> Mapping[DataCategory, int]: diff --git a/tests/sentry/ingest/billing_metrics_consumer/test_billing_metrics_consumer_kafka.py b/tests/sentry/ingest/billing_metrics_consumer/test_billing_metrics_consumer_kafka.py index 43736654fa5f7b..683ba433210641 100644 --- a/tests/sentry/ingest/billing_metrics_consumer/test_billing_metrics_consumer_kafka.py +++ b/tests/sentry/ingest/billing_metrics_consumer/test_billing_metrics_consumer_kafka.py @@ -4,6 +4,7 @@ from typing import cast from unittest import mock +import orjson from arroyo.backends.kafka import KafkaPayload from arroyo.types import BrokerValue, Message, Partition, Topic from django.core.cache import cache @@ -23,7 +24,6 @@ ) from sentry.sentry_metrics.use_case_id_registry import UseCaseID from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json from sentry.utils.outcomes import Outcome @@ -206,7 +206,7 @@ def test_outcomes_consumed(track_outcome, factories): def generate_kafka_message(generic_metric: GenericMetric) -> Message[KafkaPayload]: nonlocal generate_kafka_message_counter - encoded = json.dumps(generic_metric).encode() + encoded = orjson.dumps(generic_metric) payload = KafkaPayload(key=None, value=encoded, headers=[]) message = Message( BrokerValue( diff --git a/tests/sentry/ingest/ingest_consumer/test_dlq.py b/tests/sentry/ingest/ingest_consumer/test_dlq.py index ab576bbee2c0a4..331f3c406ee178 100644 --- a/tests/sentry/ingest/ingest_consumer/test_dlq.py +++ b/tests/sentry/ingest/ingest_consumer/test_dlq.py @@ -3,6 +3,7 @@ from unittest.mock import Mock import msgpack +import orjson import pytest from arroyo.backends.kafka import KafkaPayload from arroyo.dlq import InvalidMessage @@ -13,7 +14,6 @@ from sentry.ingest.consumer.factory import IngestStrategyFactory from sentry.ingest.types import ConsumerType from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json def make_message(payload: bytes, 
partition: Partition, offset: int) -> Message: @@ -60,7 +60,7 @@ def test_dlq_invalid_messages(factories, topic_name, consumer_type) -> None: { "type": "unsupported type", "project_id": project.id, - "payload": json.dumps(sample_event).encode("utf-8"), + "payload": orjson.dumps(sample_event), "start_time": int(time.time()), "event_id": "aaa", } diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py index 1a78fe5cda0336..b5f2ce5094f26a 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py @@ -5,6 +5,7 @@ import uuid import msgpack +import orjson import pytest from django.conf import settings @@ -15,7 +16,6 @@ from sentry.eventstore.processing import event_processing_store from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.skips import requires_kafka, requires_snuba -from sentry.utils import json from sentry.utils.batching_kafka_consumer import create_topics from sentry.utils.kafka_config import get_topic_definition @@ -79,7 +79,7 @@ def inner(type, project=default_project): "start_time": int(time.time()), "event_id": event_id, "project_id": int(project_id), - "payload": json.dumps(normalized_event), + "payload": orjson.dumps(normalized_event), } val = msgpack.packb(message) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py index e21980ea8097b6..999547f61f5a75 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_processing.py @@ -8,6 +8,7 @@ from typing import Any from unittest.mock import Mock +import orjson import pytest from arroyo.backends.kafka.consumer import KafkaPayload from arroyo.backends.local.backend import LocalBroker @@ -30,7 +31,6 @@ from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.skips import requires_snuba from sentry.usage_accountant import accountant -from sentry.utils import json from sentry.utils.eventuser import EventUser from sentry.utils.json import loads @@ -87,7 +87,7 @@ def test_deduplication_works(default_project, task_runner, preprocess_event): for _ in range(2): process_event( { - "payload": json.dumps(payload), + "payload": orjson.dumps(payload).decode(), "start_time": start_time, "event_id": event_id, "project_id": project_id, @@ -137,7 +137,7 @@ def test_transactions_spawn_save_event_transaction( start_time = time.time() - 3600 process_event( { - "payload": json.dumps(payload), + "payload": orjson.dumps(payload).decode(), "start_time": start_time, "event_id": event_id, "project_id": project_id, @@ -186,7 +186,7 @@ def test_accountant_transaction(default_project): }, } payload = get_normalized_event(event, default_project) - serialized = json.dumps(payload) + serialized = orjson.dumps(payload).decode() process_event( { "payload": serialized, @@ -239,7 +239,7 @@ def test_feedbacks_spawn_save_event_feedback( start_time = time.time() - 3600 process_event( { - "payload": json.dumps(payload), + "payload": orjson.dumps(payload).decode(), "start_time": start_time, "event_id": event_id, "project_id": project_id, @@ -291,7 +291,7 @@ def test_with_attachments(default_project, task_runner, missing_chunks, monkeypa with task_runner(): process_event( { - "payload": json.dumps(payload), + "payload": 
orjson.dumps(payload).decode(), "start_time": start_time, "event_id": event_id, "project_id": project_id, @@ -361,7 +361,7 @@ def test_deobfuscate_view_hierarchy(default_project, task_runner): process_attachment_chunk( { - "payload": json.dumps_htmlsafe(obfuscated_view_hierarchy).encode(), + "payload": orjson.dumps(obfuscated_view_hierarchy), "event_id": event_id, "project_id": project_id, "id": attachment_id, @@ -372,7 +372,7 @@ def test_deobfuscate_view_hierarchy(default_project, task_runner): with task_runner(): process_event( { - "payload": json.dumps(payload), + "payload": orjson.dumps(payload).decode(), "start_time": start_time, "event_id": event_id, "project_id": project_id, @@ -499,14 +499,14 @@ def test_userreport(django_cache, default_project, monkeypatch): { "type": "user_report", "start_time": start_time, - "payload": json.dumps( + "payload": orjson.dumps( { "name": "Hans Gans", "event_id": event_id, "comments": "hello world", "email": "markus+dontatme@sentry.io", } - ), + ).decode(), "project_id": default_project.id, }, project=default_project, @@ -530,14 +530,14 @@ def test_userreport_reverse_order(django_cache, default_project, monkeypatch): { "type": "user_report", "start_time": start_time, - "payload": json.dumps( + "payload": orjson.dumps( { "name": "Hans Gans", "event_id": event_id, "comments": "hello world", "email": "markus+dontatme@sentry.io", } - ), + ).decode(), "project_id": default_project.id, }, project=default_project, From a2d7e9d2707f0e2e061600dc0daac9cdee06e858 Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Thu, 9 May 2024 11:48:15 -0400 Subject: [PATCH 209/376] fix(issue_stream): Fallback to title if customTitle is null (#70427) When an event has a `title` inside its metadata set to `null`, the UI would still try to use it. In such a case, the UI would not let developers view an Issue from the Issue Stream as it would not be hyperlinked. This is what it would look like (not clickable issues). In this change, we fall back to the standard title value rather than `metadata.title` which can be `null`. This happened because of a Relay deployment (https://github.com/getsentry/sentry-options-automator/pull/1367). --- static/app/utils/events.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/utils/events.tsx b/static/app/utils/events.tsx index abb2b75661553f..42ac576fac3e8e 100644 --- a/static/app/utils/events.tsx +++ b/static/app/utils/events.tsx @@ -178,7 +178,7 @@ export function getTitle( }; case EventOrGroupType.DEFAULT: return { - title: customTitle ?? metadata.title ?? '', + title: customTitle ??
title, subtitle: '', treeLabel: undefined, }; From f567c88d4fce6095d14d1d6f3665a3cbc0ba14ba Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Thu, 9 May 2024 11:51:29 -0400 Subject: [PATCH 210/376] ref(starfish): require referrer in discover hooks (#70540) Every discover query should have a referrer --- .../aggregateSpanDiff.tsx | 36 +++--- .../app/views/aiMonitoring/PipelinesTable.tsx | 61 +++++----- .../aiMonitoring/aiMonitoringDetailsPage.tsx | 49 +++++---- .../performance/browser/resources/referrer.ts | 5 + .../resources/resourceSummaryPage/index.tsx | 38 ++++--- .../resourceSummaryCharts.tsx | 44 ++++---- .../performance/cache/cacheLandingPage.tsx | 60 +++++----- .../app/views/performance/cache/referrers.ts | 1 + .../charts/transactionDurationChart.tsx | 12 +- .../cache/samplePanel/samplePanel.tsx | 43 ++++---- .../database/databaseLandingPage.tsx | 58 +++++----- .../database/databaseSpanSummaryPage.tsx | 98 +++++++++-------- .../http/httpDomainSummaryPage.tsx | 104 ++++++++++-------- .../performance/http/httpLandingPage.tsx | 76 +++++++------ .../performance/http/httpSamplesPanel.tsx | 44 ++++---- .../samples/samplesContainer.tsx | 14 ++- .../queues/messageConsumerSamplesPanel.tsx | 13 ++- .../queries/useQueuesByDestinationQuery.tsx | 40 +++---- .../queries/useQueuesByTransactionQuery.tsx | 42 +++---- .../queues/queries/useQueuesMetricsQuery.tsx | 36 +++--- .../queries/useQueuesTimeSeriesQuery.tsx | 22 ++-- .../transactionSpans/spanSummary/content.tsx | 21 ++-- .../spanSummary/spanSummaryCharts.tsx | 30 ++--- ...eMetrics.spec.tsx => useDiscover.spec.tsx} | 20 ++-- .../app/views/starfish/queries/useDiscover.ts | 21 ++-- .../queries/useDiscoverSeries.spec.tsx | 67 ++++++----- .../starfish/queries/useDiscoverSeries.ts | 15 ++- .../views/starfish/queries/useSpanSamples.tsx | 18 +-- .../sampleList/durationChart/index.tsx | 32 +++--- .../sampleList/sampleInfo/index.tsx | 26 +++-- .../sampleList/sampleTable/sampleTable.tsx | 18 +-- 31 files changed, 650 insertions(+), 514 deletions(-) create mode 100644 static/app/views/performance/browser/resources/referrer.ts rename static/app/views/starfish/queries/{useMetrics.spec.tsx => useDiscover.spec.tsx} (92%) diff --git a/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx b/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx index eb0e913628d4d2..29c62fef49df85 100644 --- a/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx +++ b/static/app/components/events/eventStatisticalDetector/aggregateSpanDiff.tsx @@ -117,23 +117,25 @@ function AggregateSpanDiff({event, project}: AggregateSpanDiffProps) { data: spansData, isLoading: isSpansDataLoading, isError: isSpansDataError, - } = useSpanMetrics({ - search, - fields: [ - 'span.op', - 'any(span.description)', - 'span.group', - `regression_score(span.self_time,${breakpoint})`, - `avg_by_timestamp(span.self_time,less,${breakpoint})`, - `avg_by_timestamp(span.self_time,greater,${breakpoint})`, - `epm_by_timestamp(less,${breakpoint})`, - `epm_by_timestamp(greater,${breakpoint})`, - ], - sorts: [{field: `regression_score(span.self_time,${breakpoint})`, kind: 'desc'}], - limit: 10, - enabled: isSpansOnly, - referrer: 'api.performance.transactions.statistical-detector-root-cause-analysis', - }); + } = useSpanMetrics( + { + search, + fields: [ + 'span.op', + 'any(span.description)', + 'span.group', + `regression_score(span.self_time,${breakpoint})`, +
`avg_by_timestamp(span.self_time,less,${breakpoint})`, + `avg_by_timestamp(span.self_time,greater,${breakpoint})`, + `epm_by_timestamp(less,${breakpoint})`, + `epm_by_timestamp(greater,${breakpoint})`, + ], + sorts: [{field: `regression_score(span.self_time,${breakpoint})`, kind: 'desc'}], + limit: 10, + enabled: isSpansOnly, + }, + 'api.performance.transactions.statistical-detector-root-cause-analysis' + ); const tableData = useMemo(() => { if (isSpansOnly) { diff --git a/static/app/views/aiMonitoring/PipelinesTable.tsx b/static/app/views/aiMonitoring/PipelinesTable.tsx index 34346bef401797..d8360a75772508 100644 --- a/static/app/views/aiMonitoring/PipelinesTable.tsx +++ b/static/app/views/aiMonitoring/PipelinesTable.tsx @@ -100,35 +100,40 @@ export function PipelinesTable() { sort = {field: 'spm()', kind: 'desc'}; } - const {data, isLoading, meta, pageLinks, error} = useSpanMetrics({ - search: MutableSearch.fromQueryObject({ - 'span.category': 'ai.pipeline', - 'span.description': spanDescription ? `*${spanDescription}*` : undefined, - }), - fields: [ - 'project.id', - 'span.group', - 'span.description', - 'spm()', - 'avg(span.duration)', - 'sum(span.duration)', - ], - sorts: [sort], - limit: 25, - cursor, - referrer: 'api.ai-pipelines.view', - }); + const {data, isLoading, meta, pageLinks, error} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject({ + 'span.category': 'ai.pipeline', + 'span.description': spanDescription ? `*${spanDescription}*` : undefined, + }), + fields: [ + 'project.id', + 'span.group', + 'span.description', + 'spm()', + 'avg(span.duration)', + 'sum(span.duration)', + ], + sorts: [sort], + limit: 25, + cursor, + }, + 'api.ai-pipelines.view' + ); - const {data: tokensUsedData, isLoading: tokensUsedLoading} = useSpanMetrics({ - search: new MutableSearch( - `span.category:ai span.ai.pipeline.group:[${(data as Row[])?.map(x => x['span.group']).join(',')}]` - ), - fields: [ - 'span.ai.pipeline.group', - 'ai_total_tokens_used()', - 'ai_total_tokens_used(c:spans/ai.total_cost@usd)', - ], - }); + const {data: tokensUsedData, isLoading: tokensUsedLoading} = useSpanMetrics( + { + search: new MutableSearch( + `span.category:ai span.ai.pipeline.group:[${(data as Row[])?.map(x => x['span.group']).join(',')}]` + ), + fields: [ + 'span.ai.pipeline.group', + 'ai_total_tokens_used()', + 'ai_total_tokens_used(c:spans/ai.total_cost@usd)', + ], + }, + 'api.performance.ai-analytics.token-usage-chart' + ); const rows: Row[] = (data as Row[]).map(baseRow => { const row: Row = { diff --git a/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx b/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx index 948872a12ac1bc..18bab99f7b8ce1 100644 --- a/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx +++ b/static/app/views/aiMonitoring/aiMonitoringDetailsPage.tsx @@ -54,29 +54,36 @@ export default function AiMonitoringPage({params}: Props) { 'span.category': 'ai.pipeline', }; - const {data, isLoading: areSpanMetricsLoading} = useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields: [ - SpanMetricsField.SPAN_OP, - SpanMetricsField.SPAN_DESCRIPTION, - 'count()', - `${SpanFunction.SPM}()`, - `avg(${SpanMetricsField.SPAN_DURATION})`, - ], - enabled: Boolean(groupId), - referrer: 'api.ai-pipelines.view', - }); + const {data, isLoading: areSpanMetricsLoading} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields: [ + SpanMetricsField.SPAN_OP, + SpanMetricsField.SPAN_DESCRIPTION, + 'count()', + `${SpanFunction.SPM}()`, + 
`avg(${SpanMetricsField.SPAN_DURATION})`, + ], + enabled: Boolean(groupId), + }, + 'api.ai-pipelines.view' + ); const spanMetrics = data[0] ?? {}; - const {data: totalTokenData, isLoading: isTotalTokenDataLoading} = useSpanMetrics({ - search: MutableSearch.fromQueryObject({ - 'span.category': 'ai', - 'span.ai.pipeline.group': groupId, - }), - fields: ['ai_total_tokens_used()', 'ai_total_tokens_used(c:spans/ai.total_cost@usd)'], - enabled: Boolean(groupId), - referrer: 'api.ai-pipelines.view', - }); + const {data: totalTokenData, isLoading: isTotalTokenDataLoading} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject({ + 'span.category': 'ai', + 'span.ai.pipeline.group': groupId, + }), + fields: [ + 'ai_total_tokens_used()', + 'ai_total_tokens_used(c:spans/ai.total_cost@usd)', + ], + enabled: Boolean(groupId), + }, + 'api.ai-pipelines.view' + ); const tokenUsedMetric = totalTokenData[0] ?? {}; return ( diff --git a/static/app/views/performance/browser/resources/referrer.ts b/static/app/views/performance/browser/resources/referrer.ts new file mode 100644 index 00000000000000..3c031ce7d6c63b --- /dev/null +++ b/static/app/views/performance/browser/resources/referrer.ts @@ -0,0 +1,5 @@ +// TODO - all resource referrers here +export enum Referrer { + RESOURCE_SUMMARY_METRICS_RIBBON = 'api.performance.browser.resources.resource-summary-metrics-ribbon', + RESOURCE_SUMMARY_CHARTS = 'api.performance.browser.resources.resource-summary-charts', +} diff --git a/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx b/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx index 57c65ad0f7b8a4..8503aca2c58bd6 100644 --- a/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx +++ b/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx @@ -14,6 +14,7 @@ import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {useParams} from 'sentry/utils/useParams'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {Referrer} from 'sentry/views/performance/browser/resources/referrer'; import ResourceInfo from 'sentry/views/performance/browser/resources/resourceSummaryPage/resourceInfo'; import ResourceSummaryCharts from 'sentry/views/performance/browser/resources/resourceSummaryPage/resourceSummaryCharts'; import ResourceSummaryTable from 'sentry/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable'; @@ -46,23 +47,26 @@ function ResourceSummary() { const { query: {transaction}, } = useLocation(); - const {data} = useSpanMetrics({ - search: MutableSearch.fromQueryObject({ - 'span.group': groupId, - }), - fields: [ - `avg(${SPAN_SELF_TIME})`, - `avg(${HTTP_RESPONSE_CONTENT_LENGTH})`, - `avg(${HTTP_DECODED_RESPONSE_CONTENT_LENGTH})`, - `avg(${HTTP_RESPONSE_TRANSFER_SIZE})`, - `sum(${SPAN_SELF_TIME})`, - 'spm()', - SPAN_OP, - SPAN_DESCRIPTION, - 'time_spent_percentage()', - 'project.id', - ], - }); + const {data} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject({ + 'span.group': groupId, + }), + fields: [ + `avg(${SPAN_SELF_TIME})`, + `avg(${HTTP_RESPONSE_CONTENT_LENGTH})`, + `avg(${HTTP_DECODED_RESPONSE_CONTENT_LENGTH})`, + `avg(${HTTP_RESPONSE_TRANSFER_SIZE})`, + `sum(${SPAN_SELF_TIME})`, + 'spm()', + SPAN_OP, + SPAN_DESCRIPTION, + 'time_spent_percentage()', + 'project.id', + ], + }, + Referrer.RESOURCE_SUMMARY_METRICS_RIBBON + ); const spanMetrics = selectedSpanOp ? 
data.find(item => item[SPAN_OP] === selectedSpanOp) ?? {} : data[0] ?? {}; diff --git a/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryCharts.tsx b/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryCharts.tsx index 9bbfa87b1891bd..a51a44c401cad9 100644 --- a/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryCharts.tsx +++ b/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryCharts.tsx @@ -4,6 +4,7 @@ import {formatRate} from 'sentry/utils/formatters'; import getDynamicText from 'sentry/utils/getDynamicText'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import {RESOURCE_THROUGHPUT_UNIT} from 'sentry/views/performance/browser/resources'; +import {Referrer} from 'sentry/views/performance/browser/resources/referrer'; import {useResourceModuleFilters} from 'sentry/views/performance/browser/resources/utils/useResourceFilters'; import {AVG_COLOR, THROUGHPUT_COLOR} from 'sentry/views/starfish/colors'; import Chart, {ChartType} from 'sentry/views/starfish/components/chart'; @@ -27,29 +28,30 @@ const { function ResourceSummaryCharts(props: {groupId: string}) { const filters = useResourceModuleFilters(); - // console.log({ - // ...(filters[RESOURCE_RENDER_BLOCKING_STATUS] - // ? {[RESOURCE_RENDER_BLOCKING_STATUS]: filters[RESOURCE_RENDER_BLOCKING_STATUS]} - // : {}), - // }); const {data: spanMetricsSeriesData, isLoading: areSpanMetricsSeriesLoading} = - useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject({ - 'span.group': props.groupId, - ...(filters[RESOURCE_RENDER_BLOCKING_STATUS] - ? {[RESOURCE_RENDER_BLOCKING_STATUS]: filters[RESOURCE_RENDER_BLOCKING_STATUS]} - : {}), - }), - yAxis: [ - `spm()`, - `avg(${SPAN_SELF_TIME})`, - `avg(${HTTP_RESPONSE_CONTENT_LENGTH})`, - `avg(${HTTP_DECODED_RESPONSE_CONTENT_LENGTH})`, - `avg(${HTTP_RESPONSE_TRANSFER_SIZE})`, - ], - enabled: Boolean(props.groupId), - }); + useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject({ + 'span.group': props.groupId, + ...(filters[RESOURCE_RENDER_BLOCKING_STATUS] + ? 
{ + [RESOURCE_RENDER_BLOCKING_STATUS]: + filters[RESOURCE_RENDER_BLOCKING_STATUS], + } + : {}), + }), + yAxis: [ + `spm()`, + `avg(${SPAN_SELF_TIME})`, + `avg(${HTTP_RESPONSE_CONTENT_LENGTH})`, + `avg(${HTTP_DECODED_RESPONSE_CONTENT_LENGTH})`, + `avg(${HTTP_RESPONSE_TRANSFER_SIZE})`, + ], + enabled: Boolean(props.groupId), + }, + Referrer.RESOURCE_SUMMARY_CHARTS + ); if (spanMetricsSeriesData) { spanMetricsSeriesData[`avg(${HTTP_RESPONSE_TRANSFER_SIZE})`].lineStyle = { diff --git a/static/app/views/performance/cache/cacheLandingPage.tsx b/static/app/views/performance/cache/cacheLandingPage.tsx index 69f31369476284..fdbb9ab5de0546 100644 --- a/static/app/views/performance/cache/cacheLandingPage.tsx +++ b/static/app/views/performance/cache/cacheLandingPage.tsx @@ -53,21 +53,25 @@ export function CacheLandingPage() { isLoading: isCacheHitRateLoading, data: cacheHitRateData, error: cacheHitRateError, - } = useSpanMetricsSeries({ - yAxis: [`${CACHE_MISS_RATE}()`], - search: MutableSearch.fromQueryObject(BASE_FILTERS), - referrer: Referrer.LANDING_CACHE_HIT_MISS_CHART, - }); + } = useSpanMetricsSeries( + { + yAxis: [`${CACHE_MISS_RATE}()`], + search: MutableSearch.fromQueryObject(BASE_FILTERS), + }, + Referrer.LANDING_CACHE_HIT_MISS_CHART + ); const { isLoading: isThroughputDataLoading, data: throughputData, error: throughputError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(BASE_FILTERS), - yAxis: ['spm()'], - referrer: Referrer.LANDING_CACHE_THROUGHPUT_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(BASE_FILTERS), + yAxis: ['spm()'], + }, + Referrer.LANDING_CACHE_THROUGHPUT_CHART + ); const { isLoading: isTransactionsListLoading, @@ -75,23 +79,25 @@ export function CacheLandingPage() { meta: transactionsListMeta, error: transactionsListError, pageLinks: transactionsListPageLinks, - } = useSpanMetrics({ - search: MutableSearch.fromQueryObject(BASE_FILTERS), - fields: [ - 'project', - 'project.id', - 'transaction', - 'spm()', - `${CACHE_MISS_RATE}()`, - 'sum(span.self_time)', - 'time_spent_percentage()', - `avg(${CACHE_ITEM_SIZE})`, - ], - sorts: [sort], - cursor, - limit: TRANSACTIONS_TABLE_ROW_COUNT, - referrer: Referrer.LANDING_CACHE_TRANSACTION_LIST, - }); + } = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(BASE_FILTERS), + fields: [ + 'project', + 'project.id', + 'transaction', + 'spm()', + `${CACHE_MISS_RATE}()`, + 'sum(span.self_time)', + 'time_spent_percentage()', + `avg(${CACHE_ITEM_SIZE})`, + ], + sorts: [sort], + cursor, + limit: TRANSACTIONS_TABLE_ROW_COUNT, + }, + Referrer.LANDING_CACHE_TRANSACTION_LIST + ); addCustomMeta(transactionsListMeta); diff --git a/static/app/views/performance/cache/referrers.ts b/static/app/views/performance/cache/referrers.ts index 33c83e4e3e87fe..ba10434791f30c 100644 --- a/static/app/views/performance/cache/referrers.ts +++ b/static/app/views/performance/cache/referrers.ts @@ -4,6 +4,7 @@ export enum Referrer { LANDING_CACHE_TRANSACTION_LIST = 'api.performance.cache.landing-cache-transaction-list', SAMPLES_CACHE_METRICS_RIBBON = 'api.performance.cache.samples-cache-metrics-ribbon', + SAMPLES_CACHE_TRANSACTION_DURATION_CHART = 'api.performance.cache.samples-cache-transaction-duration-chart', SAMPLES_CACHE_TRANSACTION_DURATION = 'api.performance.cache.samples-cache-transaction-duration', SAMPLES_CACHE_SPAN_SAMPLES = 'api.performance.cache.samples-cache-span-samples', SAMPLES_CACHE_SPAN_SAMPLES_TRANSACTION_DURATION = 'api.performance.cache.samples-cache-span-samples', diff --git 
a/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx b/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx index 71d89532146e21..9fa37102441e11 100644 --- a/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx +++ b/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx @@ -37,11 +37,13 @@ export function TransactionDurationChart({ transaction, }; - const {data, isLoading} = useMetricsSeries({ - yAxis: ['avg(transaction.duration)'], - search: MutableSearch.fromQueryObject(search), - referrer: Referrer.SAMPLES_CACHE_TRANSACTION_DURATION, - }); + const {data, isLoading} = useMetricsSeries( + { + yAxis: ['avg(transaction.duration)'], + search: MutableSearch.fromQueryObject(search), + }, + Referrer.SAMPLES_CACHE_TRANSACTION_DURATION_CHART + ); const sampledSpanDataSeries = useSampleScatterPlotSeries( samples, diff --git a/static/app/views/performance/cache/samplePanel/samplePanel.tsx b/static/app/views/performance/cache/samplePanel/samplePanel.tsx index 27130737e38f12..1359c32ada6643 100644 --- a/static/app/views/performance/cache/samplePanel/samplePanel.tsx +++ b/static/app/views/performance/cache/samplePanel/samplePanel.tsx @@ -73,27 +73,32 @@ export function CacheSamplePanel() { }; const {data: cacheTransactionMetrics, isFetching: areCacheTransactionMetricsFetching} = - useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields: [ - `${SpanFunction.SPM}()`, - `${SpanFunction.CACHE_MISS_RATE}()`, - `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, - `sum(${SpanMetricsField.SPAN_SELF_TIME})`, - `avg(${SpanMetricsField.CACHE_ITEM_SIZE})`, - ], - enabled: isPanelOpen, - referrer: Referrer.SAMPLES_CACHE_METRICS_RIBBON, - }); + useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields: [ + `${SpanFunction.SPM}()`, + `${SpanFunction.CACHE_MISS_RATE}()`, + `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + `avg(${SpanMetricsField.CACHE_ITEM_SIZE})`, + ], + enabled: isPanelOpen, + }, + Referrer.SAMPLES_CACHE_METRICS_RIBBON + ); const {data: transactionDurationData, isLoading: isTransactionDurationLoading} = - useMetrics({ - search: MutableSearch.fromQueryObject({ - transaction: query.transaction, - } satisfies MetricsQueryFilters), - fields: [`avg(${MetricsFields.TRANSACTION_DURATION})`], - enabled: isPanelOpen && Boolean(query.transaction), - }); + useMetrics( + { + search: MutableSearch.fromQueryObject({ + transaction: query.transaction, + } satisfies MetricsQueryFilters), + fields: [`avg(${MetricsFields.TRANSACTION_DURATION})`], + enabled: isPanelOpen && Boolean(query.transaction), + }, + Referrer.SAMPLES_CACHE_TRANSACTION_DURATION + ); const sampleFilters: SpanIndexedQueryFilters = { ...BASE_FILTERS, diff --git a/static/app/views/performance/database/databaseLandingPage.tsx b/static/app/views/performance/database/databaseLandingPage.tsx index 429c136697b4d0..0fcbf19dd9acc9 100644 --- a/static/app/views/performance/database/databaseLandingPage.tsx +++ b/static/app/views/performance/database/databaseLandingPage.tsx @@ -80,42 +80,48 @@ export function DatabaseLandingPage() { const cursor = decodeScalar(location.query?.[QueryParameterNames.SPANS_CURSOR]); - const queryListResponse = useSpanMetrics({ - search: MutableSearch.fromQueryObject(tableFilters), - fields: [ - 'project.id', - 'span.group', - 'span.description', - 'spm()', - 'avg(span.self_time)', - 'sum(span.self_time)', - 'time_spent_percentage()', - ], 
- sorts: [sort], - limit: LIMIT, - cursor, - referrer: 'api.starfish.use-span-list', - }); + const queryListResponse = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(tableFilters), + fields: [ + 'project.id', + 'span.group', + 'span.description', + 'spm()', + 'avg(span.self_time)', + 'sum(span.self_time)', + 'time_spent_percentage()', + ], + sorts: [sort], + limit: LIMIT, + cursor, + }, + 'api.starfish.use-span-list' + ); const { isLoading: isThroughputDataLoading, data: throughputData, error: throughputError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(chartFilters), - yAxis: ['spm()'], - referrer: 'api.starfish.span-landing-page-metrics-chart', - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(chartFilters), + yAxis: ['spm()'], + }, + 'api.starfish.span-landing-page-metrics-chart' + ); const { isLoading: isDurationDataLoading, data: durationData, error: durationError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(chartFilters), - yAxis: [`${selectedAggregate}(${SpanMetricsField.SPAN_SELF_TIME})`], - referrer: 'api.starfish.span-landing-page-metrics-chart', - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(chartFilters), + yAxis: [`${selectedAggregate}(${SpanMetricsField.SPAN_SELF_TIME})`], + }, + 'api.starfish.span-landing-page-metrics-chart' + ); const isCriticalDataLoading = isThroughputDataLoading || isDurationDataLoading || queryListResponse.isLoading; diff --git a/static/app/views/performance/database/databaseSpanSummaryPage.tsx b/static/app/views/performance/database/databaseSpanSummaryPage.tsx index 21f68a790d0739..2dcd5efd52f9cc 100644 --- a/static/app/views/performance/database/databaseSpanSummaryPage.tsx +++ b/static/app/views/performance/database/databaseSpanSummaryPage.tsx @@ -65,23 +65,25 @@ export function DatabaseSpanSummaryPage({params}: Props) { const sort = decodeSorts(sortField).filter(isAValidSort).at(0) ?? DEFAULT_SORT; - const {data, isLoading: areSpanMetricsLoading} = useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields: [ - SpanMetricsField.SPAN_OP, - SpanMetricsField.SPAN_DESCRIPTION, - SpanMetricsField.SPAN_ACTION, - SpanMetricsField.SPAN_DOMAIN, - 'count()', - `${SpanFunction.SPM}()`, - `sum(${SpanMetricsField.SPAN_SELF_TIME})`, - `avg(${SpanMetricsField.SPAN_SELF_TIME})`, - `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, - `${SpanFunction.HTTP_ERROR_COUNT}()`, - ], - enabled: Boolean(groupId), - referrer: 'api.starfish.span-summary-page-metrics', - }); + const {data, isLoading: areSpanMetricsLoading} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields: [ + SpanMetricsField.SPAN_OP, + SpanMetricsField.SPAN_DESCRIPTION, + SpanMetricsField.SPAN_ACTION, + SpanMetricsField.SPAN_DOMAIN, + 'count()', + `${SpanFunction.SPM}()`, + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + `avg(${SpanMetricsField.SPAN_SELF_TIME})`, + `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, + `${SpanFunction.HTTP_ERROR_COUNT}()`, + ], + enabled: Boolean(groupId), + }, + 'api.starfish.span-summary-page-metrics' + ); const spanMetrics = data[0] ?? 
{}; @@ -91,22 +93,24 @@ export function DatabaseSpanSummaryPage({params}: Props) { meta: transactionsListMeta, error: transactionsListError, pageLinks: transactionsListPageLinks, - } = useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields: [ - 'transaction', - 'transaction.method', - 'spm()', - `sum(${SpanMetricsField.SPAN_SELF_TIME})`, - `avg(${SpanMetricsField.SPAN_SELF_TIME})`, - 'time_spent_percentage()', - `${SpanFunction.HTTP_ERROR_COUNT}()`, - ], - sorts: [sort], - limit: TRANSACTIONS_TABLE_ROW_COUNT, - cursor, - referrer: 'api.starfish.span-transaction-metrics', - }); + } = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields: [ + 'transaction', + 'transaction.method', + 'spm()', + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + `avg(${SpanMetricsField.SPAN_SELF_TIME})`, + 'time_spent_percentage()', + `${SpanFunction.HTTP_ERROR_COUNT}()`, + ], + sorts: [sort], + limit: TRANSACTIONS_TABLE_ROW_COUNT, + cursor, + }, + 'api.starfish.span-transaction-metrics' + ); const span = { ...spanMetrics, @@ -123,23 +127,27 @@ export function DatabaseSpanSummaryPage({params}: Props) { isLoading: isThroughputDataLoading, data: throughputData, error: throughputError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - yAxis: ['spm()'], - enabled: Boolean(groupId), - referrer: 'api.starfish.span-summary-page-metrics-chart', - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + yAxis: ['spm()'], + enabled: Boolean(groupId), + }, + 'api.starfish.span-summary-page-metrics-chart' + ); const { isLoading: isDurationDataLoading, data: durationData, error: durationError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - yAxis: [`${selectedAggregate}(${SpanMetricsField.SPAN_SELF_TIME})`], - enabled: Boolean(groupId), - referrer: 'api.starfish.span-summary-page-metrics-chart', - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + yAxis: [`${selectedAggregate}(${SpanMetricsField.SPAN_SELF_TIME})`], + enabled: Boolean(groupId), + }, + 'api.starfish.span-summary-page-metrics-chart' + ); useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); diff --git a/static/app/views/performance/http/httpDomainSummaryPage.tsx b/static/app/views/performance/http/httpDomainSummaryPage.tsx index 2c1297854c7fc2..9bfb08d260e3a5 100644 --- a/static/app/views/performance/http/httpDomainSummaryPage.tsx +++ b/static/app/views/performance/http/httpDomainSummaryPage.tsx @@ -84,49 +84,57 @@ export function HTTPDomainSummaryPage() { const cursor = decodeScalar(location.query?.[QueryParameterNames.TRANSACTIONS_CURSOR]); - const {data: domainMetrics, isLoading: areDomainMetricsLoading} = useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields: [ - `${SpanFunction.SPM}()`, - `avg(${SpanMetricsField.SPAN_SELF_TIME})`, - `sum(${SpanMetricsField.SPAN_SELF_TIME})`, - 'http_response_rate(3)', - 'http_response_rate(4)', - 'http_response_rate(5)', - `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, - ], - referrer: Referrer.DOMAIN_SUMMARY_METRICS_RIBBON, - }); + const {data: domainMetrics, isLoading: areDomainMetricsLoading} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields: [ + `${SpanFunction.SPM}()`, + `avg(${SpanMetricsField.SPAN_SELF_TIME})`, + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + 'http_response_rate(3)', + 'http_response_rate(4)', + 'http_response_rate(5)', + 
`${SpanFunction.TIME_SPENT_PERCENTAGE}()`, + ], + }, + Referrer.DOMAIN_SUMMARY_METRICS_RIBBON + ); const { isLoading: isThroughputDataLoading, data: throughputData, error: throughputError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - yAxis: ['spm()'], - referrer: Referrer.DOMAIN_SUMMARY_THROUGHPUT_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + yAxis: ['spm()'], + }, + Referrer.DOMAIN_SUMMARY_THROUGHPUT_CHART + ); const { isLoading: isDurationDataLoading, data: durationData, error: durationError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - yAxis: [`avg(${SpanMetricsField.SPAN_SELF_TIME})`], - referrer: Referrer.DOMAIN_SUMMARY_DURATION_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + yAxis: [`avg(${SpanMetricsField.SPAN_SELF_TIME})`], + }, + Referrer.DOMAIN_SUMMARY_DURATION_CHART + ); const { isLoading: isResponseCodeDataLoading, data: responseCodeData, error: responseCodeError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - yAxis: ['http_response_rate(3)', 'http_response_rate(4)', 'http_response_rate(5)'], - referrer: Referrer.DOMAIN_SUMMARY_RESPONSE_CODE_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + yAxis: ['http_response_rate(3)', 'http_response_rate(4)', 'http_response_rate(5)'], + }, + Referrer.DOMAIN_SUMMARY_RESPONSE_CODE_CHART + ); const { isLoading: isTransactionsListLoading, @@ -134,25 +142,27 @@ export function HTTPDomainSummaryPage() { meta: transactionsListMeta, error: transactionsListError, pageLinks: transactionsListPageLinks, - } = useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields: [ - 'project.id', - 'transaction', - 'transaction.method', - 'spm()', - 'http_response_rate(3)', - 'http_response_rate(4)', - 'http_response_rate(5)', - 'avg(span.self_time)', - 'sum(span.self_time)', - 'time_spent_percentage()', - ], - sorts: [sort], - limit: TRANSACTIONS_TABLE_ROW_COUNT, - cursor, - referrer: Referrer.DOMAIN_SUMMARY_TRANSACTIONS_LIST, - }); + } = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields: [ + 'project.id', + 'transaction', + 'transaction.method', + 'spm()', + 'http_response_rate(3)', + 'http_response_rate(4)', + 'http_response_rate(5)', + 'avg(span.self_time)', + 'sum(span.self_time)', + 'time_spent_percentage()', + ], + sorts: [sort], + limit: TRANSACTIONS_TABLE_ROW_COUNT, + cursor, + }, + Referrer.DOMAIN_SUMMARY_TRANSACTIONS_LIST + ); useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); diff --git a/static/app/views/performance/http/httpLandingPage.tsx b/static/app/views/performance/http/httpLandingPage.tsx index a7cb80886d4bf8..82c0fa3763e9ac 100644 --- a/static/app/views/performance/http/httpLandingPage.tsx +++ b/static/app/views/performance/http/httpLandingPage.tsx @@ -79,51 +79,59 @@ export function HTTPLandingPage() { isLoading: isThroughputDataLoading, data: throughputData, error: throughputError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(chartFilters), - yAxis: ['spm()'], - referrer: Referrer.LANDING_THROUGHPUT_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(chartFilters), + yAxis: ['spm()'], + }, + Referrer.LANDING_THROUGHPUT_CHART + ); const { isLoading: isDurationDataLoading, data: durationData, error: durationError, - } = useSpanMetricsSeries({ - search: 
MutableSearch.fromQueryObject(chartFilters), - yAxis: [`avg(span.self_time)`], - referrer: Referrer.LANDING_DURATION_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(chartFilters), + yAxis: [`avg(span.self_time)`], + }, + Referrer.LANDING_DURATION_CHART + ); const { isLoading: isResponseCodeDataLoading, data: responseCodeData, error: responseCodeError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(chartFilters), - yAxis: ['http_response_rate(3)', 'http_response_rate(4)', 'http_response_rate(5)'], - referrer: Referrer.LANDING_RESPONSE_CODE_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(chartFilters), + yAxis: ['http_response_rate(3)', 'http_response_rate(4)', 'http_response_rate(5)'], + }, + Referrer.LANDING_RESPONSE_CODE_CHART + ); - const domainsListResponse = useSpanMetrics({ - search: MutableSearch.fromQueryObject(tableFilters), - fields: [ - 'project', - 'project.id', - 'span.domain', - 'spm()', - 'http_response_rate(3)', - 'http_response_rate(4)', - 'http_response_rate(5)', - 'avg(span.self_time)', - 'sum(span.self_time)', - 'time_spent_percentage()', - ], - sorts: [sort], - limit: DOMAIN_TABLE_ROW_COUNT, - cursor, - referrer: Referrer.LANDING_DOMAINS_LIST, - }); + const domainsListResponse = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(tableFilters), + fields: [ + 'project', + 'project.id', + 'span.domain', + 'spm()', + 'http_response_rate(3)', + 'http_response_rate(4)', + 'http_response_rate(5)', + 'avg(span.self_time)', + 'sum(span.self_time)', + 'time_spent_percentage()', + ], + sorts: [sort], + limit: DOMAIN_TABLE_ROW_COUNT, + cursor, + }, + Referrer.LANDING_DOMAINS_LIST + ); useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); diff --git a/static/app/views/performance/http/httpSamplesPanel.tsx b/static/app/views/performance/http/httpSamplesPanel.tsx index e85ce946df6e90..3089ab7037621e 100644 --- a/static/app/views/performance/http/httpSamplesPanel.tsx +++ b/static/app/views/performance/http/httpSamplesPanel.tsx @@ -139,31 +139,35 @@ export function HTTPSamplesPanel() { const { data: domainTransactionMetrics, isFetching: areDomainTransactionMetricsFetching, - } = useSpanMetrics({ - search: MutableSearch.fromQueryObject(ribbonFilters), - fields: [ - `${SpanFunction.SPM}()`, - `avg(${SpanMetricsField.SPAN_SELF_TIME})`, - `sum(${SpanMetricsField.SPAN_SELF_TIME})`, - 'http_response_rate(3)', - 'http_response_rate(4)', - 'http_response_rate(5)', - `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, - ], - enabled: isPanelOpen, - referrer: Referrer.SAMPLES_PANEL_METRICS_RIBBON, - }); + } = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(ribbonFilters), + fields: [ + `${SpanFunction.SPM}()`, + `avg(${SpanMetricsField.SPAN_SELF_TIME})`, + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + 'http_response_rate(3)', + 'http_response_rate(4)', + 'http_response_rate(5)', + `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, + ], + enabled: isPanelOpen, + }, + Referrer.SAMPLES_PANEL_METRICS_RIBBON + ); const { isFetching: isDurationDataFetching, data: durationData, error: durationError, - } = useSpanMetricsSeries({ - search, - yAxis: [`avg(span.self_time)`], - enabled: isPanelOpen && query.panel === 'duration', - referrer: Referrer.SAMPLES_PANEL_DURATION_CHART, - }); + } = useSpanMetricsSeries( + { + search, + yAxis: [`avg(span.self_time)`], + enabled: isPanelOpen && query.panel === 'duration', + }, + Referrer.SAMPLES_PANEL_DURATION_CHART + ); const { isFetching: 
isResponseCodeDataLoading, diff --git a/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx b/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx index d364afe3e9fb1d..f44c00fdaf3a6e 100644 --- a/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx +++ b/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx @@ -96,12 +96,14 @@ export function ScreenLoadSampleContainer({ filters['span.op'] = spanOp; } - const {data} = useSpanMetrics({ - search: MutableSearch.fromQueryObject({...filters, ...additionalFilters}), - fields: [`avg(${SPAN_SELF_TIME})`, 'count()', SPAN_OP], - enabled: Boolean(groupId) && Boolean(transactionName), - referrer: 'api.starfish.span-summary-panel-samples-table-avg', - }); + const {data} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject({...filters, ...additionalFilters}), + fields: [`avg(${SPAN_SELF_TIME})`, 'count()', SPAN_OP], + enabled: Boolean(groupId) && Boolean(transactionName), + }, + 'api.starfish.span-summary-panel-samples-table-avg' + ); const spanMetrics = data[0] ?? {}; diff --git a/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx b/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx index 86649dfdd0f1bc..b3f2bc6e40f1f6 100644 --- a/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx +++ b/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx @@ -75,11 +75,14 @@ export function MessageConsumerSamplesPanel() { isFetching: isDurationDataFetching, data: durationData, error: durationError, - } = useSpanMetricsSeries({ - search, - yAxis: [`avg(span.self_time)`], - enabled: isPanelOpen, - }); + } = useSpanMetricsSeries( + { + search, + yAxis: [`avg(span.self_time)`], + enabled: isPanelOpen, + }, + 'api.performance.queues.avg-duration-chart' + ); const durationAxisMax = computeAxisMax([durationData?.[`avg(span.self_time)`]]); diff --git a/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx b/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx index cc24f9e3b02d27..129550de620367 100644 --- a/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx @@ -14,25 +14,27 @@ export function useQueuesByDestinationQuery({enabled}: Props) { const cursor = decodeScalar(location.query?.[QueryParameterNames.DESTINATIONS_CURSOR]); const mutableSearch = new MutableSearch(DEFAULT_QUERY_FILTER); - const response = useSpanMetrics({ - search: mutableSearch, - fields: [ - 'messaging.destination.name', - 'count()', - 'count_op(queue.publish)', - 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', - 'avg(messaging.message.receive.latency)', - ], - enabled, - sorts: [], - limit: 10, - cursor, - referrer: 'api.performance.queues.destination-summary', - }); + const response = useSpanMetrics( + { + search: mutableSearch, + fields: [ + 'messaging.destination.name', + 'count()', + 'count_op(queue.publish)', + 'count_op(queue.process)', + 'sum(span.self_time)', + 'avg(span.self_time)', + 'avg_if(span.self_time,span.op,queue.publish)', + 'avg_if(span.self_time,span.op,queue.process)', + 'avg(messaging.message.receive.latency)', + ], + enabled, + sorts: [], + limit: 10, + cursor, + }, + 
'api.performance.queues.destination-summary' + ); return response; } diff --git a/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx b/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx index 863e99f4a76bba..015316a12f2623 100644 --- a/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx @@ -18,26 +18,28 @@ export function useQueuesByTransactionQuery({destination, enabled}: Props) { if (destination) { mutableSearch.addFilterValue('messaging.destination.name', destination); } - const response = useSpanMetrics({ - search: mutableSearch, - fields: [ - 'transaction', - 'span.op', - 'count()', - 'count_op(queue.publish)', - 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', - 'avg(messaging.message.receive.latency)', - ], - enabled, - sorts: [], - limit: 10, - cursor, - referrer: 'api.performance.queues.destination-summary', - }); + const response = useSpanMetrics( + { + search: mutableSearch, + fields: [ + 'transaction', + 'span.op', + 'count()', + 'count_op(queue.publish)', + 'count_op(queue.process)', + 'sum(span.self_time)', + 'avg(span.self_time)', + 'avg_if(span.self_time,span.op,queue.publish)', + 'avg_if(span.self_time,span.op,queue.process)', + 'avg(messaging.message.receive.latency)', + ], + enabled, + sorts: [], + limit: 10, + cursor, + }, + 'api.performance.queues.destination-summary' + ); return response; } diff --git a/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx b/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx index f7f2f5d5f627b6..db076c78162580 100644 --- a/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx @@ -16,23 +16,25 @@ export function useQueuesMetricsQuery({destination, transaction, enabled}: Props if (transaction) { mutableSearch.addFilterValue('transaction', transaction); } - const response = useSpanMetrics({ - search: mutableSearch, - fields: [ - 'count()', - 'count_op(queue.publish)', - 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', - 'avg(messaging.message.receive.latency)', - ], - enabled, - sorts: [], - limit: 10, - referrer: 'api.performance.queues.destination-summary', - }); + const response = useSpanMetrics( + { + search: mutableSearch, + fields: [ + 'count()', + 'count_op(queue.publish)', + 'count_op(queue.process)', + 'sum(span.self_time)', + 'avg(span.self_time)', + 'avg_if(span.self_time,span.op,queue.publish)', + 'avg_if(span.self_time,span.op,queue.process)', + 'avg(messaging.message.receive.latency)', + ], + enabled, + sorts: [], + limit: 10, + }, + 'api.performance.queues.destination-summary' + ); return response; } diff --git a/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx b/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx index 525302d785d40b..e4ba1229716f15 100644 --- a/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx @@ -16,14 +16,16 @@ const yAxis: SpanMetricsProperty[] = [ ]; export function useQueuesTimeSeriesQuery({enabled, destination}: Props) { - 
return useSpanMetricsSeries({ - yAxis, - search: destination - ? MutableSearch.fromQueryObject({ - 'messaging.destination.name': destination, - }) - : undefined, - referrer: 'api.performance.queues.module-chart', - enabled, - }); + return useSpanMetricsSeries( + { + yAxis, + search: destination + ? MutableSearch.fromQueryObject({ + 'messaging.destination.name': destination, + }) + : undefined, + enabled, + }, + 'api.performance.queues.module-chart' + ); } diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx index e7b9aa7bc0af61..b0c5e93d3f2a85 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.tsx @@ -109,13 +109,20 @@ function SpanSummaryContent(props: ContentProps) { transaction: transactionName, }; - const {data: spanHeaderData} = useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - // TODO: query average duration instead of self time before releasing this - fields: ['span.description', 'avg(span.self_time)', 'sum(span.self_time)', 'count()'], - enabled: Boolean(groupId), - referrer: SpanSummaryReferrer.SPAN_SUMMARY_HEADER_DATA, - }); + const {data: spanHeaderData} = useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + // TODO: query average duration instead of self time before releasing this + fields: [ + 'span.description', + 'avg(span.self_time)', + 'sum(span.self_time)', + 'count()', + ], + enabled: Boolean(groupId), + }, + SpanSummaryReferrer.SPAN_SUMMARY_HEADER_DATA + ); const description = spanHeaderData[0]?.['span.description'] ?? 
t('unknown'); const timeSpent = spanHeaderData[0]?.['sum(span.self_time)']; diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx index bdbba2d135132e..51374211d7473d 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx @@ -44,24 +44,28 @@ function SpanSummaryCharts() { isLoading: isThroughputDataLoading, data: throughputData, error: throughputError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - yAxis: ['spm()'], - enabled: Boolean(groupId), - referrer: SpanSummaryReferrer.SPAN_SUMMARY_THROUGHPUT_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + yAxis: ['spm()'], + enabled: Boolean(groupId), + }, + SpanSummaryReferrer.SPAN_SUMMARY_THROUGHPUT_CHART + ); const { isLoading: isAvgDurationDataLoading, data: avgDurationData, error: avgDurationError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - // TODO: Switch this to SPAN_DURATION before release - yAxis: [`avg(${SpanMetricsField.SPAN_SELF_TIME})`], - enabled: Boolean(groupId), - referrer: SpanSummaryReferrer.SPAN_SUMMARY_DURATION_CHART, - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + // TODO: Switch this to SPAN_DURATION before release + yAxis: [`avg(${SpanMetricsField.SPAN_SELF_TIME})`], + enabled: Boolean(groupId), + }, + SpanSummaryReferrer.SPAN_SUMMARY_DURATION_CHART + ); const eventView = EventView.fromNewQueryWithLocation( { diff --git a/static/app/views/starfish/queries/useMetrics.spec.tsx b/static/app/views/starfish/queries/useDiscover.spec.tsx similarity index 92% rename from static/app/views/starfish/queries/useMetrics.spec.tsx rename to static/app/views/starfish/queries/useDiscover.spec.tsx index e3719360acc8e9..28cb1e39c31c8f 100644 --- a/static/app/views/starfish/queries/useMetrics.spec.tsx +++ b/static/app/views/starfish/queries/useDiscover.spec.tsx @@ -59,7 +59,7 @@ describe('useSpanMetrics', () => { }); const {result} = renderHook( - ({fields, enabled}) => useSpanMetrics({fields, enabled}), + ({fields, enabled}) => useSpanMetrics({fields, enabled}, 'span-metrics-series'), { wrapper: Wrapper, initialProps: { @@ -90,14 +90,16 @@ describe('useSpanMetrics', () => { const {result} = renderHook( ({filters, fields, sorts, limit, cursor, referrer}) => - useSpanMetrics({ - search: MutableSearch.fromQueryObject(filters), - fields, - sorts, - limit, - cursor, - referrer, - }), + useSpanMetrics( + { + search: MutableSearch.fromQueryObject(filters), + fields, + sorts, + limit, + cursor, + }, + referrer + ), { wrapper: Wrapper, initialProps: { diff --git a/static/app/views/starfish/queries/useDiscover.ts b/static/app/views/starfish/queries/useDiscover.ts index 72a1a5bd32e395..37faf1241356be 100644 --- a/static/app/views/starfish/queries/useDiscover.ts +++ b/static/app/views/starfish/queries/useDiscover.ts @@ -17,31 +17,38 @@ interface UseMetricsOptions { enabled?: boolean; fields?: Fields; limit?: number; - referrer?: string; search?: MutableSearch; sorts?: Sort[]; } export const useSpanMetrics = ( - options: UseMetricsOptions = {} + options: UseMetricsOptions = {}, + referrer: string ) => { return useDiscover( options, - DiscoverDatasets.SPANS_METRICS + 
DiscoverDatasets.SPANS_METRICS, + referrer ); }; export const useMetrics = ( - options: UseMetricsOptions = {} + options: UseMetricsOptions = {}, + referrer: string ) => { - return useDiscover(options, DiscoverDatasets.METRICS); + return useDiscover( + options, + DiscoverDatasets.METRICS, + referrer + ); }; const useDiscover = [], ResponseType>( options: UseMetricsOptions = {}, - dataset: DiscoverDatasets + dataset: DiscoverDatasets, + referrer: string ) => { - const {fields = [], search = undefined, sorts = [], limit, cursor, referrer} = options; + const {fields = [], search = undefined, sorts = [], limit, cursor} = options; const pageFilters = usePageFilters(); diff --git a/static/app/views/starfish/queries/useDiscoverSeries.spec.tsx b/static/app/views/starfish/queries/useDiscoverSeries.spec.tsx index 826d6aeedbe890..786c008be66305 100644 --- a/static/app/views/starfish/queries/useDiscoverSeries.spec.tsx +++ b/static/app/views/starfish/queries/useDiscoverSeries.spec.tsx @@ -63,10 +63,13 @@ describe('useSpanMetricsSeries', () => { const {result} = renderHook( ({filters, enabled}) => - useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject(filters), - enabled, - }), + useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters), + enabled, + }, + 'span-metrics-series' + ), { wrapper: Wrapper, initialProps: { @@ -98,7 +101,10 @@ describe('useSpanMetricsSeries', () => { const {result} = renderHook( ({filters, yAxis}) => - useSpanMetricsSeries({search: MutableSearch.fromQueryObject(filters), yAxis}), + useSpanMetricsSeries( + {search: MutableSearch.fromQueryObject(filters), yAxis}, + 'span-metrics-series' + ), { wrapper: Wrapper, initialProps: { @@ -141,12 +147,15 @@ describe('useSpanMetricsSeries', () => { body: {}, }); - const {rerender} = renderHook(({yAxis}) => useSpanMetricsSeries({yAxis}), { - wrapper: Wrapper, - initialProps: { - yAxis: ['avg(span.self_time)', 'spm()'] as SpanMetricsProperty[], - }, - }); + const {rerender} = renderHook( + ({yAxis}) => useSpanMetricsSeries({yAxis}, 'span-metrics-series'), + { + wrapper: Wrapper, + initialProps: { + yAxis: ['avg(span.self_time)', 'spm()'] as SpanMetricsProperty[], + }, + } + ); expect(eventsRequest).toHaveBeenLastCalledWith( '/organizations/org-slug/events-stats/', @@ -189,12 +198,15 @@ describe('useSpanMetricsSeries', () => { }, }); - const {result} = renderHook(({yAxis}) => useSpanMetricsSeries({yAxis}), { - wrapper: Wrapper, - initialProps: { - yAxis: ['spm()'] as SpanMetricsProperty[], - }, - }); + const {result} = renderHook( + ({yAxis}) => useSpanMetricsSeries({yAxis}, 'span-metrics-series'), + { + wrapper: Wrapper, + initialProps: { + yAxis: ['spm()'] as SpanMetricsProperty[], + }, + } + ); await waitFor(() => expect(result.current.isLoading).toEqual(false)); @@ -229,15 +241,18 @@ describe('useSpanMetricsSeries', () => { }, }); - const {result} = renderHook(({yAxis}) => useSpanMetricsSeries({yAxis}), { - wrapper: Wrapper, - initialProps: { - yAxis: [ - 'http_response_rate(3)', - 'http_response_rate(4)', - ] as SpanMetricsProperty[], - }, - }); + const {result} = renderHook( + ({yAxis}) => useSpanMetricsSeries({yAxis}, 'span-metrics-series'), + { + wrapper: Wrapper, + initialProps: { + yAxis: [ + 'http_response_rate(3)', + 'http_response_rate(4)', + ] as SpanMetricsProperty[], + }, + } + ); await waitFor(() => expect(result.current.isLoading).toEqual(false)); diff --git a/static/app/views/starfish/queries/useDiscoverSeries.ts b/static/app/views/starfish/queries/useDiscoverSeries.ts index 
ab0556de1cc0f7..032f2578ae80d3 100644 --- a/static/app/views/starfish/queries/useDiscoverSeries.ts +++ b/static/app/views/starfish/queries/useDiscoverSeries.ts @@ -21,22 +21,25 @@ interface UseMetricsSeriesOptions { } export const useSpanMetricsSeries = ( - options: UseMetricsSeriesOptions = {} + options: UseMetricsSeriesOptions = {}, + referrer: string ) => { - return useDiscoverSeries(options, DiscoverDatasets.SPANS_METRICS); + return useDiscoverSeries(options, DiscoverDatasets.SPANS_METRICS, referrer); }; export const useMetricsSeries = ( - options: UseMetricsSeriesOptions = {} + options: UseMetricsSeriesOptions = {}, + referrer: string ) => { - return useDiscoverSeries(options, DiscoverDatasets.METRICS); + return useDiscoverSeries(options, DiscoverDatasets.METRICS, referrer); }; const useDiscoverSeries = ( options: UseMetricsSeriesOptions = {}, - dataset: DiscoverDatasets + dataset: DiscoverDatasets, + referrer: string ) => { - const {search = undefined, yAxis = [], referrer = 'span-metrics-series'} = options; + const {search = undefined, yAxis = []} = options; const pageFilters = usePageFilters(); diff --git a/static/app/views/starfish/queries/useSpanSamples.tsx b/static/app/views/starfish/queries/useSpanSamples.tsx index a700834d140abf..3a7f2910893770 100644 --- a/static/app/views/starfish/queries/useSpanSamples.tsx +++ b/static/app/views/starfish/queries/useSpanSamples.tsx @@ -77,14 +77,16 @@ export const useSpanSamples = (options: Options) => { const dateCondtions = getDateConditions(pageFilter.selection); - const {isLoading: isLoadingSeries, data: spanMetricsSeriesData} = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject({'span.group': groupId, ...filters}), - yAxis: [`avg(${SPAN_SELF_TIME})`], - enabled: Object.values({'span.group': groupId, ...filters}).every(value => - Boolean(value) - ), - referrer: 'api.starfish.sidebar-span-metrics', - }); + const {isLoading: isLoadingSeries, data: spanMetricsSeriesData} = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject({'span.group': groupId, ...filters}), + yAxis: [`avg(${SPAN_SELF_TIME})`], + enabled: Object.values({'span.group': groupId, ...filters}).every(value => + Boolean(value) + ), + }, + 'api.starfish.sidebar-span-metrics' + ); const maxYValue = computeAxisMax([spanMetricsSeriesData?.[`avg(${SPAN_SELF_TIME})`]]); diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx index 6181db2af81fac..3eae9b017b294a 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx @@ -75,21 +75,25 @@ function DurationChart({ isLoading, data: spanMetricsSeriesData, error: spanMetricsSeriesError, - } = useSpanMetricsSeries({ - search: MutableSearch.fromQueryObject({...filters, ...additionalFilters}), - yAxis: [`avg(${SPAN_SELF_TIME})`], - enabled: Object.values({...filters, ...additionalFilters}).every(value => - Boolean(value) - ), - referrer: 'api.starfish.sidebar-span-metrics-chart', - }); + } = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject({...filters, ...additionalFilters}), + yAxis: [`avg(${SPAN_SELF_TIME})`], + enabled: Object.values({...filters, ...additionalFilters}).every(value => + Boolean(value) + ), + }, + 'api.starfish.sidebar-span-metrics-chart' + ); - const {data, error: spanMetricsError} = useSpanMetrics({ - search: 
MutableSearch.fromQueryObject(filters),
-    fields: [`avg(${SPAN_SELF_TIME})`, SPAN_OP],
-    enabled: Object.values(filters).every(value => Boolean(value)),
-    referrer: 'api.starfish.span-summary-panel-samples-table-avg',
-  });
+  const {data, error: spanMetricsError} = useSpanMetrics(
+    {
+      search: MutableSearch.fromQueryObject(filters),
+      fields: [`avg(${SPAN_SELF_TIME})`, SPAN_OP],
+      enabled: Object.values(filters).every(value => Boolean(value)),
+    },
+    'api.starfish.span-summary-panel-samples-table-avg'
+  );
 
   const spanMetrics = data[0] ?? {};
 
diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx
index 222c4bbba255a4..dfc2345e884e2b 100644
--- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx
+++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx
@@ -31,18 +31,20 @@ function SampleInfo(props: Props) {
     ribbonFilters['transaction.method'] = transactionMethod;
   }
 
-  const {data, error, isLoading} = useSpanMetrics({
-    search: MutableSearch.fromQueryObject(ribbonFilters),
-    fields: [
-      SpanMetricsField.SPAN_OP,
-      'spm()',
-      `sum(${SpanMetricsField.SPAN_SELF_TIME})`,
-      `avg(${SpanMetricsField.SPAN_SELF_TIME})`,
-      'time_spent_percentage()',
-    ],
-    enabled: Object.values(ribbonFilters).every(value => Boolean(value)),
-    referrer: 'api.starfish.span-summary-panel-metrics',
-  });
+  const {data, error, isLoading} = useSpanMetrics(
+    {
+      search: MutableSearch.fromQueryObject(ribbonFilters),
+      fields: [
+        SpanMetricsField.SPAN_OP,
+        'spm()',
+        `sum(${SpanMetricsField.SPAN_SELF_TIME})`,
+        `avg(${SpanMetricsField.SPAN_SELF_TIME})`,
+        'time_spent_percentage()',
+      ],
+      enabled: Object.values(ribbonFilters).every(value => Boolean(value)),
+    },
+    'api.starfish.span-summary-panel-metrics'
+  );
 
   const spanMetrics = data[0] ?? {};
 
diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx
index f52cf6e3c3ea09..3a185fdaa0528c 100644
--- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx
+++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx
@@ -65,14 +65,16 @@ function SampleTable({
     filters.release = release;
   }
 
-  const {data, isFetching: isFetchingSpanMetrics} = useSpanMetrics({
-    search: MutableSearch.fromQueryObject({...filters, ...additionalFilters}),
-    fields: [`avg(${SPAN_SELF_TIME})`, SPAN_OP],
-    enabled: Object.values({...filters, ...additionalFilters}).every(value =>
-      Boolean(value)
-    ),
-    referrer: 'api.starfish.span-summary-panel-samples-table-avg',
-  });
+  const {data, isFetching: isFetchingSpanMetrics} = useSpanMetrics(
+    {
+      search: MutableSearch.fromQueryObject({...filters, ...additionalFilters}),
+      fields: [`avg(${SPAN_SELF_TIME})`, SPAN_OP],
+      enabled: Object.values({...filters, ...additionalFilters}).every(value =>
+        Boolean(value)
+      ),
+    },
+    'api.starfish.span-summary-panel-samples-table-avg'
+  );
 
   const spanMetrics = data[0] ?? {};
 
From c25fe581fa7bcebda9b37cf9c6fc2fda37ada02c Mon Sep 17 00:00:00 2001
From: Matt Duncan <14761+mrduncan@users.noreply.github.com>
Date: Thu, 9 May 2024 08:54:31 -0700
Subject: [PATCH 211/376] chore(issues): Low effort typing improvements (#70559)

A handful of trivial follow-up improvements to #69828.
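The pattern is mechanical: test methods never return anything, so annotating them with `-> None` is always safe, and it opts their bodies into type checking once the module is listed in pyproject.toml. A minimal sketch of the before/after, using a hypothetical test class and assuming the `module` list in pyproject.toml feeds a stricter per-module mypy override such as `disallow_untyped_defs` (an assumption here, not spelled out in the diff):

```python
# Hypothetical example; not one of the files touched by this patch.
class ExampleTest:
    # Before: with `disallow_untyped_defs` enabled for the module, mypy
    # flags this signature, and the untyped body goes unchecked.
    def test_before(self):
        assert 1 + 1 == 2

    # After: `-> None` satisfies the check and lets mypy analyze the body.
    def test_after(self) -> None:
        assert 1 + 1 == 2
```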
For review: changes are all siloed to the individual files, so each file should be reviewable separately.
---
 pyproject.toml                                |   6 +
 .../issues/endpoints/test_actionable_items.py |   4 +-
 .../issues/endpoints/test_group_events.py     |  46 ++-
 .../endpoints/test_organization_activity.py   |   8 +-
 .../test_organization_group_index.py          | 272 +++++++++---------
 .../endpoints/test_organization_searches.py   |  60 ++--
 .../issues/endpoints/test_source_map_debug.py |  50 ++--
 .../sentry/issues/test_occurrence_consumer.py |   8 +-
 .../issues/test_status_change_consumer.py     |  13 +-
 9 files changed, 236 insertions(+), 231 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 04fd6a08651e16..4e930afa0ff04d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -641,6 +641,10 @@ module = [
     "tests.sentry.grouping.test_fingerprinting",
     "tests.sentry.issues",
     "tests.sentry.issues.endpoints",
+    "tests.sentry.issues.endpoints.test_actionable_items",
+    "tests.sentry.issues.endpoints.test_organization_activity",
+    "tests.sentry.issues.endpoints.test_organization_searches",
+    "tests.sentry.issues.endpoints.test_source_map_debug",
     "tests.sentry.issues.test_escalating_issues_alg",
     "tests.sentry.issues.test_group_attributes_dataset",
     "tests.sentry.issues.test_grouptype",
@@ -649,9 +653,11 @@
     "tests.sentry.issues.test_issue_occurrence",
     "tests.sentry.issues.test_json_schemas",
     "tests.sentry.issues.test_merge",
+    "tests.sentry.issues.test_occurrence_consumer",
     "tests.sentry.issues.test_ongoing",
     "tests.sentry.issues.test_search_issues_dataset",
     "tests.sentry.issues.test_status_change",
+    "tests.sentry.issues.test_status_change_consumer",
     "tests.sentry.issues.test_update_inbox",
     "tests.sentry.relay.config.test_metric_extraction",
     "tests.sentry.tasks.test_on_demand_metrics",
diff --git a/tests/sentry/issues/endpoints/test_actionable_items.py b/tests/sentry/issues/endpoints/test_actionable_items.py
index 2a7414bf43a2b3..601be3b848ab68 100644
--- a/tests/sentry/issues/endpoints/test_actionable_items.py
+++ b/tests/sentry/issues/endpoints/test_actionable_items.py
@@ -17,7 +17,7 @@ def setUp(self) -> None:
         self.login_as(self.user)
         return super().setUp()
 
-    def test_missing_event(self):
+    def test_missing_event(self) -> None:
         resp = self.get_error_response(
             self.organization.slug,
             self.project.slug,
             "invalid_event_id",
         )
         assert resp.data["detail"] == "Event not found"
 
-    def test_orders_event_errors_by_priority(self):
+    def test_orders_event_errors_by_priority(self) -> None:
         event = self.store_event(
             data={
                 "event_id": "a" * 32,
diff --git a/tests/sentry/issues/endpoints/test_group_events.py b/tests/sentry/issues/endpoints/test_group_events.py
index 38f3ddc31b5290..2889e7bb4bae4b 100644
--- a/tests/sentry/issues/endpoints/test_group_events.py
+++ b/tests/sentry/issues/endpoints/test_group_events.py
@@ -10,15 +10,13 @@ class GroupEventsTest(APITestCase, SnubaTestCase, SearchIssueTestMixin, PerformanceIssueTestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.min_ago = before_now(minutes=1)
         self.two_min_ago = before_now(minutes=2)
-        self.features = {}
 
-    def do_request(self, url):
-        with self.feature(self.features):
-            return self.client.get(url, format="json")
+    def do_request(self, url: str):
+        return self.client.get(url, format="json")
 
     def _parse_links(self, header):
         # links come in {url: {...attrs}}, but we need {rel: {...attrs}}
         attrs["href"] = url
         return links
 
-    def test_simple(self):
+    def test_simple(self) -> None:
self.login_as(user=self.user) event_1 = self.store_event( @@ -60,7 +58,7 @@ def test_simple(self): assert "context" not in response.data[0] assert "context" not in response.data[1] - def test_full_false(self): + def test_full_false(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( @@ -91,7 +89,7 @@ def test_full_false(self): assert "context" not in response.data[0] assert "context" not in response.data[1] - def test_full_true(self): + def test_full_true(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( @@ -120,7 +118,7 @@ def test_full_true(self): assert "context" in response.data[0] assert "context" in response.data[1] - def test_tags(self): + def test_tags(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( data={ @@ -186,7 +184,7 @@ def test_tags(self): assert len(response.data) == 2 assert {e["eventID"] for e in response.data} == {event_1.event_id, event_2.event_id} - def test_search_event_by_id(self): + def test_search_event_by_id(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( data={ @@ -211,7 +209,7 @@ def test_search_event_by_id(self): assert len(response.data) == 1 assert response.data[0]["eventID"] == event_1.event_id - def test_search_event_by_message(self): + def test_search_event_by_message(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( @@ -256,7 +254,7 @@ def test_search_event_by_message(self): [str(event_1.event_id), str(event_2.event_id)] ) - def test_search_by_release(self): + def test_search_by_release(self) -> None: self.login_as(user=self.user) self.create_release(self.project, version="first-release") event_1 = self.store_event( @@ -275,7 +273,7 @@ def test_search_by_release(self): assert len(response.data) == 1 assert response.data[0]["eventID"] == event_1.event_id - def test_environment(self): + def test_environment(self) -> None: self.login_as(user=self.user) events = {} @@ -320,7 +318,7 @@ def test_environment(self): assert response.status_code == 200, response.content assert response.data == [] - def test_filters_based_on_retention(self): + def test_filters_based_on_retention(self) -> None: self.login_as(user=self.user) self.store_event( data={"fingerprint": ["group_1"], "timestamp": iso_format(before_now(days=2))}, @@ -339,7 +337,7 @@ def test_filters_based_on_retention(self): assert len(response.data) == 1 assert sorted(map(lambda x: x["eventID"], response.data)) == sorted([str(event_2.event_id)]) - def test_search_event_has_tags(self): + def test_search_event_has_tags(self) -> None: self.login_as(user=self.user) event = self.store_event( data={ @@ -357,7 +355,7 @@ def test_search_event_has_tags(self): assert {"key": "logger", "value": "python"} in response.data[0]["tags"] @freeze_time() - def test_date_filters(self): + def test_date_filters(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( data={"timestamp": iso_format(before_now(days=5)), "fingerprint": ["group-1"]}, @@ -384,14 +382,14 @@ def test_date_filters(self): assert len(response.data) == 1 assert response.data[0]["eventID"] == str(event_2.event_id) - def test_invalid_period(self): + def test_invalid_period(self) -> None: self.login_as(user=self.user) first_seen = timezone.now() - timedelta(days=5) group = self.create_group(first_seen=first_seen) response = self.client.get(f"/api/0/issues/{group.id}/events/", data={"statsPeriod": "lol"}) assert response.status_code == 400 - def test_invalid_query(self): + def test_invalid_query(self) -> None: 
self.login_as(user=self.user) first_seen = timezone.now() - timedelta(days=5) group = self.create_group(first_seen=first_seen) @@ -401,7 +399,7 @@ def test_invalid_query(self): ) assert response.status_code == 400 - def test_multiple_group(self): + def test_multiple_group(self) -> None: self.login_as(user=self.user) event_1 = self.store_event( @@ -430,7 +428,7 @@ def test_multiple_group(self): assert len(response.data) == 1, response.data assert list(map(lambda x: x["eventID"], response.data)) == [str(event.event_id)] - def test_pagination(self): + def test_pagination(self) -> None: self.login_as(user=self.user) for _ in range(2): @@ -452,7 +450,7 @@ def test_pagination(self): assert links["next"]["results"] == "true" assert len(response.data) == 1 - def test_orderby(self): + def test_orderby(self) -> None: self.login_as(user=self.user) event = self.store_event( @@ -480,7 +478,7 @@ def test_orderby(self): assert response.data[0]["eventID"] == "a" * 32 assert response.data[1]["eventID"] == "b" * 32 - def test_perf_issue(self): + def test_perf_issue(self) -> None: event_1 = self.create_performance_issue() event_2 = self.create_performance_issue() @@ -494,7 +492,7 @@ def test_perf_issue(self): [str(event_1.event_id), str(event_2.event_id)] ) - def test_generic_issue(self): + def test_generic_issue(self) -> None: event_1, _, group_info = self.store_search_issue( self.project.id, self.user.id, @@ -521,7 +519,7 @@ def test_generic_issue(self): [str(event_1.event_id), str(event_2.event_id)] ) - def test_sample(self): + def test_sample(self) -> None: """Test that random=true doesn't blow up. We can't really test if they're in random order.""" self.login_as(user=self.user) diff --git a/tests/sentry/issues/endpoints/test_organization_activity.py b/tests/sentry/issues/endpoints/test_organization_activity.py index 53016b51d6eb50..a9deb83515adbe 100644 --- a/tests/sentry/issues/endpoints/test_organization_activity.py +++ b/tests/sentry/issues/endpoints/test_organization_activity.py @@ -10,15 +10,15 @@ class OrganizationActivityTest(APITestCase): endpoint = "sentry-api-0-organization-activity" - def setUp(self): + def setUp(self) -> None: super().setUp() self.login_as(self.user) - def test_empty(self): + def test_empty(self) -> None: response = self.get_success_response(self.organization.slug) assert response.data == [] - def test_simple(self): + def test_simple(self) -> None: group = self.group org = group.organization @@ -33,7 +33,7 @@ def test_simple(self): response = self.get_success_response(org.slug) assert [r["id"] for r in response.data] == [str(activity.id)] - def test_paginate(self): + def test_paginate(self) -> None: group = self.group org = group.organization project_2 = self.create_project() diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py index 9c029cec859c1a..2e3b810c0876bb 100644 --- a/tests/sentry/issues/endpoints/test_organization_group_index.py +++ b/tests/sentry/issues/endpoints/test_organization_group_index.py @@ -67,7 +67,7 @@ class GroupListTest(APITestCase, SnubaTestCase, SearchIssueTestMixin): endpoint = "sentry-api-0-organization-group-index" - def setUp(self): + def setUp(self) -> None: super().setUp() self.min_ago = before_now(minutes=1) @@ -86,7 +86,7 @@ def get_response(self, *args, **kwargs): org = args[0] return super().get_response(org, **kwargs) - def test_sort_by_date_with_tag(self): + def test_sort_by_date_with_tag(self) -> None: # XXX(dcramer): this tests a case where an 
ambiguous column name existed event = self.store_event( data={"event_id": "a" * 32, "timestamp": iso_format(before_now(seconds=1))}, @@ -99,7 +99,7 @@ def test_sort_by_date_with_tag(self): assert len(response.data) == 1 assert response.data[0]["id"] == str(group.id) - def test_query_for_archived(self): + def test_query_for_archived(self) -> None: event = self.store_event( data={"event_id": "a" * 32, "timestamp": iso_format(before_now(seconds=1))}, project_id=self.project.id, @@ -121,7 +121,7 @@ def test_query_for_archived(self): "sentry.search.snuba.executors.GroupAttributesPostgresSnubaQueryExecutor.query", side_effect=GroupAttributesPostgresSnubaQueryExecutor.query, ) - def test_sort_by_trends(self, mock_query): + def test_sort_by_trends(self, mock_query) -> None: group = self.store_event( data={ "timestamp": iso_format(before_now(seconds=10)), @@ -183,7 +183,7 @@ def test_sort_by_trends(self, mock_query): assert [item["id"] for item in response.data] == [str(group.id), str(group_2.id)] assert not mock_query.called - def test_sort_by_inbox(self): + def test_sort_by_inbox(self) -> None: group_1 = self.store_event( data={ "event_id": "a" * 32, @@ -218,7 +218,7 @@ def test_sort_by_inbox(self): ) assert [item["id"] for item in response.data] == [str(group_2.id)] - def test_sort_by_inbox_me_or_none(self): + def test_sort_by_inbox_me_or_none(self) -> None: group_1 = self.store_event( data={ "event_id": "a" * 32, @@ -303,7 +303,7 @@ def test_sort_by_inbox_me_or_none(self): ) assert [item["id"] for item in response.data] == [str(group_1.id), str(group_2.id)] - def test_trace_search(self): + def test_trace_search(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -329,7 +329,7 @@ def test_trace_search(self): assert len(response.data) == 1 assert response.data[0]["id"] == str(event.group.id) - def test_feature_gate(self): + def test_feature_gate(self) -> None: # ensure there are two or more projects self.create_project(organization=self.project.organization) self.login_as(user=self.user) @@ -342,13 +342,13 @@ def test_feature_gate(self): response = self.get_response() assert response.status_code == 200 - def test_replay_feature_gate(self): + def test_replay_feature_gate(self) -> None: # allow replays to query for backend self.create_project(organization=self.project.organization) self.login_as(user=self.user) self.get_success_response(extra_headers={"HTTP_X-Sentry-Replay-Request": "1"}) - def test_with_all_projects(self): + def test_with_all_projects(self) -> None: # ensure there are two or more projects self.create_project(organization=self.project.organization) self.login_as(user=self.user) @@ -357,7 +357,7 @@ def test_with_all_projects(self): response = self.get_success_response(project_id=[-1]) assert response.status_code == 200 - def test_boolean_search_feature_flag(self): + def test_boolean_search_feature_flag(self) -> None: self.login_as(user=self.user) response = self.get_response(sort_by="date", query="title:hello OR title:goodbye") assert response.status_code == 400 @@ -373,7 +373,7 @@ def test_boolean_search_feature_flag(self): == 'Error parsing search query: Boolean statements containing "OR" or "AND" are not supported in this search' ) - def test_invalid_query(self): + def test_invalid_query(self) -> None: now = timezone.now() self.create_group(last_seen=now - timedelta(seconds=1)) self.login_as(user=self.user) @@ -382,7 +382,7 @@ def test_invalid_query(self): assert response.status_code == 400 assert "Invalid number" in response.data["detail"] - def 
test_valid_numeric_query(self): + def test_valid_numeric_query(self) -> None: now = timezone.now() self.create_group(last_seen=now - timedelta(seconds=1)) self.login_as(user=self.user) @@ -390,7 +390,7 @@ def test_valid_numeric_query(self): response = self.get_response(sort_by="date", query="timesSeen:>1k") assert response.status_code == 200 - def test_invalid_sort_key(self): + def test_invalid_sort_key(self) -> None: now = timezone.now() self.create_group(last_seen=now - timedelta(seconds=1)) self.login_as(user=self.user) @@ -398,7 +398,7 @@ def test_invalid_sort_key(self): response = self.get_response(sort="meow", query="is:unresolved") assert response.status_code == 400 - def test_simple_pagination(self): + def test_simple_pagination(self) -> None: event1 = self.store_event( data={"timestamp": iso_format(before_now(seconds=2)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -429,7 +429,7 @@ def test_simple_pagination(self): assert links["previous"]["results"] == "true" assert links["next"]["results"] == "false" - def test_stats_period(self): + def test_stats_period(self) -> None: # TODO(dcramer): this test really only checks if validation happens # on groupStatsPeriod now = timezone.now() @@ -444,7 +444,7 @@ def test_stats_period(self): response = self.get_response(groupStatsPeriod="48h") assert response.status_code == 400 - def test_environment(self): + def test_environment(self) -> None: self.store_event( data={ "fingerprint": ["put-me-in-group1"], @@ -470,7 +470,7 @@ def test_environment(self): response = self.get_response(environment="garbage") assert response.status_code == 404 - def test_project(self): + def test_project(self) -> None: self.store_event( data={ "fingerprint": ["put-me-in-group1"], @@ -485,7 +485,7 @@ def test_project(self): response = self.get_success_response(query=f"project:{project.slug}") assert len(response.data) == 1 - def test_auto_resolved(self): + def test_auto_resolved(self) -> None: project = self.project project.update_option("sentry:resolve_age", 1) self.store_event( @@ -503,7 +503,7 @@ def test_auto_resolved(self): assert len(response.data) == 1 assert response.data[0]["id"] == str(group2.id) - def test_perf_issue(self): + def test_perf_issue(self) -> None: perf_group = self.create_group(type=PerformanceNPlusOneGroupType.type_id) self.login_as(user=self.user) with self.feature( @@ -515,7 +515,7 @@ def test_perf_issue(self): assert len(response.data) == 1 assert response.data[0]["id"] == str(perf_group.id) - def test_lookup_by_event_id(self): + def test_lookup_by_event_id(self) -> None: project = self.project project.update_option("sentry:resolve_age", 1) event_id = "c" * 32 @@ -532,7 +532,7 @@ def test_lookup_by_event_id(self): assert response.data[0]["id"] == str(event.group.id) assert response.data[0]["matchingEventId"] == event_id - def test_lookup_by_event_id_incorrect_project_id(self): + def test_lookup_by_event_id_incorrect_project_id(self) -> None: self.store_event( data={"event_id": "a" * 32, "timestamp": iso_format(self.min_ago)}, project_id=self.project.id, @@ -555,7 +555,7 @@ def test_lookup_by_event_id_incorrect_project_id(self): assert response.data[0]["id"] == str(event.group.id) assert response.data[0]["matchingEventId"] == event_id - def test_lookup_by_event_id_with_whitespace(self): + def test_lookup_by_event_id_with_whitespace(self) -> None: project = self.project project.update_option("sentry:resolve_age", 1) event_id = "c" * 32 @@ -571,7 +571,7 @@ def test_lookup_by_event_id_with_whitespace(self): assert 
response.data[0]["id"] == str(event.group.id) assert response.data[0]["matchingEventId"] == event_id - def test_lookup_by_unknown_event_id(self): + def test_lookup_by_unknown_event_id(self) -> None: project = self.project project.update_option("sentry:resolve_age", 1) self.create_group() @@ -581,7 +581,7 @@ def test_lookup_by_unknown_event_id(self): response = self.get_success_response(query="c" * 32) assert len(response.data) == 0 - def test_lookup_by_short_id(self): + def test_lookup_by_short_id(self) -> None: group = self.group short_id = group.qualified_short_id @@ -590,7 +590,7 @@ def test_lookup_by_short_id(self): assert len(response.data) == 1 assert response["X-Sentry-Direct-Hit"] == "1" - def test_lookup_by_short_id_alias(self): + def test_lookup_by_short_id_alias(self) -> None: event_id = "f" * 32 group = self.store_event( data={"event_id": event_id, "timestamp": iso_format(before_now(seconds=1))}, @@ -603,7 +603,7 @@ def test_lookup_by_short_id_alias(self): assert len(response.data) == 1 assert response["X-Sentry-Direct-Hit"] == "1" - def test_lookup_by_multiple_short_id_alias(self): + def test_lookup_by_multiple_short_id_alias(self) -> None: self.login_as(self.user) project = self.project project2 = self.create_project(name="baz", organization=project.organization) @@ -623,7 +623,7 @@ def test_lookup_by_multiple_short_id_alias(self): assert len(response.data) == 2 assert response.get("X-Sentry-Direct-Hit") != "1" - def test_lookup_by_short_id_ignores_project_list(self): + def test_lookup_by_short_id_ignores_project_list(self) -> None: organization = self.create_organization() project = self.create_project(organization=organization) project2 = self.create_project(organization=organization) @@ -641,7 +641,7 @@ def test_lookup_by_short_id_ignores_project_list(self): assert len(response.data) == 1 assert response.get("X-Sentry-Direct-Hit") == "1" - def test_lookup_by_short_id_no_perms(self): + def test_lookup_by_short_id_no_perms(self) -> None: organization = self.create_organization() project = self.create_project(organization=organization) group = self.create_group(project=project) @@ -656,7 +656,7 @@ def test_lookup_by_short_id_no_perms(self): assert len(response.data) == 0 assert response.get("X-Sentry-Direct-Hit") != "1" - def test_lookup_by_group_id(self): + def test_lookup_by_group_id(self) -> None: self.login_as(user=self.user) response = self.get_success_response(group=self.group.id) assert len(response.data) == 1 @@ -665,7 +665,7 @@ def test_lookup_by_group_id(self): response = self.get_success_response(group=[self.group.id, group_2.id]) assert {g["id"] for g in response.data} == {str(self.group.id), str(group_2.id)} - def test_lookup_by_group_id_no_perms(self): + def test_lookup_by_group_id_no_perms(self) -> None: organization = self.create_organization() project = self.create_project(organization=organization) group = self.create_group(project=project) @@ -675,7 +675,7 @@ def test_lookup_by_group_id_no_perms(self): response = self.get_response(group=[group.id]) assert response.status_code == 403 - def test_lookup_by_first_release(self): + def test_lookup_by_first_release(self) -> None: self.login_as(self.user) project = self.project project2 = self.create_project(name="baz", organization=project.organization) @@ -700,7 +700,7 @@ def test_lookup_by_first_release(self): assert int(issues[0]["id"]) == event2.group.id assert int(issues[1]["id"]) == event.group.id - def test_lookup_by_release(self): + def test_lookup_by_release(self) -> None: self.login_as(self.user) project 
= self.project release = Release.objects.create(organization=project.organization, version="12345") @@ -718,7 +718,7 @@ def test_lookup_by_release(self): assert len(issues) == 1 assert int(issues[0]["id"]) == event.group.id - def test_lookup_by_release_wildcard(self): + def test_lookup_by_release_wildcard(self) -> None: self.login_as(self.user) project = self.project release = Release.objects.create(organization=project.organization, version="12345") @@ -736,7 +736,7 @@ def test_lookup_by_release_wildcard(self): assert len(issues) == 1 assert int(issues[0]["id"]) == event.group.id - def test_lookup_by_regressed_in_release(self): + def test_lookup_by_regressed_in_release(self) -> None: self.login_as(self.user) project = self.project release = self.create_release() @@ -752,7 +752,7 @@ def test_lookup_by_regressed_in_release(self): issues = json.loads(response.content) assert [int(issue["id"]) for issue in issues] == [event.group.id] - def test_pending_delete_pending_merge_excluded(self): + def test_pending_delete_pending_merge_excluded(self) -> None: events = [] for i in "abcd": events.append( @@ -775,7 +775,7 @@ def test_pending_delete_pending_merge_excluded(self): assert len(response.data) == 1 assert response.data[0]["id"] == str(events[1].group.id) - def test_filters_based_on_retention(self): + def test_filters_based_on_retention(self) -> None: self.login_as(user=self.user) self.create_group(last_seen=timezone.now() - timedelta(days=2)) @@ -785,7 +785,7 @@ def test_filters_based_on_retention(self): assert len(response.data) == 0 - def test_token_auth(self): + def test_token_auth(self) -> None: with assume_test_silo_mode(SiloMode.CONTROL): token = ApiToken.objects.create(user=self.user, scope_list=["event:read"]) response = self.client.get( @@ -795,7 +795,7 @@ def test_token_auth(self): ) assert response.status_code == 200, response.content - def test_date_range(self): + def test_date_range(self) -> None: with self.options({"system.event-retention-days": 2}): event = self.store_event( data={"timestamp": iso_format(before_now(hours=5))}, project_id=self.project.id @@ -812,7 +812,7 @@ def test_date_range(self): assert len(response.data) == 0 @patch("sentry.analytics.record") - def test_advanced_search_errors(self, mock_record): + def test_advanced_search_errors(self, mock_record) -> None: self.login_as(user=self.user) response = self.get_response(sort_by="date", query="!has:user") assert response.status_code == 200, response.data @@ -839,7 +839,7 @@ def test_advanced_search_errors(self, mock_record): # the orderby being sent to snuba for a certain call. This function has a simple # return value and can be used to set variables in the snuba payload. 
@patch("sentry.utils.snuba.get_query_params_to_update_for_projects") - def test_assigned_to_pagination(self, patched_params_update): + def test_assigned_to_pagination(self, patched_params_update) -> None: old_sample_size = options.get("snuba.search.hits-sample-size") assert options.set("snuba.search.hits-sample-size", 1) @@ -897,7 +897,7 @@ def _my_patched_params(query_params, **kwargs): assert options.set("snuba.search.hits-sample-size", old_sample_size) - def test_assigned_me_none(self): + def test_assigned_me_none(self) -> None: self.login_as(user=self.user) groups = [] for i in range(5): @@ -924,7 +924,7 @@ def test_assigned_me_none(self): response = self.get_response(limit=10, query="assigned:[me, none]") assert len(response.data) == 4 - def test_seen_stats(self): + def test_seen_stats(self) -> None: self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1020,7 +1020,7 @@ def test_seen_stats(self): before_now_100_seconds ).replace(tzinfo=UTC) - def test_semver_seen_stats(self): + def test_semver_seen_stats(self) -> None: release_1 = self.create_release(version="test@1.2.3") release_2 = self.create_release(version="test@1.2.4") release_3 = self.create_release(version="test@1.2.5") @@ -1092,7 +1092,7 @@ def test_semver_seen_stats(self): assert int(group_data["lifetime"]["count"]) == 3 assert int(group_data["filtered"]["count"]) == 1 - def test_inbox_search(self): + def test_inbox_search(self) -> None: self.store_event( data={ "timestamp": iso_format(before_now(seconds=200)), @@ -1132,7 +1132,7 @@ def test_inbox_search(self): assert response.data[0]["inbox"] is not None assert response.data[0]["inbox"]["reason"] == GroupInboxReason.NEW.value - def test_inbox_search_outside_retention(self): + def test_inbox_search_outside_retention(self) -> None: self.login_as(user=self.user) response = self.get_response( sort="inbox", @@ -1146,7 +1146,7 @@ def test_inbox_search_outside_retention(self): assert response.status_code == 200 assert len(response.data) == 0 - def test_assigned_or_suggested_search(self): + def test_assigned_or_suggested_search(self) -> None: event = self.store_event( data={ "timestamp": iso_format(before_now(seconds=180)), @@ -1325,7 +1325,7 @@ def test_assigned_or_suggested_search(self): assert response.status_code == 200 assert len(response.data) == 0 - def test_semver(self): + def test_semver(self) -> None: release_1 = self.create_release(version="test@1.2.3") release_2 = self.create_release(version="test@1.2.4") release_3 = self.create_release(version="test@1.2.5") @@ -1416,7 +1416,7 @@ def test_semver(self): release_3_g_2, ] - def test_release_stage(self): + def test_release_stage(self) -> None: replaced_release = self.create_release( version="replaced_release", environments=[self.environment], @@ -1520,7 +1520,7 @@ def test_release_stage(self): adopted_release_g_2, ] - def test_semver_package(self): + def test_semver_package(self) -> None: release_1 = self.create_release(version="test@1.2.3") release_2 = self.create_release(version="test2@1.2.4") @@ -1564,7 +1564,7 @@ def test_semver_package(self): release_2_g_1, ] - def test_semver_build(self): + def test_semver_build(self) -> None: release_1 = self.create_release(version="test@1.2.3+123") release_2 = self.create_release(version="test2@1.2.4+124") @@ -1609,7 +1609,7 @@ def test_semver_build(self): response = self.get_response(sort_by="date", limit=10, query=f"{SEMVER_BUILD_ALIAS}:[124]") assert response.status_code == 400, response.content - 
def test_aggregate_stats_regression_test(self): + def test_aggregate_stats_regression_test(self) -> None: self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1623,7 +1623,7 @@ def test_aggregate_stats_regression_test(self): assert response.status_code == 200 assert len(response.data) == 1 - def test_skipped_fields(self): + def test_skipped_fields(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1650,7 +1650,7 @@ def test_skipped_fields(self): assert response.data[0]["lifetime"] is not None assert response.data[0]["filtered"] is not None - def test_inbox_fields(self): + def test_inbox_fields(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1684,7 +1684,7 @@ def test_inbox_fields(self): assert response.data[0]["inbox"]["reason"] == GroupInboxReason.UNIGNORED.value assert response.data[0]["inbox"]["reason_details"] == snooze_details - def test_inbox_fields_issue_states(self): + def test_inbox_fields_issue_states(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1716,7 +1716,7 @@ def test_inbox_fields_issue_states(self): assert response.data[0]["inbox"]["reason"] == GroupInboxReason.ONGOING.value assert response.data[0]["inbox"]["reason_details"] == snooze_details - def test_expand_string(self): + def test_expand_string(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1732,7 +1732,7 @@ def test_expand_string(self): assert response.data[0]["inbox"]["reason"] == GroupInboxReason.NEW.value assert response.data[0]["inbox"]["reason_details"] is None - def test_expand_plugin_actions_and_issues(self): + def test_expand_plugin_actions_and_issues(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1756,7 +1756,7 @@ def test_expand_plugin_actions_and_issues(self): assert "pluginActions" not in response.data[0] assert "pluginIssues" not in response.data[0] - def test_expand_integration_issues(self): + def test_expand_integration_issues(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1807,7 +1807,7 @@ def test_expand_integration_issues(self): assert response.data[0]["integrationIssues"][0]["title"] == external_issue_1.title assert response.data[0]["integrationIssues"][1]["title"] == external_issue_2.title - def test_expand_sentry_app_issues(self): + def test_expand_sentry_app_issues(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1862,7 +1862,7 @@ def test_expand_sentry_app_issues(self): assert response.data[0]["sentryAppIssues"][1]["displayName"] == issue_2.display_name @with_feature("organizations:event-attachments") - def test_expand_has_attachments(self): + def test_expand_has_attachments(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1902,7 +1902,7 @@ def 
test_expand_has_attachments(self): assert response.status_code == 200 assert response.data[0]["hasAttachments"] is True - def test_expand_owners(self): + def test_expand_owners(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1963,7 +1963,7 @@ def test_expand_owners(self): ) assert response.data[0]["owners"][2]["type"] == GROUP_OWNER_TYPE[GroupOwnerType.CODEOWNERS] - def test_filter_not_unresolved(self): + def test_filter_not_unresolved(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1976,7 +1976,7 @@ def test_filter_not_unresolved(self): assert response.status_code == 200 assert [int(r["id"]) for r in response.data] == [event.group.id] - def test_default_search(self): + def test_default_search(self) -> None: event1 = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -1993,7 +1993,7 @@ def test_default_search(self): assert [int(r["id"]) for r in response.data] == [event1.group.id] @with_feature("organizations:issue-priority-ui") - def test_default_search_with_priority(self): + def test_default_search_with_priority(self) -> None: event1 = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2017,7 +2017,7 @@ def test_default_search_with_priority(self): assert response.status_code == 200 assert [int(r["id"]) for r in response.data] == [event1.group.id] - def test_collapse_stats(self): + def test_collapse_stats(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2037,7 +2037,7 @@ def test_collapse_stats(self): assert "lifetime" not in response.data[0] assert "filtered" not in response.data[0] - def test_collapse_lifetime(self): + def test_collapse_lifetime(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2056,7 +2056,7 @@ def test_collapse_lifetime(self): assert "lifetime" not in response.data[0] assert "filtered" in response.data[0] - def test_collapse_filtered(self): + def test_collapse_filtered(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2075,7 +2075,7 @@ def test_collapse_filtered(self): assert "lifetime" in response.data[0] assert "filtered" not in response.data[0] - def test_collapse_lifetime_and_filtered(self): + def test_collapse_lifetime_and_filtered(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2094,7 +2094,7 @@ def test_collapse_lifetime_and_filtered(self): assert "lifetime" not in response.data[0] assert "filtered" not in response.data[0] - def test_collapse_base(self): + def test_collapse_base(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2116,7 +2116,7 @@ def test_collapse_base(self): assert "lifetime" in response.data[0] assert "filtered" in response.data[0] - def test_collapse_stats_group_snooze_bug(self): + def test_collapse_stats_group_snooze_bug(self) -> 
None: # There was a bug where we tried to access attributes on seen_stats if this feature is active # but seen_stats could be null when we collapse stats. event = self.store_event( @@ -2140,7 +2140,7 @@ def test_collapse_stats_group_snooze_bug(self): assert int(response.data[0]["id"]) == event.group.id @with_feature("organizations:issue-stream-performance") - def test_collapse_unhandled(self): + def test_collapse_unhandled(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2154,7 +2154,7 @@ def test_collapse_unhandled(self): assert int(response.data[0]["id"]) == event.group.id assert "isUnhandled" not in response.data[0] - def test_selected_saved_search(self): + def test_selected_saved_search(self) -> None: saved_search = SavedSearch.objects.create( name="Saved Search", query="ZeroDivisionError", @@ -2191,7 +2191,7 @@ def test_selected_saved_search(self): assert len(response.data) == 1 assert int(response.data[0]["id"]) == event.group.id - def test_pinned_saved_search(self): + def test_pinned_saved_search(self) -> None: SavedSearch.objects.create( name="Saved Search", query="ZeroDivisionError", @@ -2228,7 +2228,7 @@ def test_pinned_saved_search(self): assert len(response.data) == 1 assert int(response.data[0]["id"]) == event.group.id - def test_pinned_saved_search_with_query(self): + def test_pinned_saved_search_with_query(self) -> None: SavedSearch.objects.create( name="Saved Search", query="TypeError", @@ -2266,7 +2266,7 @@ def test_pinned_saved_search_with_query(self): assert len(response.data) == 1 assert int(response.data[0]["id"]) == event.group.id - def test_query_status_and_substatus_overlapping(self): + def test_query_status_and_substatus_overlapping(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2320,7 +2320,7 @@ def test_query_status_and_substatus_overlapping(self): == [event.group.id] ) - def test_query_status_and_substatus_nonoverlapping(self): + def test_query_status_and_substatus_nonoverlapping(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2376,7 +2376,7 @@ def test_query_status_and_substatus_nonoverlapping(self): side_effect=GroupAttributesPostgresSnubaQueryExecutor.query, autospec=True, ) - def test_use_group_snuba_dataset(self, mock_query): + def test_use_group_snuba_dataset(self, mock_query) -> None: self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2387,7 +2387,7 @@ def test_use_group_snuba_dataset(self, mock_query): assert mock_query.call_count == 1 @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_order_by_first_seen_of_issue(self): + def test_snuba_order_by_first_seen_of_issue(self) -> None: # issue 1: issue 10 minutes ago time = datetime.now() - timedelta(minutes=10) event1 = self.store_event( @@ -2424,7 +2424,7 @@ def test_snuba_order_by_first_seen_of_issue(self): autospec=True, ) @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_order_by_freq(self, mock_query): + def test_snuba_order_by_freq(self, mock_query) -> None: event1 = self.store_event( data={"timestamp": iso_format(before_now(seconds=3)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2457,7 +2457,7 @@ def 
test_snuba_order_by_freq(self, mock_query): autospec=True, ) @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_order_by_user_count(self, mock_query): + def test_snuba_order_by_user_count(self, mock_query) -> None: user1 = { "email": "foo@example.com", } @@ -2525,7 +2525,7 @@ def test_snuba_order_by_user_count(self, mock_query): assert mock_query.call_count == 1 @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_assignee_filter(self): + def test_snuba_assignee_filter(self) -> None: # issue 1: assigned to user time = datetime.now() - timedelta(minutes=10) @@ -2774,7 +2774,7 @@ def test_snuba_assignee_filter(self): ) assert [int(row["id"]) for row in response.data] == expected_group_ids - def test_snuba_unsupported_filters(self): + def test_snuba_unsupported_filters(self) -> None: self.login_as(user=self.user) for query in [ "regressed_in_release:latest", @@ -2797,7 +2797,7 @@ def test_snuba_unsupported_filters(self): autospec=True, ) @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_query_title(self, mock_query): + def test_snuba_query_title(self, mock_query) -> None: self.project = self.create_project(organization=self.organization) event1 = self.store_event( data={"fingerprint": ["group-1"], "message": "MyMessage"}, @@ -2826,7 +2826,7 @@ def test_snuba_query_title(self, mock_query): ) @override_options({"issues.group_attributes.send_kafka": True}) @with_feature("organizations:issue-platform") - def test_snuba_perf_issue(self, mock_query): + def test_snuba_perf_issue(self, mock_query) -> None: self.project = self.create_project(organization=self.organization) # create a performance issue _, _, group_info = self.store_search_issue( @@ -2889,7 +2889,7 @@ def test_snuba_perf_issue(self, mock_query): @with_feature("organizations:issue-platform") @with_feature(PerformanceRenderBlockingAssetSpanGroupType.build_visible_feature_name()) @with_feature(PerformanceNPlusOneGroupType.build_visible_feature_name()) - def test_snuba_type_and_category(self, mock_query, mock_should_create_group): + def test_snuba_type_and_category(self, mock_query, mock_should_create_group) -> None: self.project = self.create_project(organization=self.organization) # create a render blocking issue _, _, group_info = self.store_search_issue( @@ -2971,7 +2971,7 @@ def test_snuba_type_and_category(self, mock_query, mock_should_create_group): assert len(response.data) == 0 @override_options({"issues.group_attributes.send_kafka": True}) - def test_pagination_and_x_hits_header(self): + def test_pagination_and_x_hits_header(self) -> None: # Create 30 issues for i in range(30): self.store_event( @@ -3025,7 +3025,7 @@ def test_pagination_and_x_hits_header(self): assert prev_obj["results"] == "true" @override_options({"issues.group_attributes.send_kafka": True}) - def test_find_error_by_message_with_snuba_only_search(self): + def test_find_error_by_message_with_snuba_only_search(self) -> None: self.login_as(user=self.user) project = self.project # Simulate sending an event with Kafka enabled @@ -3260,7 +3260,7 @@ class GroupUpdateTest(APITestCase, SnubaTestCase): endpoint = "sentry-api-0-organization-group-index" method = "put" - def setUp(self): + def setUp(self) -> None: super().setUp() self.min_ago = timezone.now() - timedelta(minutes=1) @@ -3271,10 +3271,10 @@ def get_response(self, *args, **kwargs): org = args[0] return super().get_response(org, **kwargs) - def assertNoResolution(self, group): + def assertNoResolution(self, group) -> None: 
assert not GroupResolution.objects.filter(group=group).exists() - def test_global_resolve(self): + def test_global_resolve(self) -> None: group1 = self.create_group(status=GroupStatus.RESOLVED) group2 = self.create_group(status=GroupStatus.UNRESOLVED) group3 = self.create_group(status=GroupStatus.IGNORED) @@ -3330,7 +3330,7 @@ def test_global_resolve(self): group=group4, status=GroupHistoryStatus.RESOLVED ).exists() - def test_resolve_member(self): + def test_resolve_member(self) -> None: group = self.create_group(status=GroupStatus.UNRESOLVED) member = self.create_user() self.create_member( @@ -3344,7 +3344,7 @@ def test_resolve_member(self): assert response.data == {"status": "resolved", "statusDetails": {}, "inbox": None} assert response.status_code == 200 - def test_resolve_ignored(self): + def test_resolve_ignored(self) -> None: group = self.create_group(status=GroupStatus.IGNORED) snooze = GroupSnooze.objects.create( group=group, until=timezone.now() - timedelta(minutes=1) @@ -3362,7 +3362,7 @@ def test_resolve_ignored(self): assert response.data == {"status": "resolved", "statusDetails": {}, "inbox": None} assert not GroupSnooze.objects.filter(id=snooze.id).exists() - def test_bulk_resolve(self): + def test_bulk_resolve(self) -> None: self.login_as(user=self.user) for i in range(200): @@ -3384,7 +3384,7 @@ def test_bulk_resolve(self): assert len(response.data) == 0 @patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound") - def test_resolve_with_integration(self, mock_sync_status_outbound): + def test_resolve_with_integration(self, mock_sync_status_outbound) -> None: self.login_as(user=self.user) org = self.organization @@ -3441,7 +3441,7 @@ def test_resolve_with_integration(self, mock_sync_status_outbound): assert len(response.data) == 0 @patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound") - def test_set_unresolved_with_integration(self, mock_sync_status_outbound): + def test_set_unresolved_with_integration(self, mock_sync_status_outbound) -> None: release = self.create_release(project=self.project, version="abc") group = self.create_group(status=GroupStatus.RESOLVED) with assume_test_silo_mode(SiloMode.CONTROL): @@ -3494,7 +3494,7 @@ def test_set_unresolved_with_integration(self, mock_sync_status_outbound): external_issue, False, group.project_id ) - def test_self_assign_issue(self): + def test_self_assign_issue(self) -> None: group = self.create_group(status=GroupStatus.UNRESOLVED) user = self.user @@ -3518,7 +3518,7 @@ def test_self_assign_issue(self): with assume_test_silo_mode(SiloMode.CONTROL): uo1.delete() - def test_self_assign_issue_next_release(self): + def test_self_assign_issue_next_release(self) -> None: release = Release.objects.create(organization_id=self.project.organization_id, version="a") release.add_project(self.project) @@ -3555,7 +3555,7 @@ def test_self_assign_issue_next_release(self): with assume_test_silo_mode(SiloMode.CONTROL): uo1.delete() - def test_in_semver_projects_group_resolution_stores_current_release_version(self): + def test_in_semver_projects_group_resolution_stores_current_release_version(self) -> None: """ Test that ensures that when we resolve a group in the next release, then GroupResolution.current_release_version is set to the latest release associated with a @@ -3623,7 +3623,7 @@ def test_in_semver_projects_group_resolution_stores_current_release_version(self assert "current_release_version" in activity.data assert activity.data["current_release_version"] == 
release_2.version - def test_in_non_semver_projects_group_resolution_stores_current_release_version(self): + def test_in_non_semver_projects_group_resolution_stores_current_release_version(self) -> None: """ Test that ensures that when we resolve a group in the next release, then GroupResolution.current_release_version is set to the most recent release associated with a @@ -3666,7 +3666,9 @@ def test_in_non_semver_projects_group_resolution_stores_current_release_version( for release in [release_2, release_3]: assert not GroupResolution.has_resolution(group=group, release=release) - def test_in_non_semver_projects_store_actual_current_release_version_not_cached_version(self): + def test_in_non_semver_projects_store_actual_current_release_version_not_cached_version( + self, + ) -> None: """ Test that ensures that the current_release_version is actually the latest version associated with a group, not the cached version because currently @@ -3724,7 +3726,7 @@ def test_in_non_semver_projects_store_actual_current_release_version_not_cached_ assert len(grp_resolution) == 1 assert grp_resolution[0].current_release_version == release_2.version - def test_in_non_semver_projects_resolved_in_next_release_is_equated_to_in_release(self): + def test_in_non_semver_projects_resolved_in_next_release_is_equated_to_in_release(self) -> None: """ Test that ensures that if we basically know the next release when clicking on Resolved In Next Release because that release exists, then we can short circuit setting @@ -3773,7 +3775,7 @@ def test_in_non_semver_projects_resolved_in_next_release_is_equated_to_in_releas ).first() assert activity.data["version"] == release_2.version - def test_selective_status_update(self): + def test_selective_status_update(self) -> None: group1 = self.create_group(status=GroupStatus.RESOLVED) group2 = self.create_group(status=GroupStatus.UNRESOLVED) group3 = self.create_group(status=GroupStatus.IGNORED) @@ -3809,7 +3811,7 @@ def test_selective_status_update(self): assert new_group4.resolved_at is None assert new_group4.status == GroupStatus.UNRESOLVED - def test_set_resolved_in_current_release(self): + def test_set_resolved_in_current_release(self) -> None: release = Release.objects.create(organization_id=self.project.organization_id, version="a") release.add_project(self.project) @@ -3845,7 +3847,7 @@ def test_set_resolved_in_current_release(self): group=group, status=GroupHistoryStatus.SET_RESOLVED_IN_RELEASE ).exists() - def test_set_resolved_in_explicit_release(self): + def test_set_resolved_in_explicit_release(self) -> None: release = Release.objects.create(organization_id=self.project.organization_id, version="a") release.add_project(self.project) release2 = Release.objects.create(organization_id=self.project.organization_id, version="b") @@ -3883,7 +3885,7 @@ def test_set_resolved_in_explicit_release(self): ) assert activity.data["version"] == release.version - def test_in_semver_projects_set_resolved_in_explicit_release(self): + def test_in_semver_projects_set_resolved_in_explicit_release(self) -> None: release_1 = self.create_release(version="fake_package@3.0.0") release_2 = self.create_release(version="fake_package@2.0.0") release_3 = self.create_release(version="fake_package@3.0.1") @@ -3930,7 +3932,7 @@ def test_in_semver_projects_set_resolved_in_explicit_release(self): assert GroupResolution.has_resolution(group=group, release=release_2) assert not GroupResolution.has_resolution(group=group, release=release_3) - def test_set_resolved_in_next_release(self): + def 
test_set_resolved_in_next_release(self) -> None: release = Release.objects.create(organization_id=self.project.organization_id, version="a") release.add_project(self.project) @@ -3964,7 +3966,7 @@ def test_set_resolved_in_next_release(self): ) assert activity.data["version"] == "" - def test_set_resolved_in_next_release_legacy(self): + def test_set_resolved_in_next_release_legacy(self) -> None: release = Release.objects.create(organization_id=self.project.organization_id, version="a") release.add_project(self.project) @@ -4001,7 +4003,7 @@ def test_set_resolved_in_next_release_legacy(self): ) assert activity.data["version"] == "" - def test_set_resolved_in_explicit_commit_unreleased(self): + def test_set_resolved_in_explicit_commit_unreleased(self) -> None: repo = self.create_repo(project=self.project, name=self.project.name) commit = self.create_commit(project=self.project, repo=repo) group = self.create_group(status=GroupStatus.UNRESOLVED) @@ -4036,7 +4038,7 @@ def test_set_resolved_in_explicit_commit_unreleased(self): group=group, status=GroupHistoryStatus.SET_RESOLVED_IN_COMMIT ).exists() - def test_set_resolved_in_explicit_commit_released(self): + def test_set_resolved_in_explicit_commit_released(self) -> None: release = self.create_release(project=self.project) repo = self.create_repo(project=self.project, name=self.project.name) commit = self.create_commit(project=self.project, repo=repo, release=release) @@ -4078,7 +4080,7 @@ def test_set_resolved_in_explicit_commit_released(self): group=group, status=GroupHistoryStatus.SET_RESOLVED_IN_COMMIT ).exists() - def test_set_resolved_in_explicit_commit_missing(self): + def test_set_resolved_in_explicit_commit_missing(self) -> None: repo = self.create_repo(project=self.project, name=self.project.name) group = self.create_group(status=GroupStatus.UNRESOLVED) @@ -4098,7 +4100,7 @@ def test_set_resolved_in_explicit_commit_missing(self): group=group, status=GroupHistoryStatus.SET_RESOLVED_IN_COMMIT ).exists() - def test_set_unresolved(self): + def test_set_unresolved(self) -> None: release = self.create_release(project=self.project, version="abc") group = self.create_group(status=GroupStatus.RESOLVED) GroupResolution.objects.create(group=group, release=release) @@ -4120,7 +4122,7 @@ def test_set_unresolved(self): user_id=self.user.id, group=group, is_active=True ).exists() - def test_set_unresolved_on_snooze(self): + def test_set_unresolved_on_snooze(self) -> None: group = self.create_group(status=GroupStatus.IGNORED) GroupSnooze.objects.create(group=group, until=timezone.now() - timedelta(days=1)) @@ -4136,7 +4138,7 @@ def test_set_unresolved_on_snooze(self): group=group, status=GroupHistoryStatus.UNRESOLVED ).exists() - def test_basic_ignore(self): + def test_basic_ignore(self) -> None: group = self.create_group(status=GroupStatus.RESOLVED) snooze = GroupSnooze.objects.create(group=group, until=timezone.now()) @@ -4155,7 +4157,7 @@ def test_basic_ignore(self): assert response.data == {"status": "ignored", "statusDetails": {}, "inbox": None} - def test_snooze_duration(self): + def test_snooze_duration(self) -> None: group = self.create_group(status=GroupStatus.RESOLVED) self.login_as(user=self.user) @@ -4188,7 +4190,7 @@ def test_snooze_duration(self): assert response.data["statusDetails"]["ignoreUntil"] == snooze.until assert response.data["statusDetails"]["actor"]["id"] == str(self.user.id) - def test_snooze_count(self): + def test_snooze_count(self) -> None: group = self.create_group(status=GroupStatus.RESOLVED, times_seen=1) 
self.login_as(user=self.user) @@ -4212,7 +4214,7 @@ def test_snooze_count(self): assert response.data["statusDetails"]["ignoreUntil"] == snooze.until assert response.data["statusDetails"]["actor"]["id"] == str(self.user.id) - def test_snooze_user_count(self): + def test_snooze_user_count(self) -> None: for i in range(10): event = self.store_event( data={ @@ -4250,7 +4252,7 @@ def test_snooze_user_count(self): assert response.data["statusDetails"]["ignoreUntil"] == snooze.until assert response.data["statusDetails"]["actor"]["id"] == str(self.user.id) - def test_set_bookmarked(self): + def test_set_bookmarked(self) -> None: group1 = self.create_group(status=GroupStatus.RESOLVED) group2 = self.create_group(status=GroupStatus.UNRESOLVED) group3 = self.create_group(status=GroupStatus.IGNORED) @@ -4286,7 +4288,7 @@ def test_set_bookmarked(self): bookmark4 = GroupBookmark.objects.filter(group=group4, user_id=self.user.id) assert not bookmark4.exists() - def test_subscription(self): + def test_subscription(self) -> None: group1 = self.create_group() group2 = self.create_group() group3 = self.create_group() @@ -4311,7 +4313,7 @@ def test_subscription(self): assert not GroupSubscription.objects.filter(group=group4, user_id=self.user.id).exists() - def test_set_public(self): + def test_set_public(self) -> None: group1 = self.create_group() group2 = self.create_group() @@ -4328,7 +4330,7 @@ def test_set_public(self): new_group2 = Group.objects.get(id=group2.id) assert bool(new_group2.get_share_id()) - def test_set_private(self): + def test_set_private(self) -> None: group1 = self.create_group() group2 = self.create_group() @@ -4349,7 +4351,7 @@ def test_set_private(self): new_group2 = Group.objects.get(id=group2.id) assert not bool(new_group2.get_share_id()) - def test_set_has_seen(self): + def test_set_has_seen(self) -> None: group1 = self.create_group(status=GroupStatus.RESOLVED) group2 = self.create_group(status=GroupStatus.UNRESOLVED) group3 = self.create_group(status=GroupStatus.IGNORED) @@ -4380,7 +4382,7 @@ def test_set_has_seen(self): @patch("sentry.issues.merge.uuid4") @patch("sentry.issues.merge.merge_groups") @patch("sentry.eventstream.backend") - def test_merge(self, mock_eventstream, merge_groups, mock_uuid4): + def test_merge(self, mock_eventstream, merge_groups, mock_uuid4) -> None: eventstream_state = object() mock_eventstream.start_merge = Mock(return_value=eventstream_state) @@ -4414,7 +4416,7 @@ def test_merge(self, mock_eventstream, merge_groups, mock_uuid4): @patch("sentry.issues.merge.uuid4") @patch("sentry.issues.merge.merge_groups") @patch("sentry.eventstream.backend") - def test_merge_performance_issues(self, mock_eventstream, merge_groups, mock_uuid4): + def test_merge_performance_issues(self, mock_eventstream, merge_groups, mock_uuid4) -> None: eventstream_state = object() mock_eventstream.start_merge = Mock(return_value=eventstream_state) @@ -4431,7 +4433,7 @@ def test_merge_performance_issues(self, mock_eventstream, merge_groups, mock_uui assert response.status_code == 400, response.content - def test_assign(self): + def test_assign(self) -> None: group1 = self.create_group(is_public=True) group2 = self.create_group(is_public=True) user = self.user @@ -4466,7 +4468,7 @@ def test_assign(self): group=group1, status=GroupHistoryStatus.UNASSIGNED ).exists() - def test_assign_non_member(self): + def test_assign_non_member(self) -> None: group = self.create_group(is_public=True) member = self.user non_member = self.create_user("bar@example.com") @@ -4479,7 +4481,7 @@ def 
test_assign_non_member(self): ).exists() assert response.status_code == 400, response.content - def test_assign_team(self): + def test_assign_team(self) -> None: self.login_as(user=self.user) group = self.create_group() @@ -4512,7 +4514,7 @@ def test_assign_team(self): group=group, status=GroupHistoryStatus.UNASSIGNED ).exists() - def test_discard(self): + def test_discard(self) -> None: group1 = self.create_group(is_public=True) group2 = self.create_group(is_public=True) group_hash = GroupHash.objects.create(hash="x" * 32, project=group1.project, group=group1) @@ -4535,7 +4537,7 @@ def test_discard(self): assert tombstone.project == group1.project assert tombstone.data == group1.data - def test_set_inbox(self): + def test_set_inbox(self) -> None: group1 = self.create_group() group2 = self.create_group() @@ -4562,7 +4564,7 @@ def test_set_inbox(self): ).exists() assert not GroupInbox.objects.filter(group=group2).exists() - def test_set_resolved_inbox(self): + def test_set_resolved_inbox(self) -> None: group1 = self.create_group() group2 = self.create_group() @@ -4585,7 +4587,7 @@ def test_set_resolved_inbox(self): ).exists() @with_feature("projects:issue-priority") - def test_update_priority(self): + def test_update_priority(self) -> None: """ Bulk-setting priority successfully changes the priority of the groups and also creates a GroupHistory and Activity entry for each group. @@ -4610,7 +4612,7 @@ def test_update_priority(self): ).exists() @with_feature("projects:issue-priority") - def test_update_priority_no_change(self): + def test_update_priority_no_change(self) -> None: """ When the priority is the same as the current priority, no changes are made """ @@ -4655,7 +4657,7 @@ def get_response(self, *args, **kwargs): return super().get_response(org, **kwargs) @patch("sentry.eventstream.backend") - def test_delete_by_id(self, mock_eventstream): + def test_delete_by_id(self, mock_eventstream) -> None: eventstream_state = {"event_stream_state": uuid4()} mock_eventstream.start_delete_groups = Mock(return_value=eventstream_state) @@ -4728,7 +4730,7 @@ def test_delete_by_id(self, mock_eventstream): assert GroupHash.objects.filter(group_id=group4.id).exists() @patch("sentry.eventstream.backend") - def test_delete_performance_issue_by_id(self, mock_eventstream): + def test_delete_performance_issue_by_id(self, mock_eventstream) -> None: eventstream_state = {"event_stream_state": uuid4()} mock_eventstream.start_delete_groups = Mock(return_value=eventstream_state) @@ -4757,7 +4759,7 @@ def test_delete_performance_issue_by_id(self, mock_eventstream): assert Group.objects.filter(id=group2.id).exists() assert GroupHash.objects.filter(group_id=group2.id).exists() - def test_bulk_delete(self): + def test_bulk_delete(self) -> None: groups = [] for i in range(10, 41): groups.append( @@ -4796,7 +4798,7 @@ def test_bulk_delete(self): assert not Group.objects.filter(id=group.id).exists() assert not GroupHash.objects.filter(group_id=group.id).exists() - def test_bulk_delete_performance_issues(self): + def test_bulk_delete_performance_issues(self) -> None: groups = [] for i in range(10, 41): groups.append( diff --git a/tests/sentry/issues/endpoints/test_organization_searches.py b/tests/sentry/issues/endpoints/test_organization_searches.py index 5a6764ebfe4f63..48cb7108bf539a 100644 --- a/tests/sentry/issues/endpoints/test_organization_searches.py +++ b/tests/sentry/issues/endpoints/test_organization_searches.py @@ -5,20 +5,14 @@ from sentry.api.serializers import serialize from sentry.models.savedsearch import 
SavedSearch, SortOptions, Visibility from sentry.models.search_common import SearchType +from sentry.models.user import User from sentry.testutils.cases import APITestCase class OrgLevelOrganizationSearchesListTest(APITestCase): endpoint = "sentry-api-0-organization-searches" - @cached_property - def user(self): - return self.create_user("test@test.com") - - def get_response(self, *args, **params): - return super().get_response(*args, **params) - - def create_base_data(self): + def create_base_data(self) -> dict[str, SavedSearch]: user_1 = self.user user_2 = self.create_user() @@ -103,14 +97,12 @@ def create_base_data(self): "savedsearch_other_pinned": savedsearch_other_pinned, } - def check_results(self, expected): + def test_simple(self) -> None: + objs = self.create_base_data() + self.login_as(user=self.user) response = self.get_success_response(self.organization.slug) - assert response.data == serialize(expected) - - def test_simple(self): - objs = self.create_base_data() - self.check_results( + assert response.data == serialize( [ objs["savedsearch_global"], objs["savedsearch_org"], @@ -126,18 +118,18 @@ class CreateOrganizationSearchesTest(APITestCase): method = "post" @cached_property - def manager(self): + def manager(self) -> User: user = self.create_user("test@test.com") self.create_member(organization=self.organization, user=user, role="manager") return user @cached_property - def member(self): + def member(self) -> User: user = self.create_user("test@test.com") self.create_member(organization=self.organization, user=user) return user - def test_simple(self): + def test_simple(self) -> None: search_type = SearchType.ISSUE.value name = "test" query = "hello" @@ -157,7 +149,7 @@ def test_simple(self): assert resp.data["visibility"] == visibility assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def test_member_cannot_create_org_search(self): + def test_member_cannot_create_org_search(self) -> None: self.login_as(user=self.member) resp = self.get_response( self.organization.slug, @@ -168,7 +160,7 @@ def test_member_cannot_create_org_search(self): ) assert resp.status_code == 400 - def test_member_can_create_owner_search(self): + def test_member_can_create_owner_search(self) -> None: self.login_as(user=self.member) resp = self.get_response( self.organization.slug, @@ -180,7 +172,7 @@ def test_member_can_create_owner_search(self): assert resp.status_code == 200 assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def test_org_global_search_conflict(self): + def test_org_global_search_conflict(self) -> None: global_search = SavedSearch.objects.create( type=SearchType.ISSUE.value, name="Some global search", @@ -200,7 +192,7 @@ def test_org_global_search_conflict(self): assert resp.status_code == 200 assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def test_org_org_search_conflict(self): + def test_org_org_search_conflict(self) -> None: org_search = SavedSearch.objects.create( organization=self.organization, type=SearchType.ISSUE.value, @@ -219,7 +211,7 @@ def test_org_org_search_conflict(self): assert resp.status_code == 400 assert "already exists" in resp.data["detail"] - def test_owner_global_search_conflict(self): + def test_owner_global_search_conflict(self) -> None: global_search = SavedSearch.objects.create( type=SearchType.ISSUE.value, name="Some global search", @@ -240,7 +232,7 @@ def test_owner_global_search_conflict(self): assert resp.status_code == 200 assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def 
test_owner_org_search_conflict(self): + def test_owner_org_search_conflict(self) -> None: org_search = SavedSearch.objects.create( organization=self.organization, type=SearchType.ISSUE.value, @@ -261,7 +253,7 @@ def test_owner_org_search_conflict(self): assert resp.status_code == 200 assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def test_owner_owner_search_conflict(self): + def test_owner_owner_search_conflict(self) -> None: user_search = SavedSearch.objects.create( organization=self.organization, type=SearchType.ISSUE.value, @@ -281,7 +273,7 @@ def test_owner_owner_search_conflict(self): assert resp.status_code == 400 assert "already exists" in resp.data["detail"] - def test_owner1_owner2_search_conflict(self): + def test_owner1_owner2_search_conflict(self) -> None: # User 1 has a saved search in org other_user_search = SavedSearch.objects.create( organization=self.organization, @@ -307,7 +299,7 @@ def test_owner1_owner2_search_conflict(self): assert SavedSearch.objects.filter(id=other_user_search.id).exists() assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def test_owner_pinned_search_conflict(self): + def test_owner_pinned_search_conflict(self) -> None: # Member has a pinned search pinned_search = SavedSearch.objects.create( organization=self.organization, @@ -331,7 +323,7 @@ def test_owner_pinned_search_conflict(self): assert resp.status_code == 200 assert SavedSearch.objects.filter(id=resp.data["id"]).exists() - def test_empty(self): + def test_empty(self) -> None: self.login_as(user=self.manager) resp = self.get_response( self.organization.slug, @@ -349,18 +341,18 @@ class OrganizationSearchesGetTest(APITestCase): method = "get" @cached_property - def manager(self): + def manager(self) -> User: user = self.create_user("manager@test.com") self.create_member(organization=self.organization, user=user, role="manager") return user @cached_property - def member(self): + def member(self) -> User: user = self.create_user("member@test.com") self.create_member(organization=self.organization, user=user) return user - def setUp(self): + def setUp(self) -> None: super().setUp() self.issue_search_manager_1 = SavedSearch.objects.create( organization=self.organization, @@ -395,7 +387,7 @@ def setUp(self): owner_id=self.manager.id, ) - def test_manager_filters_by_issue_type(self): + def test_manager_filters_by_issue_type(self) -> None: self.login_as(user=self.manager) response = self.get_success_response(self.organization.slug, type=SearchType.ISSUE.value) assert len(response.data) == 2 @@ -405,19 +397,19 @@ def test_manager_filters_by_issue_type(self): str(self.issue_search_manager_2.id), } - def test_member_filters_by_issue_type(self): + def test_member_filters_by_issue_type(self) -> None: self.login_as(user=self.member) response = self.get_success_response(self.organization.slug, type=SearchType.ISSUE.value) assert len(response.data) == 1 assert response.data[0]["id"] == str(self.issue_search_member.id) - def test_manager_sees_global_searches(self): + def test_manager_sees_global_searches(self) -> None: self.login_as(user=self.manager) response = self.get_success_response(self.organization.slug, type=SearchType.EVENT.value) assert len(response.data) == 1 assert response.data[0]["id"] == str(self.event_search_global.id) - def test_member_sees_global_searches(self): + def test_member_sees_global_searches(self) -> None: self.login_as(user=self.member) response = self.get_success_response(self.organization.slug, type=SearchType.EVENT.value) assert len(response.data) 
== 1 diff --git a/tests/sentry/issues/endpoints/test_source_map_debug.py b/tests/sentry/issues/endpoints/test_source_map_debug.py index 93181636566815..b695108c99dba5 100644 --- a/tests/sentry/issues/endpoints/test_source_map_debug.py +++ b/tests/sentry/issues/endpoints/test_source_map_debug.py @@ -39,9 +39,9 @@ class SourceMapDebugEndpointTestCase(APITestCase): def setUp(self) -> None: self.login_as(self.user) - return super().setUp() + super().setUp() - def test_url_prefix(self): + def test_url_prefix(self) -> None: cases = [ ("~/v1/scripts/footer/bundle.js", "~/v1/assets/footer/bundle.js", "assets/"), ("~/v1/scripts/footer/bundle.js", "~/v1/next/scripts/footer/bundle.js", "next/"), @@ -52,7 +52,7 @@ def test_url_prefix(self): for filename, artifact_name, expected in cases: assert _find_url_prefix(filename, artifact_name) == expected - def test_missing_event(self): + def test_missing_event(self) -> None: resp = self.get_error_response( self.organization.slug, self.project.slug, @@ -63,7 +63,7 @@ def test_missing_event(self): ) assert resp.data["detail"] == "Event not found" - def test_no_frame_given(self): + def test_no_frame_given(self) -> None: event = self.store_event( data={"event_id": "a" * 32, "release": "my-release"}, project_id=self.project.id ) @@ -75,7 +75,7 @@ def test_no_frame_given(self): ) assert resp.data["detail"] == "Query parameter 'frame_idx' is required" - def test_non_integer_frame_given(self): + def test_non_integer_frame_given(self) -> None: event = self.store_event( data={"event_id": "a" * 32, "release": "my-release"}, project_id=self.project.id ) @@ -88,7 +88,7 @@ def test_non_integer_frame_given(self): ) assert resp.data["detail"] == "Query parameter 'frame_idx' must be an integer" - def test_non_integer_exception_given(self): + def test_non_integer_exception_given(self) -> None: event = self.store_event( data={"event_id": "a" * 32, "release": "my-release"}, project_id=self.project.id ) @@ -102,7 +102,7 @@ def test_non_integer_exception_given(self): ) assert resp.data["detail"] == "Query parameter 'exception_idx' must be an integer" - def test_frame_out_of_bounds(self): + def test_frame_out_of_bounds(self) -> None: event = self.store_event( data=self.base_data, project_id=self.project.id, @@ -117,7 +117,7 @@ def test_frame_out_of_bounds(self): ) assert resp.data["detail"] == "Query parameter 'frame_idx' is out of bounds" - def test_no_exception(self): + def test_no_exception(self) -> None: event_data = self.base_data.copy() del event_data["exception"] event = self.store_event(data=event_data, project_id=self.project.id) @@ -132,7 +132,7 @@ def test_no_exception(self): assert resp.data["detail"] == "Event does not contain an exception" - def test_exception_out_of_bounds(self): + def test_exception_out_of_bounds(self) -> None: event = self.store_event( data=self.base_data, project_id=self.project.id, @@ -147,7 +147,7 @@ def test_exception_out_of_bounds(self): ) assert resp.data["detail"] == "Query parameter 'exception_idx' is out of bounds" - def test_event_frame_has_source_maps(self): + def test_event_frame_has_source_maps(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -186,7 +186,7 @@ def test_event_frame_has_source_maps(self): error = resp.data["errors"] assert error == [] - def test_event_has_no_release(self): + def test_event_has_no_release(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -222,7 +222,7 @@ def test_event_has_no_release(self): assert error["type"] == "no_release_on_event" assert error["message"] 
== "The event is missing a release" - def test_release_has_no_artifacts(self): + def test_release_has_no_artifacts(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -261,7 +261,7 @@ def test_release_has_no_artifacts(self): assert error["type"] == "no_sourcemaps_on_release" assert error["message"] == "The release is missing source maps" - def test_no_valid_url(self): + def test_no_valid_url(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -321,7 +321,7 @@ def test_no_valid_url(self): assert error["message"] == "The absolute path url is not valid" assert error["data"] == {"absPath": "app.example.com/static/js/main.fa8fe19f.js"} - def test_skips_node_internals(self): + def test_skips_node_internals(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -356,7 +356,7 @@ def test_skips_node_internals(self): ) assert len(resp.data["errors"]) == 0 - def test_skip_node_context_line(self): + def test_skip_node_context_line(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -392,7 +392,7 @@ def test_skip_node_context_line(self): ) assert len(resp.data["errors"]) == 0 - def test_no_valid_url_skips_node(self): + def test_no_valid_url_skips_node(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -458,7 +458,7 @@ def test_no_valid_url_skips_node(self): ) assert len(resp.data["errors"]) == 0 - def test_partial_url_match(self): + def test_partial_url_match(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -531,7 +531,7 @@ def test_partial_url_match(self): ], } - def test_no_url_match(self): + def test_no_url_match(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -583,7 +583,7 @@ def test_no_url_match(self): "artifactNames": ["http://example.com/application.js"], } - def test_dist_mismatch(self): + def test_dist_mismatch(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -643,7 +643,7 @@ def test_dist_mismatch(self): "filename": "/application.js", } - def test_no_sourcemap_found(self): + def test_no_sourcemap_found(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -699,7 +699,7 @@ def test_no_sourcemap_found(self): "filename": "/application.js", } - def test_sourcemap_in_header(self): + def test_sourcemap_in_header(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -767,7 +767,7 @@ def test_sourcemap_in_header(self): assert resp.data["errors"] == [] - def test_sourcemap_in_file(self): + def test_sourcemap_in_file(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -833,7 +833,7 @@ def test_sourcemap_in_file(self): assert resp.data["errors"] == [] - def test_js_out_of_date(self): + def test_js_out_of_date(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -874,7 +874,7 @@ def test_js_out_of_date(self): assert error["type"] == "no_sourcemaps_on_release" assert error["message"] == "The release is missing source maps" - def test_remix_up_to_date(self): + def test_remix_up_to_date(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, @@ -915,7 +915,7 @@ def test_remix_up_to_date(self): assert error["type"] == "no_sourcemaps_on_release" assert error["message"] == "The release is missing source maps" - def test_valid_debugid_sdk_no_sourcemaps(self): + def test_valid_debugid_sdk_no_sourcemaps(self) -> None: event = self.store_event( data={ "event_id": "a" * 32, diff --git a/tests/sentry/issues/test_occurrence_consumer.py 
b/tests/sentry/issues/test_occurrence_consumer.py
index 9676b7eafa1699..8b9f8f77a3de23 100644
--- a/tests/sentry/issues/test_occurrence_consumer.py
+++ b/tests/sentry/issues/test_occurrence_consumer.py
@@ -178,7 +178,7 @@ def test_occurrence_consumer_without_payload_type(self) -> None:
         assert Group.objects.filter(grouphash__hash=occurrence.fingerprint[0]).exists()
 
     @with_feature("projects:issue-priority")
-    def test_issue_platform_default_priority(self):
+    def test_issue_platform_default_priority(self) -> None:
         # test default priority of LOW
         message = get_test_message(self.project.id)
         with self.feature("organizations:profile-file-io-main-thread-ingest"):
@@ -192,7 +192,9 @@ def test_issue_platform_default_priority(self):
     @with_feature("projects:issue-priority")
     @with_feature("projects:first-event-severity-calculation")
     @mock.patch("sentry.event_manager._get_severity_score")
-    def test_issue_platform_override_priority(self, mock_get_severity_score):
+    def test_issue_platform_override_priority(
+        self, mock_get_severity_score: mock.MagicMock
+    ) -> None:
         # test explicitly set priority of HIGH
         message = get_test_message(self.project.id)
         message["initial_issue_priority"] = PriorityLevel.HIGH.value
@@ -536,7 +538,7 @@ def test_assignee_none(self) -> None:
         assert kwargs["occurrence_data"]["assignee"] is None
 
     @mock.patch("sentry.issues.occurrence_consumer._process_message")
-    def test_validate_cache(self, mock_process_message):
+    def test_validate_cache(self, mock_process_message: mock.MagicMock) -> None:
         # Test to ensure cache is set properly after processing an occurrence group
         with mock.patch("django.core.cache.cache.set", side_effect=cache.set) as mock_cache_set:
             process_occurrence_group([{"id": 1}, {"id": 2}, {"id": 2}])
diff --git a/tests/sentry/issues/test_status_change_consumer.py b/tests/sentry/issues/test_status_change_consumer.py
index 64a8399388bdd1..dd17fe1ea13dce 100644
--- a/tests/sentry/issues/test_status_change_consumer.py
+++ b/tests/sentry/issues/test_status_change_consumer.py
@@ -34,7 +34,7 @@ def get_test_message_status_change(
 
 class StatusChangeProcessMessageTest(IssueOccurrenceTestBase):
     @django_db_all
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         message = get_test_message(self.project.id)
         with self.feature("organizations:profile-file-io-main-thread-ingest"):
@@ -47,8 +47,13 @@ def setUp(self):
         self.fingerprint = ["touch-id"]
 
     def _assert_statuses_set(
-        self, status, substatus, group_history_status, activity_type, priority=None
-    ):
+        self,
+        status: int,
+        substatus: int | None,
+        group_history_status: int,
+        activity_type: ActivityType,
+        priority: int | None = None,
+    ) -> None:
         self.group.refresh_from_db()
         assert self.group.status == status
         assert self.group.substatus == substatus
@@ -159,7 +164,7 @@ def test_valid_payload_auto_ongoing(self) -> None:
 
 class StatusChangeBulkGetGroupsFromFingerprintsTest(IssueOccurrenceTestBase):
     @django_db_all
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         message = get_test_message(self.project.id)
         with self.feature("organizations:profile-file-io-main-thread-ingest"):

From 10e8f583931e3d03545674926bc577ec1b2ebb44 Mon Sep 17 00:00:00 2001
From: Tony Xiao
Date: Thu, 9 May 2024 12:35:47 -0400
Subject: [PATCH 212/376] feat(metrics): Open metrics explorer in traces explorer (#70519)

This adds an "open in traces explorer" button to the metrics page. There are
still a few bugs where the traces explorer errors in some cases, but the
button is behind a feature flag while we test.
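For reviewers, a rough sketch of how the new `generateTracesRouteWithQuery`
helper is meant to be called (the org slug and metric values below are made
up for illustration; in `widgetDetails.tsx` the op/mri/query come from the
focused metric widget):

```tsx
import {generateTracesRouteWithQuery} from 'sentry/views/performance/traces/utils';

// Hypothetical inputs -- any op/MRI pair from a widget works the same way.
const target = generateTracesRouteWithQuery({
  orgSlug: 'my-org',
  metric: {
    metricsOp: 'avg',
    mri: 'd:transactions/duration@millisecond',
    metricsQuery: 'transaction:/checkout',
  },
  query: {statsPeriod: '24h'},
});
// target.pathname -> '/organizations/my-org/performance/traces/'
// target.query spreads the page query plus metricsOp/mri/metricsQuery, which
// content.tsx reads back via decodeScalar(location.query.metricsOp) etc.
```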
--- static/app/views/metrics/widgetDetails.tsx | 29 +++++++++++++++ .../app/views/performance/traces/content.tsx | 32 +++++++++++++++-- static/app/views/performance/traces/utils.tsx | 36 +++++++++++++++++++ 3 files changed, 94 insertions(+), 3 deletions(-) diff --git a/static/app/views/metrics/widgetDetails.tsx b/static/app/views/metrics/widgetDetails.tsx index 784c581a4c4515..1eb4231978cbc3 100644 --- a/static/app/views/metrics/widgetDetails.tsx +++ b/static/app/views/metrics/widgetDetails.tsx @@ -1,6 +1,9 @@ import {Fragment, useCallback, useMemo, useState} from 'react'; import styled from '@emotion/styled'; +import omit from 'lodash/omit'; +import Feature from 'sentry/components/acl/feature'; +import {Button} from 'sentry/components/button'; import HookOrDefault from 'sentry/components/hookOrDefault'; import { type Field, @@ -21,11 +24,13 @@ import type { } from 'sentry/utils/metrics/types'; import {MetricExpressionType} from 'sentry/utils/metrics/types'; import type {MetricsSamplesResults} from 'sentry/utils/metrics/useMetricsSamples'; +import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {CodeLocations} from 'sentry/views/metrics/codeLocations'; import type {FocusAreaProps} from 'sentry/views/metrics/context'; import {useMetricsContext} from 'sentry/views/metrics/context'; import {extendQueryWithGroupBys} from 'sentry/views/metrics/utils'; +import {generateTracesRouteWithQuery} from 'sentry/views/performance/traces/utils'; enum Tab { SAMPLES = 'samples', @@ -91,6 +96,7 @@ export function MetricDetails({ focusArea, setMetricsSamples, }: MetricDetailsProps) { + const location = useLocation(); const organization = useOrganization(); const [selectedTab, setSelectedTab] = useState(Tab.SAMPLES); @@ -123,6 +129,19 @@ export function MetricDetails({ [organization] ); + const tracesTarget = generateTracesRouteWithQuery({ + orgSlug: organization.slug, + metric: + op && mri + ? 
{ + metricsOp: op, + mri, + metricsQuery: queryWithFocusedSeries, + } + : undefined, + query: omit(location.query, ['widgets', 'interval']), + }); + return ( @@ -167,6 +186,11 @@ export function MetricDetails({ /> )} + + + + + @@ -193,3 +217,8 @@ const ContentWrapper = styled('div')` position: relative; padding-top: ${space(2)}; `; + +const OpenInTracesWrapper = styled('div')` + display: flex; + justify-content: flex-end; +`; diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index 7e94eb8ff4cd6f..45fe03eef5eb18 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -3,6 +3,7 @@ import {useTheme} from '@emotion/react'; import styled from '@emotion/styled'; import debounce from 'lodash/debounce'; +import {Alert} from 'sentry/components/alert'; import {Button} from 'sentry/components/button'; import Count from 'sentry/components/count'; import EmptyStateWarning from 'sentry/components/emptyStateWarning'; @@ -18,12 +19,12 @@ import PanelHeader from 'sentry/components/panels/panelHeader'; import PanelItem from 'sentry/components/panels/panelItem'; import PerformanceDuration from 'sentry/components/performanceDuration'; import {IconChevron} from 'sentry/icons/iconChevron'; -import {t} from 'sentry/locale'; +import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {PageFilters} from 'sentry/types/core'; import {browserHistory} from 'sentry/utils/browserHistory'; import {useApiQuery} from 'sentry/utils/queryClient'; -import {decodeInteger, decodeList} from 'sentry/utils/queryString'; +import {decodeInteger, decodeList, decodeScalar} from 'sentry/utils/queryString'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; @@ -55,6 +56,10 @@ export function Content() { return decodeInteger(location.query.perPage, DEFAULT_PER_PAGE); }, [location.query.perPage]); + const metricsOp = decodeScalar(location.query.metricsOp); + const mri = decodeScalar(location.query.mri); + const metricsQuery = decodeScalar(location.query.metricsQuery); + const handleSearch = useCallback( (searchIndex: number, searchQuery: string) => { const newQueries = [...queries]; @@ -95,6 +100,8 @@ export function Content() { [location, queries] ); + const hasMetric = metricsOp && mri; + const traces = useTraces({ fields: [ ...FIELDS, @@ -105,6 +112,8 @@ export function Content() { limit, query: queries, sort: SORTS, + mri: hasMetric ? mri : undefined, + metricsQuery: hasMetric ? 
metricsQuery : undefined, }); const isLoading = traces.isFetching; @@ -119,6 +128,13 @@ export function Content() { + {hasMetric && ( + + {tct('The metric query [metricQuery] is filtering the results below.', { + metricQuery: {`${metricsOp}(${mri}){${metricsQuery || ''}}`}, + })} + + )} { datetime?: PageFilters['datetime']; enabled?: boolean; limit?: number; + metricsQuery?: string; + mri?: string; query?: string | string[]; sort?: string[]; suggestedQuery?: string; @@ -396,6 +414,8 @@ function useTraces({ datetime, enabled, limit, + mri, + metricsQuery, query, suggestedQuery, sort, @@ -415,7 +435,9 @@ function useTraces({ suggestedQuery, sort, per_page: limit, - maxSpansPerTrace: 10, + maxSpansPerTrace: 5, + mri, + metricsQuery, }, }; @@ -519,3 +541,7 @@ const BreakdownPanelItem = styled(StyledPanelItem)<{highlightedSliceName: string const EmptyValueContainer = styled('span')` color: ${p => p.theme.gray300}; `; + +const StyledAlert = styled(Alert)` + margin-bottom: 0; +`; diff --git a/static/app/views/performance/traces/utils.tsx b/static/app/views/performance/traces/utils.tsx index c1651fb180e324..334eb0e91badbf 100644 --- a/static/app/views/performance/traces/utils.tsx +++ b/static/app/views/performance/traces/utils.tsx @@ -1,3 +1,7 @@ +import type {Location, LocationDescriptor} from 'history'; + +import type {Organization} from 'sentry/types/organization'; + import type {SpanResult, TraceResult} from './content'; import type {Field} from './data'; @@ -26,3 +30,35 @@ export function getStylingSliceName( export function getSecondaryNameFromSpan(span: SpanResult) { return span['sdk.name']; } + +export function generateTracesRoute({orgSlug}: {orgSlug: Organization['slug']}): string { + return `/organizations/${orgSlug}/performance/traces/`; +} + +export function generateTracesRouteWithQuery({ + orgSlug, + metric, + query, +}: { + orgSlug: Organization['slug']; + metric?: { + metricsOp: string; + mri: string; + metricsQuery?: string; + }; + query?: Location['query']; +}): LocationDescriptor { + const {metricsOp, metricsQuery, mri} = metric || {}; + + const pathname = generateTracesRoute({orgSlug}); + + return { + pathname, + query: { + ...query, + metricsOp, + metricsQuery, + mri, + }, + }; +} From 5036b430098cfc31d5b1a9b33b3dc304710f93a5 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 12:40:43 -0400 Subject: [PATCH 213/376] perf: use orjson in endpoints (#70574) We have enough confidence to say that `orjson` doesn't cause any errors/issues. We can now safely get rid of `json` and `rapidjson` in our repository. 
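The mechanical change applied across the touched endpoints follows roughly
this pattern (a minimal sketch; `parse_body` is a made-up name, not a helper
in this diff):

```python
import orjson

def parse_body(body: bytes):
    # Same call shape as stdlib json.loads / json.JSONDecodeError ...
    try:
        return orjson.loads(body)
    except orjson.JSONDecodeError:
        return None

# ... but orjson.dumps() returns bytes, so call sites that need a str
# (e.g. hand-built HttpResponse bodies) must decode explicitly:
payload: str = orjson.dumps({"suggestion": "fix"}).decode()
```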
Ref: https://github.com/getsentry/sentry/issues/68903
---
 src/sentry/api/base.py                        |  6 +-
 src/sentry/api/client.py                      |  5 +-
 src/sentry/api/endpoints/custom_rules.py      |  4 +-
 src/sentry/api/endpoints/debug_files.py       |  4 +-
 .../api/endpoints/event_ai_suggested_fix.py   | 13 +--
 .../api/endpoints/event_grouping_info.py      |  4 +-
 src/sentry/api/endpoints/group_ai_autofix.py  |  8 +-
 .../endpoints/group_autofix_setup_check.py    |  4 +-
 .../integrations/sentry_apps/details.py       |  4 +-
 .../integrations/sentry_apps/index.py         |  4 +-
 .../organization_artifactbundle_assemble.py   |  4 +-
 .../organization_release_assemble.py          |  5 +-
 ...anization_transaction_anomaly_detection.py |  6 +-
 .../project_app_store_connect_credentials.py  |  5 +-
 .../project_autofix_create_codebase_index.py  |  4 +-
 .../project_create_sample_transaction.py      |  6 +-
 src/sentry/api/endpoints/project_details.py   |  8 +-
 .../endpoints/project_profiling_profile.py    |  6 +-
 .../api/endpoints/project_symbol_sources.py   |  8 +-
 .../api/endpoints/relay/register_challenge.py |  6 +-
 .../api/endpoints/relay/register_response.py  |  6 +-
 .../relocations/artifacts/details.py          | 14 ++-
 src/sentry/api/endpoints/seer_rpc.py          | 10 +-
 src/sentry/api/fields/secret.py               |  4 +-
 src/sentry/api/helpers/autofix.py             |  4 +-
 .../serializers/models/app_platform_event.py  |  6 +-
 .../api/serializers/models/dashboard.py       |  5 +-
 src/sentry/api/serializers/models/project.py  |  4 +-
 .../api/serializers/rest_framework/json.py    |  7 +-
 .../api/serializers/rest_framework/rule.py    |  4 +-
 .../api/validators/sentry_apps/schema.py      |  5 +-
 .../test_notification_actions_details.py      |  6 +-
 .../test_notification_actions_index.py        |  6 +-
 .../api/endpoints/relocations/test_index.py   | 92 +++++++++----------
 .../api/endpoints/relocations/test_retry.py   |  7 +-
 .../api/endpoints/test_event_grouping_info.py |  8 +-
 .../endpoints/test_group_autofix_update.py    | 10 +-
 .../test_group_similar_issues_embeddings.py   | 34 +++----
 .../endpoints/test_organization_details.py    |  4 +-
 .../test_organization_invite_request_index.py |  4 +-
 .../test_organization_join_request.py         |  4 +-
 ...est_organization_metrics_code_locations.py |  4 +-
 .../test_organization_release_meta.py         | 10 +-
 .../test_organization_sentry_apps.py          |  4 +-
 ...t_project_app_store_connect_credentials.py | 14 +--
 ...st_project_artifact_bundle_file_details.py |  6 +-
 .../endpoints/test_project_artifact_lookup.py |  8 +-
 ...t_project_autofix_create_codebase_index.py |  8 +-
 .../endpoints/test_project_create_sample.py   | 20 ++--
 .../api/endpoints/test_project_details.py     | 42 ++++++---
 .../test_project_profiling_profile.py         |  8 +-
 .../endpoints/test_project_rule_details.py    | 22 ++---
 .../api/endpoints/test_project_rules.py       | 12 +--
 .../endpoints/test_project_symbol_sources.py  | 21 +++--
 .../endpoints/test_relay_globalconfig_v3.py   |  4 +-
 .../endpoints/test_relay_projectconfigs.py    |  5 +-
 .../endpoints/test_relay_projectconfigs_v2.py |  5 +-
 .../endpoints/test_relay_projectconfigs_v3.py |  4 +-
 .../api/endpoints/test_relay_projectids.py    |  7 +-
 .../api/endpoints/test_relay_publickeys.py    |  5 +-
 .../api/endpoints/test_relay_register.py      | 22 ++---
 tests/sentry/api/endpoints/test_rpc.py        |  6 +-
 tests/sentry/api/endpoints/test_seer_rpc.py   |  6 +-
 .../api/endpoints/test_sentry_app_details.py  |  5 +-
 ...ntry_app_installation_external_requests.py |  6 +-
 .../sentry/api/endpoints/test_sentry_apps.py  | 16 ++--
 .../api/endpoints/test_sentry_apps_stats.py   |  7 +-
 ...t_source_map_debug_blue_thunder_edition.py | 42 ++++-----
 tests/sentry/api/helpers/test_autofix.py      | 16 ++--
 .../serializers/test_app_platform_event.py    | 24 +++--
 70 files changed, 373 insertions(+), 344 deletions(-)

diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py
index c9ca18b38d1cb8..9f321bdeb5111a 100644
--- a/src/sentry/api/base.py
+++ b/src/sentry/api/base.py
@@ -9,6 +9,7 @@
 from typing import Any
 from urllib.parse import quote as urlquote
 
+import orjson
 import sentry_sdk
 from django.conf import settings
 from django.http import HttpResponse
@@ -34,7 +35,6 @@
 from sentry.ratelimits.config import DEFAULT_RATE_LIMIT_CONFIG, RateLimitConfig
 from sentry.silo.base import SiloLimit, SiloMode
 from sentry.types.ratelimit import RateLimit, RateLimitCategory
-from sentry.utils import json
 from sentry.utils.audit import create_audit_entry
 from sentry.utils.cursors import Cursor
 from sentry.utils.dates import to_datetime
@@ -343,8 +343,8 @@ def load_json_body(self, request: Request):
             return
 
         try:
-            request.json_body = json.loads(request.body)
-        except json.JSONDecodeError:
+            request.json_body = orjson.loads(request.body)
+        except orjson.JSONDecodeError:
             return
 
     def initialize_request(self, request: HttpRequest, *args: Any, **kwargs: Any) -> Request:
diff --git a/src/sentry/api/client.py b/src/sentry/api/client.py
index d7f2b1034f626d..ae55e29061b995 100644
--- a/src/sentry/api/client.py
+++ b/src/sentry/api/client.py
@@ -2,12 +2,12 @@
 
 from typing import TypeAlias
 
+import orjson
 from django.conf import settings
 from django.urls import resolve
 from rest_framework.test import APIRequestFactory, force_authenticate
 
 from sentry.auth.superuser import Superuser
-from sentry.utils import json
 
 __all__ = ("ApiClient",)
 
@@ -54,8 +54,9 @@ def request(
         callback, callback_args, callback_kwargs = resolver_match
 
         if data:
+            # TODO(@anonrig): Investigate why we are doing this?
             # we encode to ensure compatibility
-            data = json.loads(json.dumps(data))
+            data = orjson.loads(orjson.dumps(data))
 
         rf = APIRequestFactory()
         mock_request = getattr(rf, method.lower())(full_path, data or {})
diff --git a/src/sentry/api/endpoints/custom_rules.py b/src/sentry/api/endpoints/custom_rules.py
index b1c1e7a11ff460..4ec11fdfde6067 100644
--- a/src/sentry/api/endpoints/custom_rules.py
+++ b/src/sentry/api/endpoints/custom_rules.py
@@ -1,6 +1,7 @@
 from datetime import datetime, timedelta, timezone
 from enum import Enum
 
+import orjson
 import sentry_sdk
 from django.db import DatabaseError
 from rest_framework import serializers
@@ -23,7 +24,6 @@
 from sentry.models.project import Project
 from sentry.snuba.metrics.extraction import RuleCondition, SearchQueryConverter, parse_search_query
 from sentry.tasks.relay import schedule_invalidate_project_config
-from sentry.utils import json
 from sentry.utils.dates import parse_stats_period
 
 MAX_RULE_PERIOD_STRING = "6h"
@@ -248,7 +248,7 @@ def get(self, request: Request, organization: Organization) -> Response:
 def _rule_to_response(rule: CustomDynamicSamplingRule) -> Response:
     response_data = {
         "ruleId": rule.external_rule_id,
-        "condition": json.loads(rule.condition),
+        "condition": orjson.loads(rule.condition),
         "startDate": rule.start_date.strftime(CUSTOM_RULE_DATE_FORMAT),
         "endDate": rule.end_date.strftime(CUSTOM_RULE_DATE_FORMAT),
         "numSamples": rule.num_samples,
diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py
index 88cc895261e567..342bc691eebd15 100644
--- a/src/sentry/api/endpoints/debug_files.py
+++ b/src/sentry/api/endpoints/debug_files.py
@@ -5,6 +5,7 @@
 from collections.abc import Sequence
 
 import jsonschema
+import orjson
 from django.db import IntegrityError, router
 from django.db.models import Q
 from django.http import Http404, HttpResponse, StreamingHttpResponse
@@ -44,7 +45,6 @@
     get_assemble_status,
     set_assemble_status,
 )
-from sentry.utils import json
 from sentry.utils.db import atomic_transaction
 
 logger = logging.getLogger("sentry.api")
@@ -419,7 +419,7 @@ def post(self, request: Request, project) -> Response:
         }
 
         try:
-            files = json.loads(request.body)
+            files = orjson.loads(request.body)
             jsonschema.validate(files, schema)
         except jsonschema.ValidationError as e:
             return Response({"error": str(e).splitlines()[0]}, status=400)
diff --git a/src/sentry/api/endpoints/event_ai_suggested_fix.py b/src/sentry/api/endpoints/event_ai_suggested_fix.py
index 60e13f07d4545c..3870b16ddd3e77 100644
--- a/src/sentry/api/endpoints/event_ai_suggested_fix.py
+++ b/src/sentry/api/endpoints/event_ai_suggested_fix.py
@@ -4,6 +4,7 @@
 import random
 from typing import Any
 
+import orjson
 from django.conf import settings
 from django.dispatch import Signal
 from django.http import HttpResponse, StreamingHttpResponse
@@ -16,7 +17,6 @@
 from sentry.api.bases.project import ProjectEndpoint
 from sentry.api.exceptions import ResourceDoesNotExist
 from sentry.types.ratelimit import RateLimit, RateLimitCategory
-from sentry.utils import json
 from sentry.utils.cache import cache
 
 logger = logging.getLogger(__name__)
@@ -275,10 +275,7 @@ def suggest_fix(event_data, model=settings.SENTRY_AI_SUGGESTED_FIX_MODEL, stream
         temperature=0.7,
         messages=[
             {"role": "system", "content": prompt},
-            {
-                "role": "user",
-                "content": json.dumps(event_info),
-            },
+            {"role": "user", "content": orjson.dumps(event_info).decode()},
         ],
         stream=stream,
     )
@@ -350,7 +347,7 @@ def get(self, request: Request, project, event_id) -> HttpResponse | StreamingHt
 
         if policy_failure is not None:
             return HttpResponse(
-                json.dumps({"restriction": policy_failure}),
+                orjson.dumps({"restriction": policy_failure}),
                 content_type="application/json",
                 status=403,
             )
@@ -364,7 +361,7 @@ def get(self, request: Request, project, event_id) -> HttpResponse | StreamingHt
             suggestion = suggest_fix(event.data, stream=stream)
         except RateLimitError as err:
             return HttpResponse(
-                json.dumps({"error": err.response.json()["error"]}),
+                orjson.dumps({"error": err.response.json()["error"]}),
                 content_type="text/plain; charset=utf-8",
                 status=429,
             )
@@ -394,6 +391,6 @@ def stream_response():
         )
 
     return HttpResponse(
-        json.dumps({"suggestion": suggestion}),
+        orjson.dumps({"suggestion": suggestion}),
         content_type="application/json",
     )
diff --git a/src/sentry/api/endpoints/event_grouping_info.py b/src/sentry/api/endpoints/event_grouping_info.py
index 3384ee7addeefa..faf07987be7d61 100644
--- a/src/sentry/api/endpoints/event_grouping_info.py
+++ b/src/sentry/api/endpoints/event_grouping_info.py
@@ -1,3 +1,4 @@
+import orjson
 from django.http import HttpRequest, HttpResponse
 
 from sentry import eventstore
@@ -7,7 +8,6 @@
 from sentry.api.bases.project import ProjectEndpoint
 from sentry.api.exceptions import ResourceDoesNotExist
 from sentry.grouping.grouping_info import get_grouping_info
-from sentry.utils import json
 
 
 @region_silo_endpoint
@@ -31,4 +31,4 @@ def get(self, request: HttpRequest, project, event_id) -> HttpResponse:
 
     grouping_info = get_grouping_info(request.GET.get("config", None), project, event)
 
-    return HttpResponse(json.dumps(grouping_info), content_type="application/json")
+    return HttpResponse(orjson.dumps(grouping_info), content_type="application/json")
diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py
index 7cb2c29135fcef..0ade5ce0084eec 100644
--- a/src/sentry/api/endpoints/group_ai_autofix.py
+++ b/src/sentry/api/endpoints/group_ai_autofix.py
@@ -4,6 +4,7 @@
 from datetime import datetime
 from typing import Any
 
+import orjson
 import requests
 from django.conf import settings
 from django.contrib.auth.models import AbstractBaseUser, AnonymousUser
@@ -19,7 +20,6 @@
 from sentry.models.group import Group
 from sentry.models.user import User
 from sentry.types.ratelimit import RateLimit, RateLimitCategory
-from sentry.utils import json
 
 logger = logging.getLogger(__name__)
 
@@ -86,7 +86,7 @@ def _call_autofix(
     ):
         response = requests.post(
             f"{settings.SEER_AUTOFIX_URL}/v1/automation/autofix/start",
-            data=json.dumps(
+            data=orjson.dumps(
                 {
                     "organization_id": group.organization.id,
                     "project_id": group.project.id,
@@ -118,7 +118,7 @@ def _call_autofix(
     def _call_get_autofix_state(self, group_id: int) -> dict[str, Any] | None:
         response = requests.post(
             f"{settings.SEER_AUTOFIX_URL}/v1/automation/autofix/state",
-            data=json.dumps(
+            data=orjson.dumps(
                 {
                     "group_id": group_id,
                 }
@@ -136,7 +136,7 @@ def _call_get_autofix_state(self, group_id: int) -> dict[str, Any] | None:
         return None
 
     def post(self, request: Request, group: Group) -> Response:
-        data = json.loads(request.body)
+        data = orjson.loads(request.body)
 
         # This event_id is the event that the user is looking at when they click the "Fix" button
         event_id = data.get("event_id", None)
diff --git a/src/sentry/api/endpoints/group_autofix_setup_check.py b/src/sentry/api/endpoints/group_autofix_setup_check.py
index fe448f9072a932..6af248945da2f6 100644
--- a/src/sentry/api/endpoints/group_autofix_setup_check.py
+++ b/src/sentry/api/endpoints/group_autofix_setup_check.py
@@ -2,6 +2,7 @@
 
 import logging
 
+import orjson
 import requests
 from django.conf import settings
 from rest_framework.response import Response
@@ -23,7 +24,6 @@
 from sentry.models.organization import Organization
 from sentry.models.project import Project
 from sentry.services.hybrid_cloud.integration import integration_service
-from sentry.utils import json
 
 logger = logging.getLogger(__name__)
 
@@ -69,7 +69,7 @@ def get_repos_and_access(project: Project) -> list[dict]:
     for repo in repos:
         response = requests.post(
             f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/repo/check-access",
-            data=json.dumps(
+            data=orjson.dumps(
                 {
                     "repo": repo,
                 }
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/details.py b/src/sentry/api/endpoints/integrations/sentry_apps/details.py
index 78d52975bdef9a..b8e26003b4a86e 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/details.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/details.py
@@ -1,5 +1,6 @@
 import logging
 
+import orjson
 import sentry_sdk
 from django.db import router, transaction
 from requests import RequestException
@@ -24,7 +25,6 @@
 from sentry.models.integrations.sentry_app_installation import SentryAppInstallation
 from sentry.sentry_apps.apps import SentryAppUpdater
 from sentry.services.hybrid_cloud.organization import organization_service
-from sentry.utils import json
 from sentry.utils.audit import create_audit_entry
 
 logger = logging.getLogger(__name__)
@@ -116,7 +116,7 @@ def put(self, request: Request, sentry_app) -> Response:
             for error_message in serializer.errors["schema"]:
                 name = "sentry_app.schema_validation_error"
                 log_info = {
-                    "schema": json.dumps(request.data["schema"]),
+                    "schema": orjson.dumps(request.data["schema"]).decode(),
                     "user_id": request.user.id,
"sentry_app_id": sentry_app.id, "sentry_app_name": sentry_app.name, diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/index.py b/src/sentry/api/endpoints/integrations/sentry_apps/index.py index 8aa9400b53c2f2..4c016938fd1860 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/index.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/index.py @@ -1,5 +1,6 @@ import logging +import orjson from rest_framework.request import Request from rest_framework.response import Response from rest_framework.serializers import ValidationError @@ -18,7 +19,6 @@ from sentry.models.integrations.sentry_app import SentryApp from sentry.sentry_apps.apps import SentryAppCreator from sentry.services.hybrid_cloud.user.service import user_service -from sentry.utils import json logger = logging.getLogger(__name__) @@ -143,7 +143,7 @@ def post(self, request: Request, organization) -> Response: for error_message in serializer.errors["schema"]: name = "sentry_app.schema_validation_error" log_info = { - "schema": json.dumps(data["schema"]), + "schema": orjson.dumps(data["schema"]).decode(), "user_id": request.user.id, "sentry_app_name": data["name"], "organization_id": organization.id, diff --git a/src/sentry/api/endpoints/organization_artifactbundle_assemble.py b/src/sentry/api/endpoints/organization_artifactbundle_assemble.py index 890c043612009f..76a1441389d48d 100644 --- a/src/sentry/api/endpoints/organization_artifactbundle_assemble.py +++ b/src/sentry/api/endpoints/organization_artifactbundle_assemble.py @@ -1,4 +1,5 @@ import jsonschema +import orjson from rest_framework.request import Request from rest_framework.response import Response @@ -17,7 +18,6 @@ get_assemble_status, set_assemble_status, ) -from sentry.utils import json @region_silo_endpoint @@ -49,7 +49,7 @@ def post(self, request: Request, organization) -> Response: } try: - data = json.loads(request.body) + data = orjson.loads(request.body) jsonschema.validate(data, schema) except jsonschema.ValidationError as e: return Response({"error": str(e).splitlines()[0]}, status=400) diff --git a/src/sentry/api/endpoints/organization_release_assemble.py b/src/sentry/api/endpoints/organization_release_assemble.py index be092e1973c074..ec40db195ae31d 100644 --- a/src/sentry/api/endpoints/organization_release_assemble.py +++ b/src/sentry/api/endpoints/organization_release_assemble.py @@ -1,4 +1,5 @@ import jsonschema +import orjson from rest_framework.request import Request from rest_framework.response import Response @@ -13,7 +14,7 @@ get_assemble_status, set_assemble_status, ) -from sentry.utils import json, metrics +from sentry.utils import metrics @region_silo_endpoint @@ -52,7 +53,7 @@ def post(self, request: Request, organization, version) -> Response: } try: - data = json.loads(request.body) + data = orjson.loads(request.body) jsonschema.validate(data, schema) except jsonschema.ValidationError as e: return Response({"error": str(e).splitlines()[0]}, status=400) diff --git a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py b/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py index 603ea876f01b46..486f783f1477fd 100644 --- a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py +++ b/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py @@ -1,6 +1,7 @@ from collections import namedtuple from datetime import datetime, timedelta, timezone +import orjson from django.conf import settings from rest_framework.request import Request from 
rest_framework.response import Response @@ -13,7 +14,6 @@ from sentry.api.utils import get_date_range_from_params, handle_query_errors from sentry.net.http import connection_from_url from sentry.snuba.metrics_enhanced_performance import timeseries_query -from sentry.utils import json ads_connection_pool = connection_from_url( settings.ANOMALY_DETECTION_URL, @@ -31,10 +31,10 @@ def get_anomalies(snuba_io): response = ads_connection_pool.urlopen( "POST", "/anomaly/predict", - body=json.dumps(snuba_io), + body=orjson.dumps(snuba_io), headers={"content-type": "application/json;charset=utf-8"}, ) - return Response(json.loads(response.data), status=200) + return Response(orjson.loads(response.data), status=200) def get_time_params(start: datetime, end: datetime) -> MappedParams: diff --git a/src/sentry/api/endpoints/project_app_store_connect_credentials.py b/src/sentry/api/endpoints/project_app_store_connect_credentials.py index d5bd8eb1349fc7..9da1f8757b1d9a 100644 --- a/src/sentry/api/endpoints/project_app_store_connect_credentials.py +++ b/src/sentry/api/endpoints/project_app_store_connect_credentials.py @@ -1,3 +1,5 @@ +from typing import Any + from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -59,7 +61,6 @@ from sentry.ratelimits.config import RateLimitConfig from sentry.tasks.app_store_connect import dsym_download from sentry.types.ratelimit import RateLimit, RateLimitCategory -from sentry.utils import json from sentry.utils.appleconnect import appstore_connect logger = logging.getLogger(__name__) @@ -275,7 +276,7 @@ class AppStoreUpdateCredentialsSerializer(serializers.Serializer): def validate_appconnectPrivateKey( self, private_key_json: str | dict[str, bool] | None - ) -> json.JSONData | None: + ) -> Any | None: return validate_secret(private_key_json) diff --git a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py index 54f488530514b9..7ae6760a3ae8d4 100644 --- a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py +++ b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py @@ -2,6 +2,7 @@ import logging +import orjson import requests from django.conf import settings from rest_framework.response import Response @@ -12,7 +13,6 @@ from sentry.api.bases.project import ProjectEndpoint, ProjectPermission from sentry.api.helpers.repos import get_repos_from_project_code_mappings from sentry.models.project import Project -from sentry.utils import json logger = logging.getLogger(__name__) @@ -45,7 +45,7 @@ def post(self, request: Request, project: Project) -> Response: for repo in repos: response = requests.post( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/create", - data=json.dumps( + data=orjson.dumps( { "organization_id": project.organization.id, "project_id": project.id, diff --git a/src/sentry/api/endpoints/project_create_sample_transaction.py b/src/sentry/api/endpoints/project_create_sample_transaction.py index f3cc22bb91ab0c..81ba9dc5f8f4fa 100644 --- a/src/sentry/api/endpoints/project_create_sample_transaction.py +++ b/src/sentry/api/endpoints/project_create_sample_transaction.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta, timezone from uuid import uuid4 +import orjson from rest_framework import status from rest_framework.request import Request from rest_framework.response import Response @@ -12,7 +13,6 @@ from sentry.api.bases.project import 
ProjectEndpoint, ProjectEventPermission from sentry.api.serializers import serialize from sentry.constants import DATA_ROOT -from sentry.utils import json from sentry.utils.samples import create_sample_event_basic base_platforms_with_transactions = ["javascript", "python", "apple-ios"] @@ -84,8 +84,8 @@ def post(self, request: Request, project) -> Response: if expected_commonpath != os.path.commonpath([expected_commonpath, json_real_path]): return Response(status=status.HTTP_400_BAD_REQUEST) - with open(json_path) as fp: - data = json.load(fp) + with open(json_path, "rb") as fp: + data = orjson.loads(fp.read()) data = fix_event_data(data) event = create_sample_event_basic( diff --git a/src/sentry/api/endpoints/project_details.py b/src/sentry/api/endpoints/project_details.py index 7fd731567c1aa6..6c6750d1c36ddf 100644 --- a/src/sentry/api/endpoints/project_details.py +++ b/src/sentry/api/endpoints/project_details.py @@ -4,6 +4,7 @@ from datetime import timedelta from uuid import uuid4 +import orjson from django.db import IntegrityError, router, transaction from django.utils import timezone from drf_spectacular.utils import extend_schema, extend_schema_serializer @@ -48,7 +49,6 @@ from sentry.models.projectredirect import ProjectRedirect from sentry.models.scheduledeletion import RegionScheduledDeletion from sentry.notifications.utils import has_alert_integration -from sentry.utils import json logger = logging.getLogger(__name__) @@ -284,7 +284,7 @@ def validate_builtinSymbolSources(self, value): return value - def validate_symbolSources(self, sources_json): + def validate_symbolSources(self, sources_json) -> str: if not sources_json: return sources_json @@ -307,7 +307,7 @@ def validate_symbolSources(self, sources_json): # This is always allowed. added_or_modified_sources = [s for s in sources if s not in orig_sources] if not added_or_modified_sources: - return json.dumps(sources) if sources else "" + return orjson.dumps(sources).decode() if sources else "" # All modified sources should get a new UUID, as a way to invalidate caches. # Downstream symbolicator uses this ID as part of a cache key, so assigning @@ -321,7 +321,7 @@ def validate_symbolSources(self, sources_json): if source["type"] != "appStoreConnect": source["id"] = str(uuid4()) - sources_json = json.dumps(sources) if sources else "" + sources_json = orjson.dumps(sources).decode() if sources else "" # Adding sources is only allowed if custom symbol sources are enabled. 
        has_sources = features.has(
diff --git a/src/sentry/api/endpoints/project_profiling_profile.py b/src/sentry/api/endpoints/project_profiling_profile.py
index 73187d7e7469e1..309c6390a45d93 100644
--- a/src/sentry/api/endpoints/project_profiling_profile.py
+++ b/src/sentry/api/endpoints/project_profiling_profile.py
@@ -1,6 +1,7 @@
 from abc import ABC, abstractmethod
 from typing import Any
 
+import orjson
 from django.http import HttpResponse, HttpResponseRedirect
 from rest_framework import serializers
 from rest_framework.exceptions import ParseError
@@ -23,7 +24,6 @@
     parse_profile_filters,
     proxy_profiling_service,
 )
-from sentry.utils import json
 
 
 class ProjectProfilingBaseEndpoint(ProjectEndpoint):
@@ -96,7 +96,7 @@ def get(self, request: Request, project: Project, profile_id: str) -> HttpRespon
         )
         if response.status == 200:
-            profile = json.loads(response.data)
+            profile = orjson.loads(response.data)
 
             if "release" in profile:
                 profile["release"] = get_release(project, profile["release"])
@@ -186,7 +186,7 @@ def data_fn(offset: int, limit: int) -> Any:
                 **kwargs,
             )
 
-            data = json.loads(response.data)
+            data = orjson.loads(response.data)
             return data.get("functions", [])
diff --git a/src/sentry/api/endpoints/project_symbol_sources.py b/src/sentry/api/endpoints/project_symbol_sources.py
index 97a6a7878dbe15..0ac69a67c4b003 100644
--- a/src/sentry/api/endpoints/project_symbol_sources.py
+++ b/src/sentry/api/endpoints/project_symbol_sources.py
@@ -1,5 +1,6 @@
 from uuid import uuid4
 
+import orjson
 from drf_spectacular.utils import extend_schema
 from rest_framework import serializers
 from rest_framework.request import Request
@@ -29,7 +30,6 @@
     validate_sources,
 )
 from sentry.models.project import Project
-from sentry.utils import json
 
 
 class LayoutSerializer(serializers.Serializer):
@@ -302,7 +302,7 @@ def delete(self, request: Request, project: Project) -> Response:
         if len(filtered_sources) == len(sources):
             return Response(data={"error": f"Unknown source id: {id}"}, status=404)
 
-        serialized = json.dumps(filtered_sources)
+        serialized = orjson.dumps(filtered_sources).decode()
         project.update_option("sentry:symbol_sources", serialized)
 
         return Response(status=204)
@@ -341,7 +341,7 @@ def post(self, request: Request, project: Project) -> Response:
         except InvalidSourcesError:
             return Response(status=400)
 
-        serialized = json.dumps(sources)
+        serialized = orjson.dumps(sources).decode()
         project.update_option("sentry:symbol_sources", serialized)
 
         redacted = redact_source_secrets([source])
@@ -402,7 +402,7 @@ def put(self, request: Request, project: Project) -> Response:
         except InvalidSourcesError as e:
             return Response(data={"error": str(e)}, status=400)
 
-        serialized = json.dumps(sources)
+        serialized = orjson.dumps(sources).decode()
         project.update_option("sentry:symbol_sources", serialized)
 
         redacted = redact_source_secrets([source])
diff --git a/src/sentry/api/endpoints/relay/register_challenge.py b/src/sentry/api/endpoints/relay/register_challenge.py
index 558821427acff9..a96889474f92e4 100644
--- a/src/sentry/api/endpoints/relay/register_challenge.py
+++ b/src/sentry/api/endpoints/relay/register_challenge.py
@@ -1,3 +1,4 @@
+import orjson
 from django.conf import settings
 from rest_framework import serializers, status
 from rest_framework.request import Request
@@ -12,7 +13,6 @@
 from sentry.api.endpoints.relay.constants import RELAY_AUTH_RATE_LIMITS
 from sentry.api.serializers import serialize
 from sentry.relay.utils import get_header_relay_id, get_header_relay_signature
-from sentry.utils import json
 
 from . import RelayIdSerializer
 
@@ -42,8 +42,8 @@ def post(self, request: Request) -> Response:
         it will always attempt to invoke this endpoint.
         """
         try:
-            json_data = json.loads(request.body)
-        except ValueError:
+            json_data = orjson.loads(request.body)
+        except orjson.JSONDecodeError:
             return Response({"detail": "No valid json body"}, status=status.HTTP_400_BAD_REQUEST)
 
         serializer = RelayRegisterChallengeSerializer(data=json_data)
diff --git a/src/sentry/api/endpoints/relay/register_response.py b/src/sentry/api/endpoints/relay/register_response.py
index 7f052eaac9d7cf..0c46ea568ca283 100644
--- a/src/sentry/api/endpoints/relay/register_response.py
+++ b/src/sentry/api/endpoints/relay/register_response.py
@@ -1,3 +1,4 @@
+import orjson
 from django.utils import timezone
 from rest_framework import serializers, status
 from rest_framework.request import Request
@@ -14,7 +15,6 @@
 from sentry.api.serializers import serialize
 from sentry.models.relay import Relay, RelayUsage
 from sentry.relay.utils import get_header_relay_id, get_header_relay_signature
-from sentry.utils import json
 
 from . import RelayIdSerializer
 
@@ -45,8 +45,8 @@ def post(self, request: Request) -> Response:
         """
 
         try:
-            json_data = json.loads(request.body)
-        except ValueError:
+            json_data = orjson.loads(request.body)
+        except orjson.JSONDecodeError:
             return Response({"detail": "No valid json body"}, status=status.HTTP_400_BAD_REQUEST)
 
         serializer = RelayRegisterResponseSerializer(data=json_data)
diff --git a/src/sentry/api/endpoints/relocations/artifacts/details.py b/src/sentry/api/endpoints/relocations/artifacts/details.py
index 832df062ef498d..40bb6a2c9537d8 100644
--- a/src/sentry/api/endpoints/relocations/artifacts/details.py
+++ b/src/sentry/api/endpoints/relocations/artifacts/details.py
@@ -1,5 +1,7 @@
 import logging
+from typing import Any
 
+import orjson
 from cryptography.fernet import Fernet
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.request import Request
@@ -13,13 +15,13 @@
 from sentry.auth.elevated_mode import has_elevated_mode
 from sentry.auth.staff import has_staff_option
 from sentry.backup.crypto import (
+    CryptoKeyVersion,
     GCPKMSDecryptor,
     get_default_crypto_key_version,
     unwrap_encrypted_export_tarball,
 )
 from sentry.models.files.utils import get_relocation_storage
 from sentry.models.relocation import Relocation
-from sentry.utils import json
 
 ERR_NEED_RELOCATION_ADMIN = (
     "Cannot view relocation artifacts, as you do not have the appropriate permissions."
@@ -28,6 +30,12 @@
 logger = logging.getLogger(__name__)
 
 
+def _orjson_default(obj: Any) -> Any:
+    if isinstance(obj, CryptoKeyVersion):
+        return obj._asdict()
+    raise TypeError
+
+
 @region_silo_endpoint
 class RelocationArtifactDetailsEndpoint(Endpoint):
     owner = ApiOwner.OPEN_SOURCE
@@ -83,10 +91,10 @@ def get(
             unwrapped = unwrap_encrypted_export_tarball(fp)
             decryptor = GCPKMSDecryptor.from_bytes(
-                json.dumps(get_default_crypto_key_version()).encode("utf-8")
+                orjson.dumps(get_default_crypto_key_version(), default=_orjson_default)
             )
             plaintext_data_encryption_key = decryptor.decrypt_data_encryption_key(unwrapped)
             fernet = Fernet(plaintext_data_encryption_key)
 
         return self.respond(
-            {"contents": fernet.decrypt(unwrapped.encrypted_json_blob).decode("utf-8")}
+            {"contents": fernet.decrypt(unwrapped.encrypted_json_blob).decode()}
         )
diff --git a/src/sentry/api/endpoints/seer_rpc.py b/src/sentry/api/endpoints/seer_rpc.py
index e2fbe7a2393680..b8423c89bbb70f 100644
--- a/src/sentry/api/endpoints/seer_rpc.py
+++ b/src/sentry/api/endpoints/seer_rpc.py
@@ -2,6 +2,7 @@
 import hmac
 from typing import Any
 
+import orjson
 from django.conf import settings
 from django.contrib.auth.models import AnonymousUser
 from django.core.exceptions import ObjectDoesNotExist
@@ -24,7 +25,6 @@
 from sentry.services.hybrid_cloud.rpc import RpcAuthenticationSetupException, RpcResolutionException
 from sentry.services.hybrid_cloud.sig import SerializableFunctionValueException
 from sentry.silo.base import SiloMode
-from sentry.utils import json
 from sentry.utils.env import in_test_environment
 
 
@@ -44,16 +44,16 @@ def compare_signature(url: str, body: bytes, signature: str) -> bool:
         return False
 
     # We aren't using the version bits currently.
-    body = json.dumps(json.loads(body.decode("utf8"))).encode("utf8")
+    body = orjson.dumps(orjson.loads(body))
     _, signature_data = signature.split(":", 2)
 
     signature_input = b"%s:%s" % (
-        url.encode("utf8"),
+        url.encode(),
        body,
    )
 
    for key in settings.SEER_RPC_SHARED_SECRET:
-        computed = hmac.new(key.encode("utf-8"), signature_input, hashlib.sha256).hexdigest()
-        is_valid = hmac.compare_digest(computed.encode("utf-8"), signature_data.encode("utf-8"))
+        computed = hmac.new(key.encode(), signature_input, hashlib.sha256).hexdigest()
+        is_valid = hmac.compare_digest(computed.encode(), signature_data.encode())
         if is_valid:
             return True
diff --git a/src/sentry/api/fields/secret.py b/src/sentry/api/fields/secret.py
index d7ccb29745bceb..dbb32130394be4 100644
--- a/src/sentry/api/fields/secret.py
+++ b/src/sentry/api/fields/secret.py
@@ -1,7 +1,5 @@
 from rest_framework import serializers
 
-from sentry.utils import json
-
 
 class SecretField(serializers.Field):
     """
@@ -30,7 +28,7 @@ def to_internal_value(self, data):
         return self.string_field.to_internal_value(data)
 
 
-def validate_secret(secret: str | dict[str, bool] | None) -> json.JSONData | None:
+def validate_secret(secret: str | dict[str, bool] | None) -> str | dict[str, bool] | None:
     """
     Validates the contents of a field containing a secret that may have a
     magic object representing some existing value already stored on the server.
diff --git a/src/sentry/api/helpers/autofix.py b/src/sentry/api/helpers/autofix.py
index 4db7b86940cdeb..5468fd884ab920 100644
--- a/src/sentry/api/helpers/autofix.py
+++ b/src/sentry/api/helpers/autofix.py
@@ -1,10 +1,10 @@
 import enum
 
+import orjson
 import requests
 from django.conf import settings
 
 from sentry.api.helpers.repos import get_repos_from_project_code_mappings
-from sentry.utils import json
 
 
 class AutofixCodebaseIndexingStatus(str, enum.Enum):
@@ -23,7 +23,7 @@ def get_project_codebase_indexing_status(project):
     for repo in repos:
         response = requests.post(
             f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status",
-            data=json.dumps(
+            data=orjson.dumps(
                 {
                     "organization_id": project.organization.id,
                     "project_id": project.id,
diff --git a/src/sentry/api/serializers/models/app_platform_event.py b/src/sentry/api/serializers/models/app_platform_event.py
index 23b23aa9d43512..1f9c8d2fa98110 100644
--- a/src/sentry/api/serializers/models/app_platform_event.py
+++ b/src/sentry/api/serializers/models/app_platform_event.py
@@ -1,7 +1,7 @@
 from time import time
 from uuid import uuid4
 
-from sentry.utils import json
+import orjson
 
 
 class AppPlatformEvent:
@@ -33,14 +33,14 @@ def get_actor(self):
 
     @property
     def body(self):
-        return json.dumps(
+        return orjson.dumps(
             {
                 "action": self.action,
                 "installation": {"uuid": self.install.uuid},
                 "data": self.data,
                 "actor": self.get_actor(),
             }
-        )
+        ).decode()
 
     @property
     def headers(self):
diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py
index 0ea80b6b7b266f..e5bba9ed508db9 100644
--- a/src/sentry/api/serializers/models/dashboard.py
+++ b/src/sentry/api/serializers/models/dashboard.py
@@ -1,5 +1,7 @@
 from collections import defaultdict
 
+import orjson
+
 from sentry.api.serializers import Serializer, register, serialize
 from sentry.constants import ALL_ACCESS_PROJECTS
 from sentry.models.dashboard import Dashboard
@@ -12,7 +14,6 @@
 )
 from sentry.services.hybrid_cloud.user.service import user_service
 from sentry.snuba.metrics.extraction import OnDemandMetricSpecVersioning
-from sentry.utils import json
 from sentry.utils.dates import outside_retention_with_modified_start, parse_timestamp
 
 
@@ -128,7 +129,7 @@ def get_attrs(self, item_list, user):
                 "layout": None,
             }
             if widget.get("detail"):
-                detail = json.loads(widget["detail"])
+                detail = orjson.loads(widget["detail"])
                 if detail.get("layout"):
                     widget_preview["layout"] = detail["layout"]
diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py
index c9eca7598cd7f5..3d9a2c45bb651d 100644
--- a/src/sentry/api/serializers/models/project.py
+++ b/src/sentry/api/serializers/models/project.py
@@ -5,6 +5,7 @@
 from datetime import datetime, timedelta
 from typing import Any, Final, TypedDict, cast
 
+import orjson
 import sentry_sdk
 from django.db import connection
 from django.db.models import prefetch_related_objects
@@ -41,7 +42,6 @@
 from sentry.roles import organization_roles
 from sentry.snuba import discover
 from sentry.tasks.symbolication import should_demote_symbolication
-from sentry.utils import json
 
 STATUS_LABELS = {
     ObjectStatus.ACTIVE: "active",
@@ -1013,7 +1013,7 @@ def serialize(
             serialized_sources = "[]"
         else:
             redacted_sources = redact_source_secrets(sources)
-            serialized_sources = json.dumps(redacted_sources)
+            serialized_sources = orjson.dumps(redacted_sources).decode()
 
         data.update(
             {
diff --git a/src/sentry/api/serializers/rest_framework/json.py b/src/sentry/api/serializers/rest_framework/json.py
index 52becf4e1a645a..9a00e75d3a5fd9 100644
--- a/src/sentry/api/serializers/rest_framework/json.py
+++ b/src/sentry/api/serializers/rest_framework/json.py
@@ -1,8 +1,7 @@
+import orjson
 from django.utils.translation import gettext_lazy as _
 from rest_framework.serializers import Field, ValidationError
 
-from sentry.utils import json
-
 # JSONField taken from Django rest framework version 3.9.0
 # See https://github.com/encode/django-rest-framework/blob/0eb2dc1137189027cc8d638630fb1754b02d6cfa/rest_framework/fields.py
 # or https://www.django-rest-framework.org/api-guide/fields/#jsonfield
@@ -14,7 +13,7 @@ class JSONField(Field):
 
     def to_internal_value(self, data):
         try:
-            json.dumps(data)
-        except (TypeError, ValueError):
+            orjson.dumps(data)
+        except (TypeError, ValueError, orjson.JSONEncodeError):
             raise ValidationError(self.default_error_messages["invalid"])
         return data
diff --git a/src/sentry/api/serializers/rest_framework/rule.py b/src/sentry/api/serializers/rest_framework/rule.py
index 045645e3162918..3f589bce450596 100644
--- a/src/sentry/api/serializers/rest_framework/rule.py
+++ b/src/sentry/api/serializers/rest_framework/rule.py
@@ -1,6 +1,7 @@
 from typing import Any
 from uuid import UUID, uuid4
 
+import orjson
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import extend_schema_field
 from rest_framework import serializers
@@ -10,7 +11,6 @@
 from sentry.constants import MIGRATED_CONDITIONS, SENTRY_APP_ACTIONS, TICKET_ACTIONS
 from sentry.models.environment import Environment
 from sentry.rules import rules
-from sentry.utils import json
 
 ValidationError = serializers.ValidationError
 
@@ -27,7 +27,7 @@ def to_representation(self, value):
     def to_internal_value(self, data):
         if isinstance(data, str):
             try:
-                data = json.loads(data.replace("'", '"'))
+                data = orjson.loads(data.replace("'", '"'))
             except Exception:
                 raise ValidationError("Failed trying to parse dict from string")
         elif not isinstance(data, dict):
diff --git a/src/sentry/api/validators/sentry_apps/schema.py b/src/sentry/api/validators/sentry_apps/schema.py
index 826897d734618e..8713530ef74131 100644
--- a/src/sentry/api/validators/sentry_apps/schema.py
+++ b/src/sentry/api/validators/sentry_apps/schema.py
@@ -1,11 +1,10 @@
 import logging
 
+import orjson
 from jsonschema import Draft7Validator
 from jsonschema.exceptions import ValidationError as SchemaValidationError
 from jsonschema.exceptions import best_match
 
-from sentry.utils import json
-
 logger = logging.getLogger(__name__)
 
 SCHEMA = {
@@ -277,7 +276,7 @@ def validate_ui_element_schema(instance):
             "Unexpected error validating schema: %s",
             e,
             exc_info=True,
-            extra={"schema": json.dumps(instance)},
+            extra={"schema": orjson.dumps(instance).decode()},
         )
         # pre-validators might have unexpected errors if the format is not what they expect in the check
         # if that happens, we should eat the error and let the main validator find the schema error
diff --git a/tests/sentry/api/endpoints/notifications/test_notification_actions_details.py b/tests/sentry/api/endpoints/notifications/test_notification_actions_details.py
index c9c2960675b980..a66320ee4a7ef4 100644
--- a/tests/sentry/api/endpoints/notifications/test_notification_actions_details.py
+++ b/tests/sentry/api/endpoints/notifications/test_notification_actions_details.py
@@ -1,5 +1,6 @@
 from unittest.mock import MagicMock, patch
 
+import orjson
 import responses
 from rest_framework import serializers, status
 
@@ -18,7 +19,6 @@
 from sentry.testutils.cases import APITestCase
 from sentry.testutils.helpers.slack import install_slack
 from sentry.testutils.silo import assume_test_silo_mode
-from sentry.utils import json
 
 
 class NotificationActionsDetailsEndpointTest(APITestCase):
@@ -238,7 +238,7 @@ class MockActionRegistration(ActionRegistration):
             url="https://slack.com/api/chat.scheduleMessage",
             status=200,
             content_type="application/json",
-            body=json.dumps(
+            body=orjson.dumps(
                 {"ok": "true", "channel": channel_id, "scheduled_message_id": "Q1298393284"}
             ),
         )
@@ -247,7 +247,7 @@ class MockActionRegistration(ActionRegistration):
             url="https://slack.com/api/chat.deleteScheduledMessage",
             status=200,
             content_type="application/json",
-            body=json.dumps({"ok": True}),
+            body=orjson.dumps({"ok": True}),
         )
 
         response = self.get_success_response(
diff --git a/tests/sentry/api/endpoints/notifications/test_notification_actions_index.py b/tests/sentry/api/endpoints/notifications/test_notification_actions_index.py
index 143819666ca6b4..30d5a6b863a81e 100644
--- a/tests/sentry/api/endpoints/notifications/test_notification_actions_index.py
+++ b/tests/sentry/api/endpoints/notifications/test_notification_actions_index.py
@@ -1,5 +1,6 @@
 from unittest.mock import MagicMock, patch
 
+import orjson
 import responses
 from rest_framework import serializers, status
 
@@ -18,7 +19,6 @@
 from sentry.testutils.cases import APITestCase
 from sentry.testutils.helpers.slack import install_slack
 from sentry.testutils.silo import assume_test_silo_mode
-from sentry.utils import json
 
 
 class NotificationActionsIndexEndpointTest(APITestCase):
@@ -286,7 +286,7 @@ class MockActionRegistration(ActionRegistration):
             url="https://slack.com/api/chat.scheduleMessage",
             status=200,
             content_type="application/json",
-            body=json.dumps(
+            body=orjson.dumps(
                 {"ok": "true", "channel": channel_id, "scheduled_message_id": "Q1298393284"}
             ),
         )
@@ -295,7 +295,7 @@ class MockActionRegistration(ActionRegistration):
             url="https://slack.com/api/chat.deleteScheduledMessage",
             status=200,
             content_type="application/json",
-            body=json.dumps({"ok": True}),
+            body=orjson.dumps({"ok": True}),
         )
         response = self.get_success_response(
             self.organization.slug,
diff --git a/tests/sentry/api/endpoints/relocations/test_index.py b/tests/sentry/api/endpoints/relocations/test_index.py
index d5499769f0ea07..e9ae221166ecad 100644
--- a/tests/sentry/api/endpoints/relocations/test_index.py
+++ b/tests/sentry/api/endpoints/relocations/test_index.py
@@ -4,6 +4,7 @@
 from unittest.mock import Mock, call, patch
 from uuid import UUID
 
+import orjson
 from django.core.files.uploadedfile import SimpleUploadedFile
 from rest_framework import status
 
@@ -23,7 +24,6 @@
 from sentry.testutils.helpers.backups import generate_rsa_key_pair
 from sentry.testutils.helpers.datetime import freeze_time
 from sentry.testutils.helpers.options import override_options
-from sentry.utils import json
 from sentry.utils.relocation import OrderedTask
 
 FRESH_INSTALL_PATH = get_fixture_path("backup", "fresh-install.json")
@@ -310,8 +310,8 @@ def test_good_simple(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -372,8 +372,8 @@ def test_good_promo_code(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -439,8 +439,8 @@ def test_good_with_valid_autopause_option(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -493,8 +493,8 @@ def test_good_with_invalid_autopause_option(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -544,8 +544,8 @@ def test_good_staff_when_feature_disabled(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -605,8 +605,8 @@ def test_good_superuser_when_feature_disabled(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -658,8 +658,8 @@ def test_bad_without_superuser_when_feature_disabled(
         self.login_as(user=self.owner, superuser=False)
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         owner=self.owner.username,
@@ -684,8 +684,8 @@ def test_bad_expired_superuser_when_feature_disabled(
        self.login_as(user=self.owner, superuser=True)
        with tempfile.TemporaryDirectory() as tmp_dir:
            (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                with open(tmp_pub_key_path, "rb") as p:
                    response = self.get_error_response(
                        owner=self.owner.username,
@@ -730,8 +730,8 @@ def test_good_valid_org_slugs(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -790,8 +790,8 @@ def test_bad_invalid_org_slugs(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         owner=self.owner.username,
@@ -841,8 +841,8 @@ def test_good_relocation_for_same_owner_already_completed(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_success_response(
                         owner=self.owner.username,
@@ -899,8 +899,8 @@ def test_bad_missing_orgs(
         self.login_as(user=self.owner, superuser=False)
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         owner=self.owner.username,
@@ -927,8 +927,8 @@ def test_bad_missing_owner(
         self.login_as(user=self.owner, superuser=False)
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         file=SimpleUploadedFile(
@@ -957,8 +957,8 @@ def test_bad_staff_nonexistent_owner(
         self.login_as(user=self.staff_user, staff=True)
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         owner="doesnotexist",
@@ -988,8 +988,8 @@ def test_bad_superuser_nonexistent_owner(
         self.login_as(user=self.superuser, superuser=True)
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         owner="doesnotexist",
@@ -1019,8 +1019,8 @@ def test_bad_owner_not_self(
         self.login_as(user=self.owner, superuser=False)
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     response = self.get_error_response(
                         owner="other",
@@ -1064,8 +1064,8 @@ def test_bad_relocation_for_same_owner_already_active(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     simple_file = SimpleUploadedFile(
                         "export.tar",
@@ -1095,8 +1095,8 @@ def test_bad_throttle_if_daily_limit_reached(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     initial_response = self.get_success_response(
                         owner=self.owner.username,
@@ -1163,8 +1163,8 @@ def test_good_no_throttle_for_staff(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     initial_response = self.get_success_response(
                         owner=self.owner.username,
@@ -1246,8 +1246,8 @@ def test_good_no_throttle_for_superuser(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     initial_response = self.get_success_response(
                         owner=self.owner.username,
@@ -1335,8 +1335,8 @@ def test_good_no_throttle_different_bucket_relocations(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     initial_response = self.get_success_response(
                         owner=self.owner.username,
@@ -1420,7 +1420,7 @@ def test_good_no_throttle_relocation_over_multiple_days(
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
             with open(FRESH_INSTALL_PATH) as f, freeze_time("2023-11-28 00:00:00") as frozen_time:
-                data = json.load(f)
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     initial_response = self.get_success_response(
                         owner=self.owner.username,
@@ -1506,8 +1506,8 @@ def test_bad_no_auth(
 
         with tempfile.TemporaryDirectory() as tmp_dir:
             (_, tmp_pub_key_path) = self.tmp_keys(tmp_dir)
-            with open(FRESH_INSTALL_PATH) as f:
-                data = json.load(f)
+            with open(FRESH_INSTALL_PATH, "rb") as f:
+                data = orjson.loads(f.read())
                 with open(tmp_pub_key_path, "rb") as p:
                     self.get_error_response(
                         owner=self.owner.username,
diff --git a/tests/sentry/api/endpoints/relocations/test_retry.py b/tests/sentry/api/endpoints/relocations/test_retry.py
index 47a4f4e91f4551..f7615fb0f9d10a 100644
--- a/tests/sentry/api/endpoints/relocations/test_retry.py
+++ b/tests/sentry/api/endpoints/relocations/test_retry.py
@@ -4,6 +4,8 @@
 from unittest.mock import Mock, patch
 from uuid import uuid4
 
+import orjson
+
 from sentry.api.endpoints.relocations import ERR_FEATURE_DISABLED
 from sentry.api.endpoints.relocations.index import (
     ERR_DUPLICATE_RELOCATION,
@@ -24,7 +26,6 @@
 from sentry.testutils.helpers.backups import generate_rsa_key_pair
 from sentry.testutils.helpers.options import override_options
 from sentry.testutils.silo import assume_test_silo_mode
-from sentry.utils import json
 from sentry.utils.relocation import RELOCATION_FILE_TYPE, OrderedTask
 
 FRESH_INSTALL_PATH = get_fixture_path("backup", "fresh-install.json")
@@ -35,8 +36,8 @@
 @lru_cache(maxsize=1)
 def get_test_tarball() -> BytesIO:
     (_, pub_key_pem) = generate_rsa_key_pair()
-    with open(FRESH_INSTALL_PATH) as f:
-        data = json.load(f)
+    with open(FRESH_INSTALL_PATH, "rb") as f:
+        data = orjson.loads(f.read())
 
     return create_encrypted_export_tarball(data, LocalFileEncryptor(BytesIO(pub_key_pem)))
diff --git a/tests/sentry/api/endpoints/test_event_grouping_info.py b/tests/sentry/api/endpoints/test_event_grouping_info.py
index dd13ac767a4449..fe9259f261fee9 100644
--- a/tests/sentry/api/endpoints/test_event_grouping_info.py
+++ b/tests/sentry/api/endpoints/test_event_grouping_info.py
@@ -1,5 +1,6 @@
 from unittest import mock
 
+import orjson
 import pytest
 from django.urls import reverse
 
@@ -7,7 +8,6 @@
 from sentry.grouping.grouping_info import get_grouping_info
 from sentry.testutils.cases import APITestCase, PerformanceIssueTestCase
 from sentry.testutils.skips import requires_snuba
-from sentry.utils import json
 from sentry.utils.samples import load_data
 
 pytestmark = [requires_snuba]
@@ -39,7 +39,7 @@ def test_error_event(self):
         )
         response = self.client.get(url, format="json")
 
-        content = json.loads(response.content)
+        content = orjson.loads(response.content)
 
         assert response.status_code == 200
         assert content["system"]["type"] == "component"
@@ -58,7 +58,7 @@ def test_transaction_event(self):
         )
         response = self.client.get(url, format="json")
 
-        content = json.loads(response.content)
+        content = orjson.loads(response.content)
 
         assert response.status_code == 200
         assert content == {}
@@ -76,7 +76,7 @@ def test_transaction_event_with_problem(self):
         )
         response = self.client.get(url, format="json")
 
-        content = json.loads(response.content)
+        content = orjson.loads(response.content)
 
         assert response.status_code == 200
         assert content["performance_n_plus_one_db_queries"]["type"] == "performance-problem"
diff --git a/tests/sentry/api/endpoints/test_group_autofix_update.py b/tests/sentry/api/endpoints/test_group_autofix_update.py
index 11f59c945139d3..77167dd8d4b103 100644
--- a/tests/sentry/api/endpoints/test_group_autofix_update.py
+++ b/tests/sentry/api/endpoints/test_group_autofix_update.py
@@ -1,10 +1,10 @@
 from unittest.mock import patch
 
+import orjson
 from django.conf import settings
 from rest_framework import status
 
 from sentry.testutils.cases import APITestCase
-from sentry.utils import json
 
 
 class TestGroupAutofixUpdate(APITestCase):
@@ -35,7 +35,7 @@ def test_autofix_update_successful(self, mock_post):
         assert response.status_code == status.HTTP_202_ACCEPTED
         mock_post.assert_called_once_with(
             f"{settings.SEER_AUTOFIX_URL}/v1/automation/autofix/update",
-            data=json.dumps(
+            data=orjson.dumps(
                 {
                     "run_id": 123,
                     "payload": {
@@ -44,7 +44,7 @@ def test_autofix_update_successful(self, mock_post):
                         "fix_id": 789,
                     },
                 }
-            ).encode("utf-8"),
+            ),
             headers={"content-type": "application/json;charset=utf-8"},
         )
 
@@ -54,7 +54,7 @@ def test_autofix_update_failure(self, mock_post):
 
         response = self.client.post(
             self.url,
-            data=json.dumps(
+            data=orjson.dumps(
                 {
                     "run_id": 123,
                     "payload": {
@@ -63,7 +63,7 @@ def test_autofix_update_failure(self, mock_post):
                         "fix_id": 789,
                     },
                 }
-            ).encode("utf-8"),
+            ),
             format="json",
         )
diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py
index 20062b81aac95e..65714ae5ce85dd 100644
--- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py
+++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py
@@ -3,6 +3,7 @@
 from typing import Any
 from unittest import mock
 
+import orjson
 from urllib3.response import HTTPResponse
 
 from sentry.api.endpoints.group_similar_issues_embeddings import (
@@ -15,7 +16,6 @@
 from sentry.testutils.cases import APITestCase
 from sentry.testutils.helpers.eventprocessing import save_new_event
 from sentry.testutils.helpers.features import with_feature
-from sentry.utils import json
 from sentry.utils.types import NonNone
 
 EXPECTED_STACKTRACE_STRING = 'ZeroDivisionError: division by zero\n File "python_onboarding.py", function divide_by_zero\n divide = 1/0'
@@ -707,7 +707,7 @@ def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request):
                 }
             ]
         }
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         response = self.client.get(
             self.path,
@@ -731,7 +731,7 @@ def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request):
         mock_seer_request.assert_called_with(
             "POST",
             "/v0/issues/similar-issues",
-            body=json.dumps(expected_seer_request_params),
+            body=orjson.dumps(expected_seer_request_params).decode(),
             headers={"Content-Type": "application/json;charset=utf-8"},
         )
 
@@ -754,7 +754,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request):
                 }
             ]
         }
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         response = self.client.get(
             self.path,
@@ -778,7 +778,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request):
         mock_seer_request.assert_called_with(
             "POST",
             "/v0/issues/similar-issues",
-            body=json.dumps(expected_seer_request_params),
+            body=orjson.dumps(expected_seer_request_params).decode(),
             headers={"Content-Type": "application/json;charset=utf-8"},
         )
 
@@ -803,7 +803,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request)
                 }
             ]
         }
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         response = self.client.get(
             self.path,
@@ -827,7 +827,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request)
         mock_seer_request.assert_called_with(
             "POST",
             "/v0/issues/similar-issues",
-            body=json.dumps(expected_seer_request_params),
+            body=orjson.dumps(expected_seer_request_params).decode(),
             headers={"Content-Type": "application/json;charset=utf-8"},
         )
 
@@ -868,7 +868,7 @@ def test_multiple(self, mock_seer_request, mock_record):
                 },
             ]
         }
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         response = self.client.get(
             self.path,
@@ -919,7 +919,7 @@ def test_incomplete_return_data(self, mock_seer_request, mock_logger):
                 },
             ]
         }
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         response = self.client.get(self.path)
         mock_logger.exception.assert_called_with(
@@ -971,7 +971,7 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger):
                 },
             ]
         }
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         response = self.client.get(self.path)
         mock_logger.exception.assert_called_with(
@@ -1086,7 +1086,7 @@ def test_no_optional_params(self, mock_seer_request):
             ]
         }
 
-        mock_seer_request.return_value = HTTPResponse(json.dumps(seer_return_value).encode("utf-8"))
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value))
 
         # Include no optional parameters
         response = self.client.get(self.path)
@@ -1097,7 +1097,7 @@ def test_no_optional_params(self, mock_seer_request):
         mock_seer_request.assert_called_with(
             "POST",
             "/v0/issues/similar-issues",
-            body=json.dumps(
+            body=orjson.dumps(
                 {
                     "group_id": self.group.id,
                     "hash": NonNone(self.event.get_primary_hash()),
@@ -1105,7 +1105,7 @@ def test_no_optional_params(self, mock_seer_request):
                     "stacktrace": EXPECTED_STACKTRACE_STRING,
                     "message": self.group.message,
                 },
-            ),
+            ).decode(),
             headers={"Content-Type": "application/json;charset=utf-8"},
         )
 
@@ -1121,7 +1121,7 @@ def test_no_optional_params(self, mock_seer_request):
mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", - body=json.dumps( + body=orjson.dumps( { "group_id": self.group.id, "hash": NonNone(self.event.get_primary_hash()), @@ -1130,7 +1130,7 @@ def test_no_optional_params(self, mock_seer_request): "message": self.group.message, "k": 1, }, - ), + ).decode(), headers={"Content-Type": "application/json;charset=utf-8"}, ) @@ -1146,7 +1146,7 @@ def test_no_optional_params(self, mock_seer_request): mock_seer_request.assert_called_with( "POST", "/v0/issues/similar-issues", - body=json.dumps( + body=orjson.dumps( { "group_id": self.group.id, "hash": NonNone(self.event.get_primary_hash()), @@ -1155,6 +1155,6 @@ def test_no_optional_params(self, mock_seer_request): "message": self.group.message, "threshold": 0.98, }, - ), + ).decode(), headers={"Content-Type": "application/json;charset=utf-8"}, ) diff --git a/tests/sentry/api/endpoints/test_organization_details.py b/tests/sentry/api/endpoints/test_organization_details.py index fedfe2a0ecc0ca..989a58284feb61 100644 --- a/tests/sentry/api/endpoints/test_organization_details.py +++ b/tests/sentry/api/endpoints/test_organization_details.py @@ -6,6 +6,7 @@ from typing import Any from unittest.mock import patch +import orjson import pytest import responses from dateutil.parser import parse as parse_date @@ -40,7 +41,6 @@ from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode_of, create_test_regions, region_silo_test from sentry.testutils.skips import requires_snuba -from sentry.utils import json pytestmark = [requires_snuba] @@ -556,7 +556,7 @@ def test_setting_duplicate_trusted_keys(self): response_data = response.data.get("trustedRelays") assert response_data is not None - resp_str = json.dumps(response_data) + resp_str = orjson.dumps(response_data).decode() # check that we have the duplicate key specified somewhere in the error message assert resp_str.find(_VALID_RELAY_KEYS[0]) >= 0 diff --git a/tests/sentry/api/endpoints/test_organization_invite_request_index.py b/tests/sentry/api/endpoints/test_organization_invite_request_index.py index dfce5379f0c889..5f4855256289eb 100644 --- a/tests/sentry/api/endpoints/test_organization_invite_request_index.py +++ b/tests/sentry/api/endpoints/test_organization_invite_request_index.py @@ -1,6 +1,7 @@ from functools import cached_property from urllib.parse import parse_qs, urlparse +import orjson import responses from django.core import mail from django.urls import reverse @@ -12,7 +13,6 @@ from sentry.testutils.helpers.slack import get_blocks_and_fallback_text from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner -from sentry.utils import json class OrganizationInviteRequestListTest(APITestCase): @@ -234,7 +234,7 @@ def test_request_to_invite_slack(self): ) member = OrganizationMember.objects.get(email="eric@localhost") data = parse_qs(responses.calls[0].request.body) - assert json.loads(data["callback_id"][0]) == { + assert orjson.loads(data["callback_id"][0]) == { "member_id": member.id, "member_email": "eric@localhost", } diff --git a/tests/sentry/api/endpoints/test_organization_join_request.py b/tests/sentry/api/endpoints/test_organization_join_request.py index b33891e3c01f46..b148f55d9fd52d 100644 --- a/tests/sentry/api/endpoints/test_organization_join_request.py +++ b/tests/sentry/api/endpoints/test_organization_join_request.py @@ -2,6 +2,7 @@ from unittest.mock import patch from urllib.parse import parse_qs, urlparse +import 
orjson import responses from django.core import mail @@ -15,7 +16,6 @@ from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode -from sentry.utils import json class OrganizationJoinRequestTest(APITestCase, SlackActivityNotificationTest, HybridCloudTestMixin): @@ -212,7 +212,7 @@ def test_request_to_join_slack(self): with outbox_runner(): member = OrganizationMember.objects.get(email=self.email) - assert json.loads(data["callback_id"][0]) == { + assert orjson.loads(data["callback_id"][0]) == { "member_id": member.id, "member_email": self.email, } diff --git a/tests/sentry/api/endpoints/test_organization_metrics_code_locations.py b/tests/sentry/api/endpoints/test_organization_metrics_code_locations.py index b8d77080c58a9a..c7707a040e0b88 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics_code_locations.py +++ b/tests/sentry/api/endpoints/test_organization_metrics_code_locations.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta from unittest.mock import patch +import orjson import pytest from django.utils import timezone @@ -13,7 +14,6 @@ from sentry.sentry_metrics.querying.utils import get_redis_client_for_metrics_meta from sentry.testutils.cases import APITestCase, BaseSpansTestCase from sentry.testutils.helpers.datetime import freeze_time -from sentry.utils import json pytestmark = pytest.mark.sentry_metrics @@ -48,7 +48,7 @@ def _mock_code_location( if post_context is not None: code_location["post_context"] = post_context - return json.dumps(code_location) + return orjson.dumps(code_location).decode() def _store_code_location( self, organization_id: int, project_id: int, metric_mri: str, timestamp: int, value: str diff --git a/tests/sentry/api/endpoints/test_organization_release_meta.py b/tests/sentry/api/endpoints/test_organization_release_meta.py index 700fe92fe520d5..c2a88091e20b85 100644 --- a/tests/sentry/api/endpoints/test_organization_release_meta.py +++ b/tests/sentry/api/endpoints/test_organization_release_meta.py @@ -1,3 +1,4 @@ +import orjson from django.urls import reverse from sentry.models.artifactbundle import ProjectArtifactBundle, ReleaseArtifactBundle @@ -9,7 +10,6 @@ from sentry.models.releasefile import ReleaseFile from sentry.models.repository import Repository from sentry.testutils.cases import APITestCase -from sentry.utils import json class ReleaseMetaTest(APITestCase): @@ -76,7 +76,7 @@ def test_multiple_projects(self): assert response.status_code == 200, response.content - data = json.loads(response.content) + data = orjson.loads(response.content) assert data["deployCount"] == 1 assert data["commitCount"] == 2 assert data["newGroups"] == 42 @@ -110,7 +110,7 @@ def test_artifact_count_without_weak_association(self): assert response.status_code == 200, response.content - data = json.loads(response.content) + data = orjson.loads(response.content) assert data["releaseFileCount"] == 2 assert not data["isArtifactBundle"] @@ -148,7 +148,7 @@ def test_artifact_count_with_single_weak_association(self): assert response.status_code == 200, response.content - data = json.loads(response.content) + data = orjson.loads(response.content) assert data["releaseFileCount"] == 10 assert data["isArtifactBundle"] @@ -199,6 +199,6 @@ def test_artifact_count_with_multiple_weak_association(self): assert response.status_code == 200, response.content - data = json.loads(response.content) + data = orjson.loads(response.content) assert 
data["releaseFileCount"] == 40 assert data["isArtifactBundle"] diff --git a/tests/sentry/api/endpoints/test_organization_sentry_apps.py b/tests/sentry/api/endpoints/test_organization_sentry_apps.py index d53726b8021bae..b258fe40dca262 100644 --- a/tests/sentry/api/endpoints/test_organization_sentry_apps.py +++ b/tests/sentry/api/endpoints/test_organization_sentry_apps.py @@ -1,16 +1,16 @@ +import orjson from django.urls import reverse from sentry.models.integrations.sentry_app import SentryApp from sentry.testutils.cases import APITestCase from sentry.testutils.silo import control_silo_test -from sentry.utils import json def assert_response_json(response, data): """ Normalizes unicode strings by encoding/decoding expected output """ - assert json.loads(response.content) == json.loads(json.dumps(data)) + assert orjson.loads(response.content) == orjson.loads(orjson.dumps(data)) class OrganizationSentryAppsTest(APITestCase): diff --git a/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py b/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py index e42d3188eb879b..7d068155823679 100644 --- a/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py +++ b/tests/sentry/api/endpoints/test_project_app_store_connect_credentials.py @@ -1,5 +1,6 @@ from unittest.mock import patch +import orjson from django.test import override_settings from django.urls import reverse @@ -8,14 +9,13 @@ ) from sentry.lang.native.appconnect import AppStoreConnectConfig from sentry.testutils.cases import TestCase -from sentry.utils import json class TestAppStoreUpdateCredentialsSerializer: def test_validate_secrets_magic_object_true(self): payload_json = """{"appconnectPrivateKey": {"hidden-secret": true}}""" - payload = json.loads(payload_json) + payload = orjson.loads(payload_json) serializer = AppStoreUpdateCredentialsSerializer(data=payload) assert serializer.is_valid(), serializer.errors @@ -26,7 +26,7 @@ def test_validate_secrets_magic_object_true(self): def test_validate_secrets_magic_object_false(self): payload_json = """{"appconnectPrivateKey": {"hidden-secret": false}}""" - payload = json.loads(payload_json) + payload = orjson.loads(payload_json) serializer = AppStoreUpdateCredentialsSerializer(data=payload) assert not serializer.is_valid() @@ -35,7 +35,7 @@ def test_validate_secrets_magic_object_false(self): def test_validate_secrets_null(self): payload_json = """{"appconnectPrivateKey": null}""" - payload = json.loads(payload_json) + payload = orjson.loads(payload_json) serializer = AppStoreUpdateCredentialsSerializer(data=payload) assert not serializer.is_valid() @@ -48,7 +48,7 @@ def test_validate_secrets_null(self): def test_validate_secrets_absent(self): payload_json = """{"appId": "honk"}""" - payload = json.loads(payload_json) + payload = orjson.loads(payload_json) serializer = AppStoreUpdateCredentialsSerializer(data=payload) assert serializer.is_valid(), serializer.errors @@ -60,7 +60,7 @@ def test_validate_secrets_absent(self): def test_validate_secrets_empty_string(self): payload_json = """{"appconnectPrivateKey": ""}""" - payload = json.loads(payload_json) + payload = orjson.loads(payload_json) serializer = AppStoreUpdateCredentialsSerializer(data=payload) assert not serializer.is_valid() @@ -71,7 +71,7 @@ def test_validate_secrets_empty_string(self): def test_validate_secrets_string(self): payload_json = """{"appconnectPrivateKey": "honk"}""" - payload = json.loads(payload_json) + payload = orjson.loads(payload_json) serializer = 
AppStoreUpdateCredentialsSerializer(data=payload) assert serializer.is_valid(), serializer.errors diff --git a/tests/sentry/api/endpoints/test_project_artifact_bundle_file_details.py b/tests/sentry/api/endpoints/test_project_artifact_bundle_file_details.py index 9ff9d43dd63a21..7cc630f791a7aa 100644 --- a/tests/sentry/api/endpoints/test_project_artifact_bundle_file_details.py +++ b/tests/sentry/api/endpoints/test_project_artifact_bundle_file_details.py @@ -3,13 +3,13 @@ import zipfile from uuid import uuid4 +import orjson from django.urls import reverse from sentry.models.artifactbundle import ArtifactBundle, ProjectArtifactBundle from sentry.models.files.file import File from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.response import close_streaming_response -from sentry.utils import json class ProjectArtifactBundleFileDetailsEndpointTest(APITestCase): @@ -26,7 +26,7 @@ def remove_and_return(dictionary, key): zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { # We remove the "content" key in the original dict, thus no subsequent calls should be made. "files": { @@ -34,7 +34,7 @@ def remove_and_return(dictionary, key): for file_path, info in files.items() } } - ), + ).decode(), ) compressed.seek(0) diff --git a/tests/sentry/api/endpoints/test_project_artifact_lookup.py b/tests/sentry/api/endpoints/test_project_artifact_lookup.py index d2c4e9d0e8bcad..b71c80d41a38b5 100644 --- a/tests/sentry/api/endpoints/test_project_artifact_lookup.py +++ b/tests/sentry/api/endpoints/test_project_artifact_lookup.py @@ -5,6 +5,7 @@ from io import BytesIO from uuid import uuid4 +import orjson from django.core.files.base import ContentFile from django.urls import reverse @@ -21,7 +22,6 @@ from sentry.tasks.assemble import assemble_artifacts from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.datetime import freeze_time -from sentry.utils import json def make_file(artifact_name, content, type="artifact.bundle", headers=None): @@ -42,7 +42,7 @@ def remove_and_return(dictionary, key): zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { # We remove the "content" key in the original dict, thus no subsequent calls should be made. 
"files": { @@ -50,7 +50,7 @@ def remove_and_return(dictionary, key): for file_path, info in files.items() } } - ), + ).decode(), ) compressed.seek(0) @@ -85,7 +85,7 @@ def create_archive(self, fields, files, dist=None): ) buffer = BytesIO() with zipfile.ZipFile(buffer, mode="w") as zf: - zf.writestr("manifest.json", json.dumps(manifest)) + zf.writestr("manifest.json", orjson.dumps(manifest).decode()) for filename, content in files.items(): zf.writestr(filename, content) diff --git a/tests/sentry/api/endpoints/test_project_autofix_create_codebase_index.py b/tests/sentry/api/endpoints/test_project_autofix_create_codebase_index.py index 3ba3e55437f211..0605be81ad1829 100644 --- a/tests/sentry/api/endpoints/test_project_autofix_create_codebase_index.py +++ b/tests/sentry/api/endpoints/test_project_autofix_create_codebase_index.py @@ -1,11 +1,11 @@ from unittest.mock import call, patch +import orjson from django.conf import settings from django.urls import reverse from rest_framework import status from sentry.testutils.cases import APITestCase -from sentry.utils import json class TestProjectAutofixCodebaseIndexCreate(APITestCase): @@ -41,7 +41,7 @@ def test_autofix_create_successful(self, mock_post): assert response.status_code == status.HTTP_202_ACCEPTED mock_post.assert_called_once_with( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/create", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -86,7 +86,7 @@ def test_autofix_create_multiple_repos_successful(self, mock_post): calls = [ call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/create", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -102,7 +102,7 @@ def test_autofix_create_multiple_repos_successful(self, mock_post): ), call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/create", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, diff --git a/tests/sentry/api/endpoints/test_project_create_sample.py b/tests/sentry/api/endpoints/test_project_create_sample.py index a52e8205c0f88d..e0fe2ee487a74d 100644 --- a/tests/sentry/api/endpoints/test_project_create_sample.py +++ b/tests/sentry/api/endpoints/test_project_create_sample.py @@ -1,9 +1,9 @@ +import orjson from django.urls import reverse from sentry.models.groupinbox import GroupInbox from sentry.testutils.cases import APITestCase from sentry.testutils.skips import requires_snuba -from sentry.utils import json pytestmark = [requires_snuba] @@ -26,7 +26,7 @@ def test_simple(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) assert GroupInbox.objects.filter(group=response.data["groupID"]).exists() def test_project_platform(self): @@ -42,7 +42,7 @@ def test_project_platform(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_cocoa(self): project = self.create_project(teams=[self.team], name="foo", platform="cocoa") @@ -57,7 +57,7 @@ def test_cocoa(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert 
"groupID" in orjson.loads(response.content) def test_java(self): project = self.create_project(teams=[self.team], name="foo", platform="java") @@ -72,7 +72,7 @@ def test_java(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_javascript(self): project = self.create_project(teams=[self.team], name="foo", platform="javascript") @@ -87,7 +87,7 @@ def test_javascript(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_php(self): project = self.create_project(teams=[self.team], name="foo", platform="php") @@ -102,7 +102,7 @@ def test_php(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_python(self): project = self.create_project(teams=[self.team], name="foo", platform="python") @@ -117,7 +117,7 @@ def test_python(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_reactnative(self): project = self.create_project(teams=[self.team], name="foo", platform="react-native") @@ -132,7 +132,7 @@ def test_reactnative(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_ruby(self): project = self.create_project(teams=[self.team], name="foo", platform="ruby") @@ -147,7 +147,7 @@ def test_ruby(self): response = self.client.post(url, format="json") assert response.status_code == 200, response.content - assert "groupID" in json.loads(response.content) + assert "groupID" in orjson.loads(response.content) def test_attempted_path_traversal_returns_400(self): project = self.create_project(teams=[self.team], name="foo", platform="../../../etc/passwd") diff --git a/tests/sentry/api/endpoints/test_project_details.py b/tests/sentry/api/endpoints/test_project_details.py index 8ac2c563c7494f..ca268868571c2b 100644 --- a/tests/sentry/api/endpoints/test_project_details.py +++ b/tests/sentry/api/endpoints/test_project_details.py @@ -6,6 +6,7 @@ from typing import Any from unittest import mock +import orjson from django.db import router from django.urls import reverse @@ -34,7 +35,6 @@ from sentry.testutils.helpers import Feature, with_feature from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode -from sentry.utils import json def _dyn_sampling_data(multiple_uniform_rules=False, uniform_rule_last_position=True): @@ -104,7 +104,7 @@ def _remove_ids_from_dynamic_rules(dynamic_rules): def first_symbol_source_id(sources_json): - sources = json.loads(sources_json) + sources = orjson.loads(sources_json) return sources[0]["id"] @@ -1180,11 +1180,13 @@ def test_redacted_symbol_source_secrets(self, create_audit_entry): "password": "beepbeep", } self.get_success_response( - self.org_slug, self.proj_slug, symbolSources=json.dumps([config]) + self.org_slug, self.proj_slug, symbolSources=orjson.dumps([config]).decode() ) config["id"] = 
first_symbol_source_id(self.project.get_option("sentry:symbol_sources")) - assert self.project.get_option("sentry:symbol_sources") == json.dumps([config]) + assert ( + self.project.get_option("sentry:symbol_sources") == orjson.dumps([config]).decode() + ) # redact password redacted_source = config.copy() @@ -1204,10 +1206,14 @@ def test_redacted_symbol_source_secrets(self, create_audit_entry): } self.get_success_response( - self.org_slug, self.proj_slug, symbolSources=json.dumps([redacted_source]) + self.org_slug, + self.proj_slug, + symbolSources=orjson.dumps([redacted_source]).decode(), ) # on save the magic object should be replaced with the previously set password - assert self.project.get_option("sentry:symbol_sources") == json.dumps([config]) + assert ( + self.project.get_option("sentry:symbol_sources") == orjson.dumps([config]).decode() + ) @mock.patch("sentry.api.base.create_audit_entry") def test_redacted_symbol_source_secrets_unknown_secret(self, create_audit_entry): @@ -1227,21 +1233,23 @@ def test_redacted_symbol_source_secrets_unknown_secret(self, create_audit_entry) "password": "beepbeep", } self.get_success_response( - self.org_slug, self.proj_slug, symbolSources=json.dumps([config]) + self.org_slug, self.proj_slug, symbolSources=orjson.dumps([config]).decode() ) config["id"] = first_symbol_source_id(self.project.get_option("sentry:symbol_sources")) - assert self.project.get_option("sentry:symbol_sources") == json.dumps([config]) + assert ( + self.project.get_option("sentry:symbol_sources") == orjson.dumps([config]).decode() + ) # prepare new call, this secret is not known new_source = config.copy() new_source["password"] = {"hidden-secret": True} new_source["id"] = "oops" response = self.get_response( - self.org_slug, self.proj_slug, symbolSources=json.dumps([new_source]) + self.org_slug, self.proj_slug, symbolSources=orjson.dumps([new_source]).decode() ) assert response.status_code == 400 - assert json.loads(response.content) == { + assert orjson.loads(response.content) == { "symbolSources": ["Hidden symbol source secret is missing a value"] } @@ -1273,7 +1281,7 @@ def symbol_sources(self): "password": "beepbeep", } - project.update_option("sentry:symbol_sources", json.dumps([source1, source2])) + project.update_option("sentry:symbol_sources", orjson.dumps([source1, source2]).decode()) return [source1, source2] def test_symbol_sources_no_modification(self): @@ -1281,22 +1289,26 @@ def test_symbol_sources_no_modification(self): project = Project.objects.get(id=self.project.id) with Feature({"organizations:custom-symbol-sources": False}): resp = self.get_response( - self.org_slug, self.proj_slug, symbolSources=json.dumps([source1, source2]) + self.org_slug, + self.proj_slug, + symbolSources=orjson.dumps([source1, source2]).decode(), ) assert resp.status_code == 200 - assert project.get_option("sentry:symbol_sources", json.dumps([source1, source2])) + assert project.get_option( + "sentry:symbol_sources", orjson.dumps([source1, source2]).decode() + ) def test_symbol_sources_deletion(self): source1, source2 = self.symbol_sources() project = Project.objects.get(id=self.project.id) with Feature({"organizations:custom-symbol-sources": False}): resp = self.get_response( - self.org_slug, self.proj_slug, symbolSources=json.dumps([source1]) + self.org_slug, self.proj_slug, symbolSources=orjson.dumps([source1]).decode() ) assert resp.status_code == 200 - assert project.get_option("sentry:symbol_sources", json.dumps([source1])) + assert project.get_option("sentry:symbol_sources", 
orjson.dumps([source1]).decode()) class CopyProjectSettingsTest(APITestCase): diff --git a/tests/sentry/api/endpoints/test_project_profiling_profile.py b/tests/sentry/api/endpoints/test_project_profiling_profile.py index fb3da27f5ff0bb..b61f7bd964ade3 100644 --- a/tests/sentry/api/endpoints/test_project_profiling_profile.py +++ b/tests/sentry/api/endpoints/test_project_profiling_profile.py @@ -1,11 +1,11 @@ from unittest.mock import MagicMock, patch from uuid import uuid4 +import orjson from django.urls import reverse from rest_framework.exceptions import ErrorDetail from sentry.testutils.cases import APITestCase -from sentry.utils import json PROFILING_FEATURES = {"organizations:profiling": True} @@ -50,7 +50,7 @@ def test_bad_filter(self): def test_basic(self, mock_proxy): mock_response = MagicMock() mock_response.status = 200 - mock_response.data = json.dumps({"functions": []}) + mock_response.data = orjson.dumps({"functions": []}).decode() mock_proxy.return_value = mock_response with self.feature(PROFILING_FEATURES): response = self.client.get(self.url, {"sort": "count"}) @@ -75,7 +75,7 @@ def test_is_application_invalid(self): def test_is_application_true(self, mock_proxy): mock_response = MagicMock() mock_response.status = 200 - mock_response.data = json.dumps({"functions": []}) + mock_response.data = orjson.dumps({"functions": []}).decode() mock_proxy.return_value = mock_response with self.feature(PROFILING_FEATURES): response = self.client.get(self.url, {"is_application": "1", "sort": "count"}) @@ -88,7 +88,7 @@ def test_is_application_true(self, mock_proxy): def test_is_application_false(self, mock_proxy): mock_response = MagicMock() mock_response.status = 200 - mock_response.data = json.dumps({"functions": []}) + mock_response.data = orjson.dumps({"functions": []}).decode() mock_proxy.return_value = mock_response with self.feature(PROFILING_FEATURES): response = self.client.get(self.url, {"is_application": "0", "sort": "count"}) diff --git a/tests/sentry/api/endpoints/test_project_rule_details.py b/tests/sentry/api/endpoints/test_project_rule_details.py index b56d0beeda7b91..46b86b741f3671 100644 --- a/tests/sentry/api/endpoints/test_project_rule_details.py +++ b/tests/sentry/api/endpoints/test_project_rule_details.py @@ -6,6 +6,7 @@ from unittest.mock import patch from urllib.parse import parse_qs +import orjson import responses from rest_framework import status @@ -25,7 +26,6 @@ from sentry.testutils.helpers.datetime import freeze_time from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import assume_test_silo_mode -from sentry.utils import json def assert_rule_from_payload(rule: Rule, payload: Mapping[str, Any]) -> None: @@ -1017,14 +1017,14 @@ def test_update_channel_slack(self, mock_send_confirmation_notification): url="https://slack.com/api/conversations.list", status=200, content_type="application/json", - body=json.dumps(channels), + body=orjson.dumps(channels), ) responses.add( method=responses.GET, url="https://slack.com/api/conversations.info", status=200, content_type="application/json", - body=json.dumps({"ok": channels["ok"], "channel": channels["channels"][1]}), + body=orjson.dumps({"ok": channels["ok"], "channel": channels["channels"][1]}), ) payload = { @@ -1081,19 +1081,19 @@ def test_slack_confirmation_notification_contents(self): url="https://slack.com/api/conversations.list", status=200, content_type="application/json", - body=json.dumps(channels), + body=orjson.dumps(channels), ) responses.add( method=responses.GET, 
url="https://slack.com/api/conversations.info", status=200, content_type="application/json", - body=json.dumps({"ok": channels["ok"], "channel": channels["channels"][1]}), + body=orjson.dumps({"ok": channels["ok"], "channel": channels["channels"][1]}), ) blocks = SlackRuleSaveEditMessageBuilder(rule=self.rule, new=False).build() payload = { "text": blocks.get("text"), - "blocks": json.dumps(blocks.get("blocks")), + "blocks": orjson.dumps(blocks.get("blocks")).decode(), "channel": "new_channel_id", "unfurl_links": False, "unfurl_media": False, @@ -1103,7 +1103,7 @@ def test_slack_confirmation_notification_contents(self): url="https://slack.com/api/chat.postMessage", status=200, content_type="application/json", - body=json.dumps(payload), + body=orjson.dumps(payload), ) staging_env = self.create_environment( self.project, name="staging", organization=self.organization @@ -1128,7 +1128,7 @@ def test_slack_confirmation_notification_contents(self): data = parse_qs(responses.calls[1].request.body) message = f"Alert rule in the *{self.project.slug}* project was updated." assert data["text"][0] == message - rendered_blocks = json.loads(data["blocks"][0]) + rendered_blocks = orjson.loads(data["blocks"][0]) assert rendered_blocks[0]["text"]["text"] == message changes = "*Changes*\n" changes += "• Added condition 'The issue's category is equal to Performance'\n" @@ -1170,14 +1170,14 @@ def test_update_channel_slack_workspace_fail(self): url="https://slack.com/api/conversations.list", status=200, content_type="application/json", - body=json.dumps(channels), + body=orjson.dumps(channels), ) responses.add( method=responses.GET, url="https://slack.com/api/conversations.info", status=200, content_type="application/json", - body=json.dumps({"ok": channels["ok"], "channel": channels["channels"][0]}), + body=orjson.dumps({"ok": channels["ok"], "channel": channels["channels"][0]}), ) actions[0]["channel"] = "#new_channel_name" @@ -1201,7 +1201,7 @@ def test_slack_channel_id_saved(self): url="https://slack.com/api/conversations.info", status=200, content_type="application/json", - body=json.dumps( + body=orjson.dumps( {"ok": "true", "channel": {"name": "team-team-team", "id": channel_id}} ), ) diff --git a/tests/sentry/api/endpoints/test_project_rules.py b/tests/sentry/api/endpoints/test_project_rules.py index 0437be096744cd..fa3073de493c54 100644 --- a/tests/sentry/api/endpoints/test_project_rules.py +++ b/tests/sentry/api/endpoints/test_project_rules.py @@ -7,6 +7,7 @@ from urllib.parse import parse_qs from uuid import uuid4 +import orjson import responses from django.test import override_settings from rest_framework import status @@ -23,7 +24,6 @@ from sentry.testutils.cases import APITestCase from sentry.testutils.helpers import install_slack, with_feature from sentry.testutils.silo import assume_test_silo_mode -from sentry.utils import json class ProjectRuleBaseTestCase(APITestCase): @@ -358,7 +358,7 @@ def test_slack_channel_id_saved(self, mock_send_confirmation_notification): url="https://slack.com/api/conversations.info", status=200, content_type="application/json", - body=json.dumps( + body=orjson.dumps( {"ok": "true", "channel": {"name": "team-team-team", "id": self.channel_id}} ), ) @@ -385,7 +385,7 @@ def test_slack_confirmation_notification_contents(self): url="https://slack.com/api/conversations.info", status=200, content_type="application/json", - body=json.dumps( + body=orjson.dumps( {"ok": "true", "channel": {"name": "team-team-team", "id": self.channel_id}} ), ) @@ -393,7 +393,7 @@ def 
test_slack_confirmation_notification_contents(self): blocks = SlackRuleSaveEditMessageBuilder(rule=self.rule, new=True).build() payload = { "text": blocks.get("text"), - "blocks": json.dumps(blocks.get("blocks")), + "blocks": orjson.dumps(blocks.get("blocks")).decode(), "channel": self.channel_id, "unfurl_links": False, "unfurl_media": False, @@ -403,7 +403,7 @@ def test_slack_confirmation_notification_contents(self): url="https://slack.com/api/chat.postMessage", status=200, content_type="application/json", - body=json.dumps(payload), + body=orjson.dumps(payload), ) response = self.get_success_response( self.organization.slug, @@ -423,7 +423,7 @@ def test_slack_confirmation_notification_contents(self): data = parse_qs(responses.calls[1].request.body) message = f"Alert rule was created in the *{self.project.slug}* project and will send notifications here." assert data["text"][0] == message - rendered_blocks = json.loads(data["blocks"][0]) + rendered_blocks = orjson.loads(data["blocks"][0]) assert rendered_blocks[0]["text"]["text"] == message assert ( rendered_blocks[1]["elements"][0]["text"] diff --git a/tests/sentry/api/endpoints/test_project_symbol_sources.py b/tests/sentry/api/endpoints/test_project_symbol_sources.py index bf00671175d34d..9fcb23e6160961 100644 --- a/tests/sentry/api/endpoints/test_project_symbol_sources.py +++ b/tests/sentry/api/endpoints/test_project_symbol_sources.py @@ -1,6 +1,7 @@ +import orjson + from sentry.lang.native.sources import redact_source_secrets from sentry.testutils.cases import APITestCase -from sentry.utils import json class ProjectSymbolSourcesTest(APITestCase): @@ -19,7 +20,7 @@ def test_get_successful(self): "password": "beepbeep", } project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps([config])) + project.update_option("sentry:symbol_sources", orjson.dumps([config]).decode()) self.login_as(user=self.user) expected = redact_source_secrets([config]) @@ -45,7 +46,7 @@ def test_get_unsuccessful(self): "password": "beepbeep", } project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps([config])) + project.update_option("sentry:symbol_sources", orjson.dumps([config]).decode()) self.login_as(user=self.user) self.get_error_response( @@ -71,7 +72,7 @@ def test_delete_successful(self): } project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps([config])) + project.update_option("sentry:symbol_sources", orjson.dumps([config]).decode()) self.login_as(user=self.user) self.get_success_response( @@ -94,7 +95,7 @@ def test_delete_unsuccessful(self): } project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps([config])) + project.update_option("sentry:symbol_sources", orjson.dumps([config]).decode()) self.login_as(user=self.user) self.get_error_response(project.organization.slug, project.slug, status=404) @@ -152,7 +153,7 @@ def test_submit_duplicate(self): } project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps([config])) + project.update_option("sentry:symbol_sources", orjson.dumps([config]).decode()) self.login_as(user=self.user) self.get_error_response(project.organization.slug, project.slug, raw_data=config) @@ -224,7 +225,7 @@ def test_update_successful(self): ] project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps(config)) + project.update_option("sentry:symbol_sources", 
orjson.dumps(config).decode()) self.login_as(user=self.user) update_config = { @@ -269,7 +270,9 @@ def test_update_successful(self): del response.data["id"] assert response.data == redact_source_secrets([update_config])[0] - source_ids = {src["id"] for src in json.loads(project.get_option("sentry:symbol_sources"))} + source_ids = { + src["id"] for src in orjson.loads(project.get_option("sentry:symbol_sources")) + } assert "hank" in source_ids assert "beep" not in source_ids @@ -301,7 +304,7 @@ def test_update_unsuccessful(self): ] project = self.project # force creation - project.update_option("sentry:symbol_sources", json.dumps(config)) + project.update_option("sentry:symbol_sources", orjson.dumps(config).decode()) self.login_as(user=self.user) update_config = { diff --git a/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py b/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py index fd0e148eebfe16..d395b31d3589c1 100644 --- a/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py +++ b/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py @@ -1,5 +1,6 @@ from unittest.mock import patch +import orjson import pytest from django.urls import reverse from sentry_relay.processing import normalize_global_config @@ -7,7 +8,6 @@ from sentry.relay.globalconfig import get_global_config from sentry.testutils.helpers.options import override_options from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json @pytest.fixture @@ -26,7 +26,7 @@ def inner(version, global_): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - return json.loads(resp.content), resp.status_code + return orjson.loads(resp.content), resp.status_code return inner diff --git a/tests/sentry/api/endpoints/test_relay_projectconfigs.py b/tests/sentry/api/endpoints/test_relay_projectconfigs.py index d24f4098e4c66f..9fa465b3a3e77b 100644 --- a/tests/sentry/api/endpoints/test_relay_projectconfigs.py +++ b/tests/sentry/api/endpoints/test_relay_projectconfigs.py @@ -4,6 +4,7 @@ from typing import Any from uuid import uuid4 +import orjson import pytest from django.urls import reverse from sentry_relay.auth import generate_key_pair @@ -14,7 +15,7 @@ from sentry.models.relay import Relay from sentry.testutils.helpers import Feature from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json, safe +from sentry.utils import safe _date_regex = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z$") @@ -84,7 +85,7 @@ def inner(full_config, projects=None): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - return json.loads(resp.content), resp.status_code + return orjson.loads(resp.content), resp.status_code return inner diff --git a/tests/sentry/api/endpoints/test_relay_projectconfigs_v2.py b/tests/sentry/api/endpoints/test_relay_projectconfigs_v2.py index 85e06019ab0019..52abb7dfbdee1d 100644 --- a/tests/sentry/api/endpoints/test_relay_projectconfigs_v2.py +++ b/tests/sentry/api/endpoints/test_relay_projectconfigs_v2.py @@ -3,6 +3,7 @@ import re from typing import Any +import orjson import pytest from django.urls import reverse @@ -11,7 +12,7 @@ from sentry.models.projectkey import ProjectKey, ProjectKeyStatus from sentry.testutils.helpers import Feature from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json, safe +from sentry.utils import safe _date_regex = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z$") @@ -56,7 +57,7 @@ def inner(full_config, public_keys=None, version="2"): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - return 
json.loads(resp.content), resp.status_code + return orjson.loads(resp.content), resp.status_code return inner diff --git a/tests/sentry/api/endpoints/test_relay_projectconfigs_v3.py b/tests/sentry/api/endpoints/test_relay_projectconfigs_v3.py index fde23906f352f6..e075e6c7787247 100644 --- a/tests/sentry/api/endpoints/test_relay_projectconfigs_v3.py +++ b/tests/sentry/api/endpoints/test_relay_projectconfigs_v3.py @@ -1,5 +1,6 @@ from unittest.mock import patch, sentinel +import orjson import pytest from django.urls import reverse @@ -8,7 +9,6 @@ from sentry.tasks.relay import build_project_config from sentry.testutils.hybrid_cloud import simulated_transaction_watermarks from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json @pytest.fixture(autouse=True) @@ -41,7 +41,7 @@ def inner(full_config, public_keys=None, global_=False): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - return json.loads(resp.content), resp.status_code + return orjson.loads(resp.content), resp.status_code return inner diff --git a/tests/sentry/api/endpoints/test_relay_projectids.py b/tests/sentry/api/endpoints/test_relay_projectids.py index 2938b8d90bdd7d..a84552f40ee6d6 100644 --- a/tests/sentry/api/endpoints/test_relay_projectids.py +++ b/tests/sentry/api/endpoints/test_relay_projectids.py @@ -2,6 +2,7 @@ import uuid from unittest import mock +import orjson from django.test import override_settings from django.urls import reverse from sentry_relay.auth import generate_key_pair @@ -10,7 +11,7 @@ from sentry.models.relay import Relay from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.options import override_options -from sentry.utils import json, safe +from sentry.utils import safe # Note this is duplicated in test_relay_publickeys (maybe put in a common utils) @@ -71,7 +72,7 @@ def _call_endpoint(self, public_key, internal): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - return json.loads(resp.content), resp.status_code + return orjson.loads(resp.content), resp.status_code def _call_endpoint_static_relay(self, internal): raw_json, signature = self.private_key.pack({"publicKeys": [str(self.public_key)]}) @@ -85,7 +86,7 @@ def _call_endpoint_static_relay(self, internal): HTTP_X_SENTRY_RELAY_ID=self.relay_id, HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - return json.loads(resp.content), resp.status_code + return orjson.loads(resp.content), resp.status_code def test_internal_relay(self): self._setup_relay(add_org_key=True) diff --git a/tests/sentry/api/endpoints/test_relay_publickeys.py b/tests/sentry/api/endpoints/test_relay_publickeys.py index 90f24b49c40494..469cbc5efab4b5 100644 --- a/tests/sentry/api/endpoints/test_relay_publickeys.py +++ b/tests/sentry/api/endpoints/test_relay_publickeys.py @@ -1,13 +1,13 @@ from unittest import mock from uuid import uuid4 +import orjson from django.urls import reverse from sentry_relay.auth import generate_key_pair from sentry.auth import system from sentry.models.relay import Relay from sentry.testutils.cases import APITestCase -from sentry.utils import json def disable_internal_networks(): @@ -115,5 +115,4 @@ def _call_endpoint(self, calling_relay): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - result = json.loads(resp.content) - return result + return orjson.loads(resp.content) diff --git a/tests/sentry/api/endpoints/test_relay_register.py b/tests/sentry/api/endpoints/test_relay_register.py index a90618ca6b7334..1f7e8696064e84 100644 --- a/tests/sentry/api/endpoints/test_relay_register.py +++ 
b/tests/sentry/api/endpoints/test_relay_register.py @@ -1,5 +1,6 @@ from uuid import uuid4 +import orjson from django.conf import settings from django.urls import reverse from django.utils import timezone @@ -8,7 +9,6 @@ from sentry.models.relay import Relay, RelayUsage from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.options import override_options -from sentry.utils import json class RelayRegisterTest(APITestCase): @@ -47,7 +47,7 @@ def register_relay(self, key_pair, version, relay_id): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) data = { "token": str(result.get("token")), @@ -179,7 +179,7 @@ def test_valid_register_response(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) raw_json, signature = self.private_key.pack(result) @@ -210,7 +210,7 @@ def test_forge_public_key(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) raw_json, signature = self.private_key.pack(result) @@ -254,7 +254,7 @@ def test_public_key_mismatch(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) raw_json, signature = self.private_key.pack(result) @@ -295,7 +295,7 @@ def test_forge_public_key_on_register(self): HTTP_X_SENTRY_RELAY_SIGNATURE=signature, ) - result = json.loads(resp.content) + result = orjson.loads(resp.content) resp = self.client.post( self.path, @@ -337,7 +337,7 @@ def test_invalid_json_response(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) _, signature = self.private_key.pack(result) @@ -365,7 +365,7 @@ def test_missing_token_response(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) del result["token"] @@ -395,7 +395,7 @@ def test_missing_sig_response(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) raw_json, signature = self.private_key.pack(result) @@ -422,7 +422,7 @@ def test_relay_id_mismatch_response(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) raw_json, signature = self.private_key.pack(result) @@ -462,7 +462,7 @@ def test_old_relays_can_register(self): ) assert resp.status_code == 200, resp.content - result = json.loads(resp.content) + result = orjson.loads(resp.content) raw_json, signature = self.private_key.pack(result) diff --git a/tests/sentry/api/endpoints/test_rpc.py b/tests/sentry/api/endpoints/test_rpc.py index 57553ad58a1874..2c23b2ebdb99b5 100644 --- a/tests/sentry/api/endpoints/test_rpc.py +++ b/tests/sentry/api/endpoints/test_rpc.py @@ -2,6 +2,7 @@ from typing import Any +import orjson from django.test import override_settings from django.urls import reverse from rest_framework.exceptions import ErrorDetail @@ -9,7 +10,6 @@ from sentry.services.hybrid_cloud.organization import RpcUserOrganizationContext from sentry.services.hybrid_cloud.rpc import generate_request_signature from sentry.testutils.cases import APITestCase -from sentry.utils import json @override_settings(RPC_SHARED_SECRET=["a-long-value-that-is-hard-to-guess"]) @@ -27,8 +27,8 @@ def _get_path(service_name: str, method_name: str) -> str: def auth_header(self, path: 
str, data: dict | str) -> str: if isinstance(data, dict): - data = json.dumps(data) - signature = generate_request_signature(path, data.encode("utf8")) + data = orjson.dumps(data).decode() + signature = generate_request_signature(path, data.encode()) return f"rpcsignature {signature}" diff --git a/tests/sentry/api/endpoints/test_seer_rpc.py b/tests/sentry/api/endpoints/test_seer_rpc.py index 29b33c98f2350d..f2e6fc123ce466 100644 --- a/tests/sentry/api/endpoints/test_seer_rpc.py +++ b/tests/sentry/api/endpoints/test_seer_rpc.py @@ -1,11 +1,11 @@ from typing import Any +import orjson from django.test import override_settings from django.urls import reverse from sentry.api.endpoints.seer_rpc import generate_request_signature from sentry.testutils.cases import APITestCase -from sentry.utils import json @override_settings(SEER_RPC_SHARED_SECRET=["a-long-value-that-is-hard-to-guess"]) @@ -19,8 +19,8 @@ def _get_path(method_name: str) -> str: def auth_header(self, path: str, data: dict | str) -> str: if isinstance(data, dict): - data = json.dumps(data) - signature = generate_request_signature(path, data.encode("utf8")) + data = orjson.dumps(data).decode() + signature = generate_request_signature(path, data.encode()) return f"rpcsignature {signature}" diff --git a/tests/sentry/api/endpoints/test_sentry_app_details.py b/tests/sentry/api/endpoints/test_sentry_app_details.py index 2c1c587cf9d7fc..80fdbdd42bef07 100644 --- a/tests/sentry/api/endpoints/test_sentry_app_details.py +++ b/tests/sentry/api/endpoints/test_sentry_app_details.py @@ -1,5 +1,7 @@ from unittest.mock import patch +import orjson + from sentry import audit_log, deletions from sentry.api.endpoints.integrations.sentry_apps.details import ( PARTNERSHIP_RESTRICTED_ERROR_MESSAGE, @@ -14,7 +16,6 @@ from sentry.testutils.helpers import with_feature from sentry.testutils.helpers.options import override_options from sentry.testutils.silo import assume_test_silo_mode, control_silo_test -from sentry.utils import json class SentryAppDetailsTest(APITestCase): @@ -419,7 +420,7 @@ def test_bad_schema(self, record): sentry_app_id=app.id, sentry_app_name="SampleApp", error_message="'elements' is a required property", - schema=json.dumps(schema), + schema=orjson.dumps(schema).decode(), ) def test_no_webhook_public_integration(self): diff --git a/tests/sentry/api/endpoints/test_sentry_app_installation_external_requests.py b/tests/sentry/api/endpoints/test_sentry_app_installation_external_requests.py index b0841fd4dc369a..d46f07773dff49 100644 --- a/tests/sentry/api/endpoints/test_sentry_app_installation_external_requests.py +++ b/tests/sentry/api/endpoints/test_sentry_app_installation_external_requests.py @@ -1,10 +1,10 @@ +import orjson import responses from django.urls import reverse from django.utils.http import urlencode from responses.matchers import query_string_matcher from sentry.testutils.cases import APITestCase -from sentry.utils import json class SentryAppInstallationExternalRequestsEndpointTest(APITestCase): @@ -55,7 +55,7 @@ def test_makes_external_request_with_dependent_data(self): "projectSlug": self.project.slug, "installationId": self.install.uuid, "query": "proj", - "dependentData": json.dumps({"org_id": "A"}), + "dependentData": orjson.dumps({"org_id": "A"}).decode(), } ) responses.add( @@ -71,7 +71,7 @@ def test_makes_external_request_with_dependent_data(self): "projectId": self.project.id, "uri": "/get-projects", "query": "proj", - "dependentData": json.dumps({"org_id": "A"}), + "dependentData": orjson.dumps({"org_id": 
"A"}).decode(), } ) url = f"{self.url}?{qs}" diff --git a/tests/sentry/api/endpoints/test_sentry_apps.py b/tests/sentry/api/endpoints/test_sentry_apps.py index 80445400345a22..48f9af0d06367e 100644 --- a/tests/sentry/api/endpoints/test_sentry_apps.py +++ b/tests/sentry/api/endpoints/test_sentry_apps.py @@ -5,6 +5,7 @@ from typing import Any from unittest.mock import patch +import orjson import pytest from django.test import override_settings from django.urls import reverse @@ -23,7 +24,6 @@ from sentry.testutils.helpers import Feature, with_feature from sentry.testutils.helpers.options import override_options from sentry.testutils.silo import assume_test_silo_mode, control_silo_test -from sentry.utils import json POPULARITY = 27 EXPECTED = { @@ -116,7 +116,7 @@ def assert_response_has_serialized_sentry_app( } ] - assert data in json.loads(response.content) + assert data in orjson.loads(response.content) def get_data(self, **kwargs: Any) -> Mapping[str, Any]: return { @@ -373,7 +373,7 @@ def test_superuser_can_create_with_popularity(self): response = self.get_success_response( **self.get_data(popularity=POPULARITY), status_code=201 ) - assert {"popularity": POPULARITY}.items() <= json.loads(response.content).items() + assert {"popularity": POPULARITY}.items() <= orjson.loads(response.content).items() with self.settings(SENTRY_SELF_HOSTED=False): self.get_success_response( @@ -465,7 +465,7 @@ def assert_sentry_app_status_code(self, sentry_app: SentryApp, status_code: int) def test_creates_sentry_app(self): response = self.get_success_response(**self.get_data(), status_code=201) - content = json.loads(response.content) + content = orjson.loads(response.content) for key, value in EXPECTED.items(): assert key in content if isinstance(value, list): @@ -523,7 +523,7 @@ def test_nonsuperuser_cannot_create_with_popularity(self): response = self.get_success_response( **self.get_data(popularity=POPULARITY), status_code=201 ) - assert {"popularity": self.default_popularity}.items() <= json.loads( + assert {"popularity": self.default_popularity}.items() <= orjson.loads( response.content ).items() @@ -548,7 +548,7 @@ def test_create_alert_rule_action(self): data = self.get_data(schema={"elements": [self.create_alert_rule_action_schema()]}) response = self.get_success_response(**data, status_code=201) - content = json.loads(response.content) + content = orjson.loads(response.content) for key, value in expected.items(): assert key in content if isinstance(value, list): @@ -591,7 +591,7 @@ def test_wrong_schema_format(self, record): } # XXX: Compare schema as an object instead of json to avoid key ordering issues - record.call_args.kwargs["schema"] = json.loads(record.call_args.kwargs["schema"]) + record.call_args.kwargs["schema"] = orjson.loads(record.call_args.kwargs["schema"]) record.assert_called_with( "sentry_app.schema_validation_error", @@ -606,7 +606,7 @@ def test_wrong_schema_format(self, record): def test_can_create_with_error_created_hook_with_flag(self): expected = {**EXPECTED, "events": ["error"]} response = self.get_success_response(**self.get_data(events=("error",)), status_code=201) - content = json.loads(response.content) + content = orjson.loads(response.content) for key, value in expected.items(): assert key in content if isinstance(value, list): diff --git a/tests/sentry/api/endpoints/test_sentry_apps_stats.py b/tests/sentry/api/endpoints/test_sentry_apps_stats.py index c85806134987a6..1958356d8741c9 100644 --- a/tests/sentry/api/endpoints/test_sentry_apps_stats.py +++ 
b/tests/sentry/api/endpoints/test_sentry_apps_stats.py @@ -1,8 +1,9 @@ +import orjson + from sentry.api.serializers.base import serialize from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.options import override_options from sentry.testutils.silo import control_silo_test -from sentry.utils import json @control_silo_test @@ -33,7 +34,7 @@ def _check_response(self, response): "name": self.app_two.name, "installs": 1, "avatars": [], - } in json.loads(response.content) + } in orjson.loads(response.content) assert { "id": self.app_one.id, "uuid": self.app_one.uuid, @@ -41,7 +42,7 @@ def _check_response(self, response): "name": self.app_one.name, "installs": 1, "avatars": [serialize(self.app_one_avatar)], - } in json.loads(response.content) + } in orjson.loads(response.content) def test_superuser_has_access(self): self.login_as(user=self.superuser, superuser=True) diff --git a/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py b/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py index 2d33839e665785..475e01a39ec8fa 100644 --- a/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py +++ b/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py @@ -1,6 +1,7 @@ import zipfile from io import BytesIO +import orjson from django.core.files.base import ContentFile from rest_framework import status @@ -21,7 +22,6 @@ from sentry.models.releasefile import ARTIFACT_INDEX_FILENAME, ARTIFACT_INDEX_TYPE, ReleaseFile from sentry.testutils.cases import APITestCase from sentry.testutils.skips import requires_snuba -from sentry.utils import json pytestmark = [requires_snuba] @@ -864,7 +864,7 @@ def test_frame_release_process_artifact_bundle_data_protocol_source_map_referenc zip_file.writestr("files/_/_/bundle.min.js", b'console.log("hello world");') zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -877,7 +877,7 @@ def test_frame_release_process_artifact_bundle_data_protocol_source_map_referenc }, }, } - ), + ).decode(), ) compressed.seek(0) @@ -940,7 +940,7 @@ def test_frame_release_process_artifact_bundle_source_file_wrong_dist(self): ) zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -952,7 +952,7 @@ def test_frame_release_process_artifact_bundle_source_file_wrong_dist(self): }, }, } - ), + ).decode(), ) compressed.seek(0) @@ -1013,7 +1013,7 @@ def test_frame_release_process_artifact_bundle_source_file_successful(self): ) zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -1025,7 +1025,7 @@ def test_frame_release_process_artifact_bundle_source_file_successful(self): }, }, } - ), + ).decode(), ) compressed.seek(0) @@ -1086,7 +1086,7 @@ def test_frame_release_process_artifact_bundle_source_map_not_found(self): zip_file.writestr("files/_/_/bundle.min.js.map", b"") zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -1105,7 +1105,7 @@ def test_frame_release_process_artifact_bundle_source_map_not_found(self): }, }, } - ), + ).decode(), ) compressed.seek(0) @@ -1175,7 +1175,7 @@ def test_frame_release_process_artifact_bundle_source_map_wrong_dist(self): zip_file.writestr("files/_/_/bundle.min.js.map", b"") zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -1194,7 +1194,7 @@ def 
test_frame_release_process_artifact_bundle_source_map_wrong_dist(self): }, }, } - ), + ).decode(), ) compressed.seek(0) @@ -1291,7 +1291,7 @@ def test_frame_release_process_artifact_bundle_source_map_successful(self): zip_file.writestr("files/_/_/bundle.min.js.map", b"") zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -1310,7 +1310,7 @@ def test_frame_release_process_artifact_bundle_source_map_successful(self): }, }, } - ), + ).decode(), ) compressed.seek(0) @@ -1392,7 +1392,7 @@ def test_frame_release_file_success(self): artifact_index.putfile( ContentFile( - json.dumps( + orjson.dumps( { "files": { "~/bundle.min.js": { @@ -1415,7 +1415,7 @@ def test_frame_release_file_success(self): }, }, } - ).encode() + ) ) ) @@ -1438,7 +1438,7 @@ def test_frame_release_file_success(self): zip_file.writestr("files/_/_/bundle.min.js.map", b"") zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -1457,7 +1457,7 @@ def test_frame_release_file_success(self): }, }, } - ), + ).decode(), ) compressed.seek(0) release_artifact_bundle = File.objects.create( @@ -1509,7 +1509,7 @@ def test_frame_release_file_wrong_dist(self): artifact_index.putfile( ContentFile( - json.dumps( + orjson.dumps( { "files": { "~/bundle.min.js": { @@ -1528,7 +1528,7 @@ def test_frame_release_file_wrong_dist(self): }, }, } - ).encode() + ) ) ) @@ -1550,7 +1550,7 @@ def test_frame_release_file_wrong_dist(self): zip_file.writestr("files/_/_/bundle.min.js.map", b"") zip_file.writestr( "manifest.json", - json.dumps( + orjson.dumps( { "files": { "files/_/_/bundle.min.js": { @@ -1569,7 +1569,7 @@ def test_frame_release_file_wrong_dist(self): }, }, } - ), + ).decode(), ) compressed.seek(0) release_artifact_bundle = File.objects.create( diff --git a/tests/sentry/api/helpers/test_autofix.py b/tests/sentry/api/helpers/test_autofix.py index b8c1e827d24b08..ca05a651050c96 100644 --- a/tests/sentry/api/helpers/test_autofix.py +++ b/tests/sentry/api/helpers/test_autofix.py @@ -1,11 +1,11 @@ from unittest import mock from unittest.mock import call, patch +import orjson from django.conf import settings from sentry.api.helpers.autofix import get_project_codebase_indexing_status from sentry.testutils.cases import TestCase -from sentry.utils import json class TestGetProjectCodebaseIndexingStatus(TestCase): @@ -28,7 +28,7 @@ def test_autofix_codebase_status_successful(self, mock_post): assert status == "up_to_date" mock_post.assert_called_once_with( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -71,7 +71,7 @@ def test_autofix_codebase_status_multiple_repos_one_in_progress(self, mock_post) calls = [ call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -87,7 +87,7 @@ def test_autofix_codebase_status_multiple_repos_one_in_progress(self, mock_post) ), call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -132,7 +132,7 @@ def test_autofix_codebase_status_multiple_repos_both_done(self, mock_post): calls = [ call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + 
data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -148,7 +148,7 @@ def test_autofix_codebase_status_multiple_repos_both_done(self, mock_post): ), call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -194,7 +194,7 @@ def test_autofix_codebase_status_multiple_repos_one_not_indexed(self, mock_post) calls = [ call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, @@ -210,7 +210,7 @@ def test_autofix_codebase_status_multiple_repos_one_not_indexed(self, mock_post) ), call( f"{settings.SEER_AUTOFIX_URL}/v1/automation/codebase/index/status", - data=json.dumps( + data=orjson.dumps( { "organization_id": self.project.organization.id, "project_id": self.project.id, diff --git a/tests/sentry/api/serializers/test_app_platform_event.py b/tests/sentry/api/serializers/test_app_platform_event.py index e38e84aa123383..00d629aa6cd1ef 100644 --- a/tests/sentry/api/serializers/test_app_platform_event.py +++ b/tests/sentry/api/serializers/test_app_platform_event.py @@ -1,6 +1,7 @@ +import orjson + from sentry.api.serializers import AppPlatformEvent from sentry.testutils.cases import TestCase -from sentry.utils import json class AppPlatformEventSerializerTest(TestCase): @@ -17,13 +18,16 @@ def test_no_actor(self): resource="event_alert", action="triggered", install=self.install, data={} ) - assert result.body == json.dumps( - { - "action": "triggered", - "installation": {"uuid": self.install.uuid}, - "data": {}, - "actor": {"type": "application", "id": "sentry", "name": "Sentry"}, - } + assert ( + result.body + == orjson.dumps( + { + "action": "triggered", + "installation": {"uuid": self.install.uuid}, + "data": {}, + "actor": {"type": "application", "id": "sentry", "name": "Sentry"}, + } + ).decode() ) signature = self.sentry_app.build_signature(result.body) @@ -41,7 +45,7 @@ def test_sentry_app_actor(self): actor=self.sentry_app.proxy_user, ) - assert json.loads(result.body)["actor"] == { + assert orjson.loads(result.body)["actor"] == { "type": "application", "id": self.sentry_app.uuid, "name": self.sentry_app.name, @@ -62,7 +66,7 @@ def test_user_actor(self): actor=self.user, ) - assert json.loads(result.body)["actor"] == { + assert orjson.loads(result.body)["actor"] == { "type": "user", "id": self.user.id, "name": self.user.name, From 995df95dd4354e2599fc9ff228005f513afb2699 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 9 May 2024 16:47:53 +0000 Subject: [PATCH 214/376] Revert "chore(chartcuterie): Revert Style Changes (#70558)" This reverts commit 8ff17a99a1ed3c4b7d84daccfa5a3fd3077cf935. 
Co-authored-by: iamrajjoshi <33237075+iamrajjoshi@users.noreply.github.com>
---
 static/app/chartcuterie/performance.tsx | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/static/app/chartcuterie/performance.tsx b/static/app/chartcuterie/performance.tsx
index a4d01c31df2c2f..452d440eecddcf 100644
--- a/static/app/chartcuterie/performance.tsx
+++ b/static/app/chartcuterie/performance.tsx
@@ -1,3 +1,4 @@
+import type {LineChartProps} from 'sentry/components/charts/lineChart';
 import {transformToLineSeries} from 'sentry/components/charts/lineChart';
 import getBreakpointChartOptionsFromData, {
   type EventBreakpointChartData,
@@ -10,17 +11,33 @@ import {ChartType} from './types';

 export const performanceCharts: RenderDescriptor<ChartType>[] = [];

+function modifyOptionsForSlack(options: Omit<LineChartProps, 'series'>) {
+  options.legend = options.legend || {};
+  options.legend.icon = 'none';
+  options.legend.left = '25';
+  options.legend.top = '20';
+
+  return {
+    ...options,
+    grid: slackChartDefaults.grid,
+    visualMap: options.options?.visualMap,
+  };
+}
+
 performanceCharts.push({
   key: ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION,
   getOption: (data: EventBreakpointChartData) => {
     const {chartOptions, series} = getBreakpointChartOptionsFromData(data, theme);
     const transformedSeries = transformToLineSeries({series});
+    const modifiedOptions = modifyOptionsForSlack(chartOptions);

     return {
-      ...chartOptions,
+      ...modifiedOptions,
+      backgroundColor: theme.background,
       series: transformedSeries,
       grid: slackChartDefaults.grid,
+      visualMap: modifiedOptions.options?.visualMap,
     };
   },
   ...slackChartSize,

From 6c6900cbe0cdd569c5978e39f088da5d11330890 Mon Sep 17 00:00:00 2001
From: Yash Kamothi
Date: Thu, 9 May 2024 10:07:38 -0700
Subject: [PATCH 215/376] feat(slack): Add support for external issue creation
 (#70523)

Add support for an external issue created activity event notification for Slack threads. To do this, we added a few classes that parse the specific pieces of information we need, and added logging and metrics to track "bad state" activities, as we assume some data to always be there as a form of contract. Also added test cases to help validate the logic and guard it against breaking changes.

With the changes, we should now be able to create a Slack notification that looks like the following:

```
created a (<provider> issue <ticket>)[Link]
```

Special consideration had to be made for Asana for now; we might be able to remove it if we can update the activity data object when an Asana issue is created, but that change would need to be verified to ensure it does not affect other assumptions/contracts.
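For reference, a minimal sketch of the `activity.data` contract the new classes assume (key names are taken from the implementation below; the sample values are hypothetical):

```
# Sketch of the activity.data payload read by _ExternalIssueCreatedActivity.
# Key names ("provider", "label", "location") match the implementation below;
# the values here are made-up examples.
activity_data = {
    "provider": "github",  # name of the external provider, lowercased before use
    "label": "ABC-123",  # ticket number assigned by the provider
    "location": "https://example.com/ABC-123",  # link to the created issue
}

# get_description() then fills the Slack template
#   "{author} created <{link}|a {provider} issue {ticket}>"
# and each missing key falls back to "" (or "external provider" for the
# provider), with a metric and a log line recording the bad state.
```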
--- src/sentry/integrations/slack/service.py | 6 +- .../slack/threads/activity_notifications.py | 189 ++++++++++++++++++ .../activity_notifications/__init__.py | 7 + ...t_asana_external_issue_created_activity.py | 49 +++++ .../test_external_issue_created_activity.py | 107 ++++++++++ ...nal_issue_created_activity_notification.py | 62 ++++++ 6 files changed, 419 insertions(+), 1 deletion(-) create mode 100644 tests/sentry/integrations/slack/threads/activity_notifications/test_asana_external_issue_created_activity.py create mode 100644 tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity.py create mode 100644 tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity_notification.py diff --git a/src/sentry/integrations/slack/service.py b/src/sentry/integrations/slack/service.py index e7c82545e31995..ea3181932d5c49 100644 --- a/src/sentry/integrations/slack/service.py +++ b/src/sentry/integrations/slack/service.py @@ -8,7 +8,10 @@ IssueAlertNotificationMessageRepository, ) from sentry.integrations.slack import BlockSlackMessageBuilder, SlackClient -from sentry.integrations.slack.threads.activity_notifications import AssignedActivityNotification +from sentry.integrations.slack.threads.activity_notifications import ( + AssignedActivityNotification, + ExternalIssueCreatedActivityNotification, +) from sentry.integrations.utils.common import get_active_integration_for_organization from sentry.models.activity import Activity from sentry.models.rule import Rule @@ -45,6 +48,7 @@ ActivityType.SET_ESCALATING: EscalatingActivityNotification, ActivityType.SET_IGNORED: ArchiveActivityNotification, ActivityType.SET_UNRESOLVED: UnresolvedActivityNotification, + ActivityType.CREATE_ISSUE: ExternalIssueCreatedActivityNotification, } diff --git a/src/sentry/integrations/slack/threads/activity_notifications.py b/src/sentry/integrations/slack/threads/activity_notifications.py index 798378936bc870..e3415bc15959ec 100644 --- a/src/sentry/integrations/slack/threads/activity_notifications.py +++ b/src/sentry/integrations/slack/threads/activity_notifications.py @@ -1,9 +1,16 @@ +import logging from collections.abc import Mapping from typing import Any +from sentry.models.activity import Activity from sentry.notifications.notifications.activity.assigned import ( AssignedActivityNotification as BaseAssignedActivityNotification, ) +from sentry.notifications.notifications.activity.base import GroupActivityNotification +from sentry.types.activity import ActivityType +from sentry.utils import metrics + +_default_logger = logging.getLogger(__name__) class AssignedActivityNotification(BaseAssignedActivityNotification): @@ -15,3 +22,185 @@ class AssignedActivityNotification(BaseAssignedActivityNotification): def get_description(self) -> tuple[str, str | None, Mapping[str, Any]]: return "{author} assigned this issue to {assignee}", None, {"assignee": self.get_assignee()} + + +class _ExternalIssueCreatedActivity: + """ + Class responsible for helping derive data from a specific activity type + """ + + _NO_PROVIDER_KEY_METRICS = "sentry.integrations.slack.tasks.activity_notifications.external_issue_created_activity.missing_provider" + _NO_LINK_KEY_METRICS = "sentry.integrations.slack.tasks.activity_notifications.external_issue_created_activity.missing_link" + _NO_LABEL_KEY_METRICS = "sentry.integrations.slack.tasks.activity_notifications.external_issue_created_activity.missing_label" + + DEFAULT_PROVIDER_FALLBACK_TEXT = "external provider" + 
_PROVIDER_KEY = "provider"
+    _TICKET_KEY = "label"
+    _URL_KEY = "location"
+
+    def __init__(self, activity: Activity) -> None:
+        try:
+            activity_type: ActivityType = ActivityType(activity.type)
+        except ValueError as err:
+            _default_logger.info(
+                "there was an error trying to get activity type, assuming activity is unsupported",
+                exc_info=err,
+                extra={
+                    "error": str(err),
+                    "activity_id": activity.id,
+                    "activity_type_raw": activity.type,
+                },
+            )
+            raise
+
+        if activity_type != ActivityType.CREATE_ISSUE:
+            _default_logger.info(
+                "tried to use external issue creator for an improper activity type",
+                extra={
+                    "activity_id": activity.id,
+                    "activity_type_raw": activity.type,
+                    "activity_type": activity_type,
+                },
+            )
+
+            raise Exception(f"Activity type {activity_type} is incorrect")
+        self._activity: Activity = activity
+
+    def get_link(self) -> str:
+        """
+        Returns the link to where the issue was created in the external provider.
+        """
+        link = self._activity.data.get(self._URL_KEY, None)
+        if not link:
+            metrics.incr(
+                self._NO_LINK_KEY_METRICS,
+                sample_rate=1.0,
+            )
+
+            _default_logger.info(
+                "Activity does not have a url key, using fallback",
+                extra={
+                    "activity_id": self._activity.id,
+                },
+            )
+            link = ""
+        return link
+
+    def get_provider(self) -> str:
+        """
+        Returns the provider of the activity for where the issue was created.
+        Returns the value in lowercase to provide a consistent value.
+        If the key is not found, or the value is empty, uses the fallback value.
+        """
+        provider = self._activity.data.get(self._PROVIDER_KEY, None)
+        if not provider:
+            metrics.incr(
+                self._NO_PROVIDER_KEY_METRICS,
+                sample_rate=1.0,
+            )
+            _default_logger.info(
+                "Activity does not have a provider key, using fallback",
+                extra={
+                    "activity_id": self._activity.id,
+                },
+            )
+            provider = self.DEFAULT_PROVIDER_FALLBACK_TEXT
+
+        return provider.lower()
+
+    def get_ticket_number(self) -> str:
+        """
+        Returns the ticket number for the issue that was created on the external provider.
+        """
+        ticket_number = self._activity.data.get(self._TICKET_KEY, None)
+        if not ticket_number:
+            metrics.incr(
+                self._NO_LABEL_KEY_METRICS,
+                sample_rate=1.0,
+            )
+            _default_logger.info(
+                "Activity does not have a label key, using fallback",
+                extra={
+                    "activity_id": self._activity.id,
+                },
+            )
+            ticket_number = ""
+
+        return ticket_number
+
+
+class _AsanaExternalIssueCreatedActivity(_ExternalIssueCreatedActivity):
+    """
+    Override class for Asana as, at this time, the label, or ticket number, does not exist and has to be derived.
+    If feasible, this could be removed once the activity object itself properly has the correct data, but the side
+    effects of that change are not yet known.
+    """
+
+    _DEFAULT_ASANA_LABEL_VALUE = "Asana Issue"
+
+    def get_ticket_number(self) -> str:
+        # Try the base logic first, just in case it works
+        stored_value = super().get_ticket_number()
+        if stored_value != "" and stored_value != self._DEFAULT_ASANA_LABEL_VALUE:
+            return stored_value
+
+        link = self.get_link()
+        if not link:
+            return ""
+
+        # Remove any trailing slashes
+        if link.endswith("/"):
+            link = link[:-1]
+
+        # Split the URL by "/"
+        parts = link.split("/")
+
+        # Get the last part
+        last_part = parts[-1]
+
+        return last_part
+
+
+def _external_issue_activity_factory(activity: Activity) -> _ExternalIssueCreatedActivity:
+    """
+    Returns the correct ExternalIssueCreatedActivity class based on the provider.
+    All classes have the same interface; the method for one is simply modified for its use case.
+ """ + base_activity = _ExternalIssueCreatedActivity(activity=activity) + provider = base_activity.get_provider() + if provider == "asana": + return _AsanaExternalIssueCreatedActivity(activity=activity) + + return base_activity + + +class ExternalIssueCreatedActivityNotification(GroupActivityNotification): + metrics_key = "create_issue" + title = "External Issue Created" + + def get_description(self) -> tuple[str, str | None, Mapping[str, Any]]: + external_issue = _external_issue_activity_factory(activity=self.activity) + + provider = external_issue.get_provider() + # Use proper grammar, so use "an" if it's "external provider" and "a" if it's a regular name + if provider == external_issue.DEFAULT_PROVIDER_FALLBACK_TEXT: + base_template = "an " + else: + base_template = "a " + # Make sure to make the proper noun have correct capitalization + # I.e. github -> Github, jira -> Jira + provider = provider.capitalize() + base_template += "{provider} issue" + + ticket_number = external_issue.get_ticket_number() + if ticket_number: + base_template += " {ticket}" + + link = external_issue.get_link() + if link: + base_template = "<{link}|" + base_template + ">" + + # Template should look something like "{author} created <{link}| a/an {provider} issue {ticket}>" + base_template = "{author} created " + base_template + + return base_template, None, {"provider": provider, "ticket": ticket_number, "link": link} diff --git a/tests/sentry/integrations/slack/threads/activity_notifications/__init__.py b/tests/sentry/integrations/slack/threads/activity_notifications/__init__.py index e69de29bb2d1d6..0e4b89fed3c69b 100644 --- a/tests/sentry/integrations/slack/threads/activity_notifications/__init__.py +++ b/tests/sentry/integrations/slack/threads/activity_notifications/__init__.py @@ -0,0 +1,7 @@ +from sentry.testutils.cases import TestCase +from sentry.types.activity import ActivityType + + +class BaseTestCase(TestCase): + def setUp(self) -> None: + self.activity.type = ActivityType.CREATE_ISSUE diff --git a/tests/sentry/integrations/slack/threads/activity_notifications/test_asana_external_issue_created_activity.py b/tests/sentry/integrations/slack/threads/activity_notifications/test_asana_external_issue_created_activity.py new file mode 100644 index 00000000000000..e6bbfb184169fd --- /dev/null +++ b/tests/sentry/integrations/slack/threads/activity_notifications/test_asana_external_issue_created_activity.py @@ -0,0 +1,49 @@ +from sentry.integrations.slack.threads.activity_notifications import ( + _AsanaExternalIssueCreatedActivity, +) +from tests.sentry.integrations.slack.threads.activity_notifications import BaseTestCase + + +class TestGetTicketNumber(BaseTestCase): + def test_returns_base_value_when_exists(self) -> None: + ticket_number_value = "ABC-123" + self.activity.data = {"label": ticket_number_value} + + create_issue_activity = _AsanaExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_ticket_number() + + assert ret == ticket_number_value + + def test_returns_empty_with_no_link(self) -> None: + self.activity.data = {"label": "Asana Issue"} + create_issue_activity = _AsanaExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_ticket_number() + assert ret == "" + + def test_returns_last_part_of_link(self) -> None: + last_part = "ABC-123" + self.activity.data = {"location": f"www.example.com/{last_part}"} + + create_issue_activity = _AsanaExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_ticket_number() + + assert ret == 
last_part + + def test_returns_last_part_of_link_with_multiple_slashes(self) -> None: + last_part = "ABC-123" + self.activity.data = {"location": f"www.example.com/abc/something/whatever/{last_part}"} + + create_issue_activity = _AsanaExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_ticket_number() + + assert ret == last_part + + def test_accounts_for_trailing_slash(self) -> None: + last_part = "ABC-123" + self.activity.data = {"location": f"www.example.com/{last_part}/"} + + create_issue_activity = _AsanaExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_ticket_number() + + assert ret == last_part diff --git a/tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity.py b/tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity.py new file mode 100644 index 00000000000000..bb102c33894572 --- /dev/null +++ b/tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity.py @@ -0,0 +1,107 @@ +import pytest + +from sentry.integrations.slack.threads.activity_notifications import _ExternalIssueCreatedActivity +from sentry.testutils.cases import TestCase +from sentry.types.activity import ActivityType +from tests.sentry.integrations.slack.threads.activity_notifications import BaseTestCase + + +class TestInit(TestCase): + def test_throws_error_on_invalid_activity_type(self) -> None: + with pytest.raises(Exception): + self.activity.type = 500 + _ExternalIssueCreatedActivity(self.activity) + + def test_throws_error_on_unsupported_activity_type(self) -> None: + with pytest.raises(Exception): + self.activity.type = ActivityType.ASSIGNED + _ExternalIssueCreatedActivity(self.activity) + + def test_success(self) -> None: + self.activity.type = ActivityType.CREATE_ISSUE + obj = _ExternalIssueCreatedActivity(self.activity) + assert obj is not None + + +class TestGetLink(BaseTestCase): + def test_when_link_key_is_not_in_map(self) -> None: + self.activity.data = {} + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_link() + assert ret == "" + + def test_when_link_key_is_empty(self) -> None: + self.activity.data = {"location": None} + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_link() + assert ret == "" + + def test_returns_correct_value(self) -> None: + link_value = "www.example.com" + self.activity.data = {"location": link_value} + + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_link() + + assert ret == link_value + + +class TestGetProvider(BaseTestCase): + def test_returns_fallback_when_provider_key_is_not_in_map(self) -> None: + self.activity.data = {} + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_provider() + assert ret == create_issue_activity.DEFAULT_PROVIDER_FALLBACK_TEXT + + def test_returns_fallback_when_provider_key_is_empty(self) -> None: + self.activity.data = {"provider": None} + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_provider() + assert ret == create_issue_activity.DEFAULT_PROVIDER_FALLBACK_TEXT + + def test_returns_correct_value(self) -> None: + provider_value = "whatever" + self.activity.data = {"provider": provider_value} + + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + ret = 
create_issue_activity.get_provider() + + assert ret == provider_value + + def test_returns_lowercase_value(self) -> None: + provider_value = "WHATEVER" + self.activity.data = {"provider": provider_value} + + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_provider() + + assert ret == provider_value.lower() + + +class TestGetTicketNumber(BaseTestCase): + def test_when_ticket_number_key_is_not_in_map(self) -> None: + self.activity.data = {} + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_ticket_number() + assert ret == "" + + def test_when_ticket_number_key_is_empty(self) -> None: + self.activity.data = {"label": None} + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + + ret = create_issue_activity.get_ticket_number() + assert ret == "" + + def test_returns_correct_value(self) -> None: + ticket_number_value = "ABC-123" + self.activity.data = {"label": ticket_number_value} + + create_issue_activity = _ExternalIssueCreatedActivity(self.activity) + ret = create_issue_activity.get_ticket_number() + + assert ret == ticket_number_value diff --git a/tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity_notification.py b/tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity_notification.py new file mode 100644 index 00000000000000..7b7d51b414bacb --- /dev/null +++ b/tests/sentry/integrations/slack/threads/activity_notifications/test_external_issue_created_activity_notification.py @@ -0,0 +1,62 @@ +from sentry.integrations.slack.threads.activity_notifications import ( + ExternalIssueCreatedActivityNotification, +) +from tests.sentry.integrations.slack.threads.activity_notifications import BaseTestCase + + +class TestGetDescription(BaseTestCase): + def test_basic_case(self) -> None: + provider = "Github" + label = "ABC-123" + location = "www.example.com" + self.activity.data = {"provider": provider, "label": label, "location": location} + + notification = ExternalIssueCreatedActivityNotification(self.activity) + template, _, metadata = notification.get_description() + + assert template == "{author} created <{link}|a {provider} issue {ticket}>" + assert metadata["provider"] == provider + assert metadata["ticket"] == label + assert metadata["link"] == location + + def test_with_default_provider(self) -> None: + provider = "" + label = "ABC-123" + location = "www.example.com" + self.activity.data = {"provider": provider, "label": label, "location": location} + + notification = ExternalIssueCreatedActivityNotification(self.activity) + template, _, metadata = notification.get_description() + + assert template == "{author} created <{link}|an {provider} issue {ticket}>" + assert metadata["provider"] == "external provider" + assert metadata["ticket"] == label + assert metadata["link"] == location + + def test_without_ticket_number(self) -> None: + provider = "Jira" + label = "" + location = "www.example.com" + self.activity.data = {"provider": provider, "label": label, "location": location} + + notification = ExternalIssueCreatedActivityNotification(self.activity) + template, _, metadata = notification.get_description() + + assert template == "{author} created <{link}|a {provider} issue>" + assert metadata["provider"] == provider + assert metadata["ticket"] == label + assert metadata["link"] == location + + def test_without_link(self) -> None: + provider = "Jira" + label = "ABC-123" + 
location = "" + self.activity.data = {"provider": provider, "label": label, "location": location} + + notification = ExternalIssueCreatedActivityNotification(self.activity) + template, _, metadata = notification.get_description() + + assert template == "{author} created a {provider} issue {ticket}" + assert metadata["provider"] == provider + assert metadata["ticket"] == label + assert metadata["link"] == location From 04e3addd18f9cb84d471fbc0645db1583b141f3d Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 13:36:50 -0400 Subject: [PATCH 216/376] perf: use orjson in sentry/lang (#70587) --- src/sentry/lang/dart/utils.py | 6 ++--- src/sentry/lang/java/utils.py | 6 ++--- src/sentry/lang/javascript/errormapping.py | 6 ++--- src/sentry/lang/native/appconnect.py | 12 ++++----- src/sentry/lang/native/sources.py | 9 ++++--- src/sentry/lang/native/symbolicator.py | 11 ++++---- .../sentry/lang/javascript/test_sourcemaps.py | 7 +++-- tests/sentry/lang/native/test_appconnect.py | 26 +++++++++---------- 8 files changed, 41 insertions(+), 42 deletions(-) diff --git a/src/sentry/lang/dart/utils.py b/src/sentry/lang/dart/utils.py index cf02d50de47943..46a2d7bf5fb88c 100644 --- a/src/sentry/lang/dart/utils.py +++ b/src/sentry/lang/dart/utils.py @@ -4,12 +4,12 @@ import re from typing import Any +import orjson import sentry_sdk from sentry.lang.java.utils import deobfuscation_template from sentry.models.debugfile import ProjectDebugFile from sentry.models.project import Project -from sentry.utils import json from sentry.utils.safe import get_path # Obfuscated type values are either in the form of "xyz" or "xyz" where @@ -60,8 +60,8 @@ def generate_dart_symbols_map(uuid: str, project: Project): dart_symbols_file_size_in_mb = os.path.getsize(debug_file_path) / (1024 * 1024.0) span.set_tag("dart_symbols_file_size_in_mb", dart_symbols_file_size_in_mb) - with open(debug_file_path) as f: - debug_array = json.loads(f.read()) + with open(debug_file_path, "rb") as f: + debug_array = orjson.loads(f.read()) if len(debug_array) % 2 != 0: raise Exception("Debug array contains an odd number of elements") diff --git a/src/sentry/lang/java/utils.py b/src/sentry/lang/java/utils.py index d789abfd6e3ff6..9812234ee106fc 100644 --- a/src/sentry/lang/java/utils.py +++ b/src/sentry/lang/java/utils.py @@ -3,6 +3,7 @@ import os from typing import Any +import orjson import sentry_sdk from sentry import options @@ -13,7 +14,6 @@ from sentry.models.debugfile import ProjectDebugFile from sentry.models.project import Project from sentry.stacktraces.processing import StacktraceInfo -from sentry.utils import json from sentry.utils.cache import cache_key_for_event from sentry.utils.safe import get_path @@ -115,7 +115,7 @@ def deobfuscation_template(data, map_type, deobfuscation_fn): new_attachments = [] for attachment in attachments: if attachment.type == "event.view_hierarchy": - view_hierarchy = json.loads(attachment_cache.get_data(attachment)) + view_hierarchy = orjson.loads(attachment_cache.get_data(attachment)) deobfuscation_fn(data, project, view_hierarchy) # Reupload to cache as a unchunked data @@ -125,7 +125,7 @@ def deobfuscation_template(data, map_type, deobfuscation_fn): id=attachment.id, name=attachment.name, content_type=attachment.content_type, - data=json.dumps_htmlsafe(view_hierarchy).encode(), + data=orjson.dumps(view_hierarchy), chunks=None, ) ) diff --git a/src/sentry/lang/javascript/errormapping.py b/src/sentry/lang/javascript/errormapping.py index 9c197496e541a9..9253984e9d39ba 100644 --- 
a/src/sentry/lang/javascript/errormapping.py +++ b/src/sentry/lang/javascript/errormapping.py @@ -6,11 +6,11 @@ import time from urllib.parse import parse_qsl +import orjson from django.conf import settings from django.core.cache import cache from sentry import http -from sentry.utils import json from sentry.utils.meta import Meta from sentry.utils.safe import get_path from sentry.utils.strings import count_sprintf_parameters @@ -44,7 +44,7 @@ def load_mapping(self): mapping = cache.get(key) cached_rv = None if mapping is not None: - ts, cached_rv = json.loads(mapping) + ts, cached_rv = orjson.loads(mapping) if not is_expired(ts): return cached_rv @@ -58,7 +58,7 @@ def load_mapping(self): # Make sure we only get a 2xx to prevent caching bad data response.raise_for_status() data = response.json() - cache.set(key, json.dumps([time.time(), data]), HARD_TIMEOUT) + cache.set(key, orjson.dumps([time.time(), data]).decode(), HARD_TIMEOUT) except Exception: if cached_rv is None: raise diff --git a/src/sentry/lang/native/appconnect.py b/src/sentry/lang/native/appconnect.py index 5b3f0158ee73d6..a59593996e8d52 100644 --- a/src/sentry/lang/native/appconnect.py +++ b/src/sentry/lang/native/appconnect.py @@ -10,13 +10,13 @@ from typing import Any import jsonschema +import orjson import requests import sentry_sdk from django.db import router, transaction from sentry.lang.native.sources import APP_STORE_CONNECT_SCHEMA, secret_fields from sentry.models.project import Project -from sentry.utils import json from sentry.utils.appleconnect import appstore_connect logger = logging.getLogger(__name__) @@ -123,7 +123,7 @@ def from_project_config(cls, project: Project, config_id: str) -> "AppStoreConne if not raw: raw = "[]" - all_sources = json.loads(raw) + all_sources = orjson.loads(raw) for source in all_sources: if source.get("type") == SYMBOL_SOURCE_TYPE_NAME and (source.get("id") == config_id): return cls.from_json(source) @@ -136,7 +136,7 @@ def all_config_ids(project: Project) -> list[str]: raw = project.get_option(SYMBOL_SOURCES_PROP_NAME) if not raw: raw = "[]" - all_sources = json.loads(raw) + all_sources = orjson.loads(raw) return [ s.get("id") for s in all_sources @@ -177,7 +177,7 @@ def to_redacted_json(self) -> dict[str, Any]: data[to_redact] = {"hidden-secret": True} return data - def update_project_symbol_source(self, project: Project, allow_multiple: bool) -> json.JSONData: + def update_project_symbol_source(self, project: Project, allow_multiple: bool) -> Any: """Updates this configuration in the Project's symbol sources. If a symbol source of type ``appStoreConnect`` already exists the ID must match and it @@ -194,7 +194,7 @@ def update_project_symbol_source(self, project: Project, allow_multiple: bool) - """ with transaction.atomic(router.db_for_write(Project)): all_sources_raw = project.get_option(SYMBOL_SOURCES_PROP_NAME) - all_sources = json.loads(all_sources_raw) if all_sources_raw else [] + all_sources = orjson.loads(all_sources_raw) if all_sources_raw else [] for i, source in enumerate(all_sources): if source.get("type") == SYMBOL_SOURCE_TYPE_NAME: if source.get("id") == self.id: @@ -207,7 +207,7 @@ def update_project_symbol_source(self, project: Project, allow_multiple: bool) - else: # No matching existing appStoreConnect symbol source, append it. 
all_sources.append(self.to_json()) - project.update_option(SYMBOL_SOURCES_PROP_NAME, json.dumps(all_sources)) + project.update_option(SYMBOL_SOURCES_PROP_NAME, orjson.dumps(all_sources).decode()) return all_sources diff --git a/src/sentry/lang/native/sources.py b/src/sentry/lang/native/sources.py index f9b7ba0fe1ff65..e5d3128c96dafb 100644 --- a/src/sentry/lang/native/sources.py +++ b/src/sentry/lang/native/sources.py @@ -8,6 +8,7 @@ from typing import Any import jsonschema +import orjson import sentry_sdk from django.conf import settings from django.urls import reverse @@ -16,7 +17,7 @@ from sentry import features, options from sentry.auth.system import get_system_token from sentry.models.project import Project -from sentry.utils import json, metrics, redis, safe +from sentry.utils import metrics, redis, safe from sentry.utils.http import get_origins logger = logging.getLogger(__name__) @@ -374,7 +375,7 @@ def parse_sources(config, filter_appconnect=True): return [] try: - sources = json.loads(config) + sources = orjson.loads(config) except Exception as e: raise InvalidSourcesError(f"{e}") @@ -397,7 +398,7 @@ def parse_backfill_sources(sources_json, original_sources): return [] try: - sources = json.loads(sources_json) + sources = orjson.loads(sources_json) except Exception as e: raise InvalidSourcesError("Sources are not valid serialised JSON") from e @@ -431,7 +432,7 @@ def backfill_source(source, original_sources_by_id): source[secret] = secret_value -def redact_source_secrets(config_sources: json.JSONData) -> json.JSONData: +def redact_source_secrets(config_sources: Any) -> Any: """ Returns a JSONData with all of the secrets redacted from every source. diff --git a/src/sentry/lang/native/symbolicator.py b/src/sentry/lang/native/symbolicator.py index 65b6a8eda0e544..a3e05efbf8b039 100644 --- a/src/sentry/lang/native/symbolicator.py +++ b/src/sentry/lang/native/symbolicator.py @@ -9,6 +9,7 @@ from enum import Enum from urllib.parse import urljoin +import orjson import sentry_sdk from django.conf import settings from requests.exceptions import RequestException @@ -22,7 +23,7 @@ ) from sentry.models.project import Project from sentry.net.http import Session -from sentry.utils import json, metrics +from sentry.utils import metrics MAX_ATTEMPTS = 3 @@ -165,8 +166,8 @@ def process_minidump(self, minidump): (sources, process_response) = sources_for_symbolication(self.project) scraping_config = get_scraping_config(self.project) data = { - "sources": json.dumps(sources), - "scraping": json.dumps(scraping_config), + "sources": orjson.dumps(sources).decode(), + "scraping": orjson.dumps(scraping_config).decode(), "options": '{"dif_candidates": true}', } @@ -182,8 +183,8 @@ def process_applecrashreport(self, report): (sources, process_response) = sources_for_symbolication(self.project) scraping_config = get_scraping_config(self.project) data = { - "sources": json.dumps(sources), - "scraping": json.dumps(scraping_config), + "sources": orjson.dumps(sources).decode(), + "scraping": orjson.dumps(scraping_config).decode(), "options": '{"dif_candidates": true}', } diff --git a/tests/sentry/lang/javascript/test_sourcemaps.py b/tests/sentry/lang/javascript/test_sourcemaps.py index a589fba7f94b0e..50cae7a4476213 100644 --- a/tests/sentry/lang/javascript/test_sourcemaps.py +++ b/tests/sentry/lang/javascript/test_sourcemaps.py @@ -1,9 +1,8 @@ from unittest import TestCase +import orjson from symbolic.sourcemap import SourceMapTokenMatch, SourceMapView -from sentry.utils import json - sourcemap = b"""{ 
"version":3, "file":"file.min.js", @@ -13,7 +12,7 @@ "sourceRoot": "foo" }""" -indexed_sourcemap_example = json.dumps( +indexed_sourcemap_example = orjson.dumps( { "version": 3, "file": "min.js", @@ -48,7 +47,7 @@ }, ], } -).encode("utf-8") +) class FindSourceTest(TestCase): diff --git a/tests/sentry/lang/native/test_appconnect.py b/tests/sentry/lang/native/test_appconnect.py index 16d3e4d57102d1..6c43d9c917faba 100644 --- a/tests/sentry/lang/native/test_appconnect.py +++ b/tests/sentry/lang/native/test_appconnect.py @@ -1,15 +1,15 @@ import pathlib import uuid from datetime import datetime -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from unittest import mock +import orjson import pytest from django.utils import timezone from sentry.lang.native import appconnect from sentry.testutils.pytest.fixtures import django_db_all -from sentry.utils import json from sentry.utils.appleconnect import appstore_connect if TYPE_CHECKING: @@ -23,7 +23,7 @@ def now(self) -> datetime: return timezone.now() @pytest.fixture - def data(self, now: datetime) -> json.JSONData: + def data(self, now: datetime) -> Any: return { "type": "appStoreConnect", "id": "abc123", @@ -36,7 +36,7 @@ def data(self, now: datetime) -> json.JSONData: "bundleId": "com.example.app", } - def test_from_json_basic(self, data: json.JSONData, now: datetime) -> None: + def test_from_json_basic(self, data: Any, now: datetime) -> None: config = appconnect.AppStoreConnectConfig.from_json(data) assert config.type == "appStoreConnect" assert config.id == data["id"] @@ -46,13 +46,13 @@ def test_from_json_basic(self, data: json.JSONData, now: datetime) -> None: assert config.appName == data["appName"] assert config.bundleId == data["bundleId"] - def test_to_json(self, data: json.JSONData, now: datetime) -> None: + def test_to_json(self, data: Any, now: datetime) -> None: config = appconnect.AppStoreConnectConfig.from_json(data) new_data = config.to_json() assert new_data == data - def test_to_redacted_json(self, data: json.JSONData, now: datetime) -> None: + def test_to_redacted_json(self, data: Any, now: datetime) -> None: config = appconnect.AppStoreConnectConfig.from_json(data) new_data = config.to_redacted_json() @@ -62,9 +62,7 @@ def test_to_redacted_json(self, data: json.JSONData, now: datetime) -> None: assert new_data == data @django_db_all - def test_from_project_config_empty_sources( - self, default_project: "Project", data: json.JSONData - ) -> None: + def test_from_project_config_empty_sources(self, default_project: "Project", data: Any) -> None: with pytest.raises(KeyError): appconnect.AppStoreConnectConfig.from_project_config(default_project, "not-an-id") @@ -94,16 +92,16 @@ def test_new_source( assert cfg == config raw = default_project.get_option(appconnect.SYMBOL_SOURCES_PROP_NAME, default="[]") - stored_sources = json.loads(raw) + stored_sources = orjson.loads(raw) assert stored_sources == sources @django_db_all def test_new_sources_with_existing( self, default_project: "Project", config: appconnect.AppStoreConnectConfig ) -> None: - old_sources = json.dumps( + old_sources = orjson.dumps( [{"type": "not-this-one", "id": "a"}, {"type": "not-this-one", "id": "b"}] - ) + ).decode() default_project.update_option(appconnect.SYMBOL_SOURCES_PROP_NAME, old_sources) sources = config.update_project_symbol_source(default_project, allow_multiple=False) @@ -112,10 +110,10 @@ def test_new_sources_with_existing( assert cfg == config raw = default_project.get_option(appconnect.SYMBOL_SOURCES_PROP_NAME, default="[]") 
- stored_sources = json.loads(raw) + stored_sources = orjson.loads(raw) assert stored_sources == sources - new_sources = json.loads(old_sources) + new_sources = orjson.loads(old_sources) new_sources.append(cfg.to_json()) assert stored_sources == new_sources From 17dd86108275373098a05ff4cfb71d301b8622e4 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Thu, 9 May 2024 13:37:25 -0400 Subject: [PATCH 217/376] feat(traces): Make timeline visualization simpler (#70586) ### Summary This simplifies the timeline visualization and only moves blocks you have hovered. --- .../app/views/performance/traces/content.tsx | 7 +-- .../performance/traces/fieldRenderers.tsx | 49 ++++++++++++------- static/app/views/performance/traces/utils.tsx | 11 +++++ 3 files changed, 46 insertions(+), 21 deletions(-) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index 45fe03eef5eb18..78cec828607bf6 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -435,6 +435,7 @@ function useTraces({ suggestedQuery, sort, per_page: limit, + minBreakdownPercentage: 1 / 40, maxSpansPerTrace: 5, mri, metricsQuery, @@ -523,14 +524,14 @@ const BreakdownPanelItem = styled(StyledPanelItem)<{highlightedSliceName: string ${p => p.highlightedSliceName ? `--highlightedSlice-${p.highlightedSliceName}-opacity: 1.0; - --highlightedSlice-${p.highlightedSliceName}-transform: translateY(-2px); + --highlightedSlice-${p.highlightedSliceName}-transform: translateY(0px); ` : null} ${p => p.highlightedSliceName ? ` - --defaultSlice-opacity: 0.3; - --defaultSlice-transform: translateY(1px); + --defaultSlice-opacity: 1.0; + --defaultSlice-transform: translateY(0px); ` : ` --defaultSlice-opacity: 1.0; diff --git a/static/app/views/performance/traces/fieldRenderers.tsx b/static/app/views/performance/traces/fieldRenderers.tsx index c09ea46424fa13..4409d3bcd3ddcf 100644 --- a/static/app/views/performance/traces/fieldRenderers.tsx +++ b/static/app/views/performance/traces/fieldRenderers.tsx @@ -1,3 +1,4 @@ +import {useState} from 'react'; import {type Theme, useTheme} from '@emotion/react'; import styled from '@emotion/styled'; @@ -26,7 +27,7 @@ import {transactionSummaryRouteWithQuery} from 'sentry/views/performance/transac import type {TraceResult} from './content'; import type {Field} from './data'; -import {getStylingSliceName} from './utils'; +import {getShortenedSdkName, getStylingSliceName} from './utils'; interface ProjectRendererProps { projectSlug: string; @@ -52,17 +53,19 @@ export function ProjectRenderer({projectSlug, hideName}: ProjectRendererProps) { ); } -export const TraceBreakdownContainer = styled('div')` +export const TraceBreakdownContainer = styled('div')<{hoveredIndex?: number}>` position: relative; display: flex; min-width: 200px; height: 15px; background-color: ${p => p.theme.gray100}; + ${p => `--hoveredSlice-${p.hoveredIndex ?? -1}-translateY: translateY(-3px)`}; `; const RectangleTraceBreakdown = styled(RowRectangle)<{ sliceColor: string; sliceName: string | null; + offset?: number; }>` background-color: ${p => p.sliceColor}; position: relative; @@ -72,7 +75,7 @@ const RectangleTraceBreakdown = styled(RowRectangle)<{ opacity: var(--highlightedSlice-${p.sliceName ?? ''}-opacity, var(--defaultSlice-opacity, 1.0)); `} ${p => ` - transform: var(--highlightedSlice-${p.sliceName ?? 
''}-transform, var(--defaultSlice-transform, 1.0)); + transform: var(--hoveredSlice-${p.offset}-translateY, var(--highlightedSlice-${p.sliceName ?? ''}-transform, var(--defaultSlice-transform, 1.0))); `} transition: opacity,transform 0.2s cubic-bezier(0.4, 0, 0.2, 1); `; @@ -86,10 +89,15 @@ export function TraceBreakdownRenderer({ trace: TraceResult; }) { const theme = useTheme(); + const [hoveredIndex, setHoveredIndex] = useState(-1); return ( - - {trace.breakdowns.map(breakdown => { + setHoveredIndex(-1)} + > + {trace.breakdowns.map((breakdown, index) => { return ( + offset={index} + onMouseEnter={() => { + setHoveredIndex(index); breakdown.project ? setHighlightedSliceName( getStylingSliceName(breakdown.project, breakdown.sdkName) ?? '' ) - : null - } + : null; + }} /> ); })} @@ -124,6 +134,7 @@ export function SpanBreakdownSliceRenderer({ sliceEnd, sliceSecondaryName, onMouseEnter, + offset, }: { onMouseEnter: () => void; sliceEnd: number; @@ -132,6 +143,7 @@ export function SpanBreakdownSliceRenderer({ sliceStart: number; theme: Theme; trace: TraceResult; + offset?: number; }) { const traceDuration = trace.end - trace.start; @@ -169,14 +181,7 @@ export function SpanBreakdownSliceRenderer({ {sliceName ? : null} {sliceName} - - {sliceSecondaryName ? ( - - {'\u2014'} -   - {sliceSecondaryName} - - ) : null} + ({getShortenedSdkName(sliceSecondaryName)})
@@ -185,12 +190,20 @@ export function SpanBreakdownSliceRenderer({ } containerDisplayMode="block" > - + ); } +const Subtext = styled('span')` + font-weight: 400; + color: ${p => p.theme.gray300}; +`; const FlexContainer = styled('div')` display: flex; flex-direction: row; @@ -314,7 +327,7 @@ export function TraceIssuesRenderer({trace}: {trace: TraceResult}) { to={normalizeUrl({ pathname: `/organizations/${organization.slug}/issues`, query: { - query: `is:unresolved trace:"${trace.trace}"`, + query: `trace:"${trace.trace}"`, }, })} size="xs" diff --git a/static/app/views/performance/traces/utils.tsx b/static/app/views/performance/traces/utils.tsx index 334eb0e91badbf..5b2309a36468bc 100644 --- a/static/app/views/performance/traces/utils.tsx +++ b/static/app/views/performance/traces/utils.tsx @@ -62,3 +62,14 @@ export function generateTracesRouteWithQuery({ }, }; } + +export function getShortenedSdkName(sdkName: string | null) { + if (!sdkName) { + return ''; + } + const sdkNameParts = sdkName.split('.'); + if (sdkNameParts.length <= 1) { + return sdkName; + } + return sdkNameParts[sdkNameParts.length - 1]; +} From 39dfe1844e033c6abd4a7abd0ce766c443710340 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 13:42:56 -0400 Subject: [PATCH 218/376] perf: use orjson in dynamic sampling (#70584) --- .../rules/biases/custom_rule_bias.py | 7 +++--- src/sentry/dynamic_sampling/rules/utils.py | 23 ++++++++----------- .../helpers/boost_low_volume_transactions.py | 6 ++--- 3 files changed, 16 insertions(+), 20 deletions(-) diff --git a/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py b/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py index 5b73730fb5565e..930a2ca3f715d5 100644 --- a/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py +++ b/src/sentry/dynamic_sampling/rules/biases/custom_rule_bias.py @@ -1,14 +1,13 @@ import logging from typing import cast +import orjson from sentry_relay.processing import validate_rule_condition from sentry.dynamic_sampling.rules.biases.base import Bias from sentry.dynamic_sampling.rules.utils import Condition, PolymorphicRule from sentry.models.dynamicsampling import CUSTOM_RULE_DATE_FORMAT, CustomDynamicSamplingRule from sentry.models.project import Project -from sentry.utils import json -from sentry.utils.json import JSONDecodeError logger = logging.getLogger(__name__) @@ -34,7 +33,7 @@ def generate_rules(self, project: Project, base_sample_rate: float) -> list[Poly continue try: - condition = cast(Condition, json.loads(rule.condition)) + condition = cast(Condition, orjson.loads(rule.condition)) ret_val.append( { "samplingValue": {"type": "reservoir", "limit": rule.num_samples}, @@ -47,7 +46,7 @@ def generate_rules(self, project: Project, base_sample_rate: float) -> list[Poly }, } ) - except JSONDecodeError: + except orjson.JSONDecodeError: logger.exception( "Custom rule with invalid json found", extra={"rule_id": rule.rule_id, "condition": rule.condition}, diff --git a/src/sentry/dynamic_sampling/rules/utils.py b/src/sentry/dynamic_sampling/rules/utils.py index f7bc1bf1b3cbfd..5afb83ec793f18 100644 --- a/src/sentry/dynamic_sampling/rules/utils.py +++ b/src/sentry/dynamic_sampling/rules/utils.py @@ -1,11 +1,12 @@ from enum import Enum -from typing import Any, Literal, NotRequired, TypedDict, Union +from typing import Literal, NotRequired, TypedDict, Union +import orjson from django.conf import settings from rediscluster import RedisCluster from sentry.models.dynamicsampling import CUSTOM_RULE_START -from sentry.utils 
import json, redis +from sentry.utils import redis BOOSTED_RELEASES_LIMIT = 10 @@ -192,15 +193,18 @@ def get_rule_type(rule: Rule) -> RuleType | None: def get_rule_hash(rule: PolymorphicRule) -> int: # We want to be explicit in what we use for computing the hash. In addition, we need to remove certain fields like # the sampleRate. - return json.dumps( - _deep_sorted( + return ( + orjson.dumps( { "id": rule["id"], "type": rule["type"], "condition": rule["condition"], - } + }, + option=orjson.OPT_SORT_KEYS, ) - ).__hash__() + .decode() + .__hash__() + ) def get_sampling_value(rule: PolymorphicRule) -> tuple[str, float] | None: @@ -213,13 +217,6 @@ def get_sampling_value(rule: PolymorphicRule) -> tuple[str, float] | None: return None -def _deep_sorted(value: Any | dict[Any, Any]) -> Any | dict[Any, Any]: - if isinstance(value, dict): - return {key: _deep_sorted(value) for key, value in sorted(value.items())} - else: - return value - - def get_user_biases(user_set_biases: list[ActivatableBias] | None) -> list[ActivatableBias]: if user_set_biases is None: return DEFAULT_BIASES diff --git a/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_transactions.py b/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_transactions.py index 32ad1d0aa11462..648551b4517fdf 100644 --- a/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_transactions.py +++ b/src/sentry/dynamic_sampling/tasks/helpers/boost_low_volume_transactions.py @@ -1,10 +1,10 @@ from collections.abc import Mapping +import orjson import sentry_sdk from sentry.dynamic_sampling.models.common import RebalancedItem from sentry.dynamic_sampling.rules.utils import get_redis_client_for_ds -from sentry.utils import json def _get_cache_key(org_id: int, proj_id: int) -> str: @@ -19,7 +19,7 @@ def get_transactions_resampling_rates( try: serialised_val = redis_client.get(cache_key) if serialised_val: - return json.loads(serialised_val) + return orjson.loads(serialised_val) except (TypeError, ValueError) as e: sentry_sdk.capture_exception(e) @@ -33,6 +33,6 @@ def set_transactions_resampling_rates( cache_key = _get_cache_key(org_id=org_id, proj_id=proj_id) named_rates_dict = {rate.id: rate.new_sample_rate for rate in named_rates} val = [named_rates_dict, default_rate] - val_str = json.dumps(val) + val_str = orjson.dumps(val).decode() redis_client.set(cache_key, val_str) redis_client.pexpire(cache_key, ttl_ms) From e311537fb40aab5f910c1884acacd5b29ac487ec Mon Sep 17 00:00:00 2001 From: Mark Story Date: Thu, 9 May 2024 13:43:47 -0400 Subject: [PATCH 219/376] chore(actor) Update usage of RpcActor -> Actor (#70575) Complete the rename of RpcActor to Actor by updating all usage and removing the shim path. 
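At a typical call site the change is just the import path and the class name; a representative before/after, assembled from the diffs below (`rule` stands in for any model with `owner_user_id`/`owner_team_id` columns):

```
# Before: the hybrid cloud shim path
# from sentry.services.hybrid_cloud.actor import RpcActor
# owner = RpcActor.from_id(user_id=rule.owner_user_id, team_id=rule.owner_team_id)

# After: the canonical location and name
from sentry.types.actor import Actor

owner = Actor.from_id(user_id=rule.owner_user_id, team_id=rule.owner_team_id)
```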
--- src/sentry/api/endpoints/event_owners.py | 6 +- .../api/endpoints/organization_unsubscribe.py | 4 +- .../api/endpoints/project_rule_details.py | 4 +- src/sentry/api/fields/actor.py | 4 +- src/sentry/api/helpers/group_index/update.py | 10 +-- .../helpers/group_index/validators/group.py | 4 +- .../api/serializers/models/alert_rule.py | 4 +- src/sentry/api/serializers/models/rule.py | 4 +- .../serializers/rest_framework/mentions.py | 8 +- src/sentry/digests/utils.py | 16 ++-- src/sentry/incidents/action_handlers.py | 4 +- src/sentry/incidents/logic.py | 14 ++-- .../discord/views/link_identity.py | 2 +- .../discord/views/unlink_identity.py | 2 +- .../msteams/card_builder/notifications.py | 6 +- .../integrations/msteams/notifications.py | 8 +- src/sentry/integrations/notifications.py | 14 ++-- .../slack/message_builder/issues.py | 8 +- .../message_builder/notifications/base.py | 4 +- .../notifications/daily_summary.py | 4 +- .../message_builder/notifications/digest.py | 4 +- .../message_builder/notifications/issues.py | 4 +- .../integrations/slack/notifications.py | 12 +-- src/sentry/issues/issue_occurrence.py | 8 +- src/sentry/issues/occurrence_consumer.py | 2 +- src/sentry/mail/adapter.py | 4 +- src/sentry/mail/notifications.py | 18 ++--- src/sentry/models/group.py | 4 +- src/sentry/models/groupassignee.py | 8 +- src/sentry/models/groupowner.py | 4 +- src/sentry/models/groupsubscription.py | 10 +-- src/sentry/models/projectownership.py | 6 +- .../monitors/consumers/monitor_consumer.py | 2 +- .../endpoints/organization_monitor_index.py | 6 +- src/sentry/monitors/models.py | 6 +- src/sentry/monitors/serializers.py | 8 +- src/sentry/notifications/helpers.py | 20 ++--- .../notifications/notificationcontroller.py | 18 ++--- .../notifications/activity/base.py | 12 +-- .../notifications/activity/escalating.py | 4 +- .../activity/new_processing_issues.py | 10 +-- .../notifications/activity/note.py | 4 +- .../notifications/activity/release.py | 14 ++-- .../notifications/notifications/base.py | 38 ++++----- .../notifications/codeowners_auto_sync.py | 8 +- .../notifications/daily_summary.py | 14 ++-- .../notifications/notifications/digest.py | 18 ++--- .../notifications/integration_nudge.py | 16 ++-- .../notifications/missing_members_nudge.py | 8 +- .../abstract_invite_request.py | 8 +- .../organization_request/base.py | 12 +-- .../integration_request.py | 8 +- .../organization_request/invite_request.py | 6 +- .../organization_request/join_request.py | 6 +- .../notifications/notifications/rules.py | 10 +-- .../role_based_recipient_strategy.py | 6 +- .../notifications/user_report.py | 4 +- src/sentry/notifications/notify.py | 10 +-- .../notifications/utils/participants.py | 78 +++++++++---------- src/sentry/ownership/grammar.py | 8 +- src/sentry/plugins/bases/notify.py | 4 +- src/sentry/rules/actions/utils.py | 6 +- src/sentry/services/hybrid_cloud/actor.py | 12 --- .../hybrid_cloud/notifications/impl.py | 10 +-- .../hybrid_cloud/notifications/service.py | 10 +-- src/sentry/tasks/summaries/daily_summary.py | 4 +- src/sentry/testutils/helpers/notifications.py | 4 +- src/sentry/web/frontend/debug/mail.py | 4 +- .../endpoints/test_project_rule_details.py | 4 +- .../endpoints/test_project_team_details.py | 8 +- .../sentry/api/endpoints/test_rule_snooze.py | 4 +- .../endpoints/test_team_alerts_triggered.py | 12 +-- tests/sentry/api/helpers/test_group_index.py | 30 +++---- .../rest_framework/test_mentions.py | 10 +-- .../sentry/api/serializers/test_alert_rule.py | 8 +- 
tests/sentry/api/serializers/test_fields.py | 2 +- tests/sentry/deletions/test_organization.py | 4 +- tests/sentry/deletions/test_team.py | 4 +- tests/sentry/digests/test_utilities.py | 2 +- ...ganization_combined_rule_index_endpoint.py | 16 ++-- .../test_organization_incident_index.py | 6 +- tests/sentry/incidents/test_logic.py | 14 ++-- .../msteams/test_notifications.py | 12 +-- .../slack/test_message_builder.py | 6 +- .../test_notification_utilities.py | 6 +- tests/sentry/issues/test_issue_occurrence.py | 12 +-- tests/sentry/mail/activity/test_note.py | 4 +- tests/sentry/mail/activity/test_release.py | 20 ++--- tests/sentry/mail/test_adapter.py | 10 +-- tests/sentry/models/test_groupsubscription.py | 4 +- tests/sentry/models/test_project.py | 10 +-- tests/sentry/models/test_projectownership.py | 30 +++---- .../test_organization_request.py | 4 +- tests/sentry/notifications/test_helpers.py | 6 +- .../test_notificationcontroller.py | 16 ++-- .../notifications/utils/test_participants.py | 30 ++++--- .../{hybridcloud => types}/test_actor.py | 42 +++++----- 97 files changed, 466 insertions(+), 500 deletions(-) delete mode 100644 src/sentry/services/hybrid_cloud/actor.py rename tests/sentry/{hybridcloud => types}/test_actor.py (84%) diff --git a/src/sentry/api/endpoints/event_owners.py b/src/sentry/api/endpoints/event_owners.py index 5beadd6975a27f..9b2bf67bf0b853 100644 --- a/src/sentry/api/endpoints/event_owners.py +++ b/src/sentry/api/endpoints/event_owners.py @@ -9,7 +9,7 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.actor import ActorSerializer from sentry.models.projectownership import ProjectOwnership -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor @region_silo_endpoint @@ -40,9 +40,7 @@ def get(self, request: Request, project, event_id) -> Response: if owners == ProjectOwnership.Everyone: owners = [] - serialized_owners = serialize( - RpcActor.resolve_many(owners), request.user, ActorSerializer() - ) + serialized_owners = serialize(Actor.resolve_many(owners), request.user, ActorSerializer()) # Make sure the serialized owners are in the correct order ordered_owners = [] diff --git a/src/sentry/api/endpoints/organization_unsubscribe.py b/src/sentry/api/endpoints/organization_unsubscribe.py index 9c8dcadeef8f55..12466807112f03 100644 --- a/src/sentry/api/endpoints/organization_unsubscribe.py +++ b/src/sentry/api/endpoints/organization_unsubscribe.py @@ -20,8 +20,8 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.notifications.service import notifications_service +from sentry.types.actor import Actor, ActorType T = TypeVar("T", bound=BaseModel) @@ -103,7 +103,7 @@ def add_instance_data(self, data: dict[str, Any], instance: Project) -> dict[str def unsubscribe(self, request: Request, instance: Project): notifications_service.update_notification_options( - actor=RpcActor(id=request.user.pk, actor_type=ActorType.USER), + actor=Actor(id=request.user.pk, actor_type=ActorType.USER), type=NotificationSettingEnum.ISSUE_ALERTS, scope_type=NotificationScopeEnum.PROJECT, scope_identifier=instance.id, diff --git a/src/sentry/api/endpoints/project_rule_details.py b/src/sentry/api/endpoints/project_rule_details.py index e70552aaf60c2e..b8effef82796ee 100644 --- a/src/sentry/api/endpoints/project_rule_details.py +++ b/src/sentry/api/endpoints/project_rule_details.py @@ -43,9 +43,9 @@ from sentry.models.user 
import User from sentry.rules.actions import trigger_sentry_app_action_creators_for_issues from sentry.rules.actions.utils import get_changed_data, get_updated_rule_data -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.signals import alert_rule_edited from sentry.tasks.integrations.slack import find_channel_id_for_rule +from sentry.types.actor import Actor from sentry.utils import metrics logger = logging.getLogger(__name__) @@ -235,7 +235,7 @@ def put(self, request: Request, project, rule) -> Response: if rule.environment_id: rule_data_before["environment_id"] = rule.environment_id if rule.owner_team_id or rule.owner_user_id: - rule_data_before["owner"] = RpcActor.from_id( + rule_data_before["owner"] = Actor.from_id( user_id=rule.owner_user_id, team_id=rule.owner_team_id ) rule_data_before["label"] = rule.label diff --git a/src/sentry/api/fields/actor.py b/src/sentry/api/fields/actor.py index d763dee5cf0f66..fb13c515b6ae3e 100644 --- a/src/sentry/api/fields/actor.py +++ b/src/sentry/api/fields/actor.py @@ -4,7 +4,7 @@ from drf_spectacular.utils import extend_schema_field from rest_framework import serializers -from sentry.services.hybrid_cloud.actor import RpcActor, parse_and_validate_actor +from sentry.types.actor import Actor, parse_and_validate_actor @extend_schema_field(field=OpenApiTypes.STR) @@ -15,5 +15,5 @@ def __init__(self, *args, **kwds): def to_representation(self, value): return value.identifier - def to_internal_value(self, data) -> RpcActor | None: + def to_internal_value(self, data) -> Actor | None: return parse_and_validate_actor(data, self.context["organization"].id) diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index e8e4fc61795cdb..2f9bd4ff5af1b7 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -46,13 +46,13 @@ from sentry.models.user import User from sentry.notifications.types import SUBSCRIPTION_REASON_MAP, GroupSubscriptionReason from sentry.services.hybrid_cloud import coerce_id_from -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service from sentry.services.hybrid_cloud.user_option import user_option_service from sentry.signals import issue_resolved from sentry.tasks.integrations import kick_off_status_syncs from sentry.types.activity import ActivityType +from sentry.types.actor import Actor, ActorType from sentry.types.group import SUBSTATUS_UPDATE_CHOICES, GroupSubStatus, PriorityLevel from sentry.utils import metrics @@ -106,10 +106,10 @@ def handle_discard( def self_subscribe_and_assign_issue( acting_user: User | RpcUser | None, group: Group, self_assign_issue: str -) -> RpcActor | None: +) -> Actor | None: # Used during issue resolution to assign to acting user # returns None if the user didn't elect to self assign on resolution - # or the group is assigned already, otherwise returns RpcActor + # or the group is assigned already, otherwise returns Actor # representation of current user if acting_user: GroupSubscription.objects.subscribe( @@ -117,7 +117,7 @@ def self_subscribe_and_assign_issue( ) if self_assign_issue == "1" and not group.assignee_set.exists(): - return RpcActor(id=acting_user.id, actor_type=ActorType.USER) + return Actor(id=acting_user.id, actor_type=ActorType.USER) return None @@ -841,7 +841,7 @@ def handle_is_public( def handle_assigned_to( - assigned_actor: RpcActor, + 
assigned_actor: Actor, assigned_by: str | None, integration: str | None, group_list: list[Group], diff --git a/src/sentry/api/helpers/group_index/validators/group.py b/src/sentry/api/helpers/group_index/validators/group.py index 204bda7f0723cf..501c45d90df067 100644 --- a/src/sentry/api/helpers/group_index/validators/group.py +++ b/src/sentry/api/helpers/group_index/validators/group.py @@ -5,7 +5,7 @@ from sentry.api.fields import ActorField from sentry.models.group import STATUS_UPDATE_CHOICES -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.group import SUBSTATUS_UPDATE_CHOICES, PriorityLevel from . import InboxDetailsValidator, StatusDetailsValidator @@ -49,7 +49,7 @@ class GroupValidator(serializers.Serializer): # for the moment, the CLI sends this for any issue update, so allow nulls snoozeDuration = serializers.IntegerField(allow_null=True) - def validate_assignedTo(self, value: RpcActor) -> RpcActor: + def validate_assignedTo(self, value: Actor) -> Actor: if ( value and value.is_user diff --git a/src/sentry/api/serializers/models/alert_rule.py b/src/sentry/api/serializers/models/alert_rule.py index c76c09fed7a702..f4fe8c0d63408f 100644 --- a/src/sentry/api/serializers/models/alert_rule.py +++ b/src/sentry/api/serializers/models/alert_rule.py @@ -26,11 +26,11 @@ from sentry.models.rule import Rule from sentry.models.rulesnooze import RuleSnooze from sentry.models.user import User -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.app import app_service from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service from sentry.snuba.models import SnubaQueryEventType +from sentry.types.actor import Actor logger = logging.getLogger(__name__) @@ -198,7 +198,7 @@ def get_attrs( result[item]["activations"] = serialize(activations, **kwargs) if item.user_id or item.team_id: - actor = RpcActor.from_id(user_id=item.user_id, team_id=item.team_id) + actor = Actor.from_id(user_id=item.user_id, team_id=item.team_id) result[item]["owner"] = actor.identifier if "original_alert_rule" in self.expand: diff --git a/src/sentry/api/serializers/models/rule.py b/src/sentry/api/serializers/models/rule.py index 250338fe9927d8..0823b22ff4a03b 100644 --- a/src/sentry/api/serializers/models/rule.py +++ b/src/sentry/api/serializers/models/rule.py @@ -9,8 +9,8 @@ from sentry.models.rule import NeglectedRule, Rule, RuleActivity, RuleActivityType from sentry.models.rulefirehistory import RuleFireHistory from sentry.models.rulesnooze import RuleSnooze -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor def generate_rule_label(project, rule, data): @@ -132,7 +132,7 @@ def get_attrs(self, item_list, user, **kwargs): for rule in rules.values(): if rule.owner_team_id or rule.owner_user_id: - actor = RpcActor.from_id(user_id=rule.owner_user_id, team_id=rule.owner_team_id) + actor = Actor.from_id(user_id=rule.owner_user_id, team_id=rule.owner_team_id) result[rule]["owner"] = actor.identifier for action in rule.data.get("actions", []): diff --git a/src/sentry/api/serializers/rest_framework/mentions.py b/src/sentry/api/serializers/rest_framework/mentions.py index b698b80a905fe2..a93c3d933411f6 100644 --- a/src/sentry/api/serializers/rest_framework/mentions.py +++ b/src/sentry/api/serializers/rest_framework/mentions.py @@ -7,21 +7,21 @@ from 
sentry.models.organizationmember import OrganizationMember from sentry.models.organizationmemberteam import OrganizationMemberTeam from sentry.models.team import Team -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.util import region_silo_function +from sentry.types.actor import Actor @region_silo_function def extract_user_ids_from_mentions(organization_id, mentions): """ Extracts user ids from a set of mentions. Mentions should be a list of - `RpcActor` instances. Returns a dictionary with 'users', 'team_users', and 'teams' keys. + `Actor` instances. Returns a dictionary with 'users', 'team_users', and 'teams' keys. 'users' is the user ids for all explicitly mentioned users, 'team_users' is all user ids from explicitly mentioned teams, excluding any already mentioned users, and 'teams' is the team ids for all explicitly mentioned teams. """ - actors: Sequence[RpcUser | Team] = RpcActor.resolve_many(mentions) + actors: Sequence[RpcUser | Team] = Actor.resolve_many(mentions) actor_mentions = separate_resolved_actors(actors) team_user_ids = set( @@ -42,7 +42,7 @@ def extract_user_ids_from_mentions(organization_id, mentions): } -def separate_actors(actors: Sequence[RpcActor]): +def separate_actors(actors: Sequence[Actor]): users = [actor for actor in actors if actor.is_user] teams = [actor for actor in actors if actor.is_team] diff --git a/src/sentry/digests/utils.py b/src/sentry/digests/utils.py index fff3d0734d248e..a7d12aaf9c409b 100644 --- a/src/sentry/digests/utils.py +++ b/src/sentry/digests/utils.py @@ -16,7 +16,7 @@ from sentry.models.rulesnooze import RuleSnooze from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.notifications.utils.participants import get_send_to -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders @@ -69,12 +69,12 @@ def get_digest_as_context(digest: Digest) -> Mapping[str, Any]: def get_events_by_participant( - participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[RpcActor]]] -) -> Mapping[RpcActor, set[Event]]: + participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[Actor]]] +) -> Mapping[Actor, set[Event]]: """Invert a mapping of events to participants to a mapping of participants to events.""" output = defaultdict(set) for event, participants_by_provider in participants_by_provider_by_event.items(): - participants: set[RpcActor] + participants: set[Actor] for participants in participants_by_provider.values(): for participant in participants: output[participant].add(event) @@ -83,8 +83,8 @@ def get_events_by_participant( def get_personalized_digests( digest: Digest, - participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[RpcActor]]], -) -> Mapping[RpcActor, Digest]: + participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[Actor]]], +) -> Mapping[Actor, Digest]: events_by_participant = get_events_by_participant(participants_by_provider_by_event) actor_to_digest = {} @@ -108,7 +108,7 @@ def get_event_from_groups_in_digest(digest: Digest) -> Iterable[Event]: def build_custom_digest( - original_digest: Digest, events: Iterable[Event], participant: RpcActor + original_digest: Digest, events: Iterable[Event], participant: Actor ) -> Digest: """Given a digest and a set of events, filter the digest to only records that include the 
events.""" user_digest: Digest = {} @@ -138,7 +138,7 @@ def get_participants_by_event( target_type: ActionTargetType = ActionTargetType.ISSUE_OWNERS, target_identifier: int | None = None, fallthrough_choice: FallthroughChoiceType | None = None, -) -> Mapping[Event, Mapping[ExternalProviders, set[RpcActor]]]: +) -> Mapping[Event, Mapping[ExternalProviders, set[Actor]]]: """ This is probably the slowest part in sending digests because we do a lot of DB calls while we iterate over every event. It would be great if we could diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py index 5e67e57bb17398..0c644c49bc659f 100644 --- a/src/sentry/incidents/action_handlers.py +++ b/src/sentry/incidents/action_handlers.py @@ -19,11 +19,11 @@ from sentry.models.user import User from sentry.notifications.types import NotificationSettingEnum from sentry.notifications.utils.participants import get_notification_recipients -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service from sentry.services.hybrid_cloud.user_option import RpcUserOption, user_option_service from sentry.snuba.metrics import format_mri_field, is_mri_field +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviders from sentry.utils import json from sentry.utils.email import MessageBuilder, get_email_addresses @@ -128,7 +128,7 @@ def _get_targets(self) -> set[int]: users = None out = get_notification_recipients( recipients=list( - RpcActor(id=member.user_id, actor_type=ActorType.USER) + Actor(id=member.user_id, actor_type=ActorType.USER) for member in target.member_set ), type=NotificationSettingEnum.ISSUE_ALERTS, diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py index 0f6f2245132ce9..15366e1965e85a 100644 --- a/src/sentry/incidents/logic.py +++ b/src/sentry/incidents/logic.py @@ -50,7 +50,6 @@ from sentry.relay.config.metric_extraction import on_demand_metrics_feature_flags from sentry.search.events.builder import QueryBuilder from sentry.search.events.fields import is_function, resolve_field -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.app import RpcSentryAppInstallation, app_service from sentry.services.hybrid_cloud.integration import RpcIntegration, integration_service from sentry.services.hybrid_cloud.integration.model import RpcOrganizationIntegration @@ -79,6 +78,7 @@ ) from sentry.snuba.tasks import build_query_builder from sentry.tasks.relay import schedule_invalidate_project_config +from sentry.types.actor import Actor from sentry.utils import metrics from sentry.utils.audit import create_audit_entry_from_user from sentry.utils.snuba import is_measurement @@ -503,7 +503,7 @@ def create_alert_rule( time_window, threshold_type, threshold_period, - owner: RpcActor | None = None, + owner: Actor | None = None, resolve_threshold=None, environment=None, include_all_projects=False, @@ -525,7 +525,7 @@ def create_alert_rule( if `include_all_projects` is True :param name: Name for the alert rule. 
This will be used as part of the incident name, and must be unique per project
-    :param owner: RpcActor (sentry.services.hybrid_cloud.actor.RpcActor) or None
+    :param owner: Actor (sentry.types.actor.Actor) or None
     :param query: An event search query to subscribe to and monitor for alerts
     :param aggregate: A string representing the aggregate used in this alert rule
     :param time_window: Time period to aggregate over, in minutes
@@ -559,7 +559,7 @@
     owner_user_id = None
     owner_team_id = None
-    if owner and isinstance(owner, RpcActor):
+    if owner and isinstance(owner, Actor):
         if owner.is_user:
             owner_user_id = owner.id
         elif owner.is_team:
@@ -690,7 +690,7 @@ def update_alert_rule(
     dataset=None,
     projects=None,
     name=None,
-    owner: RpcActor | None | object = NOT_SET,
+    owner: Actor | None | object = NOT_SET,
     query=None,
     aggregate=None,
     time_window=None,
@@ -714,7 +714,7 @@
     `include_all_projects` is True
     :param name: Name for the alert rule. This will be used as part of the
         incident name, and must be unique per project.
-    :param owner: RpcActor (sentry.services.hybrid_cloud.actor.RpcActor) or None
+    :param owner: Actor (sentry.types.actor.Actor) or None
     :param query: An event search query to subscribe to and monitor for alerts
     :param aggregate: A string representing the aggregate used in this alert rule
     :param time_window: Time period to aggregate over, in minutes.
@@ -764,7 +764,7 @@
     if owner is not NOT_SET:
         team_id = None
         user_id = None
-        if owner and isinstance(owner, RpcActor):
+        if owner and isinstance(owner, Actor):
             if owner.is_user:
                 user_id = owner.id
             elif owner.is_team:
diff --git a/src/sentry/integrations/discord/views/link_identity.py b/src/sentry/integrations/discord/views/link_identity.py
index b52084afd3b795..f85ed85ae7f66f 100644
--- a/src/sentry/integrations/discord/views/link_identity.py
+++ b/src/sentry/integrations/discord/views/link_identity.py
@@ -7,8 +7,8 @@
 from sentry import analytics
 from sentry.integrations.utils.identities import get_identity_or_404
 from sentry.models.identity import Identity
-from sentry.services.hybrid_cloud.actor import ActorType
 from sentry.services.hybrid_cloud.integration.model import RpcIntegration
+from sentry.types.actor import ActorType
 from sentry.types.integrations import ExternalProviders
 from sentry.utils.http import absolute_uri
 from sentry.utils.signing import sign, unsign
diff --git a/src/sentry/integrations/discord/views/unlink_identity.py b/src/sentry/integrations/discord/views/unlink_identity.py
index ab7268468072d2..329ab4a5c2f572 100644
--- a/src/sentry/integrations/discord/views/unlink_identity.py
+++ b/src/sentry/integrations/discord/views/unlink_identity.py
@@ -8,8 +8,8 @@
 from sentry import analytics
 from sentry.integrations.utils.identities import get_identity_or_404
 from sentry.models.identity import Identity
-from sentry.services.hybrid_cloud.actor import ActorType
 from sentry.services.hybrid_cloud.integration.model import RpcIntegration
+from sentry.types.actor import ActorType
 from sentry.types.integrations import ExternalProviders
 from sentry.utils.http import absolute_uri
 from sentry.utils.signing import sign, unsign
diff --git a/src/sentry/integrations/msteams/card_builder/notifications.py b/src/sentry/integrations/msteams/card_builder/notifications.py
index fd17c2ab8947ac..901630a316ed0c 100644
--- a/src/sentry/integrations/msteams/card_builder/notifications.py
+++
b/src/sentry/integrations/msteams/card_builder/notifications.py @@ -14,7 +14,7 @@ from sentry.notifications.notifications.activity.base import GroupActivityNotification from sentry.notifications.notifications.base import BaseNotification from sentry.notifications.utils.actions import MessageAction -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from .block import ( @@ -35,7 +35,7 @@ class MSTeamsNotificationsMessageBuilder(MSTeamsMessageBuilder): def __init__( - self, notification: BaseNotification, context: Mapping[str, Any], recipient: RpcActor + self, notification: BaseNotification, context: Mapping[str, Any], recipient: Actor ): self.notification = notification self.context = context @@ -124,7 +124,7 @@ def __init__( self, notification: GroupActivityNotification, context: Mapping[str, Any], - recipient: RpcActor, + recipient: Actor, ): super().__init__(notification, context, recipient) self.group = notification.group diff --git a/src/sentry/integrations/msteams/notifications.py b/src/sentry/integrations/msteams/notifications.py index cba1dbed4fcacd..8cb567ae54d432 100644 --- a/src/sentry/integrations/msteams/notifications.py +++ b/src/sentry/integrations/msteams/notifications.py @@ -24,7 +24,7 @@ from sentry.notifications.notifications.base import BaseNotification from sentry.notifications.notifications.rules import AlertRuleNotification from sentry.notifications.notify import register_notification_provider -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from sentry.utils import metrics @@ -63,7 +63,7 @@ def is_supported_notification_type(notification: BaseNotification) -> bool: def get_notification_card( - notification: BaseNotification, context: Mapping[str, Any], recipient: User | Team | RpcActor + notification: BaseNotification, context: Mapping[str, Any], recipient: User | Team | Actor ) -> AdaptiveCard: cls = MESSAGE_BUILDERS[notification.message_builder] return cls(notification, context, recipient).build_notification_card() @@ -72,9 +72,9 @@ def get_notification_card( @register_notification_provider(ExternalProviders.MSTEAMS) def send_notification_as_msteams( notification: BaseNotification, - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], shared_context: Mapping[str, Any], - extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None, + extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None, ): if not is_supported_notification_type(notification): logger.info( diff --git a/src/sentry/integrations/notifications.py b/src/sentry/integrations/notifications.py index e910f565ef3609..82f22be1706c57 100644 --- a/src/sentry/integrations/notifications.py +++ b/src/sentry/integrations/notifications.py @@ -9,23 +9,23 @@ from sentry.models.organization import Organization from sentry.models.team import Team from sentry.notifications.notifications.base import BaseNotification -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.identity import identity_service from sentry.services.hybrid_cloud.integration import RpcIntegration, integration_service from sentry.services.hybrid_cloud.user import RpcUser +from sentry.types.actor import Actor from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders def get_context( notification: BaseNotification, - recipient: RpcActor | Team | RpcUser, + recipient: Actor | 
Team | RpcUser, shared_context: Mapping[str, Any], extra_context: Mapping[str, Any], ) -> Mapping[str, Any]: """Compose the various levels of context and add Slack-specific fields.""" return { **shared_context, - **notification.get_recipient_context(RpcActor.from_object(recipient), extra_context), + **notification.get_recipient_context(Actor.from_object(recipient), extra_context), } @@ -99,11 +99,11 @@ def _get_channel_and_integration_by_team( def get_integrations_by_channel_by_recipient( organization: Organization, - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], provider: ExternalProviders, -) -> Mapping[RpcActor, Mapping[str, RpcIntegration]]: - output: MutableMapping[RpcActor, Mapping[str, RpcIntegration]] = defaultdict(dict) - for recipient in RpcActor.many_from_object(recipients): +) -> Mapping[Actor, Mapping[str, RpcIntegration]]: + output: MutableMapping[Actor, Mapping[str, RpcIntegration]] = defaultdict(dict) + for recipient in Actor.many_from_object(recipients): channels_to_integrations = None if recipient.is_user: channels_to_integrations = _get_channel_and_integration_by_user( diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index df3007937828cf..488066f36d3f9f 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -53,10 +53,10 @@ dedupe_suggested_assignees, get_suspect_commit_users, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.identity import RpcIdentity, identity_service from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.snuba.referrer import Referrer +from sentry.types.actor import Actor from sentry.types.group import SUBSTATUS_TO_STR from sentry.types.integrations import ExternalProviders from sentry.utils import json @@ -102,7 +102,7 @@ def build_assigned_text(identity: RpcIdentity, assignee: str) -> str | None: - actor = RpcActor.from_identifier(assignee) + actor = Actor.from_identifier(assignee) try: assigned_actor = actor.resolve() @@ -292,7 +292,7 @@ def get_suggested_assignees( ): # we don't want every user in the project to be a suggested assignee suggested_assignees = issue_owners try: - suspect_commit_users = RpcActor.many_from_object(get_suspect_commit_users(project, event)) + suspect_commit_users = Actor.many_from_object(get_suspect_commit_users(project, event)) suggested_assignees.extend(suspect_commit_users) except (Release.DoesNotExist, Commit.DoesNotExist): logger.info("Skipping suspect committers because release does not exist.") @@ -454,7 +454,7 @@ def __init__( link_to_event: bool = False, issue_details: bool = False, notification: ProjectNotification | None = None, - recipient: RpcActor | None = None, + recipient: Actor | None = None, is_unfurl: bool = False, skip_fallback: bool = False, notes: str | None = None, diff --git a/src/sentry/integrations/slack/message_builder/notifications/base.py b/src/sentry/integrations/slack/message_builder/notifications/base.py index 1d81c2f7d78009..d25d0fdfcd368b 100644 --- a/src/sentry/integrations/slack/message_builder/notifications/base.py +++ b/src/sentry/integrations/slack/message_builder/notifications/base.py @@ -7,7 +7,7 @@ from sentry.integrations.slack.message_builder.base.block import BlockSlackMessageBuilder from sentry.integrations.slack.utils.escape import escape_slack_text from sentry.notifications.notifications.base import BaseNotification -from 
sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from sentry.utils import json @@ -17,7 +17,7 @@ def __init__( self, notification: BaseNotification, context: Mapping[str, Any], - recipient: RpcActor, + recipient: Actor, ) -> None: super().__init__() self.notification = notification diff --git a/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py b/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py index 0b83095e91bf61..025d82ac276acf 100644 --- a/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py +++ b/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py @@ -14,8 +14,8 @@ from sentry.models.project import Project from sentry.models.release import Release from sentry.notifications.notifications.base import BaseNotification -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.tasks.summaries.utils import COMPARISON_PERIOD +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from sentry.utils import json from sentry.utils.http import absolute_uri @@ -30,7 +30,7 @@ def __init__( self, notification: BaseNotification, context: Mapping[str, Any], - recipient: RpcActor, + recipient: Actor, ) -> None: super().__init__(notification, context, recipient) self.notification = notification diff --git a/src/sentry/integrations/slack/message_builder/notifications/digest.py b/src/sentry/integrations/slack/message_builder/notifications/digest.py index 4ef737faa72ab9..6b4bd9bb1e245f 100644 --- a/src/sentry/integrations/slack/message_builder/notifications/digest.py +++ b/src/sentry/integrations/slack/message_builder/notifications/digest.py @@ -8,7 +8,7 @@ from sentry.integrations.slack.message_builder import SlackBlock from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder from sentry.notifications.notifications.digest import DigestNotification -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from .base import SlackNotificationsMessageBuilder @@ -18,7 +18,7 @@ def __init__( self, notification: DigestNotification, context: Mapping[str, Any], - recipient: RpcActor, + recipient: Actor, ) -> None: super().__init__(notification, context, recipient) self.notification: DigestNotification = notification diff --git a/src/sentry/integrations/slack/message_builder/notifications/issues.py b/src/sentry/integrations/slack/message_builder/notifications/issues.py index b4e88c2e746aec..8ee24a9214d06c 100644 --- a/src/sentry/integrations/slack/message_builder/notifications/issues.py +++ b/src/sentry/integrations/slack/message_builder/notifications/issues.py @@ -6,7 +6,7 @@ from sentry.integrations.slack.message_builder import SlackBlock from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder from sentry.notifications.notifications.base import ProjectNotification -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from .base import SlackNotificationsMessageBuilder @@ -16,7 +16,7 @@ def __init__( self, notification: ProjectNotification, context: Mapping[str, Any], - recipient: RpcActor, + recipient: Actor, ) -> None: super().__init__(notification, context, recipient) self.notification: ProjectNotification = notification diff --git a/src/sentry/integrations/slack/notifications.py b/src/sentry/integrations/slack/notifications.py 
index e572bdd8a956a1..a4232b6b0c4a55 100644 --- a/src/sentry/integrations/slack/notifications.py +++ b/src/sentry/integrations/slack/notifications.py @@ -16,10 +16,10 @@ from sentry.notifications.additional_attachment_manager import get_additional_attachment from sentry.notifications.notifications.base import BaseNotification from sentry.notifications.notify import register_notification_provider -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.shared_integrations.exceptions import ApiError from sentry.silo.base import SiloMode from sentry.tasks.integrations.slack import post_message, post_message_control +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from sentry.utils import json, metrics @@ -43,9 +43,9 @@ def send_message(self, channel_id: str, message: str) -> None: def _get_attachments( notification: BaseNotification, - recipient: RpcActor, + recipient: Actor, shared_context: Mapping[str, Any], - extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None, + extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None, ) -> SlackBlock: extra_context = ( extra_context_by_actor[recipient] if extra_context_by_actor and recipient else {} @@ -58,7 +58,7 @@ def _get_attachments( def _notify_recipient( notification: BaseNotification, - recipient: RpcActor, + recipient: Actor, attachments: SlackBlock, channel: str, integration: Integration, @@ -130,9 +130,9 @@ def _notify_recipient( @register_notification_provider(ExternalProviders.SLACK) def send_notification_as_slack( notification: BaseNotification, - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], shared_context: Mapping[str, Any], - extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None, + extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None, ) -> None: """Send an "activity" or "alert rule" notification to a Slack user or team, but NOT to a channel directly. Sending Slack notifications to a channel is in integrations/slack/actions/notification.py""" diff --git a/src/sentry/issues/issue_occurrence.py b/src/sentry/issues/issue_occurrence.py index 35d7fda6c9b4d9..c3b6696e6a0851 100644 --- a/src/sentry/issues/issue_occurrence.py +++ b/src/sentry/issues/issue_occurrence.py @@ -11,7 +11,7 @@ from sentry import nodestore from sentry.issues.grouptype import GroupType, get_group_type_by_type_id -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.utils.dates import parse_timestamp DEFAULT_LEVEL = "info" @@ -41,7 +41,7 @@ class IssueOccurrenceData(TypedDict): assignee: NotRequired[str | None] """ Who to assign the issue to when creating a new issue. Has no effect on existing issues. - In the format of an Actor identifier, as defined in `RpcActor.from_identifier` + In the format of an Actor identifier, as defined in `Actor.from_identifier` """ @@ -95,7 +95,7 @@ class IssueOccurrence: level: str culprit: str initial_issue_priority: int | None = None - assignee: RpcActor | None = None + assignee: Actor | None = None def __post_init__(self) -> None: if not is_aware(self.detection_time): @@ -139,7 +139,7 @@ def from_dict(cls, data: IssueOccurrenceData) -> IssueOccurrence: # Note that this can cause IO, but in practice this will happen only the first time that # the occurrence is sent to the issue platform. We then translate to the id and store # that, so subsequent fetches won't cause IO. 
- assignee = RpcActor.from_identifier(data.get("assignee")) + assignee = Actor.from_identifier(data.get("assignee")) except ValidationError: logging.exception("Failed to parse assignee actor identifier") except Exception: diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py index b3cd00eff9c47a..b88b25ca8b4912 100644 --- a/src/sentry/issues/occurrence_consumer.py +++ b/src/sentry/issues/occurrence_consumer.py @@ -27,7 +27,7 @@ from sentry.issues.status_change_consumer import process_status_change_message from sentry.models.organization import Organization from sentry.models.project import Project -from sentry.services.hybrid_cloud.actor import parse_and_validate_actor +from sentry.types.actor import parse_and_validate_actor from sentry.utils import json, metrics logger = logging.getLogger(__name__) diff --git a/src/sentry/mail/adapter.py b/src/sentry/mail/adapter.py index fc22f6ce72a6a1..1d19576e978354 100644 --- a/src/sentry/mail/adapter.py +++ b/src/sentry/mail/adapter.py @@ -20,8 +20,8 @@ ) from sentry.notifications.utils.participants import get_notification_recipients from sentry.plugins.base.structs import Notification -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.tasks.digests import deliver_digest +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviders from sentry.utils import metrics @@ -108,7 +108,7 @@ def get_sendable_user_objects(project): notifications for the provided project. """ user_ids = project.member_set.values_list("user_id", flat=True) - actors = [RpcActor(id=uid, actor_type=ActorType.USER) for uid in user_ids] + actors = [Actor(id=uid, actor_type=ActorType.USER) for uid in user_ids] recipients = get_notification_recipients( recipients=actors, type=NotificationSettingEnum.ISSUE_ALERTS, diff --git a/src/sentry/mail/notifications.py b/src/sentry/mail/notifications.py index 36c7d0d3d2cb95..9182726eca0627 100644 --- a/src/sentry/mail/notifications.py +++ b/src/sentry/mail/notifications.py @@ -14,7 +14,7 @@ from sentry.notifications.notifications.base import BaseNotification, ProjectNotification from sentry.notifications.notify import register_notification_provider from sentry.notifications.types import UnsubscribeContext -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from sentry.utils import json from sentry.utils.email import MessageBuilder, group_id_to_email @@ -72,14 +72,14 @@ def get_unsubscribe_link(user_id: int, data: UnsubscribeContext) -> str: ) -def _log_message(notification: BaseNotification, recipient: RpcActor) -> None: +def _log_message(notification: BaseNotification, recipient: Actor) -> None: extra = notification.get_log_params(recipient) logger.info("mail.adapter.notify.mail_user", extra={**extra}) def get_context( notification: BaseNotification, - recipient: RpcActor | Team | RpcUser, + recipient: Actor | Team | RpcUser, shared_context: Mapping[str, Any], extra_context: Mapping[str, Any], ) -> Mapping[str, Any]: @@ -88,7 +88,7 @@ def get_context( generic HTML/text templates only render the unsubscribe link if one is present in the context, so don't automatically add it to every message. 
""" - recipient_actor = RpcActor.from_object(recipient) + recipient_actor = Actor.from_object(recipient) context = { **shared_context, **notification.get_recipient_context(recipient_actor, extra_context), @@ -107,12 +107,12 @@ def get_context( @register_notification_provider(ExternalProviders.EMAIL) def send_notification_as_email( notification: BaseNotification, - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], shared_context: Mapping[str, Any], - extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None, + extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None, ) -> None: for recipient in recipients: - recipient_actor = RpcActor.from_object(recipient) + recipient_actor = Actor.from_object(recipient) with sentry_sdk.start_span(op="notification.send_email", description="one_recipient"): if recipient_actor.is_team: # TODO(mgaeta): MessageBuilder only works with Users so filter out Teams for now. @@ -138,9 +138,9 @@ def send_notification_as_email( def get_builder_args( notification: BaseNotification, - recipient: RpcActor, + recipient: Actor, shared_context: Mapping[str, Any] | None = None, - extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None = None, + extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None = None, ) -> Mapping[str, Any]: # TODO: move context logic to single notification class method extra_context = ( diff --git a/src/sentry/models/group.py b/src/sentry/models/group.py index 4f8b07f57888ae..ef974d3646b341 100644 --- a/src/sentry/models/group.py +++ b/src/sentry/models/group.py @@ -45,10 +45,10 @@ ) from sentry.models.grouphistory import record_group_history, record_group_history_from_activity_type from sentry.models.organization import Organization -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.group import ( IGNORED_SUBSTATUS_CHOICES, UNRESOLVED_SUBSTATUS_CHOICES, @@ -901,7 +901,7 @@ def get_assignee(self) -> Team | RpcUser | None: except GroupAssignee.DoesNotExist: return None - assigned_actor: RpcActor = group_assignee.assigned_actor() + assigned_actor: Actor = group_assignee.assigned_actor() return assigned_actor.resolve() diff --git a/src/sentry/models/groupassignee.py b/src/sentry/models/groupassignee.py index afeaa73838d5a6..c954bee7179b67 100644 --- a/src/sentry/models/groupassignee.py +++ b/src/sentry/models/groupassignee.py @@ -15,9 +15,9 @@ from sentry.models.groupowner import GroupOwner from sentry.models.groupsubscription import GroupSubscription from sentry.notifications.types import GroupSubscriptionReason -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.signals import issue_assigned, issue_unassigned from sentry.types.activity import ActivityType +from sentry.types.actor import Actor, ActorType from sentry.utils import metrics if TYPE_CHECKING: @@ -267,13 +267,13 @@ def save(self, *args, **kwargs): ), "Must have Team or User, not both" super().save(*args, **kwargs) - def assigned_actor(self) -> RpcActor: + def assigned_actor(self) -> Actor: if self.user_id is not None: - return RpcActor( + return Actor( id=self.user_id, actor_type=ActorType.USER, ) if self.team_id is not None: - return RpcActor(id=self.team_id, actor_type=ActorType.TEAM) + return Actor(id=self.team_id, actor_type=ActorType.TEAM) raise NotImplementedError("Unknown Assignee") diff --git 
a/src/sentry/models/groupowner.py b/src/sentry/models/groupowner.py index 76ae56c29cbaff..0187d95c5dcf2c 100644 --- a/src/sentry/models/groupowner.py +++ b/src/sentry/models/groupowner.py @@ -96,12 +96,12 @@ def owner_id(self): raise NotImplementedError("Unknown Owner") def owner(self): - from sentry.services.hybrid_cloud.actor import RpcActor + from sentry.types.actor import Actor if not self.owner_id(): return None - return RpcActor.from_identifier(self.owner_id()) + return Actor.from_identifier(self.owner_id()) @classmethod def get_autoassigned_owner(cls, group_id, project_id, autoassignment_types): diff --git a/src/sentry/models/groupsubscription.py b/src/sentry/models/groupsubscription.py index f6cf2b4b96ad66..78dce75152aed3 100644 --- a/src/sentry/models/groupsubscription.py +++ b/src/sentry/models/groupsubscription.py @@ -22,9 +22,9 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.notifications import notifications_service from sentry.services.hybrid_cloud.user import RpcUser +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -168,7 +168,7 @@ def get_participants(self, group: Group) -> ParticipantMap: from sentry import features from sentry.notifications.utils.participants import ParticipantMap - all_possible_actors = RpcActor.many_from_object(group.project.get_members_as_rpc_users()) + all_possible_actors = Actor.many_from_object(group.project.get_members_as_rpc_users()) active_and_disabled_subscriptions = self.filter( group=group, user_id__in=[u.id for u in all_possible_actors] ) @@ -229,14 +229,14 @@ def get_participants(self, group: Group) -> ParticipantMap: result.add(provider, user, reason) return result - def get_possible_team_actors(self, group: Group) -> list[RpcActor]: + def get_possible_team_actors(self, group: Group) -> list[Actor]: from sentry.models.team import Team possible_teams_ids = Team.objects.filter(id__in=self.get_participating_team_ids(group)) - return RpcActor.many_from_object(possible_teams_ids) + return Actor.many_from_object(possible_teams_ids) def get_subscriptions_by_team_id( - self, group: Group, possible_team_actors: list[RpcActor] + self, group: Group, possible_team_actors: list[Actor] ) -> Mapping[int, int]: active_and_disabled_team_subscriptions = self.filter( group=group, team_id__in=[t.id for t in possible_team_actors] diff --git a/src/sentry/models/projectownership.py b/src/sentry/models/projectownership.py index 45bd7174bc025c..3115675a1b6891 100644 --- a/src/sentry/models/projectownership.py +++ b/src/sentry/models/projectownership.py @@ -17,8 +17,8 @@ from sentry.models.group import Group from sentry.models.groupowner import OwnerRuleType from sentry.ownership.grammar import Rule, load_schema, resolve_actors -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.utils import metrics from sentry.utils.cache import cache @@ -106,7 +106,7 @@ def get_ownership_cached(cls, project_id): @classmethod def get_owners( cls, project_id: int, data: Mapping[str, Any] - ) -> tuple[_Everyone | list[RpcActor], Sequence[Rule] | None]: + ) -> tuple[_Everyone | list[Actor], Sequence[Rule] | None]: """ For a given project_id, and event data blob. We combine the schemas from IssueOwners and CodeOwners. 
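Since `get_owners` and the `_hydrate_rules` hunk below both funnel rule owners through the renamed batch API, here is a minimal sketch of `Actor.resolve_many` usage; the ids are hypothetical and assume a matching user and team exist:

    from sentry.types.actor import Actor, ActorType

    owners = [
        Actor(id=1, actor_type=ActorType.USER),  # hypothetical user id
        Actor(id=2, actor_type=ActorType.TEAM),  # hypothetical team id
    ]
    # Batches the lookups by actor type rather than querying once per
    # owner; each Actor resolves to its backing user or Team.
    resolved = Actor.resolve_many(owners)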
@@ -156,7 +156,7 @@ def _hydrate_rules(cls, project_id, rules, type: str = OwnerRuleType.OWNERSHIP_R
         result = [
             (
                 rule,
-                RpcActor.resolve_many([actors[owner] for owner in rule.owners if owner in actors]),
+                Actor.resolve_many([actors[owner] for owner in rule.owners if owner in actors]),
                 type,
             )
             for rule in rules
diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py
index e6d82aa58266ac..a8be16132bcdd7 100644
--- a/src/sentry/monitors/consumers/monitor_consumer.py
+++ b/src/sentry/monitors/consumers/monitor_consumer.py
@@ -56,7 +56,7 @@
     valid_duration,
 )
 from sentry.monitors.validators import ConfigValidator, MonitorCheckInValidator
-from sentry.services.hybrid_cloud.actor import parse_and_validate_actor
+from sentry.types.actor import parse_and_validate_actor
 from sentry.utils import json, metrics
 from sentry.utils.dates import to_datetime
 from sentry.utils.outcomes import Outcome, track_outcome
diff --git a/src/sentry/monitors/endpoints/organization_monitor_index.py b/src/sentry/monitors/endpoints/organization_monitor_index.py
index c2b85d23a97253..98de33a870f6d6 100644
--- a/src/sentry/monitors/endpoints/organization_monitor_index.py
+++ b/src/sentry/monitors/endpoints/organization_monitor_index.py
@@ -48,7 +48,7 @@
 from sentry.monitors.utils import create_issue_alert_rule, signal_monitor_created
 from sentry.monitors.validators import MonitorBulkEditValidator, MonitorValidator
 from sentry.search.utils import tokenize_query
-from sentry.services.hybrid_cloud.actor import RpcActor
+from sentry.types.actor import Actor
 from sentry.utils.outcomes import Outcome
 
 from .base import OrganizationMonitorPermission
@@ -201,13 +201,13 @@ def get(self, request: Request, organization: Organization) -> Response:
             if owners:
                 owners = set(owners)
 
-                # Remove special values from owners, this can't be parsed as an RpcActor
+                # Remove special values from owners; these can't be parsed as an Actor
                 include_myteams = "myteams" in owners
                 owners.discard("myteams")
                 include_unassigned = "unassigned" in owners
                 owners.discard("unassigned")
 
-                actors = [RpcActor.from_identifier(identifier) for identifier in owners]
+                actors = [Actor.from_identifier(identifier) for identifier in owners]
 
                 user_ids = [actor.id for actor in actors if actor.is_user]
                 team_ids = [actor.id for actor in actors if actor.is_team]
diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py
index d790bac3c9a336..181b5244b16ab8 100644
--- a/src/sentry/monitors/models.py
+++ b/src/sentry/monitors/models.py
@@ -39,7 +39,7 @@
 from sentry.models.rule import Rule, RuleSource
 from sentry.monitors.constants import MAX_SLUG_LENGTH
 from sentry.monitors.types import CrontabSchedule, IntervalSchedule
-from sentry.services.hybrid_cloud.actor import RpcActor
+from sentry.types.actor import Actor
 from sentry.utils.retries import TimedRetryPolicy
 
 logger = logging.getLogger(__name__)
@@ -301,10 +301,10 @@ def save(self, *args, **kwargs):
         return super().save(*args, **kwargs)
 
     @property
-    def owner_actor(self) -> RpcActor | None:
+    def owner_actor(self) -> Actor | None:
         if not (self.owner_user_id or self.owner_team_id):
             return None
-        return RpcActor.from_id(user_id=self.owner_user_id, team_id=self.owner_team_id)
+        return Actor.from_id(user_id=self.owner_user_id, team_id=self.owner_team_id)
 
     @property
     def schedule(self) -> CrontabSchedule | IntervalSchedule:
diff --git a/src/sentry/monitors/serializers.py b/src/sentry/monitors/serializers.py
index ad032013df29e1..4e242866e8493d 100644
---
a/src/sentry/monitors/serializers.py +++ b/src/sentry/monitors/serializers.py @@ -19,7 +19,7 @@ ) from sentry.monitors.utils import fetch_associated_groups from sentry.monitors.validators import IntervalNames -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor class MonitorEnvBrokenDetectionSerializerResponse(TypedDict): @@ -180,12 +180,12 @@ def get_attrs(self, item_list, user, **kwargs): for project, serialized_project in zip(projects, serialize(list(projects), user)) } - actors = [RpcActor.from_id(user_id=m.owner_user_id) for m in item_list if m.owner_user_id] + actors = [Actor.from_id(user_id=m.owner_user_id) for m in item_list if m.owner_user_id] actors.extend( - [RpcActor.from_id(team_id=m.owner_team_id) for m in item_list if m.owner_team_id] + [Actor.from_id(team_id=m.owner_team_id) for m in item_list if m.owner_team_id] ) - actors_serialized = serialize(RpcActor.resolve_many(actors), user, ActorSerializer()) + actors_serialized = serialize(Actor.resolve_many(actors), user, ActorSerializer()) actor_data = { actor: serialized_actor for actor, serialized_actor in zip(actors, actors_serialized) } diff --git a/src/sentry/notifications/helpers.py b/src/sentry/notifications/helpers.py index b397535c3df8e8..f47c8ef7cefd69 100644 --- a/src/sentry/notifications/helpers.py +++ b/src/sentry/notifications/helpers.py @@ -21,8 +21,8 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.user.model import RpcUser +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import PERSONAL_NOTIFICATION_PROVIDERS_AS_INT, ExternalProviderEnum if TYPE_CHECKING: @@ -102,23 +102,23 @@ def get_reason_context(extra_context: Mapping[str, Any]) -> MutableMapping[str, } -def recipient_is_user(recipient: RpcActor | Team | RpcUser | User) -> bool: +def recipient_is_user(recipient: Actor | Team | RpcUser | User) -> bool: from sentry.models.user import User - if isinstance(recipient, RpcActor) and recipient.is_user: + if isinstance(recipient, Actor) and recipient.is_user: return True return isinstance(recipient, (RpcUser, User)) -def recipient_is_team(recipient: RpcActor | Team | RpcUser | User) -> bool: +def recipient_is_team(recipient: Actor | Team | RpcUser | User) -> bool: from sentry.models.team import Team - if isinstance(recipient, RpcActor) and recipient.is_team: + if isinstance(recipient, Actor) and recipient.is_team: return True return isinstance(recipient, Team) -def team_is_valid_recipient(team: Team | RpcActor) -> bool: +def team_is_valid_recipient(team: Team | Actor) -> bool: """ A team is a valid recipient if it has a linked integration (ie. linked Slack channel) for any one of the providers allowed for personal notifications. 
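The `get_team_members` hunk that follows pairs with these recipient checks; a hedged sketch of the intended flow, using a hypothetical team id:

    from sentry.notifications.helpers import get_team_members, recipient_is_team
    from sentry.types.actor import Actor, ActorType

    recipient = Actor(id=42, actor_type=ActorType.TEAM)  # hypothetical team id
    assert recipient_is_team(recipient)
    # Fans the team out into one USER Actor per member so downstream
    # notification code can treat team and user recipients uniformly.
    member_actors = get_team_members(recipient)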
@@ -133,12 +133,12 @@ def team_is_valid_recipient(team: Team | RpcActor) -> bool:
     return False
 
 
-def get_team_members(team: Team | RpcActor) -> list[RpcActor]:
+def get_team_members(team: Team | Actor) -> list[Actor]:
     if recipient_is_team(team):  # handles type error below
         team_id = team.id
-    else:  # team is either Team or RpcActor, so if recipient_is_team returns false it is because RpcActor has a different type
+    else:  # team is either Team or Actor; if recipient_is_team returned False, the Actor must have a non-team ActorType
         raise Exception(
-            "RpcActor team has ActorType %s, expected ActorType Team", team.actor_type  # type: ignore[union-attr]
+            f"Actor team has ActorType {team.actor_type}, expected ActorType Team"  # type: ignore[union-attr]
         )
 
     # get organization member IDs of all members in the team
@@ -157,7 +157,7 @@ def get_team_members(team: Team | RpcActor) -> list[RpcActor]:
     )
 
     return [
-        RpcActor(id=user_id, actor_type=ActorType.USER)
+        Actor(id=user_id, actor_type=ActorType.USER)
         for user_id in members.values_list("user_id", flat=True)
         if user_id
     ]
diff --git a/src/sentry/notifications/notificationcontroller.py b/src/sentry/notifications/notificationcontroller.py
index e39fc247bccbba..07c655c656c1ff 100644
--- a/src/sentry/notifications/notificationcontroller.py
+++ b/src/sentry/notifications/notificationcontroller.py
@@ -26,9 +26,9 @@
     NotificationSettingEnum,
     NotificationSettingsOptionEnum,
 )
-from sentry.services.hybrid_cloud.actor import ActorType, RpcActor
 from sentry.services.hybrid_cloud.organization_mapping.serial import serialize_organization_mapping
 from sentry.services.hybrid_cloud.user.model import RpcUser
+from sentry.types.actor import Actor, ActorType
 from sentry.types.integrations import (
     EXTERNAL_PROVIDERS_REVERSE,
     PERSONAL_NOTIFICATION_PROVIDERS,
@@ -36,7 +36,7 @@
     ExternalProviders,
 )
 
-Recipient = Union[RpcActor, Team, RpcUser, User]
+Recipient = Union[Actor, Team, RpcUser, User]
 TEAM_NOTIFICATION_PROVIDERS = [ExternalProviderEnum.SLACK]
 
 
@@ -412,7 +412,7 @@ def get_notification_recipients(
         type: NotificationSettingEnum,
         actor_type: ActorType | None = None,
         project_id: int | None = None,
-    ) -> Mapping[ExternalProviders, set[RpcActor]]:
+    ) -> Mapping[ExternalProviders, set[Actor]]:
         """
         Returns the recipients that should be notified for each provider,
         filtered by the given notification type.
@@ -423,9 +423,9 @@
         combined_settings = self.get_combined_settings(
             type=type, actor_type=actor_type, project_id=project_id
         )
-        recipients: Mapping[ExternalProviders, set[RpcActor]] = defaultdict(set)
+        recipients: Mapping[ExternalProviders, set[Actor]] = defaultdict(set)
         for recipient, type_map in combined_settings.items():
-            actor = RpcActor.from_object(recipient)
+            actor = Actor.from_object(recipient)
             for type, provider_map in type_map.items():
                 for provider, value in provider_map.items():
                     if value == NotificationSettingsOptionEnum.NEVER:
@@ -522,9 +522,7 @@ def get_subscriptions_status_for_projects(
 
     def get_participants(
         self,
-    ) -> MutableMapping[
-        RpcActor, MutableMapping[ExternalProviders, NotificationSettingsOptionEnum]
-    ]:
+    ) -> MutableMapping[Actor, MutableMapping[ExternalProviders, NotificationSettingsOptionEnum]]:
         """
         Returns a mapping of recipients to the providers they should be notified on.
         Note that this returns the ExternalProviders int enum instead of the
         ExternalProviderEnum string.
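For orientation, a minimal sketch of consuming `get_notification_recipients` after the rename; it assumes `controller` is an already-constructed NotificationController, and `route_to` is a hypothetical downstream helper:

    from sentry.notifications.types import NotificationSettingEnum

    by_provider = controller.get_notification_recipients(
        type=NotificationSettingEnum.ISSUE_ALERTS,
    )
    for provider, actors in by_provider.items():
        # e.g. ExternalProviders.SLACK -> {Actor(id=..., actor_type=ActorType.USER), ...}
        route_to(provider, actors)  # hypothetical downstream call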
@@ -535,10 +533,10 @@ def get_participants( combined_settings = self.get_combined_settings(type=self.type) user_to_providers: MutableMapping[ - RpcActor, MutableMapping[ExternalProviders, NotificationSettingsOptionEnum] + Actor, MutableMapping[ExternalProviders, NotificationSettingsOptionEnum] ] = defaultdict(dict) for recipient, setting_map in combined_settings.items(): - actor = RpcActor.from_object(recipient) + actor = Actor.from_object(recipient) provider_map = setting_map[self.type] user_to_providers[actor] = { EXTERNAL_PROVIDERS_REVERSE[provider]: value diff --git a/src/sentry/notifications/notifications/activity/base.py b/src/sentry/notifications/notifications/activity/base.py index d7e9dd34c7ad1f..8ea468c253c90b 100644 --- a/src/sentry/notifications/notifications/activity/base.py +++ b/src/sentry/notifications/notifications/activity/base.py @@ -16,9 +16,9 @@ from sentry.notifications.utils import send_activity_notification from sentry.notifications.utils.avatar import avatar_as_html from sentry.notifications.utils.participants import ParticipantMap, get_participants_for_group -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -50,7 +50,7 @@ def get_base_context(self) -> MutableMapping[str, Any]: } def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: Mapping[str, Any] ) -> MutableMapping[str, Any]: context = super().get_recipient_context(recipient, extra_context) return {**context, **get_reason_context(context)} @@ -70,7 +70,7 @@ def get_participants_with_group_subscription_reason(self) -> ParticipantMap: def send(self) -> None: return send_activity_notification(self) - def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor) -> Mapping[str, Any]: return {"activity": self.activity, **super().get_log_params(recipient)} @@ -202,15 +202,15 @@ def description_as_html(self, description: str, params: Mapping[str, Any]) -> Sa return format_html(description, **context) - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: from sentry.integrations.message_builder import get_title_link return get_title_link(self.group, None, False, True, self, provider) - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: from sentry.integrations.message_builder import build_attachment_title return build_attachment_title(self.group) - def get_log_params(self, recipient: RpcActor, **kwargs: Any) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor, **kwargs: Any) -> Mapping[str, Any]: return {"group": self.group.id, **super().get_log_params(recipient)} diff --git a/src/sentry/notifications/notifications/activity/escalating.py b/src/sentry/notifications/notifications/activity/escalating.py index 0586526d400312..9938ee9e61567d 100644 --- a/src/sentry/notifications/notifications/activity/escalating.py +++ b/src/sentry/notifications/notifications/activity/escalating.py @@ -3,7 +3,7 @@ from collections.abc import Mapping from typing import Any -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor 
from sentry.types.integrations import ExternalProviders from .base import GroupActivityNotification @@ -41,5 +41,5 @@ def get_description(self) -> tuple[str, str | None, Mapping[str, Any]]: # Return a default basic message return ("Sentry flagged this issue as escalating.", None, {}) - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> Any: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> Any: return self.get_context()["text_description"] diff --git a/src/sentry/notifications/notifications/activity/new_processing_issues.py b/src/sentry/notifications/notifications/activity/new_processing_issues.py index 09be7e859cc725..21620cd3fa69c9 100644 --- a/src/sentry/notifications/notifications/activity/new_processing_issues.py +++ b/src/sentry/notifications/notifications/activity/new_processing_issues.py @@ -8,7 +8,7 @@ from sentry.notifications.types import GroupSubscriptionReason, NotificationSettingEnum from sentry.notifications.utils import summarize_issues from sentry.notifications.utils.participants import ParticipantMap, get_notification_recipients -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviders from .base import ActivityNotification @@ -25,7 +25,7 @@ def __init__(self, activity: Activity) -> None: def get_participants_with_group_subscription_reason(self) -> ParticipantMap: participants_by_provider = None user_ids = list(self.project.member_set.values_list("user_id", flat=True)) - actors = [RpcActor(id=uid, actor_type=ActorType.USER) for uid in user_ids] + actors = [Actor(id=uid, actor_type=ActorType.USER) for uid in user_ids] participants_by_provider = get_notification_recipients( recipients=actors, type=NotificationSettingEnum.WORKFLOW, @@ -42,7 +42,7 @@ def get_participants_with_group_subscription_reason(self) -> ParticipantMap: ) return result - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> str: return f"Some events failed to process in your project {self.project.slug}" def get_context(self) -> MutableMapping[str, Any]: @@ -77,8 +77,8 @@ def get_notification_title( ) return f"Processing issues on {self.format_url(text=self.project.slug, url=project_url, provider=provider)}" - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return self.get_subject() - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: return None diff --git a/src/sentry/notifications/notifications/activity/note.py b/src/sentry/notifications/notifications/activity/note.py index 93c8bd98961c05..b77556c672be5a 100644 --- a/src/sentry/notifications/notifications/activity/note.py +++ b/src/sentry/notifications/notifications/activity/note.py @@ -7,7 +7,7 @@ from django.utils.safestring import SafeString from sentry.notifications.utils.avatar import avatar_as_html, get_user_avatar_url -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from .base import GroupActivityNotification @@ -36,7 +36,7 @@ def get_notification_title( ) -> str: return self.title - def get_message_description(self, recipient: RpcActor, 
provider: ExternalProviders) -> Any: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> Any: return self.get_context()["text_description"] def description_as_html(self, description: str, params: Mapping[str, Any]) -> SafeString: diff --git a/src/sentry/notifications/notifications/activity/release.py b/src/sentry/notifications/notifications/activity/release.py index 1b4d084d6f2251..14fa6b09663007 100644 --- a/src/sentry/notifications/notifications/activity/release.py +++ b/src/sentry/notifications/notifications/activity/release.py @@ -22,8 +22,8 @@ ) from sentry.notifications.utils.actions import MessageAction from sentry.notifications.utils.participants import ParticipantMap, get_participants_for_release -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from .base import ActivityNotification @@ -97,7 +97,7 @@ def get_context(self) -> MutableMapping[str, Any]: "version_parsed": self.version_parsed, } - def get_projects(self, recipient: RpcActor) -> set[Project]: + def get_projects(self, recipient: Actor) -> set[Project]: if not self.release: return set() @@ -114,7 +114,7 @@ def get_projects(self, recipient: RpcActor) -> set[Project]: return projects def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: Mapping[str, Any] ) -> MutableMapping[str, Any]: projects = self.get_projects(recipient) release_links = [ @@ -152,7 +152,7 @@ def get_notification_title( return f"Release {self.version_parsed} was deployed to {self.environment}{projects_text}" def get_message_actions( - self, recipient: RpcActor, provider: ExternalProviders + self, recipient: Actor, provider: ExternalProviders ) -> Sequence[MessageAction]: if self.release: release = get_release(self.activity, self.project.organization) @@ -170,13 +170,13 @@ def get_message_actions( ] return [] - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "" - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: return None - def build_notification_footer(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def build_notification_footer(self, recipient: Actor, provider: ExternalProviders) -> str: settings_url = self.get_settings_url(recipient, provider) # no environment related to a deploy diff --git a/src/sentry/notifications/notifications/base.py b/src/sentry/notifications/notifications/base.py index ad54e9e7993069..bae6721118288c 100644 --- a/src/sentry/notifications/notifications/base.py +++ b/src/sentry/notifications/notifications/base.py @@ -13,7 +13,7 @@ from sentry.models.environment import Environment from sentry.notifications.types import FineTuningAPIKey, NotificationSettingEnum, UnsubscribeContext from sentry.notifications.utils.actions import MessageAction -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders from sentry.utils.safe import safe_execute @@ -98,7 +98,7 @@ def template_path(self) -> str: """ def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: 
Mapping[str, Any] ) -> MutableMapping[str, Any]: # Basically a noop. return {**extra_context} @@ -109,23 +109,23 @@ def get_notification_title( """The subject line when sending this notifications as a chat notification.""" raise NotImplementedError - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: raise NotImplementedError - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: raise NotImplementedError - def build_notification_footer(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def build_notification_footer(self, recipient: Actor, provider: ExternalProviders) -> str: raise NotImplementedError - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> Any: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> Any: context = getattr(self, "context", None) return context["text_description"] if context else None def get_unsubscribe_key(self) -> UnsubscribeContext | None: return None - def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor) -> Mapping[str, Any]: group = getattr(self, "group", None) params = { "organization_id": self.organization.id, @@ -137,7 +137,7 @@ def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: params["user_id"] = recipient.id return params - def get_custom_analytics_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_custom_analytics_params(self, recipient: Actor) -> Mapping[str, Any]: """ Returns a mapping of params used to record the event associated with self.analytics_event. By default, use the log params. 
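(Illustrative sketch only, not part of the patch: after this rename, recipients are constructed from sentry.types.actor rather than the deprecated hybrid-cloud module. The ids below are hypothetical; the constructors and properties are only the ones exercised by hunks elsewhere in this series.)

    from sentry.types.actor import Actor, ActorType

    # Direct construction of a user recipient, mirroring
    # get_participants_with_group_subscription_reason above.
    user_recipient = Actor(id=42, actor_type=ActorType.USER)  # 42 is a hypothetical user id

    # Construction from a user_id/team_id pair (one side set), as in the
    # rule-owner call sites later in this patch.
    team_recipient = Actor.from_id(user_id=None, team_id=7)  # 7 is a hypothetical team id

    assert user_recipient.is_user
    assert team_recipient.is_team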
@@ -145,7 +145,7 @@ def get_custom_analytics_params(self, recipient: RpcActor) -> Mapping[str, Any]: return self.get_log_params(recipient) def get_message_actions( - self, recipient: RpcActor, provider: ExternalProviders + self, recipient: Actor, provider: ExternalProviders ) -> Sequence[MessageAction]: return [] @@ -162,7 +162,7 @@ def analytics_instance(self) -> Any | None: def record_analytics(self, event_name: str, *args: Any, **kwargs: Any) -> None: analytics.record(event_name, *args, **kwargs) - def record_notification_sent(self, recipient: RpcActor, provider: ExternalProviders) -> None: + def record_notification_sent(self, recipient: Actor, provider: ExternalProviders) -> None: with sentry_sdk.start_span(op="notification.send", description="record_notification_sent"): # may want to explicitly pass in the parameters for this event self.record_analytics( @@ -180,7 +180,7 @@ def record_notification_sent(self, recipient: RpcActor, provider: ExternalProvid **self.get_custom_analytics_params(recipient), ) - def get_referrer(self, provider: ExternalProviders, recipient: RpcActor | None = None) -> str: + def get_referrer(self, provider: ExternalProviders, recipient: Actor | None = None) -> str: # referrer needs the provider and recipient referrer = f"{self.metrics_key}-{EXTERNAL_PROVIDERS[provider]}" if recipient: @@ -190,7 +190,7 @@ def get_referrer(self, provider: ExternalProviders, recipient: RpcActor | None = def get_sentry_query_params( self, provider: ExternalProviders, - recipient: RpcActor | None = None, + recipient: Actor | None = None, set_organization_id: bool = False, ) -> str: """ @@ -210,7 +210,7 @@ def get_sentry_query_params( query = urlencode(q_params) return f"?{query}" - def get_settings_url(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def get_settings_url(self, recipient: Actor, provider: ExternalProviders) -> str: set_organization_id = False # Settings url is dependant on the provider so we know which provider is sending them into Sentry. 
if recipient.is_team: @@ -232,7 +232,7 @@ def get_settings_url(self, recipient: RpcActor, provider: ExternalProviders) -> ) ) - def determine_recipients(self) -> list[RpcActor]: + def determine_recipients(self) -> list[Actor]: raise NotImplementedError def get_notification_providers(self) -> Iterable[ExternalProviders]: @@ -242,8 +242,8 @@ def get_notification_providers(self) -> Iterable[ExternalProviders]: return notification_providers() def filter_to_accepting_recipients( - self, recipients: Iterable[RpcActor] - ) -> Mapping[ExternalProviders, Iterable[RpcActor]]: + self, recipients: Iterable[Actor] + ) -> Mapping[ExternalProviders, Iterable[Actor]]: from sentry.notifications.utils.participants import get_notification_recipients setting_type = ( @@ -257,7 +257,7 @@ def filter_to_accepting_recipients( organization_id=self.organization.id, ) - def get_participants(self) -> Mapping[ExternalProviders, Iterable[RpcActor]]: + def get_participants(self) -> Mapping[ExternalProviders, Iterable[Actor]]: # need a notification_setting_type_enum to call this function if not self.notification_setting_type_enum: raise NotImplementedError @@ -297,10 +297,10 @@ def get_project_link(self) -> str: f"/organizations/{self.organization.slug}/projects/{self.project.slug}/" ) - def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor) -> Mapping[str, Any]: return {"project_id": self.project.id, **super().get_log_params(recipient)} - def build_notification_footer(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def build_notification_footer(self, recipient: Actor, provider: ExternalProviders) -> str: settings_url = self.get_settings_url(recipient, provider) parent = getattr(self, "project", self.organization) diff --git a/src/sentry/notifications/notifications/codeowners_auto_sync.py b/src/sentry/notifications/notifications/codeowners_auto_sync.py index b705ffedc137e5..110643af8f62dd 100644 --- a/src/sentry/notifications/notifications/codeowners_auto_sync.py +++ b/src/sentry/notifications/notifications/codeowners_auto_sync.py @@ -5,7 +5,7 @@ from sentry.notifications.notifications.base import ProjectNotification from sentry.notifications.types import NotificationSettingEnum -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -17,8 +17,8 @@ class AutoSyncNotification(ProjectNotification): notification_setting_type_enum = NotificationSettingEnum.DEPLOY template_path = "sentry/emails/codeowners-auto-sync-failure" - def determine_recipients(self) -> list[RpcActor]: - return RpcActor.many_from_object(self.organization.get_owners()) + def determine_recipients(self) -> list[Actor]: + return Actor.many_from_object(self.organization.get_owners()) @property def reference(self) -> Model | None: @@ -35,7 +35,7 @@ def get_context(self) -> MutableMapping[str, Any]: return {"project_name": self.project.name} def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: Mapping[str, Any] ) -> MutableMapping[str, Any]: context = super().get_recipient_context(recipient, extra_context) context["url"] = self.organization.absolute_url( diff --git a/src/sentry/notifications/notifications/daily_summary.py b/src/sentry/notifications/notifications/daily_summary.py index 8a703525d1f841..42cde749301c7c 100644 --- a/src/sentry/notifications/notifications/daily_summary.py +++ 
b/src/sentry/notifications/notifications/daily_summary.py @@ -8,8 +8,8 @@ from sentry.db.models import Model from sentry.notifications.notifications.base import BaseNotification -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.tasks.summaries.utils import DailySummaryProjectContext +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -27,7 +27,7 @@ class DailySummaryNotification(BaseNotification): def __init__( self, organization: Organization, - recipient: RpcActor, + recipient: Actor, provider: ExternalProviders, project_context: dict[int, DailySummaryProjectContext], ) -> None: @@ -40,7 +40,7 @@ def __init__( def reference(self) -> Model | None: return None - def get_participants(self) -> Mapping[ExternalProviders, Iterable[RpcActor]]: + def get_participants(self) -> Mapping[ExternalProviders, Iterable[Actor]]: return {self.provider: {self.recipient}} def get_subject(self, context: Mapping[str, Any] | None = None) -> str: @@ -54,15 +54,15 @@ def get_notification_title( ) -> str: return "" - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> Any: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> Any: return f"Daily Summary for Your {self.organization.slug.title()} Projects" - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: return None - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "" - def build_notification_footer(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def build_notification_footer(self, recipient: Actor, provider: ExternalProviders) -> str: url = self.organization.absolute_url(reverse("sentry-account-settings")) return f"Getting this at a funky time? This sends at 4pm for whatever time zone you have set. | <{url}|*Account Settings*>" diff --git a/src/sentry/notifications/notifications/digest.py b/src/sentry/notifications/notifications/digest.py index 32f60e2681c203..92c9f1f69a2b53 100644 --- a/src/sentry/notifications/notifications/digest.py +++ b/src/sentry/notifications/notifications/digest.py @@ -31,7 +31,7 @@ send_as_alert_notification, should_send_as_alert_notification, ) -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -95,10 +95,10 @@ def get_notification_title( date="{date_pretty}", ) - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: return None - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "" @property @@ -157,10 +157,8 @@ def build_context( def get_extra_context( self, - participants_by_provider_by_event: Mapping[ - Event, Mapping[ExternalProviders, set[RpcActor]] - ], - ) -> Mapping[RpcActor, Mapping[str, Any]]: + participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[Actor]]], + ) -> Mapping[Actor, Mapping[str, Any]]: personalized_digests = get_personalized_digests( self.digest, participants_by_provider_by_event ) @@ -214,7 +212,7 @@ def send(self) -> None: ) # Calculate the per-participant context. 
- extra_context: Mapping[RpcActor, Mapping[str, Any]] = {} + extra_context: Mapping[Actor, Mapping[str, Any]] = {} personalized_digests = should_get_personalized_digests(self.target_type, self.project.id) if personalized_digests: @@ -230,7 +228,7 @@ def send(self) -> None: participants -= participants_to_remove notify(provider, self, participants, shared_context, extra_context) - def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor) -> Mapping[str, Any]: try: alert_id = list(self.digest.keys())[0].id except Exception: @@ -243,7 +241,7 @@ def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: **super().get_log_params(recipient), } - def record_notification_sent(self, recipient: RpcActor, provider: ExternalProviders) -> None: + def record_notification_sent(self, recipient: Actor, provider: ExternalProviders) -> None: super().record_notification_sent(recipient, provider) log_params = self.get_log_params(recipient) analytics.record( diff --git a/src/sentry/notifications/notifications/integration_nudge.py b/src/sentry/notifications/notifications/integration_nudge.py index 3fa6071c360613..15ab0ad8ffbeb5 100644 --- a/src/sentry/notifications/notifications/integration_nudge.py +++ b/src/sentry/notifications/notifications/integration_nudge.py @@ -8,7 +8,7 @@ from sentry.db.models import Model from sentry.notifications.notifications.base import BaseNotification from sentry.notifications.utils.actions import MessageAction -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -69,17 +69,17 @@ def __init__( def reference(self) -> Model | None: return None - def get_participants(self) -> Mapping[ExternalProviders, Iterable[RpcActor]]: + def get_participants(self) -> Mapping[ExternalProviders, Iterable[Actor]]: return {self.provider: {self.recipient}} def get_subject(self, context: Mapping[str, Any] | None = None) -> str: return "" - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> Any: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> Any: return MESSAGE_LIBRARY[self.seed].format(provider=self.provider.name.capitalize()) def get_message_actions( - self, recipient: RpcActor, provider: ExternalProviders + self, recipient: Actor, provider: ExternalProviders ) -> Sequence[MessageAction]: return [ MessageAction( @@ -102,14 +102,14 @@ def get_notification_title( ) -> str: return "" - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: return None - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "" - def build_notification_footer(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def build_notification_footer(self, recipient: Actor, provider: ExternalProviders) -> str: return "" - def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor) -> Mapping[str, Any]: return {"seed": self.seed, **super().get_log_params(recipient)} diff --git a/src/sentry/notifications/notifications/missing_members_nudge.py b/src/sentry/notifications/notifications/missing_members_nudge.py index 4e4e0806b46882..55e240f36cd03c 100644 --- a/src/sentry/notifications/notifications/missing_members_nudge.py 
+++ b/src/sentry/notifications/notifications/missing_members_nudge.py @@ -10,7 +10,7 @@ MemberWriteRoleRecipientStrategy, ) from sentry.notifications.types import NotificationSettingEnum -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders PROVIDER_TO_URL = {"github": "https://github.com/"} @@ -49,7 +49,7 @@ def get_notification_providers(self) -> Iterable[ExternalProviders]: return [ExternalProviders.EMAIL] def get_members_list_url( - self, provider: ExternalProviders, recipient: RpcActor | None = None + self, provider: ExternalProviders, recipient: Actor | None = None ) -> str: url = self.organization.absolute_url( f"/settings/{self.organization.slug}/members/", @@ -66,6 +66,6 @@ def get_context(self) -> MutableMapping[str, Any]: "provider": self.provider.capitalize(), } - def determine_recipients(self) -> list[RpcActor]: + def determine_recipients(self) -> list[Actor]: # owners and managers have org:write - return RpcActor.many_from_object(self.role_based_recipient_strategy.determine_recipients()) + return Actor.many_from_object(self.role_based_recipient_strategy.determine_recipients()) diff --git a/src/sentry/notifications/notifications/organization_request/abstract_invite_request.py b/src/sentry/notifications/notifications/organization_request/abstract_invite_request.py index 51b49c1c92617f..e42b81cc28ed99 100644 --- a/src/sentry/notifications/notifications/organization_request/abstract_invite_request.py +++ b/src/sentry/notifications/notifications/organization_request/abstract_invite_request.py @@ -12,8 +12,8 @@ MemberWriteRoleRecipientStrategy, ) from sentry.notifications.utils.actions import MessageAction -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -40,7 +40,7 @@ def get_subject(self, context: Mapping[str, Any] | None = None) -> str: return f"Access request to {self.organization.name}" def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: Mapping[str, Any] ) -> MutableMapping[str, Any]: context = super().get_recipient_context(recipient, extra_context) context["email"] = self.pending_member.email @@ -61,7 +61,7 @@ def get_recipient_context( return context def get_message_actions( - self, recipient: RpcActor, provider: ExternalProviders + self, recipient: Actor, provider: ExternalProviders ) -> Sequence[MessageAction]: members_url = self.members_url + self.get_sentry_query_params(provider, recipient) return [ @@ -88,7 +88,7 @@ def get_message_actions( def get_callback_data(self) -> Mapping[str, Any]: return {"member_id": self.pending_member.id, "member_email": self.pending_member.email} - def get_log_params(self, recipient: RpcActor) -> MutableMapping[str, Any]: + def get_log_params(self, recipient: Actor) -> MutableMapping[str, Any]: # TODO: figure out who the user should be when pending_member.inviter_id is None return { **super().get_log_params(recipient), diff --git a/src/sentry/notifications/notifications/organization_request/base.py b/src/sentry/notifications/notifications/organization_request/base.py index 42ef8d3dc2fcf3..aa2d13efdbdc31 100644 --- a/src/sentry/notifications/notifications/organization_request/base.py +++ b/src/sentry/notifications/notifications/organization_request/base.py @@ -11,7 +11,7 @@ 
RoleBasedRecipientStrategy, ) from sentry.notifications.types import NotificationSettingEnum -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -37,8 +37,8 @@ def reference(self) -> Model | None: def get_context(self) -> MutableMapping[str, Any]: return {} - def determine_recipients(self) -> list[RpcActor]: - return RpcActor.many_from_object(self.role_based_recipient_strategy.determine_recipients()) + def determine_recipients(self) -> list[Actor]: + return Actor.many_from_object(self.role_based_recipient_strategy.determine_recipients()) def get_notification_title( self, provider: ExternalProviders, context: Mapping[str, Any] | None = None @@ -46,7 +46,7 @@ def get_notification_title( # purposely use empty string for the notification title return "" - def build_notification_footer(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def build_notification_footer(self, recipient: Actor, provider: ExternalProviders) -> str: if recipient.is_team: raise NotImplementedError @@ -60,10 +60,10 @@ def build_notification_footer(self, recipient: RpcActor, provider: ExternalProvi settings_url ) - def get_title_link(self, recipient: RpcActor, provider: ExternalProviders) -> str | None: + def get_title_link(self, recipient: Actor, provider: ExternalProviders) -> str | None: return None - def get_log_params(self, recipient: RpcActor) -> MutableMapping[str, Any]: + def get_log_params(self, recipient: Actor) -> MutableMapping[str, Any]: if recipient.is_team: raise NotImplementedError diff --git a/src/sentry/notifications/notifications/organization_request/integration_request.py b/src/sentry/notifications/notifications/organization_request/integration_request.py index 391aaa5b1d4b4d..94ca6956f188bc 100644 --- a/src/sentry/notifications/notifications/organization_request/integration_request.py +++ b/src/sentry/notifications/notifications/organization_request/integration_request.py @@ -9,7 +9,7 @@ OwnerRecipientStrategy, ) from sentry.notifications.utils.actions import MessageAction -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -78,10 +78,10 @@ def get_notification_title( ) -> str: return self.get_subject() - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "Request to Install" - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> str: requester_name = self.requester.get_display_name() optional_message = ( f" They've included this message `{self.message}`" if self.message else "" @@ -89,7 +89,7 @@ def get_message_description(self, recipient: RpcActor, provider: ExternalProvide return f"{requester_name} is requesting to install the {self.provider_name} integration into {self.organization.name}.{optional_message}" def get_message_actions( - self, recipient: RpcActor, provider: ExternalProviders + self, recipient: Actor, provider: ExternalProviders ) -> Sequence[MessageAction]: # TODO: update referrer return [MessageAction(name="Check it out", url=self.integration_link)] diff --git a/src/sentry/notifications/notifications/organization_request/invite_request.py b/src/sentry/notifications/notifications/organization_request/invite_request.py index 
51d0ffbed8ff5a..0edce0fd2418dc 100644 --- a/src/sentry/notifications/notifications/organization_request/invite_request.py +++ b/src/sentry/notifications/notifications/organization_request/invite_request.py @@ -1,7 +1,7 @@ from __future__ import annotations from sentry.notifications.class_manager import register -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from .abstract_invite_request import AbstractInviteRequestNotification @@ -13,9 +13,9 @@ class InviteRequestNotification(AbstractInviteRequestNotification): metrics_key = "invite_request" template_path = "sentry/emails/organization-invite-request" - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "Request to Invite" - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> str: requester_name = self.requester.get_display_name() return f"{requester_name} is requesting to invite {self.pending_member.email} into {self.organization.name}" diff --git a/src/sentry/notifications/notifications/organization_request/join_request.py b/src/sentry/notifications/notifications/organization_request/join_request.py index ff58e68760226e..052283a535fc67 100644 --- a/src/sentry/notifications/notifications/organization_request/join_request.py +++ b/src/sentry/notifications/notifications/organization_request/join_request.py @@ -1,7 +1,7 @@ from __future__ import annotations from sentry.notifications.class_manager import register -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from .abstract_invite_request import AbstractInviteRequestNotification @@ -13,8 +13,8 @@ class JoinRequestNotification(AbstractInviteRequestNotification): metrics_key = "join_request" template_path = "sentry/emails/organization-join-request" - def build_attachment_title(self, recipient: RpcActor) -> str: + def build_attachment_title(self, recipient: Actor) -> str: return "Request to Join" - def get_message_description(self, recipient: RpcActor, provider: ExternalProviders) -> str: + def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> str: return f"{self.pending_member.email} is requesting to join {self.organization.name}" diff --git a/src/sentry/notifications/notifications/rules.py b/src/sentry/notifications/notifications/rules.py index 9697427d43e4bd..af33e1c6031014 100644 --- a/src/sentry/notifications/notifications/rules.py +++ b/src/sentry/notifications/notifications/rules.py @@ -34,9 +34,9 @@ ) from sentry.notifications.utils.participants import get_owner_reason, get_send_to from sentry.plugins.base.structs import Notification -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user_option import user_option_service from sentry.services.hybrid_cloud.user_option.service import get_option_from_list +from sentry.types.actor import Actor from sentry.types.group import GroupSubStatus from sentry.types.integrations import ExternalProviders from sentry.utils import metrics @@ -98,7 +98,7 @@ def __init__( self.template_path = f"sentry/emails/{email_template_name}" - def get_participants(self) -> Mapping[ExternalProviders, Iterable[RpcActor]]: + def get_participants(self) -> Mapping[ExternalProviders, 
Iterable[Actor]]: return get_send_to( project=self.project, target_type=self.target_type, @@ -118,7 +118,7 @@ def reference(self) -> Model | None: return self.group def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: Mapping[str, Any] ) -> MutableMapping[str, Any]: tz = timezone.utc if recipient.is_user: @@ -294,7 +294,7 @@ def send(self) -> None: for provider, participants in participants_by_provider.items(): notify(provider, self, participants, shared_context) - def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: + def get_log_params(self, recipient: Actor) -> Mapping[str, Any]: return { "target_type": self.target_type, "target_identifier": self.target_identifier, @@ -302,7 +302,7 @@ def get_log_params(self, recipient: RpcActor) -> Mapping[str, Any]: **super().get_log_params(recipient), } - def record_notification_sent(self, recipient: RpcActor, provider: ExternalProviders) -> None: + def record_notification_sent(self, recipient: Actor, provider: ExternalProviders) -> None: super().record_notification_sent(recipient, provider) log_params = self.get_log_params(recipient) analytics.record( diff --git a/src/sentry/notifications/notifications/strategies/role_based_recipient_strategy.py b/src/sentry/notifications/notifications/strategies/role_based_recipient_strategy.py index 252dbd3f8caf56..944406be4b0d77 100644 --- a/src/sentry/notifications/notifications/strategies/role_based_recipient_strategy.py +++ b/src/sentry/notifications/notifications/strategies/role_based_recipient_strategy.py @@ -9,9 +9,9 @@ from sentry import roles from sentry.models.organizationmember import OrganizationMember from sentry.roles.manager import OrganizationRole -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor, ActorType if TYPE_CHECKING: from sentry.models.organization import Organization @@ -25,9 +25,9 @@ class RoleBasedRecipientStrategy(metaclass=ABCMeta): def __init__(self, organization: Organization): self.organization = organization - def get_member(self, user: RpcUser | RpcActor) -> OrganizationMember: + def get_member(self, user: RpcUser | Actor) -> OrganizationMember: # cache the result - actor = RpcActor.from_object(user) + actor = Actor.from_object(user) if actor.actor_type != ActorType.USER: raise OrganizationMember.DoesNotExist() user_id = actor.id diff --git a/src/sentry/notifications/notifications/user_report.py b/src/sentry/notifications/notifications/user_report.py index b65b46bfe153f8..2807aa24172544 100644 --- a/src/sentry/notifications/notifications/user_report.py +++ b/src/sentry/notifications/notifications/user_report.py @@ -13,7 +13,7 @@ from sentry.notifications.notifications.base import ProjectNotification from sentry.notifications.utils import send_activity_notification from sentry.notifications.utils.participants import ParticipantMap -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders if TYPE_CHECKING: @@ -79,7 +79,7 @@ def get_context(self) -> MutableMapping[str, Any]: } def get_recipient_context( - self, recipient: RpcActor, extra_context: Mapping[str, Any] + self, recipient: Actor, extra_context: Mapping[str, Any] ) -> MutableMapping[str, Any]: context = super().get_recipient_context(recipient, extra_context) return 
{**context, **get_reason_context(context)} diff --git a/src/sentry/notifications/notify.py b/src/sentry/notifications/notify.py index 0cdf6d6e41cfdc..fef0dca493a9c2 100644 --- a/src/sentry/notifications/notify.py +++ b/src/sentry/notifications/notify.py @@ -4,16 +4,16 @@ from typing import Any, Optional, TypeVar from sentry.notifications.notifications.base import BaseNotification -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders # Shortcut so that types don't explode. NotifyCallable = Callable[ [ BaseNotification, - Iterable[RpcActor], + Iterable[Actor], Mapping[str, Any], - Optional[Mapping[RpcActor, Mapping[str, Any]]], + Optional[Mapping[Actor, Mapping[str, Any]]], ], None, ] @@ -46,9 +46,9 @@ def wrapped(send_notification: Notifiable) -> Notifiable: def notify( provider: ExternalProviders, notification: Any, - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], shared_context: Mapping[str, Any], - extra_context_by_actor: Mapping[RpcActor, Mapping[str, Any]] | None = None, + extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None = None, ) -> None: """Send notifications to these users or team.""" diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py index c9bb7e44c21d06..96e5cefa237c81 100644 --- a/src/sentry/notifications/utils/participants.py +++ b/src/sentry/notifications/utils/participants.py @@ -29,11 +29,11 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.notifications import notifications_service from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service from sentry.services.hybrid_cloud.user_option import get_option_from_list, user_option_service +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviders, get_provider_enum_from_string from sentry.utils import json, metrics from sentry.utils.committers import AuthorCommitsSerialized, get_serialized_event_file_committers @@ -53,27 +53,25 @@ class ParticipantMap: - _dict: MutableMapping[ExternalProviders, MutableMapping[RpcActor, int]] + _dict: MutableMapping[ExternalProviders, MutableMapping[Actor, int]] def __init__(self) -> None: self._dict = defaultdict(dict) - def get_participants_by_provider( - self, provider: ExternalProviders - ) -> set[tuple[RpcActor, int]]: + def get_participants_by_provider(self, provider: ExternalProviders) -> set[tuple[Actor, int]]: return {(k, v) for k, v in self._dict.get(provider, {}).items()} - def add(self, provider: ExternalProviders, participant: RpcActor, reason: int) -> None: + def add(self, provider: ExternalProviders, participant: Actor, reason: int) -> None: self._dict[provider][participant] = reason - def add_all(self, provider: ExternalProviders, actor_group: Mapping[RpcActor, int]) -> None: + def add_all(self, provider: ExternalProviders, actor_group: Mapping[Actor, int]) -> None: self._dict[provider].update(actor_group) def update(self, other: ParticipantMap) -> None: for provider, actor_group in other._dict.items(): self.add_all(provider, actor_group) - def get_participant_sets(self) -> Iterable[tuple[ExternalProviders, Iterable[RpcActor]]]: + def get_participant_sets(self) -> Iterable[tuple[ExternalProviders, Iterable[Actor]]]: return ((provider, participants.keys()) for (provider, 
participants) in self._dict.items()) def delete_participant_by_id( @@ -93,9 +91,7 @@ def is_empty(self) -> bool: def split_participants_and_context( self, - ) -> Iterable[ - tuple[ExternalProviders, Iterable[RpcActor], Mapping[RpcActor, Mapping[str, Any]]] - ]: + ) -> Iterable[tuple[ExternalProviders, Iterable[Actor], Mapping[Actor, Mapping[str, Any]]]]: for provider, participants_with_reasons in self._dict.items(): extra_context = { participant: {"reason": reason} @@ -147,7 +143,7 @@ def get_participants_for_group(group: Group, user_id: int | None = None) -> Part def get_reason( - user: User | RpcActor, + user: User | Actor, value: NotificationSettingsOptionEnum, user_ids: set[int], ) -> int | None: @@ -183,7 +179,7 @@ def get_participants_for_release( ) ) - actors = RpcActor.many_from_object(RpcUser(id=user_id) for user_id in user_ids) + actors = Actor.many_from_object(RpcUser(id=user_id) for user_id in user_ids) # don't pass in projects since the settings are scoped to the organization only for now providers_by_recipient = notifications_service.get_participants( type=NotificationSettingEnum.DEPLOY, @@ -207,7 +203,7 @@ def get_owners( project: Project, event: Event | None = None, fallthrough_choice: FallthroughChoiceType | None = None, -) -> tuple[list[RpcActor], str]: +) -> tuple[list[Actor], str]: """ Given a project and an event, decide which users and teams are the owners. @@ -222,14 +218,14 @@ def get_owners( if not owners: outcome = "empty" - recipients: list[RpcActor] = list() + recipients: list[Actor] = list() elif owners == ProjectOwnership.Everyone: outcome = "everyone" users = user_service.get_many( filter=dict(user_ids=list(project.member_set.values_list("user_id", flat=True))) ) - recipients = RpcActor.many_from_object(users) + recipients = Actor.many_from_object(users) else: outcome = "match" @@ -291,7 +287,7 @@ def get_suspect_commit_users(project: Project, event: Event) -> list[RpcUser]: return [committer for committer in suspect_committers if committer.id in in_project_user_ids] -def dedupe_suggested_assignees(suggested_assignees: Iterable[RpcActor]) -> Iterable[RpcActor]: +def dedupe_suggested_assignees(suggested_assignees: Iterable[Actor]) -> Iterable[Actor]: return list({assignee.id: assignee for assignee in suggested_assignees}.values()) @@ -301,7 +297,7 @@ def determine_eligible_recipients( target_identifier: int | None = None, event: Event | None = None, fallthrough_choice: FallthroughChoiceType | None = None, -) -> Iterable[RpcActor]: +) -> Iterable[Actor]: """ Either get the individual recipient from the target type/id or the owners as determined by rules for this project and event. 
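(Illustrative sketch only, not part of the patch: the participants code resolves user objects, teams, and string identifiers to Actor through the class methods exercised in these hunks. The ids and the stub RpcUser objects below are hypothetical.)

    from sentry.services.hybrid_cloud.user import RpcUser
    from sentry.types.actor import Actor

    # Parse a "team:<id>" identifier, as in the test changes further down.
    owner = Actor.from_identifier("team:7")  # hypothetical team id
    assert owner.is_team

    # Batch-convert user objects, mirroring get_participants_for_release above.
    actors = Actor.many_from_object(RpcUser(id=uid) for uid in (1, 2, 3))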
@@ -312,12 +308,12 @@ def determine_eligible_recipients( elif target_type == ActionTargetType.MEMBER: user = get_user_from_identifier(project, target_identifier) if user: - return [RpcActor.from_object(user)] + return [Actor.from_object(user)] elif target_type == ActionTargetType.TEAM: team = get_team_from_identifier(project, target_identifier) if team: - return [RpcActor.from_orm_team(team)] + return [Actor.from_orm_team(team)] elif target_type == ActionTargetType.ISSUE_OWNERS: if not event: @@ -338,9 +334,7 @@ def determine_eligible_recipients( suspect_commit_users = None try: - suspect_commit_users = RpcActor.many_from_object( - get_suspect_commit_users(project, event) - ) + suspect_commit_users = Actor.many_from_object(get_suspect_commit_users(project, event)) suggested_assignees.extend(suspect_commit_users) except (Release.DoesNotExist, Commit.DoesNotExist): logger.info("Skipping suspect committers because release does not exist.") @@ -362,7 +356,7 @@ def determine_eligible_recipients( if suggested_assignees: return dedupe_suggested_assignees(suggested_assignees) - return RpcActor.many_from_object(get_fallthrough_recipients(project, fallthrough_choice)) + return Actor.many_from_object(get_fallthrough_recipients(project, fallthrough_choice)) return set() @@ -376,7 +370,7 @@ def get_send_to( fallthrough_choice: FallthroughChoiceType | None = None, rules: Iterable[Rule] | None = None, notification_uuid: str | None = None, -) -> Mapping[ExternalProviders, set[RpcActor]]: +) -> Mapping[ExternalProviders, set[Actor]]: recipients = determine_eligible_recipients( project, target_type, target_identifier, event, fallthrough_choice ) @@ -471,8 +465,8 @@ def get_team_from_identifier(project: Project, target_identifier: str | int | No def _partition_recipients( - recipients: Iterable[RpcActor], -) -> Mapping[ActorType, set[RpcActor]]: + recipients: Iterable[Actor], +) -> Mapping[ActorType, set[Actor]]: mapping = defaultdict(set) for recipient in recipients: mapping[recipient.actor_type].add(recipient) @@ -480,8 +474,8 @@ def _partition_recipients( def _get_users_from_team_fall_back( - teams: Iterable[RpcActor], - recipients_by_provider: Mapping[ExternalProviders, Iterable[RpcActor]], + teams: Iterable[Actor], + recipients_by_provider: Mapping[ExternalProviders, Iterable[Actor]], ) -> Iterable[RpcUser]: assert all(team.is_team for team in teams) @@ -503,9 +497,9 @@ def _get_users_from_team_fall_back( def combine_recipients_by_provider( - teams_by_provider: Mapping[ExternalProviders, Iterable[RpcActor]], - users_by_provider: Mapping[ExternalProviders, Iterable[RpcActor]], -) -> Mapping[ExternalProviders, set[RpcActor]]: + teams_by_provider: Mapping[ExternalProviders, Iterable[Actor]], + users_by_provider: Mapping[ExternalProviders, Iterable[Actor]], +) -> Mapping[ExternalProviders, set[Actor]]: """TODO(mgaeta): Make this more generic and move it to utils.""" recipients_by_provider = defaultdict(set) for provider, teams in teams_by_provider.items(): @@ -518,12 +512,12 @@ def combine_recipients_by_provider( def get_notification_recipients( - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], type: NotificationSettingEnum, organization_id: int | None = None, project_ids: list[int] | None = None, actor_type: ActorType | None = None, -) -> Mapping[ExternalProviders, set[RpcActor]]: +) -> Mapping[ExternalProviders, set[Actor]]: recipients_by_provider = notifications_service.get_notification_recipients( recipients=list(recipients), type=type, @@ -541,12 +535,12 @@ def 
get_notification_recipients( # TODO(Steve): Remove once reference is gone from getsentry def get_notification_recipients_v2( - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], type: NotificationSettingEnum, organization_id: int | None = None, project_ids: list[int] | None = None, actor_type: ActorType | None = None, -) -> Mapping[ExternalProviders, set[RpcActor]]: +) -> Mapping[ExternalProviders, set[Actor]]: return get_notification_recipients( recipients=recipients, type=type, @@ -558,12 +552,12 @@ def get_notification_recipients_v2( def _get_recipients_by_provider( project: Project, - recipients: Iterable[RpcActor], + recipients: Iterable[Actor], notification_type_enum: NotificationSettingEnum = NotificationSettingEnum.ISSUE_ALERTS, target_type: ActionTargetType | None = None, target_identifier: int | None = None, notification_uuid: str | None = None, -) -> Mapping[ExternalProviders, set[RpcActor]]: +) -> Mapping[ExternalProviders, set[Actor]]: """Get the lists of recipients that should receive an Issue Alert by ExternalProvider.""" recipients_by_type = _partition_recipients(recipients) teams = recipients_by_type[ActorType.TEAM] @@ -571,7 +565,7 @@ def _get_recipients_by_provider( # First evaluate the teams. setting_type = notification_type_enum - teams_by_provider: Mapping[ExternalProviders, Iterable[RpcActor]] = {} + teams_by_provider: Mapping[ExternalProviders, Iterable[Actor]] = {} # get by team teams_by_provider = get_notification_recipients( @@ -590,12 +584,10 @@ def _get_recipients_by_provider( } # If there are any teams that didn't get added, fall back and add all users. - users |= set( - RpcActor.many_from_object(_get_users_from_team_fall_back(teams, teams_by_provider)) - ) + users |= set(Actor.many_from_object(_get_users_from_team_fall_back(teams, teams_by_provider))) # Repeat for users. - users_by_provider: Mapping[ExternalProviders, Iterable[RpcActor]] = {} + users_by_provider: Mapping[ExternalProviders, Iterable[Actor]] = {} # convert from string to enum users_by_provider = get_notification_recipients( recipients=users, diff --git a/src/sentry/ownership/grammar.py b/src/sentry/ownership/grammar.py index 65261e5441133a..6d741e738b0157 100644 --- a/src/sentry/ownership/grammar.py +++ b/src/sentry/ownership/grammar.py @@ -13,8 +13,8 @@ from sentry.eventstore.models import EventSubjectTemplateData from sentry.models.integrations.repository_project_path_config import RepositoryProjectPathConfig from sentry.models.organizationmember import OrganizationMember -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor, ActorType from sentry.utils.codeowners import codeowners_match from sentry.utils.event_frames import find_stack_frames, get_sdk_name, munged_filename_and_frames from sentry.utils.glob import glob_match @@ -412,7 +412,7 @@ def convert_codeowners_syntax( return result -def resolve_actors(owners: Iterable[Owner], project_id: int) -> Mapping[Owner, RpcActor]: +def resolve_actors(owners: Iterable[Owner], project_id: int) -> Mapping[Owner, Actor]: """Convert a list of Owner objects into a dictionary of {Owner: Actor} pairs. 
Actors not identified are returned as None.""" @@ -455,7 +455,7 @@ def resolve_actors(owners: Iterable[Owner], project_id: int) -> Mapping[Owner, R actors.update( { - ("user", email.lower()): RpcActor(id=u_id, actor_type=ActorType.USER) + ("user", email.lower()): Actor(id=u_id, actor_type=ActorType.USER) # This will need to be broken in hybrid cloud world, querying users from region silo won't be possible # without an explicit service call. for u_id, email in user_id_email_tuples @@ -465,7 +465,7 @@ def resolve_actors(owners: Iterable[Owner], project_id: int) -> Mapping[Owner, R if teams: actors.update( { - ("team", slug): RpcActor(id=t_id, actor_type=ActorType.TEAM, slug=slug) + ("team", slug): Actor(id=t_id, actor_type=ActorType.TEAM, slug=slug) for t_id, slug in Team.objects.filter( slug__in=[o.identifier for o in teams], projectteam__project_id=project_id ).values_list("id", "slug") diff --git a/src/sentry/plugins/bases/notify.py b/src/sentry/plugins/bases/notify.py index 6e26bcd088452b..122e97d24f7e6c 100644 --- a/src/sentry/plugins/bases/notify.py +++ b/src/sentry/plugins/bases/notify.py @@ -10,9 +10,9 @@ from sentry.notifications.types import NotificationSettingEnum from sentry.plugins.base import Notification, Plugin from sentry.plugins.base.configuration import react_plugin_config -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.notifications.service import notifications_service from sentry.shared_integrations.exceptions import ApiError +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviders @@ -143,7 +143,7 @@ def get_sendable_user_objects(self, project): """ if self.get_conf_key() == "mail": user_ids = list(project.member_set.values_list("user_id", flat=True)) - actors = [RpcActor(id=uid, actor_type=ActorType.USER) for uid in user_ids] + actors = [Actor(id=uid, actor_type=ActorType.USER) for uid in user_ids] recipients = notifications_service.get_notification_recipients( recipients=actors, type=NotificationSettingEnum.ISSUE_ALERTS, diff --git a/src/sentry/rules/actions/utils.py b/src/sentry/rules/actions/utils.py index df9db699ecd02a..42f513c9f11392 100644 --- a/src/sentry/rules/actions/utils.py +++ b/src/sentry/rules/actions/utils.py @@ -4,7 +4,7 @@ from sentry.api.serializers.models.rule import generate_rule_label from sentry.models.environment import Environment from sentry.models.rule import Rule -from sentry.services.hybrid_cloud.actor import RpcActor +from sentry.types.actor import Actor ONE_HOUR = 60 ONE_DAY = ONE_HOUR * 24 @@ -16,9 +16,7 @@ def get_updated_rule_data(rule: Rule) -> dict[str, Any]: if rule.environment_id: rule_data["environment_id"] = rule.environment_id if rule.owner_user_id or rule.owner_team_id: - rule_data["owner"] = RpcActor.from_id( - user_id=rule.owner_user_id, team_id=rule.owner_team_id - ) + rule_data["owner"] = Actor.from_id(user_id=rule.owner_user_id, team_id=rule.owner_team_id) rule_data["label"] = rule.label return rule_data diff --git a/src/sentry/services/hybrid_cloud/actor.py b/src/sentry/services/hybrid_cloud/actor.py deleted file mode 100644 index f5729723be6645..00000000000000 --- a/src/sentry/services/hybrid_cloud/actor.py +++ /dev/null @@ -1,12 +0,0 @@ -# Deprecated module for actor imports -# Use sentry.types.actor instead. 
-from sentry.types.actor import Actor, ActorTarget, ActorType, parse_and_validate_actor - -RpcActor = Actor - -__all__ = ( - "RpcActor", - "ActorType", - "ActorTarget", - "parse_and_validate_actor", -) diff --git a/src/sentry/services/hybrid_cloud/notifications/impl.py b/src/sentry/services/hybrid_cloud/notifications/impl.py index 7c62a9596dea02..144a261a7bd7c0 100644 --- a/src/sentry/services/hybrid_cloud/notifications/impl.py +++ b/src/sentry/services/hybrid_cloud/notifications/impl.py @@ -13,9 +13,9 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.notifications import NotificationsService from sentry.services.hybrid_cloud.user.service import user_service +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviderEnum, ExternalProviders @@ -59,7 +59,7 @@ def enable_all_settings_for_provider( def update_notification_options( self, *, - actor: RpcActor, + actor: Actor, type: NotificationSettingEnum, scope_type: NotificationScopeEnum, scope_identifier: int, @@ -142,7 +142,7 @@ def get_subscriptions_for_projects( def get_participants( self, *, - recipients: list[RpcActor], + recipients: list[Actor], type: NotificationSettingEnum, project_ids: list[int] | None = None, organization_id: int | None = None, @@ -175,12 +175,12 @@ def get_users_for_weekly_reports( def get_notification_recipients( self, *, - recipients: list[RpcActor], + recipients: list[Actor], type: NotificationSettingEnum, organization_id: int | None = None, project_ids: list[int] | None = None, actor_type: ActorType | None = None, - ) -> Mapping[str, set[RpcActor]]: + ) -> Mapping[str, set[Actor]]: controller = NotificationController( recipients=recipients, organization_id=organization_id, diff --git a/src/sentry/services/hybrid_cloud/notifications/service.py b/src/sentry/services/hybrid_cloud/notifications/service.py index 5d4a9690319a3f..e1a4bfeb260a83 100644 --- a/src/sentry/services/hybrid_cloud/notifications/service.py +++ b/src/sentry/services/hybrid_cloud/notifications/service.py @@ -10,9 +10,9 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method from sentry.silo.base import SiloMode +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviderEnum, ExternalProviders @@ -45,7 +45,7 @@ def enable_all_settings_for_provider( def update_notification_options( self, *, - actor: RpcActor, + actor: Actor, type: NotificationSettingEnum, scope_type: NotificationScopeEnum, scope_identifier: int, @@ -86,7 +86,7 @@ def get_subscriptions_for_projects( def get_participants( self, *, - recipients: list[RpcActor], + recipients: list[Actor], type: NotificationSettingEnum, project_ids: list[int] | None = None, organization_id: int | None = None, @@ -105,12 +105,12 @@ def get_users_for_weekly_reports( def get_notification_recipients( self, *, - recipients: list[RpcActor], + recipients: list[Actor], type: NotificationSettingEnum, organization_id: int | None = None, project_ids: list[int] | None = None, actor_type: ActorType | None = None, - ) -> Mapping[str, set[RpcActor]]: + ) -> Mapping[str, set[Actor]]: pass diff --git a/src/sentry/tasks/summaries/daily_summary.py b/src/sentry/tasks/summaries/daily_summary.py index 2bb19d5daf5bbf..09f064cbb4983c 100644 --- 
a/src/sentry/tasks/summaries/daily_summary.py +++ b/src/sentry/tasks/summaries/daily_summary.py @@ -18,7 +18,6 @@ from sentry.models.release import Release from sentry.models.releases.release_project import ReleaseProject from sentry.notifications.notifications.daily_summary import DailySummaryNotification -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.notifications import notifications_service from sentry.services.hybrid_cloud.user.service import user_service from sentry.services.hybrid_cloud.user_option import user_option_service @@ -39,6 +38,7 @@ user_project_ownership, ) from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.group import GroupSubStatus from sentry.types.integrations import ExternalProviders from sentry.utils import json @@ -299,7 +299,7 @@ def deliver_summary(ctx: OrganizationReportContext, users: list[int]): ) for user_id in user_ids: top_projects_context_map = build_top_projects_map(ctx, user_id) - user = cast(RpcActor, user_service.get_user(user_id=user_id)) + user = cast(Actor, user_service.get_user(user_id=user_id)) logger.info( "daily_summary.delivering_summary", extra={"user": user_id, "organization": ctx.organization.id}, diff --git a/src/sentry/testutils/helpers/notifications.py b/src/sentry/testutils/helpers/notifications.py index 0223d8c12f477c..533d9c794094ae 100644 --- a/src/sentry/testutils/helpers/notifications.py +++ b/src/sentry/testutils/helpers/notifications.py @@ -19,8 +19,8 @@ from sentry.models.user import User from sentry.notifications.notifications.base import BaseNotification from sentry.notifications.utils.actions import MessageAction -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user import RpcUser +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders @@ -58,7 +58,7 @@ def get_participants(self): return [] def get_message_actions( - self, recipient: RpcActor, provider: ExternalProviders + self, recipient: Actor, provider: ExternalProviders ) -> Sequence[MessageAction]: zombo_link = MessageAction( name="Go to Zombo.com", diff --git a/src/sentry/web/frontend/debug/mail.py b/src/sentry/web/frontend/debug/mail.py index 049a253b26ad92..c20af81f45d24f 100644 --- a/src/sentry/web/frontend/debug/mail.py +++ b/src/sentry/web/frontend/debug/mail.py @@ -53,13 +53,13 @@ get_issue_replay_link, get_rules, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.testutils.helpers.datetime import before_now # NOQA:S007 from sentry.testutils.helpers.notifications import ( # NOQA:S007 SAMPLE_TO_OCCURRENCE_MAP, TEST_FEEDBACK_ISSUE_OCCURENCE, TEST_ISSUE_OCCURRENCE, ) +from sentry.types.actor import Actor from sentry.types.group import GroupSubStatus from sentry.utils import json, loremipsum from sentry.utils.auth import AuthenticatedHttpRequest @@ -829,7 +829,7 @@ def org_delete_confirm(request): # Used to generate debug email views from a notification def render_preview_email_for_notification( - notification: BaseNotification, recipient: RpcActor + notification: BaseNotification, recipient: Actor ) -> HttpResponse: shared_context = notification.get_context() basic_args = get_builder_args(notification, recipient, shared_context) diff --git a/tests/sentry/api/endpoints/test_project_rule_details.py b/tests/sentry/api/endpoints/test_project_rule_details.py index 46b86b741f3671..8a263462440e99 100644 --- a/tests/sentry/api/endpoints/test_project_rule_details.py +++ 
b/tests/sentry/api/endpoints/test_project_rule_details.py @@ -19,13 +19,13 @@ from sentry.models.environment import Environment from sentry.models.rule import NeglectedRule, Rule, RuleActivity, RuleActivityType from sentry.models.rulefirehistory import RuleFireHistory -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.testutils.cases import APITestCase from sentry.testutils.helpers import install_slack from sentry.testutils.helpers.datetime import freeze_time from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import assume_test_silo_mode +from sentry.types.actor import Actor def assert_rule_from_payload(rule: Rule, payload: Mapping[str, Any]) -> None: @@ -37,7 +37,7 @@ def assert_rule_from_payload(rule: Rule, payload: Mapping[str, Any]) -> None: owner_id = payload.get("owner") if owner_id: - actor = RpcActor.from_identifier(owner_id) + actor = Actor.from_identifier(owner_id) if actor.is_user: assert rule.owner_user_id == actor.id assert rule.owner_team_id is None diff --git a/tests/sentry/api/endpoints/test_project_team_details.py b/tests/sentry/api/endpoints/test_project_team_details.py index c86eb1b0938718..220957e75fd38c 100644 --- a/tests/sentry/api/endpoints/test_project_team_details.py +++ b/tests/sentry/api/endpoints/test_project_team_details.py @@ -2,10 +2,10 @@ from sentry.models.projectteam import ProjectTeam from sentry.models.rule import Rule -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.testutils.cases import APITestCase from sentry.testutils.helpers import with_feature from sentry.testutils.helpers.options import override_options +from sentry.types.actor import Actor class ProjectTeamDetailsTest(APITestCase): @@ -107,17 +107,17 @@ def test_remove_team(self): ) ar1 = self.create_alert_rule( name="test alert rule", - owner=RpcActor.from_id(user_id=None, team_id=team.id), + owner=Actor.from_id(user_id=None, team_id=team.id), projects=[project], ) ar2 = self.create_alert_rule( name="another test alert rule", - owner=RpcActor.from_id(user_id=None, team_id=team.id), + owner=Actor.from_id(user_id=None, team_id=team.id), projects=[another_project], ) ar3 = self.create_alert_rule( name="another test alert rule", - owner=RpcActor.from_id(user_id=None, team_id=another_team.id), + owner=Actor.from_id(user_id=None, team_id=another_team.id), projects=[another_project], ) diff --git a/tests/sentry/api/endpoints/test_rule_snooze.py b/tests/sentry/api/endpoints/test_rule_snooze.py index 1132766411a2c3..ad86bb5a7ce8da 100644 --- a/tests/sentry/api/endpoints/test_rule_snooze.py +++ b/tests/sentry/api/endpoints/test_rule_snooze.py @@ -6,10 +6,10 @@ from sentry import audit_log from sentry.models.rule import Rule from sentry.models.rulesnooze import RuleSnooze -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.log.service import log_rpc_service from sentry.testutils.cases import APITestCase from sentry.testutils.outbox import outbox_runner +from sentry.types.actor import Actor class BaseRuleSnoozeTest(APITestCase): @@ -545,7 +545,7 @@ def test_user_can_snooze_metric_alert_for_self(self): other_metric_alert_rule = self.create_alert_rule( organization=self.project.organization, projects=[self.project], - owner=RpcActor.from_identifier(f"team:{other_team.id}"), + owner=Actor.from_identifier(f"team:{other_team.id}"), ) self.get_success_response( self.organization.slug, diff --git a/tests/sentry/api/endpoints/test_team_alerts_triggered.py 
b/tests/sentry/api/endpoints/test_team_alerts_triggered.py index d6da7812622c29..96fd1754bf7233 100644 --- a/tests/sentry/api/endpoints/test_team_alerts_triggered.py +++ b/tests/sentry/api/endpoints/test_team_alerts_triggered.py @@ -5,9 +5,9 @@ IncidentActivityType, IncidentStatus, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.datetime import before_now, freeze_time +from sentry.types.actor import Actor @freeze_time() @@ -28,7 +28,7 @@ def test_simple(self): threshold_type=AlertRuleThresholdType.ABOVE, resolve_threshold=10, threshold_period=1, - owner=RpcActor.from_identifier(self.user.id), + owner=Actor.from_identifier(self.user.id), ) user_owned_incident = self.create_incident(status=20, alert_rule=user_owned_rule) activities = [] @@ -110,7 +110,7 @@ def test_not_as_simple(self): threshold_type=AlertRuleThresholdType.ABOVE, resolve_threshold=10, threshold_period=1, - owner=RpcActor.from_identifier(self.user.id), + owner=Actor.from_identifier(self.user.id), ) user_owned_incident = self.create_incident( projects=[project2], status=20, alert_rule=user_owned_rule @@ -125,7 +125,7 @@ def test_not_as_simple(self): threshold_type=AlertRuleThresholdType.ABOVE, resolve_threshold=10, threshold_period=1, - owner=RpcActor.from_identifier(f"team:{self.team.id}"), + owner=Actor.from_identifier(f"team:{self.team.id}"), ) team_owned_incident = self.create_incident( projects=[project1], status=20, alert_rule=team_owned_rule @@ -174,7 +174,7 @@ def test(self): organization=self.organization, projects=[project1], name="user owned rule", - owner=RpcActor.from_identifier(self.user.id), + owner=Actor.from_identifier(self.user.id), ) user_owned_incident = self.create_incident(status=20, alert_rule=user_owned_rule) @@ -192,7 +192,7 @@ def test(self): organization=self.organization, projects=[project1], name="team owned rule", - owner=RpcActor.from_identifier(f"team:{self.team.id}"), + owner=Actor.from_identifier(f"team:{self.team.id}"), ) team_owned_incident = self.create_incident(status=20, alert_rule=team_owned_rule) activities.append( diff --git a/tests/sentry/api/helpers/test_group_index.py b/tests/sentry/api/helpers/test_group_index.py index e85c1307901ebb..1c5dd3f10862f7 100644 --- a/tests/sentry/api/helpers/test_group_index.py +++ b/tests/sentry/api/helpers/test_group_index.py @@ -24,11 +24,11 @@ from sentry.models.groupsnooze import GroupSnooze from sentry.models.groupsubscription import GroupSubscription from sentry.notifications.types import GroupSubscriptionReason -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.group import GroupSubStatus pytestmark = [requires_snuba] @@ -507,7 +507,7 @@ def setUp(self) -> None: @patch("sentry.analytics.record") def test_assigned_to(self, mock_record: Mock) -> None: assigned_to = handle_assigned_to( - RpcActor.from_identifier(self.user.id), + Actor.from_identifier(self.user.id), None, None, self.group_list, @@ -542,7 +542,7 @@ def test_assigned_to(self, mock_record: Mock) -> None: def test_unassign(self, mock_record: Mock) -> None: # first assign the issue handle_assigned_to( - RpcActor.from_identifier(self.user.id), + Actor.from_identifier(self.user.id), None, None, self.group_list, @@ -592,7 +592,7 @@ def 
test_unassign_team(self, mock_record: Mock) -> None: # first assign the issue to team1 assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team1.id}"), + Actor.from_identifier(f"team:{team1.id}"), None, None, self.group_list, @@ -656,7 +656,7 @@ def test_unassign_team_with_team_workflow_notifications_flag(self, mock_record: # first assign the issue to team1 assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team1.id}"), + Actor.from_identifier(f"team:{team1.id}"), None, None, self.group_list, @@ -701,7 +701,7 @@ def test_reassign_user(self, mock_record: Mock) -> None: # first assign the issue assigned_to = handle_assigned_to( - RpcActor.from_identifier(self.user.id), + Actor.from_identifier(self.user.id), None, None, self.group_list, @@ -719,7 +719,7 @@ def test_reassign_user(self, mock_record: Mock) -> None: # then assign it to someone else assigned_to = handle_assigned_to( - RpcActor.from_identifier(user2.id), + Actor.from_identifier(user2.id), None, None, self.group_list, @@ -758,7 +758,7 @@ def test_reassign_user(self, mock_record: Mock) -> None: ) # pass assignedTo but it's the same as the existing assignee assigned_to = handle_assigned_to( - RpcActor.from_identifier(user2.id), + Actor.from_identifier(user2.id), None, None, self.group_list, @@ -816,7 +816,7 @@ def test_reassign_team(self, mock_record: Mock) -> None: # first assign the issue to team1 assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team1.id}"), + Actor.from_identifier(f"team:{team1.id}"), None, None, self.group_list, @@ -840,7 +840,7 @@ def test_reassign_team(self, mock_record: Mock) -> None: # then assign it to team2 assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team2.id}"), + Actor.from_identifier(f"team:{team2.id}"), None, None, self.group_list, @@ -911,7 +911,7 @@ def test_reassign_team_with_team_workflow_notifications_flag(self, mock_record: # first assign the issue to team1 assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team1.id}"), + Actor.from_identifier(f"team:{team1.id}"), None, None, self.group_list, @@ -929,7 +929,7 @@ def test_reassign_team_with_team_workflow_notifications_flag(self, mock_record: # then assign it to team2 assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team2.id}"), + Actor.from_identifier(f"team:{team2.id}"), None, None, self.group_list, @@ -979,7 +979,7 @@ def test_user_in_reassigned_team(self): # assign the issue to the team assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team1.id}"), + Actor.from_identifier(f"team:{team1.id}"), None, None, self.group_list, @@ -1003,7 +1003,7 @@ def test_user_in_reassigned_team(self): # then assign it to user1 assigned_to = handle_assigned_to( - RpcActor.from_identifier(user1.id), + Actor.from_identifier(user1.id), None, None, self.group_list, @@ -1035,7 +1035,7 @@ def test_user_in_reassigned_team(self): # assign the issue back to the team assigned_to = handle_assigned_to( - RpcActor.from_identifier(f"team:{team1.id}"), + Actor.from_identifier(f"team:{team1.id}"), None, None, self.group_list, diff --git a/tests/sentry/api/serializers/rest_framework/test_mentions.py b/tests/sentry/api/serializers/rest_framework/test_mentions.py index 1928f5a0e95c0c..db29cddb4c9874 100644 --- a/tests/sentry/api/serializers/rest_framework/test_mentions.py +++ b/tests/sentry/api/serializers/rest_framework/test_mentions.py @@ -1,11 +1,11 @@ from sentry.api.serializers.rest_framework.mentions import extract_user_ids_from_mentions -from 
sentry.services.hybrid_cloud.actor import RpcActor from sentry.testutils.cases import TestCase +from sentry.types.actor import Actor class ExtractUserIdsFromMentionsTest(TestCase): def test_users(self): - actor = RpcActor.from_id(user_id=self.user.id) + actor = Actor.from_id(user_id=self.user.id) result = extract_user_ids_from_mentions(self.organization.id, [actor]) assert result["users"] == {self.user.id} assert result["team_users"] == set() @@ -13,7 +13,7 @@ def test_users(self): other_user = self.create_user() result = extract_user_ids_from_mentions( - self.organization.id, [actor, RpcActor.from_id(user_id=other_user.id)] + self.organization.id, [actor, Actor.from_id(user_id=other_user.id)] ) assert result["users"] == {self.user.id, other_user.id} assert result["team_users"] == set() @@ -28,7 +28,7 @@ def test_teams(self): self.create_member( user=not_team_member, organization=self.organization, role="member", teams=[] ) - actor = RpcActor.from_id(team_id=self.team.id) + actor = Actor.from_id(team_id=self.team.id) result = extract_user_ids_from_mentions(self.organization.id, [actor]) assert result["users"] == set() assert result["team_users"] == {self.user.id, member_user.id} @@ -36,7 +36,7 @@ def test_teams(self): # Explicitly mentioned users shouldn't be included in team_users result = extract_user_ids_from_mentions( - self.organization.id, [RpcActor.from_id(user_id=member_user.id), actor] + self.organization.id, [Actor.from_id(user_id=member_user.id), actor] ) assert result["users"] == {member_user.id} assert result["team_users"] == {self.user.id} diff --git a/tests/sentry/api/serializers/test_alert_rule.py b/tests/sentry/api/serializers/test_alert_rule.py index 28e32bf8b9cf0d..64c42ff5268dea 100644 --- a/tests/sentry/api/serializers/test_alert_rule.py +++ b/tests/sentry/api/serializers/test_alert_rule.py @@ -14,10 +14,10 @@ AlertRuleTriggerAction, ) from sentry.models.rule import Rule -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service from sentry.snuba.models import SnubaQueryEventType from sentry.testutils.cases import APITestCase, TestCase +from sentry.types.actor import Actor NOT_SET = object() @@ -64,7 +64,7 @@ def assert_alert_rule_serialized( assert result["environment"] is None if alert_rule.user_id or alert_rule.team_id: - owner = RpcActor.from_id(user_id=alert_rule.user_id, team_id=alert_rule.team_id) + owner = Actor.from_id(user_id=alert_rule.user_id, team_id=alert_rule.team_id) assert owner assert result["owner"] == owner.identifier else: @@ -105,7 +105,7 @@ def create_issue_alert_rule(self, data): if data.get("date_added"): rule.date_added = data["date_added"] if data.get("owner"): - actor = RpcActor.from_identifier(data["owner"]) + actor = Actor.from_identifier(data["owner"]) if actor.is_user: rule.owner_user_id = actor.id if actor.is_team: @@ -201,7 +201,7 @@ def test_owner(self): alert_rule = self.create_alert_rule( environment=self.environment, user=user, - owner=RpcActor.from_id(team_id=self.team.id, user_id=None), + owner=Actor.from_id(team_id=self.team.id, user_id=None), ) result = serialize(alert_rule) self.assert_alert_rule_serialized(alert_rule, result) diff --git a/tests/sentry/api/serializers/test_fields.py b/tests/sentry/api/serializers/test_fields.py index 90057a2d96d5d6..5e79eb3d1c2b4a 100644 --- a/tests/sentry/api/serializers/test_fields.py +++ b/tests/sentry/api/serializers/test_fields.py @@ -5,8 +5,8 @@ from rest_framework.serializers import ListField from sentry.api.fields.actor 
import ActorField -from sentry.services.hybrid_cloud.actor import ActorType from sentry.testutils.cases import TestCase +from sentry.types.actor import ActorType class ChildSerializer(serializers.Serializer): diff --git a/tests/sentry/deletions/test_organization.py b/tests/sentry/deletions/test_organization.py index 22e29dd7aa3f87..44006c0e5c88b3 100644 --- a/tests/sentry/deletions/test_organization.py +++ b/tests/sentry/deletions/test_organization.py @@ -22,7 +22,6 @@ from sentry.models.releasecommit import ReleaseCommit from sentry.models.releaseenvironment import ReleaseEnvironment from sentry.models.repository import Repository -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.snuba.models import SnubaQuery from sentry.tasks.deletion.scheduled import run_scheduled_deletions @@ -30,6 +29,7 @@ from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode +from sentry.types.actor import Actor class DeleteOrganizationTest(TransactionTestCase, HybridCloudTestMixin): @@ -308,7 +308,7 @@ def test_delete_org_after_project_transfer(self): alert_rule = self.create_alert_rule( organization=from_org, projects=[project], - owner=RpcActor.from_identifier(f"team:{from_team.id}"), + owner=Actor.from_identifier(f"team:{from_team.id}"), environment=environment, ) diff --git a/tests/sentry/deletions/test_team.py b/tests/sentry/deletions/test_team.py index bf80d1ef5a31bf..c52d3d23a86e79 100644 --- a/tests/sentry/deletions/test_team.py +++ b/tests/sentry/deletions/test_team.py @@ -3,10 +3,10 @@ from sentry.models.rule import Rule from sentry.models.team import Team from sentry.monitors.models import Monitor, MonitorType -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.tasks.deletion.scheduled import run_scheduled_deletions from sentry.testutils.cases import TestCase from sentry.testutils.hybrid_cloud import HybridCloudTestMixin +from sentry.types.actor import Actor class DeleteTeamTest(TestCase, HybridCloudTestMixin): @@ -33,7 +33,7 @@ def test_alert_blanking(self): rule = Rule.objects.create(label="test rule", project=project, owner_team_id=team.id) alert_rule = self.create_alert_rule( name="test alert rule", - owner=RpcActor.from_id(user_id=None, team_id=team.id), + owner=Actor.from_id(user_id=None, team_id=team.id), projects=[project], ) self.ScheduledDeletion.schedule(team, days=0) diff --git a/tests/sentry/digests/test_utilities.py b/tests/sentry/digests/test_utilities.py index 4d95e28f756e20..0d83c67a42bb55 100644 --- a/tests/sentry/digests/test_utilities.py +++ b/tests/sentry/digests/test_utilities.py @@ -15,9 +15,9 @@ from sentry.models.projectownership import ProjectOwnership from sentry.notifications.types import ActionTargetType, FallthroughChoiceType from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema -from sentry.services.hybrid_cloud.actor import ActorType from sentry.testutils.cases import SnubaTestCase, TestCase from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.types.actor import ActorType class UtilitiesHelpersTestCase(TestCase, SnubaTestCase): diff --git a/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py b/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py index 10dc8116a71c68..26a77a4e9f741e 100644 --- a/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py +++ 
b/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py @@ -7,10 +7,10 @@ from sentry.incidents.models.incident import IncidentTrigger, TriggerStatus from sentry.models.rule import Rule, RuleSource from sentry.models.rulefirehistory import RuleFireHistory -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.snuba.dataset import Dataset from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.datetime import before_now, freeze_time +from sentry.types.actor import Actor from sentry.utils import json from tests.sentry.api.serializers.test_alert_rule import BaseAlertRuleSerializerTest @@ -70,14 +70,14 @@ def setup_project_and_rules(self): organization=self.org, projects=[self.project], date_added=before_now(minutes=6), - owner=RpcActor.from_id(user_id=None, team_id=self.team.id), + owner=Actor.from_id(user_id=None, team_id=self.team.id), ) self.other_alert_rule = self.create_alert_rule( name="other alert rule", organization=self.org, projects=[self.project2], date_added=before_now(minutes=5), - owner=RpcActor.from_id(user_id=None, team_id=self.team.id), + owner=Actor.from_id(user_id=None, team_id=self.team.id), ) self.issue_rule = self.create_issue_alert_rule( data={ @@ -94,7 +94,7 @@ def setup_project_and_rules(self): organization=self.org, projects=[self.project], date_added=before_now(minutes=3), - owner=RpcActor.from_id(user_id=None, team_id=self.team2.id), + owner=Actor.from_id(user_id=None, team_id=self.team2.id), ) self.combined_rules_url = f"/api/0/organizations/{self.org.slug}/combined-rules/" @@ -170,14 +170,14 @@ def test_limit_as_1_with_paging_sort_name_urlencode(self): organization=self.org, projects=[self.project], date_added=before_now(minutes=6), - owner=RpcActor.from_id(user_id=None, team_id=self.team.id), + owner=Actor.from_id(user_id=None, team_id=self.team.id), ) alert_rule1 = self.create_alert_rule( name="!1?zz", organization=self.org, projects=[self.project], date_added=before_now(minutes=6), - owner=RpcActor.from_id(user_id=None, team_id=self.team.id), + owner=Actor.from_id(user_id=None, team_id=self.team.id), ) # Test Limit as 1, no cursor: @@ -543,7 +543,7 @@ def test_myteams_filter_superuser(self): organization=another_org, projects=[another_project], date_added=before_now(minutes=6), - owner=RpcActor.from_id(user_id=None, team_id=another_org_team.id), + owner=Actor.from_id(user_id=None, team_id=another_org_team.id), ) self.create_issue_alert_rule( @@ -824,7 +824,7 @@ def test_non_existing_owner(self): organization=self.org, projects=[self.project], date_added=before_now(minutes=1), - owner=RpcActor.from_id(user_id=None, team_id=team.id), + owner=Actor.from_id(user_id=None, team_id=team.id), ) self.create_issue_alert_rule( data={ diff --git a/tests/sentry/incidents/endpoints/test_organization_incident_index.py b/tests/sentry/incidents/endpoints/test_organization_incident_index.py index a50bbc5db4bf3e..7f4d6235bc63a5 100644 --- a/tests/sentry/incidents/endpoints/test_organization_incident_index.py +++ b/tests/sentry/incidents/endpoints/test_organization_incident_index.py @@ -6,9 +6,9 @@ from sentry.api.serializers import serialize from sentry.incidents.logic import update_incident_status from sentry.incidents.models.incident import IncidentStatus -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.snuba.dataset import Dataset from sentry.testutils.cases import APITestCase +from sentry.types.actor import Actor class IncidentListEndpointTest(APITestCase): @@ -149,14 +149,14 @@ def 
test_rule_teams(self): name="alert rule", organization=self.organization, projects=[self.project], - owner=RpcActor.from_id(user_id=None, team_id=team.id), + owner=Actor.from_id(user_id=None, team_id=team.id), ) other_team = self.create_team(organization=self.organization, members=[self.user]) other_alert_rule = self.create_alert_rule( name="rule 2", organization=self.organization, projects=[self.project], - owner=RpcActor.from_id(user_id=None, team_id=other_team.id), + owner=Actor.from_id(user_id=None, team_id=other_team.id), ) unassigned_alert_rule = self.create_alert_rule( name="rule 66", diff --git a/tests/sentry/incidents/test_logic.py b/tests/sentry/incidents/test_logic.py index 3c870d34da99d7..469854c15d4481 100644 --- a/tests/sentry/incidents/test_logic.py +++ b/tests/sentry/incidents/test_logic.py @@ -79,7 +79,6 @@ from sentry.integrations.pagerduty.utils import add_service from sentry.models.group import GroupStatus from sentry.models.integrations.organization_integration import OrganizationIntegration -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.integration.serial import serialize_integration from sentry.shared_integrations.exceptions import ApiError, ApiRateLimitedError, ApiTimeoutError from sentry.silo.base import SiloMode @@ -91,6 +90,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.helpers.options import override_options from sentry.testutils.silo import assume_test_silo_mode, assume_test_silo_mode_of +from sentry.types.actor import Actor from sentry.utils import json pytestmark = [pytest.mark.sentry_metrics] @@ -671,7 +671,7 @@ def test_alert_rule_owner(self): 1, AlertRuleThresholdType.ABOVE, 1, - owner=RpcActor.from_identifier(self.user.id), + owner=Actor.from_identifier(self.user.id), ) assert alert_rule_1.user_id == self.user.id assert alert_rule_1.team_id is None @@ -684,7 +684,7 @@ def test_alert_rule_owner(self): 1, AlertRuleThresholdType.ABOVE, 1, - owner=RpcActor.from_identifier(f"team:{self.team.id}"), + owner=Actor.from_identifier(f"team:{self.team.id}"), ) assert alert_rule_2.user_id is None assert alert_rule_2.team_id == self.team.id @@ -1042,28 +1042,28 @@ def test_alert_rule_owner(self): 1, AlertRuleThresholdType.ABOVE, 1, - owner=RpcActor.from_identifier(self.user.id), + owner=Actor.from_identifier(self.user.id), ) assert alert_rule.user_id == self.user.id assert alert_rule.team_id is None update_alert_rule( alert_rule=alert_rule, - owner=RpcActor.from_identifier(f"team:{self.team.id}"), + owner=Actor.from_identifier(f"team:{self.team.id}"), ) assert alert_rule.team_id == self.team.id assert alert_rule.user_id is None update_alert_rule( alert_rule=alert_rule, - owner=RpcActor.from_identifier(f"user:{self.user.id}"), + owner=Actor.from_identifier(f"user:{self.user.id}"), ) assert alert_rule.user_id == self.user.id assert alert_rule.team_id is None update_alert_rule( alert_rule=alert_rule, - owner=RpcActor.from_identifier(self.user.id), + owner=Actor.from_identifier(self.user.id), ) assert alert_rule.user_id == self.user.id assert alert_rule.team_id is None diff --git a/tests/sentry/integrations/msteams/test_notifications.py b/tests/sentry/integrations/msteams/test_notifications.py index d1431242527d4c..26c1193524e4af 100644 --- a/tests/sentry/integrations/msteams/test_notifications.py +++ b/tests/sentry/integrations/msteams/test_notifications.py @@ -6,7 +6,6 @@ from sentry.integrations.msteams.notifications import send_notification_as_msteams from sentry.models.activity import 
Activity from sentry.notifications.notifications.activity.note import NoteActivityNotification -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.testutils.cases import MSTeamsActivityNotificationTest, TestCase from sentry.testutils.helpers.notifications import ( @@ -16,6 +15,7 @@ from sentry.testutils.silo import assume_test_silo_mode, control_silo_test from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType +from sentry.types.actor import Actor pytestmark = [requires_snuba] @@ -94,7 +94,7 @@ def test_simple( notification = DummyNotification(self.organization) with assume_test_silo_mode(SiloMode.REGION): - recipients = RpcActor.many_from_object([self.user_1]) + recipients = Actor.many_from_object([self.user_1]) with self.tasks(): send_notification_as_msteams(notification, recipients, {}, {}) @@ -109,7 +109,7 @@ def test_unsupported_notification_type(self, mock_send_card: MagicMock): notification = DummyNotification(self.organization) with assume_test_silo_mode(SiloMode.REGION): - recipients = RpcActor.many_from_object([self.user_1]) + recipients = Actor.many_from_object([self.user_1]) with patch( "sentry.integrations.msteams.notifications.SUPPORTED_NOTIFICATION_TYPES", @@ -130,7 +130,7 @@ def test_missing_tenant_id(self, mock_send_card: MagicMock): notification = DummyNotification(self.organization) with assume_test_silo_mode(SiloMode.REGION): - recipients = RpcActor.many_from_object([self.user_1]) + recipients = Actor.many_from_object([self.user_1]) with self.tasks(): send_notification_as_msteams(notification, recipients, {}, {}) @@ -149,7 +149,7 @@ def test_no_identity(self, mock_send_card: MagicMock): notification = DummyNotification(self.organization) with assume_test_silo_mode(SiloMode.REGION): - recipients = RpcActor.many_from_object([user_2]) + recipients = Actor.many_from_object([user_2]) with self.tasks(): send_notification_as_msteams(notification, recipients, {}, {}) @@ -165,7 +165,7 @@ def test_multiple(self, mock_send_card: MagicMock): notification = DummyNotification(self.organization) with assume_test_silo_mode(SiloMode.REGION): - recipients = RpcActor.many_from_object([self.user_1, user_2]) + recipients = Actor.many_from_object([self.user_1, user_2]) with self.tasks(): send_notification_as_msteams(notification, recipients, {}, {}) diff --git a/tests/sentry/integrations/slack/test_message_builder.py b/tests/sentry/integrations/slack/test_message_builder.py index 5bc7170cab4d32..63ca61a20cf5f8 100644 --- a/tests/sentry/integrations/slack/test_message_builder.py +++ b/tests/sentry/integrations/slack/test_message_builder.py @@ -41,7 +41,6 @@ from sentry.notifications.utils import get_commits from sentry.notifications.utils.actions import MessageAction from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.testutils.cases import PerformanceIssueTestCase, TestCase from sentry.testutils.factories import DEFAULT_EVENT_DATA @@ -49,6 +48,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import assume_test_silo_mode from sentry.testutils.skips import requires_snuba +from sentry.types.actor import Actor from sentry.types.group import GroupSubStatus from sentry.utils.http import absolute_uri from tests.sentry.issues.test_utils import OccurrenceTestMixin @@ -693,7 +693,7 @@ def test_issue_alert_with_suggested_assignees(self): def 
test_team_recipient(self): issue_alert_group = self.create_group(project=self.project) ret = SlackIssuesMessageBuilder( - issue_alert_group, recipient=RpcActor.from_object(self.team) + issue_alert_group, recipient=Actor.from_object(self.team) ).build() assert isinstance(ret, dict) has_actions = False @@ -710,7 +710,7 @@ def test_team_recipient_block_kit_already_assigned(self): project=self.project, group=issue_alert_group, user_id=self.user.id ) ret = SlackIssuesMessageBuilder( - issue_alert_group, recipient=RpcActor.from_object(self.team) + issue_alert_group, recipient=Actor.from_object(self.team) ).build() assert isinstance(ret, dict) assert ( diff --git a/tests/sentry/integrations/test_notification_utilities.py b/tests/sentry/integrations/test_notification_utilities.py index 91a765773c9efd..a71242c378cf6f 100644 --- a/tests/sentry/integrations/test_notification_utilities.py +++ b/tests/sentry/integrations/test_notification_utilities.py @@ -5,12 +5,12 @@ from sentry.integrations.notifications import get_integrations_by_channel_by_recipient from sentry.models.integrations.integration import Integration from sentry.models.user import User -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.integration import RpcIntegration from sentry.services.hybrid_cloud.integration.serial import serialize_integration from sentry.testutils.cases import TestCase from sentry.testutils.helpers.notifications import DummyNotification from sentry.testutils.silo import control_silo_test +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders @@ -36,10 +36,10 @@ def setUp(self): def _assert_integrations_are( self, - actual: Mapping[RpcActor, Mapping[str, RpcIntegration | Integration]], + actual: Mapping[Actor, Mapping[str, RpcIntegration | Integration]], expected: Mapping[User, Mapping[str, RpcIntegration | Integration]], ): - assert actual == {RpcActor.from_orm_user(k): v for (k, v) in expected.items()} + assert actual == {Actor.from_orm_user(k): v for (k, v) in expected.items()} def test_simple(self): integrations_by_channel_by_recipient = get_integrations_by_channel_by_recipient( diff --git a/tests/sentry/issues/test_issue_occurrence.py b/tests/sentry/issues/test_issue_occurrence.py index f2de82aa1fac0d..1b5a9b39d315e1 100644 --- a/tests/sentry/issues/test_issue_occurrence.py +++ b/tests/sentry/issues/test_issue_occurrence.py @@ -1,6 +1,6 @@ from sentry.issues.issue_occurrence import DEFAULT_LEVEL, IssueEvidence, IssueOccurrence -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.testutils.cases import TestCase +from sentry.types.actor import Actor, ActorType from tests.sentry.issues.test_utils import OccurrenceTestMixin @@ -21,19 +21,19 @@ def test_assignee(self) -> None: occurrence_data = self.build_occurrence_data() occurrence_data["assignee"] = f"user:{self.user.id}" occurrence = IssueOccurrence.from_dict(occurrence_data) - assert occurrence.assignee == RpcActor(id=self.user.id, actor_type=ActorType.USER) + assert occurrence.assignee == Actor(id=self.user.id, actor_type=ActorType.USER) occurrence_data["assignee"] = f"{self.user.id}" occurrence = IssueOccurrence.from_dict(occurrence_data) - assert occurrence.assignee == RpcActor(id=self.user.id, actor_type=ActorType.USER) + assert occurrence.assignee == Actor(id=self.user.id, actor_type=ActorType.USER) occurrence_data["assignee"] = f"{self.user.email}" occurrence = IssueOccurrence.from_dict(occurrence_data) - assert occurrence.assignee == 
RpcActor(id=self.user.id, actor_type=ActorType.USER) + assert occurrence.assignee == Actor(id=self.user.id, actor_type=ActorType.USER) occurrence_data["assignee"] = f"{self.user.username}" occurrence = IssueOccurrence.from_dict(occurrence_data) - assert occurrence.assignee == RpcActor(id=self.user.id, actor_type=ActorType.USER) + assert occurrence.assignee == Actor(id=self.user.id, actor_type=ActorType.USER) occurrence_data["assignee"] = f"team:{self.team.id}" occurrence = IssueOccurrence.from_dict(occurrence_data) - assert occurrence.assignee == RpcActor(id=self.team.id, actor_type=ActorType.TEAM) + assert occurrence.assignee == Actor(id=self.team.id, actor_type=ActorType.TEAM) def test_assignee_none(self) -> None: occurrence_data = self.build_occurrence_data() diff --git a/tests/sentry/mail/activity/test_note.py b/tests/sentry/mail/activity/test_note.py index 1cfd3688ce7936..ffe5d4f3c4cca6 100644 --- a/tests/sentry/mail/activity/test_note.py +++ b/tests/sentry/mail/activity/test_note.py @@ -3,11 +3,11 @@ from sentry.models.options.user_option import UserOption from sentry.notifications.notifications.activity.note import NoteActivityNotification from sentry.notifications.types import GroupSubscriptionReason -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.testutils.cases import ActivityTestCase from sentry.testutils.silo import assume_test_silo_mode from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders @@ -42,7 +42,7 @@ def test_allow_self_notifications(self): participants = self.email.get_participants_with_group_subscription_reason() actual = dict(participants.get_participants_by_provider(ExternalProviders.EMAIL)) expected = { - RpcActor.from_orm_user(self.user): GroupSubscriptionReason.implicit, + Actor.from_orm_user(self.user): GroupSubscriptionReason.implicit, } assert actual == expected diff --git a/tests/sentry/mail/activity/test_release.py b/tests/sentry/mail/activity/test_release.py index 9058d5bb4d2cbd..93d45852f953bd 100644 --- a/tests/sentry/mail/activity/test_release.py +++ b/tests/sentry/mail/activity/test_release.py @@ -13,12 +13,12 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service from sentry.silo.base import SiloMode from sentry.testutils.cases import ActivityTestCase from sentry.testutils.silo import assume_test_silo_mode from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviderEnum, ExternalProviders @@ -132,9 +132,9 @@ def test_simple(self): ) ) assert participants == { - (RpcActor.from_orm_user(self.user1), GroupSubscriptionReason.committed), - (RpcActor.from_orm_user(self.user3), GroupSubscriptionReason.deploy_setting), - (RpcActor.from_orm_user(self.user5), GroupSubscriptionReason.committed), + (Actor.from_orm_user(self.user1), GroupSubscriptionReason.committed), + (Actor.from_orm_user(self.user3), GroupSubscriptionReason.deploy_setting), + (Actor.from_orm_user(self.user5), GroupSubscriptionReason.committed), } context = email.get_context() @@ -148,7 +148,7 @@ def test_simple(self): (self.commit1, user_service.get_user(user_id=self.user1.id)), ] - user_context = email.get_recipient_context(RpcActor.from_orm_user(self.user1), {}) + user_context = email.get_recipient_context(Actor.from_orm_user(self.user1), {}) # 
make sure this only includes projects user has access to assert len(user_context["projects"]) == 1 assert user_context["projects"][0][0] == self.project @@ -199,14 +199,14 @@ def test_no_committers(self): ) ) assert participants == { - (RpcActor.from_orm_user(self.user3), GroupSubscriptionReason.deploy_setting) + (Actor.from_orm_user(self.user3), GroupSubscriptionReason.deploy_setting) } context = email.get_context() assert context["environment"] == "production" assert context["repos"] == [] - user_context = email.get_recipient_context(RpcActor.from_orm_user(self.user1), {}) + user_context = email.get_recipient_context(Actor.from_orm_user(self.user1), {}) # make sure this only includes projects user has access to assert len(user_context["projects"]) == 1 assert user_context["projects"][0][0] == self.project @@ -253,15 +253,15 @@ def test_uses_default(self): ) assert len(participants) == 2 assert participants == { - (RpcActor.from_orm_user(user6), GroupSubscriptionReason.deploy_setting), - (RpcActor.from_orm_user(self.user3), GroupSubscriptionReason.deploy_setting), + (Actor.from_orm_user(user6), GroupSubscriptionReason.deploy_setting), + (Actor.from_orm_user(self.user3), GroupSubscriptionReason.deploy_setting), } context = email.get_context() assert context["environment"] == "production" assert context["repos"] == [] - user_context = email.get_recipient_context(RpcActor.from_orm_user(user6), {}) + user_context = email.get_recipient_context(Actor.from_orm_user(user6), {}) # make sure this only includes projects user has access to assert len(user_context["projects"]) == 1 assert user_context["projects"][0][0] == self.project diff --git a/tests/sentry/mail/test_adapter.py b/tests/sentry/mail/test_adapter.py index 94e3d21653528e..ab9277b746e9c9 100644 --- a/tests/sentry/mail/test_adapter.py +++ b/tests/sentry/mail/test_adapter.py @@ -42,13 +42,13 @@ from sentry.ownership.grammar import Matcher, Owner, dump_schema from sentry.plugins.base import Notification from sentry.replays.testutils import mock_replay -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.testutils.cases import PerformanceIssueTestCase, ReplaysSnubaTestCase, TestCase from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.silo import assume_test_silo_mode from sentry.testutils.skips import requires_snuba from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.group import GroupSubStatus from sentry.types.rules import RuleFuture from sentry.utils.email import MessageBuilder, get_email_addresses @@ -505,9 +505,7 @@ def test_notify_users_does_email(self, mock_logger, mock_func): args, kwargs = mock_func.call_args notification = args[1] - recipient_context = notification.get_recipient_context( - RpcActor.from_orm_user(self.user), {} - ) + recipient_context = notification.get_recipient_context(Actor.from_orm_user(self.user), {}) assert recipient_context["timezone"] == zoneinfo.ZoneInfo("Europe/Vienna") self.assertEqual(notification.project, self.project) @@ -695,9 +693,7 @@ def _test_invalid_timezone(self, s: str) -> None: notification = AlertRuleNotification( Notification(event=event), ActionTargetType.ISSUE_OWNERS ) - recipient_context = notification.get_recipient_context( - RpcActor.from_orm_user(self.user), {} - ) + recipient_context = notification.get_recipient_context(Actor.from_orm_user(self.user), {}) assert recipient_context["timezone"] == UTC def 
test_context_invalid_timezone_empty_string(self): diff --git a/tests/sentry/models/test_groupsubscription.py b/tests/sentry/models/test_groupsubscription.py index a723c4352cdd72..61d79649819124 100644 --- a/tests/sentry/models/test_groupsubscription.py +++ b/tests/sentry/models/test_groupsubscription.py @@ -14,13 +14,13 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.helpers.slack import link_team from sentry.testutils.silo import assume_test_silo_mode +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviderEnum, ExternalProviders @@ -331,7 +331,7 @@ def _assert_subscribers_are( for provider in ExternalProviders: actual = dict(all_participants.get_participants_by_provider(provider)) expected = { - RpcActor.from_object(user): reason + Actor.from_object(user): reason for (user, reason) in (all_expected.get(provider) or {}).items() } assert actual == expected diff --git a/tests/sentry/models/test_project.py b/tests/sentry/models/test_project.py index 1f7e7c69562779..93fe4b200a37d3 100644 --- a/tests/sentry/models/test_project.py +++ b/tests/sentry/models/test_project.py @@ -20,7 +20,6 @@ from sentry.monitors.models import Monitor, MonitorEnvironment, MonitorType, ScheduleType from sentry.notifications.types import NotificationSettingEnum from sentry.notifications.utils.participants import get_notification_recipients -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.snuba.models import SnubaQuery from sentry.tasks.deletion.hybrid_cloud import schedule_hybrid_cloud_foreign_key_jobs_control @@ -28,6 +27,7 @@ from sentry.testutils.helpers.features import with_feature from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, control_silo_test +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders @@ -245,7 +245,7 @@ def test_transfer_to_organization_alert_rules(self): alert_rule = self.create_alert_rule( organization=self.organization, projects=[project], - owner=RpcActor.from_identifier(f"team:{team.id}"), + owner=Actor.from_identifier(f"team:{team.id}"), environment=environment, ) snuba_query = SnubaQuery.objects.filter(id=alert_rule.snuba_query_id).get() @@ -394,7 +394,7 @@ def test_remove_team_clears_alerts(self): rule = Rule.objects.create(project=self.project, label="issa rule", owner_team_id=team.id) alert_rule = self.create_alert_rule( - organization=self.organization, owner=RpcActor.from_id(team_id=team.id) + organization=self.organization, owner=Actor.from_id(team_id=team.id) ) self.project.remove_team(team) @@ -505,13 +505,13 @@ def test_copy_with_previous_settings(self): class FilterToSubscribedUsersTest(TestCase): def run_test(self, users: Iterable[User], expected_users: Iterable[User]): recipients = get_notification_recipients( - recipients=RpcActor.many_from_object(users), + recipients=Actor.many_from_object(users), type=NotificationSettingEnum.ISSUE_ALERTS, project_ids=[self.project.id], organization_id=self.project.organization.id, ) actual_recipients = recipients[ExternalProviders.EMAIL] - expected_recipients = {RpcActor.from_object(user) for user in expected_users} + expected_recipients = 
{Actor.from_object(user) for user in expected_users} assert actual_recipients == expected_recipients def test(self): diff --git a/tests/sentry/models/test_projectownership.py b/tests/sentry/models/test_projectownership.py index 41b62553d2b87a..1279e3c5edcbfd 100644 --- a/tests/sentry/models/test_projectownership.py +++ b/tests/sentry/models/test_projectownership.py @@ -6,12 +6,12 @@ from sentry.models.projectownership import ProjectOwnership from sentry.models.repository import Repository from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema, resolve_actors -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.services.hybrid_cloud.user.service import user_service from sentry.testutils.cases import TestCase from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.silo import assume_test_silo_mode_of from sentry.testutils.skips import requires_snuba +from sentry.types.actor import Actor, ActorType pytestmark = requires_snuba @@ -98,7 +98,7 @@ def test_get_owners_basic(self): ProjectOwnership.get_owners( self.project.id, {"stacktrace": {"frames": [{"filename": "foo.py"}]}} ), - ([RpcActor(id=self.team.id, actor_type=ActorType.TEAM)], [rule_a]), + ([Actor(id=self.team.id, actor_type=ActorType.TEAM)], [rule_a]), ) # Match only rule_b @@ -106,7 +106,7 @@ def test_get_owners_basic(self): ProjectOwnership.get_owners( self.project.id, {"stacktrace": {"frames": [{"filename": "src/thing.txt"}]}} ), - ([RpcActor(id=self.user.id, actor_type=ActorType.USER)], [rule_b]), + ([Actor(id=self.user.id, actor_type=ActorType.USER)], [rule_b]), ) # Matches both rule_a and rule_b @@ -116,8 +116,8 @@ def test_get_owners_basic(self): ), ( [ - RpcActor(id=self.team.id, actor_type=ActorType.TEAM), - RpcActor(id=self.user.id, actor_type=ActorType.USER), + Actor(id=self.team.id, actor_type=ActorType.TEAM), + Actor(id=self.user.id, actor_type=ActorType.USER), ], [rule_a, rule_b], ), @@ -138,8 +138,8 @@ def test_get_owners_basic(self): ), ( [ - RpcActor(id=self.team.id, actor_type=ActorType.TEAM), - RpcActor(id=self.user.id, actor_type=ActorType.USER), + Actor(id=self.team.id, actor_type=ActorType.TEAM), + Actor(id=self.user.id, actor_type=ActorType.USER), ], [rule_a, rule_b], ), @@ -163,7 +163,7 @@ def test_get_owners_when_codeowners_exists_and_no_issueowners(self): self.project.id, {"stacktrace": {"frames": [{"filename": "src/foo.js"}]}} ), ( - [RpcActor(id=self.team.id, actor_type=ActorType.TEAM)], + [Actor(id=self.team.id, actor_type=ActorType.TEAM)], [rule_a], ), ) @@ -189,8 +189,8 @@ def test_get_owners_when_codeowners_and_issueowners_exists(self): ), ( [ - RpcActor(id=self.team.id, actor_type=ActorType.TEAM), - RpcActor(id=self.team2.id, actor_type=ActorType.TEAM), + Actor(id=self.team.id, actor_type=ActorType.TEAM), + Actor(id=self.team2.id, actor_type=ActorType.TEAM), ], [rule_a, rule_c], ), @@ -603,7 +603,7 @@ def test_abs_path_when_filename_present(self): ) assert ProjectOwnership.get_owners( self.project.id, {"stacktrace": {"frames": [frame]}} - ) == ([RpcActor(id=self.team.id, actor_type=ActorType.TEAM)], [rule]) + ) == ([Actor(id=self.team.id, actor_type=ActorType.TEAM)], [rule]) def test_saves_without_either_auto_assignment_option(self): self.group = self.create_group(project=self.project) @@ -705,14 +705,14 @@ def test_no_actors(self): def test_basic(self): owners = [Owner("user", self.user.email), Owner("team", self.team.slug)] assert resolve_actors(owners, self.project.id) == { - owners[0]: RpcActor(id=self.user.id, 
actor_type=ActorType.USER), - owners[1]: RpcActor(id=self.team.id, actor_type=ActorType.TEAM), + owners[0]: Actor(id=self.user.id, actor_type=ActorType.USER), + owners[1]: Actor(id=self.team.id, actor_type=ActorType.TEAM), } def test_teams(self): # Normal team owner1 = Owner("team", self.team.slug) - actor1 = RpcActor(id=self.team.id, actor_type=ActorType.TEAM) + actor1 = Actor(id=self.team.id, actor_type=ActorType.TEAM) # Team that doesn't exist owner2 = Owner("team", "nope") @@ -733,7 +733,7 @@ def test_teams(self): def test_users(self): # Normal user owner1 = Owner("user", self.user.email) - actor1 = RpcActor(id=self.user.id, actor_type=ActorType.USER) + actor1 = Actor(id=self.user.id, actor_type=ActorType.USER) # An extra secondary email email1 = self.create_useremail(self.user, None, is_verified=True).email diff --git a/tests/sentry/notifications/notifications/test_organization_request.py b/tests/sentry/notifications/notifications/test_organization_request.py index 69d2bf8dfbf410..1e83774874534b 100644 --- a/tests/sentry/notifications/notifications/test_organization_request.py +++ b/tests/sentry/notifications/notifications/test_organization_request.py @@ -3,8 +3,8 @@ from sentry.notifications.notifications.strategies.role_based_recipient_strategy import ( RoleBasedRecipientStrategy, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.testutils.cases import TestCase +from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders @@ -23,7 +23,7 @@ class GetParticipantsTest(TestCase): def setUp(self): self.user2 = self.create_user() self.create_member(user=self.user2, organization=self.organization) - self.user_actors = {RpcActor.from_orm_user(user) for user in (self.user, self.user2)} + self.user_actors = {Actor.from_orm_user(user) for user in (self.user, self.user2)} def test_default_to_slack(self): notification = DummyRequestNotification(self.organization, self.user) diff --git a/tests/sentry/notifications/test_helpers.py b/tests/sentry/notifications/test_helpers.py index 354d077fcbce28..3256a32411a77f 100644 --- a/tests/sentry/notifications/test_helpers.py +++ b/tests/sentry/notifications/test_helpers.py @@ -18,10 +18,10 @@ get_group_settings_link, get_rules, ) -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase from sentry.testutils.silo import assume_test_silo_mode, assume_test_silo_mode_of +from sentry.types.actor import Actor def mock_event(*, transaction, data=None): @@ -147,8 +147,8 @@ def test_get_team_members(self): self.create_member(organization=self.organization, teams=[team2], user=user2) with assume_test_silo_mode_of(OrganizationMemberTeamReplica): - assert get_team_members(team1) == [RpcActor.from_object(user1)] - assert get_team_members(team2) == [RpcActor.from_object(user2)] + assert get_team_members(team1) == [Actor.from_object(user1)] + assert get_team_members(team2) == [Actor.from_object(user2)] assert get_team_members(team3) == [] def test_team_is_valid_recipient(self): diff --git a/tests/sentry/notifications/test_notificationcontroller.py b/tests/sentry/notifications/test_notificationcontroller.py index b705c27b536c73..34175fce627962 100644 --- a/tests/sentry/notifications/test_notificationcontroller.py +++ b/tests/sentry/notifications/test_notificationcontroller.py @@ -9,12 +9,12 @@ NotificationSettingEnum, NotificationSettingsOptionEnum, ) -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor from sentry.silo.base 
import SiloMode from sentry.testutils.cases import TestCase from sentry.testutils.helpers.features import with_feature from sentry.testutils.helpers.slack import link_team from sentry.testutils.silo import assume_test_silo_mode, control_silo_test +from sentry.types.actor import Actor, ActorType from sentry.types.integrations import ExternalProviderEnum, ExternalProviders @@ -139,7 +139,7 @@ def test_get_all_setting_providers(self): assert list(controller.get_all_setting_providers) == self.setting_providers def test_without_settings(self): - rpc_user = RpcActor.from_object(self.user) + rpc_user = Actor.from_object(self.user) NotificationSettingOption.objects.all().delete() NotificationSettingProvider.objects.all().delete() controller = NotificationController( @@ -519,9 +519,9 @@ def test_get_combined_settings(self): assert provider_settings == expected_setting def test_get_notification_recipients(self): - rpc_user = RpcActor.from_object(self.user) + rpc_user = Actor.from_object(self.user) new_user = self.create_user() - rpc_new_user = RpcActor.from_object(new_user) + rpc_new_user = Actor.from_object(new_user) self.create_member( organization=self.organization, user=new_user, role="member", teams=[self.team] ) @@ -608,7 +608,7 @@ def test_get_subscriptions_status_for_projects(self): } def test_get_participants(self): - rpc_user = RpcActor.from_object(self.user) + rpc_user = Actor.from_object(self.user) controller = NotificationController( recipients=[self.user], project_ids=[self.project.id], @@ -639,7 +639,7 @@ def test_get_participants(self): @with_feature("organizations:team-workflow-notifications") def test_get_team_workflow_participants(self): - rpc_user = RpcActor.from_object(self.team) + rpc_user = Actor.from_object(self.team) with assume_test_silo_mode(SiloMode.REGION): link_team(self.team, self.integration, "#team-channel", "team_channel_id") controller = NotificationController( @@ -657,7 +657,7 @@ def test_get_team_workflow_participants(self): @with_feature("organizations:team-workflow-notifications") def test_get_team_issue_alert_participants(self): - rpc_user = RpcActor.from_object(self.team) + rpc_user = Actor.from_object(self.team) with assume_test_silo_mode(SiloMode.REGION): link_team(self.team, self.integration, "#team-channel", "team_channel_id") controller = NotificationController( @@ -865,7 +865,7 @@ def test_fallback_if_invalid_team(self): assert len(controller.recipients) == 2 for recipient in controller.recipients: - assert isinstance(recipient, RpcActor) and recipient.actor_type == ActorType.USER + assert isinstance(recipient, Actor) and recipient.actor_type == ActorType.USER @with_feature("organizations:team-workflow-notifications") def test_keeps_team_as_recipient_if_valid(self): diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py index bada56deb485d2..5ec5944e89dd6b 100644 --- a/tests/sentry/notifications/utils/test_participants.py +++ b/tests/sentry/notifications/utils/test_participants.py @@ -32,7 +32,6 @@ ) from sentry.ownership import grammar from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema -from sentry.services.hybrid_cloud.actor import RpcActor from sentry.services.hybrid_cloud.user.service import user_service from sentry.silo.base import SiloMode from sentry.testutils.cases import TestCase @@ -41,6 +40,7 @@ from sentry.testutils.helpers.slack import link_team from sentry.testutils.silo import assume_test_silo_mode from sentry.testutils.skips import requires_snuba 
+from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders from sentry.utils.cache import cache from tests.sentry.mail import make_event_data @@ -64,12 +64,12 @@ class _ParticipantsTest(TestCase): def assert_recipients_are( self, - actual: Mapping[ExternalProviders, set[RpcActor]], + actual: Mapping[ExternalProviders, set[Actor]], *, email: Iterable[int] = (), slack: Iterable[int] = (), ) -> None: - expected: dict[ExternalProviders, set[RpcActor]] = collections.defaultdict(set) + expected: dict[ExternalProviders, set[Actor]] = collections.defaultdict(set) for provider, user_ids in [ (ExternalProviders.EMAIL, email), (ExternalProviders.SLACK, slack), @@ -78,14 +78,14 @@ def assert_recipients_are( for user_id in user_ids: user = user_service.get_user(user_id) assert user is not None - expected[provider].add(RpcActor.from_rpc_user(user)) + expected[provider].add(Actor.from_rpc_user(user)) assert actual == expected class GetSendToMemberTest(_ParticipantsTest): def get_send_to_member( self, project: Project | None = None, user_id: int | None = None - ) -> Mapping[ExternalProviders, set[RpcActor]]: + ) -> Mapping[ExternalProviders, set[Actor]]: return get_send_to( project=project or self.project, target_type=ActionTargetType.MEMBER, @@ -161,7 +161,7 @@ def setUp(self): def get_send_to_team( self, project: Project | None = None, team_id: int | None = None - ) -> Mapping[ExternalProviders, set[RpcActor]]: + ) -> Mapping[ExternalProviders, set[Actor]]: return get_send_to( project=project or self.project, target_type=ActionTargetType.TEAM, @@ -196,7 +196,7 @@ def test_send_to_team_direct(self): type="alerts", ).update(value="always") assert self.get_send_to_team() == { - ExternalProviders.SLACK: {RpcActor.from_orm_team(self.team)} + ExternalProviders.SLACK: {Actor.from_orm_team(self.team)} } with assume_test_silo_mode(SiloMode.CONTROL): @@ -229,7 +229,7 @@ def test_send_workflow_to_team_direct(self): target_identifier=self.team.id, notification_type_enum=NotificationSettingEnum.WORKFLOW, ) == { - ExternalProviders.SLACK: {RpcActor.from_orm_team(self.team)}, + ExternalProviders.SLACK: {Actor.from_orm_team(self.team)}, } def test_other_project_team(self): @@ -255,7 +255,7 @@ def test_other_org_team(self): class GetSendToOwnersTest(_ParticipantsTest): - def get_send_to_owners(self, event: Event) -> Mapping[ExternalProviders, set[RpcActor]]: + def get_send_to_owners(self, event: Event) -> Mapping[ExternalProviders, set[Actor]]: return get_send_to( self.project, target_type=ActionTargetType.ISSUE_OWNERS, @@ -674,10 +674,8 @@ def create_ownership( fallthrough=fallthrough, ) - def assert_recipients( - self, expected: Iterable[Team | User], received: Iterable[RpcActor] - ) -> None: - assert {RpcActor.from_object(recipient) for recipient in expected} == set(received) + def assert_recipients(self, expected: Iterable[Team | User], received: Iterable[Actor]) -> None: + assert {Actor.from_object(recipient) for recipient in expected} == set(received) # If no event to match, we assume fallthrough is enabled def test_get_owners_no_event(self): @@ -826,7 +824,7 @@ def get_send_to_fallthrough( event: Event, project: Project, fallthrough_choice: FallthroughChoiceType | None = None, - ) -> Mapping[ExternalProviders, set[RpcActor]]: + ) -> Mapping[ExternalProviders, set[Actor]]: return get_send_to( project, target_type=ActionTargetType.ISSUE_OWNERS, @@ -956,7 +954,7 @@ def test_fallthrough_admin_or_recent_under_20(self): ) event = self.store_event("admin.lol", self.project) - 
expected_notified_users = {RpcActor.from_orm_user(user) for user in notifiable_users} + expected_notified_users = {Actor.from_orm_user(user) for user in notifiable_users} notified_users = self.get_send_to_fallthrough( event, self.project, FallthroughChoiceType.ACTIVE_MEMBERS )[ExternalProviders.EMAIL] @@ -985,7 +983,7 @@ def test_fallthrough_admin_or_recent_over_20(self): ) event = self.store_event("admin.lol", self.project) - expected_notified_users = {RpcActor.from_orm_user(user) for user in notifiable_users} + expected_notified_users = {Actor.from_orm_user(user) for user in notifiable_users} notified_users = self.get_send_to_fallthrough( event, self.project, FallthroughChoiceType.ACTIVE_MEMBERS )[ExternalProviders.EMAIL] diff --git a/tests/sentry/hybridcloud/test_actor.py b/tests/sentry/types/test_actor.py similarity index 84% rename from tests/sentry/hybridcloud/test_actor.py rename to tests/sentry/types/test_actor.py index a413fd0c2c38e5..471e36334d45e5 100644 --- a/tests/sentry/hybridcloud/test_actor.py +++ b/tests/sentry/types/test_actor.py @@ -2,19 +2,19 @@ from rest_framework import serializers from sentry.models.team import Team -from sentry.services.hybrid_cloud.actor import ActorType, RpcActor, parse_and_validate_actor from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.services.hybrid_cloud.user.service import user_service from sentry.testutils.factories import Factories from sentry.testutils.pytest.fixtures import django_db_all +from sentry.types.actor import Actor, ActorType, parse_and_validate_actor @django_db_all(transaction=True) def test_many_from_object_users(): users = [Factories.create_user(), Factories.create_user()] - actors = RpcActor.many_from_object(users) + actors = Actor.many_from_object(users) assert len(actors) == len(users) - assert all([isinstance(a, RpcActor) for a in actors]) + assert all([isinstance(a, Actor) for a in actors]) assert actors[0].id == users[0].id assert actors[0].actor_type == ActorType.USER assert actors[0].is_user @@ -30,35 +30,35 @@ def test_from_identifier(): org = Factories.create_organization(owner=user) team = Factories.create_team(organization=org) - actor = RpcActor.from_identifier(user.id) + actor = Actor.from_identifier(user.id) assert actor assert actor.id == user.id assert actor.actor_type == ActorType.USER assert actor.is_user assert not actor.is_team - actor = RpcActor.from_identifier(str(user.id)) + actor = Actor.from_identifier(str(user.id)) assert actor assert actor.id == user.id assert actor.actor_type == ActorType.USER - actor = RpcActor.from_identifier(f"user:{user.id}") + actor = Actor.from_identifier(f"user:{user.id}") assert actor assert actor.id == user.id assert actor.actor_type == ActorType.USER - actor = RpcActor.from_identifier(user.username) + actor = Actor.from_identifier(user.username) assert actor assert actor.id == user.id assert actor.actor_type == ActorType.USER - actor = RpcActor.from_identifier(user.email) + actor = Actor.from_identifier(user.email) assert actor assert actor.id == user.id assert actor.actor_type == ActorType.USER assert actor.identifier == f"user:{user.id}" - actor = RpcActor.from_identifier(f"team:{team.id}") + actor = Actor.from_identifier(f"team:{team.id}") assert actor assert actor.id == team.id assert actor.actor_type == ActorType.TEAM @@ -68,20 +68,20 @@ def test_from_identifier(): def test_from_id(): - actor = RpcActor.from_id(team_id=1) + actor = Actor.from_id(team_id=1) assert actor assert actor.id == 1 assert actor.actor_type == ActorType.TEAM - actor = 
RpcActor.from_id(user_id=11) + actor = Actor.from_id(user_id=11) assert actor assert actor.id == 11 assert actor.actor_type == ActorType.USER - with pytest.raises(RpcActor.InvalidActor): - RpcActor.from_id(user_id=11, team_id=99) - with pytest.raises(RpcActor.InvalidActor): - RpcActor.from_id(user_id=None) + with pytest.raises(Actor.InvalidActor): + Actor.from_id(user_id=11, team_id=99) + with pytest.raises(Actor.InvalidActor): + Actor.from_id(user_id=None) @django_db_all(transaction=True) @@ -90,9 +90,9 @@ def test_many_from_object_rpc_users(): user_ids = [u.id for u in orm_users] rpc_users = user_service.get_many(filter={"user_ids": user_ids}) - actors = RpcActor.many_from_object(rpc_users) + actors = Actor.many_from_object(rpc_users) assert len(actors) == len(rpc_users) - assert all([isinstance(a, RpcActor) for a in actors]) + assert all([isinstance(a, Actor) for a in actors]) assert actors[0].id == rpc_users[0].id assert actors[0].actor_type == ActorType.USER @@ -107,7 +107,7 @@ def test_many_from_object_teams(): Factories.create_team(organization=organization), Factories.create_team(organization=organization), ] - actors = RpcActor.many_from_object(teams) + actors = Actor.many_from_object(teams) assert len(actors) == 2 assert actors[0].id == teams[0].id @@ -127,7 +127,7 @@ def test_many_from_object_mixed(): Factories.create_team(organization=organization), Factories.create_team(organization=organization), ] - actors = RpcActor.many_from_object(teams) + actors = Actor.many_from_object(teams) assert len(actors) == 2 assert actors[0].id == teams[0].id @@ -149,8 +149,8 @@ def test_resolve_many(): user_two = Factories.create_user() members = [user_one, user_two, team_two, team_one] - actors = [RpcActor.from_object(m) for m in members] - resolved = RpcActor.resolve_many(actors) + actors = [Actor.from_object(m) for m in members] + resolved = Actor.resolve_many(actors) assert len(resolved) == len(actors) assert isinstance(resolved[0], RpcUser) From 0280f7bc2f837e2b76a1883d93b45b4ec70da919 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Thu, 9 May 2024 14:19:25 -0400 Subject: [PATCH 220/376] ref(js): Avoid direct usage of useRouteContext (#70591) This is to help ease the transition to react-router 6 --- .../app/components/feedback/feedbackOnboarding/sidebar.tsx | 4 ++-- static/app/components/feedback/useFeedbackOnboarding.tsx | 4 ++-- static/app/components/onboardingWizard/task.tsx | 7 +++---- static/app/components/onboardingWizard/taskConfig.tsx | 2 +- static/app/types/onboarding.tsx | 4 ++-- static/app/utils/replays/hooks/useReplayOnboarding.tsx | 4 ++-- 6 files changed, 12 insertions(+), 13 deletions(-) diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index 9dfa18981e34fd..3141920b42315f 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -35,8 +35,8 @@ import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {PlatformKey, Project, SelectValue} from 'sentry/types'; import {trackAnalytics} from 'sentry/utils/analytics'; +import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; -import {useRouteContext} from 'sentry/utils/useRouteContext'; import useUrlParams from 'sentry/utils/useUrlParams'; function FeedbackOnboardingSidebar(props: CommonSidebarProps) { @@ -165,7 +165,7 @@ function 
OnboardingContent({currentProject}: {currentProject: Project}) { }>(jsFrameworkSelectOptions[0]); const defaultTab = 'npm'; - const {location} = useRouteContext(); + const location = useLocation(); const crashReportOnboarding = location.hash === CRASH_REPORT_HASH; const {getParamValue: setupMode, setParamValue: setSetupMode} = useUrlParams( diff --git a/static/app/components/feedback/useFeedbackOnboarding.tsx b/static/app/components/feedback/useFeedbackOnboarding.tsx index 80237a2ee73d6e..1802d88338f97f 100644 --- a/static/app/components/feedback/useFeedbackOnboarding.tsx +++ b/static/app/components/feedback/useFeedbackOnboarding.tsx @@ -3,8 +3,8 @@ import {useCallback, useEffect} from 'react'; import {SidebarPanelKey} from 'sentry/components/sidebar/types'; import SidebarPanelStore from 'sentry/stores/sidebarPanelStore'; import useSelectedProjectsHaveField from 'sentry/utils/project/useSelectedProjectsHaveField'; +import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; -import {useRouteContext} from 'sentry/utils/useRouteContext'; export const CRASH_REPORT_HASH = '#crashreport-sidequest'; export const FEEDBACK_HASH = '#feedback-sidequest'; @@ -22,7 +22,7 @@ export function useHaveSelectedProjectsSetupNewFeedback() { } export function useFeedbackOnboardingSidebarPanel() { - const {location} = useRouteContext(); + const location = useLocation(); const organization = useOrganization(); useEffect(() => { diff --git a/static/app/components/onboardingWizard/task.tsx b/static/app/components/onboardingWizard/task.tsx index 0cb198455d9184..13203471230044 100644 --- a/static/app/components/onboardingWizard/task.tsx +++ b/static/app/components/onboardingWizard/task.tsx @@ -22,7 +22,7 @@ import type { import {trackAnalytics} from 'sentry/utils/analytics'; import {isDemoWalkthrough} from 'sentry/utils/demoMode'; import testableTransition from 'sentry/utils/testableTransition'; -import {useRouteContext} from 'sentry/utils/useRouteContext'; +import useRouter from 'sentry/utils/useRouter'; import withOrganization from 'sentry/utils/withOrganization'; import SkipConfirm from './skipConfirm'; @@ -63,8 +63,7 @@ type Props = { function Task(props: Props) { const {task, onSkip, onMarkComplete, forwardedRef, organization, hidePanel} = props; - const routeContext = useRouteContext(); - const {router} = routeContext; + const router = useRouter(); const handleSkip = () => { recordAnalytics(task, organization, 'skipped'); onSkip(task.task); @@ -83,7 +82,7 @@ function Task(props: Props) { } if (task.actionType === 'action') { - task.action(routeContext); + task.action(router); } if (task.actionType === 'app') { diff --git a/static/app/components/onboardingWizard/taskConfig.tsx b/static/app/components/onboardingWizard/taskConfig.tsx index ca97b84c127ef5..40f45d9b147e5d 100644 --- a/static/app/components/onboardingWizard/taskConfig.tsx +++ b/static/app/components/onboardingWizard/taskConfig.tsx @@ -243,7 +243,7 @@ export function getOnboardingTasks({ skippable: true, requisites: [OnboardingTaskKey.FIRST_PROJECT], actionType: 'action', - action: ({router}) => { + action: router => { // Use `features?.` because getsentry has a different `Organization` type/payload if (!organization.features?.includes('performance-onboarding-checklist')) { window.open( diff --git a/static/app/types/onboarding.tsx b/static/app/types/onboarding.tsx index 14c81736111c33..126f69f827b0d4 100644 --- a/static/app/types/onboarding.tsx +++ b/static/app/types/onboarding.tsx @@ -1,4 +1,4 @@ 
-import type {RouteContextInterface} from 'react-router';
+import type {InjectedRouter} from 'react-router';

 import type {OnboardingContextProps} from 'sentry/components/onboarding/onboardingContext';
 import type {Category} from 'sentry/components/platformPicker';
@@ -89,7 +89,7 @@ interface OnboardingTaskDescriptorBase {
 }

 interface OnboardingTypeDescriptorWithAction extends OnboardingTaskDescriptorBase {
-  action: (props: RouteContextInterface) => void;
+  action: (props: InjectedRouter) => void;
   actionType: 'action';
 }

diff --git a/static/app/utils/replays/hooks/useReplayOnboarding.tsx b/static/app/utils/replays/hooks/useReplayOnboarding.tsx
index da086f2a14869f..c2afae2bc17403 100644
--- a/static/app/utils/replays/hooks/useReplayOnboarding.tsx
+++ b/static/app/utils/replays/hooks/useReplayOnboarding.tsx
@@ -4,9 +4,9 @@ import {SidebarPanelKey} from 'sentry/components/sidebar/types';
 import SidebarPanelStore from 'sentry/stores/sidebarPanelStore';
 import {trackAnalytics} from 'sentry/utils/analytics';
 import useSelectedProjectsHaveField from 'sentry/utils/project/useSelectedProjectsHaveField';
+import {useLocation} from 'sentry/utils/useLocation';
 import useOrganization from 'sentry/utils/useOrganization';
 import useProjects from 'sentry/utils/useProjects';
-import {useRouteContext} from 'sentry/utils/useRouteContext';

 export function useHasOrganizationSentAnyReplayEvents() {
   const {projects, fetching} = useProjects();
@@ -21,7 +21,7 @@ export function useHaveSelectedProjectsSentAnyReplayEvents() {
 }

 export function useReplayOnboardingSidebarPanel() {
-  const {location} = useRouteContext();
+  const location = useLocation();
   const organization = useOrganization();

   useEffect(() => {

From 4d2213e334fd8a2f6140dfb6ee937d157dc0a2aa Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Thu, 9 May 2024 14:40:11 -0400
Subject: [PATCH 221/376] feat(cache): add transaction duration to table
 (#70363)

Adds the avg transaction duration column to the cache module.
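The new column is produced by a second query that is joined onto the existing
cache rows by transaction name. Roughly, the join works like this (sketched in
Python for brevity; the actual change below is TypeScript using lodash's
`keyBy`, and the row shapes here are simplified assumptions):

```python
# Simplified row shapes; the real code joins span-metrics rows with a
# second metrics query keyed on the transaction name.
cache_rows = [
    {"transaction": "GET /users", "cache_miss_rate()": 0.12},
    {"transaction": "GET /orgs", "cache_miss_rate()": 0.40},
]
duration_rows = [{"transaction": "GET /users", "avg(transaction.duration)": 87.5}]

# Index the duration results by transaction, then annotate each cache row
# with its matching duration (None when the second query returned no row).
durations_by_transaction = {row["transaction"]: row for row in duration_rows}
merged = [
    {
        **row,
        "avg(transaction.duration)": durations_by_transaction.get(
            row["transaction"], {}
        ).get("avg(transaction.duration)"),
    }
    for row in cache_rows
]
assert merged[0]["avg(transaction.duration)"] == 87.5
assert merged[1]["avg(transaction.duration)"] is None
```

The `combineMeta` helper merges the `fields` and `units` metadata of the two
responses in the same spirit.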
Gotta wait for #70272 to merge, as this PR depends on it.
---
 .../performance/cache/cacheLandingPage.tsx    | 57 +++++++++++++++++--
 .../app/views/performance/cache/referrers.ts  |  1 +
 .../cache/tables/transactionsTable.tsx        | 12 +++-
 .../app/views/starfish/queries/useDiscover.ts |  9 ++-
 static/app/views/starfish/types.tsx           |  2 +
 5 files changed, 71 insertions(+), 10 deletions(-)

diff --git a/static/app/views/performance/cache/cacheLandingPage.tsx b/static/app/views/performance/cache/cacheLandingPage.tsx
index fdbb9ab5de0546..23f6ff24311db5 100644
--- a/static/app/views/performance/cache/cacheLandingPage.tsx
+++ b/static/app/views/performance/cache/cacheLandingPage.tsx
@@ -1,4 +1,5 @@
 import React from 'react';
+import keyBy from 'lodash/keyBy';

 import FeatureBadge from 'sentry/components/badge/featureBadge';
 import {Breadcrumbs} from 'sentry/components/breadcrumbs';
@@ -32,7 +33,7 @@ import {
 } from 'sentry/views/performance/cache/tables/transactionsTable';
 import * as ModuleLayout from 'sentry/views/performance/moduleLayout';
 import {ModulePageProviders} from 'sentry/views/performance/modulePageProviders';
-import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover';
+import {useMetrics, useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover';
 import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useDiscoverSeries';
 import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types';
 import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters';
@@ -99,7 +100,32 @@ export function CacheLandingPage() {
     Referrer.LANDING_CACHE_TRANSACTION_LIST
   );

-  addCustomMeta(transactionsListMeta);
+  const {
+    data: transactionDurationData,
+    error: transactionDurationError,
+    meta: transactionDurationMeta,
+    isLoading: isTransactionDurationLoading,
+  } = useMetrics(
+    {
+      search: `transaction:[${transactionsList.map(({transaction}) => `"${transaction}"`).join(',')}]`,
+      fields: [`avg(transaction.duration)`, 'transaction'],
+      enabled: !isTransactionsListLoading && transactionsList.length > 0,
+    },
+    Referrer.LANDING_CACHE_TRANSACTION_DURATION
+  );
+
+  const transactionDurationsMap = keyBy(transactionDurationData, 'transaction');
+
+  const transactionsListWithDuration =
+    transactionsList?.map(transaction => ({
+      ...transaction,
+      'avg(transaction.duration)':
+        transactionDurationsMap[transaction.transaction]?.['avg(transaction.duration)'],
+    })) || [];
+
+  const meta = combineMeta(transactionsListMeta, transactionDurationMeta);
+
+  addCustomMeta(meta);

   return (

@@ -156,11 +182,11 @@ export function CacheLandingPage() {

@@ -184,6 +210,25 @@ export function LandingPageWithProviders() {
   );
 }

+const combineMeta = (
+  meta1?: EventsMetaType,
+  meta2?: EventsMetaType
+): EventsMetaType | undefined => {
+  if (!meta1 && !meta2) {
+    return undefined;
+  }
+  if (!meta1) {
+    return meta2;
+  }
+  if (!meta2) {
+    return meta1;
+  }
+  return {
+    fields: {...meta1.fields, ...meta2.fields},
+    units: {...meta1.units, ...meta2.units},
+  };
+};
+
 // TODO - this should come from the backend
 const addCustomMeta = (meta?: EventsMetaType) => {
   if (meta) {
diff --git a/static/app/views/performance/cache/referrers.ts b/static/app/views/performance/cache/referrers.ts
index ba10434791f30c..167502fdf0476f 100644
--- a/static/app/views/performance/cache/referrers.ts
+++ b/static/app/views/performance/cache/referrers.ts
@@ -2,6 +2,7 @@ export enum Referrer {
   LANDING_CACHE_HIT_MISS_CHART = 'api.performance.cache.landing-cache-hit-miss-chart',
   LANDING_CACHE_THROUGHPUT_CHART =
'api.performance.cache.landing-cache-throughput-chart', LANDING_CACHE_TRANSACTION_LIST = 'api.performance.cache.landing-cache-transaction-list', + LANDING_CACHE_TRANSACTION_DURATION = 'api.performance.cache.landing-cache-transaction-duration', SAMPLES_CACHE_METRICS_RIBBON = 'api.performance.cache.samples-cache-metrics-ribbon', SAMPLES_CACHE_TRANSACTION_DURATION_CHART = 'api.performance.cache.samples-cache-transaction-duration-chart', diff --git a/static/app/views/performance/cache/tables/transactionsTable.tsx b/static/app/views/performance/cache/tables/transactionsTable.tsx index 4098ab7a30b1e6..341f594429324b 100644 --- a/static/app/views/performance/cache/tables/transactionsTable.tsx +++ b/static/app/views/performance/cache/tables/transactionsTable.tsx @@ -18,6 +18,8 @@ import useOrganization from 'sentry/utils/useOrganization'; import {TransactionCell} from 'sentry/views/performance/cache/tables/transactionCell'; import {renderHeadCell} from 'sentry/views/starfish/components/tableCells/renderHeadCell'; import { + MetricsFields, + type MetricsResponse, SpanFunction, SpanMetricsField, type SpanMetricsResponse, @@ -26,6 +28,7 @@ import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; import {DataTitles} from 'sentry/views/starfish/views/spans/types'; const {CACHE_MISS_RATE, SPM, TIME_SPENT_PERCENTAGE} = SpanFunction; +const {TRANSACTION_DURATION} = MetricsFields; const {CACHE_ITEM_SIZE} = SpanMetricsField; type Row = Pick< @@ -38,7 +41,8 @@ type Row = Pick< | 'sum(span.self_time)' | 'time_spent_percentage()' | 'avg(cache.item_size)' ->; +> & + Pick; type Column = GridColumnHeader< | 'transaction' @@ -46,6 +50,7 @@ type Column = GridColumnHeader< | 'cache_miss_rate()' | 'time_spent_percentage()' | 'project' + | 'avg(transaction.duration)' | 'avg(cache.item_size)' >; @@ -70,6 +75,11 @@ const COLUMN_ORDER: Column[] = [ name: `${t('Requests')} ${RATE_UNIT_TITLE[RateUnit.PER_MINUTE]}`, width: COL_WIDTH_UNDEFINED, }, + { + key: `avg(${TRANSACTION_DURATION})`, + name: DataTitles[`avg(${TRANSACTION_DURATION})`], + width: COL_WIDTH_UNDEFINED, + }, { key: `${CACHE_MISS_RATE}()`, name: DataTitles.cacheMissRate, diff --git a/static/app/views/starfish/queries/useDiscover.ts b/static/app/views/starfish/queries/useDiscover.ts index 37faf1241356be..20fe77adf29d34 100644 --- a/static/app/views/starfish/queries/useDiscover.ts +++ b/static/app/views/starfish/queries/useDiscover.ts @@ -17,7 +17,8 @@ interface UseMetricsOptions { enabled?: boolean; fields?: Fields; limit?: number; - search?: MutableSearch; + referrer?: string; + search?: MutableSearch | string; // TODO - ideally this probably would be only `Mutable Search`, but it doesn't handle some situations well sorts?: Sort[]; } @@ -75,16 +76,18 @@ const useDiscover = [], ResponseTy }; function getEventView( - search: MutableSearch | undefined, + search: MutableSearch | string | undefined, fields: string[] = [], sorts: Sort[] = [], pageFilters: PageFilters, dataset: DiscoverDatasets ) { + const query = typeof search === 'string' ? search : search?.formatString() ?? ''; + const eventView = EventView.fromNewQueryWithPageFilters( { name: '', - query: search?.formatString() ?? 
'', + query, fields, dataset, version: 2, diff --git a/static/app/views/starfish/types.tsx b/static/app/views/starfish/types.tsx index eace04518f2d10..810a49e9f43ff6 100644 --- a/static/app/views/starfish/types.tsx +++ b/static/app/views/starfish/types.tsx @@ -327,6 +327,8 @@ export type MetricsFunctions = (typeof METRICS_FUNCTIONS)[number]; export type MetricsResponse = { [Property in MetricsNumberFields as `${Aggregate}(${Property})`]: number; +} & { + [Property in MetricsStringFields as `${Property}`]: string; }; export type MetricsProperty = keyof MetricsResponse; From 1e1ce62875fff867e5478122179959b3011bb747 Mon Sep 17 00:00:00 2001 From: Riya Chakraborty <47572810+ayirr7@users.noreply.github.com> Date: Thu, 9 May 2024 11:41:01 -0700 Subject: [PATCH 222/376] Bump Kafka schemas to 0.1.81 (#70592) bump schemas to support zstd in gen metrics --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index 132aca053fac6f..0b431dbc82f36f 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -63,7 +63,7 @@ rfc3339-validator>=0.1.2 rfc3986-validator>=0.1.1 # [end] jsonschema format validators sentry-arroyo>=2.16.5 -sentry-kafka-schemas>=0.1.79 +sentry-kafka-schemas>=0.1.81 sentry-ophio==0.2.7 sentry-redis-tools>=0.1.7 sentry-relay>=0.8.60 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index f9f3c08de1d684..5676b0b44002fd 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -180,7 +180,7 @@ sentry-cli==2.16.0 sentry-devenv==1.6.2 sentry-forked-django-stubs==5.0.0.post3 sentry-forked-djangorestframework-stubs==3.15.0.post1 -sentry-kafka-schemas==0.1.79 +sentry-kafka-schemas==0.1.81 sentry-ophio==0.2.7 sentry-redis-tools==0.1.7 sentry-relay==0.8.60 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index a0c3b592bdb6d0..bfb645df01a6da 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -120,7 +120,7 @@ rpds-py==0.15.2 rsa==4.8 s3transfer==0.10.0 sentry-arroyo==2.16.5 -sentry-kafka-schemas==0.1.79 +sentry-kafka-schemas==0.1.81 sentry-ophio==0.2.7 sentry-redis-tools==0.1.7 sentry-relay==0.8.60 From 14327513f7f00ba74758217d6fd89783a9084a0f Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 9 May 2024 11:41:28 -0700 Subject: [PATCH 223/376] ref(grouping): Split hash mismatch check out of `get_grouping_info_from_variants` (#70495) When we use `get_grouping_info_from_variants` in the context of recalculating hashes for the purposes of getting grouping info to show at the bottom of the issue details page, there's a possibility that the grouping config may have changed between the time we ingested the given event and the time we're running the function. To track this, we check for that possibility as we're processing the variant data into grouping info. But soon we will also be using the function during ingest, as it's part of the process of computing the stacktrace string which we send to Seer. In that context there's no possibility the grouping config will have changed "in the meantime" - there is no "meantime," since we're still mid-ingest. It therefore doesn't make any sense to run the check. To account for this, this PR breaks the check out into its own helper, `_check_for_mismatched_hashes`, which can then be called separately, leaving `get_grouping_info_from_variants` to do nothing besides the processing. 
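Reduced to a sketch, the split looks roughly like this (a toy variant object
stands in for the real `BaseVariant`; the actual signatures are in the diff
below):

```python
from typing import Any


class ToyVariant:
    """Stand-in for a grouping variant, for illustration only."""

    def __init__(self, hash_value: str | None) -> None:
        self._hash = hash_value

    def as_dict(self) -> dict[str, Any]:
        return {"hash": self._hash}


def get_grouping_info_from_variants(
    variants: dict[str, ToyVariant],
) -> dict[str, dict[str, Any]]:
    # Pure processing, no hash comparison - safe to call mid-ingest.
    return {key: {"key": key, **variant.as_dict()} for key, variant in variants.items()}


def check_for_mismatched_hashes(
    grouping_info: dict[str, dict[str, Any]], known_hashes: set[str]
) -> None:
    # Only the recalculation path (issue details page) needs this step.
    for variant in grouping_info.values():
        variant["hashMismatch"] = (
            variant["hash"] is not None and variant["hash"] not in known_hashes
        )


info = get_grouping_info_from_variants({"app": ToyVariant("abc123")})
check_for_mismatched_hashes(info, known_hashes={"abc123"})
assert info["app"] == {"key": "app", "hash": "abc123", "hashMismatch": False}
```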
Now during ingest we'll be able to call it and not run the unnecessary check. As a bonus, pulling out the check also allowed the variant-to-grouping-info processing to be simplified all the way down into a simple dictionary comprehension. --- src/sentry/grouping/grouping_info.py | 52 +++++++++++++++++++--------- 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/src/sentry/grouping/grouping_info.py b/src/sentry/grouping/grouping_info.py index bd8ea21b9a8247..637b4d6b000cc6 100644 --- a/src/sentry/grouping/grouping_info.py +++ b/src/sentry/grouping/grouping_info.py @@ -44,29 +44,40 @@ def get_grouping_info( except GroupingConfigNotFound: raise ResourceDoesNotExist(detail="Unknown grouping config") - return get_grouping_info_from_variants( - event, project, variants, hashes.hashes, hashes.hierarchical_hashes + grouping_info = get_grouping_info_from_variants(variants) + + # One place we use this info is in the grouping info section of the event details page, and for + # that we recalculate hashes/variants on the fly since we don't store the variants as part of + # event data. If the grouping config has been changed since the event was ingested, we may get + # different hashes here than the ones stored on the event. + _check_for_mismatched_hashes( + event, project, grouping_info, hashes.hashes, hashes.hierarchical_hashes ) + return grouping_info -def get_grouping_info_from_variants( + +def _check_for_mismatched_hashes( event: Event, project: Project, - variants: dict[str, Any], + grouping_info: dict[str, dict[str, Any]], hashes: list[str], hierarchical_hashes: list[str], -) -> dict[str, dict[str, Any]]: - grouping_info = {} +) -> None: + """ + Given a dictionary of variant data, check each variant's hash value to make sure it is one of + the known values from either `hashes` or `hierarchical_hashes`. + + The result is stored with each variant and recorded as a metric. + """ + + for variant_dict in grouping_info.values(): + hash_value = variant_dict["hash"] - for key, variant in variants.items(): - variant_dict = variant.as_dict() - # Since the hashes are generated on the fly and might no - # longer match the stored ones we indicate if the hash - # generation caused the hash to mismatch. variant_dict["hashMismatch"] = hash_mismatch = ( - variant_dict["hash"] is not None - and variant_dict["hash"] not in hashes - and variant_dict["hash"] not in hierarchical_hashes + hash_value is not None + and hash_value not in hashes + and hash_value not in hierarchical_hashes ) if hash_mismatch: @@ -78,7 +89,14 @@ def get_grouping_info_from_variants( else: metrics.incr("event_grouping_info.hash_match") - variant_dict["key"] = key - grouping_info[key] = variant_dict - return grouping_info +def get_grouping_info_from_variants( + variants: dict[str, BaseVariant], +) -> dict[str, dict[str, Any]]: + """ + Given a dictionary of variant objects, create and return a copy of the dictionary in which each + variant object value has been transformed into an equivalent dictionary value, which knows the + key under which it lives. 
+ """ + + return {key: {"key": key, **variant.as_dict()} for key, variant in variants.items()} From 1976cab287ad39b129c1a6c9f26fc022c28f1ba5 Mon Sep 17 00:00:00 2001 From: Ash <0Calories@users.noreply.github.com> Date: Thu, 9 May 2024 14:41:55 -0400 Subject: [PATCH 224/376] fix(perf): Query tpm() from metrics instead of discover (#70595) the transaction throughput chart on the new span summary page was not querying from the metrics dataset, resulting in incorrect chart data --- .../transactionSpans/spanSummary/spanSummaryCharts.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx index 51374211d7473d..8f107033815d4e 100644 --- a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx +++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/spanSummaryCharts.tsx @@ -6,6 +6,7 @@ import { type DiscoverQueryProps, useGenericDiscoverQuery, } from 'sentry/utils/discover/genericDiscoverQuery'; +import {DiscoverDatasets} from 'sentry/utils/discover/types'; import {formatRate} from 'sentry/utils/formatters'; import {MutableSearch} from 'sentry/utils/tokenizeSearch'; import {useLocation} from 'sentry/utils/useLocation'; @@ -76,6 +77,7 @@ function SpanSummaryCharts() { }).formatString(), fields: [], version: 2, + dataset: DiscoverDatasets.METRICS, }, location ); From ccc988923b0e52f6f4d63b4ae77bc81518f5f73e Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Thu, 9 May 2024 14:52:52 -0400 Subject: [PATCH 225/376] ref: fix typing in two more tests (#70583) --- pyproject.toml | 2 - .../sentry/models/test_organizationmember.py | 3 +- ..._project_replay_recording_segment_index.py | 65 ++++++++----------- 3 files changed, 28 insertions(+), 42 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4e930afa0ff04d..7a6b0bf3b9312d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -532,8 +532,6 @@ module = [ "tests.sentry.ingest.test_slicing", "tests.sentry.integrations.github.test_client", "tests.sentry.issues.test_utils", - "tests.sentry.models.test_organizationmember", - "tests.sentry.replays.test_project_replay_recording_segment_index", "tests.sentry.tasks.test_post_process", ] disable_error_code = [ diff --git a/tests/sentry/models/test_organizationmember.py b/tests/sentry/models/test_organizationmember.py index 232a790693bd0c..ebf14877be92e2 100644 --- a/tests/sentry/models/test_organizationmember.py +++ b/tests/sentry/models/test_organizationmember.py @@ -202,8 +202,7 @@ def test_set_user(self): def test_regenerate_token(self): member = OrganizationMember(organization=self.organization, email="foo@example.com") - assert member.token is None - assert member.token_expires_at is None + assert (member.token, member.token_expires_at) == (None, None) member.regenerate_token() assert member.token diff --git a/tests/sentry/replays/test_project_replay_recording_segment_index.py b/tests/sentry/replays/test_project_replay_recording_segment_index.py index 3cf5d4845b124f..e678591c84c47e 100644 --- a/tests/sentry/replays/test_project_replay_recording_segment_index.py +++ b/tests/sentry/replays/test_project_replay_recording_segment_index.py @@ -13,9 +13,31 @@ Message = namedtuple("Message", ["project_id", "replay_id"]) -class ProjectReplayRecordingSegmentIndexMixin: +# have to use 
TransactionTestCase because we're using threadpools +class FilestoreProjectReplayRecordingSegmentIndexTestCase(TransactionTestCase): endpoint = "sentry-api-0-project-replay-recording-segment-index" + def setUp(self): + super().setUp() + self.login_as(self.user) + self.replay_id = uuid.uuid4().hex + self.url = reverse( + self.endpoint, + args=(self.organization.slug, self.project.slug, self.replay_id), + ) + + def save_recording_segment( + self, segment_id: int, data: bytes, compressed: bool = True, is_archived: bool = False + ) -> None: + metadata = RecordingSegmentStorageMeta( + project_id=self.project.id, + replay_id=self.replay_id, + segment_id=segment_id, + retention_days=30, + file_id=None, + ) + FilestoreBlob().set(metadata, zlib.compress(data) if compressed else data) + def test_index_download_basic_compressed(self): for i in range(0, 3): self.save_recording_segment(i, f'[{{"test":"hello {i}"}}]'.encode()) @@ -80,49 +102,16 @@ def test_index_download_paginate(self): assert b'[[{"test":"hello 1"}],[{"test":"hello 2"}]]' == close_streaming_response(response) -class FilestoreProjectReplayRecordingSegmentIndexTestCase( - ProjectReplayRecordingSegmentIndexMixin, TransactionTestCase -): - # have to use TransactionTestCase because we're using threadpools - - endpoint = "sentry-api-0-project-replay-recording-segment-index" - - def setUp(self): - super().setUp() - self.login_as(self.user) - self.replay_id = uuid.uuid4().hex - self.url = reverse( - self.endpoint, - args=(self.organization.slug, self.project.slug, self.replay_id), - ) - - def save_recording_segment(self, segment_id, data: bytes, compressed: bool = True): - metadata = RecordingSegmentStorageMeta( - project_id=self.project.id, - replay_id=self.replay_id, - segment_id=segment_id, - retention_days=30, - file_id=None, - ) - FilestoreBlob().set(metadata, zlib.compress(data) if compressed else data) - - class StorageProjectReplayRecordingSegmentIndexTestCase( - ProjectReplayRecordingSegmentIndexMixin, APITestCase, ReplaysSnubaTestCase + FilestoreProjectReplayRecordingSegmentIndexTestCase, APITestCase, ReplaysSnubaTestCase ): def setUp(self): super().setUp() - self.login_as(self.user) - self.replay_id = uuid.uuid4().hex - self.url = reverse( - self.endpoint, - args=(self.organization.slug, self.project.slug, self.replay_id), - ) self.features = {"organizations:session-replay": True} def save_recording_segment( - self, segment_id: int, data: bytes, compressed: bool = True, **metadata - ): + self, segment_id: int, data: bytes, compressed: bool = True, is_archived: bool = False + ) -> None: # Insert the row in clickhouse. self.store_replays( mock_replay( @@ -131,7 +120,7 @@ def save_recording_segment( self.replay_id, segment_id=segment_id, retention_days=30, - **metadata, + is_archived=is_archived, ) ) From 0545798965b84594913b313101950dada02df181 Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Thu, 9 May 2024 14:54:37 -0400 Subject: [PATCH 226/376] feat(performance): Update queues module to use span.duration and fixes some minor chart issues (#70522) - We want to display `span.duration` instead of `span.self_time` for queue spans, because we're interested in the duration of any potential child spans during a consumer/producer task. Updates all instances of `span.self_time` to `span.duration` - Also includes some minor fixes for charts, such as correcting avg mark line and tooltip + axis formatting. 
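For context on the first point: self time is exclusive (a span's duration
minus the time covered by its child spans), while duration is inclusive. A toy
illustration with made-up numbers, assuming non-overlapping children:

```python
# A queue.process span that spends most of its time in child spans,
# e.g. a db.query span and an http.client span. Numbers are invented.
consumer_duration_ms = 500.0         # inclusive: the whole task
child_durations_ms = [320.0, 150.0]  # time attributed to child spans

# Exclusive time: only the moments where the consumer span itself was doing
# work (valid as written only when the children do not overlap).
consumer_self_time_ms = consumer_duration_ms - sum(child_durations_ms)

assert consumer_self_time_ms == 30.0  # self_time hides 94% of the task's cost
```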
--- .../queues/charts/latencyChart.spec.tsx | 4 +-- .../queues/charts/latencyChart.tsx | 13 ++++------ .../queues/charts/throughputChart.spec.tsx | 4 +-- .../queues/charts/throughputChart.tsx | 10 +++---- .../destinationSummaryPage.spec.tsx | 1 - .../destinationSummaryPage.tsx | 9 +++---- .../transactionsTable.spec.tsx | 24 ++++++++--------- .../destinationSummary/transactionsTable.tsx | 13 +++++----- .../messageConsumerSamplesPanel.spec.tsx | 12 ++++----- .../queues/messageConsumerSamplesPanel.tsx | 26 ++++++++++--------- .../queues/messageSpanSamplesTable.spec.tsx | 2 +- .../queues/messageSpanSamplesTable.tsx | 8 +++--- .../queries/useQueuesByDestinationQuery.tsx | 8 +++--- .../queries/useQueuesByTransactionQuery.tsx | 8 +++--- .../queues/queries/useQueuesMetricsQuery.tsx | 8 +++--- .../queries/useQueuesTimeSeriesQuery.tsx | 4 +-- .../performance/queues/queuesTable.spec.tsx | 24 ++++++++--------- .../views/performance/queues/queuesTable.tsx | 11 ++++---- .../app/views/performance/queues/settings.ts | 1 - .../components/tableCells/renderHeadCell.tsx | 1 + 20 files changed, 90 insertions(+), 101 deletions(-) diff --git a/static/app/views/performance/queues/charts/latencyChart.spec.tsx b/static/app/views/performance/queues/charts/latencyChart.spec.tsx index c9e269eb845714..ec92a61924e8b4 100644 --- a/static/app/views/performance/queues/charts/latencyChart.spec.tsx +++ b/static/app/views/performance/queues/charts/latencyChart.spec.tsx @@ -30,8 +30,8 @@ describe('latencyChart', () => { expect.objectContaining({ query: expect.objectContaining({ yAxis: [ - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', 'count_op(queue.publish)', 'count_op(queue.process)', diff --git a/static/app/views/performance/queues/charts/latencyChart.tsx b/static/app/views/performance/queues/charts/latencyChart.tsx index ae84d72e54ba6f..edc9f616922518 100644 --- a/static/app/views/performance/queues/charts/latencyChart.tsx +++ b/static/app/views/performance/queues/charts/latencyChart.tsx @@ -1,9 +1,7 @@ -import {DEFAULT_RELATIVE_PERIODS} from 'sentry/constants'; import {CHART_PALETTE} from 'sentry/constants/chartPalette'; import {t} from 'sentry/locale'; import {decodeScalar} from 'sentry/utils/queryString'; import {useLocation} from 'sentry/utils/useLocation'; -import usePageFilters from 'sentry/utils/usePageFilters'; import {CHART_HEIGHT} from 'sentry/views/performance/database/settings'; import {useQueuesTimeSeriesQuery} from 'sentry/views/performance/queues/queries/useQueuesTimeSeriesQuery'; import Chart, {ChartType} from 'sentry/views/starfish/components/chart'; @@ -16,18 +14,15 @@ interface Props { export function LatencyChart({error}: Props) { const {query} = useLocation(); const destination = decodeScalar(query.destination); - const pageFilters = usePageFilters(); - const period = pageFilters.selection.datetime.period; - const chartSubtext = (period && DEFAULT_RELATIVE_PERIODS[period]) ?? 
''; const {data, isLoading} = useQueuesTimeSeriesQuery({destination}); return ( - + ); diff --git a/static/app/views/performance/queues/charts/throughputChart.spec.tsx b/static/app/views/performance/queues/charts/throughputChart.spec.tsx index f54863ef245eb0..9999329330b165 100644 --- a/static/app/views/performance/queues/charts/throughputChart.spec.tsx +++ b/static/app/views/performance/queues/charts/throughputChart.spec.tsx @@ -30,8 +30,8 @@ describe('throughputChart', () => { expect.objectContaining({ query: expect.objectContaining({ yAxis: [ - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', 'count_op(queue.publish)', 'count_op(queue.process)', diff --git a/static/app/views/performance/queues/charts/throughputChart.tsx b/static/app/views/performance/queues/charts/throughputChart.tsx index 649a1fc6309780..06fb151da75f50 100644 --- a/static/app/views/performance/queues/charts/throughputChart.tsx +++ b/static/app/views/performance/queues/charts/throughputChart.tsx @@ -1,9 +1,7 @@ -import {DEFAULT_RELATIVE_PERIODS} from 'sentry/constants'; import {CHART_PALETTE} from 'sentry/constants/chartPalette'; import {t} from 'sentry/locale'; import {decodeScalar} from 'sentry/utils/queryString'; import {useLocation} from 'sentry/utils/useLocation'; -import usePageFilters from 'sentry/utils/usePageFilters'; import {CHART_HEIGHT} from 'sentry/views/performance/database/settings'; import {useQueuesTimeSeriesQuery} from 'sentry/views/performance/queues/queries/useQueuesTimeSeriesQuery'; import Chart, {ChartType} from 'sentry/views/starfish/components/chart'; @@ -16,18 +14,15 @@ interface Props { export function ThroughputChart({error}: Props) { const {query} = useLocation(); const destination = decodeScalar(query.destination); - const pageFilters = usePageFilters(); - const period = pageFilters.selection.datetime.period; - const chartSubtext = (period && DEFAULT_RELATIVE_PERIODS[period]) ?? 
''; const {data, isLoading} = useQueuesTimeSeriesQuery({destination}); return ( - + ); diff --git a/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.spec.tsx b/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.spec.tsx index e73ea1f7c84b35..210ffc4e6b137b 100644 --- a/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.spec.tsx +++ b/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.spec.tsx @@ -75,7 +75,6 @@ describe('destinationSummaryPage', () => { render(); await screen.findByRole('table', {name: 'Transactions'}); await waitForElementToBeRemoved(() => screen.queryAllByTestId('loading-indicator')); - screen.getByPlaceholderText('Search for events, users, tags, and more'); screen.getByText('Avg Latency'); screen.getByText('Published vs Processed'); expect(eventsStatsMock).toHaveBeenCalled(); diff --git a/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.tsx b/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.tsx index 6125f1a4a0783f..999e0dfcfba808 100644 --- a/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.tsx +++ b/static/app/views/performance/queues/destinationSummary/destinationSummaryPage.tsx @@ -10,7 +10,6 @@ import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter'; -import SmartSearchBar from 'sentry/components/smartSearchBar'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {DurationUnit} from 'sentry/utils/discover/fields'; @@ -98,8 +97,8 @@ function DestinationSummaryPage() { isLoading={false} /> @@ -123,7 +122,7 @@ function DestinationSummaryPage() { /> @@ -148,8 +147,6 @@ function DestinationSummaryPage() { - {/* TODO: Make search bar work */} - diff --git a/static/app/views/performance/queues/destinationSummary/transactionsTable.spec.tsx b/static/app/views/performance/queues/destinationSummary/transactionsTable.spec.tsx index 6015d1d9d60ce1..e691d162f80b32 100644 --- a/static/app/views/performance/queues/destinationSummary/transactionsTable.spec.tsx +++ b/static/app/views/performance/queues/destinationSummary/transactionsTable.spec.tsx @@ -30,10 +30,10 @@ describe('transactionsTable', () => { 'count()': 2, 'count_op(queue.publish)': 0, 'count_op(queue.process)': 2, - 'sum(span.self_time)': 6, - 'avg(span.self_time)': 3, - 'avg_if(span.self_time,span.op,queue.publish)': 0, - 'avg_if(span.self_time,span.op,queue.process)': 3, + 'sum(span.duration)': 6, + 'avg(span.duration)': 3, + 'avg_if(span.duration,span.op,queue.publish)': 0, + 'avg_if(span.duration,span.op,queue.process)': 3, 'avg(messaging.message.receive.latency)': 20, }, ], @@ -42,10 +42,10 @@ describe('transactionsTable', () => { 'count()': 'integer', 'count_op(queue.publish)': 'integer', 'count_op(queue.process)': 'integer', - 'sum(span.self_time)': 'duration', - 'avg(span.self_time)': 'duration', - 'avg_if(span.self_time,span.op,queue.publish)': 'duration', - 'avg_if(span.self_time,span.op,queue.process)': 'duration', + 'sum(span.duration)': 'duration', + 'avg(span.duration)': 'duration', + 'avg_if(span.duration,span.op,queue.publish)': 'duration', + 'avg_if(span.duration,span.op,queue.process)': 'duration', 
'avg(messaging.message.receive.latency)': 'duration', }, }, @@ -79,10 +79,10 @@ describe('transactionsTable', () => { 'count()', 'count_op(queue.publish)', 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'sum(span.duration)', + 'avg(span.duration)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', ], dataset: 'spansMetrics', diff --git a/static/app/views/performance/queues/destinationSummary/transactionsTable.tsx b/static/app/views/performance/queues/destinationSummary/transactionsTable.tsx index 59de50ae587b5d..975855912b48ed 100644 --- a/static/app/views/performance/queues/destinationSummary/transactionsTable.tsx +++ b/static/app/views/performance/queues/destinationSummary/transactionsTable.tsx @@ -26,11 +26,10 @@ import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; type Row = Pick< SpanMetricsResponse, - | 'avg_if(span.self_time,span.op,queue.process)' - | 'count_op(queue.publish)' - | 'count_op(queue.process)' - | 'sum(span.self_time)' + | 'sum(span.duration)' | 'transaction' + | `avg_if(${string},${string},${string})` + | `count_op(${string})` >; type Column = GridColumnHeader; @@ -52,7 +51,7 @@ const COLUMN_ORDER: Column[] = [ width: COL_WIDTH_UNDEFINED, }, { - key: 'avg_if(span.self_time,span.op,queue.process)', + key: 'avg_if(span.duration,span.op,queue.process)', name: t('Avg Processing Time'), width: COL_WIDTH_UNDEFINED, }, @@ -72,7 +71,7 @@ const COLUMN_ORDER: Column[] = [ width: COL_WIDTH_UNDEFINED, }, { - key: 'sum(span.self_time)', + key: 'sum(span.duration)', name: t('Time Spent'), width: COL_WIDTH_UNDEFINED, }, @@ -138,7 +137,7 @@ function renderBodyCell( [ 'count_op(queue.process)', 'avg(messaging.message.receive.latency)', - 'avg_if(span.self_time,span.op,queue.process)', + 'avg_if(span.duration,span.op,queue.process)', ].includes(key)) ) { return ( diff --git a/static/app/views/performance/queues/messageConsumerSamplesPanel.spec.tsx b/static/app/views/performance/queues/messageConsumerSamplesPanel.spec.tsx index f1f47131013d59..7b89d7748180e9 100644 --- a/static/app/views/performance/queues/messageConsumerSamplesPanel.spec.tsx +++ b/static/app/views/performance/queues/messageConsumerSamplesPanel.spec.tsx @@ -74,7 +74,7 @@ describe('messageConsumerSamplesPanel', () => { trace: 'abc', project: 'project', timestamp: '2024-03-25T20:31:36+00:00', - 'span.self_time': 320.300102, + 'span.duration': 320.300102, }, ], }, @@ -100,10 +100,10 @@ describe('messageConsumerSamplesPanel', () => { 'count()', 'count_op(queue.publish)', 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'sum(span.duration)', + 'avg(span.duration)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', ], per_page: 10, @@ -126,7 +126,7 @@ describe('messageConsumerSamplesPanel', () => { 'measurements.messaging.message.receive.latency', 'messaging.message.id', 'trace.status', - 'span.self_time', + 'span.duration', ], firstBound: 2666.6666666666665, lowerBound: 0, diff --git a/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx b/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx index b3f2bc6e40f1f6..f96e4ebabdbe9e 100644 
--- a/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx +++ b/static/app/views/performance/queues/messageConsumerSamplesPanel.tsx @@ -59,7 +59,6 @@ export function MessageConsumerSamplesPanel() { const isPanelOpen = Boolean(detailKey); - // TODO: This should also filter on destination const search = new MutableSearch(DEFAULT_QUERY_FILTER); search.addFilterValue('transaction', query.transaction); search.addFilterValue('messaging.destination.name', query.destination); @@ -71,6 +70,8 @@ export function MessageConsumerSamplesPanel() { enabled: isPanelOpen, }); + const avg = transactionMetrics?.[0]?.['avg(span.duration)']; + const { isFetching: isDurationDataFetching, data: durationData, @@ -78,13 +79,13 @@ export function MessageConsumerSamplesPanel() { } = useSpanMetricsSeries( { search, - yAxis: [`avg(span.self_time)`], + yAxis: [`avg(span.duration)`], enabled: isPanelOpen, }, 'api.performance.queues.avg-duration-chart' ); - const durationAxisMax = computeAxisMax([durationData?.[`avg(span.self_time)`]]); + const durationAxisMax = computeAxisMax([durationData?.[`avg(span.duration)`]]); const { data: durationSamplesData, @@ -104,19 +105,20 @@ export function MessageConsumerSamplesPanel() { SpanIndexedField.MESSAGING_MESSAGE_RECEIVE_LATENCY, SpanIndexedField.MESSAGING_MESSAGE_ID, SpanIndexedField.TRACE_STATUS, - SpanIndexedField.SPAN_SELF_TIME, + SpanIndexedField.SPAN_DURATION, ], }); const sampledSpanDataSeries = useSampleScatterPlotSeries( durationSamplesData, - transactionMetrics?.[0]?.['avg(span.self_time)'], - highlightedSpanId + transactionMetrics?.[0]?.['avg(span.duration)'], + highlightedSpanId, + 'span.duration' ); const findSampleFromDataPoint = (dataPoint: {name: string | number; value: number}) => { return durationSamplesData.find( - s => s.timestamp === dataPoint.name && s['span.self_time'] === dataPoint.value + s => s.timestamp === dataPoint.name && s['span.duration'] === dataPoint.value ); }; @@ -190,7 +192,7 @@ export function MessageConsumerSamplesPanel() { { expect(screen.getByRole('columnheader', {name: 'Span ID'})).toBeInTheDocument(); expect(screen.getByRole('columnheader', {name: 'Message ID'})).toBeInTheDocument(); expect( - screen.getByRole('columnheader', {name: 'Processing Latency'}) + screen.getByRole('columnheader', {name: 'Processing Time'}) ).toBeInTheDocument(); expect(screen.getByRole('columnheader', {name: 'Message Size'})).toBeInTheDocument(); expect(screen.getByRole('columnheader', {name: 'Status'})).toBeInTheDocument(); diff --git a/static/app/views/performance/queues/messageSpanSamplesTable.tsx b/static/app/views/performance/queues/messageSpanSamplesTable.tsx index 67d61cd27489df..645e3c1868310c 100644 --- a/static/app/views/performance/queues/messageSpanSamplesTable.tsx +++ b/static/app/views/performance/queues/messageSpanSamplesTable.tsx @@ -28,14 +28,14 @@ type DataRowKeys = | SpanIndexedField.MESSAGING_MESSAGE_RECEIVE_LATENCY | SpanIndexedField.MESSAGING_MESSAGE_ID | SpanIndexedField.TRACE_STATUS - | SpanIndexedField.SPAN_SELF_TIME; + | SpanIndexedField.SPAN_DURATION; type ColumnKeys = | SpanIndexedField.ID | SpanIndexedField.MESSAGING_MESSAGE_ID | SpanIndexedField.MESSAGING_MESSAGE_BODY_SIZE | SpanIndexedField.TRACE_STATUS - | SpanIndexedField.SPAN_SELF_TIME; + | SpanIndexedField.SPAN_DURATION; type DataRow = Pick; @@ -53,8 +53,8 @@ const COLUMN_ORDER: Column[] = [ width: COL_WIDTH_UNDEFINED, }, { - key: SpanIndexedField.SPAN_SELF_TIME, - name: t('Processing Latency'), + key: SpanIndexedField.SPAN_DURATION, + name: t('Processing 
Time'), width: COL_WIDTH_UNDEFINED, }, { diff --git a/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx b/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx index 129550de620367..c7933f437920f3 100644 --- a/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesByDestinationQuery.tsx @@ -22,10 +22,10 @@ export function useQueuesByDestinationQuery({enabled}: Props) { 'count()', 'count_op(queue.publish)', 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'sum(span.duration)', + 'avg(span.duration)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', ], enabled, diff --git a/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx b/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx index 015316a12f2623..f065c29b157128 100644 --- a/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesByTransactionQuery.tsx @@ -27,10 +27,10 @@ export function useQueuesByTransactionQuery({destination, enabled}: Props) { 'count()', 'count_op(queue.publish)', 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'sum(span.duration)', + 'avg(span.duration)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', ], enabled, diff --git a/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx b/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx index db076c78162580..5bd7dc0ccfca7e 100644 --- a/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesMetricsQuery.tsx @@ -23,10 +23,10 @@ export function useQueuesMetricsQuery({destination, transaction, enabled}: Props 'count()', 'count_op(queue.publish)', 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'sum(span.duration)', + 'avg(span.duration)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', ], enabled, diff --git a/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx b/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx index e4ba1229716f15..7f014af69ae4c5 100644 --- a/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx +++ b/static/app/views/performance/queues/queries/useQueuesTimeSeriesQuery.tsx @@ -8,8 +8,8 @@ type Props = { }; const yAxis: SpanMetricsProperty[] = [ - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', 'count_op(queue.publish)', 'count_op(queue.process)', diff --git a/static/app/views/performance/queues/queuesTable.spec.tsx b/static/app/views/performance/queues/queuesTable.spec.tsx index 
d0f26906fd99e0..bc2062b5d08a65 100644 --- a/static/app/views/performance/queues/queuesTable.spec.tsx +++ b/static/app/views/performance/queues/queuesTable.spec.tsx @@ -29,10 +29,10 @@ describe('queuesTable', () => { 'count()': 2, 'count_op(queue.publish)': 0, 'count_op(queue.process)': 2, - 'sum(span.self_time)': 6, - 'avg(span.self_time)': 3, - 'avg_if(span.self_time,span.op,queue.publish)': 0, - 'avg_if(span.self_time,span.op,queue.process)': 3, + 'sum(span.duration)': 6, + 'avg(span.duration)': 3, + 'avg_if(span.duration,span.op,queue.publish)': 0, + 'avg_if(span.duration,span.op,queue.process)': 3, 'avg(messaging.message.receive.latency)': 20, }, ], @@ -41,10 +41,10 @@ describe('queuesTable', () => { 'count()': 'integer', 'count_op(queue.publish)': 'integer', 'count_op(queue.process)': 'integer', - 'sum(span.self_time)': 'duration', - 'avg(span.self_time)': 'duration', - 'avg_if(span.self_time,span.op,queue.publish)': 'duration', - 'avg_if(span.self_time,span.op,queue.process)': 'duration', + 'sum(span.duration)': 'duration', + 'avg(span.duration)': 'duration', + 'avg_if(span.duration,span.op,queue.publish)': 'duration', + 'avg_if(span.duration,span.op,queue.process)': 'duration', 'avg(messaging.message.receive.latency)': 'duration', }, }, @@ -74,10 +74,10 @@ describe('queuesTable', () => { 'count()', 'count_op(queue.publish)', 'count_op(queue.process)', - 'sum(span.self_time)', - 'avg(span.self_time)', - 'avg_if(span.self_time,span.op,queue.publish)', - 'avg_if(span.self_time,span.op,queue.process)', + 'sum(span.duration)', + 'avg(span.duration)', + 'avg_if(span.duration,span.op,queue.publish)', + 'avg_if(span.duration,span.op,queue.process)', 'avg(messaging.message.receive.latency)', ], dataset: 'spansMetrics', diff --git a/static/app/views/performance/queues/queuesTable.tsx b/static/app/views/performance/queues/queuesTable.tsx index f337c2d7bfe06f..8cf64dd6f3694e 100644 --- a/static/app/views/performance/queues/queuesTable.tsx +++ b/static/app/views/performance/queues/queuesTable.tsx @@ -26,12 +26,11 @@ import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; type Row = Pick< SpanMetricsResponse, - | 'avg_if(span.self_time,span.op,queue.process)' - | 'count_op(queue.publish)' - | 'count_op(queue.process)' - | 'sum(span.self_time)' + | 'sum(span.duration)' | 'messaging.destination.name' | 'avg(messaging.message.receive.latency)' + | `avg_if(${string},${string},${string})` + | `count_op(${string})` >; type Column = GridColumnHeader; @@ -48,7 +47,7 @@ const COLUMN_ORDER: Column[] = [ width: COL_WIDTH_UNDEFINED, }, { - key: 'avg_if(span.self_time,span.op,queue.process)', + key: 'avg_if(span.duration,span.op,queue.process)', name: t('Avg Processing Time'), width: COL_WIDTH_UNDEFINED, }, @@ -68,7 +67,7 @@ const COLUMN_ORDER: Column[] = [ width: COL_WIDTH_UNDEFINED, }, { - key: 'sum(span.self_time)', + key: 'sum(span.duration)', name: t('Time Spent'), width: COL_WIDTH_UNDEFINED, }, diff --git a/static/app/views/performance/queues/settings.ts b/static/app/views/performance/queues/settings.ts index 4327330b9feb57..bb09ec98b3db1c 100644 --- a/static/app/views/performance/queues/settings.ts +++ b/static/app/views/performance/queues/settings.ts @@ -13,5 +13,4 @@ export const releaseLevelAsBadgeProps = { isNew: (RELEASE_LEVEL as BadgeType) === 'new', }; -// TODO: Currently this only filters to celery tasks. Add or genericize to include other queue/messaging tasks when available. 
export const DEFAULT_QUERY_FILTER = 'span.op:[queue.process,queue.publish]';
diff --git a/static/app/views/starfish/components/tableCells/renderHeadCell.tsx b/static/app/views/starfish/components/tableCells/renderHeadCell.tsx
index 7046ee1b847302..d0566f6bf33b97 100644
--- a/static/app/views/starfish/components/tableCells/renderHeadCell.tsx
+++ b/static/app/views/starfish/components/tableCells/renderHeadCell.tsx
@@ -63,6 +63,7 @@ const NUMERIC_FIELDS = new Set([
   SpanMetricsField.CACHE_ITEM_SIZE,
   SpanIndexedField.RESPONSE_CODE,
   SpanIndexedField.SPAN_SELF_TIME,
+  SpanIndexedField.SPAN_DURATION,
   SpanIndexedField.CACHE_ITEM_SIZE,
   SpanIndexedField.MESSAGING_MESSAGE_BODY_SIZE,
 ]);

From f5e9c6d1a5eb89cf7b6caff11e0aed7cd86d3090 Mon Sep 17 00:00:00 2001
From: Dan Fuller <dfuller@sentry.io>
Date: Thu, 9 May 2024 11:56:30 -0700
Subject: [PATCH 227/376] fix(crons): Fix connection spikes during partition
 assignment (#70585)

`on_partitions_assigned` is called whenever partitions are assigned to a
consumer. This calls `_create_strategy`, which calls
`create_with_partitions`. Since we create the `ThreadPoolExecutor` in
`create_parallel_worker`, this means we create a new threadpool whenever
partition assignment changes.

The reason this causes spikes rather than sustained growth is that,
presumably, the previous `ProcessingStrategy` stops being used and ends up
garbage collected, which results in the `ThreadPoolExecutor` being garbage
collected too.
---
 src/sentry/monitors/consumers/monitor_consumer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py
index a8be16132bcdd7..576ad00e6f1cd9 100644
--- a/src/sentry/monitors/consumers/monitor_consumer.py
+++ b/src/sentry/monitors/consumers/monitor_consumer.py
@@ -1028,6 +1028,7 @@ def __init__(
     ) -> None:
         if mode == "parallel":
             self.parallel = True
+            self.parallel_executor = ThreadPoolExecutor(max_workers=self.max_workers)

             if max_batch_size is not None:
                 self.max_batch_size = max_batch_size
@@ -1041,8 +1042,7 @@ def shutdown(self) -> None:
             self.parallel_executor.shutdown()

     def create_parallel_worker(self, commit: Commit) -> ProcessingStrategy[KafkaPayload]:
-        self.parallel_executor = ThreadPoolExecutor(max_workers=self.max_workers)
-
+        assert self.parallel_executor is not None
         batch_processor = RunTask(
             function=partial(process_batch, self.parallel_executor),
             next_step=CommitOffsets(commit),

From 593978bc4b6089d266c2bd09f882a4dddd5aed83 Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Thu, 9 May 2024 14:58:21 -0400
Subject: [PATCH 228/376] ref(cache): update transaction title, update chart
 tooltips, remove legend (#70599)

1. Update `txn` titles to `transaction`
2. Add a more human-readable tooltip on the cache miss rate chart and the
   transaction duration chart
3. Remove the legend on the cache miss chart, as it only has one series.
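On point 2, the miss rate series is a unitless fraction, so a readable tooltip
has to format it as a percentage. A rough sketch of the formatting intent (a
hypothetical helper in Python; the actual charts use the shared TypeScript
formatting utilities):

```python
def format_miss_rate(misses: int, hits: int) -> str:
    """Render a cache miss rate the way a chart tooltip should show it."""
    total = hits + misses
    rate = misses / total if total else 0.0
    return f"{rate:.2%}"  # e.g. '40.00%' rather than the raw 0.4


assert format_miss_rate(misses=4, hits=6) == "40.00%"
```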
--- .../app/views/performance/cache/cacheLandingPage.tsx | 6 +++++- .../views/performance/cache/charts/hitMissChart.tsx | 1 - .../samplePanel/charts/transactionDurationChart.tsx | 11 +++++++++-- static/app/views/starfish/views/spans/types.tsx | 6 +++--- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/static/app/views/performance/cache/cacheLandingPage.tsx b/static/app/views/performance/cache/cacheLandingPage.tsx index 23f6ff24311db5..d05411fe2bf5ef 100644 --- a/static/app/views/performance/cache/cacheLandingPage.tsx +++ b/static/app/views/performance/cache/cacheLandingPage.tsx @@ -37,6 +37,7 @@ import {useMetrics, useSpanMetrics} from 'sentry/views/starfish/queries/useDisco import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useDiscoverSeries'; import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; +import {DataTitles} from 'sentry/views/starfish/views/spans/types'; const {CACHE_MISS_RATE} = SpanFunction; const {CACHE_ITEM_SIZE} = SpanMetricsField; @@ -168,7 +169,10 @@ export function CacheLandingPage() { diff --git a/static/app/views/performance/cache/charts/hitMissChart.tsx b/static/app/views/performance/cache/charts/hitMissChart.tsx index 182d9d5fa9be97..c051e232b55e19 100644 --- a/static/app/views/performance/cache/charts/hitMissChart.tsx +++ b/static/app/views/performance/cache/charts/hitMissChart.tsx @@ -16,7 +16,6 @@ export function CacheHitMissChart({series, isLoading, error}: Props) { return ( + = { change: t('Change'), @@ -45,12 +45,12 @@ export const DataTitles: Record = { 'avg(http.response_content_length)': t('Avg Encoded Size'), 'avg(http.decoded_response_content_length)': t('Avg Decoded Size'), 'avg(http.response_transfer_size)': t('Avg Transfer Size'), - 'avg(transaction.duration)': t('Avg Txn Duration'), + 'avg(transaction.duration)': t('Avg Transaction Duration'), 'avg(cache.item_size)': t('Avg Value Size'), unsuccessfulHTTPCodes: t('Response Codes (3XX, 4XX, 5XX)'), httpCodeBreakdown: t('Response Code Breakdown'), cacheMissRate: t('Miss Rate'), - transactionDuration: t('Transaction Duration'), + 'transaction.duration': t('Transaction Duration'), }; export const getThroughputTitle = ( From ee2d877d9b11e1dcbf1715ed69db958afe0a0b9f Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Thu, 9 May 2024 12:01:54 -0700 Subject: [PATCH 229/376] feat(api-idorslug): Updated Subset of Orgmember and Integration Endpoints to use `organization_id_or_slug` (#70567) A subset of changes from https://github.com/getsentry/sentry/pull/70081! 
--------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- pyproject.toml | 5 -- src/sentry/api/bases/organization.py | 60 +++++++++++++------ .../api/bases/organization_integrations.py | 4 +- src/sentry/api/bases/organizationmember.py | 4 +- src/sentry/api/bases/project.py | 27 ++++++--- .../endpoints/accept_organization_invite.py | 14 +++-- .../api/endpoints/codeowners/details.py | 4 +- .../codeowners/external_actor/user_details.py | 2 +- .../endpoints/organization_event_details.py | 6 +- .../endpoints/organization_member/details.py | 6 +- .../organization_member/team_details.py | 12 ++-- .../api/endpoints/project_team_details.py | 10 ++-- src/sentry/api/urls.py | 26 ++++---- src/sentry/api/utils.py | 10 ++-- src/sentry/types/region.py | 2 +- .../test_project_codeowners_details.py | 6 +- .../test_organization_event_details.py | 26 ++++---- 17 files changed, 128 insertions(+), 96 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7a6b0bf3b9312d..fafd760e399ba8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -120,14 +120,12 @@ ignore_missing_imports = true module = [ "sentry.api.base", "sentry.api.bases.external_actor", - "sentry.api.bases.incident", "sentry.api.bases.integration", "sentry.api.bases.organization_events", "sentry.api.bases.organization_request_change", "sentry.api.bases.organizationmember", "sentry.api.bases.project", "sentry.api.bases.project_request_change", - "sentry.api.bases.rule", "sentry.api.bases.sentryapps", "sentry.api.bases.team", "sentry.api.endpoints.accept_organization_invite", @@ -152,11 +150,8 @@ module = [ "sentry.api.endpoints.integrations.sentry_apps.requests", "sentry.api.endpoints.integrations.sentry_apps.stats.details", "sentry.api.endpoints.internal.mail", - "sentry.api.endpoints.notifications.notification_actions_details", "sentry.api.endpoints.organization_code_mapping_codeowners", - "sentry.api.endpoints.organization_code_mapping_details", "sentry.api.endpoints.organization_code_mappings", - "sentry.api.endpoints.organization_dashboard_details", "sentry.api.endpoints.organization_details", "sentry.api.endpoints.organization_events", "sentry.api.endpoints.organization_events_facets", diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index 1c9d89096d83a9..bec1b80db75aaf 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -242,30 +242,42 @@ class ControlSiloOrganizationEndpoint(Endpoint): def convert_args( self, request: Request, - organization_slug: str | int | None = None, *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: + organization_id_or_slug: int | str | None = None + if args and args[0] is not None: + organization_id_or_slug = args[0] + # Required so it behaves like the original convert_args, where organization_id_or_slug was another parameter + # TODO: Remove this once we remove the old `organization_slug` parameter from getsentry + args = args[1:] + else: + organization_id_or_slug = kwargs.pop("organization_id_or_slug", None) or kwargs.pop( + "organization_slug", None + ) + + if not organization_id_or_slug: + raise ResourceDoesNotExist + if not subdomain_is_region(request): subdomain = getattr(request, "subdomain", None) - if subdomain is not None and subdomain != organization_slug: + if subdomain is not None and subdomain != organization_id_or_slug: raise ResourceDoesNotExist - if not organization_slug: - raise ResourceDoesNotExist - if ( - 
id_or_slug_path_params_enabled(self.convert_args.__qualname__, str(organization_slug)) - and str(organization_slug).isdecimal() + id_or_slug_path_params_enabled( + self.convert_args.__qualname__, str(organization_id_or_slug) + ) + and str(organization_id_or_slug).isdecimal() ): # It is ok that `get_organization_by_id` doesn't check for visibility as we # don't check the visibility in `get_organization_by_slug` either (only_active=False). organization_context = organization_service.get_organization_by_id( - id=int(organization_slug), user_id=request.user.id + id=int(organization_id_or_slug), user_id=request.user.id ) else: organization_context = organization_service.get_organization_by_slug( - slug=str(organization_slug), only_visible=False, user_id=request.user.id + slug=str(organization_id_or_slug), only_visible=False, user_id=request.user.id ) if organization_context is None: raise ResourceDoesNotExist @@ -536,32 +548,42 @@ def get_filter_params( def convert_args( self, request: Request, - organization_slug: str | int | None = None, *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: """ - We temporarily allow the organization_slug to be an integer as it actually can be both slug or id + We temporarily allow the organization_id_or_slug to be an integer as it actually can be both slug or id Eventually, we will rename this method to organization_id_or_slug """ + organization_id_or_slug: int | str | None = None + if args and args[0] is not None: + organization_id_or_slug = args[0] + # Required so it behaves like the original convert_args, where organization_id_or_slug was another parameter + # TODO: Remove this once we remove the old `organization_slug` parameter from getsentry + args = args[1:] + else: + organization_id_or_slug = kwargs.pop("organization_id_or_slug", None) or kwargs.pop( + "organization_slug", None + ) + + if not organization_id_or_slug: + raise ResourceDoesNotExist + if not subdomain_is_region(request): subdomain = getattr(request, "subdomain", None) - if subdomain is not None and subdomain != organization_slug: + if subdomain is not None and subdomain != organization_id_or_slug: raise ResourceDoesNotExist - if not organization_slug: - raise ResourceDoesNotExist - try: if ( id_or_slug_path_params_enabled( - self.convert_args.__qualname__, str(organization_slug) + self.convert_args.__qualname__, str(organization_id_or_slug) ) - and str(organization_slug).isdecimal() + and str(organization_id_or_slug).isdecimal() ): - organization = Organization.objects.get_from_cache(id=organization_slug) + organization = Organization.objects.get_from_cache(id=organization_id_or_slug) else: - organization = Organization.objects.get_from_cache(slug=organization_slug) + organization = Organization.objects.get_from_cache(slug=organization_id_or_slug) except Organization.DoesNotExist: raise ResourceDoesNotExist diff --git a/src/sentry/api/bases/organization_integrations.py b/src/sentry/api/bases/organization_integrations.py index fbdbc6cc1d60d6..f6b5be3bfa6cac 100644 --- a/src/sentry/api/bases/organization_integrations.py +++ b/src/sentry/api/bases/organization_integrations.py @@ -75,12 +75,12 @@ class RegionOrganizationIntegrationBaseEndpoint(RegionIntegrationEndpoint): def convert_args( self, request: Request, - organization_slug: str | int | None = None, + organization_id_or_slug: int | str | None = None, integration_id: str | None = None, *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: - args, kwargs = super().convert_args(request, 
organization_slug, *args, **kwargs) + args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs) kwargs["integration_id"] = self.validate_integration_id(integration_id or "") return args, kwargs diff --git a/src/sentry/api/bases/organizationmember.py b/src/sentry/api/bases/organizationmember.py index 497b7709055b1e..b03d38ab3d9905 100644 --- a/src/sentry/api/bases/organizationmember.py +++ b/src/sentry/api/bases/organizationmember.py @@ -53,12 +53,12 @@ class OrganizationMemberEndpoint(OrganizationEndpoint): def convert_args( self, request: Request, - organization_slug: int | str | None = None, + organization_id_or_slug: str | int | None = None, member_id: str = "me", *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: - args, kwargs = super().convert_args(request, organization_slug, *args, **kwargs) + args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs) serializer = MemberSerializer(data={"id": member_id}) if serializer.is_valid(): diff --git a/src/sentry/api/bases/project.py b/src/sentry/api/bases/project.py index fc39f7554c5a83..74d13260f62738 100644 --- a/src/sentry/api/bases/project.py +++ b/src/sentry/api/bases/project.py @@ -111,10 +111,19 @@ class ProjectEndpoint(Endpoint): def convert_args( self, request: Request, - organization_slug: str | int, *args, **kwargs, ): + if args and args[0] is not None: + organization_id_or_slug: int | str = args[0] + # Required so it behaves like the original convert_args, where organization_id_or_slug was another parameter + # TODO: Remove this once we remove the old `organization_slug` parameter from getsentry + args = args[1:] + else: + organization_id_or_slug = kwargs.pop("organization_id_or_slug", None) or kwargs.pop( + "organization_slug" + ) + if args and args[0] is not None: project_id_or_slug: int | str = args[0] # Required so it behaves like the original convert_args, where project_id_or_slug was another parameter @@ -125,11 +134,11 @@ def convert_args( ) try: if id_or_slug_path_params_enabled( - self.convert_args.__qualname__, str(organization_slug) + self.convert_args.__qualname__, str(organization_id_or_slug) ): project = ( Project.objects.filter( - organization__slug__id_or_slug=organization_slug, + organization__slug__id_or_slug=organization_id_or_slug, slug__id_or_slug=project_id_or_slug, ) .select_related("organization") @@ -139,7 +148,7 @@ def convert_args( else: project = ( Project.objects.filter( - organization__slug=organization_slug, slug=project_id_or_slug + organization__slug=organization_id_or_slug, slug=project_id_or_slug ) .select_related("organization") .prefetch_related("teams") @@ -151,15 +160,15 @@ def convert_args( # This will only happen if the passed in project_id_or_slug is a slug and not an id redirect = ProjectRedirect.objects.select_related("project") if id_or_slug_path_params_enabled( - self.convert_args.__qualname__, str(organization_slug) + self.convert_args.__qualname__, str(organization_id_or_slug) ): redirect = redirect.get( - organization__slug__id_or_slug=organization_slug, + organization__slug__id_or_slug=organization_id_or_slug, redirect_slug=project_id_or_slug, ) else: redirect = redirect.get( - organization__slug=organization_slug, redirect_slug=project_id_or_slug + organization__slug=organization_id_or_slug, redirect_slug=project_id_or_slug ) # Without object permissions don't reveal the rename self.check_object_permissions(request, redirect.project) @@ -167,8 +176,8 @@ def convert_args( # get full path so that we keep 
query strings requested_url = request.get_full_path() new_url = requested_url.replace( - f"projects/{organization_slug}/{project_id_or_slug}/", - f"projects/{organization_slug}/{redirect.project.slug}/", + f"projects/{organization_id_or_slug}/{project_id_or_slug}/", + f"projects/{organization_id_or_slug}/{redirect.project.slug}/", ) # Resource was moved/renamed if the requested url is different than the new url diff --git a/src/sentry/api/endpoints/accept_organization_invite.py b/src/sentry/api/endpoints/accept_organization_invite.py index e7851400181086..5d891d44ded603 100644 --- a/src/sentry/api/endpoints/accept_organization_invite.py +++ b/src/sentry/api/endpoints/accept_organization_invite.py @@ -85,7 +85,7 @@ def get_invite_state( if ( id_or_slug_path_params_enabled( convert_args_class=AcceptOrganizationInvite, - organization_slug=str(organization_id_or_slug), + organization_id_or_slug=str(organization_id_or_slug), ) and str(organization_id_or_slug).isdecimal() ): @@ -127,12 +127,12 @@ def get( request: Request, member_id: int, token: str, - organization_slug: int | str | None = None, + organization_id_or_slug: int | str | None = None, ) -> Response: invite_context = get_invite_state( member_id=int(member_id), - organization_id_or_slug=organization_slug, + organization_id_or_slug=organization_id_or_slug, user_id=request.user.id, request=request, ) @@ -222,11 +222,15 @@ def get( return response def post( - self, request: Request, member_id: int, token: str, organization_slug: str | None = None + self, + request: Request, + member_id: int, + token: str, + organization_id_or_slug: int | str | None = None, ) -> Response: invite_context = get_invite_state( member_id=int(member_id), - organization_id_or_slug=organization_slug, + organization_id_or_slug=organization_id_or_slug, user_id=request.user.id, request=request, ) diff --git a/src/sentry/api/endpoints/codeowners/details.py b/src/sentry/api/endpoints/codeowners/details.py index e37b370da45191..f8d872f8d6ac88 100644 --- a/src/sentry/api/endpoints/codeowners/details.py +++ b/src/sentry/api/endpoints/codeowners/details.py @@ -35,14 +35,14 @@ class ProjectCodeOwnersDetailsEndpoint(ProjectEndpoint, ProjectCodeOwnersMixin): def convert_args( self, request: Request, - organization_slug: str | int, + organization_id_or_slug: int | str, project_id_or_slug: int | str, codeowners_id: str, *args: Any, **kwargs: Any, ) -> tuple[Any, Any]: args, kwargs = super().convert_args( - request, organization_slug, project_id_or_slug, *args, **kwargs + request, organization_id_or_slug, project_id_or_slug, *args, **kwargs ) try: kwargs["codeowners"] = ProjectCodeOwners.objects.get( diff --git a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py index 929101bda7eeb5..541c9bee627eaf 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py @@ -27,7 +27,7 @@ class ExternalUserDetailsEndpoint(OrganizationEndpoint, ExternalActorEndpointMix } owner = ApiOwner.ENTERPRISE - def convert_args( # type: ignore[override] + def convert_args( self, request: Request, organization_slug: str, diff --git a/src/sentry/api/endpoints/organization_event_details.py b/src/sentry/api/endpoints/organization_event_details.py index 4071a454486b81..03f421edac2cdf 100644 --- a/src/sentry/api/endpoints/organization_event_details.py +++ b/src/sentry/api/endpoints/organization_event_details.py @@ -91,11 +91,11 @@ 
class OrganizationEventDetailsEndpoint(OrganizationEventsEndpointBase): def convert_args( self, request: Request, - organization_slug: str | int | None = None, + organization_id_or_slug: int | str | None = None, *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: - args, kwargs = super().convert_args(request, organization_slug, *args, **kwargs) + args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs) organization = kwargs["organization"] project_id_or_slug = kwargs.pop("project_id_or_slug") @@ -103,7 +103,7 @@ def convert_args( try: if id_or_slug_path_params_enabled( convert_args_class=self.convert_args.__qualname__, - organization_slug=organization.slug, + organization_id_or_slug=organization.slug, ): project = Project.objects.get( slug__id_or_slug=project_id_or_slug, diff --git a/src/sentry/api/endpoints/organization_member/details.py b/src/sentry/api/endpoints/organization_member/details.py index 1d12fd1be9b67f..b64837ef6efecb 100644 --- a/src/sentry/api/endpoints/organization_member/details.py +++ b/src/sentry/api/endpoints/organization_member/details.py @@ -113,7 +113,7 @@ def _get_member( @extend_schema( operation_id="Retrieve an Organization Member", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the organization member."), ], responses={ @@ -146,7 +146,7 @@ def get( @extend_schema( operation_id="Update an Organization Member's Roles", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the member to update."), ], request=inline_serializer( @@ -357,7 +357,7 @@ def _change_org_role(member: OrganizationMember, role: str) -> None: @extend_schema( operation_id="Delete an Organization Member", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the member to delete."), ], responses={ diff --git a/src/sentry/api/endpoints/organization_member/team_details.py b/src/sentry/api/endpoints/organization_member/team_details.py index 931c8d3fe044ca..57df6a5ca08180 100644 --- a/src/sentry/api/endpoints/organization_member/team_details.py +++ b/src/sentry/api/endpoints/organization_member/team_details.py @@ -100,11 +100,11 @@ class OrganizationMemberTeamDetailsEndpoint(OrganizationMemberEndpoint): def convert_args( self, request: Request, - organization_slug: int | str | None = None, + organization_id_or_slug: int | str | None = None, *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: - args, kwargs = super().convert_args(request, organization_slug, *args, **kwargs) + args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs) team_id_or_slug = kwargs.pop("team_id_or_slug") organization = kwargs["organization"] @@ -113,7 +113,7 @@ def convert_args( if request.method == "GET": try: if id_or_slug_path_params_enabled( - self.get.__qualname__, organization_slug=organization.slug + self.get.__qualname__, organization_id_or_slug=organization.slug ): omt = OrganizationMemberTeam.objects.get( team__slug__id_or_slug=team_id_or_slug, organizationmember=member @@ -130,7 +130,7 @@ def convert_args( else: try: if id_or_slug_path_params_enabled( - self.post.__qualname__, organization_slug=organization.slug + self.post.__qualname__, organization_id_or_slug=organization.slug ): team = Team.objects.get( organization__slug__id_or_slug=organization.slug, @@ -229,7 +229,7 @@ def get( @extend_schema( operation_id="Add an Organization Member to a Team", parameters=[ - 
GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the organization member to add to the team"), GlobalParams.TEAM_ID_OR_SLUG, ], @@ -419,7 +419,7 @@ def _change_team_member_role( @extend_schema( operation_id="Delete an Organization Member from a Team", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the organization member to delete from the team"), GlobalParams.TEAM_ID_OR_SLUG, ], diff --git a/src/sentry/api/endpoints/project_team_details.py b/src/sentry/api/endpoints/project_team_details.py index 695bfaa83c3189..61cb6d485f4382 100644 --- a/src/sentry/api/endpoints/project_team_details.py +++ b/src/sentry/api/endpoints/project_team_details.py @@ -39,21 +39,21 @@ class ProjectTeamDetailsEndpoint(ProjectEndpoint): def convert_args( self, request: Request, - organization_slug: str | int, + organization_id_or_slug: int | str, project_id_or_slug: int | str, team_id_or_slug: int | str, *args, **kwargs, ): (args, kwargs) = super().convert_args( - request, organization_slug, project_id_or_slug, *args, **kwargs + request, organization_id_or_slug, project_id_or_slug, *args, **kwargs ) project = kwargs["project"] try: if id_or_slug_path_params_enabled( - self.convert_args.__qualname__, organization_slug=project.organization.slug + self.convert_args.__qualname__, organization_id_or_slug=project.organization.slug ): team = Team.objects.get( organization__slug__id_or_slug=project.organization.slug, @@ -72,7 +72,7 @@ def convert_args( @extend_schema( operation_id="Add a Team to a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, GlobalParams.TEAM_ID_OR_SLUG, ], @@ -103,7 +103,7 @@ def post(self, request: Request, project, team: Team) -> Response: @extend_schema( operation_id="Delete a Team from a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, GlobalParams.TEAM_ID_OR_SLUG, ], diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 95be8f266235b8..49b110875f8ed7 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -1314,7 +1314,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-access-request-details", ), re_path( - r"^(?P[^\/]+)/activity/$", + r"^(?P[^\/]+)/activity/$", OrganizationActivityEndpoint.as_view(), name="sentry-api-0-organization-activity", ), @@ -1384,7 +1384,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-events", ), re_path( - r"^(?P[^\/]+)/events/(?P[^\/]+):(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$", + r"^(?P[^\/]+)/events/(?P[^\/]+):(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$", OrganizationEventDetailsEndpoint.as_view(), name="sentry-api-0-organization-event-details", ), @@ -1566,22 +1566,22 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-integration-details", ), re_path( - r"^(?P[^\/]+)/integrations/(?P[^\/]+)/repos/$", + r"^(?P[^\/]+)/integrations/(?P[^\/]+)/repos/$", OrganizationIntegrationReposEndpoint.as_view(), name="sentry-api-0-organization-integration-repos", ), re_path( - r"^(?P[^\/]+)/integrations/(?P[^\/]+)/issues/$", + r"^(?P[^\/]+)/integrations/(?P[^\/]+)/issues/$", OrganizationIntegrationIssuesEndpoint.as_view(), name="sentry-api-0-organization-integration-issues", ), re_path( - r"^(?P[^\/]+)/integrations/(?P[^\/]+)/migrate-opsgenie/$", + 
r"^(?P[^\/]+)/integrations/(?P[^\/]+)/migrate-opsgenie/$", OrganizationIntegrationMigrateOpsgenieEndpoint.as_view(), name="sentry-api-0-organization-integration-migrate-opsgenie", ), re_path( - r"^(?P[^\/]+)/integrations/(?P[^\/]+)/serverless-functions/$", + r"^(?P[^\/]+)/integrations/(?P[^\/]+)/serverless-functions/$", OrganizationIntegrationServerlessFunctionsEndpoint.as_view(), name="sentry-api-0-organization-integration-serverless-functions", ), @@ -1611,7 +1611,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-invite-request-index", ), re_path( - r"^(?P[^\/]+)/invite-requests/(?P[^\/]+)/$", + r"^(?P[^\/]+)/invite-requests/(?P[^\/]+)/$", OrganizationInviteRequestDetailsEndpoint.as_view(), name="sentry-api-0-organization-invite-request-detail", ), @@ -1709,17 +1709,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-release-resolved", ), re_path( - r"^(?P[^\/]+)/members/(?P[^\/]+)/$", + r"^(?P[^\/]+)/members/(?P[^\/]+)/$", OrganizationMemberDetailsEndpoint.as_view(), name="sentry-api-0-organization-member-details", ), re_path( - r"^(?P[^\/]+)/members/(?P[^\/]+)/unreleased-commits/$", + r"^(?P[^\/]+)/members/(?P[^\/]+)/unreleased-commits/$", OrganizationMemberUnreleasedCommitsEndpoint.as_view(), name="sentry-api-0-organization-member-unreleased-commits", ), re_path( - r"^(?P[^\/]+)/members/(?P[^\/]+)/teams/(?P[^\/]+)/$", + r"^(?P[^\/]+)/members/(?P[^\/]+)/teams/(?P[^\/]+)/$", OrganizationMemberTeamDetailsEndpoint.as_view(), name="sentry-api-0-organization-member-team-details", ), @@ -2551,7 +2551,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-teams", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/teams/(?P[^\/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/teams/(?P[^\/]+)/$", ProjectTeamDetailsEndpoint.as_view(), name="sentry-api-0-project-team-details", ), @@ -2601,7 +2601,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-codeowners", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/codeowners/(?P[^\/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/codeowners/(?P[^\/]+)/$", ProjectCodeOwnersDetailsEndpoint.as_view(), name="sentry-api-0-project-codeowners-details", ), @@ -3131,7 +3131,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ), # Organization invite re_path( - r"^accept-invite/(?P[^\/]+)/(?P[^\/]+)/(?P[^\/]+)/$", + r"^accept-invite/(?P[^\/]+)/(?P[^\/]+)/(?P[^\/]+)/$", AcceptOrganizationInvite.as_view(), name="sentry-api-0-organization-accept-organization-invite", ), diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index db553a95c002d7..6516be6e4e3bfd 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -487,19 +487,21 @@ def duration(self): def id_or_slug_path_params_enabled( - convert_args_class: str | None = None, organization_slug: str | None = None + convert_args_class: str | None = None, organization_id_or_slug: str | None = None ) -> bool: # GA option if options.get("api.id-or-slug-enabled"): return True # Apigateway - if not convert_args_class and organization_slug: + if not convert_args_class and organization_id_or_slug: # Return True if the organization is in the list of enabled organizations and the apigateway option is enabled - return organization_slug in options.get("api.id-or-slug-enabled-ea-org") + return organization_id_or_slug in options.get("api.id-or-slug-enabled-ea-org") # EA option for endpoints where 
organization is available - if organization_slug and organization_slug not in options.get("api.id-or-slug-enabled-ea-org"): + if organization_id_or_slug and organization_id_or_slug not in options.get( + "api.id-or-slug-enabled-ea-org" + ): return False # EA option for endpoints where organization is not available diff --git a/src/sentry/types/region.py b/src/sentry/types/region.py index 02066cf6df1fc8..378c40cc16c80a 100644 --- a/src/sentry/types/region.py +++ b/src/sentry/types/region.py @@ -269,7 +269,7 @@ def get_region_for_organization(organization_id_or_slug: str) -> Region: from sentry.models.organizationmapping import OrganizationMapping if ( - id_or_slug_path_params_enabled(organization_slug=organization_id_or_slug) + id_or_slug_path_params_enabled(organization_id_or_slug=organization_id_or_slug) and organization_id_or_slug.isdecimal() ): mapping = OrganizationMapping.objects.filter( diff --git a/tests/sentry/api/endpoints/test_project_codeowners_details.py b/tests/sentry/api/endpoints/test_project_codeowners_details.py index 13c7d55c547696..8e9541129d1036 100644 --- a/tests/sentry/api/endpoints/test_project_codeowners_details.py +++ b/tests/sentry/api/endpoints/test_project_codeowners_details.py @@ -37,7 +37,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-codeowners-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "codeowners_id": self.codeowners.id, }, @@ -71,7 +71,7 @@ def test_wrong_codeowners_id(self): self.url = reverse( "sentry-api-0-project-codeowners-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "codeowners_id": 1000, }, @@ -155,7 +155,7 @@ def test_codeowners_max_raw_length(self, mock_record): url = reverse( "sentry-api-0-project-codeowners-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "codeowners_id": codeowners.id, }, diff --git a/tests/snuba/api/endpoints/test_organization_event_details.py b/tests/snuba/api/endpoints/test_organization_event_details.py index 82da92135c3f9e..333f9a7da9f2fb 100644 --- a/tests/snuba/api/endpoints/test_organization_event_details.py +++ b/tests/snuba/api/endpoints/test_organization_event_details.py @@ -62,7 +62,7 @@ def test_performance_flag(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "a" * 32, }, @@ -79,7 +79,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "a" * 32, }, @@ -97,7 +97,7 @@ def test_simple_with_id(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.id, "event_id": "a" * 32, }, @@ -127,7 +127,7 @@ def test_simple_transaction(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": 
self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": event.event_id, }, @@ -143,7 +143,7 @@ def test_no_access_missing_feature(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "a" * 32, }, @@ -167,7 +167,7 @@ def test_access_non_member_project(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "a" * 32, }, @@ -188,7 +188,7 @@ def test_no_event(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "d" * 32, }, @@ -204,7 +204,7 @@ def test_invalid_event_id(self): reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "not-an-event", }, @@ -221,7 +221,7 @@ def test_long_trace_description(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "d" * 32, }, @@ -241,7 +241,7 @@ def test_blank_fields(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "a" * 32, }, @@ -272,7 +272,7 @@ def test_out_of_retention(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "d" * 32, }, @@ -297,7 +297,7 @@ def test_generic_event(self): url = reverse( "sentry-api-0-organization-event-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": occurrence.event_id, }, @@ -325,7 +325,7 @@ def setUp(self): self.url = reverse( self.endpoint, kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": event.event_id, }, From 78c25c5bb76cc4438f812a3fd623d13d0ef44cc0 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Thu, 9 May 2024 15:12:48 -0400 Subject: [PATCH 230/376] feat(traces): Show error message on traces (#70601) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit #### Summary This shows an error message banner, which is helpful for timeouts. 
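A minimal sketch of the guard being added, with the `Alert` component import and props assumed rather than taken from the hunk below:

```tsx
import Alert from 'sentry/components/alert';

// Sketch only: render the API error detail as a banner. The typeof guard
// keeps non-string `detail` payloads (e.g. structured validation errors)
// from being rendered raw.
function TracesErrorBanner({error}: {error: any}) {
  const detail = error?.responseJSON?.detail;
  return typeof detail === 'string' ? (
    <Alert type="error" showIcon>
      {detail}
    </Alert>
  ) : null;
}
```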
#### Screenshots ![Screenshot 2024-05-09 at 2 54 37 PM](https://github.com/getsentry/sentry/assets/6111995/8ea24c53-0b7d-4a35-bef8-2dc9b02aa13b) --- static/app/views/performance/traces/content.tsx | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index 78cec828607bf6..86489974861cb1 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -135,6 +135,11 @@ export function Content() { })} )} + {isError && typeof traces.error?.responseJSON?.detail === 'string' ? ( + {traces.error?.responseJSON?.detail} + ) : null} Date: Thu, 9 May 2024 15:25:42 -0400 Subject: [PATCH 231/376] feat(related_issues): Decouple endpoint (#70504) The API tries to find issues for both approaches sequentially (slower), making it harder to iterate on each approach independently. This change allows using a `type` parameter with the endpoint to execute one approach or the other. The UI will be changed to make two independent requests, thus improving load time. This is required to enable trace-connected issues on the issue details page. --------- Co-authored-by: Matt Duncan <14761+mrduncan@users.noreply.github.com> --- .../api/endpoints/issues/related_issues.py | 23 +++++++++++----- src/sentry/issues/related/__init__.py | 2 +- .../endpoints/issues/test_related_issues.py | 27 +++++--------------- 3 files changed, 25 insertions(+), 27 deletions(-) diff --git a/src/sentry/api/endpoints/issues/related_issues.py b/src/sentry/api/endpoints/issues/related_issues.py index 4387521c3c4862..6a1104e73f27fb 100644 --- a/src/sentry/api/endpoints/issues/related_issues.py +++ b/src/sentry/api/endpoints/issues/related_issues.py @@ -5,7 +5,8 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.group import GroupEndpoint -from sentry.issues.related import find_related_issues # To be deprecated +from sentry.issues.related import RELATED_ISSUES_ALGORITHMS from sentry.models.group import Group from sentry.types.ratelimit import RateLimit, RateLimitCategory @@ -24,13 +25,23 @@ class RelatedIssuesEndpoint(GroupEndpoint): } # We get a Group object since the endpoint is /issues/{issue_id}/related-issues - def get(self, _: Request, group: Group) -> Response: + def get(self, request: Request, group: Group) -> Response: """ - Retrieve related issues for an Issue + Retrieve related issues for a Group ```````````````````````````````````` Related issues can be based on the same root cause or trace connected. - :pparam string group_id: the ID of the issue + :pparam Request request: the request object + :pparam Group group: the group object """ - related_issues = find_related_issues(group) - return Response({"data": [related_set for related_set in related_issues]}) + # The type of related issues to retrieve. Can be either `same_root_cause` or `trace_connected`. 
+ related_type = request.query_params.get("type") + related_issues: list[dict[str, str | list[int] | dict[str, str]]] = [] + + if related_type in RELATED_ISSUES_ALGORITHMS: + data, meta = RELATED_ISSUES_ALGORITHMS[related_type](group) + return Response({"type": related_type, "data": data, "meta": meta}) + else: + # XXX: We will be deprecating this approach soon + related_issues = find_related_issues(group) + return Response({"data": [related_set for related_set in related_issues]}) diff --git a/src/sentry/issues/related/__init__.py b/src/sentry/issues/related/__init__.py index 3148714d42ec18..c5771783e68973 100644 --- a/src/sentry/issues/related/__init__.py +++ b/src/sentry/issues/related/__init__.py @@ -5,7 +5,7 @@ from .same_root_cause import same_root_cause_analysis from .trace_connected import trace_connected_analysis -__all__ = ["find_related_issues"] +__all__ = ["find_related_issues", "same_root_cause_analysis", "trace_connected_analysis"] RELATED_ISSUES_ALGORITHMS = { "same_root_cause": same_root_cause_analysis, diff --git a/tests/sentry/api/endpoints/issues/test_related_issues.py b/tests/sentry/api/endpoints/issues/test_related_issues.py index 7fd5c60be299a1..1d2a31acf56549 100644 --- a/tests/sentry/api/endpoints/issues/test_related_issues.py +++ b/tests/sentry/api/endpoints/issues/test_related_issues.py @@ -40,16 +40,11 @@ def test_same_root_related_issues(self) -> None: for datum in groups_data: self.create_group(data=datum) - response = self.get_success_response() + response = self.get_success_response(qs_params={"type": "same_root_cause"}) # The UI will then make normal calls to get issues-stats # For instance, this URL # https://us.sentry.io/api/0/organizations/sentry/issues-stats/?groups=4741828952&groups=4489703641&statsPeriod=24h - assert response.json() == { - "data": [ - {"type": "same_root_cause", "data": [5], "meta": {}}, - {"type": "trace_connected", "data": [], "meta": {}}, - ], - } + assert response.json() == {"type": "same_root_cause", "data": [5], "meta": {}} def test_trace_connected_errors(self) -> None: error_event, _, another_proj_event = self.load_errors(self.project, uuid4().hex[:16]) @@ -62,18 +57,10 @@ def test_trace_connected_errors(self) -> None: assert error_event.project.id != another_proj_event.project.id assert error_event.trace_id == another_proj_event.trace_id - response = self.get_success_response() + response = self.get_success_response(qs_params={"type": "trace_connected"}) assert response.json() == { - "data": [ - {"type": "same_root_cause", "data": [], "meta": {}}, - { - "type": "trace_connected", - # This is the other issue in the trace that it is not itself - "data": [another_proj_event.group_id], - "meta": { - "event_id": recommended_event.event_id, - "trace_id": error_event.trace_id, - }, - }, - ] + "type": "trace_connected", + # This is the other issue in the trace that it is not itself + "data": [another_proj_event.group_id], + "meta": {"event_id": recommended_event.event_id, "trace_id": error_event.trace_id}, } From b29b8343de9c1f738e09a833f4e0663e0f2bc1fb Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Thu, 9 May 2024 12:32:47 -0700 Subject: [PATCH 232/376] fix(spamdetection): more prompt engineering (#70602) more prompt engineering --- src/sentry/feedback/usecases/spam_detection.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/feedback/usecases/spam_detection.py b/src/sentry/feedback/usecases/spam_detection.py index 8ee08fe691c67e..fe82a2fd23d969 100644 --- 
a/src/sentry/feedback/usecases/spam_detection.py +++ b/src/sentry/feedback/usecases/spam_detection.py @@ -8,7 +8,7 @@ def make_input_prompt(input): return f"""**Classification Task** -**Instructions: Please analyze the following input and output `spam` if the input is not coherent, and `notspam` if it is coherent.** +**Instructions: Please analyze the following input and output `spam` if the input is not coherent, and `notspam` if it is coherent. If the user is frustrated but describing a problem, that is notspam** **Label Options:** spam, notspam **Few-shot Examples:** @@ -20,6 +20,9 @@ def make_input_prompt(input): * **Example 6:** "i was playing a great game now it crashed" -> notspam * **Example 7:** "i can't login to my account wtf??!" -> notspam * **Example 8:** "ฉันไม่สามารถเข้าสู่ระบบและไม่มีอะไรทำงาน " -> notspam +* **Example 9:** "crashed" -> notspam +* **Example 10:** "MY GAME GLITCHED GRRRR!!!!" -> notspam +* **Example 11:** "THIS PIECE OF JUNK DOES NOT WORK!!!" -> notspam **Input Text:** "{input}" From 7572953acb1f28e400c45ec06526f2033b2a041c Mon Sep 17 00:00:00 2001 From: Nikhar Saxena <84807402+nikhars@users.noreply.github.com> Date: Thu, 9 May 2024 12:39:24 -0700 Subject: [PATCH 233/376] fix(metrics): Remove byte format within JSON (#70594) We need the metric type as bytes only in the Kafka headers, not in the actual payload. Made the change so that the JSON payload gets a string while the Kafka header gets bytes. --- .../test_gen_metrics_multiprocess_steps.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/sentry/sentry_metrics/test_gen_metrics_multiprocess_steps.py b/tests/sentry/sentry_metrics/test_gen_metrics_multiprocess_steps.py index 918500409034e2..2e228ad323c625 100644 --- a/tests/sentry/sentry_metrics/test_gen_metrics_multiprocess_steps.py +++ b/tests/sentry/sentry_metrics/test_gen_metrics_multiprocess_steps.py @@ -66,6 +66,10 @@ def compare_messages_ignoring_mapping_metadata(actual: Message, expected: Message actual_deserialized = json.loads(actual_payload.value) expected_deserialized = json.loads(expected_payload.value) del actual_deserialized["mapping_meta"] + # The custom use case metrics payload adds the aggregation option to the transformed payload. + # Others don't. Since the tests are generic over different payload types, the check for + # fields which are specific to a payload was removed. 
+ actual_deserialized.pop("aggregation_option", None) assert actual_deserialized == expected_deserialized @@ -241,7 +245,7 @@ def test_metrics_batch_builder() -> None: "session.status": "init", }, "timestamp": ts, - "type": b"c", + "type": "c", "value": 1.0, "org_id": 1, "project_id": 3, @@ -258,7 +262,7 @@ def test_metrics_batch_builder() -> None: "session.status": "healthy", }, "timestamp": ts, - "type": b"d", + "type": "d", "value": [4, 5, 6], "org_id": 1, "project_id": 3, @@ -276,7 +280,7 @@ def test_metrics_batch_builder() -> None: "session.status": "errored", }, "timestamp": ts, - "type": b"s", + "type": "s", "value": [3], "org_id": 1, "project_id": 3, @@ -358,7 +362,7 @@ def test_process_messages() -> None: None, json.dumps(__translated_payload(message_payloads[i])).encode("utf-8"), [ - ("metric_type", message_payloads[i]["type"]), + ("metric_type", message_payloads[i]["type"].encode()), ], ), m.value.partition, From d190f012ec461ee21405f2bf0f4ceeee66a9cbe6 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Thu, 9 May 2024 12:56:52 -0700 Subject: [PATCH 234/376] fix(billing): Avoid usage chart warning (#70547) --- .../dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx | 4 +++- static/app/views/organizationStats/usageChart/index.tsx | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx b/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx index f399240a10dc75..69880d60eedd9a 100644 --- a/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx +++ b/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx @@ -241,7 +241,9 @@ describe('WidgetBuilder', function () { // Selector "sortDirection" expect(screen.getByText('High to low')).toBeInTheDocument(); // Selector "sortBy" - expect(screen.getAllByText('count()')).toHaveLength(3); + await waitFor(() => { + expect(screen.getAllByText('count()')).toHaveLength(3); + }); }); it('sortBy defaults to the first field value when changing display type to table', async function () { diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx index 6d8f7614d1c012..5b0c2435081be6 100644 --- a/static/app/views/organizationStats/usageChart/index.tsx +++ b/static/app/views/organizationStats/usageChart/index.tsx @@ -441,7 +441,9 @@ function UsageChartBody({ if (chartSeries) { chartSeries.forEach(chartOption => { - legend.push({name: `${chartOption.name}`}); + if (chartOption.name) { + legend.push({name: `${chartOption.name}`}); + } }); } From 26d4c836ad8e83247c7b588349049eea867e3271 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Thu, 9 May 2024 15:58:54 -0400 Subject: [PATCH 235/376] feat(traces): Change color scheme a bit (#70610) ### Summary This changes the colors on the timeline slightly so they have less collision for our projects --- static/app/views/performance/traces/utils.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/views/performance/traces/utils.tsx b/static/app/views/performance/traces/utils.tsx index 5b2309a36468bc..7b1dfc4794e1b3 100644 --- a/static/app/views/performance/traces/utils.tsx +++ b/static/app/views/performance/traces/utils.tsx @@ -21,7 +21,7 @@ export function getStylingSliceName( ) { if (sliceSecondaryName) { // Our color picking relies on the first 4 letters. 
Since we want to differentiate sdk names and project names we have to include part of the sdk name. - return sliceSecondaryName.slice(-2) + (sliceName ?? ''); + return (sliceName ?? '').slice(0, 1) + sliceSecondaryName.slice(-4); } return sliceName; From e8f6cb0bc36fb87030d9af958cee05a5dc4b3436 Mon Sep 17 00:00:00 2001 From: Josh Callender <1569818+saponifi3d@users.noreply.github.com> Date: Thu, 9 May 2024 13:04:11 -0700 Subject: [PATCH 236/376] Add limit to API for Alert Rules (#70596) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Description Fixes: https://github.com/getsentry/sentry/issues/67195 Adds the `X-Sentry-Alert-Rule-Limit` header to the list views for alert rules (both the combined-rules and metric alert APIs). I tried adding an indicator to the UI, but it was pretty difficult to tell which page we're on in the UI, since we're handling it on the backend with a `Link`. Rather than blocking the API fix on trying to figure out the UI, I figure it's better to get this out and we can add to the UI later. ![Screenshot 2024-05-09 at 11 39 43 AM](https://github.com/getsentry/sentry/assets/1569818/3cb0d796-95ad-4f87-8344-88d9599b6c79) --- src/sentry/api/helpers/constants.py | 2 ++ .../endpoints/organization_alert_rule_index.py | 11 +++++++-- .../test_organization_alert_rule_index.py | 13 +++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 src/sentry/api/helpers/constants.py diff --git a/src/sentry/api/helpers/constants.py b/src/sentry/api/helpers/constants.py new file mode 100644 index 00000000000000..b98c3d84b56f99 --- /dev/null +++ b/src/sentry/api/helpers/constants.py @@ -0,0 +1,2 @@ +MAX_QUERY_SUBSCRIPTIONS_HEADER = "X-Sentry-Alert-Rule-Limit" +ALERT_RULES_COUNT_HEADER = "X-Sentry-Alert-Rule-Hits" diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py index 07183d08261930..15d4933e6c161a 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py @@ -1,6 +1,7 @@ from copy import deepcopy from datetime import UTC, datetime +from django.conf import settings from django.db.models import DateTimeField, IntegerField, OuterRef, Q, Subquery, Value from django.db.models.functions import Coalesce from drf_spectacular.utils import extend_schema, extend_schema_serializer @@ -15,6 +16,7 @@ from sentry.api.bases.organization import OrganizationAlertRulePermission, OrganizationEndpoint from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.fields.actor import ActorField +from sentry.api.helpers.constants import ALERT_RULES_COUNT_HEADER, MAX_QUERY_SUBSCRIPTIONS_HEADER from sentry.api.paginator import ( CombinedQuerysetIntermediary, CombinedQuerysetPaginator, @@ -70,7 +72,7 @@ def fetch_metric_alert(self, request, organization, project=None): if monitor_type is not None: alert_rules = alert_rules.filter(monitor_type=monitor_type) - return self.paginate( + response = self.paginate( request, queryset=alert_rules, order_by="-date_added", @@ -79,6 +81,10 @@ def fetch_metric_alert(self, request, organization, project=None): default_per_page=25, ) + response[ALERT_RULES_COUNT_HEADER] = len(alert_rules) + response[MAX_QUERY_SUBSCRIPTIONS_HEADER] = settings.MAX_QUERY_SUBSCRIPTIONS_PER_ORG + return response + def create_metric_alert(self, request, organization, project=None): if not features.has("organizations:incidents", organization, 
actor=request.user): raise ResourceDoesNotExist @@ -278,7 +284,8 @@ def get(self, request: Request, organization) -> Response: case_insensitive=case_insensitive, ) response["X-Sentry-Issue-Rule-Hits"] = issue_rules_count - response["X-Sentry-Alert-Rule-Hits"] = alert_rules_count + response[ALERT_RULES_COUNT_HEADER] = alert_rules_count + response[MAX_QUERY_SUBSCRIPTIONS_HEADER] = settings.MAX_QUERY_SUBSCRIPTIONS_PER_ORG return response diff --git a/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py b/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py index 799dd510673cb6..7d893bc8c27106 100644 --- a/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py +++ b/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py @@ -9,6 +9,7 @@ from rest_framework import status from sentry import audit_log +from sentry.api.helpers.constants import ALERT_RULES_COUNT_HEADER, MAX_QUERY_SUBSCRIPTIONS_HEADER from sentry.api.serializers import serialize from sentry.incidents.models.alert_rule import ( AlertRule, @@ -122,6 +123,18 @@ def test_filter_by_monitor_type(self): assert serialize([alert_rule2]) not in resp.data assert resp.data == serialize([alert_rule1]) + def test_response_headers(self): + self.create_team(organization=self.organization, members=[self.user]) + self.create_alert_rule(monitor_type=AlertRuleMonitorType.ACTIVATED) + self.create_alert_rule(monitor_type=AlertRuleMonitorType.CONTINUOUS) + self.login_as(self.user) + + with self.feature("organizations:incidents"): + resp = self.get_response(self.organization.slug) + + assert resp[ALERT_RULES_COUNT_HEADER] == "2" + assert resp[MAX_QUERY_SUBSCRIPTIONS_HEADER] == "1000" + def test_simple_with_activation(self): self.create_team(organization=self.organization, members=[self.user]) alert_rule = self.create_alert_rule() From 45dc47592f3982f7dc473e958ee6a7d9dbf4764a Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 16:08:01 -0400 Subject: [PATCH 237/376] fix: accept non-str keys on autofix (#70609) Fixes https://sentry.sentry.io/issues/5326989067/?project=1&referrer=github-pr-bot --- src/sentry/api/endpoints/group_ai_autofix.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py index 0ade5ce0084eec..0a4eaa2819adb8 100644 --- a/src/sentry/api/endpoints/group_ai_autofix.py +++ b/src/sentry/api/endpoints/group_ai_autofix.py @@ -108,7 +108,8 @@ def _call_autofix( if not isinstance(user, AnonymousUser) else None ), - } + }, + option=orjson.OPT_NON_STR_KEYS, ), headers={"content-type": "application/json;charset=utf-8"}, ) From 104ba6b91ec9d58d7b91419748fbd2dee1fb3313 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Thu, 9 May 2024 13:22:03 -0700 Subject: [PATCH 238/376] ref(rules): Add delayed processor logging (#70593) Add logging so I can figure out where it's breaking down - I LA'd my test org and tried to trigger a rule without success. 
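For reference, a minimal runnable sketch of the kind of log line the first hunk below adds; the helper name and the separator-joined formatting are illustrative assumptions, not part of the patch:

```python
import logging

logger = logging.getLogger("sentry")


def log_project_id_list(project_ids: list[tuple[int, float]]) -> None:
    # Same shape as what RedisBuffer.get_sorted_set returns in the diff below:
    # (project_id, score) pairs, where the score is the enqueue timestamp.
    # Joining with an explicit separator keeps the single log line readable,
    # e.g. "12: 1715280000.0, 34: 1715280042.5".
    formatted = ", ".join(f"{pid}: {ts}" for pid, ts in project_ids)
    logger.info("delayed_processing.project_id_list", extra={"project_ids": formatted})


log_project_id_list([(12, 1715280000.0), (34, 1715280042.5)])
```

The patch itself builds the string with `+=` and no separator, so adjacent entries run together in the log line; a join like the one above keeps it parseable.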
--- .../rules/processing/delayed_processing.py | 18 ++++++++++++++++++ src/sentry/rules/processing/processor.py | 4 ++++ 2 files changed, 22 insertions(+) diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index 1056d003172f56..7ea02c215a0dc1 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -242,6 +242,11 @@ def process_delayed_alert_conditions(buffer: RedisBuffer) -> None: project_ids = buffer.get_sorted_set( PROJECT_ID_BUFFER_LIST_KEY, min=0, max=fetch_time.timestamp() ) + log_str = "" + for project_id, timestamp in project_ids: + log_str += f"{project_id}: {timestamp}" + logger.info("delayed_processing.project_id_list", extra={"project_ids": log_str}) + for project_id, _ in project_ids: apply_delayed.delay(project_id) @@ -265,6 +270,10 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: project = Project.objects.get_from_cache(id=project_id) buffer = RedisBuffer() rulegroup_to_event_data = buffer.get_hash(model=Project, field={"project_id": project.id}) + logger.info( + "delayed_processing.rulegroupeventdata", + extra={"rulegroupdata": rulegroup_to_event_data}, + ) # STEP 2: Map each rule to the groups that must be checked for that rule. rules_to_groups = get_rules_to_groups(rulegroup_to_event_data) @@ -290,6 +299,10 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: rules_to_fire = get_rules_to_fire( condition_group_results, rule_to_slow_conditions, rules_to_groups ) + log_str = "" + for rule in rules_to_fire.keys(): + log_str += f"{str(rule.id)}, " + logger.info("delayed_processing.rule_to_fire", extra={"rules_to_fire": log_str}) # Step 7: Fire the rule's actions now = datetime.now(tz=timezone.utc) parsed_rulegroup_to_event_data = parse_rulegroup_to_event_data(rulegroup_to_event_data) @@ -304,6 +317,10 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: rule_statuses = bulk_get_rule_status(alert_rules, group, project) status = rule_statuses[rule.id] if status.last_active and status.last_active > freq_offset: + logger.info( + "delayed_processing.last_active", + extra={"last_active": status.last_active, "freq_offset": freq_offset}, + ) return updated = ( @@ -313,6 +330,7 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: ) if not updated: + logger.info("delayed_processing.not_updated", extra={"status_id": status.id}) return notification_uuid = str(uuid.uuid4()) diff --git a/src/sentry/rules/processing/processor.py b/src/sentry/rules/processing/processor.py index b13b1e584ef674..bbb96f1fd716ea 100644 --- a/src/sentry/rules/processing/processor.py +++ b/src/sentry/rules/processing/processor.py @@ -259,6 +259,10 @@ def group_conditions_by_speed( return fast_conditions, slow_conditions def enqueue_rule(self, rule: Rule) -> None: + logger.info( + "rule_processor.rule_enqueued", + extra={"rule": rule.id, "group": self.group.id, "project": rule.project.id}, + ) self.buffer = RedisBuffer() self.buffer.push_to_sorted_set(PROJECT_ID_BUFFER_LIST_KEY, rule.project.id) From 5c0a8b9ad9c32c7bfaff742da28eea3f34408ea9 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Thu, 9 May 2024 16:25:21 -0400 Subject: [PATCH 239/376] ref(ui): Use useLocation in Link (#70612) --- static/app/components/links/link.tsx | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/static/app/components/links/link.tsx b/static/app/components/links/link.tsx index 
2c6dd466a357fe..772408ffeb4f09 100644 --- a/static/app/components/links/link.tsx +++ b/static/app/components/links/link.tsx @@ -1,10 +1,10 @@ -import {forwardRef, useContext} from 'react'; +import {forwardRef} from 'react'; import {Link as RouterLink} from 'react-router'; import styled from '@emotion/styled'; import type {Location, LocationDescriptor} from 'history'; +import {useLocation} from 'sentry/utils/useLocation'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; -import {RouteContext} from 'sentry/views/routeContext'; import {linkStyles} from './styles'; @@ -42,8 +42,7 @@ export interface LinkProps * back to if there is no router present */ function BaseLink({disabled, to, forwardedRef, ...props}: LinkProps): React.ReactElement { - const route = useContext(RouteContext); - const location = route?.location; + const location = useLocation(); to = normalizeUrl(to, location); if (!disabled && location) { From 32e5c268aef4b1583e598b2f7bb4c51f2f28c88a Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Thu, 9 May 2024 16:29:03 -0400 Subject: [PATCH 240/376] ref(ui): Use Link component in eventTagsTreeRow (#70613) --- static/app/components/events/eventTags/eventTagsTreeRow.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/components/events/eventTags/eventTagsTreeRow.tsx b/static/app/components/events/eventTags/eventTagsTreeRow.tsx index 4406672c895f64..4a40e1272ceddc 100644 --- a/static/app/components/events/eventTags/eventTagsTreeRow.tsx +++ b/static/app/components/events/eventTags/eventTagsTreeRow.tsx @@ -1,5 +1,4 @@ import {Fragment, useState} from 'react'; -import {Link} from 'react-router'; import styled from '@emotion/styled'; import * as qs from 'query-string'; @@ -10,6 +9,7 @@ import type {TagTreeContent} from 'sentry/components/events/eventTags/eventTagsT import EventTagsValue from 'sentry/components/events/eventTags/eventTagsValue'; import {AnnotatedTextErrors} from 'sentry/components/events/meta/annotatedText/annotatedTextErrors'; import ExternalLink from 'sentry/components/links/externalLink'; +import Link from 'sentry/components/links/link'; import Version from 'sentry/components/version'; import VersionHoverCard from 'sentry/components/versionHoverCard'; import {IconEllipsis} from 'sentry/icons'; From be1e9c7f4acdd4f51c7e7f6bac250e240073f40c Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Thu, 9 May 2024 16:30:24 -0400 Subject: [PATCH 241/376] ref(ui): Use Link component in resourceSummaryTable (#70614) --- .../resources/resourceSummaryPage/resourceSummaryTable.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx b/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx index a390087c493d7f..d699b0a4909041 100644 --- a/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx +++ b/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx @@ -1,9 +1,10 @@ import {Fragment} from 'react'; -import {browserHistory, Link} from 'react-router'; +import {browserHistory} from 'react-router'; import styled from '@emotion/styled'; import type {GridColumnHeader, GridColumnOrder} from 'sentry/components/gridEditable'; import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; +import Link from 'sentry/components/links/link'; import type {CursorHandler} from 'sentry/components/pagination'; import Pagination 
from 'sentry/components/pagination'; import {t} from 'sentry/locale'; From ff1b84a5572c15c0c993c05269d406b0e30d0d6d Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 16:49:19 -0400 Subject: [PATCH 242/376] perf: remove jsondata usages (#70608) By default JSONData is defined as Any. With `orjson`, we don't need `utils/json.py` and can slowly remove it. Reduces cyclic dependencies. --- src/sentry/api/bases/doc_integrations.py | 5 +- .../endpoints/group_integration_details.py | 3 +- .../api/endpoints/group_integrations.py | 3 +- .../notifications/notification_actions.md | 2 +- .../organization_member/team_details.py | 3 +- src/sentry/api/serializers/base.py | 6 +- .../api/serializers/models/doc_integration.py | 3 +- .../models/doc_integration_avatar.py | 4 +- src/sentry/api/serializers/models/group.py | 3 +- .../api/serializers/models/integration.py | 9 +- .../api/serializers/models/relocation.py | 3 +- .../serializers/models/sentry_app_avatar.py | 6 +- src/sentry/api/validators/doc_integration.py | 4 +- src/sentry/apidocs/build.py | 6 +- src/sentry/backup/comparators.py | 48 ++-- src/sentry/backup/crypto.py | 4 +- src/sentry/backup/imports.py | 4 +- src/sentry/backup/sanitize.py | 40 +-- src/sentry/backup/validate.py | 19 +- src/sentry/db/models/base.py | 5 +- src/sentry/integrations/github/client.py | 51 ++-- src/sentry/integrations/github/repository.py | 7 +- src/sentry/integrations/github/webhook.py | 5 +- src/sentry/integrations/jira/client.py | 4 +- src/sentry/integrations/jira_server/client.py | 4 +- src/sentry/integrations/slack/integration.py | 3 +- .../integrations/slack/requests/action.py | 3 +- .../integrations/slack/utils/rule_status.py | 5 +- src/sentry/integrations/vsts/integration.py | 3 +- src/sentry/lang/native/sources.py | 2 +- src/sentry/models/apiapplication.py | 5 +- src/sentry/models/apigrant.py | 5 +- src/sentry/models/apikey.py | 5 +- src/sentry/models/apitoken.py | 3 +- src/sentry/models/authenticator.py | 3 +- src/sentry/models/authidentity.py | 4 +- src/sentry/models/authprovider.py | 3 +- src/sentry/models/integrations/integration.py | 3 +- .../integrations/organization_integration.py | 3 +- .../integrations/project_integration.py | 5 +- src/sentry/models/integrations/sentry_app.py | 5 +- .../integrations/sentry_app_component.py | 5 +- src/sentry/models/notificationaction.py | 3 +- src/sentry/models/options/user_option.py | 3 +- src/sentry/models/relay.py | 7 +- src/sentry/models/repository.py | 5 +- src/sentry/models/servicehook.py | 5 +- src/sentry/models/user.py | 3 +- src/sentry/models/useremail.py | 5 +- src/sentry/models/userip.py | 5 +- src/sentry/runner/commands/backup.py | 4 +- src/sentry/tasks/derive_code_mappings.py | 3 +- src/sentry/testutils/helpers/backups.py | 34 +-- .../utils/appleconnect/appstore_connect.py | 15 +- src/sentry/utils/codecs.py | 9 +- src/sentry/utils/cursors.py | 6 +- src/sentry/utils/json.py | 16 +- src/sentry/utils/jwt.py | 13 +- .../api/endpoints/test_doc_integrations.py | 3 +- tests/sentry/backup/__init__.py | 7 +- tests/sentry/backup/test_comparators.py | 252 +++++++++--------- tests/sentry/backup/test_exports.py | 13 +- tests/sentry/backup/test_models.py | 4 +- tests/sentry/backup/test_rpc.py | 7 +- tests/sentry/backup/test_sanitize.py | 8 +- tests/sentry/backup/test_snapshots.py | 3 +- tests/sentry/backup/test_validate.py | 3 +- .../github/test_open_pr_comment.py | 4 +- 68 files changed, 358 insertions(+), 398 deletions(-) diff --git a/src/sentry/api/bases/doc_integrations.py 
b/src/sentry/api/bases/doc_integrations.py index 6ec2ba9b04ee0f..c4892d317d5197 100644 --- a/src/sentry/api/bases/doc_integrations.py +++ b/src/sentry/api/bases/doc_integrations.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Any + from django.http import Http404 from rest_framework.request import Request @@ -10,7 +12,6 @@ from sentry.api.validators.doc_integration import METADATA_PROPERTIES from sentry.auth.superuser import is_active_superuser from sentry.models.integrations.doc_integration import DocIntegration -from sentry.utils.json import JSONData from sentry.utils.sdk import configure_scope @@ -67,7 +68,7 @@ class DocIntegrationsBaseEndpoint(Endpoint): permission_classes = (DocIntegrationsAndStaffPermission,) - def generate_incoming_metadata(self, request: Request) -> JSONData: + def generate_incoming_metadata(self, request: Request) -> Any: return {k: v for k, v in request.json_body.items() if k in METADATA_PROPERTIES} diff --git a/src/sentry/api/endpoints/group_integration_details.py b/src/sentry/api/endpoints/group_integration_details.py index 996d87f5c4c763..02074b7dcba662 100644 --- a/src/sentry/api/endpoints/group_integration_details.py +++ b/src/sentry/api/endpoints/group_integration_details.py @@ -21,7 +21,6 @@ from sentry.shared_integrations.exceptions import IntegrationError, IntegrationFormError from sentry.signals import integration_issue_created, integration_issue_linked from sentry.types.activity import ActivityType -from sentry.utils.json import JSONData MISSING_FEATURE_MESSAGE = "Your organization does not have access to this feature." @@ -39,7 +38,7 @@ def __init__( def serialize( self, obj: RpcIntegration, attrs: Mapping[str, Any], user: User, **kwargs: Any - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: data = super().serialize(obj, attrs, user) if self.action == "link": diff --git a/src/sentry/api/endpoints/group_integrations.py b/src/sentry/api/endpoints/group_integrations.py index e4b088af116fd7..0347f80d7a3280 100644 --- a/src/sentry/api/endpoints/group_integrations.py +++ b/src/sentry/api/endpoints/group_integrations.py @@ -19,7 +19,6 @@ from sentry.models.user import User from sentry.services.hybrid_cloud.integration import RpcIntegration, integration_service from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs -from sentry.utils.json import JSONData class IntegrationIssueSerializer(IntegrationSerializer): @@ -63,7 +62,7 @@ def get_attrs( def serialize( self, obj: RpcIntegration, attrs: Mapping[str, Any], user: User, **kwargs: Any - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: data = super().serialize(obj, attrs, user) data["externalIssues"] = attrs.get("external_issues", []) return data diff --git a/src/sentry/api/endpoints/notifications/notification_actions.md b/src/sentry/api/endpoints/notifications/notification_actions.md index 7cf5169dd980bf..314dc5faf02e63 100644 --- a/src/sentry/api/endpoints/notifications/notification_actions.md +++ b/src/sentry/api/endpoints/notifications/notification_actions.md @@ -58,6 +58,6 @@ class SentryAuditLogRegistration(ActionRegistration): @classmethod def serialize_available( cls, organization: Organization, integrations: List[RpcIntegration] = None - ) -> List[JSONData]: + ) -> List[Any]: return [] ``` diff --git a/src/sentry/api/endpoints/organization_member/team_details.py b/src/sentry/api/endpoints/organization_member/team_details.py index 57df6a5ca08180..557b08d019e190 100644 --- 
a/src/sentry/api/endpoints/organization_member/team_details.py +++ b/src/sentry/api/endpoints/organization_member/team_details.py @@ -36,7 +36,6 @@ from sentry.roles import organization_roles, team_roles from sentry.roles.manager import TeamRole from sentry.utils import metrics -from sentry.utils.json import JSONData from . import can_admin_team, can_set_team_role @@ -51,7 +50,7 @@ class OrganizationMemberTeamSerializer(serializers.Serializer): class OrganizationMemberTeamDetailsSerializer(Serializer): def serialize( self, obj: OrganizationMemberTeam, attrs: Mapping[Any, Any], user: Any, **kwargs: Any - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: return { "isActive": obj.is_active, "teamRole": obj.role, diff --git a/src/sentry/api/serializers/base.py b/src/sentry/api/serializers/base.py index dc03002ae096bd..7a8982affab987 100644 --- a/src/sentry/api/serializers/base.py +++ b/src/sentry/api/serializers/base.py @@ -7,8 +7,6 @@ import sentry_sdk from django.contrib.auth.models import AnonymousUser -from sentry.utils.json import JSONData - logger = logging.getLogger(__name__) K = TypeVar("K") @@ -105,7 +103,7 @@ def get_attrs( def _serialize( self, obj: Any, attrs: Mapping[Any, Any], user: Any, **kwargs: Any - ) -> Mapping[str, JSONData] | None: + ) -> Mapping[str, Any] | None: try: return self.serialize(obj, attrs, user, **kwargs) except Exception: @@ -114,7 +112,7 @@ def _serialize( def serialize( self, obj: Any, attrs: Mapping[Any, Any], user: Any, **kwargs: Any - ) -> Mapping[str, JSONData]: + ) -> Mapping[str, Any]: """ Convert an arbitrary python object `obj` to an object that only contains primitives. diff --git a/src/sentry/api/serializers/models/doc_integration.py b/src/sentry/api/serializers/models/doc_integration.py index 8f237adb1143ba..c4e55457e642af 100644 --- a/src/sentry/api/serializers/models/doc_integration.py +++ b/src/sentry/api/serializers/models/doc_integration.py @@ -6,7 +6,6 @@ from sentry.models.integrations.doc_integration import DocIntegration from sentry.models.integrations.integration_feature import IntegrationFeature, IntegrationTypes from sentry.models.user import User -from sentry.utils.json import JSONData @register(DocIntegration) @@ -36,7 +35,7 @@ def serialize( attrs: Mapping[str, Any], user: User, **kwargs: Any, - ) -> JSONData: + ) -> Any: data = { "name": obj.name, "slug": obj.slug, diff --git a/src/sentry/api/serializers/models/doc_integration_avatar.py b/src/sentry/api/serializers/models/doc_integration_avatar.py index 315dfbfab1760f..9699b9e8ffa796 100644 --- a/src/sentry/api/serializers/models/doc_integration_avatar.py +++ b/src/sentry/api/serializers/models/doc_integration_avatar.py @@ -1,15 +1,15 @@ from collections.abc import MutableMapping +from typing import Any from sentry.api.serializers import Serializer, register from sentry.models.avatars.doc_integration_avatar import DocIntegrationAvatar -from sentry.utils.json import JSONData @register(DocIntegrationAvatar) class DocIntegrationAvatarSerializer(Serializer): def serialize( self, obj: DocIntegrationAvatar, attrs, user, **kwargs - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: return { "avatarType": obj.get_avatar_type_display(), "avatarUuid": obj.ident, diff --git a/src/sentry/api/serializers/models/group.py b/src/sentry/api/serializers/models/group.py index d730e2c20e5e7a..72f05d592f2c0c 100644 --- a/src/sentry/api/serializers/models/group.py +++ b/src/sentry/api/serializers/models/group.py @@ -55,7 +55,6 @@ from sentry.tsdb.snuba import 
SnubaTSDB from sentry.types.group import SUBSTATUS_TO_STR, PriorityLevel from sentry.utils.cache import cache -from sentry.utils.json import JSONData from sentry.utils.safe import safe_execute from sentry.utils.snuba import aliased_query, raw_query @@ -85,7 +84,7 @@ class GroupStatusDetailsResponseOptional(TypedDict, total=False): inRelease: str inCommit: str pendingEvents: int - info: JSONData + info: Any class GroupStatusDetailsResponse(GroupStatusDetailsResponseOptional): diff --git a/src/sentry/api/serializers/models/integration.py b/src/sentry/api/serializers/models/integration.py index d52606da19457b..c46080b695f5e6 100644 --- a/src/sentry/api/serializers/models/integration.py +++ b/src/sentry/api/serializers/models/integration.py @@ -15,7 +15,6 @@ integration_service, ) from sentry.shared_integrations.exceptions import ApiError -from sentry.utils.json import JSONData logger = logging.getLogger(__name__) @@ -54,7 +53,7 @@ def serialize_provider(provider: IntegrationProvider) -> Mapping[str, Any]: class IntegrationSerializer(Serializer): def serialize( self, obj: Integration | RpcIntegration, attrs: Mapping[str, Any], user: User, **kwargs: Any - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: provider = obj.get_provider() return { "id": str(obj.id), @@ -82,7 +81,7 @@ def serialize( user: User, include_config: bool = True, **kwargs: Any, - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: data = super().serialize(obj, attrs, user) if not include_config: @@ -137,7 +136,7 @@ def serialize( attrs: Mapping[str, Any], user: User, include_config: bool = True, - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: # XXX(epurkhiser): This is O(n) for integrations, especially since # we're using the IntegrationConfigSerializer which pulls in the # integration installation config object which very well may be making @@ -197,7 +196,7 @@ def serialize( class IntegrationProviderSerializer(Serializer): def serialize( self, obj: IntegrationProvider, attrs: Mapping[str, Any], user: User, **kwargs: Any - ) -> MutableMapping[str, JSONData]: + ) -> MutableMapping[str, Any]: org_slug = kwargs.pop("organization").slug metadata = obj.metadata metadata = metadata and metadata._asdict() or None diff --git a/src/sentry/api/serializers/models/relocation.py b/src/sentry/api/serializers/models/relocation.py index 30e8dfe8d1f5e0..6ad9d1723bcff6 100644 --- a/src/sentry/api/serializers/models/relocation.py +++ b/src/sentry/api/serializers/models/relocation.py @@ -9,7 +9,6 @@ from sentry.models.user import User from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.services.hybrid_cloud.user.service import user_service -from sentry.utils.json import JSONData @dataclasses.dataclass(frozen=True) @@ -48,7 +47,7 @@ def serialize( attrs: Any, user: User, **kwargs: Any, - ) -> Mapping[str, JSONData]: + ) -> Mapping[str, Any]: scheduled_at_pause_step = ( Relocation.Step(obj.scheduled_pause_at_step).name if obj.scheduled_pause_at_step is not None diff --git a/src/sentry/api/serializers/models/sentry_app_avatar.py b/src/sentry/api/serializers/models/sentry_app_avatar.py index 194f2e179ceb07..fde83225542f8e 100644 --- a/src/sentry/api/serializers/models/sentry_app_avatar.py +++ b/src/sentry/api/serializers/models/sentry_app_avatar.py @@ -1,15 +1,13 @@ from collections.abc import MutableMapping +from typing import Any from sentry.api.serializers import Serializer, register from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar -from 
sentry.utils.json import JSONData @register(SentryAppAvatar) class SentryAppAvatarSerializer(Serializer): - def serialize( - self, obj: SentryAppAvatar, attrs, user, **kwargs - ) -> MutableMapping[str, JSONData]: + def serialize(self, obj: SentryAppAvatar, attrs, user, **kwargs) -> MutableMapping[str, Any]: return { "avatarType": obj.get_avatar_type_display(), "avatarUuid": obj.ident, diff --git a/src/sentry/api/validators/doc_integration.py b/src/sentry/api/validators/doc_integration.py index 46bb8a26534cc1..16c6ebf324ac90 100644 --- a/src/sentry/api/validators/doc_integration.py +++ b/src/sentry/api/validators/doc_integration.py @@ -6,8 +6,6 @@ from jsonschema import Draft7Validator from jsonschema.exceptions import best_match -from sentry.utils.json import JSONData - logger = logging.getLogger(__name__) METADATA_SCHEMA: dict[str, Any] = { @@ -27,7 +25,7 @@ METADATA_PROPERTIES = list(METADATA_SCHEMA["properties"].keys()) -def validate_metadata_schema(instance: JSONData): +def validate_metadata_schema(instance: Any): v = Draft7Validator(METADATA_SCHEMA) if not v.is_valid(instance): raise best_match(v.iter_errors(instance)) diff --git a/src/sentry/apidocs/build.py b/src/sentry/apidocs/build.py index 8d6bf44d262811..fca8a7cff00098 100644 --- a/src/sentry/apidocs/build.py +++ b/src/sentry/apidocs/build.py @@ -1,7 +1,9 @@ +from typing import Any + from sentry.utils import json -def get_old_json_paths(filename: str) -> json.JSONData: +def get_old_json_paths(filename: str) -> Any: try: with open(filename) as f: old_raw_paths = json.load(f)["paths"] @@ -12,7 +14,7 @@ def get_old_json_paths(filename: str) -> json.JSONData: return old_raw_paths -def get_old_json_components(filename: str) -> json.JSONData: +def get_old_json_components(filename: str) -> Any: try: with open(filename) as f: old_raw_components = json.load(f)["components"] diff --git a/src/sentry/backup/comparators.py b/src/sentry/backup/comparators.py index 077b15e870c180..1962aa5e8a9965 100644 --- a/src/sentry/backup/comparators.py +++ b/src/sentry/backup/comparators.py @@ -6,6 +6,7 @@ from collections.abc import Callable from datetime import datetime, timezone from functools import lru_cache +from typing import Any from dateutil import parser from django.db import models @@ -18,7 +19,6 @@ ) from sentry.backup.findings import ComparatorFinding, ComparatorFindingKind, InstanceID from sentry.backup.helpers import Side -from sentry.utils.json import JSONData UNIX_EPOCH = unix_zero_date = datetime.fromtimestamp(0, timezone.utc).isoformat() @@ -53,7 +53,7 @@ class JSONScrubbingComparator(ABC): def __init__(self, *fields: str): self.fields = set(fields) - def check(self, side: Side, data: JSONData) -> None: + def check(self, side: Side, data: Any) -> None: """Ensure that we have received valid JSON data at runtime.""" if "model" not in data or not isinstance(data["model"], str): @@ -64,12 +64,12 @@ def check(self, side: Side, data: JSONData) -> None: raise RuntimeError(f"The {side.name} input must have a `fields` dictionary.") @abstractmethod - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: """An abstract method signature, to be implemented by inheriting classes with their own comparison logic. 
Implementations of this method MUST take care not to mutate the method's inputs!""" - def existence(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def existence(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: """Ensure that all tracked fields on either both models or neither.""" findings = [] @@ -102,8 +102,8 @@ def existence(self, on: InstanceID, left: JSONData, right: JSONData) -> list[Com def __scrub__( self, - left: JSONData, - right: JSONData, + left: Any, + right: Any, f: ( Callable[[list[str]], list[str]] | Callable[[list[str]], ScrubbedData] ) = lambda _: ScrubbedData(), @@ -142,8 +142,8 @@ def __scrub__( def scrub( self, - left: JSONData, - right: JSONData, + left: Any, + right: Any, ) -> None: self.__scrub__(left, right) @@ -168,7 +168,7 @@ class AutoSuffixComparator(JSONScrubbingComparator): becomes "my-org-1k1j"). This comparator is robust to such fields, and ensures that the left field entry is a strict prefix of the right.""" - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -196,7 +196,7 @@ class DateUpdatedComparator(JSONScrubbingComparator): """Comparator that ensures that the specified fields' value on the right input is an ISO-8601 date that is greater than (ie, occurs after) or equal to the specified field's left input.""" - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -223,7 +223,7 @@ class DatetimeEqualityComparator(JSONScrubbingComparator): exactly `.000` (ie, not milliseconds at all - what are the odds!). 
Because comparisons may fail in this case, we use a special comparator for these cases.""" - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -263,7 +263,7 @@ def set_primary_key_maps(self, left_pk_map: PrimaryKeyMap, right_pk_map: Primary self.left_pk_map = left_pk_map self.right_pk_map = right_pk_map - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -319,7 +319,7 @@ class ObfuscatingComparator(JSONScrubbingComparator, ABC): def __init__(self, *fields: str): super().__init__(*fields) - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -344,8 +344,8 @@ def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[Compa def scrub( self, - left: JSONData, - right: JSONData, + left: Any, + right: Any, ) -> None: super().__scrub__(left, right, self.truncate) @@ -403,7 +403,7 @@ class UserPasswordObfuscatingComparator(ObfuscatingComparator): def __init__(self): super().__init__("password") - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] # Error case: there is no importing action that can "claim" a user. @@ -496,12 +496,12 @@ class IgnoredComparator(JSONScrubbingComparator): sure you are validating them some other way! """ - def compare(self, _o: InstanceID, _l: JSONData, _r: JSONData) -> list[ComparatorFinding]: + def compare(self, _o: InstanceID, _l: Any, _r: Any) -> list[ComparatorFinding]: """Noop - there is nothing to compare once we've checked for existence.""" return [] - def existence(self, _o: InstanceID, _l: JSONData, _r: JSONData) -> list[ComparatorFinding]: + def existence(self, _o: InstanceID, _l: Any, _r: Any) -> list[ComparatorFinding]: """Noop - never compare existence for ignored fields, they're ignored after all.""" return [] @@ -514,7 +514,7 @@ def __init__(self, regex: re.Pattern, *fields: str): self.regex = regex super().__init__(*fields) - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -553,7 +553,7 @@ class EqualOrRemovedComparator(JSONScrubbingComparator): missing. 
""" - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -577,7 +577,7 @@ def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[Compa return findings - def existence(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def existence(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: """Ensure that all tracked fields on either both models or neither.""" findings = [] @@ -613,7 +613,7 @@ class SubscriptionIDComparator(RegexComparator): def __init__(self, *fields: str): super().__init__(re.compile("^\\d+/[0-9a-f]{32}$"), *fields) - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: # First, ensure that the two sides are not equivalent. findings = [] fields = sorted(self.fields) @@ -644,7 +644,7 @@ class UnorderedListComparator(JSONScrubbingComparator): """Comparator for fields that are lists of unordered elements, which simply orders them before doing the comparison.""" - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: findings = [] fields = sorted(self.fields) for f in fields: @@ -681,7 +681,7 @@ def __init__(self, *fields: str): *fields, ) - def compare(self, on: InstanceID, left: JSONData, right: JSONData) -> list[ComparatorFinding]: + def compare(self, on: InstanceID, left: Any, right: Any) -> list[ComparatorFinding]: # First, ensure that the two sides are not equivalent. 
findings = [] fields = sorted(self.fields) diff --git a/src/sentry/backup/crypto.py b/src/sentry/backup/crypto.py index 0002c119a6a8d6..bd69d849861e60 100644 --- a/src/sentry/backup/crypto.py +++ b/src/sentry/backup/crypto.py @@ -4,7 +4,7 @@ import tarfile from abc import ABC, abstractmethod from functools import lru_cache -from typing import IO, NamedTuple +from typing import IO, Any, NamedTuple from cryptography.fernet import Fernet from cryptography.hazmat.backends import default_backend @@ -115,7 +115,7 @@ def get_public_key_pem(self) -> bytes: return public_key.pem.encode("utf-8") -def create_encrypted_export_tarball(json_export: json.JSONData, encryptor: Encryptor) -> io.BytesIO: +def create_encrypted_export_tarball(json_export: Any, encryptor: Encryptor) -> io.BytesIO: """ Generate a tarball with 3 files: diff --git a/src/sentry/backup/imports.py b/src/sentry/backup/imports.py index d9e6ebdbc83ca0..91c37e9811ae71 100644 --- a/src/sentry/backup/imports.py +++ b/src/sentry/backup/imports.py @@ -3,7 +3,7 @@ import logging from collections.abc import Iterator from dataclasses import dataclass -from typing import IO +from typing import IO, Any from uuid import uuid4 from django.core import serializers @@ -315,7 +315,7 @@ def do_write( import_write_context: ImportWriteContext, pk_map: PrimaryKeyMap, model_name: NormalizedModelName, - json_data: json.JSONData, + json_data: Any, offset: int, ) -> None: model_relations = import_write_context.dependencies.get(model_name) diff --git a/src/sentry/backup/sanitize.py b/src/sentry/backup/sanitize.py index dae10110a84180..7e5f4392a8ff38 100644 --- a/src/sentry/backup/sanitize.py +++ b/src/sentry/backup/sanitize.py @@ -5,6 +5,7 @@ from datetime import UTC, datetime, timedelta, timezone from ipaddress import IPv4Address, IPv6Address, ip_address from random import choice, randint +from typing import Any from urllib.parse import urlparse, urlunparse from uuid import UUID, uuid4 @@ -13,7 +14,6 @@ from django.utils.text import slugify from sentry.utils import json -from sentry.utils.json import JSONData UPPER_CASE_HEX = {"A", "B", "C", "D", "E", "F"} UPPER_CASE_NON_HEX = { @@ -89,7 +89,7 @@ class SanitizableField: model: NormalizedModelName field: str - def validate_json_model(self, json: JSONData) -> None: + def validate_json_model(self, json: Any) -> None: """ Validates the JSON model is shaped the way we expect a serialized Django model to be, and that we have the right kind of model for this `SanitizableField`. Raises errors if there @@ -106,11 +106,11 @@ def validate_json_model(self, json: JSONData) -> None: return None -def _get_field_value(json: JSONData, field: SanitizableField) -> JSONData | None: +def _get_field_value(json: Any, field: SanitizableField) -> Any | None: return json.get("fields", {}).get(field.field, None) -def _set_field_value(json: JSONData, field: SanitizableField, value: JSONData) -> JSONData: +def _set_field_value(json: Any, field: SanitizableField, value: Any) -> Any: json.get("fields", {})[field.field] = value return value @@ -186,11 +186,11 @@ class Sanitizer: `set_name()`, etc), but all of these ultimately call into `set_string()` and `set_datetime()`. """ - json: JSONData + json: Any interned_strings: dict[str, str] interned_datetimes: dict[datetime, datetime] - def __init__(self, export: JSONData, datetime_offset: timedelta | None = None): + def __init__(self, export: Any, datetime_offset: timedelta | None = None): self.json = export self.interned_strings = {"": ""} # Always map empty string to itself. 
self.interned_datetimes = dict() @@ -280,7 +280,7 @@ def map_ip(self, old: str) -> str: self.interned_strings[old] = random_ipv6() return self.interned_strings[old] - def map_json(self, old_json: JSONData, new_json: JSONData) -> JSONData: + def map_json(self, old_json: Any, new_json: Any) -> Any: """ Maps a JSON object. If the `old` JSON object has already been seen, the already-generated value for that existing key will be used instead. If it has not, we'll generate a new one. @@ -399,7 +399,7 @@ def map_uuid(self, old: str) -> str: return self.map_string(old, lambda _: str(uuid4())) - def set_datetime(self, json: JSONData, field: SanitizableField) -> datetime | None: + def set_datetime(self, json: Any, field: SanitizableField) -> datetime | None: """ Replaces a datetime by replacing it with a different, but still correctly ordered, alternative. @@ -425,7 +425,7 @@ def set_datetime(self, json: JSONData, field: SanitizableField) -> datetime | No return None if parsed is None else _set_field_value(json, field, self.map_datetime(parsed)) - def set_email(self, json: JSONData, field: SanitizableField) -> str | None: + def set_email(self, json: Any, field: SanitizableField) -> str | None: """ Replaces an email in a manner that retains domain relationships - ie, all sanitized emails from domain `@foo` will now be from `@bar`. If the `old` string is not a valid email (ie, @@ -450,7 +450,7 @@ def set_email(self, json: JSONData, field: SanitizableField) -> str | None: def set_ip( self, - json: JSONData, + json: Any, field: SanitizableField, ) -> str | None: """ @@ -478,10 +478,10 @@ def set_ip( def set_json( self, - json: JSONData, + json: Any, field: SanitizableField, - replace_with: JSONData, - ) -> JSONData | None: + replace_with: Any, + ) -> Any | None: """ Replaces a JSON object with a randomly generated value. If the existing value of the JSON object has already been seen, the already-generated value for that existing key will be used @@ -505,7 +505,7 @@ def set_json( def set_name( self, - json: JSONData, + json: Any, field: SanitizableField, ) -> str | None: """ @@ -533,7 +533,7 @@ def set_name( return _set_field_value(json, field, self.map_name(old)) def set_name_and_slug_pair( - self, json: JSONData, name_field: SanitizableField, slug_field: SanitizableField + self, json: Any, name_field: SanitizableField, slug_field: SanitizableField ) -> tuple[str | None, str | None]: """ Replaces a pair of a proper noun name and its matching slug with some randomly generated @@ -574,7 +574,7 @@ def set_name_and_slug_pair( def set_string( self, - json: JSONData, + json: Any, field: SanitizableField, generate: Callable[[str], str] = default_string_sanitizer, ) -> str | None: @@ -603,7 +603,7 @@ def set_string( def set_url( self, - json: JSONData, + json: Any, field: SanitizableField, ) -> str | None: """ @@ -634,7 +634,7 @@ def set_url( def set_uuid( self, - json: JSONData, + json: Any, field: SanitizableField, ) -> str | None: """ @@ -664,7 +664,7 @@ def set_uuid( return _set_field_value(json, field, self.map_uuid(old)) -def sanitize(export: JSONData, datetime_offset: timedelta | None = None) -> JSONData: +def sanitize(export: Any, datetime_offset: timedelta | None = None) -> Any: """ Sanitize an entire export JSON. 
""" @@ -673,7 +673,7 @@ def sanitize(export: JSONData, datetime_offset: timedelta | None = None) -> JSON from sentry.backup.dependencies import NormalizedModelName, get_model sanitizer = Sanitizer(export, datetime_offset) - sanitized: list[JSONData] = [] + sanitized: list[Any] = [] for item in sanitizer.json: clone = deepcopy(item) model_name = NormalizedModelName(clone["model"]) diff --git a/src/sentry/backup/validate.py b/src/sentry/backup/validate.py index 6ee5b317e294fd..7189fc59c833b5 100644 --- a/src/sentry/backup/validate.py +++ b/src/sentry/backup/validate.py @@ -5,6 +5,7 @@ from collections import defaultdict from copy import deepcopy from difflib import unified_diff +from typing import Any from sentry.backup.comparators import ComparatorMap, ForeignKeyComparator, get_default_comparators from sentry.backup.dependencies import ImportKind, NormalizedModelName, PrimaryKeyMap, get_model @@ -15,7 +16,7 @@ InstanceID, ) from sentry.backup.helpers import Side -from sentry.utils.json import JSONData, JSONEncoder, better_default_encoder +from sentry.utils.json import JSONEncoder, better_default_encoder JSON_PRETTY_PRINTER = JSONEncoder( default=better_default_encoder, indent=2, ignore_nan=True, sort_keys=True @@ -23,8 +24,8 @@ def validate( - expect: JSONData, - actual: JSONData, + expect: Any, + actual: Any, comparators: ComparatorMap | None = None, ) -> ComparatorFindings: """Ensures that originally imported data correctly matches actual outputted data, and produces a @@ -43,7 +44,7 @@ def __init__(self): self.next_ordinal = 1 def assign( - self, obj: JSONData, ordinal_value: int | tuple, side: Side + self, obj: Any, ordinal_value: int | tuple, side: Side ) -> tuple[InstanceID, list[ComparatorFinding]]: """Assigns the next available ordinal to the supplied `obj` model.""" @@ -76,10 +77,10 @@ def assign( return (InstanceID(str(model_name), obj["ordinal"]), findings if findings else []) OrdinalCounters = dict[NormalizedModelName, OrdinalCounter] - ModelMap = dict[NormalizedModelName, OrderedDict[InstanceID, JSONData]] + ModelMap = dict[NormalizedModelName, OrderedDict[InstanceID, Any]] def build_model_map( - models: JSONData, side: Side, findings: ComparatorFindings + models: Any, side: Side, findings: ComparatorFindings ) -> tuple[ModelMap, OrdinalCounters]: """Does two things in tandem: builds a map of InstanceID -> JSON model, and simultaneously builds a map of model name -> number of ordinals assigned.""" @@ -88,7 +89,7 @@ def build_model_map( model_map: ModelMap = defaultdict(ordereddict) ordinal_counters: OrdinalCounters = defaultdict(OrdinalCounter) - need_ordering: dict[NormalizedModelName, dict[tuple, JSONData]] = defaultdict(dict) + need_ordering: dict[NormalizedModelName, dict[tuple, Any]] = defaultdict(dict) pks_to_usernames: dict[int, str] = dict() for model in models: @@ -156,8 +157,8 @@ def build_model_map( return (model_map, ordinal_counters) - def json_lines(obj: JSONData) -> list[str]: - """Take a JSONData object and pretty-print it as JSON.""" + def json_lines(obj: Any) -> list[str]: + """Take an object and pretty-print it as JSON.""" return JSON_PRETTY_PRINTER.encode(obj).splitlines() diff --git a/src/sentry/db/models/base.py b/src/sentry/db/models/base.py index 33523b8681ddec..48a807b41182df 100644 --- a/src/sentry/db/models/base.py +++ b/src/sentry/db/models/base.py @@ -20,7 +20,6 @@ from sentry.backup.scopes import ImportScope, RelocationScope from sentry.db.models.fields.uuid import UUIDField from sentry.silo.base import SiloLimit, SiloMode -from sentry.utils.json 
import JSONData from .fields.bounded import BoundedBigAutoField from .manager import BaseManager @@ -138,7 +137,7 @@ def get_relocation_scope(self) -> RelocationScope: return self.__relocation_scope__ @classmethod - def get_relocation_ordinal_fields(self, _json_model: JSONData) -> list[str] | None: + def get_relocation_ordinal_fields(self, _json_model: Any) -> list[str] | None: """ Retrieves the custom ordinal fields for models that may be re-used at import time (that is, the `write_relocation_import()` method may return an `ImportKind` besides @@ -196,7 +195,7 @@ def query_for_relocation_export(cls, q: models.Q, pk_map: PrimaryKeyMap) -> mode @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: """ Takes the export JSON representation of this model, and "sanitizes" any data that might be diff --git a/src/sentry/integrations/github/client.py b/src/sentry/integrations/github/client.py index 97526cef82e381..275b7f99aaa75b 100644 --- a/src/sentry/integrations/github/client.py +++ b/src/sentry/integrations/github/client.py @@ -34,7 +34,6 @@ from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders from sentry.utils import metrics from sentry.utils.cache import cache -from sentry.utils.json import JSONData logger = logging.getLogger("sentry.integrations.github") @@ -192,7 +191,7 @@ class GitHubClientMixin(GithubProxyClient): # Github gives us links to navigate, however, let's be safe in case we're fed garbage page_number_limit = 50 # With a default of 100 per page -> 5,000 items - def get_last_commits(self, repo: str, end_sha: str) -> Sequence[JSONData]: + def get_last_commits(self, repo: str, end_sha: str) -> Sequence[Any]: """ Return API request that fetches last ~30 commits see https://docs.github.com/en/rest/commits/commits#list-commits-on-a-repository @@ -200,32 +199,32 @@ def get_last_commits(self, repo: str, end_sha: str) -> Sequence[JSONData]: """ return self.get_cached(f"/repos/{repo}/commits", params={"sha": end_sha}) - def compare_commits(self, repo: str, start_sha: str, end_sha: str) -> JSONData: + def compare_commits(self, repo: str, start_sha: str, end_sha: str) -> Any: """ See https://docs.github.com/en/rest/commits/commits#compare-two-commits where start sha is oldest and end is most recent. 
""" return self.get_cached(f"/repos/{repo}/compare/{start_sha}...{end_sha}") - def repo_hooks(self, repo: str) -> Sequence[JSONData]: + def repo_hooks(self, repo: str) -> Sequence[Any]: """ https://docs.github.com/en/rest/webhooks/repos#list-repository-webhooks """ return self.get(f"/repos/{repo}/hooks") - def get_commits(self, repo: str) -> Sequence[JSONData]: + def get_commits(self, repo: str) -> Sequence[Any]: """ https://docs.github.com/en/rest/commits/commits#list-commits """ return self.get(f"/repos/{repo}/commits") - def get_commit(self, repo: str, sha: str) -> JSONData: + def get_commit(self, repo: str, sha: str) -> Any: """ https://docs.github.com/en/rest/commits/commits#get-a-commit """ return self.get_cached(f"/repos/{repo}/commits/{sha}") - def get_pullrequest_from_commit(self, repo: str, sha: str) -> JSONData: + def get_pullrequest_from_commit(self, repo: str, sha: str) -> Any: """ https://docs.github.com/en/rest/commits/commits#list-pull-requests-associated-with-a-commit @@ -233,7 +232,7 @@ def get_pullrequest_from_commit(self, repo: str, sha: str) -> JSONData: """ return self.get(f"/repos/{repo}/commits/{sha}/pulls") - def get_pullrequest(self, repo: str, pull_number: str) -> JSONData: + def get_pullrequest(self, repo: str, pull_number: str) -> Any: """ https://docs.github.com/en/rest/pulls/pulls#get-a-pull-request @@ -241,7 +240,7 @@ def get_pullrequest(self, repo: str, pull_number: str) -> JSONData: """ return self.get(f"/repos/{repo}/pulls/{pull_number}") - def get_pullrequest_files(self, repo: str, pull_number: str) -> JSONData: + def get_pullrequest_files(self, repo: str, pull_number: str) -> Any: """ https://docs.github.com/en/rest/pulls/pulls#list-pull-requests-files @@ -249,7 +248,7 @@ def get_pullrequest_files(self, repo: str, pull_number: str) -> JSONData: """ return self.get(f"/repos/{repo}/pulls/{pull_number}/files") - def get_repo(self, repo: str) -> JSONData: + def get_repo(self, repo: str) -> Any: """ https://docs.github.com/en/rest/repos/repos#get-a-repository """ @@ -263,8 +262,8 @@ def get_rate_limit(self, specific_resource: str = "core") -> GithubRateLimitInfo return GithubRateLimitInfo(self.get("/rate_limit")["resources"][specific_resource]) # https://docs.github.com/en/rest/git/trees#get-a-tree - def get_tree(self, repo_full_name: str, tree_sha: str) -> JSONData: - tree: JSONData = {} + def get_tree(self, repo_full_name: str, tree_sha: str) -> Any: + tree: Any = {} # We do not cache this call since it is a rather large object contents: dict[str, Any] = self.get( f"/repos/{repo_full_name}/git/trees/{tree_sha}", @@ -370,7 +369,7 @@ def _populate_trees_process_error(self, error: ApiError, extra: dict[str, str]) should_count_error = False txt = error.text if error.json: - json_data: JSONData = error.json + json_data: Any = error.json txt = json_data.get("message") # TODO: Add condition for getsentry/DataForThePeople @@ -473,7 +472,7 @@ def _populate_tree( ) return RepoTree(Repo(full_name, branch), repo_files) - def get_repositories(self, fetch_max_pages: bool = False) -> Sequence[JSONData]: + def get_repositories(self, fetch_max_pages: bool = False) -> Sequence[Any]: """ args: * fetch_max_pages - fetch as many repos as possible using pagination (slow) @@ -494,7 +493,7 @@ def get_repositories(self, fetch_max_pages: bool = False) -> Sequence[JSONData]: return [repo for repo in repos if not repo.get("archived")] # XXX: Find alternative approach - def search_repositories(self, query: bytes) -> Mapping[str, Sequence[JSONData]]: + def search_repositories(self, 
query: bytes) -> Mapping[str, Sequence[Any]]: """ Find repositories matching a query. NOTE: All search APIs share a rate limit of 30 requests/minute @@ -503,7 +502,7 @@ def search_repositories(self, query: bytes) -> Mapping[str, Sequence[JSONData]]: """ return self.get("/search/repositories", params={"q": query}) - def get_assignees(self, repo: str) -> Sequence[JSONData]: + def get_assignees(self, repo: str) -> Sequence[Any]: """ https://docs.github.com/en/rest/issues/assignees#list-assignees """ @@ -511,7 +510,7 @@ def get_assignees(self, repo: str) -> Sequence[JSONData]: def get_with_pagination( self, path: str, response_key: str | None = None, page_number_limit: int | None = None - ) -> Sequence[JSONData]: + ) -> Sequence[Any]: """ Github uses the Link header to provide pagination links. Github recommends using the provided link relations and not constructing our @@ -559,8 +558,8 @@ def get_with_pagination( page_number += 1 return output - def get_issues(self, repo: str) -> Sequence[JSONData]: - issues: Sequence[JSONData] = self.get(f"/repos/{repo}/issues") + def get_issues(self, repo: str) -> Sequence[Any]: + issues: Sequence[Any] = self.get(f"/repos/{repo}/issues") return issues def search_issues(self, query: str) -> Mapping[str, Sequence[Mapping[str, Any]]]: @@ -570,44 +569,44 @@ def search_issues(self, query: str) -> Mapping[str, Sequence[Mapping[str, Any]]] """ return self.get("/search/issues", params={"q": query}) - def get_issue(self, repo: str, number: str) -> JSONData: + def get_issue(self, repo: str, number: str) -> Any: """ https://docs.github.com/en/rest/issues/issues#get-an-issue """ return self.get(f"/repos/{repo}/issues/{number}") - def create_issue(self, repo: str, data: Mapping[str, Any]) -> JSONData: + def create_issue(self, repo: str, data: Mapping[str, Any]) -> Any: """ https://docs.github.com/en/rest/issues/issues#create-an-issue """ endpoint = f"/repos/{repo}/issues" return self.post(endpoint, data=data) - def create_comment(self, repo: str, issue_id: str, data: Mapping[str, Any]) -> JSONData: + def create_comment(self, repo: str, issue_id: str, data: Mapping[str, Any]) -> Any: """ https://docs.github.com/en/rest/issues/comments#create-an-issue-comment """ endpoint = f"/repos/{repo}/issues/{issue_id}/comments" return self.post(endpoint, data=data) - def update_comment(self, repo: str, comment_id: str, data: Mapping[str, Any]) -> JSONData: + def update_comment(self, repo: str, comment_id: str, data: Mapping[str, Any]) -> Any: endpoint = f"/repos/{repo}/issues/comments/{comment_id}" return self.patch(endpoint, data=data) - def get_comment_reactions(self, repo: str, comment_id: str) -> JSONData: + def get_comment_reactions(self, repo: str, comment_id: str) -> Any: endpoint = f"/repos/{repo}/issues/comments/{comment_id}" response = self.get(endpoint) reactions = response["reactions"] del reactions["url"] return reactions - def get_user(self, gh_username: str) -> JSONData: + def get_user(self, gh_username: str) -> Any: """ https://docs.github.com/en/rest/users/users#get-a-user """ return self.get(f"/users/{gh_username}") - def get_labels(self, repo: str) -> Sequence[JSONData]: + def get_labels(self, repo: str) -> Sequence[Any]: """ Fetches up to the first 100 labels for a repository. 
https://docs.github.com/en/rest/issues/labels#list-labels-for-a-repository diff --git a/src/sentry/integrations/github/repository.py b/src/sentry/integrations/github/repository.py index 5dcf90ea87cef0..5d093a9d5f32e1 100644 --- a/src/sentry/integrations/github/repository.py +++ b/src/sentry/integrations/github/repository.py @@ -11,7 +11,6 @@ from sentry.services.hybrid_cloud.integration import integration_service from sentry.services.hybrid_cloud.organization.model import RpcOrganization from sentry.shared_integrations.exceptions import ApiError, IntegrationError -from sentry.utils.json import JSONData WEBHOOK_EVENTS = ["push", "pull_request"] @@ -20,9 +19,7 @@ class GitHubRepositoryProvider(IntegrationRepositoryProvider): name = "GitHub" repo_provider = "github" - def _validate_repo( - self, client: Any, installation: IntegrationInstallation, repo: str - ) -> JSONData: + def _validate_repo(self, client: Any, installation: IntegrationInstallation, repo: str) -> Any: try: repo_data = client.get_repo(repo) except Exception as e: @@ -92,7 +89,7 @@ def _format_commits( self, client: Any, repo_name: str, - commit_list: JSONData, + commit_list: Any, ) -> Sequence[Mapping[str, Any]]: """Convert GitHub commits into our internal format diff --git a/src/sentry/integrations/github/webhook.py b/src/sentry/integrations/github/webhook.py index 2af1d45e39febc..107a4a962a6da6 100644 --- a/src/sentry/integrations/github/webhook.py +++ b/src/sentry/integrations/github/webhook.py @@ -46,7 +46,6 @@ from sentry.shared_integrations.exceptions import ApiError from sentry.tasks.integrations.github.open_pr_comment import open_pr_comment_workflow from sentry.utils import metrics -from sentry.utils.json import JSONData from .integration import GitHubIntegrationProvider from .repository import GitHubRepositoryProvider @@ -574,8 +573,8 @@ class GitHubIntegrationsWebhookEndpoint(Endpoint): "installation": InstallationEventWebhook, } - def get_handler(self, event_type: str) -> Callable[[], Callable[[JSONData], Any]] | None: - handler: Callable[[], Callable[[JSONData], Any]] | None = self._handlers.get(event_type) + def get_handler(self, event_type: str) -> Callable[[], Callable[[Any], Any]] | None: + handler: Callable[[], Callable[[Any], Any]] | None = self._handlers.get(event_type) return handler def is_valid_signature(self, method: str, body: bytes, secret: str, signature: str) -> bool: diff --git a/src/sentry/integrations/jira/client.py b/src/sentry/integrations/jira/client.py index 7dad5de1fa644c..61881de67e5c89 100644 --- a/src/sentry/integrations/jira/client.py +++ b/src/sentry/integrations/jira/client.py @@ -1,6 +1,7 @@ import datetime import logging import re +from typing import Any from urllib.parse import parse_qs, urlparse, urlsplit from requests import PreparedRequest @@ -11,7 +12,6 @@ from sentry.shared_integrations.exceptions import ApiError from sentry.utils import jwt from sentry.utils.http import absolute_uri -from sentry.utils.json import JSONData logger = logging.getLogger("sentry.integrations.jira") @@ -52,7 +52,7 @@ def __init__( self, integration: RpcIntegration, verify_ssl: bool, - logging_context: JSONData | None = None, + logging_context: Any | None = None, ): self.base_url = integration.metadata.get("base_url") self.shared_secret = integration.metadata.get("shared_secret") diff --git a/src/sentry/integrations/jira_server/client.py b/src/sentry/integrations/jira_server/client.py index a062217e549192..e60514d7b50f9f 100644 --- a/src/sentry/integrations/jira_server/client.py +++ 
b/src/sentry/integrations/jira_server/client.py @@ -2,6 +2,7 @@ import logging import re +from typing import Any from urllib.parse import parse_qsl, urlparse from django.urls import reverse @@ -17,7 +18,6 @@ from sentry.shared_integrations.exceptions import ApiError from sentry.utils import jwt from sentry.utils.http import absolute_uri -from sentry.utils.json import JSONData logger = logging.getLogger(__name__) @@ -57,7 +57,7 @@ def __init__( self, integration: RpcIntegration | Integration, identity: RpcIdentity, - logging_context: JSONData | None = None, + logging_context: Any | None = None, ): self.base_url = integration.metadata["base_url"] self.identity = identity diff --git a/src/sentry/integrations/slack/integration.py b/src/sentry/integrations/slack/integration.py index 1d65b996cfca30..44c61c7c7540f7 100644 --- a/src/sentry/integrations/slack/integration.py +++ b/src/sentry/integrations/slack/integration.py @@ -21,7 +21,6 @@ from sentry.shared_integrations.exceptions import ApiError, IntegrationError from sentry.tasks.integrations.slack import link_slack_user_identities from sentry.utils.http import absolute_uri -from sentry.utils.json import JSONData from .client import SlackClient from .notifications import SlackNotifyBasicMixin @@ -132,7 +131,7 @@ def get_pipeline_views(self) -> Sequence[View]: return [identity_pipeline_view] - def _get_team_info(self, access_token: str) -> JSONData: + def _get_team_info(self, access_token: str) -> Any: # Manually add authorization since this method is part of slack installation headers = {"Authorization": f"Bearer {access_token}"} try: diff --git a/src/sentry/integrations/slack/requests/action.py b/src/sentry/integrations/slack/requests/action.py index 8edc0930ebe55c..9aa08ecc7dfed5 100644 --- a/src/sentry/integrations/slack/requests/action.py +++ b/src/sentry/integrations/slack/requests/action.py @@ -8,7 +8,6 @@ from sentry.integrations.slack.requests.base import SlackRequest, SlackRequestError from sentry.models.group import Group from sentry.utils import json -from sentry.utils.json import JSONData class SlackActionRequest(SlackRequest): @@ -25,7 +24,7 @@ def type(self) -> str: return str(self.data.get("type")) @cached_property - def callback_data(self) -> JSONData: + def callback_data(self) -> Any: """ We store certain data in ``callback_id`` as JSON. 
It's a bit hacky, but it's the simplest way to store state without saving it on the Sentry diff --git a/src/sentry/integrations/slack/utils/rule_status.py b/src/sentry/integrations/slack/utils/rule_status.py index 876afe3d880bd8..d34d2f92af0822 100644 --- a/src/sentry/integrations/slack/utils/rule_status.py +++ b/src/sentry/integrations/slack/utils/rule_status.py @@ -1,12 +1,11 @@ from __future__ import annotations -from typing import Union, cast +from typing import Any, Union, cast from uuid import uuid4 from django.conf import settings from sentry.utils import json -from sentry.utils.json import JSONData from sentry.utils.redis import redis_clusters SLACK_FAILED_MESSAGE = ( @@ -35,7 +34,7 @@ def set_value( value = self._format_value(status, rule_id, error_message) self.client.set(self._get_redis_key(), f"{value}", ex=60 * 60) - def get_value(self) -> JSONData: + def get_value(self) -> Any: key = self._get_redis_key() value = self.client.get(key) return json.loads_experimental( diff --git a/src/sentry/integrations/vsts/integration.py b/src/sentry/integrations/vsts/integration.py index e5ecfc4b6501b3..b6e2f9037e1c36 100644 --- a/src/sentry/integrations/vsts/integration.py +++ b/src/sentry/integrations/vsts/integration.py @@ -44,7 +44,6 @@ from sentry.silo.base import SiloMode from sentry.tasks.integrations import migrate_repo from sentry.utils.http import absolute_uri -from sentry.utils.json import JSONData from sentry.web.helpers import render_to_response from .client import VstsApiClient, VstsSetupApiClient @@ -610,7 +609,7 @@ def get_account_from_id( return account return None - def get_accounts(self, access_token: str, user_id: int) -> JSONData | None: + def get_accounts(self, access_token: str, user_id: int) -> Any | None: url = ( f"https://app.vssps.visualstudio.com/_apis/accounts?memberId={user_id}&api-version=4.1" ) diff --git a/src/sentry/lang/native/sources.py b/src/sentry/lang/native/sources.py index e5d3128c96dafb..746986c709a654 100644 --- a/src/sentry/lang/native/sources.py +++ b/src/sentry/lang/native/sources.py @@ -434,7 +434,7 @@ def backfill_source(source, original_sources_by_id): def redact_source_secrets(config_sources: Any) -> Any: """ - Returns a JSONData with all of the secrets redacted from every source. + Returns JSON data with all of the secrets redacted from every source. The original value is not mutated in the process; A clone is created and returned by this function.
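The sources.py hunk above is one of the few places this patch touches prose rather than annotations: because `JSONData` is simply an alias for `Any`, the docstring reference disappears along with the type. A minimal sketch of the mechanical change applied throughout this patch, assuming only what the commit message states; `serialize_payload` is a hypothetical function, not code from this diff:

    from typing import Any

    # Per the commit message, the alias being removed is just Any, so deleting
    # it changes neither runtime behavior nor type-checking results.
    JSONData = Any

    # Before: importing the alias pulls sentry.utils.json into the module graph.
    def serialize_payload(payload: dict[str, str]) -> JSONData:
        return {"fields": payload}

    # After: the annotation is inlined and the import (and its cycle) disappears.
    def serialize_payload_inlined(payload: dict[str, str]) -> Any:
        return {"fields": payload}
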
diff --git a/src/sentry/models/apiapplication.py b/src/sentry/models/apiapplication.py index edb112fbb0d323..667e9f1f163cdf 100644 --- a/src/sentry/models/apiapplication.py +++ b/src/sentry/models/apiapplication.py @@ -1,5 +1,5 @@ import secrets -from typing import ClassVar, Self +from typing import Any, ClassVar, Self from urllib.parse import urlparse import petname @@ -20,7 +20,6 @@ ) from sentry.models.outbox import ControlOutbox, OutboxCategory, OutboxScope, outbox_context from sentry.types.region import find_all_region_names -from sentry.utils.json import JSONData def generate_name(): @@ -134,7 +133,7 @@ def get_audit_log_data(self): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/apigrant.py b/src/sentry/models/apigrant.py index a48b8514e3e4c0..3e98fd213fe22e 100644 --- a/src/sentry/models/apigrant.py +++ b/src/sentry/models/apigrant.py @@ -1,6 +1,6 @@ import secrets from datetime import timedelta -from typing import TypedDict +from typing import Any, TypedDict from django.db import models from django.utils import timezone @@ -10,7 +10,6 @@ from sentry.backup.sanitize import SanitizableField, Sanitizer from sentry.backup.scopes import RelocationScope from sentry.db.models import ArrayField, FlexibleForeignKey, Model, control_silo_model -from sentry.utils.json import JSONData DEFAULT_EXPIRATION = timedelta(minutes=10) @@ -86,7 +85,7 @@ def redirect_uri_allowed(self, uri): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/apikey.py b/src/sentry/models/apikey.py index 6217686daae909..58973831ff2292 100644 --- a/src/sentry/models/apikey.py +++ b/src/sentry/models/apikey.py @@ -1,5 +1,5 @@ import secrets -from typing import ClassVar, Self +from typing import Any, ClassVar, Self from django.db import models from django.utils import timezone @@ -14,7 +14,6 @@ from sentry.models.apiscopes import HasApiScopes from sentry.models.outbox import OutboxCategory from sentry.services.hybrid_cloud.replica import region_replica_service -from sentry.utils.json import JSONData # TODO(dcramer): pull in enum library @@ -86,7 +85,7 @@ def get_audit_log_data(self): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/apitoken.py b/src/sentry/models/apitoken.py index d5192a70870727..dc33eb84725bae 100644 --- a/src/sentry/models/apitoken.py +++ b/src/sentry/models/apitoken.py @@ -23,7 +23,6 @@ from sentry.models.outbox import OutboxCategory from sentry.types.region import find_all_region_names from sentry.types.token import AuthTokenType -from sentry.utils.json import JSONData DEFAULT_EXPIRATION 
= timedelta(days=30) TOKEN_REDACTED = "***REDACTED***" @@ -322,7 +321,7 @@ def write_relocation_import( @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/authenticator.py b/src/sentry/models/authenticator.py index ab67568057f85b..ba817e0ffb45e3 100644 --- a/src/sentry/models/authenticator.py +++ b/src/sentry/models/authenticator.py @@ -31,7 +31,6 @@ from sentry.db.models.outboxes import ControlOutboxProducingModel from sentry.models.outbox import ControlOutboxBase, OutboxCategory from sentry.types.region import find_regions_for_user -from sentry.utils.json import JSONData class AuthenticatorManager(BaseManager["Authenticator"]): @@ -197,7 +196,7 @@ def __repr__(self): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/authidentity.py b/src/sentry/models/authidentity.py index f58e6f8d9e84a0..7ee9b9a2b404aa 100644 --- a/src/sentry/models/authidentity.py +++ b/src/sentry/models/authidentity.py @@ -1,4 +1,5 @@ from collections.abc import Collection +from typing import Any from django.conf import settings from django.db import models @@ -12,7 +13,6 @@ from sentry.db.models.outboxes import ReplicatedControlModel from sentry.models.outbox import OutboxCategory from sentry.types.region import find_regions_for_orgs -from sentry.utils.json import JSONData @control_silo_model @@ -44,7 +44,7 @@ def handle_async_replication(self, region_name: str, shard_identifier: int) -> N @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/authprovider.py b/src/sentry/models/authprovider.py index f389345eea5cba..9cd836bcc3fc16 100644 --- a/src/sentry/models/authprovider.py +++ b/src/sentry/models/authprovider.py @@ -23,7 +23,6 @@ from sentry.db.models.outboxes import ReplicatedControlModel from sentry.models.outbox import ControlOutbox, OutboxCategory, OutboxScope from sentry.types.region import find_regions_for_orgs -from sentry.utils.json import JSONData logger = logging.getLogger("sentry.authprovider") @@ -228,7 +227,7 @@ def outboxes_for_mark_invalid_sso(self, user_id: int) -> list[ControlOutbox]: @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/integrations/integration.py b/src/sentry/models/integrations/integration.py index fe77df19d96000..248e2a8cbb7cd5 
100644 --- a/src/sentry/models/integrations/integration.py +++ b/src/sentry/models/integrations/integration.py @@ -17,7 +17,6 @@ from sentry.services.hybrid_cloud.organization import RpcOrganization, organization_service from sentry.signals import integration_added from sentry.types.region import find_regions_for_orgs -from sentry.utils.json import JSONData if TYPE_CHECKING: from sentry.integrations import ( @@ -164,7 +163,7 @@ def disable(self): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/integrations/organization_integration.py b/src/sentry/models/integrations/organization_integration.py index 289845df1b8966..b0322760d7ec50 100644 --- a/src/sentry/models/integrations/organization_integration.py +++ b/src/sentry/models/integrations/organization_integration.py @@ -15,7 +15,6 @@ from sentry.db.models.fields.jsonfield import JSONField from sentry.db.models.outboxes import ControlOutboxProducingManager, ReplicatedControlModel from sentry.models.outbox import OutboxCategory -from sentry.utils.json import JSONData @control_silo_model @@ -62,7 +61,7 @@ def handle_async_deletion( @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/integrations/project_integration.py b/src/sentry/models/integrations/project_integration.py index 4c5292ed6f0985..556869e109d8de 100644 --- a/src/sentry/models/integrations/project_integration.py +++ b/src/sentry/models/integrations/project_integration.py @@ -1,10 +1,11 @@ +from typing import Any + from sentry.backup.dependencies import NormalizedModelName, get_model_name from sentry.backup.sanitize import SanitizableField, Sanitizer from sentry.backup.scopes import RelocationScope from sentry.db.models import FlexibleForeignKey, Model, region_silo_model from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.fields.jsonfield import JSONField -from sentry.utils.json import JSONData @region_silo_model @@ -27,7 +28,7 @@ class Meta: @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/integrations/sentry_app.py b/src/sentry/models/integrations/sentry_app.py index fb67e189f839c8..f7c29fffa1edf7 100644 --- a/src/sentry/models/integrations/sentry_app.py +++ b/src/sentry/models/integrations/sentry_app.py @@ -2,7 +2,7 @@ import itertools import uuid from hashlib import sha256 -from typing import ClassVar +from typing import Any, ClassVar from django.db import models, router, transaction from django.db.models import QuerySet @@ -33,7 +33,6 @@ from sentry.models.outbox import ControlOutbox, OutboxCategory, 
OutboxScope, outbox_context from sentry.types.region import find_all_region_names from sentry.utils import metrics -from sentry.utils.json import JSONData # When a developer selects to receive " Webhooks" it really means # listening to a list of specific events. This is a mapping of what those @@ -240,7 +239,7 @@ def _disable(self): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/integrations/sentry_app_component.py b/src/sentry/models/integrations/sentry_app_component.py index 0cfc8018ea38a5..a2a851c7680b19 100644 --- a/src/sentry/models/integrations/sentry_app_component.py +++ b/src/sentry/models/integrations/sentry_app_component.py @@ -1,3 +1,5 @@ +from typing import Any + from django.db import models from sentry.backup.dependencies import NormalizedModelName, get_model_name @@ -5,7 +7,6 @@ from sentry.backup.scopes import RelocationScope from sentry.db.models import FlexibleForeignKey, Model, UUIDField, control_silo_model from sentry.db.models.fields.jsonfield import JSONField -from sentry.utils.json import JSONData @control_silo_model @@ -23,7 +24,7 @@ class Meta: @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/notificationaction.py b/src/sentry/models/notificationaction.py index 4d901c5ba62639..75bbe5c71256a1 100644 --- a/src/sentry/models/notificationaction.py +++ b/src/sentry/models/notificationaction.py @@ -15,7 +15,6 @@ from sentry.models.organization import Organization from sentry.services.hybrid_cloud.integration import RpcIntegration from sentry.types.integrations import ExternalProviders -from sentry.utils.json import JSONData logger = logging.getLogger(__name__) @@ -137,7 +136,7 @@ def validate_action(cls, data: NotificationActionInputData) -> None: @classmethod def serialize_available( cls, organization: Organization, integrations: list[RpcIntegration] | None = None - ) -> list[JSONData]: + ) -> list[Any]: """ Optional class method to serialize this registration's available actions to an organization. See NotificationActionsAvailableEndpoint. 
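Most of the model files in the remainder of this patch repeat one override verbatim, which is why the hunks above and below all truncate identically. A sketch of that repeated shape, reconstructed from the signatures visible in the diff; `ExampleSecretModel` and its `"token"` field are assumptions for illustration, not models from this patch:

    from typing import Any

    from sentry.backup.dependencies import NormalizedModelName, get_model_name
    from sentry.backup.sanitize import SanitizableField, Sanitizer
    from sentry.db.models import Model


    class ExampleSecretModel(Model):  # hypothetical; real models follow this shape
        @classmethod
        def sanitize_relocation_json(
            cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None
        ) -> None:
            # Resolve this class's normalized name unless the caller supplied one.
            model_name = get_model_name(cls) if model_name is None else model_name
            # The base implementation scrubs fields common to every model.
            super().sanitize_relocation_json(json, sanitizer, model_name)
            # Then scrub model-specific secrets; the "token" field name is assumed.
            sanitizer.set_string(json, SanitizableField(model_name, "token"))
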
diff --git a/src/sentry/models/options/user_option.py b/src/sentry/models/options/user_option.py index ab477098925af5..56b86e40f33fc8 100644 --- a/src/sentry/models/options/user_option.py +++ b/src/sentry/models/options/user_option.py @@ -13,7 +13,6 @@ from sentry.db.models.fields import PickledObjectField from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.manager import OptionManager, Value -from sentry.utils.json import JSONData if TYPE_CHECKING: from sentry.models.organization import Organization @@ -212,7 +211,7 @@ class Meta: __repr__ = sane_repr("user_id", "project_id", "organization_id", "key", "value") @classmethod - def get_relocation_ordinal_fields(self, json_model: JSONData) -> list[str] | None: + def get_relocation_ordinal_fields(self, json_model: Any) -> list[str] | None: # "global" user options (those with no organization and/or project scope) get a custom # ordinal; non-global ones use the default ordering. org_id = json_model["fields"].get("organization_id", None) diff --git a/src/sentry/models/relay.py b/src/sentry/models/relay.py index e9a6d2d38313a1..71ca762ee2f13a 100644 --- a/src/sentry/models/relay.py +++ b/src/sentry/models/relay.py @@ -1,3 +1,5 @@ +from typing import Any + from django.db import models from django.utils import timezone from django.utils.functional import cached_property @@ -8,7 +10,6 @@ from sentry.backup.sanitize import SanitizableField, Sanitizer from sentry.backup.scopes import RelocationScope from sentry.db.models import Model, region_silo_model -from sentry.utils.json import JSONData @region_silo_model @@ -29,7 +30,7 @@ class Meta: @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) @@ -82,7 +83,7 @@ def for_keys(keys): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/repository.py b/src/sentry/models/repository.py index b150dbe7a34f79..544fb77b52c3d8 100644 --- a/src/sentry/models/repository.py +++ b/src/sentry/models/repository.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Any + from django.db import models from django.db.models.signals import pre_delete from django.utils import timezone @@ -20,7 +22,6 @@ from sentry.db.models.fields.array import ArrayField from sentry.services.hybrid_cloud.user import RpcUser from sentry.signals import pending_delete -from sentry.utils.json import JSONData @region_silo_model @@ -99,7 +100,7 @@ def reset_pending_deletion_field_names( @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/servicehook.py b/src/sentry/models/servicehook.py index 
3fc15c5db278c6..e732d39ff0bbbf 100644 --- a/src/sentry/models/servicehook.py +++ b/src/sentry/models/servicehook.py @@ -2,7 +2,7 @@ import secrets from functools import cached_property from hashlib import sha256 -from typing import ClassVar, Self +from typing import Any, ClassVar, Self from uuid import uuid4 from django.db import models @@ -24,7 +24,6 @@ from sentry.db.models.fields.bounded import BoundedBigIntegerField from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.services.hybrid_cloud.app import app_service -from sentry.utils.json import JSONData SERVICE_HOOK_EVENTS = [ "event.alert", @@ -122,7 +121,7 @@ def add_project(self, project_or_project_id): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/user.py b/src/sentry/models/user.py index af3aa71e17302d..e87449871a9f62 100644 --- a/src/sentry/models/user.py +++ b/src/sentry/models/user.py @@ -50,7 +50,6 @@ from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders from sentry.types.region import find_all_region_names, find_regions_for_user from sentry.utils.http import absolute_uri -from sentry.utils.json import JSONData from sentry.utils.retries import TimedRetryPolicy audit_logger = logging.getLogger("sentry.audit.user") @@ -513,7 +512,7 @@ def do_write(): @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/useremail.py b/src/sentry/models/useremail.py index dc245782aeb574..d95bee6676d68a 100644 --- a/src/sentry/models/useremail.py +++ b/src/sentry/models/useremail.py @@ -3,7 +3,7 @@ from collections import defaultdict from collections.abc import Iterable, Mapping from datetime import timedelta -from typing import TYPE_CHECKING, ClassVar +from typing import TYPE_CHECKING, Any, ClassVar from django.conf import settings from django.db import models @@ -25,7 +25,6 @@ from sentry.services.hybrid_cloud.organization.model import RpcOrganization from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.types.region import find_regions_for_user -from sentry.utils.json import JSONData from sentry.utils.security import get_secure_token if TYPE_CHECKING: @@ -152,7 +151,7 @@ def write_relocation_import( @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/models/userip.py b/src/sentry/models/userip.py index f6e65bef33e023..9d7d63471e28e8 100644 --- a/src/sentry/models/userip.py +++ b/src/sentry/models/userip.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Any + from django.conf import settings from django.core.cache import cache from 
django.db import models @@ -19,7 +21,6 @@ from sentry.services.hybrid_cloud.log import UserIpEvent, log_service from sentry.services.hybrid_cloud.user import RpcUser from sentry.utils.geo import geo_by_addr -from sentry.utils.json import JSONData @control_silo_model @@ -112,7 +113,7 @@ def write_relocation_import( @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) diff --git a/src/sentry/runner/commands/backup.py b/src/sentry/runner/commands/backup.py index 46bff18359f785..becb3c6234d856 100644 --- a/src/sentry/runner/commands/backup.py +++ b/src/sentry/runner/commands/backup.py @@ -414,7 +414,7 @@ def load_data( else: input = src - # Now read the input string into memory as JSONData. + # Now read the input string into memory as json data. try: data = json.load(input) except json.JSONDecodeError: @@ -591,7 +591,7 @@ def sanitize_( else: input = src - # Now read the input string into memory as JSONData. + # Now read the input string into memory as json data. try: unsanitized_json = json.load(input) except json.JSONDecodeError: diff --git a/src/sentry/tasks/derive_code_mappings.py b/src/sentry/tasks/derive_code_mappings.py index 66204da9926573..eeb04deed2d650 100644 --- a/src/sentry/tasks/derive_code_mappings.py +++ b/src/sentry/tasks/derive_code_mappings.py @@ -18,7 +18,6 @@ from sentry.services.hybrid_cloud.integration import RpcOrganizationIntegration, integration_service from sentry.shared_integrations.exceptions import ApiError from sentry.tasks.base import instrumented_task -from sentry.utils.json import JSONData from sentry.utils.locking import UnableToAcquireLock from sentry.utils.safe import get_path @@ -33,7 +32,7 @@ def process_error(error: ApiError, extra: dict[str, str]) -> None: """Log known issues and report unknown ones""" if error.json: - json_data: JSONData = error.json + json_data: Any = error.json msg = json_data.get("message") else: msg = error.text diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py index 1b08548c0f2f72..d78d40d3a9e526 100644 --- a/src/sentry/testutils/helpers/backups.py +++ b/src/sentry/testutils/helpers/backups.py @@ -6,6 +6,7 @@ from datetime import UTC, datetime, timedelta from functools import cached_property, cmp_to_key from pathlib import Path +from typing import Any from unittest.mock import MagicMock from uuid import uuid4 @@ -100,7 +101,6 @@ from sentry.testutils.silo import assume_test_silo_mode from sentry.types.token import AuthTokenType from sentry.utils import json -from sentry.utils.json import JSONData __all__ = [ "export_to_file", @@ -131,7 +131,7 @@ def __init__(self, info: ComparatorFindings): self.info = info -def export_to_file(path: Path, scope: ExportScope, filter_by: set[str] | None = None) -> JSONData: +def export_to_file(path: Path, scope: ExportScope, filter_by: set[str] | None = None) -> Any: """ Helper function that exports the current state of the database to the specified file. """ @@ -178,7 +178,7 @@ def export_to_encrypted_tarball( scope: ExportScope, *, filter_by: set[str] | None = None, -) -> JSONData: +) -> Any: """ Helper function that exports the current state of the database to the specified encrypted tarball. 
@@ -276,7 +276,7 @@ def clear_database(*, reset_pks: bool = False): pass -def import_export_then_validate(method_name: str, *, reset_pks: bool = True) -> JSONData: +def import_export_then_validate(method_name: str, *, reset_pks: bool = True) -> Any: """ Test helper that validates that data imported from an export of the current state of the test database correctly matches the actual outputted export data. @@ -669,49 +669,49 @@ def create_exhaustive_instance(self, *, is_superadmin: bool = False): self.create_exhaustive_sentry_app("test app", owner, org) self.create_exhaustive_global_configs(owner) - def import_export_then_validate(self, out_name, *, reset_pks: bool = True) -> JSONData: + def import_export_then_validate(self, out_name, *, reset_pks: bool = True) -> Any: return import_export_then_validate(out_name, reset_pks=reset_pks) @cached_property - def _json_of_exhaustive_user_with_maximum_privileges(self) -> JSONData: + def _json_of_exhaustive_user_with_maximum_privileges(self) -> Any: with open(get_fixture_path("backup", "user-with-maximum-privileges.json")) as backup_file: return json.load(backup_file) - def json_of_exhaustive_user_with_maximum_privileges(self) -> JSONData: + def json_of_exhaustive_user_with_maximum_privileges(self) -> Any: return deepcopy(self._json_of_exhaustive_user_with_maximum_privileges) @cached_property - def _json_of_exhaustive_user_with_minimum_privileges(self) -> JSONData: + def _json_of_exhaustive_user_with_minimum_privileges(self) -> Any: with open(get_fixture_path("backup", "user-with-minimum-privileges.json")) as backup_file: return json.load(backup_file) - def json_of_exhaustive_user_with_minimum_privileges(self) -> JSONData: + def json_of_exhaustive_user_with_minimum_privileges(self) -> Any: return deepcopy(self._json_of_exhaustive_user_with_minimum_privileges) @cached_property - def _json_of_exhaustive_user_with_roles_no_superadmin(self) -> JSONData: + def _json_of_exhaustive_user_with_roles_no_superadmin(self) -> Any: with open(get_fixture_path("backup", "user-with-roles-no-superadmin.json")) as backup_file: return json.load(backup_file) - def json_of_exhaustive_user_with_roles_no_superadmin(self) -> JSONData: + def json_of_exhaustive_user_with_roles_no_superadmin(self) -> Any: return deepcopy(self._json_of_exhaustive_user_with_roles_no_superadmin) @cached_property - def _json_of_exhaustive_user_with_superadmin_no_roles(self) -> JSONData: + def _json_of_exhaustive_user_with_superadmin_no_roles(self) -> Any: with open(get_fixture_path("backup", "user-with-superadmin-no-roles.json")) as backup_file: return json.load(backup_file) - def json_of_exhaustive_user_with_superadmin_no_roles(self) -> JSONData: + def json_of_exhaustive_user_with_superadmin_no_roles(self) -> Any: return deepcopy(self._json_of_exhaustive_user_with_superadmin_no_roles) @staticmethod - def sort_in_memory_json(json_data: JSONData) -> JSONData: + def sort_in_memory_json(json_data: Any) -> Any: """ Helper function that takes an unordered set of JSON models and sorts them first in dependency order, and then, within each model, by ascending pk number. 
""" - def sort_by_model_then_pk(a: JSONData, b: JSONData) -> int: + def sort_by_model_then_pk(a: Any, b: Any) -> int: sorted_deps = sorted_dependencies() a_model = get_model(NormalizedModelName(a["model"])) b_model = get_model(NormalizedModelName(b["model"])) @@ -726,7 +726,7 @@ def sort_by_model_then_pk(a: JSONData, b: JSONData) -> int: key=cmp_to_key(sort_by_model_then_pk), ) - def generate_tmp_users_json(self) -> JSONData: + def generate_tmp_users_json(self) -> Any: """ Generates an in-memory JSON array of users with different combinations of admin privileges. """ @@ -745,7 +745,7 @@ def generate_tmp_users_json(self) -> JSONData: return self.sort_in_memory_json(max_user + min_user + roles_user + superadmin_user) - def generate_tmp_users_json_file(self, tmp_path: Path) -> JSONData: + def generate_tmp_users_json_file(self, tmp_path: Path) -> Any: """ Generates a file filled with users with different combinations of admin privileges. """ diff --git a/src/sentry/utils/appleconnect/appstore_connect.py b/src/sentry/utils/appleconnect/appstore_connect.py index 128424ce4e2a31..9353e401f68a45 100644 --- a/src/sentry/utils/appleconnect/appstore_connect.py +++ b/src/sentry/utils/appleconnect/appstore_connect.py @@ -15,7 +15,6 @@ from requests import Session, Timeout from sentry.utils import jwt, safe, sdk -from sentry.utils.json import JSONData logger = logging.getLogger(__name__) @@ -167,7 +166,7 @@ def _get_next_page(response_json: Mapping[str, Any]) -> str | None: def _get_appstore_info_paged( session: Session, credentials: AppConnectCredentials, url: str -) -> Generator[JSONData, None, None]: +) -> Generator[Any, None, None]: """Iterates through all the pages from a paged response. App Store Connect responses shares the general format: @@ -208,14 +207,14 @@ class _IncludedRelations: from this. """ - def __init__(self, page_data: JSONData): - self._items: dict[tuple[_RelType, _RelId], JSONData] = {} + def __init__(self, page_data: Any): + self._items: dict[tuple[_RelType, _RelId], Any] = {} for relation in page_data.get("included", []): rel_type = _RelType(relation["type"]) rel_id = _RelId(relation["id"]) self._items[(rel_type, rel_id)] = relation - def get_related(self, data: JSONData, relation: str) -> JSONData | None: + def get_related(self, data: Any, relation: str) -> Any | None: """Returns the named relation of the object. ``data`` must be a JSON object which has a ``relationships`` object and @@ -236,7 +235,7 @@ def get_related(self, data: JSONData, relation: str) -> JSONData | None: rel_id = _RelId(rel_ptr_data["id"]) return self._items[(rel_type, rel_id)] - def get_multiple_related(self, data: JSONData, relation: str) -> list[JSONData] | None: + def get_multiple_related(self, data: Any, relation: str) -> list[Any] | None: """Returns a list of all the related objects of the named relation type. 
This is like :meth:`get_related` but is for relation types which have a list of @@ -355,7 +354,7 @@ def get_build_info( return build_info -def _get_dsym_url(bundles: list[JSONData] | None) -> NoDsymUrl | str: +def _get_dsym_url(bundles: list[Any] | None) -> NoDsymUrl | str: """Returns the dSYMs URL from the extracted from the build bundles.""" # https://developer.apple.com/documentation/appstoreconnectapi/build/relationships/buildbundles # https://developer.apple.com/documentation/appstoreconnectapi/buildbundle/attributes @@ -372,7 +371,7 @@ def _get_dsym_url(bundles: list[JSONData] | None) -> NoDsymUrl | str: if not bundles: return NoDsymUrl.NOT_NEEDED - get_bundle_url: Callable[[JSONData], Any] = lambda bundle: safe.get_path( + get_bundle_url: Callable[[Any], Any] = lambda bundle: safe.get_path( bundle, "attributes", "dSYMUrl", default=NoDsymUrl.NOT_NEEDED ) diff --git a/src/sentry/utils/codecs.py b/src/sentry/utils/codecs.py index c5b46edab5993a..8b40b34ad39ddc 100644 --- a/src/sentry/utils/codecs.py +++ b/src/sentry/utils/codecs.py @@ -1,11 +1,10 @@ import zlib from abc import ABC, abstractmethod -from typing import Generic, TypeVar +from typing import Any, Generic, TypeVar import zstandard from sentry.utils import json -from sentry.utils.json import JSONData T = TypeVar("T") @@ -66,15 +65,15 @@ def decode(self, value: bytes) -> str: return value.decode(self.encoding) -class JSONCodec(Codec[JSONData, str]): +class JSONCodec(Codec[Any, str]): """ Encode/decode Python data structures to/from JSON-encoded strings. """ - def encode(self, value: JSONData) -> str: + def encode(self, value: Any) -> str: return str(json.dumps(value)) - def decode(self, value: str) -> JSONData: + def decode(self, value: str) -> Any: return json.loads(value, skip_trace=True) diff --git a/src/sentry/utils/cursors.py b/src/sentry/utils/cursors.py index 756291b52b7111..b58b62c5b25a5d 100644 --- a/src/sentry/utils/cursors.py +++ b/src/sentry/utils/cursors.py @@ -3,8 +3,6 @@ from collections.abc import Callable, Iterator, Sequence from typing import Any, Protocol, TypeVar, Union -from sentry.utils.json import JSONData - T = TypeVar("T") CursorValue = Union[float, int, str] @@ -14,7 +12,7 @@ def __call__(self, value: T, for_prev: bool = ...) -> CursorValue: ... -OnResultCallable = Callable[[Sequence[T]], JSONData] +OnResultCallable = Callable[[Sequence[T]], Any] class Cursor: @@ -251,7 +249,7 @@ def build_cursor( hits: int | None = None, max_hits: int | None = None, on_results: OnResultCallable[T] | None = None, -) -> CursorResult[T | JSONData]: +) -> CursorResult[T | Any]: if cursor is None: cursor = Cursor(0, 0, 0) diff --git a/src/sentry/utils/json.py b/src/sentry/utils/json.py index 22da8b7e06cd10..eb118a6d40d257 100644 --- a/src/sentry/utils/json.py +++ b/src/sentry/utils/json.py @@ -110,17 +110,14 @@ def iterencode(self, o: object, _one_shot: bool = False) -> Generator[str, None, ) -JSONData = Any # https://github.com/python/typing/issues/182 - - # NoReturn here is to make this a mypy error to pass kwargs, since they are currently silently dropped -def dump(value: JSONData, fp: IO[str], **kwargs: NoReturn) -> None: +def dump(value: Any, fp: IO[str], **kwargs: NoReturn) -> None: for chunk in _default_encoder.iterencode(value): fp.write(chunk) # NoReturn here is to make this a mypy error to pass kwargs, since they are currently silently dropped -def dumps(value: JSONData, escape: bool = False, **kwargs: NoReturn) -> str: +def dumps(value: Any, escape: bool = False, **kwargs: NoReturn) -> str: # Legacy use. 
Do not use. Use dumps_htmlsafe if escape: return _default_escaped_encoder.encode(value) @@ -128,14 +125,14 @@ def dumps(value: JSONData, escape: bool = False, **kwargs: NoReturn) -> str: # NoReturn here is to make this a mypy error to pass kwargs, since they are currently silently dropped -def load(fp: IO[str] | IO[bytes], **kwargs: NoReturn) -> JSONData: +def load(fp: IO[str] | IO[bytes], **kwargs: NoReturn) -> Any: return loads(fp.read()) # NoReturn here is to make this a mypy error to pass kwargs, since they are currently silently dropped def loads( value: str | bytes, use_rapid_json: bool = False, skip_trace: bool = False, **kwargs: NoReturn -) -> JSONData: +) -> Any: with contextlib.ExitStack() as ctx: if not skip_trace: ctx.enter_context(sentry_sdk.start_span(op="sentry.utils.json.loads")) @@ -147,7 +144,7 @@ def loads( # loads JSON with `orjson` or the default function depending on `option_name` # TODO: remove this once we're confident that orjson is working as expected -def loads_experimental(option_name: str, data: str | bytes, skip_trace: bool = False) -> JSONData: +def loads_experimental(option_name: str, data: str | bytes, skip_trace: bool = False) -> Any: from sentry.features.rollout import in_random_rollout if in_random_rollout(option_name): @@ -161,7 +158,7 @@ def loads_experimental(option_name: str, data: str | bytes, skip_trace: bool = F # dumps JSON with `orjson` or the default function depending on `option_name` # TODO: remove this when orjson experiment is successful -def dumps_experimental(option_name: str, data: JSONData) -> str: +def dumps_experimental(option_name: str, data: Any) -> str: from sentry.features.rollout import in_random_rollout if in_random_rollout(option_name): @@ -202,7 +199,6 @@ def prune_empty_keys(obj: Mapping[TKey, TValue | None] | None) -> dict[TKey, TVa __all__ = ( - "JSONData", "JSONDecodeError", "JSONEncoder", "dump", diff --git a/src/sentry/utils/jwt.py b/src/sentry/utils/jwt.py index 61957d0601d304..02e14332928b5b 100644 --- a/src/sentry/utils/jwt.py +++ b/src/sentry/utils/jwt.py @@ -6,6 +6,7 @@ from __future__ import annotations from collections.abc import Mapping +from typing import Any import jwt as pyjwt from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey @@ -17,12 +18,10 @@ ) from jwt import DecodeError -from sentry.utils.json import JSONData - __all__ = ["peek_claims", "decode", "encode", "authorization_header", "DecodeError"] -def peek_header(token: str) -> JSONData: +def peek_header(token: str) -> Any: """Returns the headers in the JWT token without validation. Headers are not signed and can thus be spoofed. You can use these to decide on what @@ -34,7 +33,7 @@ def peek_header(token: str) -> JSONData: return pyjwt.get_unverified_header(token.encode("UTF-8")) -def peek_claims(token: str) -> JSONData: +def peek_claims(token: str) -> Any: """Returns the claims (payload) in the JWT token without validation. These claims can be used to look up the correct key to use in :func:`decode`. @@ -50,7 +49,7 @@ def decode( *, # Force passing optional arguments by keyword audience: str | bool | None = None, algorithms: list[str] | None = None, -) -> dict[str, JSONData]: +) -> dict[str, Any]: """Returns the claims (payload) in the JWT token. This will raise an exception if the claims can not be validated with the provided key. 
@@ -84,11 +83,11 @@ def decode( def encode( - payload: JSONData, + payload: Any, key: str, *, # Force passing optional arguments by keyword algorithm: str = "HS256", - headers: JSONData | None = None, + headers: Any | None = None, ) -> str: """Encode a JWT token containing the provided payload/claims. diff --git a/tests/sentry/api/endpoints/test_doc_integrations.py b/tests/sentry/api/endpoints/test_doc_integrations.py index 92918238411593..de62db8061abab 100644 --- a/tests/sentry/api/endpoints/test_doc_integrations.py +++ b/tests/sentry/api/endpoints/test_doc_integrations.py @@ -11,7 +11,6 @@ from sentry.testutils.cases import APITestCase from sentry.testutils.helpers.options import override_options from sentry.testutils.silo import control_silo_test -from sentry.utils.json import JSONData class DocIntegrationsTest(APITestCase): @@ -30,7 +29,7 @@ def setUp(self): features=[2, 3, 4], ) - def get_avatars(self, response: Response) -> list[JSONData]: + def get_avatars(self, response: Response) -> list[Any]: return [doc.get("avatar") for doc in response.data] diff --git a/tests/sentry/backup/__init__.py b/tests/sentry/backup/__init__.py index 9e2dd6716cde8b..e25a3f2ae1306e 100644 --- a/tests/sentry/backup/__init__.py +++ b/tests/sentry/backup/__init__.py @@ -2,7 +2,7 @@ from collections.abc import Callable from functools import wraps -from typing import Literal +from typing import Any, Literal from django.db import models @@ -15,12 +15,9 @@ sorted_dependencies, ) from sentry.backup.helpers import DatetimeSafeDjangoJSONEncoder -from sentry.utils.json import JSONData -def verify_models_in_output( - expected_models: list[type[models.Model]], actual_json: JSONData -) -> None: +def verify_models_in_output(expected_models: list[type[models.Model]], actual_json: Any) -> None: """ A helper context manager that checks that every model that a test "targeted" was actually seen in the output, ensuring that we're actually testing the thing we think we are. 
Additionally, diff --git a/tests/sentry/backup/test_comparators.py b/tests/sentry/backup/test_comparators.py index f3403b27dd5038..f3cbaad8860099 100644 --- a/tests/sentry/backup/test_comparators.py +++ b/tests/sentry/backup/test_comparators.py @@ -1,4 +1,5 @@ from copy import deepcopy +from typing import Any import pytest @@ -21,13 +22,12 @@ ) from sentry.backup.dependencies import ImportKind, NormalizedModelName, PrimaryKeyMap, dependencies from sentry.backup.findings import ComparatorFindingKind, InstanceID -from sentry.utils.json import JSONData def test_good_comparator_both_sides_existing(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -41,7 +41,7 @@ def test_good_comparator_both_sides_existing(): def test_good_comparator_neither_side_existing(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -53,7 +53,7 @@ def test_good_comparator_neither_side_existing(): def test_bad_comparator_only_one_side_existing(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -61,7 +61,7 @@ def test_bad_comparator_only_one_side_existing(): "my_date_field": "2023-06-22T23:12:34.567Z", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -95,7 +95,7 @@ def test_bad_comparator_only_one_side_existing(): def test_good_comparator_both_sides_null(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - nulled: JSONData = { + nulled: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -109,7 +109,7 @@ def test_good_comparator_both_sides_null(): def test_bad_comparator_only_one_side_null(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -117,7 +117,7 @@ def test_bad_comparator_only_one_side_null(): "my_date_field": "2023-06-22T23:12:34.567Z", }, } - nulled: JSONData = { + nulled: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -153,7 +153,7 @@ def test_bad_comparator_only_one_side_null(): def test_good_comparator_one_side_null_other_side_missing(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - nulled: JSONData = { + nulled: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -161,7 +161,7 @@ def test_good_comparator_one_side_null_other_side_missing(): "my_date_field": None, }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -177,7 +177,7 @@ def test_good_comparator_one_side_null_other_side_missing(): def test_good_auto_suffix_comparator(): cmp = AutoSuffixComparator("same", "suffixed") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -186,7 +186,7 @@ def test_good_auto_suffix_comparator(): "suffixed": "foo-bar", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -201,7 +201,7 @@ def test_good_auto_suffix_comparator(): def test_bad_auto_suffix_comparator(): cmp = AutoSuffixComparator("same", "suffixed") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -210,7 +210,7 @@ def test_bad_auto_suffix_comparator(): "suffixed": "foo-bar", }, } - right: JSONData = 
{ + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -243,7 +243,7 @@ def test_bad_auto_suffix_comparator(): def test_good_auto_suffix_comparator_existence(): cmp = AutoSuffixComparator("auto_suffix_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -251,7 +251,7 @@ def test_good_auto_suffix_comparator_existence(): "auto_suffix_field": "foo-bar", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -272,7 +272,7 @@ def test_good_auto_suffix_comparator_existence(): def test_good_auto_suffix_comparator_scrubbed(): cmp = AutoSuffixComparator("same", "suffixed") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -281,7 +281,7 @@ def test_good_auto_suffix_comparator_scrubbed(): "suffixed": "foo-bar", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -303,7 +303,7 @@ def test_good_auto_suffix_comparator_scrubbed(): def test_good_datetime_equality_comparator(): cmp = DatetimeEqualityComparator("my_date_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -311,7 +311,7 @@ def test_good_datetime_equality_comparator(): "my_date_field": "2023-06-22T23:00:00.123Z", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -325,7 +325,7 @@ def test_good_datetime_equality_comparator(): def test_bad_datetime_equality_comparator(): cmp = DatetimeEqualityComparator("my_date_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -333,7 +333,7 @@ def test_bad_datetime_equality_comparator(): "my_date_field": "2023-06-22T00:00:00.000Z", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -358,7 +358,7 @@ def test_bad_datetime_equality_comparator(): def test_good_date_updated_comparator(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -366,7 +366,7 @@ def test_good_date_updated_comparator(): "my_date_field": "2023-06-22T23:00:00.123Z", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -380,7 +380,7 @@ def test_good_date_updated_comparator(): def test_bad_date_updated_comparator(): cmp = DateUpdatedComparator("my_date_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -388,7 +388,7 @@ def test_bad_date_updated_comparator(): "my_date_field": "2023-06-22T23:12:34.567Z", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -431,7 +431,7 @@ def test_good_email_obfuscating_comparator(): def test_bad_email_obfuscating_comparator(): cmp = EmailObfuscatingComparator("one_email", "many_emails") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -443,7 +443,7 @@ def test_bad_email_obfuscating_comparator(): ], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -479,7 +479,7 @@ def test_bad_email_obfuscating_comparator(): def test_good_email_obfuscating_comparator_existence(): cmp = EmailObfuscatingComparator("email_obfuscating_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -487,7 +487,7 @@ def 
test_good_email_obfuscating_comparator_existence(): "email_obfuscating_field": "brian@testing.com", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -508,7 +508,7 @@ def test_good_email_obfuscating_comparator_existence(): def test_good_email_obfuscating_comparator_scrubbed(): cmp = EmailObfuscatingComparator("one_email", "many_emails") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -520,7 +520,7 @@ def test_good_email_obfuscating_comparator_scrubbed(): ], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -551,7 +551,7 @@ def test_good_email_obfuscating_comparator_scrubbed(): def test_good_equal_or_removed_comparator_equal(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -567,7 +567,7 @@ def test_good_equal_or_removed_comparator_equal(): def test_good_equal_or_removed_comparator_not_equal(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -575,7 +575,7 @@ def test_good_equal_or_removed_comparator_not_equal(): "my_field": "foo", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -603,7 +603,7 @@ def test_good_equal_or_removed_comparator_not_equal(): def test_good_equal_or_removed_comparator_neither_side_existing(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -615,7 +615,7 @@ def test_good_equal_or_removed_comparator_neither_side_existing(): def test_good_equal_or_removed_comparator_only_right_side_missing(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -623,7 +623,7 @@ def test_good_equal_or_removed_comparator_only_right_side_missing(): "my_field": "foo", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -636,7 +636,7 @@ def test_good_equal_or_removed_comparator_only_right_side_missing(): def test_bad_equal_or_removed_comparator_only_left_side_missing(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -644,7 +644,7 @@ def test_bad_equal_or_removed_comparator_only_left_side_missing(): "my_field": "foo", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -666,7 +666,7 @@ def test_bad_equal_or_removed_comparator_only_left_side_missing(): def test_good_equal_or_removed_comparator_both_sides_nulled(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - nulled: JSONData = { + nulled: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -680,7 +680,7 @@ def test_good_equal_or_removed_comparator_both_sides_nulled(): def test_good_equal_or_removed_comparator_only_right_side_nulled(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -688,7 +688,7 @@ def test_good_equal_or_removed_comparator_only_right_side_nulled(): "my_field": "foo", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -703,7 +703,7 @@ def 
test_good_equal_or_removed_comparator_only_right_side_nulled(): def test_bad_equal_or_removed_comparator_only_left_side_nulled(): cmp = EqualOrRemovedComparator("my_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -711,7 +711,7 @@ def test_bad_equal_or_removed_comparator_only_left_side_nulled(): "my_field": "foo", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -735,7 +735,7 @@ def test_bad_equal_or_removed_comparator_only_left_side_nulled(): def test_good_hash_obfuscating_comparator(): cmp = HashObfuscatingComparator("one_hash", "many_hashes") id = InstanceID("sentry.test", 0) - model: JSONData = { + model: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -753,7 +753,7 @@ def test_good_hash_obfuscating_comparator(): def test_bad_hash_obfuscating_comparator(): cmp = HashObfuscatingComparator("one_hash", "many_hashes") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -765,7 +765,7 @@ def test_bad_hash_obfuscating_comparator(): ], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -801,7 +801,7 @@ def test_bad_hash_obfuscating_comparator(): def test_good_hash_obfuscating_comparator_existence(): cmp = HashObfuscatingComparator("hash_obfuscating_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -809,7 +809,7 @@ def test_good_hash_obfuscating_comparator_existence(): "hash_obfuscating_field": "foo", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -830,7 +830,7 @@ def test_good_hash_obfuscating_comparator_existence(): def test_good_hash_obfuscating_comparator_scrubbed(): cmp = HashObfuscatingComparator("one_hash", "many_hashes") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -842,7 +842,7 @@ def test_good_hash_obfuscating_comparator_scrubbed(): ], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -880,7 +880,7 @@ def test_good_foreign_key_comparator(): left_pk_map.insert(NormalizedModelName("sentry.user"), 12, 1, ImportKind.Inserted) right_pk_map = PrimaryKeyMap() right_pk_map.insert(NormalizedModelName("sentry.user"), 34, 1, ImportKind.Inserted) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -892,7 +892,7 @@ def test_good_foreign_key_comparator(): "is_verified": True, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -915,7 +915,7 @@ def test_good_foreign_key_comparator_existence(): {k: v.model for k, v in deps[NormalizedModelName("sentry.UserEmail")].foreign_keys.items()} ) id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -927,7 +927,7 @@ def test_good_foreign_key_comparator_existence(): "is_verified": True, }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -956,7 +956,7 @@ def test_good_foreign_key_comparator_scrubbed(): cmp = ForeignKeyComparator( {k: v.model for k, v in deps[NormalizedModelName("sentry.UserEmail")].foreign_keys.items()} ) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -987,7 +987,7 @@ def test_bad_foreign_key_comparator_set_primary_key_maps_not_called(): left_pk_map.insert(NormalizedModelName("sentry.user"), 12, 1, ImportKind.Inserted) right_pk_map = 
PrimaryKeyMap() right_pk_map.insert(NormalizedModelName("sentry.user"), 34, 1, ImportKind.Inserted) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -999,7 +999,7 @@ def test_bad_foreign_key_comparator_set_primary_key_maps_not_called(): "is_verified": True, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1026,7 +1026,7 @@ def test_bad_foreign_key_comparator_unequal_mapping(): left_pk_map.insert(NormalizedModelName("sentry.user"), 12, 1, ImportKind.Inserted) right_pk_map = PrimaryKeyMap() right_pk_map.insert(NormalizedModelName("sentry.user"), 34, 2, ImportKind.Inserted) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1038,7 +1038,7 @@ def test_bad_foreign_key_comparator_unequal_mapping(): "is_verified": True, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1074,7 +1074,7 @@ def test_bad_foreign_key_comparator_missing_mapping(): id = InstanceID("sentry.useremail", 0) left_pk_map = PrimaryKeyMap() right_pk_map = PrimaryKeyMap() - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1086,7 +1086,7 @@ def test_bad_foreign_key_comparator_missing_mapping(): "is_verified": True, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1125,7 +1125,7 @@ def test_bad_foreign_key_comparator_missing_mapping(): def test_good_ignored_comparator(): cmp = IgnoredComparator("ignored_field") id = InstanceID("sentry.test", 0) - model: JSONData = { + model: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1140,7 +1140,7 @@ def test_good_ignored_comparator(): def test_good_ignored_comparator_existence(): cmp = IgnoredComparator("ignored_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1148,7 +1148,7 @@ def test_good_ignored_comparator_existence(): "ignored_field": "foo", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1160,7 +1160,7 @@ def test_good_ignored_comparator_existence(): def test_good_ignored_comparator_scrubbed(): cmp = IgnoredComparator("ignored_field") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1183,7 +1183,7 @@ def test_good_ignored_comparator_scrubbed(): def test_good_secret_hex_comparator(): cmp = SecretHexComparator(8, "equal", "unequal") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1192,7 +1192,7 @@ def test_good_secret_hex_comparator(): "unequal": "3e04f551c7219550", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1207,7 +1207,7 @@ def test_good_secret_hex_comparator(): def test_bad_secret_hex_comparator(): cmp = SecretHexComparator(8, "same", "invalid_left", "invalid_right") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1217,7 +1217,7 @@ def test_bad_secret_hex_comparator(): "invalid_right": "50a7e2c7e3ca35fc", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1254,7 +1254,7 @@ def test_bad_secret_hex_comparator(): def test_good_secret_hex_comparator_scrubbed(): cmp = SecretHexComparator(8, "secret_hex_field") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1262,7 +1262,7 @@ def test_good_secret_hex_comparator_scrubbed(): "secret_hex_field": "3e04f551c7219550", }, } - right: JSONData = 
{ + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1281,7 +1281,7 @@ def test_good_secret_hex_comparator_scrubbed(): def test_good_subscription_id_comparator(): cmp = SubscriptionIDComparator("subscription_id_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1289,7 +1289,7 @@ def test_good_subscription_id_comparator(): "subscription_id_field": "0/12363aae153911eeac590242ac130004", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1303,7 +1303,7 @@ def test_good_subscription_id_comparator(): def test_bad_subscription_id_comparator(): cmp = SubscriptionIDComparator("same", "invalid_left", "invalid_right") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1313,7 +1313,7 @@ def test_bad_subscription_id_comparator(): "invalid_right": "0/12363aae153911eeac590242ac130004", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1360,7 +1360,7 @@ def test_bad_subscription_id_comparator(): def test_good_subscription_id_comparator_existence(): cmp = SubscriptionIDComparator("subscription_id_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1368,7 +1368,7 @@ def test_good_subscription_id_comparator_existence(): "subscription_id_field": "0/45663aae153911eeac590242acabc123", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1389,7 +1389,7 @@ def test_good_subscription_id_comparator_existence(): def test_good_subscription_id_comparator_scrubbed(): cmp = SubscriptionIDComparator("subscription_id_field") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1397,7 +1397,7 @@ def test_good_subscription_id_comparator_scrubbed(): "subscription_id_field": "0/12363aae153911eeac590242ac130004", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1416,7 +1416,7 @@ def test_good_subscription_id_comparator_scrubbed(): def test_good_unordered_list_comparator(): cmp = UnorderedListComparator("ordered", "unordered") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1425,7 +1425,7 @@ def test_good_unordered_list_comparator(): "unordered": ["b", "a", "c"], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1440,7 +1440,7 @@ def test_good_unordered_list_comparator(): def test_bad_unordered_list_comparator(): cmp = UnorderedListComparator("unequal") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1448,7 +1448,7 @@ def test_bad_unordered_list_comparator(): "unequal": ["b", "a"], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1474,7 +1474,7 @@ def test_bad_unordered_list_comparator(): def test_good_unordered_list_comparator_existence(): cmp = UnorderedListComparator("unordered_list_field") id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1482,7 +1482,7 @@ def test_good_unordered_list_comparator_existence(): "unordered_list_field": ["a", "b", "c"], }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1503,7 +1503,7 @@ def test_good_unordered_list_comparator_existence(): def 
test_good_unordered_list_comparator_scrubbed(): cmp = UnorderedListComparator("unordered_list_field") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1511,7 +1511,7 @@ def test_good_unordered_list_comparator_scrubbed(): "unordered_list_field": ["a", "b", "c"], }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1530,7 +1530,7 @@ def test_good_unordered_list_comparator_scrubbed(): def test_good_uuid4_comparator(): cmp = UUID4Comparator("guid_field") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1538,7 +1538,7 @@ def test_good_uuid4_comparator(): "guid_field": "4c79eea3-8a71-4b99-b291-1f6a906fbb47", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1552,7 +1552,7 @@ def test_good_uuid4_comparator(): def test_bad_uuid4_comparator(): cmp = UUID4Comparator("same", "invalid_left", "invalid_right") id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1562,7 +1562,7 @@ def test_bad_uuid4_comparator(): "invalid_right": "bb41a040-b413-4b89-aa03-179470d9ee05", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1608,7 +1608,7 @@ def test_bad_uuid4_comparator(): def test_good_uuid4_comparator_scrubbed(): cmp = UUID4Comparator("guid_field") - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1616,7 +1616,7 @@ def test_good_uuid4_comparator_scrubbed(): "guid_field": "4c79eea3-8a71-4b99-b291-1f6a906fbb47", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1635,7 +1635,7 @@ def test_good_uuid4_comparator_scrubbed(): def test_good_user_password_obfuscating_comparator_claimed_user(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - model: JSONData = { + model: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1652,7 +1652,7 @@ def test_good_user_password_obfuscating_comparator_claimed_user(): def test_good_user_password_obfuscating_comparator_claimed_user_never_changed_password(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1663,7 +1663,7 @@ def test_good_user_password_obfuscating_comparator_claimed_user_never_changed_pa "is_password_expired": True, }, } - nulled: JSONData = { + nulled: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1681,7 +1681,7 @@ def test_good_user_password_obfuscating_comparator_claimed_user_never_changed_pa def test_good_user_password_obfuscating_comparator_newly_unclaimed_user(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1692,7 +1692,7 @@ def test_good_user_password_obfuscating_comparator_newly_unclaimed_user(): "is_password_expired": True, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1709,7 +1709,7 @@ def test_good_user_password_obfuscating_comparator_newly_unclaimed_user(): def test_good_user_password_obfuscating_comparator_newly_unclaimed_user_never_changed_password(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1720,7 +1720,7 @@ def test_good_user_password_obfuscating_comparator_newly_unclaimed_user_never_ch 
"is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1737,7 +1737,7 @@ def test_good_user_password_obfuscating_comparator_newly_unclaimed_user_never_ch def test_good_user_password_obfuscating_comparator_already_unclaimed_user(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1748,7 +1748,7 @@ def test_good_user_password_obfuscating_comparator_already_unclaimed_user(): "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1765,7 +1765,7 @@ def test_good_user_password_obfuscating_comparator_already_unclaimed_user(): def test_bad_user_password_obfuscating_comparator_claimed_user_password_changed(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1776,7 +1776,7 @@ def test_bad_user_password_obfuscating_comparator_claimed_user_password_changed( "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1804,7 +1804,7 @@ def test_bad_user_password_obfuscating_comparator_claimed_user_password_changed( def test_bad_user_password_obfuscating_comparator_newly_unclaimed_user_password_unchanged(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1815,7 +1815,7 @@ def test_bad_user_password_obfuscating_comparator_newly_unclaimed_user_password_ "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1842,7 +1842,7 @@ def test_bad_user_password_obfuscating_comparator_newly_unclaimed_user_password_ def test_bad_user_password_obfuscating_comparator_already_unclaimed_user_password_unchanged(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1853,7 +1853,7 @@ def test_bad_user_password_obfuscating_comparator_already_unclaimed_user_passwor "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1880,7 +1880,7 @@ def test_bad_user_password_obfuscating_comparator_already_unclaimed_user_passwor def test_bad_user_password_obfuscating_comparator_impossible_newly_claimed_user(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1891,7 +1891,7 @@ def test_bad_user_password_obfuscating_comparator_impossible_newly_claimed_user( "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1918,7 +1918,7 @@ def test_bad_user_password_obfuscating_comparator_impossible_newly_claimed_user( def test_bad_user_password_obfuscating_comparator_unclaimed_user_last_password_change_nulled(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1929,7 +1929,7 @@ def test_bad_user_password_obfuscating_comparator_unclaimed_user_last_password_c "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1956,7 +1956,7 @@ def 
test_bad_user_password_obfuscating_comparator_unclaimed_user_last_password_c def test_bad_user_password_obfuscating_comparator_already_unclaimed_user_password_unexpired(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1967,7 +1967,7 @@ def test_bad_user_password_obfuscating_comparator_already_unclaimed_user_passwor "is_password_expired": False, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -1993,7 +1993,7 @@ def test_bad_user_password_obfuscating_comparator_already_unclaimed_user_passwor def test_bad_user_password_obfuscating_comparator_newly_unclaimed_user_password_still_expired(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2004,7 +2004,7 @@ def test_bad_user_password_obfuscating_comparator_newly_unclaimed_user_password_ "is_password_expired": True, }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2031,7 +2031,7 @@ def test_bad_user_password_obfuscating_comparator_newly_unclaimed_user_password_ def test_good_user_password_obfuscating_comparator_existence(): cmp = UserPasswordObfuscatingComparator() id = InstanceID("sentry.test", 0) - present: JSONData = { + present: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2039,7 +2039,7 @@ def test_good_user_password_obfuscating_comparator_existence(): "password": "pbkdf2_sha256$260000$3v4Cyy3TAhp14YCB8Zh7Gq$SjB35BELrwwfOCaiz8O/SdbvhXq+l02BRpKtwxOCTiw=", }, } - missing: JSONData = { + missing: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2060,7 +2060,7 @@ def test_good_user_password_obfuscating_comparator_existence(): def test_good_user_password_obfuscating_comparator_scrubbed_long(): cmp = UserPasswordObfuscatingComparator() - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2069,7 +2069,7 @@ def test_good_user_password_obfuscating_comparator_scrubbed_long(): "password": "pbkdf2_sha256$260000$3v4Cyy3TAhp14YCB8Zh7Gq$SjB35BELrwwfOCaiz8O/SdbvhXq+l02BRpKtwxOCTiw=", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2092,7 +2092,7 @@ def test_good_user_password_obfuscating_comparator_scrubbed_long(): def test_good_user_password_obfuscating_comparator_scrubbed_medium(): cmp = UserPasswordObfuscatingComparator() - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2101,7 +2101,7 @@ def test_good_user_password_obfuscating_comparator_scrubbed_medium(): "password": "sha1$abc123$a0aac0d9559f1e7f4b6931f3918e72ad8ec01c04", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2120,7 +2120,7 @@ def test_good_user_password_obfuscating_comparator_scrubbed_medium(): def test_good_user_password_obfuscating_comparator_scrubbed_short(): cmp = UserPasswordObfuscatingComparator() - left: JSONData = { + left: Any = { "model": "test", "ordinal": 1, "pk": 1, @@ -2129,7 +2129,7 @@ def test_good_user_password_obfuscating_comparator_scrubbed_short(): "password": "md5$abc$d2315d2c3883695e40598e56792847ab", }, } - right: JSONData = { + right: Any = { "model": "test", "ordinal": 1, "pk": 1, diff --git a/tests/sentry/backup/test_exports.py b/tests/sentry/backup/test_exports.py index c10093d006eac3..b03cb6ab1a5efa 100644 --- a/tests/sentry/backup/test_exports.py +++ b/tests/sentry/backup/test_exports.py @@ -25,18 +25,17 @@ export_to_file, ) 
from sentry.testutils.helpers.datetime import freeze_time -from sentry.utils.json import JSONData from tests.sentry.backup import get_matching_exportable_models class ExportTestCase(BackupTestCase): @staticmethod - def count(data: JSONData, model: type[models.base.BaseModel]) -> int: + def count(data: Any, model: type[models.base.BaseModel]) -> int: return len(list(filter(lambda d: d["model"] == str(get_model_name(model)), data))) @staticmethod def exists( - data: JSONData, model: type[models.base.BaseModel], key: str, value: Any | None = None + data: Any, model: type[models.base.BaseModel], key: str, value: Any | None = None ) -> bool: for d in data: if d["model"] == str(get_model_name(model)): @@ -55,7 +54,7 @@ def export( *, scope: ExportScope, filter_by: set[str] | None = None, - ) -> JSONData: + ) -> Any: tmp_path = Path(tmp_dir).joinpath(f"{self._testMethodName}.json") return export_to_file(tmp_path, scope=scope, filter_by=filter_by) @@ -65,7 +64,7 @@ def export_and_encrypt( *, scope: ExportScope, filter_by: set[str] | None = None, - ) -> JSONData: + ) -> Any: tmp_path = Path(tmp_dir).joinpath(f"{self._testMethodName}.enc.tar") return export_to_encrypted_tarball(tmp_path, scope=scope, filter_by=filter_by) @@ -76,7 +75,7 @@ class ScopingTests(ExportTestCase): """ @staticmethod - def verify_model_inclusion(data: JSONData, scope: ExportScope) -> None: + def verify_model_inclusion(data: Any, scope: ExportScope) -> None: """ Ensure all in-scope models are included, and that no out-of-scope models are included. """ @@ -101,7 +100,7 @@ def verify_model_inclusion(data: JSONData, scope: ExportScope) -> None: ) def verify_encryption_equality( - self, tmp_dir: str, unencrypted: JSONData, scope: ExportScope + self, tmp_dir: str, unencrypted: Any, scope: ExportScope ) -> None: res = validate( unencrypted, diff --git a/tests/sentry/backup/test_models.py b/tests/sentry/backup/test_models.py index 512d768e68ed9e..d1ba7bceaf02eb 100644 --- a/tests/sentry/backup/test_models.py +++ b/tests/sentry/backup/test_models.py @@ -2,6 +2,7 @@ import tempfile from pathlib import Path +from typing import Any from django.db.models import Model @@ -15,7 +16,6 @@ from sentry.testutils.cases import TransactionTestCase from sentry.testutils.helpers.backups import export_to_file from sentry.testutils.silo import assume_test_silo_mode -from sentry.utils.json import JSONData from tests.sentry.backup import expect_models, verify_models_in_output DYNAMIC_RELOCATION_SCOPE_TESTED: set[NormalizedModelName] = set() @@ -28,7 +28,7 @@ class DynamicRelocationScopeTests(TransactionTestCase): For models that support different relocation scopes depending on properties of the model instance itself (ie, they have a set for their `__relocation_scope__`, rather than a single value), make sure that this dynamic deduction works correctly. 
""" - def export(self) -> JSONData: + def export(self) -> Any: with tempfile.TemporaryDirectory() as tmp_dir: tmp_path = Path(tmp_dir).joinpath(f"{self._testMethodName}.expect.json") return export_to_file(tmp_path, ExportScope.Global) diff --git a/tests/sentry/backup/test_rpc.py b/tests/sentry/backup/test_rpc.py index 4ab0592b159d33..5323bfef9d65af 100644 --- a/tests/sentry/backup/test_rpc.py +++ b/tests/sentry/backup/test_rpc.py @@ -2,6 +2,7 @@ from copy import deepcopy from functools import cached_property +from typing import Any from unittest.mock import MagicMock, patch from uuid import uuid4 @@ -306,15 +307,15 @@ class RpcImportErrorTests(TestCase): """ @staticmethod - def is_user_model(model: json.JSONData) -> bool: + def is_user_model(model: Any) -> bool: return NormalizedModelName(model["model"]) == USER_MODEL_NAME @cached_property - def _json_of_exhaustive_user_with_minimum_privileges(self) -> json.JSONData: + def _json_of_exhaustive_user_with_minimum_privileges(self) -> Any: with open(get_fixture_path("backup", "user-with-minimum-privileges.json")) as backup_file: return json.load(backup_file) - def json_of_exhaustive_user_with_minimum_privileges(self) -> json.JSONData: + def json_of_exhaustive_user_with_minimum_privileges(self) -> Any: return deepcopy(self._json_of_exhaustive_user_with_minimum_privileges) def test_bad_invalid_min_ordinal(self): diff --git a/tests/sentry/backup/test_sanitize.py b/tests/sentry/backup/test_sanitize.py index df7ef2738c9bf2..dd7b7f142936e8 100644 --- a/tests/sentry/backup/test_sanitize.py +++ b/tests/sentry/backup/test_sanitize.py @@ -2,6 +2,7 @@ from collections import defaultdict from collections.abc import Sequence from datetime import datetime, timedelta +from typing import Any from unittest.mock import Mock, patch import pytest @@ -31,7 +32,6 @@ from sentry.testutils.helpers.backups import BackupTestCase from sentry.testutils.silo import strip_silo_mode_test_suffix from sentry.utils import json -from sentry.utils.json import JSONData from tests.sentry.backup import expect_models, verify_models_in_output FAKE_EMAIL = "test@fake.com" @@ -76,7 +76,7 @@ class Meta: @classmethod def sanitize_relocation_json( - cls, json: JSONData, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None + cls, json: Any, sanitizer: Sanitizer, model_name: NormalizedModelName | None = None ) -> None: model_name = get_model_name(cls) if model_name is None else model_name super().sanitize_relocation_json(json, sanitizer, model_name) @@ -89,7 +89,7 @@ def sanitize_relocation_json( @patch("sentry.backup.dependencies.get_model", Mock(return_value=FakeSanitizableModel)) class SanitizationUnitTests(TestCase): - def serialize_to_json_data(self, models: Sequence[FakeSanitizableModel]) -> JSONData: + def serialize_to_json_data(self, models: Sequence[FakeSanitizableModel]) -> Any: json_string = serialize( "json", models, @@ -427,7 +427,7 @@ def test_bad_invalid_string_type(self): class IntegrationTestCase(TestCase): - def sanitize_and_compare(self, unsanitized_json: JSONData) -> JSONData: + def sanitize_and_compare(self, unsanitized_json: Any) -> Any: root_dir = os.path.dirname(os.path.realpath(__file__)) # Use the same data for monolith and region mode. 
diff --git a/tests/sentry/backup/test_snapshots.py b/tests/sentry/backup/test_snapshots.py index 8f09f7bac5a775..d407331e72a4e5 100644 --- a/tests/sentry/backup/test_snapshots.py +++ b/tests/sentry/backup/test_snapshots.py @@ -1,5 +1,6 @@ from pathlib import Path from tempfile import TemporaryDirectory +from typing import Any import pytest @@ -30,7 +31,7 @@ def setUp(self): def import_export_fixture_then_validate( self, *, tmp_out_path: Path, fixture_file_name: str - ) -> json.JSONData: + ) -> Any: """ Test helper that validates that data imported from a fixture `.json` file correctly matches the actual outputted export data. diff --git a/tests/sentry/backup/test_validate.py b/tests/sentry/backup/test_validate.py index 585da382d0664a..2bce94ca2f81cc 100644 --- a/tests/sentry/backup/test_validate.py +++ b/tests/sentry/backup/test_validate.py @@ -1,4 +1,5 @@ from copy import deepcopy +from typing import Any from sentry.backup.comparators import get_default_comparators from sentry.backup.findings import ComparatorFindingKind, InstanceID @@ -7,7 +8,7 @@ from sentry.utils import json -def copy_model(model: json.JSONData, new_pk: int) -> json.JSONData: +def copy_model(model: Any, new_pk: int) -> Any: new_model = deepcopy(model) new_model["pk"] = new_pk return new_model diff --git a/tests/sentry/tasks/integrations/github/test_open_pr_comment.py b/tests/sentry/tasks/integrations/github/test_open_pr_comment.py index ac44b61b96e432..a0d10eac867554 100644 --- a/tests/sentry/tasks/integrations/github/test_open_pr_comment.py +++ b/tests/sentry/tasks/integrations/github/test_open_pr_comment.py @@ -1,3 +1,4 @@ +from typing import Any from unittest.mock import patch import pytest @@ -23,7 +24,6 @@ from sentry.testutils.cases import IntegrationTestCase, TestCase from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.skips import requires_snuba -from sentry.utils.json import JSONData from tests.sentry.tasks.integrations.github.test_pr_comment import GithubCommentTestCase pytestmark = [requires_snuba] @@ -240,7 +240,7 @@ def setUp(self): @responses.activate def test_get_pr_files(self): - data: JSONData = [ + data: Any = [ {"filename": "bar.py", "status": "modified", "patch": "b"}, {"filename": "baz.py", "status": "modified"}, ] From 32e93069c18489880c61a605171aed0776a24d05 Mon Sep 17 00:00:00 2001 From: David Wang Date: Thu, 9 May 2024 13:53:33 -0700 Subject: [PATCH 243/376] feat(crons): Add 404 monitor details page (#70619) Renders a simple 404 page when the monitor is not found. Also added some basic tests to this file. --- static/app/views/monitors/details.spec.tsx | 54 ++++++++++++++++++++++ static/app/views/monitors/details.tsx | 9 +++- 2 files changed, 62 insertions(+), 1 deletion(-) create mode 100644 static/app/views/monitors/details.spec.tsx diff --git a/static/app/views/monitors/details.spec.tsx b/static/app/views/monitors/details.spec.tsx new file mode 100644 index 00000000000000..e98f2bf8074674 --- /dev/null +++ b/static/app/views/monitors/details.spec.tsx @@ -0,0 +1,54 @@ +import {MonitorFixture} from 'sentry-fixture/monitor'; + +import {initializeOrg} from 'sentry-test/initializeOrg'; +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import MonitorDetails from 'sentry/views/monitors/details'; + +describe('Monitor Details', () => { + const monitor = MonitorFixture(); + const {organization, project, routerProps} = initializeOrg({ + router: {params: {monitorSlug: monitor.slug, projectId: monitor.project.slug}}, + }); + + beforeEach(() => { + MockApiClient.clearMockResponses();
MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/monitors/${monitor.slug}/`, + body: {...monitor}, + }); + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/users/`, + body: [], + }); + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/issues/?limit=20&project=${project.id}&query=monitor.slug%3A${monitor.slug}%20environment%3A%5Bproduction%5D%20is%3Aunresolved&statsPeriod=14d`, + body: [], + }); + MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/monitors/${monitor.slug}/stats/`, + body: [], + }); + MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/monitors/${monitor.slug}/checkins/`, + body: [], + }); + }); + + it('renders', async function () { + render(); + expect(await screen.findByText(monitor.slug, {exact: false})).toBeInTheDocument(); + }); + + it('renders error when monitor is not found', async function () { + MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/monitors/${monitor.slug}/`, + statusCode: 404, + }); + + render(); + expect( + await screen.findByText('The monitor you were looking for was not found.') + ).toBeInTheDocument(); + }); +}); diff --git a/static/app/views/monitors/details.tsx b/static/app/views/monitors/details.tsx index a350c6b681e883..ba9fe8c953c4bd 100644 --- a/static/app/views/monitors/details.tsx +++ b/static/app/views/monitors/details.tsx @@ -6,6 +6,7 @@ import sortBy from 'lodash/sortBy'; import {updateMonitor} from 'sentry/actionCreators/monitors'; import Alert from 'sentry/components/alert'; import * as Layout from 'sentry/components/layouts/thirds'; +import LoadingError from 'sentry/components/loadingError'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; @@ -51,7 +52,7 @@ function MonitorDetails({params, location}: Props) { } ); - const {data: monitor} = useApiQuery(queryKey, { + const {data: monitor, isError} = useApiQuery(queryKey, { staleTime: 0, refetchOnWindowFocus: true, // Refetches while we are waiting for the user to send their first check-in @@ -86,6 +87,12 @@ function MonitorDetails({params, location}: Props) { } }; + if (isError) { + return ( + + ); + } + if (!monitor) { return ( From 6b17cd265930493b6afa841f0ca92db392259dea Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Thu, 9 May 2024 14:02:07 -0700 Subject: [PATCH 244/376] feat(usage): Rename metrics to metricSecond (#70476) --- static/app/constants/index.tsx | 6 +++--- static/app/types/core.tsx | 3 ++- static/app/views/organizationStats/index.tsx | 4 ++-- static/app/views/organizationStats/usageChart/index.tsx | 6 +++--- static/app/views/organizationStats/usageStatsOrg.tsx | 6 +++--- static/app/views/organizationStats/usageStatsPerMin.tsx | 2 +- 6 files changed, 14 insertions(+), 13 deletions(-) diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx index 77ef36e628ec49..24595fea3b249f 100644 --- a/static/app/constants/index.tsx +++ b/static/app/constants/index.tsx @@ -318,9 +318,9 @@ export const DATA_CATEGORY_INFO = { titleName: t('Cron Monitors'), uid: 13, }, - [DataCategoryExact.METRICS]: { - name: DataCategoryExact.METRICS, - apiName: 'metrics', + [DataCategoryExact.METRIC_SECOND]: { + name: DataCategoryExact.METRIC_SECOND, + apiName: 'metricSecond', plural: 'metrics', 
displayName: 'metrics', titleName: t('Metrics'), diff --git a/static/app/types/core.tsx b/static/app/types/core.tsx index cdbba78fe44275..8077bee8f9f522 100644 --- a/static/app/types/core.tsx +++ b/static/app/types/core.tsx @@ -80,6 +80,7 @@ export enum DataCategory { PROFILES = 'profiles', REPLAYS = 'replays', MONITOR_SEATS = 'monitorSeats', + METRIC_SECOND = 'metricSecond', } /** @@ -97,7 +98,7 @@ export enum DataCategoryExact { TRANSACTION_INDEXED = 'transaction_indexed', MONITOR = 'monitor', MONITOR_SEAT = 'monitorSeat', - METRICS = 'metrics', + METRIC_SECOND = 'metricSecond', } export interface DataCategoryInfo { diff --git a/static/app/views/organizationStats/index.tsx b/static/app/views/organizationStats/index.tsx index 6912f98afdd15f..9bdfe772a5b916 100644 --- a/static/app/views/organizationStats/index.tsx +++ b/static/app/views/organizationStats/index.tsx @@ -263,7 +263,7 @@ export class OrganizationStats extends Component { if (opt.value === DATA_CATEGORY_INFO.replay.plural) { return organization.features.includes('session-replay'); } - if (opt.value === DATA_CATEGORY_INFO.metrics.plural) { + if (opt.value === DATA_CATEGORY_INFO.metricSecond.plural) { return hasMetricStats(organization); } return true; @@ -321,7 +321,7 @@ export class OrganizationStats extends Component { if (opt.value === DATA_CATEGORY_INFO.replay.plural) { return organization.features.includes('session-replay'); } - if (opt.value === DATA_CATEGORY_INFO.metrics.plural) { + if (opt.value === DATA_CATEGORY_INFO.metricSecond.plural) { return hasMetricStats(organization); } return true; diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx index 5b0c2435081be6..07129efb28bae4 100644 --- a/static/app/views/organizationStats/usageChart/index.tsx +++ b/static/app/views/organizationStats/usageChart/index.tsx @@ -89,8 +89,8 @@ export const CHART_OPTIONS_DATACATEGORY: CategoryOption[] = [ yAxisMinInterval: 100, }, { - label: DATA_CATEGORY_INFO.metrics.titleName, - value: DATA_CATEGORY_INFO.metrics.plural, + label: DATA_CATEGORY_INFO.metricSecond.titleName, + value: DATA_CATEGORY_INFO.metricSecond.plural, disabled: false, yAxisMinInterval: 100, }, @@ -360,7 +360,7 @@ function UsageChartBody({ const filteredOptions = useMemo(() => { return categoryOptions.filter(option => { - if (option.value !== DATA_CATEGORY_INFO.metrics.plural) { + if (option.value !== DATA_CATEGORY_INFO.metricSecond.plural) { return true; } return ( diff --git a/static/app/views/organizationStats/usageStatsOrg.tsx b/static/app/views/organizationStats/usageStatsOrg.tsx index 66b1cd1d50450b..67bebd2054c5ea 100644 --- a/static/app/views/organizationStats/usageStatsOrg.tsx +++ b/static/app/views/organizationStats/usageStatsOrg.tsx @@ -136,7 +136,7 @@ class UsageStatsOrganization< { query: { ...this.endpointQuery, - category: DATA_CATEGORY_INFO.metrics.apiName, + category: DATA_CATEGORY_INFO.metricSecond.apiName, groupBy: ['outcome'], }, }, @@ -159,7 +159,7 @@ class UsageStatsOrganization< ...group, by: { ...group.by, - category: DATA_CATEGORY_INFO.metrics.apiName, + category: DATA_CATEGORY_INFO.metricSecond.apiName, }, }; }); @@ -345,7 +345,7 @@ class UsageStatsOrganization< filtered: { title: tct('Filtered [dataCategory]', {dataCategory: dataCategoryName}), help: - dataCategory === DATA_CATEGORY_INFO.metrics.plural + dataCategory === DATA_CATEGORY_INFO.metricSecond.plural ? 
tct( 'Filtered metrics were blocked due to your disabled metrics [settings: settings]', { diff --git a/static/app/views/organizationStats/usageStatsPerMin.tsx b/static/app/views/organizationStats/usageStatsPerMin.tsx index d3dd2efbb03cb9..49cad934d6fd64 100644 --- a/static/app/views/organizationStats/usageStatsPerMin.tsx +++ b/static/app/views/organizationStats/usageStatsPerMin.tsx @@ -78,7 +78,7 @@ function UsageStatsPerMin({dataCategory, organization, projectIds}: Props) { }; // Metrics stats ingestion is delayed, so we can't show this for metrics right now - if (dataCategory === DATA_CATEGORY_INFO.metrics.plural) { + if (dataCategory === DATA_CATEGORY_INFO.metricSecond.plural) { return null; } From 65b26fa23a1a7ec5a045210bd3ba6b622af05b3e Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Thu, 9 May 2024 17:10:03 -0400 Subject: [PATCH 245/376] fix(trace-explorer): Better quantizing for timeline (#70621) The timeline requires some better quantizing or it can appear really noisy. This change tries to merge as many adjacent intervals together as possible. --- .../api/endpoints/organization_traces.py | 105 ++++++++--- .../api/endpoints/test_organization_traces.py | 173 +++++++++++++++++- 2 files changed, 254 insertions(+), 24 deletions(-) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index c54be68c9228f9..8fa31183650b30 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -4,7 +4,7 @@ from collections import defaultdict from collections.abc import Callable, Mapping, MutableMapping from datetime import datetime, timedelta -from typing import Any, Literal, TypedDict, cast +from typing import Any, Literal, NotRequired, TypedDict, cast from rest_framework import serializers from rest_framework.exceptions import ParseError @@ -40,6 +40,8 @@ class TraceInterval(TypedDict): end: int kind: Literal["project", "missing", "other"] opCategory: str | None + duration: int + components: NotRequired[list[tuple[int, int]]] class TraceResult(TypedDict): @@ -967,25 +969,31 @@ def get_suggested_spans_query( def quantize_range(span_start, span_end, trace_range): trace_start = trace_range["start"] trace_end = trace_range["end"] - min_duration = trace_range["min"] + + bin_size = trace_range["min"] span_duration = span_end - span_start - if min_duration > 0: - rounded_start = ( - round((span_start - trace_start) / min_duration) * min_duration + trace_start - ) + if bin_size > 0: + rounded_start = round((span_start - trace_start) / bin_size) * bin_size + trace_start + rounded_end = round((span_end - trace_start) / bin_size) * bin_size + trace_start + + # if the span is at least the min duration, ensure it spans 1 bin + if rounded_start == rounded_end and span_duration >= (bin_size * 0.1): + rounded_end += bin_size else: rounded_start = span_start + rounded_end = span_end + + if span_start <= trace_start: + rounded_start = trace_start # To avoid creating gaps at the end of the trace, # do not adjust the end if it's at the trace end.
if span_end >= trace_end: - rounded_end = span_end - else: - rounded_end = rounded_start + span_duration + rounded_end = trace_end - return rounded_start, rounded_end + return int(rounded_start), int(rounded_end) def process_breakdowns(data, traces_range): @@ -1001,13 +1009,6 @@ def should_merge(interval_a, interval_b): ) def breakdown_push(trace, interval): - # Clip the intervals os that it is within range of the trace - if trace_range := traces_range.get(trace): - start = trace_range["start"] - end = trace_range["end"] - interval["start"] = clip(interval["start"], start, end) - interval["end"] = clip(interval["end"], start, end) - breakdown = breakdowns[trace] # Find the last interval. If there is an interval on the stack, it @@ -1029,6 +1030,10 @@ def breakdown_push(trace, interval): "opCategory": None, "start": last_interval["end"], "end": interval["start"], + "duration": 0, + "components": [ + (last_interval["components"][-1][1], interval["components"][0][0]), + ], } ) @@ -1040,6 +1045,21 @@ def stack_push(trace, interval): # update the end of this interval and it will # be updated in the breakdown as well last_interval["end"] = max(interval["end"], last_interval["end"]) + + # need to update the components of the last interval by merging + # current interval into it + last_component = last_interval["components"][-1] + # there should always be 1 component in the current interval + assert len(interval["components"]) == 1 + cur_component = interval["components"][0] + if last_component[1] >= cur_component[0]: + last_interval["components"][-1] = ( + last_component[0], + max(last_component[1], cur_component[1]), + ) + else: + last_interval["components"].extend(interval["components"]) + return # Make sure to push the breakdown before the stack. This is because @@ -1068,23 +1088,51 @@ def stack_clear(trace, until=None): for row in data: trace = row["trace"] - precise_start = int(row["precise.start_ts"] * 1000) precise_end = int(row["precise.finish_ts"] * 1000) - span_start, span_end = quantize_range( + trace_range = traces_range[trace] + trace_start = trace_range["start"] + trace_end = trace_range["end"] + + # Clip the intervals os that it is within range of the trace + precise_start = clip(precise_start, trace_start, trace_end) + precise_end = clip(precise_end, trace_start, trace_end) + + quantized_start, quantized_end = quantize_range( precise_start, precise_end, traces_range[trace], ) + row["precise.start_ts"] = precise_start + row["precise.finish_ts"] = precise_end + row["quantized.start_ts"] = quantized_start + row["quantized.finish_ts"] = quantized_end + + data.sort(key=lambda row: (row["quantized.start_ts"], -row["quantized.finish_ts"])) + + last_timestamp_per_trace: dict[str, int] = defaultdict(int) + + for row in data: + trace = row["trace"] + + last_timestamp_per_trace["trace"] = max( + row["precise.finish_ts"], last_timestamp_per_trace["trace"] + ) + + if row["quantized.start_ts"] == row["quantized.finish_ts"]: + # after quantizing, this span is far too small to render, so remove it + continue cur: TraceInterval = { "kind": "project", "project": row["project"], "sdkName": row["sdk.name"], "opCategory": row.get("span.category"), - "start": span_start, - "end": span_end, + "start": row["quantized.start_ts"], + "end": row["quantized.finish_ts"], + "duration": 0, + "components": [(row["precise.start_ts"], row["precise.finish_ts"])], } # Clear the stack of any intervals that end before the current interval @@ -1109,6 +1157,7 @@ def stack_clear(trace, until=None): "opCategory": None, 
"start": trace_range["start"], "end": trace_range["end"], + "duration": 0, } # Clear the remaining intervals on the stack to find the latest end time @@ -1116,11 +1165,23 @@ def stack_clear(trace, until=None): # of the trace that was not covered by one of the intervals. while stacks[trace]: interval = stack_pop(trace) - other["start"] = max(other["start"], interval["end"]) + # use the end time of the last component of the interval + other["start"] = max(other["start"], interval["components"][-1][1]) if other["start"] < other["end"]: breakdown_push(trace, other) + for breakdown in breakdowns.values(): + for interval in breakdown: + components = interval.pop("components", []) + component_duration = sum(component[1] - component[0] for component in components) + interval_duration = interval["end"] - interval["start"] + + # in the event we don't have a duration from the components, we fall back to the interval + interval["duration"] = ( + component_duration if component_duration > 0 else interval_duration + ) + return breakdowns diff --git a/tests/sentry/api/endpoints/test_organization_traces.py b/tests/sentry/api/endpoints/test_organization_traces.py index 35b1e18af74942..91d03717ef7200 100644 --- a/tests/sentry/api/endpoints/test_organization_traces.py +++ b/tests/sentry/api/endpoints/test_organization_traces.py @@ -461,6 +461,7 @@ def test_matching_tag(self): "start": int(timestamps[0].timestamp() * 1000), "end": int(timestamps[0].timestamp() * 1000) + 60_100, "kind": "project", + "duration": 60_100, }, { "project": project_2.slug, @@ -469,6 +470,7 @@ def test_matching_tag(self): "start": int(timestamps[1].timestamp() * 1000), "end": int(timestamps[3].timestamp() * 1000) + 30_003, "kind": "project", + "duration": 32_003, }, ], "spans": [ @@ -514,6 +516,7 @@ def test_matching_tag(self): "start": int(timestamps[4].timestamp() * 1000), "end": int(timestamps[4].timestamp() * 1000) + 90_123, "kind": "project", + "duration": 90_123, }, { "project": project_2.slug, @@ -522,6 +525,7 @@ def test_matching_tag(self): "start": int(timestamps[5].timestamp() * 1000), "end": int(timestamps[6].timestamp() * 1000) + 20_006, "kind": "project", + "duration": 21_006, }, ], "spans": [ @@ -610,6 +614,7 @@ def test_matching_tag_breakdown_with_category(self): "start": int(timestamps[4].timestamp() * 1000), "end": int(timestamps[4].timestamp() * 1000) + 90_123, "kind": "project", + "duration": 90_123, }, { "project": project_1.slug, @@ -618,6 +623,7 @@ def test_matching_tag_breakdown_with_category(self): "start": int(timestamps[7].timestamp() * 1000), "end": int(timestamps[7].timestamp() * 1000) + 1_000, "kind": "project", + "duration": 1_000, }, { "project": project_2.slug, @@ -626,6 +632,7 @@ def test_matching_tag_breakdown_with_category(self): "start": int(timestamps[5].timestamp() * 1000), "end": int(timestamps[6].timestamp() * 1000) + 20_006, "kind": "project", + "duration": 21_006, }, { "project": project_1.slug, @@ -634,6 +641,7 @@ def test_matching_tag_breakdown_with_category(self): "start": int(timestamps[8].timestamp() * 1000), "end": int(timestamps[8].timestamp() * 1000) + 3_000, "kind": "project", + "duration": 3_000, }, ], "spans": [ @@ -718,6 +726,7 @@ def test_matching_tag_metrics(self): "start": int(timestamps[10].timestamp() * 1000), "end": int(timestamps[10].timestamp() * 1000) + 40_000, "kind": "project", + "duration": 40_000, }, { "project": project_1.slug, @@ -726,6 +735,7 @@ def test_matching_tag_metrics(self): "start": int(timestamps[11].timestamp() * 1000), "end": 
int(timestamps[11].timestamp() * 1000) + 10_000, "kind": "project", + "duration": 10_000, }, ], "spans": [ @@ -814,6 +824,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 100, "kind": "project", + "duration": 100, }, ], }, @@ -849,6 +860,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 100, "kind": "project", + "duration": 100, }, { "project": "bar", @@ -857,6 +869,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 25, "end": 75, "kind": "project", + "duration": 50, }, ], }, @@ -900,6 +913,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 50, "kind": "project", + "duration": 50, }, { "project": "bar", @@ -908,6 +922,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 25, "end": 75, "kind": "project", + "duration": 50, }, { "project": "baz", @@ -916,6 +931,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 50, "end": 100, "kind": "project", + "duration": 50, }, ], }, @@ -951,6 +967,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 25, "kind": "project", + "duration": 25, }, { "project": None, @@ -959,6 +976,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 25, "end": 50, "kind": "missing", + "duration": 25, }, { "project": "bar", @@ -967,6 +985,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 50, "end": 75, "kind": "project", + "duration": 25, }, ], }, @@ -1002,6 +1021,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 100, "kind": "project", + "duration": 100, }, ], }, @@ -1037,6 +1057,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 100, "kind": "project", + "duration": 100, }, ], }, @@ -1072,6 +1093,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 25, "kind": "project", + "duration": 25, }, { "project": None, @@ -1080,6 +1102,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 25, "end": 50, "kind": "missing", + "duration": 25, }, { "project": "foo", @@ -1088,6 +1111,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 50, "end": 75, "kind": "project", + "duration": 25, }, ], }, @@ -1131,6 +1155,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 100, "kind": "project", + "duration": 100, }, { "project": "bar", @@ -1139,6 +1164,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 20, "end": 80, "kind": "project", + "duration": 60, }, { "project": "baz", @@ -1147,6 +1173,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 40, "end": 60, "kind": "project", + "duration": 20, }, ], }, @@ -1190,6 +1217,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 100, "kind": "project", + "duration": 100, }, { "project": "bar", @@ -1198,6 +1226,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 25, "end": 50, "kind": "project", + "duration": 25, }, { "project": "baz", @@ -1206,6 +1235,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 50, "end": 75, "kind": "project", + "duration": 25, }, ], }, @@ -1249,6 +1279,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 50, "kind": "project", + "duration": 50, }, { "project": "bar", @@ -1257,6 +1288,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 20, "end": 30, "kind": "project", + 
"duration": 10, }, { "project": "baz", @@ -1265,6 +1297,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 50, "end": 75, "kind": "project", + "duration": 25, }, ], }, @@ -1308,6 +1341,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 50, "kind": "project", + "duration": 50, }, { "project": "bar", @@ -1316,6 +1350,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 20, "end": 30, "kind": "project", + "duration": 10, }, { "project": "baz", @@ -1324,6 +1359,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 40, "end": 60, "kind": "project", + "duration": 20, }, ], }, @@ -1367,6 +1403,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 50, "kind": "project", + "duration": 50, }, { "project": "bar", @@ -1375,6 +1412,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 10, "end": 20, "kind": "project", + "duration": 10, }, ], }, @@ -1402,6 +1440,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 50, "kind": "project", + "duration": 50, }, ], }, @@ -1429,6 +1468,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 0, "end": 50, "kind": "project", + "duration": 50, }, { "project": None, @@ -1437,6 +1477,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 50, "end": 100, "kind": "other", + "duration": 50, }, ], }, @@ -1478,16 +1519,18 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "opCategory": None, "sdkName": "sentry.javascript.node", "start": 0, - "end": 21, + "end": 20, "kind": "project", + "duration": 23, }, { "project": None, "opCategory": None, "sdkName": None, - "start": 21, + "start": 20, "end": 30, "kind": "missing", + "duration": 8, }, { "project": "foo", @@ -1496,11 +1539,137 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "start": 30, "end": 40, "kind": "project", + "duration": 8, }, ], }, id="merge quantized spans", ), + pytest.param( + [ + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.node", + "transaction": "foo1", + "precise.start_ts": 0, + "precise.finish_ts": 0.1, + }, + { + "trace": "a" * 32, + "project": "bar", + "sdk.name": "sentry.javascript.node", + "transaction": "bar1", + "precise.start_ts": 0.020, + "precise.finish_ts": 0.021, + }, + ], + {"a" * 32: (0, 100, 20)}, + { + "a" + * 32: [ + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": 0, + "end": 100, + "kind": "project", + "duration": 100, + }, + ], + }, + id="remove spans that are too small", + ), + pytest.param( + [ + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.node", + "transaction": "foo1", + "precise.start_ts": 0, + "precise.finish_ts": 0.1, + }, + { + "trace": "a" * 32, + "project": "bar", + "sdk.name": "sentry.javascript.node", + "transaction": "bar1", + "precise.start_ts": 0.002, + "precise.finish_ts": 0.044, + }, + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.node", + "transaction": "foo1", + "precise.start_ts": 0.007, + "precise.finish_ts": 0.1, + }, + ], + {"a" * 32: (0, 100, 20)}, + { + "a" + * 32: [ + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": 0, + "end": 100, + "kind": "project", + "duration": 100, + }, + { + "project": "bar", + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": 0, + "end": 40, + "kind": "project", + "duration": 42, + }, + ], + }, + 
id="resorts spans after quantizing", + ), + pytest.param( + [ + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.node", + "transaction": "foo1", + "precise.start_ts": 0, + "precise.finish_ts": 0.051, + }, + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.node", + "transaction": "foo1", + "precise.start_ts": 0.069, + "precise.finish_ts": 0.1, + }, + ], + {"a" * 32: (0, 100, 20)}, + { + "a" + * 32: [ + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": 0, + "end": 100, + "kind": "project", + "duration": 82, + }, + ], + }, + id="merges nearby spans", + ), ], ) def test_process_breakdowns(data, traces_range, expected): From 5e202ee49ad44be659f3bbc070bb1422f29b9d56 Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Thu, 9 May 2024 17:17:48 -0400 Subject: [PATCH 246/376] fix: remove unnecessary list[int] type (#70617) Follow up for https://github.com/getsentry/sentry/pull/69990 We already removed list of bytes from the type, and no longer need it as the Python type. --- src/sentry/replays/lib/event_linking.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/sentry/replays/lib/event_linking.py b/src/sentry/replays/lib/event_linking.py index 42177281df50c3..3f361458602a70 100644 --- a/src/sentry/replays/lib/event_linking.py +++ b/src/sentry/replays/lib/event_linking.py @@ -15,9 +15,7 @@ class EventLinkKafkaMessage(TypedDict): replay_id: str project_id: int segment_id: None - payload: list[ - int - ] | EventLinkPayloadDebugId | EventLinkPayloadInfoId | EventLinkPayloadWarningId | EventLinkPayloadErrorId | EventLinkPayloadFatalId + payload: EventLinkPayloadDebugId | EventLinkPayloadInfoId | EventLinkPayloadWarningId | EventLinkPayloadErrorId | EventLinkPayloadFatalId retention_days: int From 7eb0c8f2160415376876f4e11580910e9994ef95 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Thu, 9 May 2024 14:25:01 -0700 Subject: [PATCH 247/376] perf(assignees): Reduce render time of assignee dropdown (#70616) This PR gets rid of a usage of dynamic props in styled components in since this is [bad for performance](https://develop.sentry.dev/frontend/using-styled-components/#use-style--css-attributes). It also gets rid of any usages of replaces any usage of idBadge in with userBadge or teamBadge, since idBadge might re-fetch the avatar when we already have it. I'm seeing ~~20-30~~ 40-50% faster initial render speeds on local dev. 
--- .../components/assigneeSelectorDropdown.tsx | 71 ++++++++----------- static/app/components/avatar/baseAvatar.tsx | 34 +++++---- static/app/components/idBadge/baseBadge.tsx | 5 +- 3 files changed, 52 insertions(+), 58 deletions(-) diff --git a/static/app/components/assigneeSelectorDropdown.tsx b/static/app/components/assigneeSelectorDropdown.tsx index 2be4a6d0281d9d..1abab00e3a7d15 100644 --- a/static/app/components/assigneeSelectorDropdown.tsx +++ b/static/app/components/assigneeSelectorDropdown.tsx @@ -12,7 +12,8 @@ import { type SelectOption, type SelectOptionOrSection, } from 'sentry/components/compactSelect'; -import IdBadge from 'sentry/components/idBadge'; +import {TeamBadge} from 'sentry/components/idBadge/teamBadge'; +import UserBadge from 'sentry/components/idBadge/userBadge'; import ExternalLink from 'sentry/components/links/externalLink'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {Tooltip} from 'sentry/components/tooltip'; @@ -170,20 +171,17 @@ export default function AssigneeSelectorDropdown({ const memberLists = useLegacyStore(MemberListStore); const sessionUser = ConfigStore.get('user'); - const currentMemberList = (): User[] | undefined => { - return memberList ?? memberLists?.members; - }; + const currentMemberList = memberList ?? memberLists?.members ?? []; const getSuggestedAssignees = (): SuggestedAssignee[] => { const currAssignableTeams = getAssignableTeams(); - const currMembers = currentMemberList() ?? []; if (owners !== undefined) { // Add team or user from store return owners .map(owner => { if (owner.type === 'user') { - const member = currMembers.find(user => user.id === owner.id); + const member = currentMemberList.find(user => user.id === owner.id); if (member) { return { ...owner, @@ -219,7 +217,7 @@ export default function AssigneeSelectorDropdown({ const [suggestionType, suggestionId] = suggestion.owner.split(':'); const suggestedReasonText = suggestedReasonTable[suggestion.type]; if (suggestionType === 'user') { - const member = currMembers.find(user => user.id === suggestionId); + const member = currentMemberList.find(user => user.id === suggestionId); if (member) { return { id: suggestionId, @@ -277,7 +275,7 @@ export default function AssigneeSelectorDropdown({ let assignee: User | Actor; if (type === 'user') { - assignee = currentMemberList()?.find(member => member.id === assigneeId) as User; + assignee = currentMemberList.find(member => member.id === assigneeId) as User; } else { const assignedTeam = getAssignableTeams().find( assignableTeam => assignableTeam.team.id === assigneeId @@ -303,31 +301,27 @@ export default function AssigneeSelectorDropdown({ } }; - const makeMemberOption = ( - userId: string, - userDisplay: string - ): SelectOption => { - const isCurrentUser = userId === sessionUser?.id; + const makeMemberOption = (user: User): SelectOption => { + const isCurrentUser = user.id === sessionUser?.id; + const userDisplay = user.name || user.email; return { label: ( - ), // Jank way to pass assignee type (team or user) into each row - value: `user:${userId}`, + value: `user:${user.id}`, textValue: userDisplay, }; }; const makeTeamOption = (assignableTeam: AssignableTeam): SelectOption => ({ - label: , + label: , value: `team:${assignableTeam.team.id}`, textValue: assignableTeam.team.slug, }); @@ -339,13 +333,11 @@ export default function AssigneeSelectorDropdown({ const isCurrentUser = assignee.id === sessionUser?.id; return { label: ( - ), @@ -356,7 +348,8 @@ export default function AssigneeSelectorDropdown({ 
const assignedTeam = assignee.assignee as AssignableTeam; return { label: ( - @@ -369,7 +362,7 @@ export default function AssigneeSelectorDropdown({ const makeAllOptions = (): SelectOptionOrSection[] => { const options: SelectOptionOrSection[] = []; - let memList = currentMemberList(); + let memList = currentMemberList; let assignableTeamList = getAssignableTeams(); let suggestedAssignees = getSuggestedAssignees(); let assignedUser: User | undefined; @@ -391,12 +384,10 @@ export default function AssigneeSelectorDropdown({ }); } } else { - assignedUser = memList?.find(user => user.id === group.assignedTo?.id); + assignedUser = currentMemberList.find(user => user.id === group.assignedTo?.id); if (assignedUser) { - options.push( - makeMemberOption(assignedUser.id, assignedUser.name || assignedUser.email) - ); - memList = memList?.filter(member => member.id !== group.assignedTo?.id); + options.push(makeMemberOption(assignedUser)); + memList = memList.filter(member => member.id !== group.assignedTo?.id); suggestedAssignees = suggestedAssignees?.filter(suggestedAssignee => { return suggestedAssignee.id !== group.assignedTo?.id; }); @@ -411,22 +402,19 @@ export default function AssigneeSelectorDropdown({ suggestedAssignee => suggestedAssignee.id === sessionUser.id ); if (!isUserAssignedOrSuggested) { - const currentUser = memList?.find(user => user.id === sessionUser.id); + const currentUser = memList.find(user => user.id === sessionUser.id); if (currentUser) { - memList = memList?.filter(user => user.id !== sessionUser.id); + memList = memList.filter(user => user.id !== sessionUser.id); // This can't be sessionUser even though they're the same thing // because it would bork the tests - memList?.unshift(currentUser); + memList.unshift(currentUser); } } const memberOptions = { value: '_members', label: t('Members'), - options: - memList?.map(member => - makeMemberOption(member.id, member.name || member.email) - ) ?? [], + options: memList.map(member => makeMemberOption(member)) ?? [], }; const teamOptions = { @@ -504,6 +492,7 @@ export default function AssigneeSelectorDropdown({ e.stopPropagation()} value={ diff --git a/static/app/components/avatar/baseAvatar.tsx b/static/app/components/avatar/baseAvatar.tsx index acab9b4a6fa078..b564ded6bed73f 100644 --- a/static/app/components/avatar/baseAvatar.tsx +++ b/static/app/components/avatar/baseAvatar.tsx @@ -122,21 +122,27 @@ function BaseAvatar({ width: size, }; - return ( - - - {hasError ? backup : imageAvatar} - + const avatarComponent = ( + + {hasError ? backup : imageAvatar} + + ); + + return hasTooltip ? ( + + {avatarComponent} + ) : ( + avatarComponent ); } diff --git a/static/app/components/idBadge/baseBadge.tsx b/static/app/components/idBadge/baseBadge.tsx index 507cd499c4d9cd..8637d918801dd1 100644 --- a/static/app/components/idBadge/baseBadge.tsx +++ b/static/app/components/idBadge/baseBadge.tsx @@ -43,7 +43,7 @@ export const BaseBadge = memo( const wrapperGap: ValidSize = avatarSize <= 14 ? 0.5 : avatarSize <= 20 ? 
0.75 : 1; return ( - + {!hideAvatar && ( ` +const Wrapper = styled('div')` display: flex; - gap: ${p => space(p.gap)}; align-items: center; flex-shrink: 0; `; From 5172ac6e8a32ebc6a12ae8fc2d198e6c8bce4883 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 9 May 2024 14:25:53 -0700 Subject: [PATCH 248/376] ref(seer grouping): Add constants for Seer model version and URL (#70546) This adds two constants, `SEER_SIMILARITY_MODEL_VERSION` and `SEER_SIMILAR_ISSUES_URL`, and uses them in places where we've previously been hardcoding values. it also adds model version to the `SeerSimilarIssueData` dataclass, defaulted to the constant's value. --- src/sentry/conf/server.py | 3 +++ src/sentry/seer/utils.py | 6 ++++-- .../test_group_similar_issues_embeddings.py | 13 +++++++------ 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index e8a466e2bd4ef8..63aa3d16e2c9b6 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3950,6 +3950,9 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # Disable DDM entirely SENTRY_DDM_DISABLE = os.getenv("SENTRY_DDM_DISABLE", "0") in ("1", "true", "True") +SEER_SIMILARITY_MODEL_VERSION = "v0" +SEER_SIMILAR_ISSUES_URL = f"/{SEER_SIMILARITY_MODEL_VERSION}/issues/similar-issues" + # Devserver configuration overrides. ngrok_host = os.environ.get("SENTRY_DEVSERVER_NGROK") if ngrok_host: diff --git a/src/sentry/seer/utils.py b/src/sentry/seer/utils.py index 55a957609d6a12..3842a02dcbb027 100644 --- a/src/sentry/seer/utils.py +++ b/src/sentry/seer/utils.py @@ -6,6 +6,7 @@ from django.conf import settings from urllib3 import Retry +from sentry.conf.server import SEER_SIMILAR_ISSUES_URL, SEER_SIMILARITY_MODEL_VERSION from sentry.models.group import Group from sentry.models.grouphash import GroupHash from sentry.net.http import connection_from_url @@ -124,6 +125,7 @@ class SeerSimilarIssueData: message_distance: float should_group: bool parent_group_id: int + similarity_model_version: str = SEER_SIMILARITY_MODEL_VERSION # TODO: See if we end up needing the hash here parent_hash: str | None = None @@ -175,10 +177,10 @@ def from_raw(cls, project_id: int, raw_similar_issue_data: RawSeerSimilarIssueDa def get_similar_issues_embeddings( similar_issues_request: SimilarIssuesEmbeddingsRequest, ) -> list[SeerSimilarIssueData]: - """Call /v0/issues/similar-issues endpoint from seer.""" + """Request similar issues data from seer and normalize the results.""" response = seer_staging_connection_pool.urlopen( "POST", - "/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=json.dumps(similar_issues_request), headers={"Content-Type": "application/json;charset=utf-8"}, ) diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index 65714ae5ce85dd..c02c59c2a1ae5c 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -11,6 +11,7 @@ get_stacktrace_string, ) from sentry.api.serializers.base import serialize +from sentry.conf.server import SEER_SIMILAR_ISSUES_URL from sentry.models.group import Group from sentry.seer.utils import SeerSimilarIssueData, SimilarIssuesEmbeddingsResponse from sentry.testutils.cases import APITestCase @@ -730,7 +731,7 @@ def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request): mock_seer_request.assert_called_with( "POST", - 
"/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=orjson.dumps(expected_seer_request_params).decode(), headers={"Content-Type": "application/json;charset=utf-8"}, ) @@ -777,7 +778,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request): mock_seer_request.assert_called_with( "POST", - "/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=orjson.dumps(expected_seer_request_params).decode(), headers={"Content-Type": "application/json;charset=utf-8"}, ) @@ -826,7 +827,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request) mock_seer_request.assert_called_with( "POST", - "/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=orjson.dumps(expected_seer_request_params).decode(), headers={"Content-Type": "application/json;charset=utf-8"}, ) @@ -1096,7 +1097,7 @@ def test_no_optional_params(self, mock_seer_request): mock_seer_request.assert_called_with( "POST", - "/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=orjson.dumps( { "group_id": self.group.id, @@ -1120,7 +1121,7 @@ def test_no_optional_params(self, mock_seer_request): mock_seer_request.assert_called_with( "POST", - "/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=orjson.dumps( { "group_id": self.group.id, @@ -1145,7 +1146,7 @@ def test_no_optional_params(self, mock_seer_request): mock_seer_request.assert_called_with( "POST", - "/v0/issues/similar-issues", + SEER_SIMILAR_ISSUES_URL, body=orjson.dumps( { "group_id": self.group.id, From 0db41ed740a82db89e71efd3bed8b1ad5960bd87 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Thu, 9 May 2024 14:32:03 -0700 Subject: [PATCH 249/376] ref(rules): Fetch events and occurrences in batches (#70543) Fetch events and issue occurrences in bulk in the delayed rule processor. 
Closes https://github.com/getsentry/team-core-product-foundations/issues/306 and https://getsentry.atlassian.net/browse/ALRT-5 --- .../rules/processing/delayed_processing.py | 105 ++++++++++++++---- 1 file changed, 81 insertions(+), 24 deletions(-) diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index 7ea02c215a0dc1..e018b117497937 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -4,7 +4,7 @@ from datetime import datetime, timedelta, timezone from typing import Any, DefaultDict, NamedTuple -from sentry import eventstore +from sentry import nodestore from sentry.buffer.redis import BufferHookEvent, RedisBuffer, redis_buffer_registry from sentry.eventstore.models import Event, GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence @@ -28,10 +28,14 @@ ) from sentry.silo.base import SiloMode from sentry.tasks.base import instrumented_task +from sentry.tasks.post_process import should_retry_fetch from sentry.utils import json, metrics +from sentry.utils.iterators import chunked +from sentry.utils.retries import ConditionalRetryPolicy, exponential_delay from sentry.utils.safe import safe_execute logger = logging.getLogger("sentry.rules.delayed_processing") +EVENT_LIMIT = 100 class UniqueCondition(NamedTuple): @@ -200,38 +204,91 @@ def parse_rulegroup_to_event_data( return parsed_rulegroup_to_event_data +def bulk_fetch_events(event_ids: list[str], project_id: int) -> dict[str, Event]: + node_id_to_event_id: dict[str, str] = { + Event.generate_node_id(project_id, event_id=event_id): event_id for event_id in event_ids + } + node_ids = list(node_id_to_event_id.keys()) + fetch_retry_policy = ConditionalRetryPolicy(should_retry_fetch, exponential_delay(1.00)) + + bulk_data = {} + for node_id_chunk in chunked(node_ids, EVENT_LIMIT): + bulk_results = fetch_retry_policy(lambda: nodestore.backend.get_multi(node_id_chunk)) + bulk_data.update(bulk_results) + + bulk_event_id_to_events: dict[str, Event] = {} + for node_id, data in bulk_data.items(): + event_id = node_id_to_event_id[node_id] + if data is not None: + event = Event(event_id=event_id, project_id=project_id, data=data) + bulk_event_id_to_events[event_id] = event + + return bulk_event_id_to_events + + +def build_group_to_groupevent( + parsed_rulegroup_to_event_data: dict[tuple[str, str], dict[str, str]], + bulk_event_id_to_events: dict[str, Event], + bulk_occurrence_id_to_occurrence: dict[str, IssueOccurrence], + group_id_to_group: dict[int, Group], +) -> dict[Group, GroupEvent]: + group_to_groupevent: dict[Group, GroupEvent] = {} + + for rule_group, instance_data in parsed_rulegroup_to_event_data.items(): + event_id = instance_data.get("event_id") + occurrence_id = instance_data.get("occurrence_id") + occurrence = None + + if event_id: + event = bulk_event_id_to_events.get(event_id) + else: + logger.info("delayed_processing.missing_event_id", extra={"rule": rule_group[0]}) + group = group_id_to_group.get(int(rule_group[1])) + if not group or not event: + if not group: + logger.info("delayed_processing.missing_group", extra={"rule": rule_group[0]}) + if not event: + logger.info("delayed_processing.missing_event", extra={"rule": rule_group[0]}) + continue + + group_event = event.for_group(group) + if occurrence_id: + occurrence = bulk_occurrence_id_to_occurrence.get(occurrence_id) + group_event.occurrence = occurrence + group_to_groupevent[group] = group_event + + return group_to_groupevent + + 
def get_group_to_groupevent( parsed_rulegroup_to_event_data: dict[tuple[str, str], dict[str, str]], project_id: int, group_ids: set[int], ) -> dict[Group, GroupEvent]: - group_to_groupevent: dict[Group, GroupEvent] = {} groups = Group.objects.filter(id__in=group_ids) group_id_to_group = {group.id: group for group in groups} - for rule_group, instance_data in parsed_rulegroup_to_event_data.items(): + event_ids: set[str] = set() + occurrence_ids: list[str] = [] + + for instance_data in parsed_rulegroup_to_event_data.values(): event_id = instance_data.get("event_id") + if event_id: + event_ids.add(event_id) occurrence_id = instance_data.get("occurrence_id") - group_id = rule_group[1] - group = group_id_to_group.get(int(group_id)) - if group and event_id: - # TODO: fetch events and occurrences in batches - event = Event( - event_id=event_id, - project_id=project_id, - snuba_data={ - "event_id": event_id, - "group_id": group.id, - "project_id": project_id, - }, - ) - eventstore.backend.bind_nodes([event]) - group_event = event.for_group(group) - if occurrence_id: - occurrence = IssueOccurrence.fetch(occurrence_id, project_id=project_id) - if occurrence: - group_event.occurrence = occurrence - - group_to_groupevent[group] = group_event + if occurrence_id: + occurrence_ids.append(occurrence_id) + + bulk_event_id_to_events = bulk_fetch_events(list(event_ids), project_id) + bulk_occurrences = IssueOccurrence.fetch_multi(occurrence_ids, project_id=project_id) + bulk_occurrence_id_to_occurrence = { + occurrence.id: occurrence for occurrence in bulk_occurrences if occurrence + } + group_to_groupevent = build_group_to_groupevent( + parsed_rulegroup_to_event_data, + bulk_event_id_to_events, + bulk_occurrence_id_to_occurrence, + group_id_to_group, + ) return group_to_groupevent From 82eeb04db677cf52b6e6e53f0312e75f04436a66 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 9 May 2024 15:14:23 -0700 Subject: [PATCH 250/376] chore(grouping): Add grouping threshold constants (#70459) This adds two constants, `SEER_MAX_GROUPING_DISTANCE` and `SEER_MAX_SIMILARITY_DISTANCE`, to `server.py`, along with their default values. The former is usable now, as Seer accepts a `threshold` request parameter and will use that to determine whether a similar issue falls into the `should_group: True` or `should_group: False` category. The latter we can't yet use, because Seer does not yet accept a parameter for the threshold which determines whether a `should_group: False` neighbor issue should be returned as similar at all, but it'll be there to use once we make that Seer change. --- src/sentry/conf/server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 63aa3d16e2c9b6..7921750f3c33cd 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3952,6 +3952,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SEER_SIMILARITY_MODEL_VERSION = "v0" SEER_SIMILAR_ISSUES_URL = f"/{SEER_SIMILARITY_MODEL_VERSION}/issues/similar-issues" +SEER_MAX_GROUPING_DISTANCE = 0.01 +SEER_MAX_SIMILARITY_DISTANCE = 0.15 # Not yet in use - Seer doesn't obey this right now # Devserver configuration overrides.
ngrok_host = os.environ.get("SENTRY_DEVSERVER_NGROK") From b2197ee53052a7e594bcc3c0c34a6c93d83e33b8 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Thu, 9 May 2024 15:31:44 -0700 Subject: [PATCH 251/376] feat(issues): Update react-mention (#69559) --- package.json | 7 +++++-- yarn.lock | 36 ++++++++++++------------------------ 2 files changed, 17 insertions(+), 26 deletions(-) diff --git a/package.json b/package.json index e9cf79b2efd25b..dd46b7d713b072 100644 --- a/package.json +++ b/package.json @@ -84,7 +84,7 @@ "@types/react-dom": "18.2.19", "@types/react-grid-layout": "^1.3.2", "@types/react-lazyload": "3.2.3", - "@types/react-mentions": "4.1.6", + "@types/react-mentions": "4.1.13", "@types/react-router": "^3.0.28", "@types/react-select": "4.0.18", "@types/react-sparklines": "^1.7.2", @@ -151,7 +151,7 @@ "react-dom": "18.2.0", "react-grid-layout": "^1.3.4", "react-lazyload": "^3.2.1", - "react-mentions": "4.4.2", + "react-mentions": "4.4.10", "react-popper": "^2.3.0", "react-router": "3.2.6", "react-select": "4.3.1", @@ -204,6 +204,9 @@ "tsconfig-paths": "^4.2.0", "webpack-dev-server": "5.0.4" }, + "resolutions": { + "react-mentions/@babel/runtime": "*" + }, "optionalDependencies": { "fsevents": "^2.3.2" }, diff --git a/yarn.lock b/yarn.lock index bd279e51f1db56..2f0b8f337e5969 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1142,14 +1142,7 @@ resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== -"@babel/runtime@7.4.5": - version "7.4.5" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.4.5.tgz#582bb531f5f9dc67d2fcb682979894f75e253f12" - integrity sha512-TuI4qpWZP6lGOGIuGWtp9sPluqYICmbk8T/1vpSysqJxRPkudh/ofFWyqdcMsDf2s7KvDL4/YHgKyvcS3g9CJQ== - dependencies: - regenerator-runtime "^0.13.2" - -"@babel/runtime@^7.12.0", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.3", "@babel/runtime@^7.3.4", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2", "@babel/runtime@~7.24.5": +"@babel/runtime@*", "@babel/runtime@7.4.5", "@babel/runtime@^7.12.0", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.3", "@babel/runtime@^7.3.4", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2", "@babel/runtime@~7.24.5": version "7.24.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c" integrity sha512-Nms86NXrsaeU9vbBJKni6gXiEXZ4CVpYVzEjDH9Sb8vmZ3UljyA1GSOJl/6LGPO8EHLuSF9H+IxNXHPX8QHJ4g== @@ -3899,10 +3892,10 @@ dependencies: "@types/react" "*" -"@types/react-mentions@4.1.6": - version "4.1.6" - resolved "https://registry.yarnpkg.com/@types/react-mentions/-/react-mentions-4.1.6.tgz#0ecdb61785c22edbf9c7d6718505d4814ad3a65c" - integrity sha512-f4/BdnjlMxT47q+WqlcYYwFABbBMVQrDoFFeMeljtFC5nnR9/x8TOFmN18BJKgNuWMgivy9uE5EKtsjlay751w== +"@types/react-mentions@4.1.13": + version "4.1.13" + resolved "https://registry.yarnpkg.com/@types/react-mentions/-/react-mentions-4.1.13.tgz#293e56e14c502f6a73217fece0b870e54a0cc657" + integrity sha512-kRulAAjlmhCtsJ9bapO0foocknaE/rEuFKpmFEU81fBfnXZmZNBaJ9J/DBjwigT3WDHjQVUmYoi5sxEXrcdzAw== dependencies: "@types/react" "*" @@ -10408,10 +10401,10 @@ react-list@^0.8.13: dependencies: prop-types "15" -react-mentions@4.4.2: - version "4.4.2" - 
resolved "https://registry.yarnpkg.com/react-mentions/-/react-mentions-4.4.2.tgz#b832eeca0b2e141a6fc80d49cadd7c17680c99b1" - integrity sha512-vkhTeUQaxUYlWKxj/wTFBX+h+JmsKIwjLvigeeYar/+UlJ8vFVKq7iM/9YwAsVg+Wye3nvihH7WPld66fVrjmg== +react-mentions@4.4.10: + version "4.4.10" + resolved "https://registry.yarnpkg.com/react-mentions/-/react-mentions-4.4.10.tgz#ae6c1e310a405597e83ce786f12c5bfb93b097ce" + integrity sha512-JHiQlgF1oSZR7VYPjq32wy97z1w1oE4x10EuhKjPr4WUKhVzG1uFQhQjKqjQkbVqJrmahf+ldgBTv36NrkpKpA== dependencies: "@babel/runtime" "7.4.5" invariant "^2.2.4" @@ -10585,11 +10578,6 @@ regenerate@^1.4.2: resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.13.2: - version "0.13.11" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" - integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== - regenerator-runtime@^0.14.0: version "0.14.0" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45" @@ -11421,9 +11409,9 @@ stylis@4.2.0: integrity sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw== substyle@^9.1.0: - version "9.2.0" - resolved "https://registry.yarnpkg.com/substyle/-/substyle-9.2.0.tgz#71144955058f8f19509187bb9466a13ffbb41710" - integrity sha512-iPvumr9jSEKrCBik3UOYJfvtw2T6Ki7mzx3tMTA6Xtl7mpjLOgPfnImt6kjrdOvvFe05aWxUEgB9jWLm/IBJBQ== + version "9.4.1" + resolved "https://registry.yarnpkg.com/substyle/-/substyle-9.4.1.tgz#6a4647f363bc14fecc51aac371d4dbeda082aa50" + integrity sha512-VOngeq/W1/UkxiGzeqVvDbGDPM8XgUyJVWjrqeh+GgKqspEPiLYndK+XRcsKUHM5Muz/++1ctJ1QCF/OqRiKWA== dependencies: "@babel/runtime" "^7.3.4" invariant "^2.2.4" From 5d2a04e04ba57a570c9366b4fb435aaa01034f5d Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Thu, 9 May 2024 15:52:15 -0700 Subject: [PATCH 252/376] fix(crons): Fix `max_workers` (#70626) Missed this in the previous pr --- src/sentry/monitors/consumers/monitor_consumer.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index 576ad00e6f1cd9..a139a6369c8c8c 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -1004,11 +1004,6 @@ class StoreMonitorCheckInStrategyFactory(ProcessingStrategyFactory[KafkaPayload] Does the consumer process unrelated check-ins in parallel? """ - max_workers: int | None = None - """ - Number of Executor workers to use when running in parallel - """ - max_batch_size = 500 """ How many messages will be batched at once when in parallel mode. 
@@ -1028,14 +1023,12 @@ def __init__(
     ) -> None:
         if mode == "parallel":
             self.parallel = True
-            self.parallel_executor = ThreadPoolExecutor(max_workers=self.max_workers)
+            self.parallel_executor = ThreadPoolExecutor(max_workers=max_workers)

         if max_batch_size is not None:
             self.max_batch_size = max_batch_size

         if max_batch_time is not None:
             self.max_batch_time = max_batch_time
-        if max_workers is not None:
-            self.max_workers = max_workers

     def shutdown(self) -> None:
         if self.parallel_executor:

From b570e75f9d6302b3e201ddeb228d923834777e5f Mon Sep 17 00:00:00 2001
From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com>
Date: Thu, 9 May 2024 15:53:42 -0700
Subject: [PATCH 253/376] update: remove timebox from query_extra (#70624)

PR to remove the time restrictions from the activated snuba subscription
queries.

See: https://github.com/getsentry/sentry/pull/70486

Initially we had wanted to introduce a timebox to our subscriptions to ensure
we do not receive any events outside of our timebox. Snuba subscriptions do
_not_ support timestamp restrictions, though, as these could cause zombie
queries that just eat memory.

Since we're already restricting on the Release versions and we do not have
any use cases currently where this would become an issue, we're rolling
forward with simply removing the timeboxed restrictions.

If other activation triggers emerge that might be problematic with this
setup, we can reach out to the SNS team to update their APIs to enable
timestamps for subscriptions

---
 src/sentry/incidents/models/alert_rule.py | 2 +-
 src/sentry/models/releases/release_project.py | 7 ++--
 src/sentry/snuba/models.py | 1 +
 src/sentry/snuba/tasks.py | 1 -
 .../models/releases/test_release_project.py | 39 ++++++++-----------
 5 files changed, 22 insertions(+), 28 deletions(-)

diff --git a/src/sentry/incidents/models/alert_rule.py b/src/sentry/incidents/models/alert_rule.py
index 669bc9fcf3cad2..06f44d1f7ab8ae 100644
--- a/src/sentry/incidents/models/alert_rule.py
+++ b/src/sentry/incidents/models/alert_rule.py
@@ -320,7 +320,7 @@ def subscribe_projects(
             projects,
             INCIDENTS_SNUBA_SUBSCRIPTION_TYPE,
             self.snuba_query,
-            query_extra,
+            query_extra=query_extra,
         )
         if self.monitor_type == AlertRuleMonitorType.ACTIVATED.value:
             # NOTE: Activated Alert Rules are conditionally subscribed
diff --git a/src/sentry/models/releases/release_project.py b/src/sentry/models/releases/release_project.py
index 4e86a8f3e02a3d..76620a7d679b85 100644
--- a/src/sentry/models/releases/release_project.py
+++ b/src/sentry/models/releases/release_project.py
@@ -4,7 +4,6 @@
 from typing import TYPE_CHECKING, ClassVar

 from django.db import models
-from django.utils import timezone

 from sentry import features
 from sentry.backup.scopes import RelocationScope
@@ -41,7 +40,7 @@ def _on_post(project, trigger):
         schedule_invalidate_project_config(project_id=project.id, trigger=trigger)

     @staticmethod
-    def _subscribe_project_to_alert_rule(
+    def subscribe_project_to_alert_rule(
         project: Project, release: Release, trigger: str
     ) -> list[QuerySubscription]:
         """
@@ -51,7 +50,7 @@ def _subscribe_project_to_alert_rule(
         """
         from sentry.incidents.models.alert_rule import AlertRule

-        query_extra = f"release:{release.version} AND event.timestamp:>{timezone.now().isoformat()}"
+        query_extra = f"release:{release.version}"
         return AlertRule.objects.conditionally_subscribe_project_to_alert_rules(
             project=project,
             activation_condition=AlertRuleActivationConditionType.RELEASE_CREATION,
@@ -63,7 +62,7 @@ def _subscribe_project_to_alert_rule(
     def
post_save(self, instance, created, **kwargs): self._on_post(project=instance.project, trigger="releaseproject.post_save") if created: - self._subscribe_project_to_alert_rule( + self.subscribe_project_to_alert_rule( project=instance.project, release=instance.release, trigger="releaseproject.post_save", diff --git a/src/sentry/snuba/models.py b/src/sentry/snuba/models.py index 65057ceea604bf..4c6d206d59a02d 100644 --- a/src/sentry/snuba/models.py +++ b/src/sentry/snuba/models.py @@ -117,6 +117,7 @@ class Status(Enum): query_extra = models.TextField( null=True ) # additional query filters to attach to the query created in Snuba such as datetime filters, or release/deploy tags + # TODO: timebox is not utilized. Subscription queries do not support timestamp restrictions # timebox_start/end is optional timebox restrictions to apply to the snuba query timebox_start = models.DateTimeField(null=True) timebox_end = models.DateTimeField(null=True) diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py index 13c06cabf865d7..77792b4cfea398 100644 --- a/src/sentry/snuba/tasks.py +++ b/src/sentry/snuba/tasks.py @@ -218,7 +218,6 @@ def _create_in_snuba(subscription: QuerySubscription) -> str: snuba_query, subscription.project.organization_id, ) - # TODO: determine whether concatenating query_extra is proper snql_query = build_query_builder( entity_subscription=entity_subscription, query=f'{snuba_query.query}{f" and {subscription.query_extra}" if subscription.query_extra else ""}', diff --git a/tests/sentry/models/releases/test_release_project.py b/tests/sentry/models/releases/test_release_project.py index 3c8586987fa551..2bf95c0ad3db2a 100644 --- a/tests/sentry/models/releases/test_release_project.py +++ b/tests/sentry/models/releases/test_release_project.py @@ -1,8 +1,6 @@ from unittest.mock import call as mock_call from unittest.mock import patch -from django.utils import timezone - from sentry.dynamic_sampling import ProjectBoostedReleases from sentry.incidents.models.alert_rule import AlertRule, AlertRuleMonitorType from sentry.incidents.utils.types import AlertRuleActivationConditionType @@ -12,7 +10,6 @@ from sentry.snuba.models import QuerySubscription from sentry.testutils.cases import TransactionTestCase from sentry.testutils.helpers import Feature -from sentry.testutils.helpers.datetime import freeze_time class ReleaseProjectManagerTestCase(TransactionTestCase): @@ -20,7 +17,7 @@ def test_custom_manager(self): self.assertIsInstance(ReleaseProject.objects, ReleaseProjectModelManager) @receivers_raise_on_send() - @patch.object(ReleaseProjectModelManager, "_subscribe_project_to_alert_rule") + @patch.object(ReleaseProjectModelManager, "subscribe_project_to_alert_rule") def test_post_save_signal_runs_if_dynamic_sampling_is_disabled(self, _): project = self.create_project(name="foo") release = Release.objects.create(organization_id=project.organization_id, version="42") @@ -32,7 +29,7 @@ def test_post_save_signal_runs_if_dynamic_sampling_is_disabled(self, _): assert mock_task.mock_calls == [] @receivers_raise_on_send() - @patch.object(ReleaseProjectModelManager, "_subscribe_project_to_alert_rule") + @patch.object(ReleaseProjectModelManager, "subscribe_project_to_alert_rule") def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release_rule_does_not_exist( self, _, @@ -52,7 +49,7 @@ def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release assert mock_task.mock_calls == [] @receivers_raise_on_send() - @patch.object(ReleaseProjectModelManager, 
"_subscribe_project_to_alert_rule") + @patch.object(ReleaseProjectModelManager, "subscribe_project_to_alert_rule") def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release_rule_exists( self, _, @@ -79,7 +76,7 @@ def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release @receivers_raise_on_send() @patch("sentry.models.releases.release_project.schedule_invalidate_project_config") - @patch.object(ReleaseProjectModelManager, "_subscribe_project_to_alert_rule") + @patch.object(ReleaseProjectModelManager, "subscribe_project_to_alert_rule") def test_post_save_subscribes_project_to_alert_rule_if_created( self, mock_subscribe_project_to_alert_rule, _ ): @@ -97,22 +94,20 @@ def test_post_save_subscribes_project_to_alert_rule_if_created( def test_subscribe_project_to_alert_rule_constructs_query(self, mock_conditionally_subscribe): project = self.create_project(name="foo") release = Release.objects.create(organization_id=project.organization_id, version="42") - now = timezone.now() - with freeze_time(now): - ReleaseProjectModelManager._subscribe_project_to_alert_rule( - project=project, release=release, trigger="test" - ) + ReleaseProjectModelManager.subscribe_project_to_alert_rule( + project=project, release=release, trigger="test" + ) - assert mock_conditionally_subscribe.call_count == 1 - assert mock_conditionally_subscribe.mock_calls == [ - mock_call( - project=project, - activation_condition=AlertRuleActivationConditionType.RELEASE_CREATION, - query_extra=f"release:42 AND event.timestamp:>{now.isoformat()}", - origin="test", - activator="42", - ) - ] + assert mock_conditionally_subscribe.call_count == 1 + assert mock_conditionally_subscribe.mock_calls == [ + mock_call( + project=project, + activation_condition=AlertRuleActivationConditionType.RELEASE_CREATION, + query_extra="release:42", + origin="test", + activator="42", + ) + ] def test_unmocked_subscribe_project_to_alert_rule_constructs_query(self): # Let the logic flow through to snuba and see whether we properly construct the snuba query From 680131d60980a3b078494ea75697dfe8071d7d8c Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Thu, 9 May 2024 16:05:29 -0700 Subject: [PATCH 254/376] cogs(issues): remove noisy handle owner assignment log (#70618) this log is very noisy and is not critical for debugging. removing. we also already have a metric. 
--- src/sentry/tasks/post_process.py | 13 ----------- tests/sentry/tasks/test_post_process.py | 29 ++++++++----------------- 2 files changed, 9 insertions(+), 33 deletions(-) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index f68ccae62d6f67..504ae6c6d414be 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -216,12 +216,6 @@ def handle_owner_assignment(job): event = job["event"] project, group = event.project, event.group - basic_logging_details = { - "event": event.event_id, - "group": event.group_id, - "project": event.project_id, - "organization": event.project.organization_id, - } # We want to debounce owner assignment when: # - GroupOwner of type Ownership Rule || CodeOwner exist with TTL 1 day # - we tried to calculate and could not find issue owners with TTL 1 day @@ -233,13 +227,6 @@ def handle_owner_assignment(job): group_id=group.id, organization_id=event.project.organization_id, ): - logger.info( - "handle_owner_assignment.ratelimited", - extra={ - **basic_logging_details, - "reason": "ratelimited", - }, - ) metrics.incr("sentry.task.post_process.handle_owner_assignment.ratelimited") return diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 46183a8d5bb7ea..86c14410fc2ed1 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -1333,8 +1333,8 @@ def test_debounces_handle_owner_assignments(self, mock_incr): ) mock_incr.assert_any_call("sentry.tasks.post_process.handle_owner_assignment.debounce") - @patch("sentry.tasks.post_process.logger") - def test_issue_owners_should_ratelimit(self, mock_logger): + @patch("sentry.utils.metrics.incr") + def test_issue_owners_should_ratelimit(self, mock_incr): cache.set( f"issue_owner_assignment_ratelimiter:{self.project.id}", (set(range(0, ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT * 10, 10)), datetime.now()), @@ -1354,17 +1354,8 @@ def test_issue_owners_should_ratelimit(self, mock_logger): is_new_group_environment=False, event=event, ) - expected_extra = { - "event": event.event_id, - "group": event.group_id, - "project": event.project_id, - "organization": event.project.organization_id, - "reason": "ratelimited", - } - mock_logger.info.assert_any_call( - "handle_owner_assignment.ratelimited", extra=expected_extra - ) - mock_logger.reset_mock() + mock_incr.assert_any_call("sentry.task.post_process.handle_owner_assignment.ratelimited") + mock_incr.reset_mock() # Raise this organization's ratelimit with self.feature("organizations:increased-issue-owners-rate-limit"): @@ -1375,12 +1366,10 @@ def test_issue_owners_should_ratelimit(self, mock_logger): event=event, ) with pytest.raises(AssertionError): - mock_logger.info.assert_any_call( - "handle_owner_assignment.ratelimited", extra=expected_extra + mock_incr.assert_any_call( + "sentry.task.post_process.handle_owner_assignment.ratelimited" ) - - # Still enforce the raised limit - mock_logger.reset_mock() + mock_incr.reset_mock() cache.set( f"issue_owner_assignment_ratelimiter:{self.project.id}", ( @@ -1395,8 +1384,8 @@ def test_issue_owners_should_ratelimit(self, mock_logger): is_new_group_environment=False, event=event, ) - mock_logger.info.assert_any_call( - "handle_owner_assignment.ratelimited", extra=expected_extra + mock_incr.assert_any_call( + "sentry.task.post_process.handle_owner_assignment.ratelimited" ) From 61ae1ef9de33d2724ab3caa98a5bd265df897d89 Mon Sep 17 00:00:00 2001 From: Tillman Elser Date: Thu, 9 May 2024 16:14:33 
-0700 Subject: [PATCH 255/376] ref(seer): separate out all seer connections (#69961) there are now 4 different URLs corresponding to the 4 different seer deployments/services - breakpoint detection - severity - grouping - autofix i have left anomaly detection for now as the endpoint file is still there, but in a separate PR we can clean that up too --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- ...anization_transaction_anomaly_detection.py | 4 ++-- src/sentry/conf/server.py | 23 +++++++++++++------ src/sentry/event_manager.py | 12 +++++----- src/sentry/seer/utils.py | 16 ++++++------- .../test_group_similar_issues_embeddings.py | 16 ++++++------- tests/sentry/seer/test_utils.py | 12 +++++----- 6 files changed, 46 insertions(+), 37 deletions(-) diff --git a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py b/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py index 486f783f1477fd..7acf5f06e3ddf4 100644 --- a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py +++ b/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py @@ -16,12 +16,12 @@ from sentry.snuba.metrics_enhanced_performance import timeseries_query ads_connection_pool = connection_from_url( - settings.ANOMALY_DETECTION_URL, + settings.SEER_ANOMALY_DETECTION_URL, retries=Retry( total=5, status_forcelist=[408, 429, 502, 503, 504], ), - timeout=settings.ANOMALY_DETECTION_TIMEOUT, + timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT, ) MappedParams = namedtuple("MappedParams", ["query_start", "query_end", "granularity"]) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 7921750f3c33cd..e9f5149e8f941b 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3668,15 +3668,24 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # constraints instead of setting the column to not null. ZERO_DOWNTIME_MIGRATIONS_USE_NOT_NULL = False -ANOMALY_DETECTION_URL = "http://127.0.0.1:9091" -ANOMALY_DETECTION_TIMEOUT = 30 +SEER_DEFAULT_URL = "http://127.0.0.1:9091" # for local development +SEER_DEFAULT_TIMEOUT = 5 -# TODO: Once this moves to its own service, this URL will need to be updated -SEVERITY_DETECTION_URL = ANOMALY_DETECTION_URL -SEVERITY_DETECTION_TIMEOUT = 0.3 # 300 milliseconds -SEVERITY_DETECTION_RETRIES = 1 +SEER_BREAKPOINT_DETECTION_URL = SEER_DEFAULT_URL # for local development, these share a URL +SEER_BREAKPOINT_DETECTION_TIMEOUT = 5 + +SEER_SEVERITY_URL = SEER_DEFAULT_URL # for local development, these share a URL +SEER_SEVERITY_TIMEOUT = 0.3 # 300 milliseconds +SEER_SEVERITY_RETRIES = 1 + +SEER_AUTOFIX_URL = SEER_DEFAULT_URL # for local development, these share a URL + +SEER_GROUPING_URL = SEER_DEFAULT_URL # for local development, these share a URL +SEER_GROUPING_TIMEOUT = 1 + +SEER_ANOMALY_DETECTION_URL = SEER_DEFAULT_URL # for local development, these share a URL +SEER_ANOMALY_DETECTION_TIMEOUT = 5 -SEER_AUTOFIX_URL = ANOMALY_DETECTION_URL # In local development this is the same as ANOMALY_DETECTION_URL, for prod check getsentry. 
# This is the URL to the profiling service SENTRY_VROOM = os.getenv("VROOM", "http://127.0.0.1:8085") diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 4e3e8cb8ed32b8..32e441e3775ca4 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -31,7 +31,7 @@ tsdb, ) from sentry.attachments import CachedAttachment, MissingAttachmentChunks, attachment_cache -from sentry.conf.server import SEVERITY_DETECTION_RETRIES +from sentry.conf.server import SEER_SEVERITY_RETRIES from sentry.constants import ( DEFAULT_STORE_NORMALIZER_ARGS, LOG_LEVELS_MAP, @@ -2220,8 +2220,8 @@ def _process_existing_aggregate( severity_connection_pool = connection_from_url( - settings.SEVERITY_DETECTION_URL, - timeout=settings.SEVERITY_DETECTION_TIMEOUT, # Defaults to 300 milliseconds + settings.SEER_SEVERITY_URL, + timeout=settings.SEER_SEVERITY_TIMEOUT, # Defaults to 300 milliseconds ) @@ -2432,7 +2432,7 @@ def _get_severity_score(event: Event) -> tuple[float, str]: with metrics.timer(op): timeout = options.get( "issues.severity.seer-timout", - settings.SEVERITY_DETECTION_TIMEOUT / 1000, + settings.SEER_SEVERITY_TIMEOUT / 1000, ) response = severity_connection_pool.urlopen( "POST", @@ -2448,8 +2448,8 @@ def _get_severity_score(event: Event) -> tuple[float, str]: except MaxRetryError as e: logger.warning( "Unable to get severity score from microservice after %s retr%s. Got MaxRetryError caused by: %s.", - SEVERITY_DETECTION_RETRIES, - "ies" if SEVERITY_DETECTION_RETRIES > 1 else "y", + SEER_SEVERITY_RETRIES, + "ies" if SEER_SEVERITY_RETRIES > 1 else "y", repr(e.reason), extra=logger_data, ) diff --git a/src/sentry/seer/utils.py b/src/sentry/seer/utils.py index 3842a02dcbb027..6e5b83089034ae 100644 --- a/src/sentry/seer/utils.py +++ b/src/sentry/seer/utils.py @@ -47,27 +47,27 @@ class BreakpointResponse(TypedDict): data: list[BreakpointData] -seer_connection_pool = connection_from_url( - settings.ANOMALY_DETECTION_URL, +seer_grouping_connection_pool = connection_from_url( + settings.SEER_GROUPING_URL, retries=Retry( total=5, status_forcelist=[408, 429, 502, 503, 504], ), - timeout=settings.ANOMALY_DETECTION_TIMEOUT, + timeout=settings.SEER_GROUPING_TIMEOUT, ) -seer_staging_connection_pool = connection_from_url( - settings.SEER_AUTOFIX_URL, +seer_breakpoint_connection_pool = connection_from_url( + settings.SEER_BREAKPOINT_DETECTION_URL, retries=Retry( total=5, status_forcelist=[408, 429, 502, 503, 504], ), - timeout=settings.ANOMALY_DETECTION_TIMEOUT, + timeout=settings.SEER_BREAKPOINT_DETECTION_TIMEOUT, ) def detect_breakpoints(breakpoint_request) -> BreakpointResponse: - response = seer_connection_pool.urlopen( + response = seer_breakpoint_connection_pool.urlopen( "POST", "/trends/breakpoint-detector", body=json.dumps(breakpoint_request), @@ -178,7 +178,7 @@ def get_similar_issues_embeddings( similar_issues_request: SimilarIssuesEmbeddingsRequest, ) -> list[SeerSimilarIssueData]: """Request similar issues data from seer and normalize the results.""" - response = seer_staging_connection_pool.urlopen( + response = seer_grouping_connection_pool.urlopen( "POST", SEER_SIMILAR_ISSUES_URL, body=json.dumps(similar_issues_request), diff --git a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py index c02c59c2a1ae5c..bba58fe4c7c401 100644 --- a/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py +++ b/tests/sentry/api/endpoints/test_group_similar_issues_embeddings.py @@ -695,7 +695,7 @@ 
def test_no_feature_flag(self): # TODO: Remove once switch is complete @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") @mock.patch("sentry.api.endpoints.group_similar_issues_embeddings.logger") def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request): seer_return_value: SimilarIssuesEmbeddingsResponse = { @@ -742,7 +742,7 @@ def test_simple_only_group_id_returned(self, mock_logger, mock_seer_request): ) @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") @mock.patch("sentry.api.endpoints.group_similar_issues_embeddings.logger") def test_simple_only_hash_returned(self, mock_logger, mock_seer_request): seer_return_value: SimilarIssuesEmbeddingsResponse = { @@ -790,7 +790,7 @@ def test_simple_only_hash_returned(self, mock_logger, mock_seer_request): # TODO: Remove once switch is complete @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") @mock.patch("sentry.api.endpoints.group_similar_issues_embeddings.logger") def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request): seer_return_value: SimilarIssuesEmbeddingsResponse = { @@ -839,7 +839,7 @@ def test_simple_group_id_and_hash_returned(self, mock_logger, mock_seer_request) @with_feature("projects:similarity-embeddings") @mock.patch("sentry.analytics.record") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_multiple(self, mock_seer_request, mock_record): over_threshold_group_event = save_new_event({"message": "Maisey is silly"}, self.project) under_threshold_group_event = save_new_event({"message": "Charlie is goofy"}, self.project) @@ -899,7 +899,7 @@ def test_multiple(self, mock_seer_request, mock_record): @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.logger") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_incomplete_return_data(self, mock_seer_request, mock_logger): # Two suggested groups, one with valid data, one missing both parent group id and parent hash. # We should log the second and return the first. @@ -946,7 +946,7 @@ def test_incomplete_return_data(self, mock_seer_request, mock_logger): @with_feature("projects:similarity-embeddings") @mock.patch("sentry.seer.utils.logger") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_nonexistent_group(self, mock_seer_request, mock_logger): """ The seer API can return groups that do not exist if they have been deleted/merged. 
@@ -1000,7 +1000,7 @@ def test_nonexistent_group(self, mock_seer_request, mock_logger): @with_feature("projects:similarity-embeddings") @mock.patch("sentry.analytics.record") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_empty_seer_return(self, mock_seer_request, mock_record): mock_seer_request.return_value = HTTPResponse([]) response = self.client.get(self.path) @@ -1070,7 +1070,7 @@ def test_no_exception(self): assert response.data == [] @with_feature("projects:similarity-embeddings") - @mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") + @mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_no_optional_params(self, mock_seer_request): """ Test that optional parameters, k and threshold, can not be included. diff --git a/tests/sentry/seer/test_utils.py b/tests/sentry/seer/test_utils.py index a6ef47c970e381..3e23bf6813421c 100644 --- a/tests/sentry/seer/test_utils.py +++ b/tests/sentry/seer/test_utils.py @@ -18,7 +18,7 @@ from sentry.utils.types import NonNone -@mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") +@mock.patch("sentry.seer.utils.seer_breakpoint_connection_pool.urlopen") def test_detect_breakpoints(mock_urlopen): data = { "data": [ @@ -50,7 +50,7 @@ def test_detect_breakpoints(mock_urlopen): ], ) @mock.patch("sentry_sdk.capture_exception") -@mock.patch("sentry.seer.utils.seer_connection_pool.urlopen") +@mock.patch("sentry.seer.utils.seer_breakpoint_connection_pool.urlopen") def test_detect_breakpoints_errors(mock_urlopen, mock_capture_exception, body, status): mock_urlopen.return_value = HTTPResponse(body, status=status) @@ -60,7 +60,7 @@ def test_detect_breakpoints_errors(mock_urlopen, mock_capture_exception, body, s # TODO: Remove once switch is complete @django_db_all -@mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") +@mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_simple_similar_issues_embeddings_only_group_id_returned( mock_seer_request, default_project ): @@ -89,7 +89,7 @@ def test_simple_similar_issues_embeddings_only_group_id_returned( @django_db_all -@mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") +@mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_simple_similar_issues_embeddings_only_hash_returned(mock_seer_request, default_project): """Test that valid responses are decoded and returned.""" event = save_new_event({"message": "Dogs are great!"}, default_project) @@ -125,7 +125,7 @@ def test_simple_similar_issues_embeddings_only_hash_returned(mock_seer_request, # TODO: Remove once switch is complete @django_db_all -@mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") +@mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_simple_similar_issues_embeddings_both_returned(mock_seer_request, default_project): """Test that valid responses are decoded and returned.""" event = save_new_event({"message": "Dogs are great!"}, default_project) @@ -154,7 +154,7 @@ def test_simple_similar_issues_embeddings_both_returned(mock_seer_request, defau @django_db_all -@mock.patch("sentry.seer.utils.seer_staging_connection_pool.urlopen") +@mock.patch("sentry.seer.utils.seer_grouping_connection_pool.urlopen") def test_empty_similar_issues_embeddings(mock_seer_request, default_project): """Test that empty responses are returned.""" event = save_new_event({"message": "Dogs are great!"}, 
default_project) From 07ac9006c1cfa7c9fc873f5fa27774d16df8cbd3 Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Thu, 9 May 2024 16:59:59 -0700 Subject: [PATCH 256/376] update: QuerySubscriptions remove timebox columns from state (#70628) --- migrations_lockfile.txt | 2 +- .../migrations/0718_delete_timebox_columns.py | 41 +++++++++++++++++++ src/sentry/snuba/models.py | 4 -- .../test_default_comparators.pysnap | 4 +- 4 files changed, 43 insertions(+), 8 deletions(-) create mode 100644 src/sentry/migrations/0718_delete_timebox_columns.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index a311f852ab3cb8..6116641b01cb7f 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0717_query_subscription_timebox +sentry: 0718_delete_timebox_columns social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0718_delete_timebox_columns.py b/src/sentry/migrations/0718_delete_timebox_columns.py new file mode 100644 index 00000000000000..322cb3e0a82f4d --- /dev/null +++ b/src/sentry/migrations/0718_delete_timebox_columns.py @@ -0,0 +1,41 @@ +# Generated by Django 5.0.4 on 2024-05-09 22:41 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0717_query_subscription_timebox"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[], + state_operations=[ + migrations.RemoveField( + model_name="querysubscription", + name="timebox_end", + ), + migrations.RemoveField( + model_name="querysubscription", + name="timebox_start", + ), + ], + ) + ] diff --git a/src/sentry/snuba/models.py b/src/sentry/snuba/models.py index 4c6d206d59a02d..c8468eb52057f4 100644 --- a/src/sentry/snuba/models.py +++ b/src/sentry/snuba/models.py @@ -117,10 +117,6 @@ class Status(Enum): query_extra = models.TextField( null=True ) # additional query filters to attach to the query created in Snuba such as datetime filters, or release/deploy tags - # TODO: timebox is not utilized. 
Subscription queries do not support timestamp restrictions - # timebox_start/end is optional timebox restrictions to apply to the snuba query - timebox_start = models.DateTimeField(null=True) - timebox_end = models.DateTimeField(null=True) objects: ClassVar[BaseManager[Self]] = BaseManager( cache_fields=("pk", "subscription_id"), cache_ttl=int(timedelta(hours=1).total_seconds()) diff --git a/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap b/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap index 644876c001ccfe..736818d7ba12c4 100644 --- a/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap +++ b/tests/sentry/backup/snapshots/test_comparators/test_default_comparators.pysnap @@ -1,5 +1,5 @@ --- -created: '2024-05-08T18:49:05.727788+00:00' +created: '2024-05-09T23:29:47.969852+00:00' creator: sentry source: tests/sentry/backup/test_comparators.py --- @@ -1150,8 +1150,6 @@ source: tests/sentry/backup/test_comparators.py - class: DatetimeEqualityComparator fields: - date_added - - timebox_end - - timebox_start - class: ForeignKeyComparator fields: - project From 0094f6d37699561a03fd8e621de7a77ac17078ac Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Fri, 10 May 2024 09:05:55 -0400 Subject: [PATCH 257/376] fix(trace-explorer): Other category needs to have components (#70633) The other category needs to have components computed so it can be correctly pushed to the breakdowns. --- .../api/endpoints/organization_traces.py | 20 +++++---- .../api/endpoints/test_organization_traces.py | 42 ++----------------- 2 files changed, 15 insertions(+), 47 deletions(-) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index 8fa31183650b30..ea3518afe78d82 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -972,14 +972,12 @@ def quantize_range(span_start, span_end, trace_range): bin_size = trace_range["min"] - span_duration = span_end - span_start - if bin_size > 0: rounded_start = round((span_start - trace_start) / bin_size) * bin_size + trace_start rounded_end = round((span_end - trace_start) / bin_size) * bin_size + trace_start - # if the span is at least the min duration, ensure it spans 1 bin - if rounded_start == rounded_end and span_duration >= (bin_size * 0.1): + # ensure minimum of 1 width + if rounded_start == rounded_end: rounded_end += bin_size else: rounded_start = span_start @@ -1150,13 +1148,15 @@ def stack_clear(trace, until=None): # Check to see if there is still a gap before the trace ends and fill it # with an other interval. + other_start = trace_range["start"] + other_end = trace_range["end"] other: TraceInterval = { "kind": "other", "project": None, "sdkName": None, "opCategory": None, - "start": trace_range["start"], - "end": trace_range["end"], + "start": other_start, + "end": other_end, "duration": 0, } @@ -1165,8 +1165,12 @@ def stack_clear(trace, until=None): # of the trace that was not covered by one of the intervals. 
while stacks[trace]:
             interval = stack_pop(trace)
-            # use the end time of the last component of the interval
-            other["start"] = max(other["start"], interval["components"][-1][1])
+            other["start"] = max(other["start"], interval["end"])
+            # other["start"] = max(other["start"], interval["components"][-1][1])
+            last_component = interval["components"][-1]
+            other_start = max(other_start, last_component[1])
+
+        other["components"] = [(other_start, other_end)]

         if other["start"] < other["end"]:
             breakdown_push(trace, other)
diff --git a/tests/sentry/api/endpoints/test_organization_traces.py b/tests/sentry/api/endpoints/test_organization_traces.py
index 91d03717ef7200..3c63bddc49eb15 100644
--- a/tests/sentry/api/endpoints/test_organization_traces.py
+++ b/tests/sentry/api/endpoints/test_organization_traces.py
@@ -1457,7 +1457,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self):
                     "precise.finish_ts": 0.05,
                 },
             ],
-            {"a" * 32: (0, 100, 0)},
+            {"a" * 32: (0, 100, 20)},
             {
                 "a"
                 * 32: [
                     {
                         "project": "foo",
                         "opCategory": None,
                         "sdkName": "sentry.javascript.node",
                         "start": 0,
-                        "end": 50,
+                        "end": 40,
                         "kind": "project",
                         "duration": 50,
                     },
                     {
                         "project": None,
                         "opCategory": None,
                         "sdkName": None,
-                        "start": 50,
+                        "start": 40,
                         "end": 100,
                         "kind": "other",
                         "duration": 50,
@@ -1545,42 +1545,6 @@ def test_matching_tag_metrics_but_no_matching_spans(self):
                 },
                 id="merge quantized spans",
             ),
-            pytest.param(
-                [
-                    {
-                        "trace": "a" * 32,
-                        "project": "foo",
-                        "sdk.name": "sentry.javascript.node",
-                        "transaction": "foo1",
-                        "precise.start_ts": 0,
-                        "precise.finish_ts": 0.1,
-                    },
-                    {
-                        "trace": "a" * 32,
-                        "project": "bar",
-                        "sdk.name": "sentry.javascript.node",
-                        "transaction": "bar1",
-                        "precise.start_ts": 0.020,
-                        "precise.finish_ts": 0.021,
-                    },
-                ],
-                {"a" * 32: (0, 100, 20)},
-                {
-                    "a"
-                    * 32: [
-                        {
-                            "project": "foo",
-                            "opCategory": None,
-                            "sdkName": "sentry.javascript.node",
-                            "start": 0,
-                            "end": 100,
-                            "kind": "project",
-                            "duration": 100,
-                        },
-                    ],
-                },
-                id="remove spans that are too small",
-            ),
             pytest.param(
                 [
                     {

From b2197ee53052a7e594bcc3c0c34a6c93d83e33b8 Mon Sep 17 00:00:00 2001
From: Tony Xiao
Date: Fri, 10 May 2024 09:45:18 -0400
Subject: [PATCH 258/376] fix(trace-explorer): Quantize trace duration on frontend too (#70646)

When quantizing, the backend always rounds the bucket size down to ensure
whole-number durations. Do this on the frontend too when rendering, to avoid
an off-by-one error where the bar sizes on the frontend are a tiny bit larger
(<1ms), causing slices to be rendered in a previous offset.
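For illustration, a minimal sketch of the arithmetic (the constants are the
ones defined in fieldRenderers.tsx below; the helper name
`quantizedTraceDuration` is made up for this example):

    // The 200px bar is split into 40 buckets of 5px each.
    const BREAKDOWN_BAR_SIZE = 200;
    const BREAKDOWN_QUANTIZE_STEP = 5;
    const BREAKDOWN_BAR_WIDTH = BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP; // 40

    // Quantize the trace duration down to a whole multiple of the bucket
    // count, mirroring the floor-based bucket size the backend uses.
    function quantizedTraceDuration(start: number, end: number): number {
      return Math.floor((end - start) / BREAKDOWN_BAR_WIDTH) * BREAKDOWN_BAR_WIDTH;
    }

    // A 1003ms trace: slices were previously sized against 1003 while the
    // backend bucketed against 1000, so a slice offset could round one 5px
    // step too early.
    quantizedTraceDuration(0, 1003); // 1000, not 1003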
---
 .../app/views/performance/traces/fieldRenderers.tsx | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/static/app/views/performance/traces/fieldRenderers.tsx b/static/app/views/performance/traces/fieldRenderers.tsx
index 4409d3bcd3ddcf..a7da26efe8d4ac 100644
--- a/static/app/views/performance/traces/fieldRenderers.tsx
+++ b/static/app/views/performance/traces/fieldRenderers.tsx
@@ -125,6 +125,7 @@ export function TraceBreakdownRenderer({

 const BREAKDOWN_BAR_SIZE = 200;
 const BREAKDOWN_QUANTIZE_STEP = 5;
+const BREAKDOWN_BAR_WIDTH = BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP;

 export function SpanBreakdownSliceRenderer({
   trace,
@@ -145,7 +146,8 @@ export function SpanBreakdownSliceRenderer({
   trace: TraceResult;
   offset?: number;
 }) {
-  const traceDuration = trace.end - trace.start;
+  const traceDuration =
+    Math.floor((trace.end - trace.start) / BREAKDOWN_BAR_WIDTH) * BREAKDOWN_BAR_WIDTH;

   const sliceDuration = sliceEnd - sliceStart;

@@ -158,16 +160,11 @@
   const sliceWidth =
     BREAKDOWN_QUANTIZE_STEP *
-    Math.ceil(
-      (BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP) * (sliceDuration / traceDuration)
-    );
+    Math.ceil(BREAKDOWN_BAR_WIDTH * (sliceDuration / traceDuration));

   const relativeSliceStart = sliceStart - trace.start;
   const sliceOffset =
     BREAKDOWN_QUANTIZE_STEP *
-    Math.floor(
-      ((BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP) * relativeSliceStart) /
-      traceDuration
-    );
+    Math.floor((BREAKDOWN_BAR_WIDTH * relativeSliceStart) / traceDuration);

   return (
 Date: Fri, 10 May 2024 09:46:07 -0400
Subject: [PATCH 259/376] feat(mobile-ui): Add screen details and span op table (#70580)

Adds the route for the screen detail page and the span op table UI element.

I've added a new component that renders this switcher for reuse but takes a
module-specific implementation of the event samples panel and the span op
table. I'll refactor App Starts to use this component at a later time; I
wanted to avoid a larger PR.
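For reference, a rough sketch of how a module wires into the shared component
(`UiEventSamples` is hypothetical; `SpanOperationTable` is the module table
added in this PR; the actual prop types are in samplesTables.tsx below):

    // Hypothetical module-specific wiring of the shared switcher:
    function UiSamples({transactionName}: {transactionName: string}) {
      return (
        <SamplesTables
          transactionName={transactionName}
          // the module's own event samples panel
          EventSamples={UiEventSamples}
          // the module's own span op table
          SpanOperationTable={SpanOperationTable}
        />
      );
    }

The switcher only handles the "By Spans" / "By Event" toggle and the shared
filters; each module decides what the injected tables render.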
---
 static/app/routes.tsx | 6 +
 .../mobile/components/samplesTables.spec.tsx | 44 ++++
 .../mobile/components/samplesTables.tsx | 139 ++++++++++++
 .../views/performance/mobile/ui/referrers.tsx | 1 +
 .../mobile/ui/screenSummary/index.tsx | 156 ++++++++++++++
 .../screenSummary/spanOperationTable.spec.tsx | 82 +++++++
 .../ui/screenSummary/spanOperationTable.tsx | 201 ++++++++++++++++++
 7 files changed, 629 insertions(+)
 create mode 100644 static/app/views/performance/mobile/components/samplesTables.spec.tsx
 create mode 100644 static/app/views/performance/mobile/components/samplesTables.tsx
 create mode 100644 static/app/views/performance/mobile/ui/screenSummary/index.tsx
 create mode 100644 static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.spec.tsx
 create mode 100644 static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.tsx

diff --git a/static/app/routes.tsx b/static/app/routes.tsx
index 5799e637d535f3..5bfa9d4029d96b 100644
--- a/static/app/routes.tsx
+++ b/static/app/routes.tsx
@@ -1574,6 +1574,12 @@ function buildRoutes() {
         <IndexRoute
           component={make(() => import('sentry/views/performance/mobile/ui'))}
         />
+        <Route
+          path="spans/"
+          component={make(
+            () => import('sentry/views/performance/mobile/ui/screenSummary')
+          )}
+        />
diff --git a/static/app/views/performance/mobile/components/samplesTables.spec.tsx b/static/app/views/performance/mobile/components/samplesTables.spec.tsx
new file mode 100644
index 00000000000000..eaeddb323f172c
--- /dev/null
+++ b/static/app/views/performance/mobile/components/samplesTables.spec.tsx
@@ -0,0 +1,44 @@
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import {SamplesTables} from 'sentry/views/performance/mobile/components/samplesTables';
+import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases';
+
+jest.mock('sentry/views/starfish/queries/useReleases');
+
+jest.mocked(useReleaseSelection).mockReturnValue({
+  primaryRelease: 'com.example.vu.android@2.10.5-alpha.1+42',
+  isLoading: false,
+  secondaryRelease: 'com.example.vu.android@2.10.3+42',
+});
+
+describe('SamplesTables', () => {
+  it('accepts components for event samples and span operation table', async () => {
+    render(
+      <SamplesTables
+        EventSamples={({release}) => (
+          <div>{`This is a custom Event Samples table for release: ${release}`}</div>
+        )}
+        SpanOperationTable={_props => <div>This is a custom Span Operation table</div>
} + transactionName={''} + /> + ); + + // The span operation table is rendered first + expect( + await screen.findByText('This is a custom Span Operation table') + ).toBeInTheDocument(); + + await userEvent.click(screen.getByRole('radio', {name: 'By Event'})); + + expect( + await screen.findByText( + 'This is a custom Event Samples table for release: com.example.vu.android@2.10.5-alpha.1+42' + ) + ).toBeInTheDocument(); + expect( + screen.getByText( + 'This is a custom Event Samples table for release: com.example.vu.android@2.10.3+42' + ) + ).toBeInTheDocument(); + }); +}); diff --git a/static/app/views/performance/mobile/components/samplesTables.tsx b/static/app/views/performance/mobile/components/samplesTables.tsx new file mode 100644 index 00000000000000..5dff00a36c878a --- /dev/null +++ b/static/app/views/performance/mobile/components/samplesTables.tsx @@ -0,0 +1,139 @@ +import {useMemo, useState} from 'react'; +import styled from '@emotion/styled'; + +import ErrorBoundary from 'sentry/components/errorBoundary'; +import {SegmentedControl} from 'sentry/components/segmentedControl'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {SpanOpSelector} from 'sentry/views/performance/mobile/appStarts/screenSummary/spanOpSelector'; +import {DeviceClassSelector} from 'sentry/views/performance/mobile/screenload/screenLoadSpans/deviceClassSelector'; +import { + MobileCursors, + MobileSortKeys, +} from 'sentry/views/performance/mobile/screenload/screens/constants'; +import {useReleaseSelection} from 'sentry/views/starfish/queries/useReleases'; + +const EVENT = 'event'; +const SPANS = 'spans'; + +interface EventSamplesProps { + cursorName: string; + footerAlignedPagination: boolean; + sortKey: string; + transaction: string; + release?: string; +} + +export interface SpanOperationTableProps { + transaction: string; + primaryRelease?: string; + secondaryRelease?: string; +} + +interface SamplesTablesProps { + EventSamples: React.ComponentType; + SpanOperationTable: React.ComponentType; + transactionName: string; +} + +export function SamplesTables({ + transactionName, + EventSamples, + SpanOperationTable, +}: SamplesTablesProps) { + const [sampleType, setSampleType] = useState(SPANS); + const {primaryRelease, secondaryRelease} = useReleaseSelection(); + + const content = useMemo(() => { + if (sampleType === EVENT) { + return ( + + + + + + + + + ); + } + + return ( + + + + ); + }, [ + EventSamples, + SpanOperationTable, + primaryRelease, + sampleType, + secondaryRelease, + transactionName, + ]); + + return ( +
+ + + {sampleType === SPANS && ( + + )} + + + setSampleType(value)} + defaultValue={SPANS} + label={t('Sample Type Selection')} + > + + {t('By Spans')} + + + {t('By Event')} + + + + {content} +
+ ); +} + +const EventSplitContainer = styled('div')` + display: grid; + grid-template-columns: 1fr 1fr; + gap: ${space(1.5)}; +`; + +const Controls = styled('div')` + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: ${space(1)}; +`; + +const FiltersContainer = styled('div')` + display: flex; + gap: ${space(1)}; + align-items: center; +`; diff --git a/static/app/views/performance/mobile/ui/referrers.tsx b/static/app/views/performance/mobile/ui/referrers.tsx index c0246f26efb6e7..ecf651377d5d10 100644 --- a/static/app/views/performance/mobile/ui/referrers.tsx +++ b/static/app/views/performance/mobile/ui/referrers.tsx @@ -1,4 +1,5 @@ export enum Referrer { OVERVIEW_SCREENS_TABLE = 'api.performance.module.ui.screen-table', MOBILE_UI_BAR_CHART = 'api.performance.mobile.ui.bar-chart', + SPAN_OPERATION_TABLE = 'api.performance.mobile.ui.span-table', } diff --git a/static/app/views/performance/mobile/ui/screenSummary/index.tsx b/static/app/views/performance/mobile/ui/screenSummary/index.tsx new file mode 100644 index 00000000000000..ffe95513c4f128 --- /dev/null +++ b/static/app/views/performance/mobile/ui/screenSummary/index.tsx @@ -0,0 +1,156 @@ +import styled from '@emotion/styled'; +import omit from 'lodash/omit'; + +import type {Crumb} from 'sentry/components/breadcrumbs'; +import Breadcrumbs from 'sentry/components/breadcrumbs'; +import * as Layout from 'sentry/components/layouts/thirds'; +import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; +import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; +import PageFiltersContainer from 'sentry/components/organizations/pageFilters/container'; +import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import useRouter from 'sentry/utils/useRouter'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {SamplesTables} from 'sentry/views/performance/mobile/components/samplesTables'; +import {ScreenLoadSpanSamples} from 'sentry/views/performance/mobile/screenload/screenLoadSpans/samples'; +import {SpanOperationTable} from 'sentry/views/performance/mobile/ui/screenSummary/spanOperationTable'; +import {ReleaseComparisonSelector} from 'sentry/views/starfish/components/releaseSelector'; +import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; + +type Query = { + 'device.class': string; + primaryRelease: string; + project: string; + secondaryRelease: string; + spanDescription: string; + spanGroup: string; + spanOp: string; + transaction: string; +}; + +function ScreenSummary() { + const organization = useOrganization(); + const location = useLocation(); + const router = useRouter(); + + const { + transaction: transactionName, + spanGroup, + spanDescription, + spanOp, + 'device.class': deviceClass, + } = location.query; + + const crumbs: Crumb[] = [ + { + label: t('Performance'), + to: normalizeUrl(`/organizations/${organization.slug}/performance/`), + preservePageFilters: true, + }, + { + label: t('Mobile UI'), + to: normalizeUrl({ + pathname: `/organizations/${organization.slug}/performance/mobile/ui/`, + query: { + ...omit(location.query, [ + 
QueryParameterNames.SPANS_SORT, + 'transaction', + SpanMetricsField.SPAN_OP, + ]), + }, + }), + preservePageFilters: true, + }, + { + label: t('Screen Summary'), + }, + ]; + + return ( + + + + + + + {transactionName} + + + + + + + + + + + + + + + + +
} + /> + + + {spanGroup && spanOp && ( + { + router.replace({ + pathname: router.location.pathname, + query: omit( + router.location.query, + 'spanGroup', + 'transactionMethod', + 'spanDescription', + 'spanOp' + ), + }); + }} + /> + )} + + + + + + + ); +} + +export default ScreenSummary; + +const ControlsContainer = styled('div')` + display: flex; + gap: ${space(1.5)}; +`; + +const HeaderContainer = styled('div')` + display: flex; + flex-wrap: wrap; + gap: ${space(2)}; + justify-content: space-between; +`; + +const SamplesContainer = styled('div')` + margin-top: ${space(2)}; +`; diff --git a/static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.spec.tsx b/static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.spec.tsx new file mode 100644 index 00000000000000..14b3636d913213 --- /dev/null +++ b/static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.spec.tsx @@ -0,0 +1,82 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import usePageFilters from 'sentry/utils/usePageFilters'; +import {Referrer} from 'sentry/views/performance/mobile/ui/referrers'; +import {SpanOperationTable} from 'sentry/views/performance/mobile/ui/screenSummary/spanOperationTable'; + +jest.mock('sentry/utils/usePageFilters'); + +jest.mocked(usePageFilters).mockReturnValue({ + isReady: true, + desyncedFilters: new Set(), + pinnedFilters: new Set(), + shouldPersist: true, + selection: { + datetime: { + period: '10d', + start: null, + end: null, + utc: false, + }, + environments: [], + projects: [], + }, +}); + +describe('SpanOperationTable', () => { + it('renders and fetches the proper data', () => { + const spanOpTableRequestMock = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/', + body: [], + match: [MockApiClient.matchQuery({referrer: Referrer.SPAN_OPERATION_TABLE})], + }); + + render( + + ); + + [ + 'Operation', + 'Span Description', + 'Slow (R1)', + 'Slow (R2)', + 'Frozen (R1)', + 'Frozen (R2)', + 'Delay (R1)', + 'Delay (R2)', + ].forEach(header => { + expect(screen.getByRole('columnheader', {name: header})).toBeInTheDocument(); + }); + + expect(screen.getAllByRole('columnheader', {name: 'Change'})).toHaveLength(3); + + expect(spanOpTableRequestMock).toHaveBeenCalledTimes(1); + + expect(spanOpTableRequestMock).toHaveBeenCalledWith( + '/organizations/org-slug/events/', + expect.objectContaining({ + query: expect.objectContaining({ + field: [ + 'project.id', + 'span.op', + 'span.group', + 'span.description', + 'avg_if(mobile.slow_frames,release,foo)', + 'avg_if(mobile.slow_frames,release,bar)', + 'avg_compare(mobile.slow_frames,release,foo,bar)', + 'avg_if(mobile.frozen_frames,release,foo)', + 'avg_if(mobile.frozen_frames,release,bar)', + 'avg_compare(mobile.frozen_frames,release,foo,bar)', + 'avg_if(mobile.frames_delay,release,foo)', + 'avg_if(mobile.frames_delay,release,bar)', + 'avg_compare(mobile.frames_delay,release,foo,bar)', + ], + }), + }) + ); + }); +}); diff --git a/static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.tsx b/static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.tsx new file mode 100644 index 00000000000000..0f7b65585dd8c5 --- /dev/null +++ b/static/app/views/performance/mobile/ui/screenSummary/spanOperationTable.tsx @@ -0,0 +1,201 @@ +import * as qs from 'query-string'; + +import {getInterval} from 'sentry/components/charts/utils'; +import Duration from 'sentry/components/duration'; +import Link from 'sentry/components/links/link'; +import {t} from 
'sentry/locale'; +import type {NewQuery} from 'sentry/types/organization'; +import EventView from 'sentry/utils/discover/eventView'; +import {NumberContainer} from 'sentry/utils/discover/styles'; +import {DiscoverDatasets} from 'sentry/utils/discover/types'; +import {decodeScalar} from 'sentry/utils/queryString'; +import {MutableSearch} from 'sentry/utils/tokenizeSearch'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import usePageFilters from 'sentry/utils/usePageFilters'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {APP_START_SPANS} from 'sentry/views/performance/mobile/appStarts/screenSummary/spanOpSelector'; +import type {SpanOperationTableProps} from 'sentry/views/performance/mobile/components/samplesTables'; +import {ScreensTable} from 'sentry/views/performance/mobile/components/screensTable'; +import {MobileCursors} from 'sentry/views/performance/mobile/screenload/screens/constants'; +import {useTableQuery} from 'sentry/views/performance/mobile/screenload/screens/screensTable'; +import {Referrer} from 'sentry/views/performance/mobile/ui/referrers'; +import { + PRIMARY_RELEASE_ALIAS, + SECONDARY_RELEASE_ALIAS, +} from 'sentry/views/starfish/components/releaseSelector'; +import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/textAlign'; +import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {STARFISH_CHART_INTERVAL_FIDELITY} from 'sentry/views/starfish/utils/constants'; +import {appendReleaseFilters} from 'sentry/views/starfish/utils/releaseComparison'; + +const {SPAN_DESCRIPTION, SPAN_GROUP, SPAN_OP, PROJECT_ID} = SpanMetricsField; + +const VALID_SPAN_OPS = APP_START_SPANS; + +export function SpanOperationTable({ + transaction, + primaryRelease, + secondaryRelease, +}: SpanOperationTableProps) { + const location = useLocation(); + const {selection} = usePageFilters(); + const organization = useOrganization(); + const cursor = decodeScalar(location.query?.[MobileCursors.SPANS_TABLE]); + + const spanOp = decodeScalar(location.query[SpanMetricsField.SPAN_OP]) ?? ''; + const deviceClass = decodeScalar(location.query[SpanMetricsField.DEVICE_CLASS]) ?? ''; + + // TODO: These filters seem to be too aggressive, check that they are ingesting properly + const searchQuery = new MutableSearch([ + // 'has:span.description', + // 'transaction.op:ui.load', + `transaction:${transaction}`, + `${SpanMetricsField.SPAN_OP}:${spanOp ? spanOp : `[${VALID_SPAN_OPS.join(',')}]`}`, + ...(spanOp ? [`${SpanMetricsField.SPAN_OP}:${spanOp}`] : []), + ...(deviceClass ? 
[`${SpanMetricsField.DEVICE_CLASS}:${deviceClass}`] : []), + ]); + const queryStringPrimary = appendReleaseFilters( + searchQuery, + primaryRelease, + secondaryRelease + ); + + const orderby = decodeScalar(location.query.sort, ''); + + const newQuery: NewQuery = { + name: '', + fields: [ + PROJECT_ID, + SPAN_OP, + SPAN_GROUP, + SPAN_DESCRIPTION, + `avg_if(mobile.slow_frames,release,${primaryRelease})`, + `avg_if(mobile.slow_frames,release,${secondaryRelease})`, + `avg_compare(mobile.slow_frames,release,${primaryRelease},${secondaryRelease})`, + `avg_if(mobile.frozen_frames,release,${primaryRelease})`, + `avg_if(mobile.frozen_frames,release,${secondaryRelease})`, + `avg_compare(mobile.frozen_frames,release,${primaryRelease},${secondaryRelease})`, + `avg_if(mobile.frames_delay,release,${primaryRelease})`, + `avg_if(mobile.frames_delay,release,${secondaryRelease})`, + `avg_compare(mobile.frames_delay,release,${primaryRelease},${secondaryRelease})`, + ], + query: queryStringPrimary, + orderby, + dataset: DiscoverDatasets.SPANS_METRICS, + version: 2, + projects: selection.projects, + interval: getInterval(selection.datetime, STARFISH_CHART_INTERVAL_FIDELITY), + }; + + const eventView = EventView.fromNewQueryWithLocation(newQuery, location); + + const {data, isLoading, pageLinks} = useTableQuery({ + eventView, + enabled: true, + referrer: Referrer.SPAN_OPERATION_TABLE, + cursor, + }); + + const columnNameMap = { + [SPAN_OP]: t('Operation'), + [SPAN_DESCRIPTION]: t('Span Description'), + [`avg_if(mobile.slow_frames,release,${primaryRelease})`]: t( + 'Slow (%s)', + PRIMARY_RELEASE_ALIAS + ), + [`avg_if(mobile.slow_frames,release,${secondaryRelease})`]: t( + 'Slow (%s)', + SECONDARY_RELEASE_ALIAS + ), + [`avg_compare(mobile.slow_frames,release,${primaryRelease},${secondaryRelease})`]: + t('Change'), + [`avg_if(mobile.frozen_frames,release,${primaryRelease})`]: t( + 'Frozen (%s)', + PRIMARY_RELEASE_ALIAS + ), + [`avg_if(mobile.frozen_frames,release,${secondaryRelease})`]: t( + 'Frozen (%s)', + SECONDARY_RELEASE_ALIAS + ), + [`avg_compare(mobile.frozen_frames,release,${primaryRelease},${secondaryRelease})`]: + t('Change'), + [`avg_if(mobile.frames_delay,release,${primaryRelease})`]: t( + 'Delay (%s)', + PRIMARY_RELEASE_ALIAS + ), + [`avg_if(mobile.frames_delay,release,${secondaryRelease})`]: t( + 'Delay (%s)', + SECONDARY_RELEASE_ALIAS + ), + [`avg_compare(mobile.frames_delay,release,${primaryRelease},${secondaryRelease})`]: + t('Change'), + }; + + function renderBodyCell(column, row) { + if (column.key === SPAN_DESCRIPTION) { + const label = row[SpanMetricsField.SPAN_DESCRIPTION]; + + const pathname = normalizeUrl( + `/organizations/${organization.slug}/performance/mobile/ui/spans/` + ); + const query = { + ...location.query, + transaction, + spanOp: row[SpanMetricsField.SPAN_OP], + spanGroup: row[SpanMetricsField.SPAN_GROUP], + spanDescription: row[SpanMetricsField.SPAN_DESCRIPTION], + }; + + return ( + + {label} + + ); + } + + if (column.key.startsWith('avg_if(mobile.frames_delay')) { + return ( + + {typeof row[column.key] === 'number' ? 
( + + ) : ( + '-' + )} + + ); + } + + return null; + } + + return ( + + ); +} From 7ff59c6673ff1ae8e7fb3aeb38c59fbc10e4770a Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Fri, 10 May 2024 09:48:48 -0400 Subject: [PATCH 260/376] ref: fix types for sentry.utils.sentry_apps.* (#70623) --- pyproject.toml | 3 +- .../installation_notifier.py | 16 +++--- .../utils/sentry_apps/request_buffer.py | 57 +++++++++++++------ src/sentry/utils/sentry_apps/webhooks.py | 29 +++++++--- .../api/endpoints/test_sentry_app_requests.py | 2 +- 5 files changed, 73 insertions(+), 34 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fafd760e399ba8..78236f07e5b4da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -468,7 +468,6 @@ module = [ "sentry.utils.distutils.commands.base", "sentry.utils.distutils.commands.build_assets", "sentry.utils.locking.backends.migration", - "sentry.utils.sentry_apps.webhooks", "sentry.utils.services", "sentry.utils.snowflake", "sentry.web.forms.accounts", @@ -590,6 +589,7 @@ module = [ "sentry.issues.update_inbox", "sentry.lang.java.processing", "sentry.llm.*", + "sentry.mediators.sentry_app_installations.installation_notifier", "sentry.migrations.*", "sentry.nodestore.base", "sentry.nodestore.bigtable.backend", @@ -626,6 +626,7 @@ module = [ "sentry.utils.performance_issues.performance_detection", "sentry.utils.redis", "sentry.utils.redis_metrics", + "sentry.utils.sentry_apps.*", "sentry.utils.sms", "sentry.utils.uwsgi", "sentry.utils.zip", diff --git a/src/sentry/mediators/sentry_app_installations/installation_notifier.py b/src/sentry/mediators/sentry_app_installations/installation_notifier.py index 0626376fdca179..b6467c48d827e2 100644 --- a/src/sentry/mediators/sentry_app_installations/installation_notifier.py +++ b/src/sentry/mediators/sentry_app_installations/installation_notifier.py @@ -5,6 +5,8 @@ from sentry.coreapi import APIUnauthorized from sentry.mediators.mediator import Mediator from sentry.mediators.param import Param +from sentry.models.apigrant import ApiGrant +from sentry.models.integrations.sentry_app import SentryApp from sentry.models.integrations.sentry_app_installation import SentryAppInstallation from sentry.services.hybrid_cloud.user.model import RpcUser from sentry.utils.sentry_apps import send_and_save_webhook_request @@ -16,19 +18,19 @@ class InstallationNotifier(Mediator): action = Param(str) using = router.db_for_write(SentryAppInstallation) - def call(self): + def call(self) -> None: self._verify_action() self._send_webhook() - def _verify_action(self): + def _verify_action(self) -> None: if self.action not in ["created", "deleted"]: raise APIUnauthorized(f"Invalid action '{self.action}'") - def _send_webhook(self): - return send_and_save_webhook_request(self.sentry_app, self.request) + def _send_webhook(self) -> None: + send_and_save_webhook_request(self.sentry_app, self.request) @property - def request(self): + def request(self) -> AppPlatformEvent: data = serialize( [self.install], user=self.user, serializer=SentryAppInstallationSerializer() )[0] @@ -42,9 +44,9 @@ def request(self): ) @cached_property - def sentry_app(self): + def sentry_app(self) -> SentryApp: return self.install.sentry_app @cached_property - def api_grant(self): + def api_grant(self) -> ApiGrant | None: return self.install.api_grant_id and self.install.api_grant diff --git a/src/sentry/utils/sentry_apps/request_buffer.py b/src/sentry/utils/sentry_apps/request_buffer.py index 4e2131cf29e757..5f666a976fae4d 
100644 --- a/src/sentry/utils/sentry_apps/request_buffer.py +++ b/src/sentry/utils/sentry_apps/request_buffer.py @@ -1,12 +1,22 @@ +from __future__ import annotations + import logging +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, overload from dateutil.parser import parse as parse_date from django.conf import settings from django.utils import timezone +from redis.client import Pipeline +from requests.models import Response from sentry.models.integrations.sentry_app import VALID_EVENTS from sentry.utils import json, redis +if TYPE_CHECKING: + from sentry.models.integrations.sentry_app import SentryApp + from sentry.services.hybrid_cloud.app.model import RpcSentryApp + BUFFER_SIZE = 100 KEY_EXPIRY = 60 * 60 * 24 * 30 # 30 days @@ -34,13 +44,13 @@ class SentryAppWebhookRequestsBuffer: This should store the last 100 requests and last 100 errors (in different keys) for each event type, for each Sentry App """ - def __init__(self, sentry_app): + def __init__(self, sentry_app: SentryApp | RpcSentryApp) -> None: self.sentry_app = sentry_app cluster_id = settings.SENTRY_WEBHOOK_LOG_REDIS_CLUSTER self.client = redis.redis_clusters.get(cluster_id) - def _get_redis_key(self, event, error=False): + def _get_redis_key(self, event: str, error: bool = False) -> str: sentry_app_id = self.sentry_app.id if error: @@ -48,7 +58,7 @@ def _get_redis_key(self, event, error=False): else: return f"sentry-app-webhook-request:{{{sentry_app_id}}}:{event}" - def _convert_redis_request(self, redis_request, event): + def _convert_redis_request(self, redis_request: str, event: str) -> dict[str, Any]: """ Convert the request string stored in Redis to a python dict Add the event type to the dict so that the request can be identified correctly @@ -58,7 +68,9 @@ def _convert_redis_request(self, redis_request, event): return request - def _add_to_buffer_pipeline(self, buffer_key, item, pipeline): + def _add_to_buffer_pipeline( + self, buffer_key: str, item: object, pipeline: Pipeline[str] + ) -> None: """ Add the item to the buffer key specified, using the given pipeline. This does not execute the pipeline's commands. @@ -68,7 +80,17 @@ def _add_to_buffer_pipeline(self, buffer_key, item, pipeline): pipeline.ltrim(buffer_key, 0, BUFFER_SIZE - 1) pipeline.expire(buffer_key, KEY_EXPIRY) - def _get_all_from_buffer(self, buffer_key, pipeline=None): + @overload + def _get_all_from_buffer(self, buffer_key: str, pipeline: Pipeline[str]) -> None: + ... + + @overload + def _get_all_from_buffer(self, buffer_key: str) -> list[str]: + ... + + def _get_all_from_buffer( + self, buffer_key: str, pipeline: Pipeline[str] | None = None + ) -> list[str] | None: """ Get the list at the buffer key, using the given pipeline if available. If a pipeline is provided, this does not return a value as the pipeline must still be executed. 
@@ -76,10 +98,11 @@ def _get_all_from_buffer(self, buffer_key, pipeline=None): if pipeline is not None: pipeline.lrange(buffer_key, 0, BUFFER_SIZE - 1) + return None else: return self.client.lrange(buffer_key, 0, BUFFER_SIZE - 1) - def _get_requests(self, event=None, error=False): + def _get_requests(self, event: str | None = None, error: bool = False) -> list[dict[str, Any]]: # If no event is specified, return the latest requests/errors for all event types if event is None: pipe = self.client.pipeline() @@ -105,20 +128,22 @@ def _get_requests(self, event=None, error=False): for request in self._get_all_from_buffer(self._get_redis_key(event, error=error)) ] - def get_requests(self, event=None, errors_only=False): + def get_requests( + self, event: str | None = None, errors_only: bool = False + ) -> list[dict[str, Any]]: return self._get_requests(event=event, error=errors_only) def add_request( self, - response_code, - org_id, - event, - url, - error_id=None, - project_id=None, - response=None, - headers=None, - ): + response_code: int, + org_id: int, + event: str, + url: str, + error_id: str | None = None, + project_id: int | None = None, + response: Response | None = None, + headers: Mapping[str, str] | None = None, + ) -> None: from sentry.utils.sentry_apps.webhooks import TIMEOUT_STATUS_CODE if event not in EXTENDED_VALID_EVENTS: diff --git a/src/sentry/utils/sentry_apps/webhooks.py b/src/sentry/utils/sentry_apps/webhooks.py index bcc0f027f4d66a..7228216da67101 100644 --- a/src/sentry/utils/sentry_apps/webhooks.py +++ b/src/sentry/utils/sentry_apps/webhooks.py @@ -1,7 +1,8 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING +from collections.abc import Callable +from typing import TYPE_CHECKING, Concatenate, ParamSpec, TypeVar from requests import Response from requests.exceptions import ConnectionError, Timeout @@ -27,21 +28,28 @@ logger = logging.getLogger("sentry.sentry_apps.webhooks") +P = ParamSpec("P") +R = TypeVar("R") -def ignore_unpublished_app_errors(func): - def wrapper(sentry_app, app_platform_event, url=None): + +def ignore_unpublished_app_errors( + func: Callable[Concatenate[SentryApp | RpcSentryApp, P], R] +) -> Callable[Concatenate[SentryApp | RpcSentryApp, P], R | None]: + def wrapper( + sentry_app: SentryApp | RpcSentryApp, *args: P.args, **kwargs: P.kwargs + ) -> R | None: try: - return func(sentry_app, app_platform_event, url) + return func(sentry_app, *args, **kwargs) except Exception: if sentry_app.is_published: raise else: - return + return None return wrapper -def check_broken(sentryapp: SentryApp | RpcSentryApp, org_id: str): +def check_broken(sentryapp: SentryApp | RpcSentryApp, org_id: str) -> None: from sentry.services.hybrid_cloud.app.service import app_service redis_key = get_redis_key(sentryapp, org_id) @@ -70,7 +78,9 @@ def check_broken(sentryapp: SentryApp | RpcSentryApp, org_id: str): ) -def record_timeout(sentryapp: SentryApp | RpcSentryApp, org_id: str, e: ConnectionError | Timeout): +def record_timeout( + sentryapp: SentryApp | RpcSentryApp, org_id: str, e: ConnectionError | Timeout +) -> None: """ Record Unpublished Sentry App timeout or connection error in integration buffer to check if it is broken and should be disabled """ @@ -86,7 +96,7 @@ def record_timeout(sentryapp: SentryApp | RpcSentryApp, org_id: str, e: Connecti def record_response_for_disabling_integration( sentryapp: SentryApp | RpcSentryApp, org_id: str, response: Response -): +) -> None: if not sentryapp.is_internal: return redis_key = 
get_redis_key(sentryapp, org_id) @@ -121,7 +131,8 @@ def send_and_save_webhook_request( event = f"{app_platform_event.resource}.{app_platform_event.action}" slug = sentry_app.slug_for_metrics url = url or sentry_app.webhook_url - response = None + assert url is not None + try: response = safe_urlopen( url=url, diff --git a/tests/sentry/api/endpoints/test_sentry_app_requests.py b/tests/sentry/api/endpoints/test_sentry_app_requests.py index e2830d8622c44a..c19316808bd340 100644 --- a/tests/sentry/api/endpoints/test_sentry_app_requests.py +++ b/tests/sentry/api/endpoints/test_sentry_app_requests.py @@ -241,7 +241,7 @@ def test_linked_error_not_returned_if_project_does_not_exist(self): event="issue.assigned", url=self.unpublished_app.webhook_url, error_id=self.event_id, - project_id="1000", + project_id=1000, ) url = reverse("sentry-api-0-sentry-app-requests", args=[self.published_app.slug]) From 896b63e6c211c1cad92078f7176c7be35471bcbc Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 10 May 2024 10:26:23 -0400 Subject: [PATCH 261/376] fix(hybridcloud) Fix loading project lists in notification settings (#70603) We should apply the organization's region URL to project list requests made from notification settings. Doing so avoids sending requests to the wrong region, as `Client` will implicitly use the region of the organization from which the HTML page was initially loaded if no `host` is provided. Fixes HC-1187 --- static/app/utils/queryClient.tsx | 2 + .../notificationSettingsByEntity.spec.tsx | 42 +++++++++++++++++++ .../notificationSettingsByEntity.tsx | 8 +++- 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/static/app/utils/queryClient.tsx b/static/app/utils/queryClient.tsx index e4ea4f1a3dfb4e..6c3469116a956a 100644 --- a/static/app/utils/queryClient.tsx +++ b/static/app/utils/queryClient.tsx @@ -47,6 +47,7 @@ export type QueryKeyEndpointOptions< > = { data?: Data; headers?: Headers; + host?: string; method?: APIRequestMethod; query?: Query; }; @@ -184,6 +185,7 @@ export function fetchDataQuery(api: Client) { return api.requestPromise(url, { includeAllArgs: true, + host: opts?.host, method: opts?.method ??
'GET', data: opts?.data, query: opts?.query, diff --git a/static/app/views/settings/account/notifications/notificationSettingsByEntity.spec.tsx b/static/app/views/settings/account/notifications/notificationSettingsByEntity.spec.tsx index 191a5f170cd8fa..db5af860dede6a 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByEntity.spec.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByEntity.spec.tsx @@ -1,4 +1,5 @@ import {OrganizationFixture} from 'sentry-fixture/organization'; +import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen} from 'sentry-test/reactTestingLibrary'; @@ -42,4 +43,45 @@ describe('NotificationSettingsByEntity', function () { expect(await screen.findByText(otherOrganization.name)).toBeInTheDocument(); expect(projectsMock).toHaveBeenCalledTimes(1); }); + + it('should load from the organization region', async function () { + const organization = OrganizationFixture(); + const deOrg = OrganizationFixture({ + id: '2', + slug: 'de-org', + name: 'de org', + links: { + organizationUrl: 'https://de-org.sentry.io', + regionUrl: 'https://de.sentry.io', + }, + }); + ConfigStore.set('customerDomain', { + ...ConfigStore.get('customerDomain')!, + subdomain: deOrg.slug, + }); + const projectsMock = MockApiClient.addMockResponse({ + url: `/organizations/${deOrg.slug}/projects/`, + method: 'GET', + body: [ProjectFixture({organization: deOrg})], + match: [ + function (_url: string, options: Record) { + return options.host === 'https://de.sentry.io'; + }, + ], + }); + + render( + + ); + expect(await screen.findByText(deOrg.name)).toBeInTheDocument(); + expect(projectsMock).toHaveBeenCalledTimes(1); + }); }); diff --git a/static/app/views/settings/account/notifications/notificationSettingsByEntity.tsx b/static/app/views/settings/account/notifications/notificationSettingsByEntity.tsx index 7fb8e8807d7cec..cd9fabf4f513eb 100644 --- a/static/app/views/settings/account/notifications/notificationSettingsByEntity.tsx +++ b/static/app/views/settings/account/notifications/notificationSettingsByEntity.tsx @@ -57,8 +57,11 @@ function NotificationSettingsByEntity({ const orgId = router.location?.query?.organizationId ?? orgFromSubdomain ?? organizations[0]?.id; - const orgSlug = - organizations.find(({id}) => id === orgId)?.slug || organizations[0]?.slug; + let organization = organizations.find(({id}) => id === orgId); + if (!organization) { + organization = organizations[0]; + } + const orgSlug = organization.slug; // loads all the projects for an org const { @@ -71,6 +74,7 @@ function NotificationSettingsByEntity({ [ `/organizations/${orgSlug}/projects/`, { + host: organization.links.regionUrl, query: { all_projects: '1', collapse: ['latestDeploys', 'unusedFeatures'], From d9e0fa94165ac805ac80449bd8ec2bdb115e9c3a Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 10 May 2024 10:26:38 -0400 Subject: [PATCH 262/376] chore(hybridcloud) Add docstrings for the Organization and User services (#70590) These are both frequently used and can benefit from better documentation. 
Refs HC-1180 --------- Co-authored-by: Matt Duncan <14761+mrduncan@users.noreply.github.com> --- .../hybrid_cloud/organization/service.py | 191 +++++++++++++++++- .../services/hybrid_cloud/user/model.py | 13 ++ .../services/hybrid_cloud/user/service.py | 132 ++++++++++-- 3 files changed, 312 insertions(+), 24 deletions(-) diff --git a/src/sentry/services/hybrid_cloud/organization/service.py b/src/sentry/services/hybrid_cloud/organization/service.py index 86a4a617fead39..678934b725eb2c 100644 --- a/src/sentry/services/hybrid_cloud/organization/service.py +++ b/src/sentry/services/hybrid_cloud/organization/service.py @@ -49,6 +49,11 @@ def get_local_implementation(cls) -> RpcService: return DatabaseBackedOrganizationService() def get(self, id: int) -> RpcOrganization | None: + """ + Get an organization by id + + :param id: The organization id + """ org_context = self.get_organization_by_id(id=id) return org_context.organization if org_context else None @@ -62,8 +67,13 @@ def serialize_organization( as_user: RpcUser | None = None, ) -> Any | None: """ - Attempts to serialize a given organization. Note that this can be None if the organization is already deleted + Fetch an organization's API serialized form + + Note that this can be None if the organization is already deleted in the corresponding region silo. + + :param id: The organization id + :param as_user: The user making the request, used for authorization on the output. """ @regional_rpc_method(resolve=ByOrganizationId("id"), return_none_if_mapping_not_found=True) @@ -78,9 +88,17 @@ def get_organization_by_id( include_teams: bool | None = True, ) -> RpcUserOrganizationContext | None: """ - Fetches the organization, team, and project data given by an organization id, regardless of its visibility - status. When user_id is provided, membership data related to that user from the organization + Fetches the organization, team, and project data given by an organization id, regardless of + its visibility status + + When user_id is provided, membership data related to that user from the organization is also given in the response. See RpcUserOrganizationContext for more info. + + :param id: The id of the organization to fetch + :param user_id: The id of the user to fetch membership for. + :param slug: The slug of the organization to fetch (alternative to id) + :param include_projects: Whether you want projects in the response. + :param include_teams: Whether you want teams in the response. """ @regional_rpc_method(resolve=ByOrganizationSlug(), return_none_if_mapping_not_found=True) @@ -92,9 +110,14 @@ def get_org_by_slug( user_id: int | None = None, ) -> RpcOrganizationSummary | None: """ - Fetches the organization, by an organization slug. If user_id is passed, it will enforce visibility - rules. This method is differentiated from get_organization_by_slug by not being cached and returning - RpcOrganizationSummary instead of org contexts + Fetches an organization by slug. + + If user_id is passed, it will enforce visibility rules. This method is differentiated from + get_organization_by_slug by not being cached and returning RpcOrganizationSummary instead of + org contexts + + :param slug: The slug to search by + :param user_id: The user to check membership with """ @regional_rpc_method(resolve=ByOrganizationId("id"), return_none_if_mapping_not_found=True) @@ -106,9 +129,14 @@ def get_org_by_id( user_id: int | None = None, ) -> RpcOrganizationSummary | None: """ - Fetches the organization, by an organization id. 
If user_id is passed, it will enforce visibility - rules. This method is differentiated from get_organization_by_id by not being cached and returning - RpcOrganizationSummary instead of org contexts + Fetch an organization by id. + + If user_id is passed, it will enforce visibility rules. This method is differentiated from + get_organization_by_id by not being cached and returning RpcOrganizationSummary instead of + org contexts + + :param id: The id to search by + :param user_id: The user to check membership with """ @regional_rpc_method(resolve=ByRegionName()) @abstractmethod def get_organizations_by_user_and_scope( ) -> list[RpcOrganization]: """ Fetches organizations for the given user, with the given organization member scope. + + :param region_name: The region to locate an organization in + :param user: The user to filter by membership + :param scope: The API scopes to search by """ @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def update_flags(self, *, organization_id: int, flags: RpcOrganizationFlagsUpdate) -> None: + """ + Update the flags on an organization + + :param organization_id: The organization id + :param flags: Dict of flags to set. + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -141,6 +179,9 @@ def check_membership_by_id( ) -> RpcOrganizationMember | None: """ Used to look up an organization membership by a user id + + :param organization_id: The id to search by + :param user_id: The user to check membership with """ @regional_rpc_method(resolve=ByOrganizationId()) @@ -150,6 +191,9 @@ def get_member_summaries_by_ids( ) -> list[RpcOrganizationMemberSummary]: """ Used to look up multiple membership summaries by users' id. + + :param organization_id: The id to search by + :param user_ids: The user ids to get membership data on. """ @regional_rpc_method(resolve=ByOrganizationId()) @@ -162,6 +206,16 @@ def get_invite_by_id( user_id: int | None = None, email: str | None = None, ) -> RpcUserInviteContext | None: + """ + Get a membership invite context + + Provide an organization_id and one of organization_member_id, user_id and email. + + :param organization_id: The organization to search in + :param organization_member_id: The member id to search by + :param user_id: The user id to search by + :param email: The email to search by + """ pass @regional_rpc_method(resolve=ByOrganizationSlug(), return_none_if_mapping_not_found=True) @@ -174,6 +228,16 @@ def get_invite_by_slug( user_id: int | None = None, email: str | None = None, ) -> RpcUserInviteContext | None: + """ + Get a membership invite context + + Provide an organization slug and one of organization_member_id, user_id and email. + + :param slug: The organization to search in + :param organization_member_id: The member id to search by + :param user_id: The user id to search by + :param email: The email to search by + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -183,6 +247,9 @@ def delete_organization_member( ) -> bool: """ Delete an organization member by its id. + + :param organization_id: The organization to search in + :param organization_member_id: The id of the membership """ @regional_rpc_method(resolve=ByOrganizationId()) @@ -196,11 +263,18 @@ def set_user_for_organization_member( ) -> RpcOrganizationMember | None: """ Set the user id for an organization member.
+ + :param organization_id: The organization to search in + :param organization_member_id: The id of the membership + :param user_id: The new user for the membership """ def check_organization_by_slug(self, *, slug: str, only_visible: bool) -> int | None: """ If exists and matches the only_visible requirement, returns an organization's id by the slug. + + :param slug: The organization to search in + :param only_visible: Whether or not to consider only visible orgs """ return _organization_check_service.check_organization_by_slug( slug=slug, only_visible=only_visible @@ -209,6 +283,9 @@ def check_organization_by_id(self, *, id: int, only_visible: bool) -> bool: """ Checks if an organization exists by the id. + + :param id: The organization to search in + :param only_visible: Whether or not to consider only visible orgs """ return _organization_check_service.check_organization_by_id( id=id, only_visible=only_visible @@ -249,6 +326,11 @@ def get_organization_by_slug( @regional_rpc_method(resolve=RequireSingleOrganization()) @abstractmethod def get_default_organization(self) -> RpcOrganization: + """ + Get the default Organization + + See Organization.get_default() + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -265,6 +347,18 @@ def add_organization_member( inviter_id: int | None = None, invite_status: int | None = None, ) -> RpcOrganizationMember: + """ + Add an organization member + + :param organization_id: The id of the organization to add a member to + :param default_org_role: The fallback role the member should have. + :param user_id: The id of the user to create a membership for + :param email: The email to create a membership invite for. + :param flags: The membership flags to use. + :param role: The member's role, overrides default_org_role + :param inviter_id: The user_id who is creating the membership + :param invite_status: The status of the invitation. + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def update_organization_member( self, *, organization_id: int, member_id: int, attrs: OrganizationMemberUpdateArgs ) -> RpcOrganizationMember | None: + """ + Update an organization member + + :param organization_id: The organization to update + :param member_id: The org membership id to update. + :param attrs: The attributes to set. + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -287,6 +388,9 @@ def get_single_team(self, *, organization_id: int) -> RpcTeam | None: def add_team_member( self, *, organization_id: int, team_id: int, organization_member_id: int ) -> None: + """ + Add a team member for a given organization, team and member. + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -299,6 +403,14 @@ def get_or_create_team_member( organization_member_id: int, role: str | None, ) -> None: + """ + Get or create a team member + + :param organization_id: The organization to update + :param team_id: The team to add a member to + :param organization_member_id: The member id + :param role: The member role (only used during create) + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -309,51 +421,92 @@ def get_or_create_default_team( organization_id: int, new_team_slug: str, ) -> RpcTeam: + """ + Get or create a team with a given slug.
+ """ pass @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member")) @abstractmethod def update_membership_flags(self, *, organization_member: RpcOrganizationMember) -> None: + """ + Update the flags on a membership + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def merge_users(self, *, organization_id: int, from_user_id: int, to_user_id: int) -> None: + """ + Merge two members. + + Will update all teams and group related models to reflect the new member + + If `to_user_id` does not have a membership in the organization, a membership + will be created for them. + + :param organization_id: The organization to operate on + :param from_user_id: The user id of the membership to merge + :param to_user_id: The user id of the user to merge into + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def update_default_role(self, *, organization_id: int, default_role: str) -> RpcOrganization: + """ + Update the default role for an organization + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def remove_user(self, *, organization_id: int, user_id: int) -> RpcOrganizationMember | None: + """ + Remove a membership by user_id + """ pass @regional_rpc_method(resolve=ByRegionName()) @abstractmethod def update_region_user(self, *, user: RpcRegionUser, region_name: str) -> None: + """ + Update all memberships in a region to reflect changes in user details. + + Will sync is_active and email attributes. + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def reset_idp_flags(self, *, organization_id: int) -> None: + """ + Reset the identity provider related flags for all members in an organization + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def get_option(self, *, organization_id: int, key: str) -> OptionValue: + """ + Get an organization option by key + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def update_option(self, *, organization_id: int, key: str, value: OptionValue) -> bool: + """ + Update an organization option by key + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def delete_option(self, *, organization_id: int, key: str) -> None: + """ + Delete an organization option by key + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -361,6 +514,13 @@ def delete_option(self, *, organization_id: int, key: str) -> None: def send_sso_link_emails( self, *, organization_id: int, sending_user_email: str, provider_key: str ) -> None: + """ + Send SSO link emails to all members in the organization + + :param organization_id: The organization to operate on + :param sending_user_email: The email address of the user who initiated the link process + :param provider_key: The SSO provider key + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -368,11 +528,19 @@ def send_sso_link_emails( def send_sso_unlink_emails( self, *, organization_id: int, sending_user_email: str, provider_key: str ) -> None: + """ + Send SSO link break emails to all members in the organization + + :param organization_id: The organization to operate on + :param sending_user_email: The email address of the user who initiated the link process + :param provider_key: The SSO provider key + """ pass @regional_rpc_method(resolve=ByOrganizationId()) @abstractmethod def count_members_without_sso(self, *, organization_id: int) -> int: + """Get the number of users without SSO flags set""" pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -380,6
+548,7 @@ def count_members_without_sso(self, *, organization_id: int) -> int: def delete_organization( self, *, organization_id: int, user: RpcUser ) -> RpcOrganizationDeleteResponse: + """Delete an organization""" pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -387,6 +556,7 @@ def delete_organization( def create_org_delete_log( self, *, organization_id: int, audit_log_actor: RpcAuditLogEntryActor ) -> None: + """Record an audit log for an organization deletion""" pass @regional_rpc_method(resolve=ByOrganizationId()) @@ -398,6 +568,7 @@ def send_signal( organization_id: int, args: Mapping[str, int | str | None], ) -> None: + """Trigger a django signal on an organization synchronously""" pass def schedule_signal( @@ -406,6 +577,7 @@ def schedule_signal( organization_id: int, args: Mapping[str, int | str | None], ) -> None: + """Trigger a django signal on an organization asynchronously""" _organization_signal_service.schedule_signal( signal=signal, organization_id=organization_id, args=args ) @@ -415,6 +587,7 @@ def schedule_signal( def get_organization_owner_members( self, *, organization_id: int ) -> list[RpcOrganizationMember]: + """Get a list of members with the owner role""" pass diff --git a/src/sentry/services/hybrid_cloud/user/model.py b/src/sentry/services/hybrid_cloud/user/model.py index 92d88342f1ce73..4ce263f1b54d50 100644 --- a/src/sentry/services/hybrid_cloud/user/model.py +++ b/src/sentry/services/hybrid_cloud/user/model.py @@ -131,12 +131,25 @@ class UserSerializeType(IntEnum): # annoying class UserFilterArgs(TypedDict, total=False): user_ids: list[int] + """List of user ids to search with""" + is_active: bool + """Whether the user needs to be active""" + organization_id: int + """Organization to check membership in""" + emails: list[str] + """list of emails to match with""" + email_verified: bool + """Whether emails have to be verified or not""" + query: str + """Filter by email or name""" + authenticator_types: list[int] | None + """The type of MFA authenticator you want to query by""" class UserUpdateArgs(TypedDict, total=False): diff --git a/src/sentry/services/hybrid_cloud/user/service.py b/src/sentry/services/hybrid_cloud/user/service.py index d34076fb731666..88ddba1e494abb 100644 --- a/src/sentry/services/hybrid_cloud/user/service.py +++ b/src/sentry/services/hybrid_cloud/user/service.py @@ -47,21 +47,52 @@ def serialize_many( auth_context: AuthenticationContext | None = None, serializer: UserSerializeType | None = None, ) -> list[OpaqueSerializedResponse]: + """ + Get a list of users serialized as dictionaries with the API serializer. + + This is most useful when you need to stitch users into an API response. + + :param filter: Filtering options. See UserFilterArgs + :param as_user: The user making the request, this is used to perform field level authorization required by the serializer + :param auth_context: Authentication context that the request is being made under. + :param serializer: The serializer to use. + """ pass @rpc_method @abstractmethod def get_many(self, *, filter: UserFilterArgs) -> list[RpcUser]: + """ + Get a list of users as RpcUser objects. + + :param filter: Filtering options. See UserFilterArgs + """ pass @rpc_method @abstractmethod def get_many_ids(self, *, filter: UserFilterArgs) -> list[int]: + """ + Get a list of userids that match the filter operation + + This is a more efficient way to fetch users when you need to create + query conditions on other tables. + + :param filter: Filtering options. 
See UserFilterArgs + """ pass @rpc_method @abstractmethod def get_many_profiles(self, *, filter: UserFilterArgs) -> list[RpcUserProfile]: + """ + Get a list of RpcUserProfile matching `filter` + + If you only need basic profile information about a user this is more efficient + than `get_many` + + :param filter: Filtering options. See UserFilterArgs + """ pass @rpc_method @@ -76,9 +107,10 @@ def get_many_by_email( ) -> list[RpcUser]: """ Return a list of users matching the filters - :param email: - A case insensitive email to match - :return: + + :param emails: A list of case insensitive emails to match + :param is_active: Whether the users need to be active + :param is_verified: Whether the user's emails need to be verified. """ @rpc_method @@ -87,20 +119,21 @@ def get_by_username( self, *, username: str, with_valid_password: bool = True, is_active: bool | None = None ) -> list[RpcUser]: """ - Return a list of users that match a username and falling back to email - :param username: - A case insensitive username/email to match - :param with_valid_password: - filter to ensure a password is set - :param is_active: - filter for only active users - :return: + Return a list of users that match a username and falling back to email. + + :param username: A case insensitive username/email to match + :param with_valid_password: filter to ensure a password is set + :param is_active: filter for only active users """ @rpc_method @abstractmethod def get_existing_usernames(self, *, usernames: list[str]) -> list[str]: - """Get all usernames from the set that belong to existing users.""" + """ + Get all usernames from the set that belong to existing users. + + :param usernames: A list of usernames to match + """ @rpc_method @abstractmethod @@ -110,28 +143,55 @@ def get_organizations( user_id: int, only_visible: bool = False, ) -> list[RpcOrganizationMapping]: - """Get summary data for all organizations of which the user is a member. + """ + Get summary data for all organizations of which the user is a member. The organizations may span multiple regions. + + :param user_id: The user to find organizations from. + :param only_visible: Whether or not to only fetch visible organizations """ @rpc_method @abstractmethod def get_member_region_names(self, *, user_id: int) -> list[str]: - """Get a list of region names where the user is a member of at least one org.""" + """ + Get a list of region names where the user is a member of at least one org. + + :param user_id: The user to fetch region names for. + """ @rpc_method @abstractmethod def update_user(self, *, user_id: int, attrs: UserUpdateArgs) -> Any: - # Returns a serialized user + """ + Update a user and return the API serialized form + + :param user_id: The user to update + :param attrs: A dictionary of properties to update. + """ pass @rpc_method @abstractmethod def flush_nonce(self, *, user_id: int) -> None: + """ + Reset a user's session nonce + + This will log out all sessions that don't contain the same session nonce. + + :param user_id: The user to update + """ pass def get_user(self, user_id: int) -> RpcUser | None: + """ + Get a single user by id + + The result of this method is cached. 
+ + :param user_id: The user to fetch + """ metrics.incr("user_service.get_user.call") return get_user(user_id) @@ -140,11 +200,23 @@ def get_user(self, user_id: int) -> RpcUser | None: def get_user_by_social_auth( self, *, organization_id: int, provider: str, uid: str ) -> RpcUser | None: + """ + Get a user for a given organization, social auth provider and public id + + :param organization_id: The organization to search in. + :param provider: the authentication provider to search in. + :param uid: The external id to search with. + """ pass @rpc_method @abstractmethod def get_first_superuser(self) -> RpcUser | None: + """ + Get the first superuser in the database. + + The results of this method are ordered by id + """ pass @rpc_method @@ -156,6 +228,13 @@ def get_or_create_user_by_email( ident: str | None = None, referrer: str | None = None, ) -> tuple[RpcUser, bool]: + """ + Get or create a user with a matching email address or AuthIdentity + + :param email: The email to search by. + :param ident: If provided, and multiple users are found with a matching email, the ident + is used to narrow down results. + """ pass @rpc_method @@ -166,16 +245,34 @@ def get_user_by_email( email: str, ident: str | None = None, ) -> RpcUser | None: + """ + Get a user with a matching email address or AuthIdentity + + :param email: The email/username to use. + :param ident: If provided, and multiple users are found with a matching email, the ident + is used to narrow down results. + """ pass @rpc_method @abstractmethod def verify_user_email(self, *, email: str, user_id: int) -> bool: + """ + Verify a user's email address + + :param email: The email to verify + :param user_id: The user id to verify email for. + """ pass @rpc_method @abstractmethod def verify_any_email(self, *, email: str) -> bool: + """ + Verifies the first email address (ordered by id) that matches. + + :param email: The email to verify. + """ pass @rpc_method @@ -205,6 +302,11 @@ def verify_user_emails( @rpc_method @abstractmethod def get_user_avatar(self, *, user_id: int) -> RpcAvatar | None: + """ + Get a user's avatar if available + + :param user_id: The user to get an avatar for. + """ pass From 9cf96f4afade6faf559937832c02ba7a2b3bc85c Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 2024 10:39:21 -0400 Subject: [PATCH 263/376] ref(js): Deprecate useRouter (#70651) Prefer using the other hooks instead --- static/app/utils/useRouter.tsx | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/static/app/utils/useRouter.tsx b/static/app/utils/useRouter.tsx index 8e57fd8b8ca1d1..8352aad84d40f6 100644 --- a/static/app/utils/useRouter.tsx +++ b/static/app/utils/useRouter.tsx @@ -1,5 +1,11 @@ import {useRouteContext} from 'sentry/utils/useRouteContext'; +/** + * @deprecated Please do not use this. Use a specific hook instead. Including + * use{Location,Params,Routes,Navigate}. + * + * react-router 6 does not include this hook. + */ function useRouter() { const route = useRouteContext(); return route.router; From ae2b8a517464da2fddb214ca91a74167151f60ad Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 2024 10:42:49 -0400 Subject: [PATCH 264/376] ref(js): Remove usage of getCurrentLocation in feedbackListItem (#70650) Newer versions of the history library do not support this function [0]. Use `useLocation` in this case instead. 
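A minimal sketch of the replacement pattern (the `FeedbackLink` component and the `feedbackSlug` query param are made up for illustration, not taken from this patch; the imports mirror ones used in the diff below):

```tsx
import Link from 'sentry/components/links/link';
import {useLocation} from 'sentry/utils/useLocation';

// Hypothetical component: the hook reads the location reactively from
// router context instead of snapshotting it with
// browserHistory.getCurrentLocation(), which newer `history` versions drop.
function FeedbackLink({feedbackId}: {feedbackId: string}) {
  const location = useLocation();

  return (
    <Link
      to={{
        pathname: location.pathname,
        // Merge into the existing query string instead of replacing it.
        query: {...location.query, feedbackSlug: feedbackId},
      }}
    >
      Open feedback
    </Link>
  );
}
```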
[0]: https://github.com/remix-run/history/blob/485ebc177c1f3f8eef93b0d654fffd1d321c2ecd/packages/history/index.ts#L188 --- static/app/components/feedback/list/feedbackListItem.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/components/feedback/list/feedbackListItem.tsx b/static/app/components/feedback/list/feedbackListItem.tsx index 8d6b34fef19d50..911592ab2b1e14 100644 --- a/static/app/components/feedback/list/feedbackListItem.tsx +++ b/static/app/components/feedback/list/feedbackListItem.tsx @@ -20,12 +20,12 @@ import {useLegacyStore} from 'sentry/stores/useLegacyStore'; import {space} from 'sentry/styles/space'; import type {Group} from 'sentry/types/group'; import {trackAnalytics} from 'sentry/utils/analytics'; -import {browserHistory} from 'sentry/utils/browserHistory'; import type {FeedbackIssue} from 'sentry/utils/feedback/types'; import {decodeScalar} from 'sentry/utils/queryString'; import useReplayCountForFeedbacks from 'sentry/utils/replayCount/useReplayCountForFeedbacks'; import {darkTheme, lightTheme} from 'sentry/utils/theme'; import useLocationQuery from 'sentry/utils/url/useLocationQuery'; +import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; @@ -52,6 +52,7 @@ const FeedbackListItem = forwardRef( const isOpen = useIsSelectedFeedback({feedbackItem}); const {feedbackHasReplay} = useReplayCountForFeedbacks(); const hasReplayId = feedbackHasReplay(feedbackItem.id); + const location = useLocation(); const isCrashReport = feedbackItem.metadata.source === 'crash_report_embed_form'; const isUserReportWithError = feedbackItem.metadata.source === 'user_report_envelope'; @@ -65,7 +66,6 @@ const FeedbackListItem = forwardRef( { - const location = browserHistory.getCurrentLocation(); return { pathname: normalizeUrl(`/organizations/${organization.slug}/feedback/`), query: { From b6ef805c4d1f86fe8b89110bc2a0644ab3f5c4cb Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 2024 10:56:31 -0400 Subject: [PATCH 265/376] ref(routes): Avoid usage of optional route params (#70631) The syntax has changed in react-router 6. Instead of trying to build compatibility we can just remove the one optional route we have.
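To make the rewrite concrete, here is a sketch with hypothetical paths and a placeholder component (`ItemList` and the `/items` routes are invented for the example; the actual `routes.tsx` change follows). react-router 3's optional `()` segment becomes two fully explicit routes rendering the same component:

```tsx
import {Route} from 'react-router';

// Placeholder component so the sketch type-checks.
function ItemList() {
  return null;
}

// Before: one route where the second segment is optional (react-router 3
// `()` syntax, with `:searchId` only sometimes present).
export const before = (
  <Route path="/items(/searches/:searchId)/" component={ItemList} />
);

// After: the optional segment gets its own route, both rendering ItemList.
export const after = [
  <Route key="list" path="/items/" component={ItemList} />,
  <Route key="search" path="/items/searches/:searchId/" component={ItemList} />,
];
```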
--- static/app/routes.tsx | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/static/app/routes.tsx b/static/app/routes.tsx index 5bfa9d4029d96b..014aedee9ea37a 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -1711,12 +1711,9 @@ function buildRoutes() { ); const issueListRoutes = ( - + + ); From a594acd4b430802efa9b8e029fc373ec273fe6aa Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 2024 11:12:58 -0400 Subject: [PATCH 266/376] ref(deps): Run yarn-deduplicate (#70654) --- yarn.lock | 37 +++---------------------------------- 1 file changed, 3 insertions(+), 34 deletions(-) diff --git a/yarn.lock b/yarn.lock index 2f0b8f337e5969..a943cbf82d241a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2019,13 +2019,6 @@ dependencies: "@opentelemetry/api" "^1.0.0" -"@opentelemetry/api-logs@0.51.0": - version "0.51.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.51.0.tgz#71f296661d2215167c748ca044ff184a65d9426b" - integrity sha512-m/jtfBPEIXS1asltl8fPQtO3Sb1qMpuL61unQajUmM8zIxeMF1AlqzWXM3QedcYgTTFiJCew5uJjyhpmqhc0+g== - dependencies: - "@opentelemetry/api" "^1.0.0" - "@opentelemetry/api-logs@0.51.1": version "0.51.1" resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.51.1.tgz#ded1874c04516c2b8cb24828eef3d6c3d1f75343" @@ -2050,20 +2043,13 @@ dependencies: "@opentelemetry/semantic-conventions" "1.23.0" -"@opentelemetry/core@1.24.1", "@opentelemetry/core@^1.24.1": +"@opentelemetry/core@1.24.1", "@opentelemetry/core@^1.1.0", "@opentelemetry/core@^1.24.1", "@opentelemetry/core@^1.8.0": version "1.24.1" resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.24.1.tgz#35ab9d2ac9ca938e0ffbdfa40c49c169ac8ba80d" integrity sha512-wMSGfsdmibI88K9wB498zXY04yThPexo8jvwNNlm542HZB7XrrMRBbAyKJqG8qDRJwIBdBrPMi4V9ZPW/sqrcg== dependencies: "@opentelemetry/semantic-conventions" "1.24.1" -"@opentelemetry/core@^1.1.0", "@opentelemetry/core@^1.8.0": - version "1.24.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.24.0.tgz#5568b6c1328a6b9c94a77f9b2c7f872b852bba40" - integrity sha512-FP2oN7mVPqcdxJDTTnKExj4mi91EH+DNuArKfHTjPuJWe2K1JfMIVXNfahw1h3onJxQnxS8K0stKkogX05s+Aw== - dependencies: - "@opentelemetry/semantic-conventions" "1.24.0" - "@opentelemetry/instrumentation-connect@0.35.0": version "0.35.0" resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.35.0.tgz#d7c68f46ab04f10dc8792ef1fd023eb01748d8db" @@ -2205,7 +2191,7 @@ semver "^7.5.2" shimmer "^1.2.1" -"@opentelemetry/instrumentation@0.51.1", "@opentelemetry/instrumentation@^0.51.1": +"@opentelemetry/instrumentation@0.51.1", "@opentelemetry/instrumentation@^0.51.0", "@opentelemetry/instrumentation@^0.51.1": version "0.51.1" resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.51.1.tgz#46fb2291150ec6923e50b2f094b9407bc726ca9b" integrity sha512-JIrvhpgqY6437QIqToyozrUG1h5UhwHkaGK/WAX+fkrpyPtc+RO5FkRtUd9BH0MibabHHvqsnBGKfKVijbmp8w== @@ -2239,18 +2225,6 @@ semver "^7.5.2" shimmer "^1.2.1" -"@opentelemetry/instrumentation@^0.51.0": - version "0.51.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.51.0.tgz#93dbe96c87da539081d0ccd07475cfc0b0c61233" - integrity sha512-Eg/+Od5bEvzpvZQGhvMyKIkrzB9S7jW+6z9LHEI2VXhl/GrqQ3oBqlzJt4tA6pGtxRmqQWKWGM1wAbwDdW/gUA== - dependencies: - "@opentelemetry/api-logs" "0.51.0" - "@types/shimmer" "^1.0.2" - import-in-the-middle "1.7.1" - require-in-the-middle "^7.1.1" - semver 
"^7.5.2" - shimmer "^1.2.1" - "@opentelemetry/redis-common@^0.36.2": version "0.36.2" resolved "https://registry.yarnpkg.com/@opentelemetry/redis-common/-/redis-common-0.36.2.tgz#906ac8e4d804d4109f3ebd5c224ac988276fdc47" @@ -2287,12 +2261,7 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.23.0.tgz#627f2721b960fe586b7f72a07912cb7699f06eef" integrity sha512-MiqFvfOzfR31t8cc74CTP1OZfz7MbqpAnLCra8NqQoaHJX6ncIRTdYOQYBDQ2uFISDq0WY8Y9dDTWvsgzzBYRg== -"@opentelemetry/semantic-conventions@1.24.0", "@opentelemetry/semantic-conventions@^1.0.0", "@opentelemetry/semantic-conventions@^1.17.0", "@opentelemetry/semantic-conventions@^1.22.0", "@opentelemetry/semantic-conventions@^1.23.0": - version "1.24.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.24.0.tgz#f074db930a7feb4d64103a9a576c5fbad046fcac" - integrity sha512-yL0jI6Ltuz8R+Opj7jClGrul6pOoYrdfVmzQS4SITXRPH7I5IRZbrwe/6/v8v4WYMa6MYZG480S1+uc/IGfqsA== - -"@opentelemetry/semantic-conventions@1.24.1": +"@opentelemetry/semantic-conventions@1.24.1", "@opentelemetry/semantic-conventions@^1.0.0", "@opentelemetry/semantic-conventions@^1.17.0", "@opentelemetry/semantic-conventions@^1.22.0", "@opentelemetry/semantic-conventions@^1.23.0": version "1.24.1" resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.24.1.tgz#d4bcebda1cb5146d47a2a53daaa7922f8e084dfb" integrity sha512-VkliWlS4/+GHLLW7J/rVBA00uXus1SWvwFvcUDxDwmFxYfg/2VI6ekwdXS28cjI8Qz2ky2BzG8OUHo+WeYIWqw== From 312bacc8eec1099182b8c1c25ced8b4f1f9b703b Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Fri, 10 May 2024 08:24:00 -0700 Subject: [PATCH 267/376] update: Querysubscription timebox delete column db operation (#70630) requires https://github.com/getsentry/sentry/pull/70628 --- migrations_lockfile.txt | 2 +- ...subscription_timebox_column_deletion_db.py | 40 +++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 6116641b01cb7f..3fd18eba3a0405 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0718_delete_timebox_columns +sentry: 0719_querysubscription_timebox_column_deletion_db social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py b/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py new file mode 100644 index 00000000000000..59ca6933a64ed6 --- /dev/null +++ b/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py @@ -0,0 +1,40 @@ +# Generated by Django 5.0.4 on 2024-05-09 23:10 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. 
Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0718_delete_timebox_columns"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + """ + ALTER TABLE "sentry_querysubscription" DROP COLUMN "timebox_start"; + ALTER TABLE "sentry_querysubscription" DROP COLUMN "timebox_end"; + """, + hints={"tables": ["sentry_querysubscription"]}, + ) + ], + state_operations=[], + ) + ] From ea495cb50e9159aafc1a611e83d01b942412dbfd Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Fri, 10 May 2024 11:34:18 -0400 Subject: [PATCH 268/376] ref: use classproperty from django instead of ours (#70655) --- src/sentry/auth/authenticators/sms.py | 2 +- src/sentry/auth/authenticators/u2f.py | 2 +- src/sentry/integrations/jira/integration.py | 2 +- src/sentry/interfaces/base.py | 2 +- src/sentry/utils/decorators.py | 12 ------------ 5 files changed, 4 insertions(+), 16 deletions(-) delete mode 100644 src/sentry/utils/decorators.py diff --git a/src/sentry/auth/authenticators/sms.py b/src/sentry/auth/authenticators/sms.py index 4a54cf4a6fa42d..6678c52393c4f7 100644 --- a/src/sentry/auth/authenticators/sms.py +++ b/src/sentry/auth/authenticators/sms.py @@ -5,10 +5,10 @@ from typing import TYPE_CHECKING from django.http.request import HttpRequest +from django.utils.functional import classproperty from django.utils.translation import gettext_lazy as _ from sentry.ratelimits import backend as ratelimiter -from sentry.utils.decorators import classproperty from sentry.utils.otp import TOTP from sentry.utils.sms import phone_number_as_e164, send_sms, sms_available diff --git a/src/sentry/auth/authenticators/u2f.py b/src/sentry/auth/authenticators/u2f.py index 184f61ff3c208e..18947a2cf4e94b 100644 --- a/src/sentry/auth/authenticators/u2f.py +++ b/src/sentry/auth/authenticators/u2f.py @@ -6,6 +6,7 @@ from cryptography.exceptions import InvalidKey, InvalidSignature from django.http.request import HttpRequest from django.urls import reverse +from django.utils.functional import classproperty from django.utils.translation import gettext_lazy as _ from fido2 import cbor from fido2.client import ClientData @@ -19,7 +20,6 @@ from sentry.auth.authenticators.base import EnrollmentStatus from sentry.utils import json from sentry.utils.dates import to_datetime -from sentry.utils.decorators import classproperty from sentry.utils.http import absolute_uri from .base import ActivationChallengeResult, AuthenticatorInterface diff --git a/src/sentry/integrations/jira/integration.py b/src/sentry/integrations/jira/integration.py index a5d97f0061a18a..ae5b8614a66bb4 100644 --- a/src/sentry/integrations/jira/integration.py +++ b/src/sentry/integrations/jira/integration.py @@ -8,6 +8,7 @@ from django.conf import settings from django.urls import reverse +from django.utils.functional import classproperty from django.utils.translation import gettext as _ from sentry import features @@ -37,7 +38,6 
@@ IntegrationFormError, ) from sentry.tasks.integrations import migrate_issues -from sentry.utils.decorators import classproperty from sentry.utils.strings import truncatechars from .client import JiraCloudClient diff --git a/src/sentry/interfaces/base.py b/src/sentry/interfaces/base.py index 70b404d77c7962..0de17f74066a8f 100644 --- a/src/sentry/interfaces/base.py +++ b/src/sentry/interfaces/base.py @@ -5,10 +5,10 @@ from typing import ClassVar, Union from django.conf import settings +from django.utils.functional import classproperty from django.utils.translation import gettext as _ from sentry.utils.canonical import get_canonical_name -from sentry.utils.decorators import classproperty from sentry.utils.imports import import_string from sentry.utils.json import prune_empty_keys from sentry.utils.safe import get_path, safe_execute diff --git a/src/sentry/utils/decorators.py b/src/sentry/utils/decorators.py deleted file mode 100644 index fe291fe6b0aadc..00000000000000 --- a/src/sentry/utils/decorators.py +++ /dev/null @@ -1,12 +0,0 @@ -class classproperty: - # Vendored from newer Django: - # https://github.com/django/django/blob/1.9.6/django/utils/decorators.py#L188-L197 - def __init__(self, method=None): - self.fget = method - - def __get__(self, instance, owner): - return self.fget(owner) - - def getter(self, method): - self.fget = method - return self From 3a159a1c1fe95f35a603eeb5305cf02780e9a8ca Mon Sep 17 00:00:00 2001 From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com> Date: Fri, 10 May 2024 11:53:54 -0400 Subject: [PATCH 269/376] feat(new-trace): Using codesnippet and stackminitrace from tracedrawer. (#70605) We already have all the data needed by `CodeSnippet` and `StackTraceMiniFrame` at the trace level to render the DB query description, so using them seems wiser than higher-level components like `DBQueryDescription` that re-fetch the data.
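The heart of the change, as a trimmed-down sketch (the `DbQuerySnippet` wrapper and its prop are invented for illustration; `CodeSnippet`, `SQLishFormatter`, and the module-level `formatter` instance come straight from the diff below):

```tsx
import {useMemo} from 'react';
import {CodeSnippet} from 'sentry/components/codeSnippet';
import {SQLishFormatter} from 'sentry/views/starfish/utils/sqlish/SQLishFormatter';

// One shared formatter instance, mirroring how the diff below uses it.
const formatter = new SQLishFormatter();

// Hypothetical wrapper: format the raw span description once, then hand
// the string straight to CodeSnippet. No network round trip involved.
function DbQuerySnippet({description}: {description?: string}) {
  const formatted = useMemo(
    () => formatter.toString(description ?? ''),
    [description]
  );

  return formatted ? <CodeSnippet language="sql">{formatted}</CodeSnippet> : null;
}
```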
Co-authored-by: Abdullah Khan --- .../details/span/sections/description.tsx | 57 +++++++++++++------ 1 file changed, 39 insertions(+), 18 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx index 384e280c590787..87e49e1d9875d8 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/description.tsx @@ -1,7 +1,9 @@ +import {Fragment, useMemo} from 'react'; import styled from '@emotion/styled'; import type {Location} from 'history'; import {Button} from 'sentry/components/button'; +import {CodeSnippet} from 'sentry/components/codeSnippet'; import SpanSummaryButton from 'sentry/components/events/interfaces/spans/spanSummaryButton'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -12,14 +14,17 @@ import type { } from 'sentry/views/performance/newTraceDetails/traceModels/traceTree'; import {spanDetailsRouteWithQuery} from 'sentry/views/performance/transactionSummary/transactionSpans/spanDetails/utils'; import { - Frame, - SpanDescription as DBQueryDescription, -} from 'sentry/views/starfish/components/spanDescription'; + MissingFrame, + StackTraceMiniFrame, +} from 'sentry/views/starfish/components/stackTraceMiniFrame'; import {ModuleName} from 'sentry/views/starfish/types'; import {resolveSpanModule} from 'sentry/views/starfish/utils/resolveSpanModule'; +import {SQLishFormatter} from 'sentry/views/starfish/utils/sqlish/SQLishFormatter'; import {TraceDrawerComponents} from '../../styles'; +const formatter = new SQLishFormatter(); + export function SpanDescription({ node, organization, @@ -36,7 +41,18 @@ export function SpanDescription({ span.sentry_tags?.category ); - if (![ModuleName.DB, ModuleName.RESOURCE].includes(resolvedModule)) { + const formattedDescription = useMemo(() => { + if (resolvedModule !== ModuleName.DB) { + return span.description ?? ''; + } + + return formatter.toString(span.description ?? ''); + }, [span.description, resolvedModule]); + + if ( + !formattedDescription || + ![ModuleName.DB, ModuleName.RESOURCE].includes(resolvedModule) + ) { return null; } @@ -66,15 +82,26 @@ export function SpanDescription({ const value = resolvedModule === ModuleName.DB ? ( - - - + + + {formattedDescription} + + {span?.data?.['code.filepath'] ? ( + + ) : ( + + )} + + ) : ( - span.description + formattedDescription ); const title = @@ -108,12 +135,6 @@ const TitleContainer = styled('div')` justify-content: space-between; `; -const SpanDescriptionWrapper = styled('div')` - ${Frame} { - border: none; - } -`; - const ButtonGroup = styled('div')` display: flex; gap: ${space(0.5)}; From 2fda6f1d2383a1f356f13c6f7fc8a4a5216e1d88 Mon Sep 17 00:00:00 2001 From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com> Date: Fri, 10 May 2024 11:54:08 -0400 Subject: [PATCH 270/376] feat(new-trace): Fixing routing to discover from trace view bug. (#70600) When we have a timestamp in the URL, we use `start: timestamp - 1.5days` and `end: timestamp + 1.5days` instead of statsPeriod when routing to Discover to review all events associated with the trace id.
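The windowing logic, pulled out of the diff below into a standalone helper (the `getTraceDateRange` name is invented; the parsing, the 36-hour buffer, and the ISO conversion are from the patch):

```ts
function getTraceDateRange(timestamp: string): {start: string; end: string} {
  const parsed = Number(timestamp);
  if (isNaN(parsed)) {
    throw new Error('Invalid timestamp');
  }

  // 1.5 days on either side of the event, in milliseconds.
  const buffer = 36 * 60 * 60 * 1000;
  // The query param is a unix timestamp in seconds; Date wants milliseconds.
  const center = new Date(parsed * 1000);

  return {
    start: new Date(center.getTime() - buffer).toISOString(),
    end: new Date(center.getTime() + buffer).toISOString(),
  };
}

// getTraceDateRange('1715351514')
// -> {start: '2024-05-09T02:31:54.000Z', end: '2024-05-12T02:31:54.000Z'}
```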
Co-authored-by: Abdullah Khan --- .../performance/newTraceDetails/index.tsx | 30 +++++++++++++++---- .../views/performance/traceDetails/utils.tsx | 3 +- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/index.tsx b/static/app/views/performance/newTraceDetails/index.tsx index 871f117b092722..37a1943174c444 100644 --- a/static/app/views/performance/newTraceDetails/index.tsx +++ b/static/app/views/performance/newTraceDetails/index.tsx @@ -132,14 +132,34 @@ export function TraceView() { allowAbsolutePageDatetime: true, }); const start = decodeScalar(normalizedParams.start); + const timestamp = decodeScalar(normalizedParams.timestamp); const end = decodeScalar(normalizedParams.end); const statsPeriod = decodeScalar(normalizedParams.statsPeriod); - return {start, end, statsPeriod, useSpans: 1}; + return {start, end, statsPeriod, timestamp, useSpans: 1}; }, []); const traceEventView = useMemo(() => { - const {start, end, statsPeriod} = queryParams; + const {start, end, statsPeriod, timestamp} = queryParams; + + let startTimeStamp = start; + let endTimeStamp = end; + + // If timestamp exists in the query params, we want to use it to set the start and end time + // with a buffer of 1.5 days, for retrieving events belonging to the trace. + if (timestamp) { + const parsedTimeStamp = Number(timestamp); + + if (isNaN(parsedTimeStamp)) { + throw new Error('Invalid timestamp'); + } + + const buffer = 36 * 60 * 60 * 1000; // 1.5 days in milliseconds + const dateFromTimestamp = new Date(parsedTimeStamp * 1000); + + startTimeStamp = new Date(dateFromTimestamp.getTime() - buffer).toISOString(); + endTimeStamp = new Date(dateFromTimestamp.getTime() + buffer).toISOString(); + } return EventView.fromSavedQuery({ id: undefined, @@ -149,9 +169,9 @@ export function TraceView() { query: `trace:${traceSlug}`, projects: [ALL_ACCESS_PROJECTS], version: 2, - start, - end, - range: statsPeriod, + start: startTimeStamp, + end: endTimeStamp, + range: !(startTimeStamp || endTimeStamp) ? statsPeriod : undefined, }); }, [queryParams, traceSlug]); diff --git a/static/app/views/performance/traceDetails/utils.tsx b/static/app/views/performance/traceDetails/utils.tsx index d72dd785f63fef..607be7f787d242 100644 --- a/static/app/views/performance/traceDetails/utils.tsx +++ b/static/app/views/performance/traceDetails/utils.tsx @@ -2,6 +2,7 @@ import type {LocationDescriptorObject} from 'history'; import {PAGE_URL_PARAM} from 'sentry/constants/pageFilters'; import type {Organization, OrganizationSummary} from 'sentry/types'; +import {getTimeStampFromTableDateField} from 'sentry/utils/dates'; import type { EventLite, TraceError, @@ -41,7 +42,7 @@ export function getTraceDetailsUrl( ), query: { ...queryParams, - timestamp, + timestamp: getTimeStampFromTableDateField(timestamp), eventId, }, }; From d0de9e538b2f18ddcebc7237f139022e3def15ce Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Fri, 10 May 2024 12:08:55 -0400 Subject: [PATCH 271/376] feat(performance): updates time_spent_percentage to accept an optional column arg (#70625) `time_spent_percentage` is hard coded to be based on the `span.self_time` metric. Updates the function so that an optional metric (such as `span.duration`) can be provided instead. 
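For example, a caller can now base the calculation on `span.duration`
(sketch; the `/api/0` prefix and the org slug are placeholders, and the field
syntax matches the tests below; omitting the second argument keeps the
`span.self_time` default, i.e. `time_spent_percentage(app)`):

```ts
const params = new URLSearchParams({
  dataset: 'spansMetrics',
  statsPeriod: '10m',
});
params.append('field', 'transaction');
params.append('field', 'time_spent_percentage(app,span.duration)');

// Assumes an async context; 'my-org' is a placeholder slug.
const response = await fetch(`/api/0/organizations/my-org/events/?${params}`);
```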
--- .../search/events/datasets/spans_metrics.py | 20 ++++++---- .../test_organization_events_span_metrics.py | 38 +++++++++++++++++++ 2 files changed, 51 insertions(+), 7 deletions(-) diff --git a/src/sentry/search/events/datasets/spans_metrics.py b/src/sentry/search/events/datasets/spans_metrics.py index 7a2afd3ff97461..c412ac37f735ea 100644 --- a/src/sentry/search/events/datasets/spans_metrics.py +++ b/src/sentry/search/events/datasets/spans_metrics.py @@ -11,7 +11,7 @@ from sentry.search.events import builder, constants, fields from sentry.search.events.datasets import field_aliases, filter_aliases, function_aliases from sentry.search.events.datasets.base import DatasetConfig -from sentry.search.events.fields import SnQLStringArg +from sentry.search.events.fields import SnQLStringArg, get_function_alias from sentry.search.events.types import SelectType, WhereType from sentry.search.utils import DEVICE_CLASS from sentry.snuba.metrics.naming_layer.mri import SpanMRI @@ -307,7 +307,13 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: optional_args=[ fields.with_default( "app", fields.SnQLStringArg("scope", allowed_strings=["app", "local"]) - ) + ), + fields.with_default( + "span.self_time", + fields.MetricArg( + "column", allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS + ), + ), ], snql_distribution=self._resolve_time_spent_percentage, default_result_type="percentage", @@ -655,7 +661,7 @@ def _resolve_count_if( alias, ) - def _resolve_total_span_duration(self, alias: str, scope: str) -> SelectType: + def _resolve_total_span_duration(self, alias: str, scope: str, column: str) -> SelectType: """This calculates the total time, and based on the scope will return either the apps total time or whatever other local scope/filters are applied. 
@@ -669,7 +675,7 @@ def _resolve_time_spent_percentage(
             params={},
             snuba_params=self.builder.params,
             query=self.builder.query if scope == "local" else None,
-            selected_columns=["sum(span.self_time)"],
+            selected_columns=[f"sum({column})"],
         )
         sentry_sdk.set_tag("query.resolved_total", scope)
@@ -681,16 +687,16 @@
         if len(results["data"]) != 1:
             self.total_span_duration = 0
             return Function("toFloat64", [0], alias)
-        self.total_span_duration = results["data"][0]["sum_span_self_time"]
+        self.total_span_duration = results["data"][0][get_function_alias(f"sum({column})")]
         return Function("toFloat64", [self.total_span_duration], alias)

     def _resolve_time_spent_percentage(
         self, args: Mapping[str, str | Column | SelectType | int | float], alias: str
     ) -> SelectType:
         total_time = self._resolve_total_span_duration(
-            constants.TOTAL_SPAN_DURATION_ALIAS, args["scope"]
+            constants.TOTAL_SPAN_DURATION_ALIAS, args["scope"], args["column"]
         )
-        metric_id = self.resolve_metric("span.self_time")
+        metric_id = self.resolve_metric(args["column"])

         return function_aliases.resolve_division(
             Function(
diff --git a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py
index 6485b88992455c..0798dfb618bafa 100644
--- a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py
+++ b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py
@@ -343,6 +343,40 @@ def test_time_spent_percentage_local(self):
         assert data[0]["time_spent_percentage(local)"] is None
         assert meta["dataset"] == "spansMetrics"

+    def test_time_spent_percentage_on_span_duration(self):
+        for _ in range(4):
+            self.store_span_metric(
+                1,
+                internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
+                tags={"transaction": "foo_transaction"},
+                timestamp=self.min_ago,
+            )
+        self.store_span_metric(
+            1,
+            internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
+            tags={"transaction": "bar_transaction"},
+            timestamp=self.min_ago,
+        )
+        response = self.do_request(
+            {
+                "field": ["transaction", "time_spent_percentage(app,span.duration)"],
+                "query": "",
+                "orderby": ["-time_spent_percentage(app,span.duration)"],
+                "project": self.project.id,
+                "dataset": "spansMetrics",
+                "statsPeriod": "10m",
+            }
+        )
+        assert response.status_code == 200, response.content
+        data = response.data["data"]
+        meta = response.data["meta"]
+        assert len(data) == 2
+        assert data[0]["time_spent_percentage(app,span.duration)"] == 0.8
+        assert data[0]["transaction"] == "foo_transaction"
+        assert data[1]["time_spent_percentage(app,span.duration)"] == 0.2
+        assert data[1]["transaction"] == "bar_transaction"
+        assert meta["dataset"] == "spansMetrics"
+
     def test_http_error_rate_and_count(self):
         for _ in range(4):
             self.store_span_metric(
@@ -1636,6 +1670,10 @@ def test_time_spent_percentage(self):
     def test_time_spent_percentage_local(self):
         super().test_time_spent_percentage_local()

+    @pytest.mark.xfail(reason="Not implemented")
+    def test_time_spent_percentage_on_span_duration(self):
+        super().test_time_spent_percentage_on_span_duration()
+
     @pytest.mark.xfail(reason="Cannot group by function 'if'")
     def test_span_module(self):
         super().test_span_module()

From 71fba9c8fc81f108eacc392c19c7e8f70f5c3ae5 Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Fri, 10 May 2024 09:15:12 -0700
Subject: [PATCH 272/376] feat(crons): Add api for fetching checkin processing errors for a
monitor. (#70557) This allows us to return processing errors for monitors. Will follow up with apis to dismiss/delete items and to fetch errors at the project level too. --- src/sentry/api/urls.py | 9 +++ ...project_monitor_processing_errors_index.py | 56 +++++++++++++++++++ src/sentry/monitors/processing_errors.py | 13 +++++ src/sentry/monitors/serializers.py | 9 +++ .../test_project_monitor_processing_errors.py | 51 +++++++++++++++++ 5 files changed, 138 insertions(+) create mode 100644 src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py create mode 100644 tests/sentry/monitors/endpoints/test_project_monitor_processing_errors.py diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 49b110875f8ed7..ca733243714386 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -146,6 +146,9 @@ from sentry.monitors.endpoints.project_monitor_environment_details import ( ProjectMonitorEnvironmentDetailsEndpoint, ) +from sentry.monitors.endpoints.project_monitor_processing_errors_index import ( + ProjectMonitorProcessingErrorsIndexEndpoint, +) from sentry.monitors.endpoints.project_monitor_stats import ProjectMonitorStatsEndpoint from sentry.replays.endpoints.organization_replay_count import OrganizationReplayCountEndpoint from sentry.replays.endpoints.organization_replay_details import OrganizationReplayDetailsEndpoint @@ -645,6 +648,7 @@ __all__ = ("urlpatterns",) + # issues endpoints are available both top level (by numerical ID) as well as coupled # to the organization (and queryable via short ID) @@ -2747,6 +2751,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectMonitorEnvironmentDetailsEndpoint.as_view(), name="sentry-api-0-project-monitor-environment-details", ), + re_path( + r"^(?P[^\/]+)/(?P[^\/]+)/monitors/(?P[^\/]+)/processing-errors/$", + ProjectMonitorProcessingErrorsIndexEndpoint.as_view(), + name="sentry-api-0-project-monitor-processing-errors-index", + ), re_path( r"^(?P[^\/]+)/(?P[^\/]+)/monitors/(?P[^\/]+)/stats/$", ProjectMonitorStatsEndpoint.as_view(), diff --git a/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py b/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py new file mode 100644 index 00000000000000..eaadb96874455a --- /dev/null +++ b/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py @@ -0,0 +1,56 @@ +from drf_spectacular.utils import extend_schema +from rest_framework.response import Response + +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import region_silo_endpoint +from sentry.api.paginator import SequencePaginator +from sentry.api.serializers import serialize +from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED +from sentry.apidocs.parameters import GlobalParams, MonitorParams +from sentry.apidocs.utils import inline_sentry_response_serializer +from sentry.monitors.endpoints.base import ProjectMonitorEndpoint +from sentry.monitors.processing_errors import ( + CheckinProcessErrorsManager, + CheckinProcessingErrorData, +) +from sentry.utils.auth import AuthenticatedHttpRequest + + +@region_silo_endpoint +@extend_schema(tags=["Crons"]) +class ProjectMonitorProcessingErrorsIndexEndpoint(ProjectMonitorEndpoint): + publish_status = { + "GET": ApiPublishStatus.PRIVATE, + } + owner = ApiOwner.CRONS + + @extend_schema( + operation_id="Retrieve checkin processing errors for a monitor", + parameters=[ + 
GlobalParams.ORG_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + MonitorParams.MONITOR_ID_OR_SLUG, + ], + responses={ + 200: inline_sentry_response_serializer( + "CheckinProcessingError", list[CheckinProcessingErrorData] + ), + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def get(self, request: AuthenticatedHttpRequest, project, monitor) -> Response: + """ + Retrieves checkin processing errors for a monitor + """ + paginator = SequencePaginator( + list(enumerate(CheckinProcessErrorsManager().get_for_monitor(monitor))) + ) + + return self.paginate( + request=request, + paginator=paginator, + on_results=lambda results: serialize(results, request.user), + ) diff --git a/src/sentry/monitors/processing_errors.py b/src/sentry/monitors/processing_errors.py index 389d6bc604d5e5..4e7924e8f34704 100644 --- a/src/sentry/monitors/processing_errors.py +++ b/src/sentry/monitors/processing_errors.py @@ -2,6 +2,7 @@ import dataclasses import logging +import uuid from datetime import timedelta from enum import Enum from typing import Any, TypedDict @@ -92,17 +93,20 @@ def from_dict(cls, processing_error_data: ProcessingErrorData) -> ProcessingErro class CheckinProcessingErrorData(TypedDict): errors: list[ProcessingErrorData] checkin: CheckinItemData + id: str @dataclasses.dataclass(frozen=True) class CheckinProcessingError: errors: list[ProcessingError] checkin: CheckinItem + id: uuid.UUID = dataclasses.field(default_factory=uuid.uuid4) def to_dict(self) -> CheckinProcessingErrorData: return { "errors": [error.to_dict() for error in self.errors], "checkin": self.checkin.to_dict(), + "id": self.id.hex, } @classmethod @@ -110,8 +114,17 @@ def from_dict(cls, data: CheckinProcessingErrorData) -> CheckinProcessingError: return cls( errors=[ProcessingError.from_dict(error) for error in data["errors"]], checkin=CheckinItem.from_dict(data["checkin"]), + id=uuid.UUID(data["id"]), ) + def __hash__(self): + return hash(self.id.hex) + + def __eq__(self, other): + if isinstance(other, CheckinProcessingError): + return self.id.hex == other.id.hex + return False + class CheckinProcessErrorsManager: def _get_cluster(self) -> RedisCluster[str] | StrictRedis[str]: diff --git a/src/sentry/monitors/serializers.py b/src/sentry/monitors/serializers.py index 4e242866e8493d..e0e08ec0e64f3f 100644 --- a/src/sentry/monitors/serializers.py +++ b/src/sentry/monitors/serializers.py @@ -17,6 +17,7 @@ MonitorIncident, MonitorStatus, ) +from sentry.monitors.processing_errors import CheckinProcessingError, CheckinProcessingErrorData from sentry.monitors.utils import fetch_associated_groups from sentry.monitors.validators import IntervalNames from sentry.types.actor import Actor @@ -346,3 +347,11 @@ def _expand(self, key) -> bool: return False return key in self.expand + + +@register(CheckinProcessingError) +class CheckinProcessingErrorSerializer(Serializer): + def serialize( + self, obj: CheckinProcessingError, attrs, user, **kwargs + ) -> CheckinProcessingErrorData: + return obj.to_dict() diff --git a/tests/sentry/monitors/endpoints/test_project_monitor_processing_errors.py b/tests/sentry/monitors/endpoints/test_project_monitor_processing_errors.py new file mode 100644 index 00000000000000..cc5ec7f2b5934f --- /dev/null +++ b/tests/sentry/monitors/endpoints/test_project_monitor_processing_errors.py @@ -0,0 +1,51 @@ +from sentry.api.serializers import serialize +from sentry.monitors.processing_errors import ( + CheckinProcessErrorsManager, + ProcessingError, + ProcessingErrorType, +) +from 
sentry.monitors.testutils import build_checkin_processing_error +from sentry.testutils.cases import APITestCase, MonitorTestCase +from sentry.utils import json + + +class ProjectMonitorProcessingErrorsIndexEndpointTest(MonitorTestCase, APITestCase): + endpoint = "sentry-api-0-project-monitor-processing-errors-index" + + def setUp(self): + super().setUp() + self.login_as(user=self.user) + + def test_empty(self): + monitor = self.create_monitor() + + resp = self.get_success_response(self.organization.slug, self.project.slug, monitor.slug) + assert resp.data == [] + + def test(self): + monitor = self.create_monitor() + + manager = CheckinProcessErrorsManager() + monitor_errors = [ + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.CHECKIN_INVALID_GUID, {"guid": "bad"})], + message_overrides={"project_id": self.project.id}, + payload_overrides={"monitor_slug": monitor.slug}, + ), + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.MONITOR_DISABLED, {"some": "data"})], + message_overrides={"project_id": self.project.id}, + payload_overrides={"monitor_slug": monitor.slug}, + ), + ] + project_error = build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.ORGANIZATION_KILLSWITCH_ENABLED)], + message_overrides={"project_id": self.project.id}, + ) + + manager.store(monitor_errors[0], monitor) + manager.store(monitor_errors[1], monitor) + manager.store(project_error, None) + + resp = self.get_success_response(self.organization.slug, self.project.slug, monitor.slug) + assert resp.data == json.loads(json.dumps(serialize(list(reversed(monitor_errors))))) From 1deb0c9e69cf0c10262aa83e9bf523b6265d2617 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Fri, 10 May 2024 12:19:04 -0400 Subject: [PATCH 273/376] ref: stronger typing for a few more utils modules (#70659) --- pyproject.toml | 5 +++++ src/sentry/utils/geo.py | 6 +++--- src/sentry/utils/imports.py | 9 ++++++--- src/sentry/utils/numbers.py | 8 +++++--- src/sentry/utils/pubsub.py | 6 ++++-- src/sentry/utils/urls.py | 6 +++--- tests/sentry/utils/test_numbers.py | 2 +- 7 files changed, 27 insertions(+), 15 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 78236f07e5b4da..895ead1f7853a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -617,17 +617,22 @@ module = [ "sentry.utils.env", "sentry.utils.event", "sentry.utils.files", + "sentry.utils.geo", + "sentry.utils.imports", "sentry.utils.iterators", "sentry.utils.javascript", "sentry.utils.locking.backends.redis", "sentry.utils.migrations", + "sentry.utils.numbers", "sentry.utils.otp", "sentry.utils.performance_issues.detectors.*", "sentry.utils.performance_issues.performance_detection", + "sentry.utils.pubsub", "sentry.utils.redis", "sentry.utils.redis_metrics", "sentry.utils.sentry_apps.*", "sentry.utils.sms", + "sentry.utils.urls", "sentry.utils.uwsgi", "sentry.utils.zip", "sentry_plugins.base", diff --git a/src/sentry/utils/geo.py b/src/sentry/utils/geo.py index b1b05e9cfccf58..6461c84a0e587e 100644 --- a/src/sentry/utils/geo.py +++ b/src/sentry/utils/geo.py @@ -10,8 +10,8 @@ # default is no-op -def geo_by_addr(ip): - pass +def geo_by_addr(ip: str) -> dict[str, Any] | None: + return None rust_geoip: None | GeoIpLookup = None @@ -48,7 +48,7 @@ def _geo_by_addr(ip: str) -> dict[str, Any] | None: geo_by_addr = _geo_by_addr -def _init_geoip_rust(): +def _init_geoip_rust() -> None: global rust_geoip from sentry_relay.processing import GeoIpLookup diff --git 
a/src/sentry/utils/imports.py b/src/sentry/utils/imports.py index 4390674a14fb13..590266d9680e94 100644 --- a/src/sentry/utils/imports.py +++ b/src/sentry/utils/imports.py @@ -1,5 +1,8 @@ -class ModuleProxyCache(dict): - def __missing__(self, key): +from typing import Any + + +class ModuleProxyCache(dict[str, object]): + def __missing__(self, key: str) -> object: if "." not in key: return __import__(key) @@ -17,7 +20,7 @@ def __missing__(self, key): _cache = ModuleProxyCache() -def import_string(path: str): +def import_string(path: str) -> Any: """ Path must be module.path.ClassName diff --git a/src/sentry/utils/numbers.py b/src/sentry/utils/numbers.py index 7d64cf0f3e04cf..399cf364b97402 100644 --- a/src/sentry/utils/numbers.py +++ b/src/sentry/utils/numbers.py @@ -62,7 +62,9 @@ def base36_decode(s: str) -> int: DEFAULT_UNITS = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB") -def format_bytes(number, units=DEFAULT_UNITS, decimal_places=2): +def format_bytes( + number: float, units: tuple[str, ...] = DEFAULT_UNITS, decimal_places: int = 2 +) -> str: block = 1024.0 if number < block: return f"{number} {units[0]}" @@ -72,7 +74,7 @@ def format_bytes(number, units=DEFAULT_UNITS, decimal_places=2): while number >= block and u < max_unit: number /= block u += 1 - return ("{:.%df} {}" % (decimal_places,)).format(number, units[u]) + return f"{number:.{decimal_places}f} {units[u]}" def format_grouped_length(length: int, steps: list[int] | None = None) -> str: @@ -92,7 +94,7 @@ def format_grouped_length(length: int, steps: list[int] | None = None) -> str: return f">{steps[-1]}" -def validate_bigint(value): +def validate_bigint(value: object) -> bool: return isinstance(value, int) and value >= 0 and value.bit_length() <= 63 diff --git a/src/sentry/utils/pubsub.py b/src/sentry/utils/pubsub.py index 2d4dfbb22ae878..f2ffbb9e0387a4 100644 --- a/src/sentry/utils/pubsub.py +++ b/src/sentry/utils/pubsub.py @@ -1,12 +1,14 @@ +from typing import Any + from confluent_kafka import Producer class KafkaPublisher: - def __init__(self, connection, asynchronous=True): + def __init__(self, connection: dict[str, Any], asynchronous: bool = True) -> None: self.producer = Producer(connection or {}) self.asynchronous = asynchronous - def publish(self, channel, value, key=None): + def publish(self, channel: str, value: str, key: str | None = None) -> None: self.producer.produce(topic=channel, value=value, key=key) if self.asynchronous: self.producer.poll(0) diff --git a/src/sentry/utils/urls.py b/src/sentry/utils/urls.py index a1d31d495979b7..1711f006a79a2e 100644 --- a/src/sentry/utils/urls.py +++ b/src/sentry/utils/urls.py @@ -1,11 +1,11 @@ import re -from collections.abc import MutableMapping, Sequence +from collections.abc import Mapping, MutableMapping, Sequence from urllib.parse import parse_qs, parse_qsl, urlencode, urljoin, urlparse, urlsplit, urlunparse _scheme_re = re.compile(r"^([a-zA-Z0-9-+]+://)(.*)$") -def non_standard_url_join(base, to_join): +def non_standard_url_join(base: str, to_join: str | None) -> str: """A version of url join that can deal with unknown protocols.""" # joins to an absolute url are willing by default if not to_join: @@ -31,7 +31,7 @@ def non_standard_url_join(base, to_join): return rv -def add_params_to_url(url, params): +def add_params_to_url(url: str, params: Mapping[str, str]) -> str: url_parts = urlparse(url) query = dict(parse_qsl(url_parts.query)) query.update(params) diff --git a/tests/sentry/utils/test_numbers.py b/tests/sentry/utils/test_numbers.py index 
2dbe5dd35b0496..66b74fc773c5bb 100644
--- a/tests/sentry/utils/test_numbers.py
+++ b/tests/sentry/utils/test_numbers.py
@@ -287,7 +287,7 @@ def test_format_bytes():
     assert format_bytes(3000000000) == "2.79 GB"
     assert format_bytes(3000000000000) == "2.73 TB"

-    assert format_bytes(3000000000000, units=["B", "KB", "MB", "GB"]) == "2793.97 GB"
+    assert format_bytes(3000000000000, units=("B", "KB", "MB", "GB")) == "2793.97 GB"


 def test_format_grouped_length():

From a9cb58e02b78b0b99d08aeb8425eb4ff80384d72 Mon Sep 17 00:00:00 2001
From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com>
Date: Fri, 10 May 2024 12:19:38 -0400
Subject: [PATCH 274/376] ref: delete unused sentry.utils.profile (#70658)

---
 src/sentry/utils/profile.py | 24 ------------------------
 1 file changed, 24 deletions(-)
 delete mode 100644 src/sentry/utils/profile.py

diff --git a/src/sentry/utils/profile.py b/src/sentry/utils/profile.py
deleted file mode 100644
index c2b13c6cfc1a39..00000000000000
--- a/src/sentry/utils/profile.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-import time
-from cProfile import Profile
-from functools import update_wrapper
-from pstats import Stats
-
-
-def profile_call(_func, *args, **kwargs):
-    p = Profile()
-    rv = []
-    p.runcall(lambda: rv.append(_func(*args, **kwargs)))
-    p.dump_stats(f"/tmp/sentry-{time.time()}-{_func.__name__}.prof")
-
-    stats = Stats(p, stream=sys.stderr)
-    stats.sort_stats("time", "calls")
-    stats.print_stats()
-    return rv[0]
-
-
-def profile(func):
-    def newfunc(*args, **kwargs):
-        return profile_call(func, *args, **kwargs)
-
-    return update_wrapper(newfunc, func)

From aacdcfd3bcc14ad57ae2ac4f9fc6a81526998f29 Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Fri, 10 May 2024 09:20:32 -0700
Subject: [PATCH 275/376] ref(rules): Remove inner lock and add logging on
 exception (#70629)

Part of debugging the delayed rule processor led here; we don't need the
inner lock (it's locked
[here](https://github.com/getsentry/sentry/blob/370e78ca10dddc2b018d1e9b6e41e501386cc0d2/src/sentry/buffer/redis.py#L316-L318)
before `process_batch` is called).

---
 src/sentry/buffer/redis.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py
index e1178e573abcbd..e1937ce9e59922 100644
--- a/src/sentry/buffer/redis.py
+++ b/src/sentry/buffer/redis.py
@@ -312,15 +312,10 @@ def get_hash(
         return decoded_hash

     def process_batch(self) -> None:
-        client = get_cluster_routing_client(self.cluster, self.is_redis_cluster)
-        lock_key = self._lock_key(client, self.pending_key, ex=10)
-        if not lock_key:
-            return
-
         try:
             redis_buffer_registry.callback(BufferHookEvent.FLUSH, self)
-        finally:
-            client.delete(lock_key)
+        except Exception:
+            logger.exception("process_batch.error")

     def incr(
         self,

From f6914254e266a8d5ee1b15cfa831d3e5c8635627 Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Fri, 10 May 2024 12:25:58 -0400
Subject: [PATCH 276/376] feat(trace): Desaturate on hover, fix durations
 (#70661)

### Summary

This fixes trace duration and hover saturation.
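The duration part of the fix drops the floor-quantization of the trace
duration (worked sketch with a hypothetical 150ms trace; constants as in the
diff below):

```ts
const BREAKDOWN_BAR_SIZE = 200;

// Before: a quantize step of 5 meant a bar width of 40, and the trace
// duration was floored to a multiple of it.
const before = Math.floor(150 / 40) * 40; // 120, a 20% error on a 150ms trace

// After: 200 slices of exact size, so the duration is preserved.
const BREAKDOWN_QUANTIZE_STEP = 1;
const BREAKDOWN_NUM_SLICES = BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP; // 200
const traceSliceSize = 150 / BREAKDOWN_NUM_SLICES; // 0.75
const after = BREAKDOWN_NUM_SLICES * traceSliceSize; // 150, exact
```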
--- static/app/views/performance/traces/content.tsx | 3 +++ .../views/performance/traces/fieldRenderers.tsx | 15 +++++++++------ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index 86489974861cb1..53e9e30762b33b 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -529,6 +529,7 @@ const BreakdownPanelItem = styled(StyledPanelItem)<{highlightedSliceName: string ${p => p.highlightedSliceName ? `--highlightedSlice-${p.highlightedSliceName}-opacity: 1.0; + --highlightedSlice-${p.highlightedSliceName}-saturate: saturate(1.0); --highlightedSlice-${p.highlightedSliceName}-transform: translateY(0px); ` : null} @@ -536,10 +537,12 @@ const BreakdownPanelItem = styled(StyledPanelItem)<{highlightedSliceName: string p.highlightedSliceName ? ` --defaultSlice-opacity: 1.0; + --defaultSlice-saturate: saturate(0.8); --defaultSlice-transform: translateY(0px); ` : ` --defaultSlice-opacity: 1.0; + --defaultSlice-saturate: saturate(1.0); --defaultSlice-transform: translateY(0px); `} `; diff --git a/static/app/views/performance/traces/fieldRenderers.tsx b/static/app/views/performance/traces/fieldRenderers.tsx index a7da26efe8d4ac..29aa6e0cc4cb2f 100644 --- a/static/app/views/performance/traces/fieldRenderers.tsx +++ b/static/app/views/performance/traces/fieldRenderers.tsx @@ -71,6 +71,9 @@ const RectangleTraceBreakdown = styled(RowRectangle)<{ position: relative; width: 100%; height: 15px; + ${p => ` + filter: var(--highlightedSlice-${p.sliceName}-saturate, var(--defaultSlice-saturate)); + `} ${p => ` opacity: var(--highlightedSlice-${p.sliceName ?? ''}-opacity, var(--defaultSlice-opacity, 1.0)); `} @@ -124,8 +127,8 @@ export function TraceBreakdownRenderer({ } const BREAKDOWN_BAR_SIZE = 200; -const BREAKDOWN_QUANTIZE_STEP = 5; -const BREAKDOWN_BAR_WIDTH = BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP; +const BREAKDOWN_QUANTIZE_STEP = 1; +const BREAKDOWN_NUM_SLICES = BREAKDOWN_BAR_SIZE / BREAKDOWN_QUANTIZE_STEP; // 200 export function SpanBreakdownSliceRenderer({ trace, @@ -146,8 +149,8 @@ export function SpanBreakdownSliceRenderer({ trace: TraceResult; offset?: number; }) { - const traceDuration = - Math.floor((trace.end - trace.start) / BREAKDOWN_BAR_WIDTH) * BREAKDOWN_BAR_WIDTH; + const traceSliceSize = (trace.end - trace.start) / BREAKDOWN_NUM_SLICES; + const traceDuration = BREAKDOWN_NUM_SLICES * traceSliceSize; const sliceDuration = sliceEnd - sliceStart; @@ -160,11 +163,11 @@ export function SpanBreakdownSliceRenderer({ const sliceWidth = BREAKDOWN_QUANTIZE_STEP * - Math.ceil(BREAKDOWN_BAR_WIDTH * (sliceDuration / traceDuration)); + Math.ceil(BREAKDOWN_NUM_SLICES * (sliceDuration / traceDuration)); const relativeSliceStart = sliceStart - trace.start; const sliceOffset = BREAKDOWN_QUANTIZE_STEP * - Math.floor((BREAKDOWN_BAR_WIDTH * relativeSliceStart) / traceDuration); + Math.floor((BREAKDOWN_NUM_SLICES * relativeSliceStart) / traceDuration); return ( Date: Fri, 10 May 2024 12:26:32 -0400 Subject: [PATCH 277/376] chore(actor) Remove old actor columns (#70549) (#70577) Restore changes from #70549 which had to be reverted as they caused failures in getsentry migration tests. This reverts commit cb038e95a32baba1d370ff977157dd903abf27ac. 
--- migrations_lockfile.txt | 2 +- .../migrations/0720_remove_actor_columns.py | 53 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 src/sentry/migrations/0720_remove_actor_columns.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 3fd18eba3a0405..4e9d95d0a54f40 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0016_add_control_cacheversion nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0719_querysubscription_timebox_column_deletion_db +sentry: 0720_remove_actor_columns social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0720_remove_actor_columns.py b/src/sentry/migrations/0720_remove_actor_columns.py new file mode 100644 index 00000000000000..6b098b9ebddbcb --- /dev/null +++ b/src/sentry/migrations/0720_remove_actor_columns.py @@ -0,0 +1,53 @@ +# Generated by Django 5.0.4 on 2024-05-08 21:10 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0719_querysubscription_timebox_column_deletion_db"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[], + database_operations=[ + migrations.RunSQL( + sql="ALTER TABLE sentry_team DROP COLUMN actor_id", + reverse_sql="ALTER TABLE sentry_team ADD COLUMN actor_id BIGINT NULL", + hints={"tables": ["sentry_team"]}, + ), + migrations.RunSQL( + sql="ALTER TABLE sentry_rule DROP COLUMN owner_id", + reverse_sql="ALTER TABLE sentry_rule ADD COLUMN owner_id BIGINT NULL", + hints={"tables": ["sentry_rule"]}, + ), + migrations.RunSQL( + sql="ALTER TABLE sentry_alertrule DROP COLUMN owner_id", + reverse_sql="ALTER TABLE sentry_alertrule ADD COLUMN owner_id BIGINT NULL", + hints={"tables": ["sentry_alertrule"]}, + ), + migrations.RunSQL( + sql="ALTER TABLE sentry_grouphistory DROP COLUMN actor_id", + reverse_sql="ALTER TABLE sentry_grouphistory ADD COLUMN actor_id BIGINT NULL", + hints={"tables": ["sentry_grouphistory"]}, + ), + ], + ) + ] From 3aade54c7bf83614f22132e6b56191848d38d210 Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Fri, 10 May 2024 09:38:19 -0700 Subject: [PATCH 278/376] feat(api-idorslug): Updated Subset of Endpoints to use `organization_id_or_slug` (#70636) A subset of changes from https://github.com/getsentry/sentry/pull/70081! 
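In practice the updated routes accept either form (sketch with placeholder
values; a numeric value is looked up by id, anything else by slug):

```ts
// Both resolve to the same endpoint after this change
// (the /api/0 prefix is an assumption about the deployment).
fetch('/api/0/organizations/my-org/projects/'); // by slug
fetch('/api/0/organizations/123/projects/'); // by id
```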
--- src/sentry/api/endpoints/broadcast_index.py | 16 ++++-- .../codeowners/external_actor/user_details.py | 4 +- src/sentry/api/endpoints/group_details.py | 2 +- .../organization_integrations/index.py | 2 +- .../notification_actions_details.py | 6 +-- .../notification_actions_index.py | 4 +- .../organization_code_mapping_codeowners.py | 6 ++- .../organization_code_mapping_details.py | 6 ++- .../organization_dashboard_details.py | 6 ++- .../api/endpoints/organization_events.py | 13 +++-- .../endpoints/organization_member/index.py | 4 +- .../api/endpoints/organization_projects.py | 2 +- .../api/endpoints/organization_relay_usage.py | 2 +- .../endpoints/organization_search_details.py | 4 +- .../api/endpoints/organization_sessions.py | 2 +- .../endpoints/organization_stats_summary.py | 2 +- .../api/endpoints/organization_stats_v2.py | 2 +- .../api/endpoints/organization_teams.py | 4 +- src/sentry/api/endpoints/project_details.py | 6 +-- .../api/endpoints/project_filter_details.py | 2 +- src/sentry/api/endpoints/project_keys.py | 4 +- src/sentry/api/endpoints/project_ownership.py | 4 +- .../api/endpoints/project_rule_details.py | 6 +-- src/sentry/api/endpoints/project_rules.py | 4 +- src/sentry/api/urls.py | 50 +++++++++---------- .../organizations/test_org_projects.py | 2 +- .../organizations/test_org_stats_v2.py | 2 +- .../releases/test_organization_sessions.py | 2 +- tests/apidocs/endpoints/teams/test_index.py | 2 +- .../api/endpoints/test_event_grouping_info.py | 8 +-- .../test_organization_dashboard_details.py | 10 +++- .../api/endpoints/test_project_details.py | 8 +-- .../sentry/api/endpoints/test_project_keys.py | 14 +++--- .../api/endpoints/test_project_ownership.py | 2 +- tests/sentry/api/test_organization_events.py | 2 +- .../integrations/github/test_ticket_action.py | 4 +- .../github_enterprise/test_ticket_action.py | 4 +- .../integrations/jira/test_ticket_action.py | 4 +- .../jira_server/test_ticket_action.py | 4 +- .../middleware/test_access_log_middleware.py | 4 +- .../test_metrics_sessions_v2.py | 4 +- .../api/endpoints/test_organization_events.py | 2 +- .../endpoints/test_organization_events_mep.py | 6 ++- .../test_organization_events_span_metrics.py | 2 +- .../endpoints/test_organization_sessions.py | 4 +- .../test_organization_stats_summary.py | 2 +- 46 files changed, 142 insertions(+), 113 deletions(-) diff --git a/src/sentry/api/endpoints/broadcast_index.py b/src/sentry/api/endpoints/broadcast_index.py index 5053a56ecfa4de..10fbe7a52a48c6 100644 --- a/src/sentry/api/endpoints/broadcast_index.py +++ b/src/sentry/api/endpoints/broadcast_index.py @@ -50,9 +50,19 @@ def _secondary_filtering(self, request: Request, organization_slug, queryset): # used in the SAAS product return list(queryset) - def convert_args(self, request: Request, organization_slug=None, *args, **kwargs): - if organization_slug: - args, kwargs = super().convert_args(request, organization_slug) + def convert_args(self, request: Request, *args, **kwargs): + organization_id_or_slug: int | str | None = None + if args and args[0] is not None: + organization_id_or_slug = args[0] + # Required so it behaves like the original convert_args, where organization_id_or_slug was another parameter + # TODO: Remove this once we remove the old `organization_slug` parameter from getsentry + args = args[1:] + else: + organization_id_or_slug = kwargs.pop("organization_id_or_slug", None) or kwargs.pop( + "organization_slug", None + ) + if organization_id_or_slug: + args, kwargs = super().convert_args(request, 
organization_id_or_slug) return (args, kwargs) diff --git a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py index 541c9bee627eaf..b146273a48597d 100644 --- a/src/sentry/api/endpoints/codeowners/external_actor/user_details.py +++ b/src/sentry/api/endpoints/codeowners/external_actor/user_details.py @@ -30,12 +30,12 @@ class ExternalUserDetailsEndpoint(OrganizationEndpoint, ExternalActorEndpointMix def convert_args( self, request: Request, - organization_slug: str, + organization_id_or_slug: int | str, external_user_id: int, *args: Any, **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: - args, kwargs = super().convert_args(request, organization_slug, *args, **kwargs) + args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs) kwargs["external_user"] = self.get_external_actor_or_404( external_user_id, kwargs["organization"] ) diff --git a/src/sentry/api/endpoints/group_details.py b/src/sentry/api/endpoints/group_details.py index 46f409d0da3cbf..e1332d62dbf0f8 100644 --- a/src/sentry/api/endpoints/group_details.py +++ b/src/sentry/api/endpoints/group_details.py @@ -136,7 +136,7 @@ def get(self, request: Request, group) -> Response: the issue (title, last seen, first seen), some overall numbers (number of comments, user reports) as well as the summarized event data. - :pparam string organization_id_or_slug: The slug of the organization. + :pparam string organization_id_or_slug: the id or slug of the organization. :pparam string issue_id: the ID of the issue to retrieve. :auth: required """ diff --git a/src/sentry/api/endpoints/integrations/organization_integrations/index.py b/src/sentry/api/endpoints/integrations/organization_integrations/index.py index 5743530854d28f..a32a059433c1b5 100644 --- a/src/sentry/api/endpoints/integrations/organization_integrations/index.py +++ b/src/sentry/api/endpoints/integrations/organization_integrations/index.py @@ -64,7 +64,7 @@ class OrganizationIntegrationsEndpoint(OrganizationIntegrationBaseEndpoint): @extend_schema( operation_id="List an Organization's Available Integrations", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, IntegrationParams.PROVIDER_KEY, IntegrationParams.FEATURES, IntegrationParams.INCLUDE_CONFIG, diff --git a/src/sentry/api/endpoints/notifications/notification_actions_details.py b/src/sentry/api/endpoints/notifications/notification_actions_details.py index 1e48cc0503dd7f..49ded9395c66cd 100644 --- a/src/sentry/api/endpoints/notifications/notification_actions_details.py +++ b/src/sentry/api/endpoints/notifications/notification_actions_details.py @@ -87,7 +87,7 @@ def convert_args(self, request: Request, action_id: int, *args, **kwargs): @extend_schema( operation_id="Retrieve a Spike Protection Notification Action", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, NotificationParams.ACTION_ID, ], responses={200: OutgoingNotificationActionSerializer}, @@ -111,7 +111,7 @@ def get( @extend_schema( operation_id="Update a Spike Protection Notification Action", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, NotificationParams.ACTION_ID, ], request=NotificationActionSerializer, @@ -159,7 +159,7 @@ def put( @extend_schema( operation_id="Delete a Spike Protection Notification Action", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, NotificationParams.ACTION_ID, ], responses={ diff --git 
a/src/sentry/api/endpoints/notifications/notification_actions_index.py b/src/sentry/api/endpoints/notifications/notification_actions_index.py index 6df7965be6fae6..cc95b12c610ec0 100644 --- a/src/sentry/api/endpoints/notifications/notification_actions_index.py +++ b/src/sentry/api/endpoints/notifications/notification_actions_index.py @@ -66,7 +66,7 @@ class NotificationActionsIndexEndpoint(OrganizationEndpoint): @extend_schema( operation_id="List Spike Protection Notifications", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, OrganizationParams.PROJECT, OrganizationParams.PROJECT_ID_OR_SLUG, NotificationParams.TRIGGER_TYPE, @@ -119,7 +119,7 @@ def get(self, request: Request, organization: Organization) -> Response: @extend_schema( operation_id="Create a Spike Protection Notification Action", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, ], request=NotificationActionSerializer, responses={ diff --git a/src/sentry/api/endpoints/organization_code_mapping_codeowners.py b/src/sentry/api/endpoints/organization_code_mapping_codeowners.py index 7c3dd0cabcd013..937c2fd247bdf7 100644 --- a/src/sentry/api/endpoints/organization_code_mapping_codeowners.py +++ b/src/sentry/api/endpoints/organization_code_mapping_codeowners.py @@ -30,8 +30,10 @@ class OrganizationCodeMappingCodeOwnersEndpoint(OrganizationEndpoint): } permission_classes = (OrganizationIntegrationsPermission,) - def convert_args(self, request: Request, organization_slug, config_id, *args, **kwargs): - args, kwargs = super().convert_args(request, organization_slug, config_id, *args, **kwargs) + def convert_args(self, request: Request, organization_id_or_slug, config_id, *args, **kwargs): + args, kwargs = super().convert_args( + request, organization_id_or_slug, config_id, *args, **kwargs + ) organization = kwargs["organization"] try: diff --git a/src/sentry/api/endpoints/organization_code_mapping_details.py b/src/sentry/api/endpoints/organization_code_mapping_details.py index 6395e678de9967..77032190000c5d 100644 --- a/src/sentry/api/endpoints/organization_code_mapping_details.py +++ b/src/sentry/api/endpoints/organization_code_mapping_details.py @@ -30,8 +30,10 @@ class OrganizationCodeMappingDetailsEndpoint(OrganizationEndpoint, OrganizationI } permission_classes = (OrganizationIntegrationsLoosePermission,) - def convert_args(self, request: Request, organization_slug, config_id, *args, **kwargs): - args, kwargs = super().convert_args(request, organization_slug, config_id, *args, **kwargs) + def convert_args(self, request: Request, organization_id_or_slug, config_id, *args, **kwargs): + args, kwargs = super().convert_args( + request, organization_id_or_slug, config_id, *args, **kwargs + ) ois = integration_service.get_organization_integrations( organization_id=kwargs["organization"].id ) diff --git a/src/sentry/api/endpoints/organization_dashboard_details.py b/src/sentry/api/endpoints/organization_dashboard_details.py index 30ddc19822f886..d10ae4983244be 100644 --- a/src/sentry/api/endpoints/organization_dashboard_details.py +++ b/src/sentry/api/endpoints/organization_dashboard_details.py @@ -24,8 +24,10 @@ class OrganizationDashboardBase(OrganizationEndpoint): owner = ApiOwner.PERFORMANCE permission_classes = (OrganizationDashboardsPermission,) - def convert_args(self, request: Request, organization_slug, dashboard_id, *args, **kwargs): - args, kwargs = super().convert_args(request, organization_slug, *args, **kwargs) + def convert_args( + self, request: Request, organization_id_or_slug, 
dashboard_id, *args, **kwargs + ): + args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs) try: kwargs["dashboard"] = self._get_dashboard(request, kwargs["organization"], dashboard_id) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 791757a257446e..6cd3204b07f84d 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -116,7 +116,7 @@ def rate_limit_events( - request: Request, organization_slug: str | None = None, *args, **kwargs + request: Request, organization_id_or_slug: str | None = None, *args, **kwargs ) -> dict[str, dict[RateLimitCategory, RateLimit]]: """ Decision tree for rate limiting for organization events endpoint. @@ -156,9 +156,14 @@ def _validated_limits(limits: dict[str, Any], fallback: dict[str, Any]) -> RateL rate_limit = RateLimit(**LEGACY_RATE_LIMIT) try: - organization = Organization.objects.get_from_cache(slug=organization_slug) + if str(organization_id_or_slug).isdecimal(): + organization = Organization.objects.get_from_cache(id=organization_id_or_slug) + else: + organization = Organization.objects.get_from_cache(slug=organization_id_or_slug) except Organization.DoesNotExist: - logger.warning("organization.slug.invalid", extra={"organization_slug": organization_slug}) + logger.warning( + "organization.slug.invalid", extra={"organization_id_or_slug": organization_id_or_slug} + ) return _config_for_limit(rate_limit) if organization.id in options.get("api.organization_events.rate-limit-increased.orgs", []): @@ -230,7 +235,7 @@ def get_features(self, organization: Organization, request: Request) -> Mapping[ parameters=[ GlobalParams.END, GlobalParams.ENVIRONMENT, - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, OrganizationParams.PROJECT, GlobalParams.START, GlobalParams.STATS_PERIOD, diff --git a/src/sentry/api/endpoints/organization_member/index.py b/src/sentry/api/endpoints/organization_member/index.py index 460092e568bef5..3bc3cb0226f111 100644 --- a/src/sentry/api/endpoints/organization_member/index.py +++ b/src/sentry/api/endpoints/organization_member/index.py @@ -189,7 +189,7 @@ class OrganizationMemberIndexEndpoint(OrganizationEndpoint): @extend_schema( operation_id="List an Organization's Members", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, ], responses={ 200: inline_sentry_response_serializer( @@ -299,7 +299,7 @@ def get(self, request: Request, organization) -> Response: @extend_schema( operation_id="Add a Member to an Organization", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, ], request=OrganizationMemberRequestSerializer, responses={ diff --git a/src/sentry/api/endpoints/organization_projects.py b/src/sentry/api/endpoints/organization_projects.py index bc1b6f2c14f438..27cb1d8f0b7206 100644 --- a/src/sentry/api/endpoints/organization_projects.py +++ b/src/sentry/api/endpoints/organization_projects.py @@ -51,7 +51,7 @@ class OrganizationProjectsEndpoint(OrganizationEndpoint, EnvironmentMixin): @extend_schema( operation_id="List an Organization's Projects", - parameters=[GlobalParams.ORG_SLUG, CursorQueryParam], + parameters=[GlobalParams.ORG_ID_OR_SLUG, CursorQueryParam], request=None, responses={ 200: inline_sentry_response_serializer( diff --git a/src/sentry/api/endpoints/organization_relay_usage.py b/src/sentry/api/endpoints/organization_relay_usage.py index 5baec370041de4..180189c595cad1 100644 --- 
a/src/sentry/api/endpoints/organization_relay_usage.py +++ b/src/sentry/api/endpoints/organization_relay_usage.py @@ -27,7 +27,7 @@ class OrganizationRelayUsage(OrganizationEndpoint): @extend_schema( operation_id="List an Organization's trusted Relays", - parameters=[GlobalParams.ORG_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG], request=None, responses={ 200: inline_sentry_response_serializer( diff --git a/src/sentry/api/endpoints/organization_search_details.py b/src/sentry/api/endpoints/organization_search_details.py index f27fda9f694af9..f267c83317bb3b 100644 --- a/src/sentry/api/endpoints/organization_search_details.py +++ b/src/sentry/api/endpoints/organization_search_details.py @@ -43,8 +43,8 @@ class OrganizationSearchDetailsEndpoint(OrganizationEndpoint): } permission_classes = (OrganizationSearchEditPermission,) - def convert_args(self, request: Request, organization_slug, search_id, *args, **kwargs): - (args, kwargs) = super().convert_args(request, organization_slug, *args, **kwargs) + def convert_args(self, request: Request, organization_id_or_slug, search_id, *args, **kwargs): + (args, kwargs) = super().convert_args(request, organization_id_or_slug, *args, **kwargs) # Only allow users to delete their own personal searches OR # organization level searches diff --git a/src/sentry/api/endpoints/organization_sessions.py b/src/sentry/api/endpoints/organization_sessions.py index 9869c5ee245400..5c910652ddf1da 100644 --- a/src/sentry/api/endpoints/organization_sessions.py +++ b/src/sentry/api/endpoints/organization_sessions.py @@ -45,7 +45,7 @@ class OrganizationSessionsEndpoint(OrganizationEndpoint): GlobalParams.START, GlobalParams.END, GlobalParams.ENVIRONMENT, - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.STATS_PERIOD, OrganizationParams.PROJECT, SessionsParams.FIELD, diff --git a/src/sentry/api/endpoints/organization_stats_summary.py b/src/sentry/api/endpoints/organization_stats_summary.py index 9d4a0b4e0ace28..d81651da576be9 100644 --- a/src/sentry/api/endpoints/organization_stats_summary.py +++ b/src/sentry/api/endpoints/organization_stats_summary.py @@ -125,7 +125,7 @@ class OrganizationStatsSummaryEndpoint(OrganizationEndpoint): @extend_schema( operation_id="Retrieve an Organization's Events Count by Project", - parameters=[GlobalParams.ORG_SLUG, OrgStatsSummaryQueryParamsSerializer], + parameters=[GlobalParams.ORG_ID_OR_SLUG, OrgStatsSummaryQueryParamsSerializer], request=None, responses={ 200: inline_sentry_response_serializer( diff --git a/src/sentry/api/endpoints/organization_stats_v2.py b/src/sentry/api/endpoints/organization_stats_v2.py index b7dd4002eb1695..31ae658a810996 100644 --- a/src/sentry/api/endpoints/organization_stats_v2.py +++ b/src/sentry/api/endpoints/organization_stats_v2.py @@ -151,7 +151,7 @@ class OrganizationStatsEndpointV2(OrganizationEndpoint): @extend_schema( operation_id="Retrieve Event Counts for an Organization (v2)", - parameters=[GlobalParams.ORG_SLUG, OrgStatsQueryParamsSerializer], + parameters=[GlobalParams.ORG_ID_OR_SLUG, OrgStatsQueryParamsSerializer], request=None, responses={ 200: inline_sentry_response_serializer("OutcomesResponse", StatsApiResponse), diff --git a/src/sentry/api/endpoints/organization_teams.py b/src/sentry/api/endpoints/organization_teams.py index 9123bea37c08d6..9ebb1023a90a07 100644 --- a/src/sentry/api/endpoints/organization_teams.py +++ b/src/sentry/api/endpoints/organization_teams.py @@ -80,7 +80,7 @@ def team_serializer_for_post(self): @extend_schema( operation_id="List an 
Organization's Teams", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, TeamParams.DETAILED, CursorQueryParam, ], @@ -161,7 +161,7 @@ def should_add_creator_to_team(self, request: Request): @extend_schema( operation_id="Create a New Team", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, ], request=TeamPostSerializer, responses={ diff --git a/src/sentry/api/endpoints/project_details.py b/src/sentry/api/endpoints/project_details.py index 6c6750d1c36ddf..adf9eb9571bb1d 100644 --- a/src/sentry/api/endpoints/project_details.py +++ b/src/sentry/api/endpoints/project_details.py @@ -456,7 +456,7 @@ def _get_unresolved_count(self, project): @extend_schema( operation_id="Retrieve a Project", - parameters=[GlobalParams.ORG_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], request=None, responses={ 200: DetailedProjectSerializer, @@ -500,7 +500,7 @@ def get(self, request: Request, project: Project) -> Response: @extend_schema( operation_id="Update a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ], request=ProjectAdminSerializer, @@ -891,7 +891,7 @@ def put(self, request: Request, project) -> Response: @extend_schema( operation_id="Delete a Project", - parameters=[GlobalParams.ORG_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], responses={ 204: RESPONSE_NO_CONTENT, 403: RESPONSE_FORBIDDEN, diff --git a/src/sentry/api/endpoints/project_filter_details.py b/src/sentry/api/endpoints/project_filter_details.py index e1fe9743ff5366..dac2c4140dc0ed 100644 --- a/src/sentry/api/endpoints/project_filter_details.py +++ b/src/sentry/api/endpoints/project_filter_details.py @@ -30,7 +30,7 @@ class ProjectFilterDetailsEndpoint(ProjectEndpoint): @extend_schema( operation_id="Update an Inbound Data Filter", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.FILTER_ID, ], diff --git a/src/sentry/api/endpoints/project_keys.py b/src/sentry/api/endpoints/project_keys.py index 0a3a176ecd600a..875997d4ae179c 100644 --- a/src/sentry/api/endpoints/project_keys.py +++ b/src/sentry/api/endpoints/project_keys.py @@ -33,7 +33,7 @@ class ProjectKeysEndpoint(ProjectEndpoint): @extend_schema( operation_id="List a Project's Client Keys", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, CursorQueryParam, ProjectParams.STATUS, @@ -72,7 +72,7 @@ def get(self, request: Request, project) -> Response: @extend_schema( operation_id="Create a New Client Key", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ], request=ProjectKeyPostSerializer, diff --git a/src/sentry/api/endpoints/project_ownership.py b/src/sentry/api/endpoints/project_ownership.py index 5d95590d10e12c..ea63426734b170 100644 --- a/src/sentry/api/endpoints/project_ownership.py +++ b/src/sentry/api/endpoints/project_ownership.py @@ -219,7 +219,7 @@ def rename_schema_identifier_for_parsing(self, ownership: ProjectOwnership) -> N @extend_schema( operation_id="Retrieve Ownership Configuration for a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ], request=None, @@ -241,7 +241,7 @@ def get(self, request: Request, project) -> Response: @extend_schema( operation_id="Update Ownership Configuration for a Project", 
parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ], request=ProjectOwnershipRequestSerializer, diff --git a/src/sentry/api/endpoints/project_rule_details.py b/src/sentry/api/endpoints/project_rule_details.py index b8effef82796ee..7d3df99858f625 100644 --- a/src/sentry/api/endpoints/project_rule_details.py +++ b/src/sentry/api/endpoints/project_rule_details.py @@ -108,7 +108,7 @@ class ProjectRuleDetailsEndpoint(RuleEndpoint): @extend_schema( operation_id="Retrieve an Issue Alert Rule for a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, IssueAlertParams.ISSUE_RULE_ID, ], @@ -208,7 +208,7 @@ def get(self, request: Request, project, rule) -> Response: @extend_schema( operation_id="Update an Issue Alert Rule", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, IssueAlertParams.ISSUE_RULE_ID, ], @@ -398,7 +398,7 @@ def put(self, request: Request, project, rule) -> Response: @extend_schema( operation_id="Delete an Issue Alert Rule", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, IssueAlertParams.ISSUE_RULE_ID, ], diff --git a/src/sentry/api/endpoints/project_rules.py b/src/sentry/api/endpoints/project_rules.py index 0dd61c56aa6896..d27d4767475a1c 100644 --- a/src/sentry/api/endpoints/project_rules.py +++ b/src/sentry/api/endpoints/project_rules.py @@ -681,7 +681,7 @@ class ProjectRulesEndpoint(ProjectEndpoint): @extend_schema( operation_id="List a Project's Issue Alert Rules", - parameters=[GlobalParams.ORG_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], request=None, responses={ 200: inline_sentry_response_serializer("ListRules", list[RuleSerializerResponse]), @@ -715,7 +715,7 @@ def get(self, request: Request, project) -> Response: @extend_schema( operation_id="Create an Issue Alert Rule for a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ], request=ProjectRulesPostSerializer, diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index ca733243714386..5fc813a9789915 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -1210,13 +1210,13 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-derive-code-mappings", ), re_path( - r"^(?P<organization_slug>[^\/]+)/code-mappings/(?P<config_id>[^\/]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/code-mappings/(?P<config_id>[^\/]+)/$", OrganizationCodeMappingDetailsEndpoint.as_view(), name="sentry-api-0-organization-code-mapping-details", ), # Codeowners re_path( - r"^(?P<organization_slug>[^\/]+)/code-mappings/(?P<config_id>[^\/]+)/codeowners/$", + r"^(?P<organization_id_or_slug>[^\/]+)/code-mappings/(?P<config_id>[^\/]+)/codeowners/$", OrganizationCodeMappingCodeOwnersEndpoint.as_view(), name="sentry-api-0-organization-code-mapping-codeowners", ), @@ -1278,12 +1278,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-dashboard-widget-details", ), re_path( - r"^(?P<organization_slug>[^\/]+)/dashboards/(?P<dashboard_id>[^\/]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/dashboards/(?P<dashboard_id>[^\/]+)/$", OrganizationDashboardDetailsEndpoint.as_view(), name="sentry-api-0-organization-dashboard-details", ), re_path( - r"^(?P<organization_slug>[^\/]+)/dashboards/(?P<dashboard_id>[^\/]+)/visit/$", + r"^(?P<organization_id_or_slug>[^\/]+)/dashboards/(?P<dashboard_id>[^\/]+)/visit/$", OrganizationDashboardVisitEndpoint.as_view(), name="sentry-api-0-organization-dashboard-visit", ), @@ -1383,7 +1383,7 @@ def
create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-sdks", ), re_path( - r"^(?P<organization_slug>[^\/]+)/events/$", + r"^(?P<organization_id_or_slug>[^\/]+)/events/$", OrganizationEventsEndpoint.as_view(), name="sentry-api-0-organization-events", ), @@ -1560,7 +1560,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-group-index-stats", ), re_path( - r"^(?P<organization_slug>[^\/]+)/integrations/$", + r"^(?P<organization_id_or_slug>[^\/]+)/integrations/$", OrganizationIntegrationsEndpoint.as_view(), name="sentry-api-0-organization-integrations", ), @@ -1590,7 +1590,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-integration-serverless-functions", ), re_path( - r"^(?P<organization_slug>[^\/]+)/members/$", + r"^(?P<organization_id_or_slug>[^\/]+)/members/$", OrganizationMemberIndexEndpoint.as_view(), name="sentry-api-0-organization-member-index", ), @@ -1600,7 +1600,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-external-user", ), re_path( - r"^(?P<organization_slug>[^\/]+)/external-users/(?P<external_user_id>[^\/]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/external-users/(?P<external_user_id>[^\/]+)/$", ExternalUserDetailsEndpoint.as_view(), name="sentry-api-0-organization-external-user-details", ), @@ -1621,17 +1621,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ), # Notification Actions re_path( - r"^(?P<organization_slug>[^\/]+)/notifications/actions/$", + r"^(?P<organization_id_or_slug>[^\/]+)/notifications/actions/$", NotificationActionsIndexEndpoint.as_view(), name="sentry-api-0-organization-notification-actions", ), re_path( - r"^(?P<organization_slug>[^\/]+)/notifications/actions/(?P<action_id>[^\/]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/notifications/actions/(?P<action_id>[^\/]+)/$", NotificationActionsDetailsEndpoint.as_view(), name="sentry-api-0-organization-notification-actions-details", ), re_path( - r"^(?P<organization_slug>[^\/]+)/notifications/available-actions/$", + r"^(?P<organization_id_or_slug>[^\/]+)/notifications/available-actions/$", NotificationActionsAvailableEndpoint.as_view(), name="sentry-api-0-organization-notification-available-actions", ), @@ -1693,7 +1693,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-recent-searches", ), re_path( - r"^(?P<organization_slug>[^\/]+)/searches/(?P<search_id>[^\/]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/searches/(?P<search_id>[^\/]+)/$", OrganizationSearchDetailsEndpoint.as_view(), name="sentry-api-0-organization-search-details", ), @@ -1703,7 +1703,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-searches", ), re_path( - r"^(?P<organization_slug>[^\/]+)/sessions/$", + r"^(?P<organization_id_or_slug>[^\/]+)/sessions/$", OrganizationSessionsEndpoint.as_view(), name="sentry-api-0-organization-sessions", ), @@ -1738,7 +1738,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-processing-issues", ), re_path( - r"^(?P<organization_slug>[^\/]+)/projects/$", + r"^(?P<organization_id_or_slug>[^\/]+)/projects/$", OrganizationProjectsEndpoint.as_view(), name="sentry-api-0-organization-projects", ), @@ -1904,17 +1904,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-stats", ), re_path( - r"^(?P<organization_slug>[^\/]+)/stats_v2/$", + r"^(?P<organization_id_or_slug>[^\/]+)/stats_v2/$", OrganizationStatsEndpointV2.as_view(), name="sentry-api-0-organization-stats-v2", ), re_path( - r"^(?P<organization_slug>[^\/]+)/stats-summary/$", + r"^(?P<organization_id_or_slug>[^\/]+)/stats-summary/$", OrganizationStatsSummaryEndpoint.as_view(), name="sentry-api-0-organization-stats-summary", ), re_path( - r"^(?P<organization_slug>[^\/]+)/teams/$", + r"^(?P<organization_id_or_slug>[^\/]+)/teams/$", OrganizationTeamsEndpoint.as_view(),
name="sentry-api-0-organization-teams", ), @@ -1955,7 +1955,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ), # relay usage re_path( - r"^(?P<organization_slug>[^\/]+)/relay_usage/$", + r"^(?P<organization_id_or_slug>[^\/]+)/relay_usage/$", OrganizationRelayUsage.as_view(), name="sentry-api-0-organization-relay-usage", ), @@ -2142,7 +2142,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-agnostic-rule-conditions", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/$", ProjectDetailsEndpoint.as_view(), name="sentry-api-0-project-details", ), @@ -2202,7 +2202,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-event-details", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/events/(?P<event_id>[\w-]+)/grouping-info/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/events/(?P<event_id>[\w-]+)/grouping-info/$", EventGroupingInfoEndpoint.as_view(), name="sentry-api-0-event-grouping-info", ), @@ -2302,7 +2302,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-filters", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/filters/(?P<filter_id>[\w-]+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/filters/(?P<filter_id>[\w-]+)/$", ProjectFilterDetailsEndpoint.as_view(), name="sentry-api-0-project-filters-details", ), @@ -2331,7 +2331,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-group-stats", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/keys/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/keys/$", ProjectKeysEndpoint.as_view(), name="sentry-api-0-project-keys", ), @@ -2435,7 +2435,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-artifact-lookup", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/rules/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/rules/$", ProjectRulesEndpoint.as_view(), name="sentry-api-0-project-rules", ), @@ -2480,7 +2480,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-rules-configuration", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/rules/(?P<rule_id>\d+)/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/rules/(?P<rule_id>\d+)/$", ProjectRuleDetailsEndpoint.as_view(), name="sentry-api-0-project-rule-details", ), @@ -2595,7 +2595,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-discard-processing-issues", ), re_path( - r"^(?P<organization_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/ownership/$", + r"^(?P<organization_id_or_slug>[^\/]+)/(?P<project_id_or_slug>[^\/]+)/ownership/$", ProjectOwnershipEndpoint.as_view(), name="sentry-api-0-project-ownership", ), diff --git a/tests/apidocs/endpoints/organizations/test_org_projects.py b/tests/apidocs/endpoints/organizations/test_org_projects.py index be3368470ede3c..dca73e35f01a57 100644 --- a/tests/apidocs/endpoints/organizations/test_org_projects.py +++ b/tests/apidocs/endpoints/organizations/test_org_projects.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-projects", - kwargs={"organization_slug": organization.slug}, + kwargs={"organization_id_or_slug": organization.slug}, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/organizations/test_org_stats_v2.py b/tests/apidocs/endpoints/organizations/test_org_stats_v2.py index 5a35acae960a71..d34bc0558c1124 100644 --- a/tests/apidocs/endpoints/organizations/test_org_stats_v2.py +++ b/tests/apidocs/endpoints/organizations/test_org_stats_v2.py @@ -29,7 +29,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-stats-v2", - kwargs={"organization_slug": self.organization.slug}, +
kwargs={"organization_id_or_slug": self.organization.slug}, ) def test_get(self): diff --git a/tests/apidocs/endpoints/releases/test_organization_sessions.py b/tests/apidocs/endpoints/releases/test_organization_sessions.py index 2373c954ebc581..0e746a27d53dd1 100644 --- a/tests/apidocs/endpoints/releases/test_organization_sessions.py +++ b/tests/apidocs/endpoints/releases/test_organization_sessions.py @@ -17,7 +17,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-sessions", - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/teams/test_index.py b/tests/apidocs/endpoints/teams/test_index.py index dd6e7f8d8a7126..e1caebfe85bdee 100644 --- a/tests/apidocs/endpoints/teams/test_index.py +++ b/tests/apidocs/endpoints/teams/test_index.py @@ -10,7 +10,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-teams", - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) self.login_as(user=self.user) diff --git a/tests/sentry/api/endpoints/test_event_grouping_info.py b/tests/sentry/api/endpoints/test_event_grouping_info.py index fe9259f261fee9..65c012381abf9b 100644 --- a/tests/sentry/api/endpoints/test_event_grouping_info.py +++ b/tests/sentry/api/endpoints/test_event_grouping_info.py @@ -32,7 +32,7 @@ def test_error_event(self): url = reverse( "sentry-api-0-event-grouping-info", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": event.event_id, }, @@ -51,7 +51,7 @@ def test_transaction_event(self): url = reverse( "sentry-api-0-event-grouping-info", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": event.event_id, }, @@ -69,7 +69,7 @@ def test_transaction_event_with_problem(self): url = reverse( "sentry-api-0-event-grouping-info", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": event.event_id, }, @@ -100,7 +100,7 @@ def test_no_event(self): url = reverse( "sentry-api-0-event-grouping-info", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "fake-event-id", }, diff --git a/tests/sentry/api/endpoints/test_organization_dashboard_details.py b/tests/sentry/api/endpoints/test_organization_dashboard_details.py index 64107c1601f17b..7e6d695baf6ab6 100644 --- a/tests/sentry/api/endpoints/test_organization_dashboard_details.py +++ b/tests/sentry/api/endpoints/test_organization_dashboard_details.py @@ -79,7 +79,10 @@ def setUp(self): def url(self, dashboard_id): return reverse( "sentry-api-0-organization-dashboard-details", - kwargs={"organization_slug": self.organization.slug, "dashboard_id": dashboard_id}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "dashboard_id": dashboard_id, + }, ) def assert_serialized_dashboard(self, data, dashboard): @@ -1901,7 +1904,10 @@ class OrganizationDashboardVisitTest(OrganizationDashboardDetailsTestCase): def url(self, dashboard_id): return reverse( "sentry-api-0-organization-dashboard-visit", - kwargs={"organization_slug": self.organization.slug, "dashboard_id": dashboard_id}, + 
kwargs={ + "organization_id_or_slug": self.organization.slug, + "dashboard_id": dashboard_id, + }, ) def test_visit_dashboard(self): diff --git a/tests/sentry/api/endpoints/test_project_details.py b/tests/sentry/api/endpoints/test_project_details.py index ca268868571c2b..1855e87aa51a62 100644 --- a/tests/sentry/api/endpoints/test_project_details.py +++ b/tests/sentry/api/endpoints/test_project_details.py @@ -269,7 +269,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -1597,7 +1597,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -1698,7 +1698,7 @@ def test_dynamic_sampling_bias_activation(self): url = reverse( "sentry-api-0-project-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -1742,7 +1742,7 @@ def test_dynamic_sampling_bias_deactivation(self): url = reverse( "sentry-api-0-project-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/sentry/api/endpoints/test_project_keys.py b/tests/sentry/api/endpoints/test_project_keys.py index 0e9a419511c53e..e668c07d9756b4 100644 --- a/tests/sentry/api/endpoints/test_project_keys.py +++ b/tests/sentry/api/endpoints/test_project_keys.py @@ -16,7 +16,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -36,7 +36,7 @@ def test_use_case(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -60,7 +60,7 @@ def test_use_case_superuser(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -86,7 +86,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -110,7 +110,7 @@ def test_minimal_args(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -130,7 +130,7 @@ def test_keys(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -148,7 +148,7 @@ def test_cannot_create_internal(self): url = reverse( "sentry-api-0-project-keys", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) diff --git 
a/tests/sentry/api/endpoints/test_project_ownership.py b/tests/sentry/api/endpoints/test_project_ownership.py index 4df1129b26b3ff..7a0b571de83670 100644 --- a/tests/sentry/api/endpoints/test_project_ownership.py +++ b/tests/sentry/api/endpoints/test_project_ownership.py @@ -39,7 +39,7 @@ def setUp(self): self.path = reverse( "sentry-api-0-project-ownership", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/sentry/api/test_organization_events.py b/tests/sentry/api/test_organization_events.py index 853b64677b7758..2fde9104f5246f 100644 --- a/tests/sentry/api/test_organization_events.py +++ b/tests/sentry/api/test_organization_events.py @@ -37,7 +37,7 @@ def client_get(self, *args, **kwargs): def reverse_url(self): return reverse( self.viewname, - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) def do_request(self, query, features=None, **kwargs): diff --git a/tests/sentry/integrations/github/test_ticket_action.py b/tests/sentry/integrations/github/test_ticket_action.py index 9ee8a639a61479..c6623d39edf7fc 100644 --- a/tests/sentry/integrations/github/test_ticket_action.py +++ b/tests/sentry/integrations/github/test_ticket_action.py @@ -102,7 +102,7 @@ def test_ticket_rules(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), @@ -163,7 +163,7 @@ def test_fails_validation(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), diff --git a/tests/sentry/integrations/github_enterprise/test_ticket_action.py b/tests/sentry/integrations/github_enterprise/test_ticket_action.py index 52bd35f4edca25..6192c56dbf50b7 100644 --- a/tests/sentry/integrations/github_enterprise/test_ticket_action.py +++ b/tests/sentry/integrations/github_enterprise/test_ticket_action.py @@ -113,7 +113,7 @@ def test_ticket_rules(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), @@ -173,7 +173,7 @@ def test_fails_validation(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), diff --git a/tests/sentry/integrations/jira/test_ticket_action.py b/tests/sentry/integrations/jira/test_ticket_action.py index df56681cc881e5..9ddc84b2853520 100644 --- a/tests/sentry/integrations/jira/test_ticket_action.py +++ b/tests/sentry/integrations/jira/test_ticket_action.py @@ -66,7 +66,7 @@ def test_ticket_rules(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), @@ -124,7 +124,7 @@ def test_fails_validation(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), diff --git a/tests/sentry/integrations/jira_server/test_ticket_action.py 
b/tests/sentry/integrations/jira_server/test_ticket_action.py index a3c31d440eab93..e6122aac9521df 100644 --- a/tests/sentry/integrations/jira_server/test_ticket_action.py +++ b/tests/sentry/integrations/jira_server/test_ticket_action.py @@ -157,7 +157,7 @@ def test_ticket_rules(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), @@ -215,7 +215,7 @@ def test_fails_validation(self): reverse( "sentry-api-0-project-rules", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ), diff --git a/tests/sentry/middleware/test_access_log_middleware.py b/tests/sentry/middleware/test_access_log_middleware.py index cc09e3363b24d2..c520bb1674d31f 100644 --- a/tests/sentry/middleware/test_access_log_middleware.py +++ b/tests/sentry/middleware/test_access_log_middleware.py @@ -89,12 +89,12 @@ def get(self, request, organization_context, organization): name="concurrent-ratelimit-endpoint", ), re_path( - r"^(?P<organization_slug>[^\/]+)/stats_v2/$", + r"^(?P<organization_id_or_slug>[^\/]+)/stats_v2/$", MyOrganizationEndpoint.as_view(), name="sentry-api-0-organization-stats-v2", ), re_path( - r"^(?P<organization_slug>[^\/]+)/members/$", + r"^(?P<organization_id_or_slug>[^\/]+)/members/$", MyControlOrganizationEndpoint.as_view(), name="sentry-api-0-organization-members", ), diff --git a/tests/sentry/release_health/test_metrics_sessions_v2.py b/tests/sentry/release_health/test_metrics_sessions_v2.py index e860ca2e429534..0f68ae568ac9d9 100644 --- a/tests/sentry/release_health/test_metrics_sessions_v2.py +++ b/tests/sentry/release_health/test_metrics_sessions_v2.py @@ -44,14 +44,14 @@ def do_request(self, query, user=None, org=None): self.login_as(user=user or self.user) url = reverse( "sentry-api-0-organization-sessions", - kwargs={"organization_slug": (org or self.organization1).slug}, + kwargs={"organization_id_or_slug": (org or self.organization1).slug}, ) return self.client.get(url, query, format="json") def get_sessions_data(self, groupby: list[str], interval): response = self.do_request( { - "organization_slug": [self.organization1], + "organization_id_or_slug": [self.organization1], "project": [self.project1.id], "field": ["sum(session)"], "groupBy": groupby, diff --git a/tests/snuba/api/endpoints/test_organization_events.py b/tests/snuba/api/endpoints/test_organization_events.py index c013d21445b1de..0a426935d8f6c1 100644 --- a/tests/snuba/api/endpoints/test_organization_events.py +++ b/tests/snuba/api/endpoints/test_organization_events.py @@ -62,7 +62,7 @@ def client_get(self, *args, **kwargs): def reverse_url(self): return reverse( self.viewname, - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) def do_request(self, query, features=None, **kwargs): diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index 30e45c0226c5e6..55f927b9da4c9d 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -69,7 +69,7 @@ def do_request(self, query, features=None): self.login_as(user=self.user) url = reverse( self.viewname, - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) with self.feature(features): return self.client.get(url, query, format="json") @@ -3268,7
+3268,9 @@ class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithOnDemandMetric def setUp(self) -> None: super().setUp() - self.url = reverse(self.viewname, kwargs={"organization_slug": self.organization.slug}) + self.url = reverse( + self.viewname, kwargs={"organization_id_or_slug": self.organization.slug} + ) self.features = {"organizations:on-demand-metrics-extraction-widgets": True} def _create_specs( diff --git a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py index 0798dfb618bafa..b8ffe049a0d0b6 100644 --- a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py +++ b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py @@ -36,7 +36,7 @@ def do_request(self, query, features=None): self.login_as(user=self.user) url = reverse( self.viewname, - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) with self.feature(features): return self.client.get(url, query, format="json") diff --git a/tests/snuba/api/endpoints/test_organization_sessions.py b/tests/snuba/api/endpoints/test_organization_sessions.py index 5a210e85c7b870..8019ce8f760bb9 100644 --- a/tests/snuba/api/endpoints/test_organization_sessions.py +++ b/tests/snuba/api/endpoints/test_organization_sessions.py @@ -133,7 +133,7 @@ def do_request(self, query, user=None, org=None): self.login_as(user=user or self.user) url = reverse( "sentry-api-0-organization-sessions", - kwargs={"organization_slug": (org or self.organization).slug}, + kwargs={"organization_id_or_slug": (org or self.organization).slug}, ) return self.client.get(url, query, format="json") @@ -1887,7 +1887,7 @@ def do_request(self, query, user=None, org=None): self.login_as(user=user or self.user) url = reverse( "sentry-api-0-organization-sessions", - kwargs={"organization_slug": (org or self.organization).slug}, + kwargs={"organization_id_or_slug": (org or self.organization).slug}, ) return self.client.get(url, query, format="json") diff --git a/tests/snuba/api/endpoints/test_organization_stats_summary.py b/tests/snuba/api/endpoints/test_organization_stats_summary.py index 2dfc792075be4d..8b1fe4bfdf0665 100644 --- a/tests/snuba/api/endpoints/test_organization_stats_summary.py +++ b/tests/snuba/api/endpoints/test_organization_stats_summary.py @@ -90,7 +90,7 @@ def do_request(self, query, user=None, org=None): self.login_as(user=user or self.user) url = reverse( "sentry-api-0-organization-stats-summary", - kwargs={"organization_slug": (org or self.organization).slug}, + kwargs={"organization_id_or_slug": (org or self.organization).slug}, ) return self.client.get(url, query, format="json") From bc24940ec0ae90ec68a3625e486355777036874e Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Fri, 10 May 2024 09:46:46 -0700 Subject: [PATCH 279/376] feat(crons): Add api for fetching checkin processing errors for an org (#70563) This allows us to return processing errors for organizations. 
They can be filtered by project ID. --- src/sentry/api/urls.py | 8 +++ ...ization_monitor_processing_errors_index.py | 57 +++++++++++++++++ src/sentry/monitors/processing_errors.py | 25 +++++--- ..._organization_monitor_processing_errors.py | 62 +++++++++++++++++++ .../sentry/monitors/test_processing_errors.py | 4 +- 5 files changed, 147 insertions(+), 9 deletions(-) create mode 100644 src/sentry/monitors/endpoints/organization_monitor_processing_errors_index.py create mode 100644 tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors.py diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 5fc813a9789915..b5fc3d8ba93822 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -648,6 +648,9 @@ __all__ = ("urlpatterns",) +from ..monitors.endpoints.organization_monitor_processing_errors_index import ( + OrganizationMonitorProcessingErrorsIndexEndpoint, +) # issues endpoints are available both top level (by numerical ID) as well as coupled # to the organization (and queryable via short ID) @@ -1646,6 +1649,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMonitorIndexStatsEndpoint.as_view(), name="sentry-api-0-organization-monitor-index-stats", ), + re_path( + r"^(?P<organization_id_or_slug>[^\/]+)/processing-errors/$", + OrganizationMonitorProcessingErrorsIndexEndpoint.as_view(), + name="sentry-api-0-organization-monitor-processing-errors-index", + ), re_path( r"^(?P<organization_id_or_slug>[^\/]+)/monitors-schedule-data/$", OrganizationMonitorScheduleSampleDataEndpoint.as_view(), diff --git a/src/sentry/monitors/endpoints/organization_monitor_processing_errors_index.py new file mode 100644 index 00000000000000..e0499d39393273 --- /dev/null +++ b/src/sentry/monitors/endpoints/organization_monitor_processing_errors_index.py @@ -0,0 +1,57 @@ +from drf_spectacular.utils import extend_schema +from rest_framework.response import Response + +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import region_silo_endpoint +from sentry.api.bases import OrganizationEndpoint +from sentry.api.paginator import SequencePaginator +from sentry.api.serializers import serialize +from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED +from sentry.apidocs.parameters import GlobalParams +from sentry.apidocs.utils import inline_sentry_response_serializer +from sentry.models.organization import Organization +from sentry.monitors.processing_errors import ( + CheckinProcessErrorsManager, + CheckinProcessingErrorData, +) +from sentry.utils.auth import AuthenticatedHttpRequest + + +@region_silo_endpoint +@extend_schema(tags=["Crons"]) +class OrganizationMonitorProcessingErrorsIndexEndpoint(OrganizationEndpoint): + publish_status = { + "GET": ApiPublishStatus.PRIVATE, + } + owner = ApiOwner.CRONS + + @extend_schema( + operation_id="Retrieve checkin processing errors for an Organization", + parameters=[ + GlobalParams.ORG_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + ], + responses={ + 200: inline_sentry_response_serializer( + "CheckinProcessingError", list[CheckinProcessingErrorData] + ), + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def get(self, request: AuthenticatedHttpRequest, organization: Organization) -> Response: + """ + Retrieves checkin processing errors for an organization + """ + projects = self.get_projects(request, organization) + paginator
= SequencePaginator( + list(enumerate(CheckinProcessErrorsManager().get_for_projects(projects))) + ) + + return self.paginate( + request=request, + paginator=paginator, + on_results=lambda results: serialize(results, request.user), + ) diff --git a/src/sentry/monitors/processing_errors.py b/src/sentry/monitors/processing_errors.py index 4e7924e8f34704..8390a15092b262 100644 --- a/src/sentry/monitors/processing_errors.py +++ b/src/sentry/monitors/processing_errors.py @@ -5,6 +5,7 @@ import uuid from datetime import timedelta from enum import Enum +from itertools import chain from typing import Any, TypedDict from django.conf import settings @@ -159,19 +160,29 @@ def build_monitor_identifier(self, monitor: Monitor) -> str: return f"monitor:{monitor.id}" def get_for_monitor(self, monitor: Monitor) -> list[CheckinProcessingError]: - return self._get_for_entity(self.build_monitor_identifier(monitor)) + return self._get_for_entities([self.build_monitor_identifier(monitor)]) def build_project_identifier(self, project_id: int) -> str: return f"project:{project_id}" - def get_for_project(self, project: Project) -> list[CheckinProcessingError]: - return self._get_for_entity(self.build_project_identifier(project.id)) + def get_for_projects(self, projects: list[Project]) -> list[CheckinProcessingError]: + return self._get_for_entities( + [self.build_project_identifier(project.id) for project in projects] + ) - def _get_for_entity(self, identifier: str) -> list[CheckinProcessingError]: + def _get_for_entities(self, identifiers: list[str]) -> list[CheckinProcessingError]: redis = self._get_cluster() - error_key = f"monitors.processing_errors.{identifier}" - raw_errors = redis.zrange(error_key, 0, MAX_ERRORS_PER_SET, desc=True) - return [CheckinProcessingError.from_dict(json.loads(raw_error)) for raw_error in raw_errors] + pipeline = redis.pipeline() + for identifier in identifiers: + pipeline.zrange( + f"monitors.processing_errors.{identifier}", 0, MAX_ERRORS_PER_SET, desc=True + ) + errors = [ + CheckinProcessingError.from_dict(json.loads(raw_error)) + for raw_error in chain(*pipeline.execute()) + ] + errors.sort(key=lambda error: error.checkin.ts.timestamp(), reverse=True) + return errors def handle_processing_errors(item: CheckinItem, error: CheckinValidationError): diff --git a/tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors.py b/tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors.py new file mode 100644 index 00000000000000..45b3ecd74aaf06 --- /dev/null +++ b/tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors.py @@ -0,0 +1,62 @@ +from sentry.api.serializers import serialize +from sentry.monitors.processing_errors import ( + CheckinProcessErrorsManager, + ProcessingError, + ProcessingErrorType, +) +from sentry.monitors.testutils import build_checkin_processing_error +from sentry.testutils.cases import APITestCase, MonitorTestCase +from sentry.utils import json + + +class OrganizationMonitorProcessingErrorsIndexEndpointTest(MonitorTestCase, APITestCase): + endpoint = "sentry-api-0-organization-monitor-processing-errors-index" + + def setUp(self): + super().setUp() + self.login_as(user=self.user) + + def test_empty(self): + resp = self.get_success_response(self.organization.slug) + assert resp.data == [] + + def test(self): + monitor = self.create_monitor() + project_2 = self.create_project() + + manager = CheckinProcessErrorsManager() + monitor_error = build_checkin_processing_error( + 
[ProcessingError(ProcessingErrorType.CHECKIN_INVALID_GUID, {"guid": "bad"})], + message_overrides={"project_id": self.project.id}, + payload_overrides={"monitor_slug": monitor.slug}, + ) + + project_errors = [ + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.ORGANIZATION_KILLSWITCH_ENABLED)], + message_overrides={"project_id": self.project.id}, + ), + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.MONITOR_DISABLED, {"some": "data"})], + message_overrides={"project_id": self.project.id}, + ), + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.MONITOR_DISABLED, {"some": "data"})], + message_overrides={"project_id": project_2.id}, + ), + ] + + manager.store(monitor_error, monitor) + for error in project_errors: + manager.store(error, None) + + resp = self.get_success_response( + self.organization.slug, project=[self.project.id, project_2.id] + ) + assert resp.data == json.loads(json.dumps(serialize(list(reversed(project_errors))))) + + resp = self.get_success_response(self.organization.slug, project=[self.project.id]) + assert resp.data == json.loads(json.dumps(serialize(list(reversed(project_errors[:2]))))) + + resp = self.get_success_response(self.organization.slug, project=[project_2.id]) + assert resp.data == json.loads(json.dumps(serialize(list(reversed(project_errors[2:]))))) diff --git a/tests/sentry/monitors/test_processing_errors.py b/tests/sentry/monitors/test_processing_errors.py index 3ca64bdd54ac68..354c59fe8e3c61 100644 --- a/tests/sentry/monitors/test_processing_errors.py +++ b/tests/sentry/monitors/test_processing_errors.py @@ -61,7 +61,7 @@ def test_store_with_slug_not_exist(self): ) manager.store(processing_error, None) - fetched_processing_error = manager.get_for_project(self.project) + fetched_processing_error = manager.get_for_projects([self.project]) assert len(fetched_processing_error) == 1 self.assert_processing_errors_equal(processing_error, fetched_processing_error[0]) @@ -107,7 +107,7 @@ def test_get_for_monitor_empty(self): def test_get_for_project(self): manager = CheckinProcessErrorsManager() - assert len(manager.get_for_project(self.project)) == 0 + assert len(manager.get_for_projects([self.project])) == 0 class HandleProcessingErrorsTest(TestCase): From 30d15df79bf79f026b10a429ff03e427397aca33 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Fri, 10 May 2024 13:18:58 -0400 Subject: [PATCH 280/376] fix(dashboard-templates): Change priority sort to date for issue widgets (#70598) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Priority doesn't seem to be a valid sort order for these anymore and date is the default order on the issues page.
Prevents the following error state from showing by default: ![Screenshot 2024-05-09 at 2 44 03 PM](https://github.com/getsentry/sentry/assets/22846452/cc42c7b6-0ef6-487a-a33d-7f1ae4f17c1a) --- static/app/views/dashboards/data.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/views/dashboards/data.tsx b/static/app/views/dashboards/data.tsx index aa538cf9f29b5e..e859932b185d29 100644 --- a/static/app/views/dashboards/data.tsx +++ b/static/app/views/dashboards/data.tsx @@ -467,7 +467,7 @@ export const DASHBOARDS_TEMPLATES: DashboardTemplate[] = [ aggregates: [], columns: ['assignee', 'issue', 'title'], conditions: 'assigned_or_suggested:me is:unresolved', - orderby: 'priority', + orderby: 'date', }, ], }, @@ -917,7 +917,7 @@ export const DASHBOARDS_TEMPLATES: DashboardTemplate[] = [ aggregates: [], columns: ['assignee', 'issue', 'title'], conditions: 'assigned_or_suggested:me is:unresolved', - orderby: 'priority', + orderby: 'date', }, ], }, From e259b96bc71da14ee30201f4faf60356e57e93f3 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 10 May 2024 13:24:19 -0400 Subject: [PATCH 281/376] chore(actor) Improve model interactions with Actor (#70581) Consolidate the team/user field management logic into the Model classes as much as possible. There is a leak to this abstraction when Model.update() is used. The leak stems from how `update()` works under the hood, as it uses QuerySet.update() to only update the provided attributes instead of all dirty attributes. --- .../api/endpoints/project_rule_details.py | 15 +------ src/sentry/api/endpoints/project_rules.py | 15 +------ .../api/serializers/models/alert_rule.py | 6 +-- src/sentry/api/serializers/models/rule.py | 5 +-- src/sentry/incidents/logic.py | 39 +++++++------------ src/sentry/incidents/models/alert_rule.py | 16 ++++++++ src/sentry/mediators/project_rules/creator.py | 8 ++-- src/sentry/mediators/project_rules/updater.py | 7 ++-- src/sentry/models/grouphistory.py | 16 ++++++++ src/sentry/models/rule.py | 16 ++++++++ src/sentry/monitors/serializers.py | 6 ++- src/sentry/types/actor.py | 18 +++++++-- .../api/endpoints/test_project_rules.py | 4 +- .../mediators/project_rules/test_creator.py | 7 +--- .../mediators/project_rules/test_updater.py | 10 ++--- tests/sentry/models/test_grouphistory.py | 30 ++++++++++++++ tests/sentry/types/test_actor.py | 4 +- 17 files changed, 134 insertions(+), 88 deletions(-) diff --git a/src/sentry/api/endpoints/project_rule_details.py b/src/sentry/api/endpoints/project_rule_details.py index 7d3df99858f625..bd5c1af76fb13c 100644 --- a/src/sentry/api/endpoints/project_rule_details.py +++ b/src/sentry/api/endpoints/project_rule_details.py @@ -39,8 +39,6 @@ ) from sentry.models.rule import NeglectedRule, RuleActivity, RuleActivityType from sentry.models.scheduledeletion import RegionScheduledDeletion -from sentry.models.team import Team -from sentry.models.user import User from sentry.rules.actions import trigger_sentry_app_action_creators_for_issues from sentry.rules.actions.utils import get_changed_data, get_updated_rule_data from sentry.signals import alert_rule_edited @@ -328,18 +326,7 @@ def put(self, request: Request, project, rule) -> Response: owner = data.get("owner") if owner: - try: - kwargs["owner_user_id"] = None - kwargs["owner_team_id"] = None - if owner.is_user: - kwargs["owner_user_id"] = owner.id - if owner.is_team: - kwargs["owner_team_id"] = owner.id - except (User.DoesNotExist, Team.DoesNotExist): - return Response( - "Could not resolve owner", -
status=status.HTTP_400_BAD_REQUEST, - ) + kwargs["owner"] = owner if rule.status == ObjectStatus.DISABLED: rule.status = ObjectStatus.ACTIVE diff --git a/src/sentry/api/endpoints/project_rules.py b/src/sentry/api/endpoints/project_rules.py index d27d4767475a1c..30a77b8728ba16 100644 --- a/src/sentry/api/endpoints/project_rules.py +++ b/src/sentry/api/endpoints/project_rules.py @@ -28,8 +28,6 @@ from sentry.integrations.slack.utils import RedisRuleStatus from sentry.mediators.project_rules.creator import Creator from sentry.models.rule import Rule, RuleActivity, RuleActivityType -from sentry.models.team import Team -from sentry.models.user import User from sentry.rules.actions import trigger_sentry_app_action_creators_for_issues from sentry.rules.actions.base import instantiate_action from sentry.rules.processing.processor import is_condition_slow @@ -824,18 +822,7 @@ def post(self, request: Request, project) -> Response: owner = data.get("owner") if owner: - try: - kwargs["owner_user_id"] = None - kwargs["owner_team_id"] = None - if owner.is_user: - kwargs["owner_user_id"] = owner.id - if owner.is_team: - kwargs["owner_team_id"] = owner.id - except (User.DoesNotExist, Team.DoesNotExist): - return Response( - "Could not resolve owner", - status=status.HTTP_400_BAD_REQUEST, - ) + kwargs["owner"] = owner if data.get("pending_save"): client = RedisRuleStatus() diff --git a/src/sentry/api/serializers/models/alert_rule.py b/src/sentry/api/serializers/models/alert_rule.py index f4fe8c0d63408f..b53f95be6cec0d 100644 --- a/src/sentry/api/serializers/models/alert_rule.py +++ b/src/sentry/api/serializers/models/alert_rule.py @@ -30,7 +30,6 @@ from sentry.services.hybrid_cloud.user import RpcUser from sentry.services.hybrid_cloud.user.service import user_service from sentry.snuba.models import SnubaQueryEventType -from sentry.types.actor import Actor logger = logging.getLogger(__name__) @@ -198,8 +197,9 @@ def get_attrs( result[item]["activations"] = serialize(activations, **kwargs) if item.user_id or item.team_id: - actor = Actor.from_id(user_id=item.user_id, team_id=item.team_id) - result[item]["owner"] = actor.identifier + actor = item.owner + if actor: + result[item]["owner"] = actor.identifier if "original_alert_rule" in self.expand: snapshot_activities = AlertRuleActivity.objects.filter( diff --git a/src/sentry/api/serializers/models/rule.py b/src/sentry/api/serializers/models/rule.py index 0823b22ff4a03b..1781840869ca78 100644 --- a/src/sentry/api/serializers/models/rule.py +++ b/src/sentry/api/serializers/models/rule.py @@ -10,7 +10,6 @@ from sentry.models.rulefirehistory import RuleFireHistory from sentry.models.rulesnooze import RuleSnooze from sentry.services.hybrid_cloud.user.service import user_service -from sentry.types.actor import Actor def generate_rule_label(project, rule, data): @@ -131,8 +130,8 @@ def get_attrs(self, item_list, user, **kwargs): ) for rule in rules.values(): - if rule.owner_team_id or rule.owner_user_id: - actor = Actor.from_id(user_id=rule.owner_user_id, team_id=rule.owner_team_id) + actor = rule.owner + if actor: result[rule]["owner"] = actor.identifier for action in rule.data.get("actions", []): diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py index 15366e1965e85a..c4e899681ae790 100644 --- a/src/sentry/incidents/logic.py +++ b/src/sentry/incidents/logic.py @@ -525,7 +525,7 @@ def create_alert_rule( if `include_all_projects` is True :param name: Name for the alert rule. 
This will be used as part of the incident name, and must be unique per project - :param owner: Actor (sentry.services.hybrid_cloud.actor.Actor) or None + :param owner: Actor (sentry.types.actor.Actor) or None :param query: An event search query to subscribe to and monitor for alerts :param aggregate: A string representing the aggregate used in this alert rule :param time_window: Time period to aggregate over, in minutes @@ -557,16 +557,6 @@ def create_alert_rule( resolution = resolution * DEFAULT_CMP_ALERT_RULE_RESOLUTION_MULTIPLIER comparison_delta = int(timedelta(minutes=comparison_delta).total_seconds()) - owner_user_id = None - owner_team_id = None - if owner and isinstance(owner, Actor): - if owner.is_user: - owner_user_id = owner.id - elif owner.is_team: - owner_team_id = owner.id - elif owner: - assert False, "Cannot create, invalid input type for owner" - with transaction.atomic(router.db_for_write(SnubaQuery)): # NOTE: `create_snuba_query` constructs the postgres representation of the snuba query snuba_query = create_snuba_query( @@ -589,8 +579,7 @@ def create_alert_rule( threshold_period=threshold_period, include_all_projects=include_all_projects, comparison_delta=comparison_delta, - user_id=owner_user_id, - team_id=owner_team_id, + owner=owner, monitor_type=monitor_type.value, ) @@ -714,7 +703,7 @@ def update_alert_rule( `include_all_projects` is True :param name: Name for the alert rule. This will be used as part of the incident name, and must be unique per project. - :param owner: Actor (sentry.services.hybrid_cloud.actor.Actor) or None + :param owner: Actor (sentry.types.actor.Actor) or None :param query: An event search query to subscribe to and monitor for alerts :param aggregate: A string representing the aggregate used in this alert rule :param time_window: Time period to aggregate over, in minutes. @@ -733,7 +722,7 @@ def update_alert_rule( comparison period. In minutes. :return: The updated `AlertRule` """ - updated_fields = {"date_modified": django_timezone.now()} + updated_fields: dict[str, Any] = {"date_modified": django_timezone.now()} updated_query_fields = {} if name: updated_fields["name"] = name @@ -762,17 +751,7 @@ def update_alert_rule( if event_types is not None: updated_query_fields["event_types"] = event_types if owner is not NOT_SET: - team_id = None - user_id = None - if owner and isinstance(owner, Actor): - if owner.is_user: - user_id = owner.id - elif owner.is_team: - team_id = owner.id - elif owner: - assert False, "Cannot update, invalid input type for owner" - updated_fields["team_id"] = team_id - updated_fields["user_id"] = user_id + updated_fields["owner"] = owner if comparison_delta is not NOT_SET: if comparison_delta is not None: # Since comparison alerts make twice as many queries, run the queries less frequently. 
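# --- Editorial note (illustrative sketch, not part of this patch): the
# --- Model.update() leak the commit message describes, assuming Django's
# --- QuerySet.update() semantics. A hypothetical sequence:
#
#     alert_rule.owner = actor       # the property only mutates the instance,
#                                    # setting user_id / team_id in memory
#     alert_rule.update(name="new")  # persists just the kwargs passed in, so
#                                    # the dirty user_id / team_id would be
#                                    # lost unless re-passed explicitly, as
#                                    # the next hunk does
# ---------------------------------------------------------------------------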
@@ -805,6 +784,14 @@ def update_alert_rule( incidents = Incident.objects.filter(alert_rule=alert_rule).exists() if incidents: snapshot_alert_rule(alert_rule, user) + + if "owner" in updated_fields: + alert_rule.owner = updated_fields.pop("owner", None) + # This is clunky but Model.update() uses QuerySet.update() + # and doesn't persist other dirty attributes in the model + updated_fields["user_id"] = alert_rule.user_id + updated_fields["team_id"] = alert_rule.team_id + alert_rule.update(**updated_fields) AlertRuleActivity.objects.create( alert_rule=alert_rule, diff --git a/src/sentry/incidents/models/alert_rule.py b/src/sentry/incidents/models/alert_rule.py index 06f44d1f7ab8ae..746e5e66b1aa5b 100644 --- a/src/sentry/incidents/models/alert_rule.py +++ b/src/sentry/incidents/models/alert_rule.py @@ -36,6 +36,7 @@ from sentry.services.hybrid_cloud.user.service import user_service from sentry.snuba.models import QuerySubscription from sentry.snuba.subscriptions import bulk_create_snuba_subscriptions, delete_snuba_subscription +from sentry.types.actor import Actor from sentry.utils import metrics logger = logging.getLogger(__name__) @@ -287,6 +288,21 @@ def created_by_id(self): pass return None + @property + def owner(self) -> Actor | None: + """Part of ActorOwned Protocol""" + return Actor.from_id(user_id=self.user_id, team_id=self.team_id) + + @owner.setter + def owner(self, actor: Actor | None) -> None: + """Part of ActorOwned Protocol""" + self.team_id = None + self.user_id = None + if actor and actor.is_user: + self.user_id = actor.id + if actor and actor.is_team: + self.team_id = actor.id + def get_audit_log_data(self): return {"label": self.name} diff --git a/src/sentry/mediators/project_rules/creator.py b/src/sentry/mediators/project_rules/creator.py index 3994b6c6fc631b..2f77edcc0433c4 100644 --- a/src/sentry/mediators/project_rules/creator.py +++ b/src/sentry/mediators/project_rules/creator.py @@ -5,13 +5,13 @@ from sentry.mediators.param import Param from sentry.models.project import Project from sentry.models.rule import Rule +from sentry.types.actor import Actor class Creator(Mediator): name = Param(str) environment = Param(int, required=False) - owner_team_id = Param(int, required=False) - owner_user_id = Param(int, required=False) + owner = Param(Actor, required=False) project = Param(Project) action_match = Param(str) filter_match = Param(str, required=False) @@ -28,6 +28,7 @@ def call(self): def _create_rule(self): kwargs = self._get_kwargs() rule = Rule.objects.create(**kwargs) + return rule def _get_kwargs(self): @@ -40,10 +41,9 @@ def _get_kwargs(self): } _kwargs = { "label": self.name, - "owner_team_id": self.owner_team_id, - "owner_user_id": self.owner_user_id, "environment_id": self.environment or None, "project": self.project, "data": data, + "owner": self.owner, } return _kwargs diff --git a/src/sentry/mediators/project_rules/updater.py b/src/sentry/mediators/project_rules/updater.py index 11fdb1da85b84c..e897de27564529 100644 --- a/src/sentry/mediators/project_rules/updater.py +++ b/src/sentry/mediators/project_rules/updater.py @@ -5,13 +5,13 @@ from sentry.mediators.param import Param from sentry.models.project import Project from sentry.models.rule import Rule +from sentry.types.actor import Actor class Updater(Mediator): rule = Param(Rule) name = Param(str, required=False) - owner_team_id = Param(int, required=False) - owner_user_id = Param(int, required=False) + owner = Param(Actor, required=False) environment = Param(int, required=False) project = Param(Project) 
action_match = Param(str, required=False) @@ -40,8 +40,7 @@ def _update_name(self): self.rule.label = self.name def _update_owner(self) -> None: - self.rule.owner_user_id = self.owner_user_id - self.rule.owner_team_id = self.owner_team_id + self.rule.owner = self.owner def _update_environment(self): self.rule.environment_id = self.environment diff --git a/src/sentry/models/grouphistory.py b/src/sentry/models/grouphistory.py index c3e0e1cad392b5..c905c3f039d811 100644 --- a/src/sentry/models/grouphistory.py +++ b/src/sentry/models/grouphistory.py @@ -17,6 +17,7 @@ ) from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.types.activity import ActivityType +from sentry.types.actor import Actor from sentry.types.group import GROUP_SUBSTATUS_TO_GROUP_HISTORY_STATUS if TYPE_CHECKING: @@ -224,6 +225,21 @@ class Meta: __repr__ = sane_repr("group_id", "release_id") + @property + def owner(self) -> Actor | None: + """Part of ActorOwned protocol""" + return Actor.from_id(user_id=self.user_id, team_id=self.team_id) + + @owner.setter + def owner(self, actor: Actor | None) -> None: + """Part of ActorOwned protocol""" + self.team_id = None + self.user_id = None + if actor and actor.is_user: + self.user_id = actor.id + if actor and actor.is_team: + self.team_id = actor.id + def get_prev_history(group, status): """ diff --git a/src/sentry/models/rule.py b/src/sentry/models/rule.py index 49249299d76c86..3c794bad53408c 100644 --- a/src/sentry/models/rule.py +++ b/src/sentry/models/rule.py @@ -18,6 +18,7 @@ ) from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.manager import BaseManager +from sentry.types.actor import Actor from sentry.utils.cache import cache @@ -105,6 +106,21 @@ def created_by_id(self): return None + @property + def owner(self) -> Actor | None: + """Part of ActorOwned Protocol""" + return Actor.from_id(user_id=self.owner_user_id, team_id=self.owner_team_id) + + @owner.setter + def owner(self, actor: Actor | None) -> None: + """Part of ActorOwned Protocol""" + self.owner_team_id = None + self.owner_user_id = None + if actor and actor.is_user: + self.owner_user_id = actor.id + if actor and actor.is_team: + self.owner_team_id = actor.id + def delete(self, *args, **kwargs): rv = super().delete(*args, **kwargs) self._clear_project_rule_cache() diff --git a/src/sentry/monitors/serializers.py b/src/sentry/monitors/serializers.py index e0e08ec0e64f3f..52790f21a15365 100644 --- a/src/sentry/monitors/serializers.py +++ b/src/sentry/monitors/serializers.py @@ -185,10 +185,12 @@ def get_attrs(self, item_list, user, **kwargs): actors.extend( [Actor.from_id(team_id=m.owner_team_id) for m in item_list if m.owner_team_id] ) + filtered_actors = list(filter(None, actors)) - actors_serialized = serialize(Actor.resolve_many(actors), user, ActorSerializer()) + actors_serialized = serialize(Actor.resolve_many(filtered_actors), user, ActorSerializer()) actor_data = { - actor: serialized_actor for actor, serialized_actor in zip(actors, actors_serialized) + actor: serialized_actor + for actor, serialized_actor in zip(filtered_actors, actors_serialized) } monitor_environments = ( diff --git a/src/sentry/types/actor.py b/src/sentry/types/actor.py index b029e0f3784739..899e5572ed3655 100644 --- a/src/sentry/types/actor.py +++ b/src/sentry/types/actor.py @@ -1,7 +1,7 @@ from collections import defaultdict from collections.abc import Iterable, MutableMapping, Sequence from enum import Enum -from typing import TYPE_CHECKING, Any, 
Union, overload +from typing import TYPE_CHECKING, Any, Protocol, Union, overload from django.core.exceptions import ObjectDoesNotExist from rest_framework import serializers @@ -201,14 +201,14 @@ def from_identifier(cls, id: str | int | None) -> "Actor | None": raise cls.InvalidActor(f"Unable to resolve actor identifier: {e}") @classmethod - def from_id(cls, user_id: int | None = None, team_id: int | None = None) -> "Actor": + def from_id(cls, user_id: int | None = None, team_id: int | None = None) -> "Actor | None": if user_id and team_id: raise cls.InvalidActor("You can only provide one of user_id and team_id") if user_id: return cls(id=user_id, actor_type=ActorType.USER) if team_id: return cls(id=team_id, actor_type=ActorType.TEAM) - raise cls.InvalidActor("You must provide one of user_id and team_id") + return None def __post_init__(self) -> None: if not self.is_team and self.slug is not None: @@ -259,6 +259,18 @@ def is_user(self) -> bool: return self.actor_type == ActorType.USER +class ActorOwned(Protocol): + """Protocol for objects that are owned by Actor but need to store ownership in discrete columns""" + + @property + def owner(self) -> Actor | None: + ... + + @owner.setter + def owner(self, actor: Actor | None) -> None: + ... + + def parse_and_validate_actor(actor_identifier: str | None, organization_id: int) -> Actor | None: from sentry.models.organizationmember import OrganizationMember from sentry.models.team import Team diff --git a/tests/sentry/api/endpoints/test_project_rules.py b/tests/sentry/api/endpoints/test_project_rules.py index fa3073de493c54..899196c16bc1e1 100644 --- a/tests/sentry/api/endpoints/test_project_rules.py +++ b/tests/sentry/api/endpoints/test_project_rules.py @@ -24,6 +24,7 @@ from sentry.testutils.cases import APITestCase from sentry.testutils.helpers import install_slack, with_feature from sentry.testutils.silo import assume_test_silo_mode +from sentry.types.actor import Actor class ProjectRuleBaseTestCase(APITestCase): @@ -766,8 +767,7 @@ def test_kicks_off_slack_async_job( "actions": payload.get("actions", []), "frequency": payload.get("frequency"), "user_id": self.user.id, - "owner_user_id": self.user.id, - "owner_team_id": None, + "owner": Actor.from_id(user_id=self.user.id), "uuid": "abc123", } call_args = mock_find_channel_id_for_alert_rule.call_args[1]["kwargs"] diff --git a/tests/sentry/mediators/project_rules/test_creator.py b/tests/sentry/mediators/project_rules/test_creator.py index ae901711d0635e..f104c6e8d7f127 100644 --- a/tests/sentry/mediators/project_rules/test_creator.py +++ b/tests/sentry/mediators/project_rules/test_creator.py @@ -1,8 +1,7 @@ from sentry.mediators.project_rules.creator import Creator from sentry.models.rule import Rule -from sentry.models.user import User from sentry.testutils.cases import TestCase -from sentry.testutils.silo import assume_test_silo_mode_of +from sentry.types.actor import Actor class TestCreator(TestCase): @@ -13,11 +12,9 @@ def setUp(self): teams=[self.create_team()], name="foo", fire_project_created=True ) - with assume_test_silo_mode_of(User): - self.user = User.objects.get(id=self.user.id) self.creator = Creator( name="New Cool Rule", - owner_user_id=self.user.id, + owner=Actor.from_id(user_id=self.user.id), project=self.project, action_match="all", filter_match="any", diff --git a/tests/sentry/mediators/project_rules/test_updater.py b/tests/sentry/mediators/project_rules/test_updater.py index 82190f0dc0e54c..97118a4316c611 100644 --- a/tests/sentry/mediators/project_rules/test_updater.py +++ 
b/tests/sentry/mediators/project_rules/test_updater.py @@ -2,6 +2,7 @@ from sentry.models.user import User from sentry.testutils.cases import TestCase from sentry.testutils.silo import assume_test_silo_mode_of +from sentry.types.actor import Actor class TestUpdater(TestCase): @@ -20,8 +21,7 @@ def test_update_name(self): assert self.rule.label == "Cool New Rule" def test_update_owner(self): - self.updater.owner_team_id = None - self.updater.owner_user_id = self.user.id + self.updater.owner = Actor.from_id(user_id=self.user.id) self.updater.call() with assume_test_silo_mode_of(User): self.user = User.objects.get(id=self.user.id) @@ -29,14 +29,12 @@ def test_update_owner(self): assert self.rule.owner_team_id is None team = self.create_team() - self.updater.owner_team_id = team.id - self.updater.owner_user_id = None + self.updater.owner = Actor.from_id(team_id=team.id) self.updater.call() assert self.rule.owner_team_id == team.id assert self.rule.owner_user_id is None - self.updater.owner_user_id = None - self.updater.owner_team_id = None + self.updater.owner = None self.updater.call() assert self.rule.owner_team_id is None assert self.rule.owner_user_id is None diff --git a/tests/sentry/models/test_grouphistory.py b/tests/sentry/models/test_grouphistory.py index dab52090cababb..17d937ce8b1ef3 100644 --- a/tests/sentry/models/test_grouphistory.py +++ b/tests/sentry/models/test_grouphistory.py @@ -2,10 +2,40 @@ from sentry.models.grouphistory import GroupHistory, GroupHistoryStatus, get_prev_history from sentry.testutils.cases import TestCase from sentry.testutils.skips import requires_snuba +from sentry.types.actor import Actor pytestmark = requires_snuba +class GroupHistoryTest(TestCase): + def test_owner(self): + team = self.create_team() + + GroupAssignee.objects.assign(self.group, self.user) + history = GroupHistory.objects.filter(group_id=self.group.id).first() + assert history + + actor = Actor.from_id(user_id=self.user.id) + assert actor + history.owner = actor + + owner = history.owner + assert owner + assert owner.identifier == actor.identifier + assert history.user_id == self.user.id + assert history.team_id is None + + actor = Actor.from_id(team_id=team.id) + assert actor + history.owner = actor + owner = history.owner + + assert owner + assert owner.identifier == actor.identifier + assert history.team_id == team.id + assert history.user_id is None + + class FilterToTeamTest(TestCase): def test(self): GroupAssignee.objects.assign(self.group, self.user) diff --git a/tests/sentry/types/test_actor.py b/tests/sentry/types/test_actor.py index 471e36334d45e5..8a0a85ac10a68d 100644 --- a/tests/sentry/types/test_actor.py +++ b/tests/sentry/types/test_actor.py @@ -78,10 +78,10 @@ def test_from_id(): assert actor.id == 11 assert actor.actor_type == ActorType.USER + assert Actor.from_id(user_id=None) is None + with pytest.raises(Actor.InvalidActor): Actor.from_id(user_id=11, team_id=99) - with pytest.raises(Actor.InvalidActor): - Actor.from_id(user_id=None) @django_db_all(transaction=True) From 6b7d6b472f1c1ca6edf69975f724aab476ce02b7 Mon Sep 17 00:00:00 2001 From: Athena Moghaddam <132939361+sentaur-athena@users.noreply.github.com> Date: Fri, 10 May 2024 10:24:59 -0700 Subject: [PATCH 282/376] fix(platform): Using the correct name (#70627) SDK developers recommended using `OS` in the name.
--- static/app/components/events/contextSummary/contextIcon.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/app/components/events/contextSummary/contextIcon.tsx b/static/app/components/events/contextSummary/contextIcon.tsx index 5f71db987a3f9e..d3e4631c9827fc 100644 --- a/static/app/components/events/contextSummary/contextIcon.tsx +++ b/static/app/components/events/contextSummary/contextIcon.tsx @@ -61,7 +61,7 @@ const LOGO_MAPPING = { 'net-core': logoNetcore, 'net-framework': logoNetframework, 'qq-browser': logoQq, - nintendo: logoNintendo, + 'nintendo-os': logoNintendo, amazon: logoAmazon, amd: logoAmd, android: logoAndroid, From 45a2cb1436488d834636568db99881e29e98698a Mon Sep 17 00:00:00 2001 From: Stephen Cefali Date: Fri, 10 May 2024 10:36:43 -0700 Subject: [PATCH 283/376] feat(snuba-search): adds handling for additional filters for useGroupSnubaDataset=1 (#70607) This PR adds the capability to filter on the following new attributes: * regressed_in_release * release.build * release.package * release.dist All of them require Postgres queries, but each is handled differently. For `regressed_in_release`, we use a Postgres query to generate the list of group IDs that have a regression group history matching the versions we check for. For the other three, we get the matching release versions from Postgres and use them in a snuba query that checks for events with those release versions. Note that I am leveraging `convert_search_filter_to_snuba_query` to generate the filters, which requires some name mapping that I've added, plus logic to skip querying `search_issues` when filters use columns that don't exist in it. Also added `for_review`, which uses the standard Postgres path for generating group ids, and support for `times_seen`. --- .../endpoints/organization_group_index.py | 8 +- src/sentry/search/snuba/backend.py | 2 +- src/sentry/search/snuba/executors.py | 82 ++++++- .../test_organization_group_index.py | 230 +++++++++++++++++- 4 files changed, 307 insertions(+), 15 deletions(-) diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py index 57f85ca63456c7..2c4c3f4455c74b 100644 --- a/src/sentry/issues/endpoints/organization_group_index.py +++ b/src/sentry/issues/endpoints/organization_group_index.py @@ -51,12 +51,8 @@ # these filters are currently not supported in the snuba only search # and will use PostgresSnubaQueryExecutor instead of GroupAttributesPostgresSnubaQueryExecutor UNSUPPORTED_SNUBA_FILTERS = [ - "regressed_in_release", - "issue.priority", - "firstRelease", - "release.build", - "release.package", - "release.dist", + "issue.priority", # coming soon + "firstRelease", # coming soon ] diff --git a/src/sentry/search/snuba/backend.py b/src/sentry/search/snuba/backend.py index 3315f26d0bb1b7..26539b637ba40d 100644 --- a/src/sentry/search/snuba/backend.py +++ b/src/sentry/search/snuba/backend.py @@ -496,7 +496,7 @@ def query( if use_group_snuba_dataset: # we need to handle two cases for the group queryset: # 1. Limit results to groups that are not pending deletion or merge - # 2.
Handle queries snuba doesn't support such as bookmarked_by, linked, subscribed_by, etc # For the second case, we hit postgres before Snuba to get the group ids group_queryset = self._build_limited_group_queryset(projects, search_filters) diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py index 8f94f3dcb957f0..cf9af8c88e303b 100644 --- a/src/sentry/search/snuba/executors.py +++ b/src/sentry/search/snuba/executors.py @@ -57,6 +57,7 @@ from sentry.models.project import Project from sentry.models.team import Team from sentry.models.user import User +from sentry.rules.conditions.event_attribute import ATTR_CHOICES from sentry.search.events.builder.discover import UnresolvedQuery from sentry.search.events.datasets.discover import DiscoverDatasetConfig from sentry.search.events.filter import convert_search_filter_to_snuba_query, format_search_filter @@ -95,7 +96,14 @@ class Clauses(Enum): # we cannot use snuba for these fields because they require a join with tables that don't exist there # if we ever see these fields, we will use postgres to get the group_ids before sending back to ClickHouse -POSTGRES_ONLY_SEARCH_FIELDS = ["bookmarked_by", "linked", "subscribed_by"] +# note that we could eventually migrate the releases table to ClickHouse and handle those with a join in ClickHouse +POSTGRES_ONLY_SEARCH_FIELDS = [ + "bookmarked_by", + "linked", + "subscribed_by", + "regressed_in_release", + "for_review", +] @dataclass @@ -1141,6 +1149,15 @@ class InvalidQueryForExecutor(Exception): class GroupAttributesPostgresSnubaQueryExecutor(PostgresSnubaQueryExecutor): + def get_times_seen_filter( + self, search_filter: SearchFilter, joined_entity: Entity + ) -> Condition: + return Condition( + Function("count", []), + Op(search_filter.operator), + search_filter.value.raw_value, + ) + def get_last_seen_filter(self, search_filter: SearchFilter, joined_entity: Entity) -> Condition: # get the max timestamp of the error/search_issue event return Condition( @@ -1172,13 +1189,48 @@ def get_basic_group_snuba_condition( ) def get_basic_event_snuba_condition( - self, search_filter: SearchFilter, joined_entity: Entity + self, + search_filter: SearchFilter, + joined_entity: Entity, + organization_id: int, + project_ids: Sequence[int], + environments: Sequence[str], ) -> Condition: """ Returns the basic lookup for a search filter. 
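    Legacy conditions come back from ``convert_search_filter_to_snuba_query``
    as ``[column, operator, value]`` triples (a single triple or a list of
    them); each triple is remapped onto a snql ``Condition`` against the
    joined entity after name mapping, e.g. (illustrative):

        ["stack.filename", "=", "example.py"]
        -> Condition(Column("stacktrace.filename", joined_entity), Op.EQ, "example.py")

    (with the column further swapped for its ATTR_CHOICES event column when
    one is registered for that name).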
""" - query_builder = self.def_get_query_builder(joined_entity) - return query_builder.default_filter_converter(search_filter) + # note this might hit postgres to do queries on releases + raw_conditions = convert_search_filter_to_snuba_query( + search_filter, + params={ + "organization_id": organization_id, + "project_id": project_ids, + "environment": environments, + }, + ) + if not raw_conditions: + return None + + item = raw_conditions[0] + if not isinstance(item, list): + raw_conditions = [raw_conditions] + + output_conditions = [] + for item in raw_conditions: + column_name = item[0] + # do some name mapping for snuba + column_name = column_name.replace("stack.", "stacktrace.") + if ATTR_CHOICES.get(column_name) is not None: + raw_column = ATTR_CHOICES.get(column_name) + column_name = raw_column.value.event_name + + column = Column(column_name, joined_entity) + operator = Op(item[1]) + value = item[2] + output_conditions.append(Condition(column, operator, value)) + if len(output_conditions) == 1: + return output_conditions[0] + return BooleanCondition(op=BooleanOp.AND, conditions=output_conditions) def get_assigned( self, search_filter: SearchFilter, joined_entity: Entity, check_none=True @@ -1462,6 +1514,7 @@ def get_last_seen_aggregation(self, joined_entity: Entity) -> Function: "message": (get_message_condition, Clauses.WHERE), "first_seen": (get_first_seen_filter, Clauses.WHERE), "last_seen": (get_last_seen_filter, Clauses.HAVING), + "times_seen": (get_times_seen_filter, Clauses.HAVING), } first_seen = Column("group_first_seen", entities["attrs"]) times_seen_aggregation = Function("count", [], alias="times_seen") @@ -1481,6 +1534,19 @@ def get_sort_defs(self, entity): "user": "user_count", } + def should_check_search_issues( + self, group_categories: Sequence[str], search_filters: Sequence[SearchFilter] + ) -> bool: + # not in the group categories we are looking for + if not any([GroupCategory.ERROR.value != gc for gc in group_categories]): + return False + # error/stacktrace info doesn't exist exist in search_issues so we shouldn't it at all + bad_prefix_list = ["error.", "stack."] + for filter in search_filters: + if any(filter.key.name.startswith(bad_prefix) for bad_prefix in bad_prefix_list): + return False + return True + def calculate_start_end( self, retention_window_start: datetime | None, @@ -1555,6 +1621,7 @@ def query( ] organization = projects[0].organization + project_ids = [p.id for p in projects] event_entity = self.entities["event"] attr_entity = self.entities["attrs"] @@ -1568,7 +1635,7 @@ def query( entities_to_check.append(event_entity) # check we have non-error categories to search for - if any([GroupCategory.ERROR.value != gc for gc in group_categories]): + if self.should_check_search_issues(group_categories, search_filters): entities_to_check.append(search_issues_entity) for joined_entity in entities_to_check: @@ -1615,7 +1682,9 @@ def query( raise InvalidQueryForExecutor(f"Invalid clause {clause}") else: where_conditions.append( - self.get_basic_event_snuba_condition(search_filter, joined_entity) + self.get_basic_event_snuba_condition( + search_filter, joined_entity, organization.id, project_ids, environments + ) ) # handle types based on issue.type and issue.category @@ -1639,7 +1708,6 @@ def query( sort_func = self.get_sort_defs(joined_entity)[sort_by] if environments: - # TODO: Should this be handled via filter_keys, once we have a snql compatible version? 
where_conditions.append( Condition( Column("environment", joined_entity), Op.IN, [e.name for e in environments] diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py index 2e3b810c0876bb..d0a837479cacb2 100644 --- a/tests/sentry/issues/endpoints/test_organization_group_index.py +++ b/tests/sentry/issues/endpoints/test_organization_group_index.py @@ -2777,7 +2777,6 @@ def test_snuba_assignee_filter(self) -> None: def test_snuba_unsupported_filters(self) -> None: self.login_as(user=self.user) for query in [ - "regressed_in_release:latest", "issue.priority:high", ]: with patch( @@ -3255,6 +3254,235 @@ def test_filter_by_subscribed_by(self): ) assert len(response.data) == 0 + @override_options({"issues.group_attributes.send_kafka": True}) + def test_snuba_search_lookup_by_regressed_in_release(self): + self.login_as(self.user) + project = self.project + release = self.create_release() + event = self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=1)), + "tags": {"sentry:release": release.version}, + }, + project_id=project.id, + ) + record_group_history(event.group, GroupHistoryStatus.REGRESSED, release=release) + response = self.get_success_response( + query=f"regressed_in_release:{release.version}", useGroupSnubaDataset=1 + ) + issues = json.loads(response.content) + assert [int(issue["id"]) for issue in issues] == [event.group.id] + + @override_options({"issues.group_attributes.send_kafka": True}) + def test_lookup_by_release_build(self): + + for i in range(3): + j = 119 + i + self.create_release(version=f"steve@1.2.{i}+{j}") + + self.login_as(self.user) + project = self.project + release = self.create_release(version="steve@1.2.7+123") + event = self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=1)), + "tags": {"sentry:release": release.version}, + }, + project_id=project.id, + ) + + response = self.get_success_response(query="release.build:123", useGroupSnubaDataset=1) + issues = json.loads(response.content) + assert len(issues) == 1 + assert int(issues[0]["id"]) == event.group.id + + response = self.get_success_response(query="release.build:122", useGroupSnubaDataset=1) + issues = json.loads(response.content) + assert len(issues) == 0 + + @override_options({"issues.group_attributes.send_kafka": True}) + def test_snuba_search_lookup_by_stack_filename(self): + self.login_as(self.user) + project = self.project + event = self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=1)), + "fingerprint": ["unique-fingerprint-1"], + "exception": { + "values": [ + { + "type": "Error", + "stacktrace": { + "frames": [ + { + "filename": "example.py", + "lineno": 29, + "colno": 10, + "function": "test_function", + } + ] + }, + } + ] + }, + }, + project_id=project.id, + ) + self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=2)), + "fingerprint": ["unique-fingerprint-2"], + "exception": { + "values": [ + { + "type": "Error", + "stacktrace": { + "frames": [ + { + "filename": "different_example.py", + "lineno": 45, + "colno": 10, + "function": "another_test_function", + } + ] + }, + } + ] + }, + }, + project_id=project.id, + ) + + response = self.get_success_response( + query="stack.filename:example.py", useGroupSnubaDataset=1 + ) + issues = json.loads(response.content) + assert len(issues) == 1 + assert int(issues[0]["id"]) == event.group.id + response = self.get_success_response( + query="stack.filename:nonexistent.py", useGroupSnubaDataset=1 + ) 
+ issues = json.loads(response.content) + assert len(issues) == 0 + + @override_options({"issues.group_attributes.send_kafka": True}) + def test_error_main_thread_condition(self): + self.login_as(user=self.user) + project = self.project + # Simulate sending an event with main_thread set to true + event1 = self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=1)), + "message": "MainThreadError", + "exception": { + "values": [ + { + "type": "Error", + "value": "Error in main thread", + "thread_id": 1, + } + ] + }, + "threads": {"values": [{"id": 1, "main": True}]}, + }, + project_id=project.id, + ) + # Simulate sending an event with main_thread set to false + event2 = self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=2)), + "message": "WorkerThreadError", + "exception": { + "values": [ + { + "type": "Error", + "value": "Error in worker thread", + "thread_id": 2, + } + ] + }, + "threads": {"values": [{"id": 2, "main": False}]}, + }, + project_id=project.id, + ) + + # Query for events where main_thread is true + response = self.get_success_response(query="error.main_thread:true", useGroupSnubaDataset=1) + issues = json.loads(response.content) + assert len(issues) == 1 + assert int(issues[0]["id"]) == event1.group.id + + # Query for events where main_thread is false + response = self.get_success_response( + query="error.main_thread:false", useGroupSnubaDataset=1 + ) + issues = json.loads(response.content) + assert len(issues) == 1 + assert int(issues[0]["id"]) == event2.group.id + + @override_options({"issues.group_attributes.send_kafka": True}) + def test_snuba_heavy_search_aggregate_stats_regression_test(self): + self.store_event( + data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, + project_id=self.project.id, + ) + + self.login_as(user=self.user) + response = self.get_response( + sort_by="date", + limit=10, + query="times_seen:>0 last_seen:-1h", + useGroupSnubaDataset=1, + ) + + assert response.status_code == 200 + assert len(response.data) == 1 + + @override_options({"issues.group_attributes.send_kafka": True}) + def test_snuba_heavy_search_inbox_search(self): + self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=200)), + "fingerprint": ["group-1"], + "tags": {"server": "example.com", "trace": "woof", "message": "foo"}, + }, + project_id=self.project.id, + ) + + event = self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=200)), + "fingerprint": ["group-2"], + "tags": {"server": "example.com", "trace": "woof", "message": "foo"}, + }, + project_id=self.project.id, + ) + + self.store_event( + data={ + "timestamp": iso_format(before_now(seconds=200)), + "fingerprint": ["group-3"], + "tags": {"server": "example.com", "trace": "woof", "message": "foo"}, + }, + project_id=self.project.id, + ) + + add_group_to_inbox(event.group, GroupInboxReason.NEW) + + self.login_as(user=self.user) + response = self.get_response( + sort_by="date", + limit=10, + query="is:unresolved is:for_review", + expand=["inbox"], + useGroupSnubaDataset=1, + ) + assert response.status_code == 200 + assert len(response.data) == 1 + assert int(response.data[0]["id"]) == event.group.id + assert response.data[0]["inbox"] is not None + assert response.data[0]["inbox"]["reason"] == GroupInboxReason.NEW.value + class GroupUpdateTest(APITestCase, SnubaTestCase): endpoint = "sentry-api-0-organization-group-index" From 96178b2a9205dec890c1d072bcf6a0866df1fecb Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 
2024 13:37:46 -0400 Subject: [PATCH 284/376] ref(router): Remove ability to pass `to` as a function (#70669) We don't use this and it's not supported in react-router 6 --- .../feedback/list/feedbackListItem.tsx | 16 ++++---- static/app/components/links/link.tsx | 4 +- static/app/utils/withDomainRequired.spec.tsx | 38 ------------------- static/app/utils/withDomainRequired.tsx | 20 +++------- .../views/replays/replayTable/tableCell.tsx | 4 +- 5 files changed, 16 insertions(+), 66 deletions(-) diff --git a/static/app/components/feedback/list/feedbackListItem.tsx b/static/app/components/feedback/list/feedbackListItem.tsx index 911592ab2b1e14..73099836fe69c6 100644 --- a/static/app/components/feedback/list/feedbackListItem.tsx +++ b/static/app/components/feedback/list/feedbackListItem.tsx @@ -65,15 +65,13 @@ const FeedbackListItem = forwardRef( { - return { - pathname: normalizeUrl(`/organizations/${organization.slug}/feedback/`), - query: { - ...location.query, - referrer: 'feedback_list_page', - feedbackSlug: `${feedbackItem.project.slug}:${feedbackItem.id}`, - }, - }; + to={{ + pathname: normalizeUrl(`/organizations/${organization.slug}/feedback/`), + query: { + ...location.query, + referrer: 'feedback_list_page', + feedbackSlug: `${feedbackItem.project.slug}:${feedbackItem.id}`, + }, }} onClick={() => { trackAnalytics('feedback.list-item-selected', {organization}); diff --git a/static/app/components/links/link.tsx b/static/app/components/links/link.tsx index 772408ffeb4f09..091a29def19330 100644 --- a/static/app/components/links/link.tsx +++ b/static/app/components/links/link.tsx @@ -1,7 +1,7 @@ import {forwardRef} from 'react'; import {Link as RouterLink} from 'react-router'; import styled from '@emotion/styled'; -import type {Location, LocationDescriptor} from 'history'; +import type {LocationDescriptor} from 'history'; import {useLocation} from 'sentry/utils/useLocation'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; @@ -22,7 +22,7 @@ export interface LinkProps * work in environments that do have customer-domains (saas) and those without * customer-domains (single-tenant). 
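* With the function form removed, only plain descriptors are accepted, e.g.
* to="/issues/" or to={{pathname: '/issues/', query: {...}}}.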
*/ - to: ((location: Location) => LocationDescriptor) | LocationDescriptor; + to: LocationDescriptor; /** * Style applied to the component's root */ diff --git a/static/app/utils/withDomainRequired.spec.tsx b/static/app/utils/withDomainRequired.spec.tsx index 3bf32c108b846b..7e01711a971042 100644 --- a/static/app/utils/withDomainRequired.spec.tsx +++ b/static/app/utils/withDomainRequired.spec.tsx @@ -1,5 +1,4 @@ import type {RouteComponentProps} from 'react-router'; -import type {Location, LocationDescriptor, LocationDescriptorObject} from 'history'; import {LocationFixture} from 'sentry-fixture/locationFixture'; import {OrganizationFixture} from 'sentry-fixture/organization'; @@ -182,43 +181,6 @@ describe('normalizeUrl', function () { ); expect(result.pathname).toEqual('/issues'); }); - - it('replaces pathname in function callback', function () { - const location = LocationFixture(); - function objectCallback(_loc: Location): LocationDescriptorObject { - return {pathname: '/settings/'}; - } - result = normalizeUrl(objectCallback, location); - expect(result.pathname).toEqual('/settings/'); - - function stringCallback(_loc: Location): LocationDescriptor { - return '/organizations/a-long-slug/discover/'; - } - result = normalizeUrl(stringCallback, location); - expect(result).toEqual('/discover/'); - - // Normalizes urls if options.customerDomain is true and orgslug.sentry.io isn't being used - window.__initialData.customerDomain = null; - - function objectCallback2(_loc: Location): LocationDescriptorObject { - return {pathname: '/settings/'}; - } - result = normalizeUrl(objectCallback2, location, {forceCustomerDomain: true}); - expect(result.pathname).toEqual('/settings/'); - - function stringCallback2(_loc: Location): LocationDescriptor { - return '/organizations/a-long-slug/discover/'; - } - result = normalizeUrl(stringCallback2, location, {forceCustomerDomain: true}); - expect(result).toEqual('/discover/'); - }); - - it('errors on functions without location', function () { - function objectCallback(_loc: Location): LocationDescriptorObject { - return {pathname: '/settings/organization'}; - } - expect(() => normalizeUrl(objectCallback)).toThrow(); - }); }); const originalLocation = window.location; diff --git a/static/app/utils/withDomainRequired.tsx b/static/app/utils/withDomainRequired.tsx index 4635349e6ebf24..1b2fee1e989b63 100644 --- a/static/app/utils/withDomainRequired.tsx +++ b/static/app/utils/withDomainRequired.tsx @@ -25,8 +25,6 @@ const NORMALIZE_PATTERNS: Array<[pattern: RegExp, replacement: string]> = [ [/^\/?accept-terms\/[^\/]*\/?$/, '/accept-terms/'], ]; -type LocationTarget = ((location: Location) => LocationDescriptor) | LocationDescriptor; - type NormalizeUrlOptions = { forceCustomerDomain: boolean; }; @@ -43,16 +41,16 @@ export function normalizeUrl( ): LocationDescriptor; export function normalizeUrl( - path: LocationTarget, + path: LocationDescriptor, location?: Location, options?: NormalizeUrlOptions -): LocationTarget; +): LocationDescriptor; export function normalizeUrl( - path: LocationTarget, + path: LocationDescriptor, location?: Location | NormalizeUrlOptions, options?: NormalizeUrlOptions -): LocationTarget { +): LocationDescriptor { if (location && 'forceCustomerDomain' in location) { options = location; location = undefined; @@ -62,15 +60,7 @@ export function normalizeUrl( return path; } - let resolved: LocationDescriptor; - if (typeof path === 'function') { - if (!location) { - throw new Error('Cannot resolve function URL without a location'); - } - 
resolved = path(location); } else { resolved = path; } + let resolved = path; if (typeof resolved === 'string') { for (const patternData of NORMALIZE_PATTERNS) { diff --git a/static/app/views/replays/replayTable/tableCell.tsx b/static/app/views/replays/replayTable/tableCell.tsx index 1d7e57928b842d..cac04870e9282e 100644 --- a/static/app/views/replays/replayTable/tableCell.tsx +++ b/static/app/views/replays/replayTable/tableCell.tsx @@ -360,7 +360,7 @@ export function ReplayCell({ {/* Avatar is used instead of ProjectBadge because using ProjectBadge increases spacing, which doesn't look as good */} {project ? : null} {project ? project.slug : null} - + {getShortEventId(replay.id)} @@ -382,7 +382,7 @@ export function ReplayCell({ replay.user.display_name || t('Anonymous User') ) : ( From cfe669041793f4afaae9f9f0444d19428e1155fc Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Fri, 10 May 2024 10:41:52 -0700 Subject: [PATCH 285/376] feat(crons): Update processing errors storage to store only identifiers in the set (#70635) We need to support deleting processing errors when a user dismisses them, and the current data storage format doesn't support that well. This modifies the format to store only the guid in the set, so we can easily look it up and delete it. The full error is stored in a separate key. There's a chance that the set might contain errors that have TTLed out - we don't care if this happens, since these are too old to be actionable, so we just skip rows with no data. --- src/sentry/monitors/processing_errors.py | 39 ++++++++++++------- ...zation_monitor_processing_errors_index.py} | 0 ...roject_monitor_processing_errors_index.py} | 0 .../sentry/monitors/test_processing_errors.py | 29 ++++++++++++++ 4 files changed, 54 insertions(+), 14 deletions(-) rename tests/sentry/monitors/endpoints/{test_organization_monitor_processing_errors.py => test_organization_monitor_processing_errors_index.py} (100%) rename tests/sentry/monitors/endpoints/{test_project_monitor_processing_errors.py => test_project_monitor_processing_errors_index.py} (100%) diff --git a/src/sentry/monitors/processing_errors.py b/src/sentry/monitors/processing_errors.py index 8390a15092b262..ef152be9ff25b5 100644 --- a/src/sentry/monitors/processing_errors.py +++ b/src/sentry/monitors/processing_errors.py @@ -5,7 +5,6 @@ import uuid from datetime import timedelta from enum import Enum -from itertools import chain from typing import Any, TypedDict from django.conf import settings @@ -22,7 +21,7 @@ logger = logging.getLogger(__name__) MAX_ERRORS_PER_SET = 10 -MONITOR_ERRORS_LIFETIME = timedelta(days=1) +MONITOR_ERRORS_LIFETIME = timedelta(days=7) class ProcessingErrorType(Enum): @@ -142,20 +141,28 @@ def store(self, error: CheckinProcessingError, monitor: Monitor | None): except Monitor.DoesNotExist: pass if monitor: - error_identifier = self.build_monitor_identifier(monitor) + entity_identifier = self.build_monitor_identifier(monitor) else: - error_identifier = self.build_project_identifier(error.checkin.message["project_id"]) + entity_identifier = self.build_project_identifier(error.checkin.message["project_id"]) - error_key = f"monitors.processing_errors.{error_identifier}" + error_set_key = self.build_set_identifier(entity_identifier) + error_key = self.build_error_identifier(entity_identifier, error.id) serialized_error = json.dumps(error.to_dict()) redis_client = self._get_cluster() pipeline = redis_client.pipeline(transaction=False) - pipeline.zadd(error_key, {serialized_error: error.checkin.ts.timestamp()}) +
pipeline.zadd(error_set_key, {error.id.hex: error.checkin.ts.timestamp()}) + pipeline.set(error_key, serialized_error, ex=MONITOR_ERRORS_LIFETIME) # Cap the error list to the `MAX_ERRORS_PER_SET` most recent errors - pipeline.zremrangebyrank(error_key, 0, -(MAX_ERRORS_PER_SET + 1)) - pipeline.expire(error_key, MONITOR_ERRORS_LIFETIME) + pipeline.zremrangebyrank(error_set_key, 0, -(MAX_ERRORS_PER_SET + 1)) + pipeline.expire(error_set_key, MONITOR_ERRORS_LIFETIME) pipeline.execute() + def build_set_identifier(self, entity_identifier: str) -> str: + return f"monitors.processing_errors_set.{entity_identifier}" + + def build_error_identifier(self, entity_identifier: str, uuid: uuid.UUID) -> str: + return f"monitors.processing_errors.{entity_identifier}.{uuid.hex}" + def build_monitor_identifier(self, monitor: Monitor) -> str: return f"monitor:{monitor.id}" @@ -170,16 +177,20 @@ def get_for_projects(self, projects: list[Project]) -> list[CheckinProcessingErr [self.build_project_identifier(project.id) for project in projects] ) - def _get_for_entities(self, identifiers: list[str]) -> list[CheckinProcessingError]: + def _get_for_entities(self, entity_identifiers: list[str]) -> list[CheckinProcessingError]: redis = self._get_cluster() pipeline = redis.pipeline() - for identifier in identifiers: - pipeline.zrange( - f"monitors.processing_errors.{identifier}", 0, MAX_ERRORS_PER_SET, desc=True - ) + for identifier in entity_identifiers: + pipeline.zrange(self.build_set_identifier(identifier), 0, MAX_ERRORS_PER_SET, desc=True) + error_identifiers = [ + self.build_error_identifier(entity_identifier, uuid.UUID(error_identifier)) + for entity_identifier, error_identifiers in zip(entity_identifiers, pipeline.execute()) + for error_identifier in error_identifiers + ] errors = [ CheckinProcessingError.from_dict(json.loads(raw_error)) - for raw_error in chain(*pipeline.execute()) + for raw_error in redis.mget(error_identifiers) + if raw_error is not None ] errors.sort(key=lambda error: error.checkin.ts.timestamp(), reverse=True) return errors diff --git a/tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors.py b/tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors_index.py similarity index 100% rename from tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors.py rename to tests/sentry/monitors/endpoints/test_organization_monitor_processing_errors_index.py diff --git a/tests/sentry/monitors/endpoints/test_project_monitor_processing_errors.py b/tests/sentry/monitors/endpoints/test_project_monitor_processing_errors_index.py similarity index 100% rename from tests/sentry/monitors/endpoints/test_project_monitor_processing_errors.py rename to tests/sentry/monitors/endpoints/test_project_monitor_processing_errors_index.py diff --git a/tests/sentry/monitors/test_processing_errors.py b/tests/sentry/monitors/test_processing_errors.py index 354c59fe8e3c61..b2a6576406260f 100644 --- a/tests/sentry/monitors/test_processing_errors.py +++ b/tests/sentry/monitors/test_processing_errors.py @@ -109,6 +109,35 @@ def test_get_for_project(self): manager = CheckinProcessErrorsManager() assert len(manager.get_for_projects([self.project])) == 0 + def test_get_missing_data(self): + # Validate that we don't error if a processing error has expired but is still + # in the set + monitor = self.create_monitor() + manager = CheckinProcessErrorsManager() + processing_errors = [ + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.CHECKIN_INVALID_GUID, 
{"guid": "bad"})], + message_overrides={"project_id": self.project.id}, + payload_overrides={"monitor_slug": monitor.slug}, + ), + build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.MONITOR_DISABLED, {"some": "data"})], + message_overrides={"project_id": self.project.id}, + payload_overrides={"monitor_slug": monitor.slug}, + ), + ] + for processing_error in processing_errors: + manager.store(processing_error, monitor) + redis = manager._get_cluster() + redis.delete( + manager.build_error_identifier( + manager.build_monitor_identifier(monitor), processing_errors[0].id + ) + ) + fetched_processing_error = manager.get_for_monitor(monitor) + assert len(fetched_processing_error) == 1 + self.assert_processing_errors_equal(processing_errors[1], fetched_processing_error[0]) + class HandleProcessingErrorsTest(TestCase): def test(self): From b63ab1ecb2443100581c558db1d272222191a3fc Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Fri, 10 May 2024 14:10:59 -0400 Subject: [PATCH 286/376] ref: remove sentry.utils.functional (#70660) many of the functions were only used once so I moved them to their consuming modules --- .../installation/external_issue/actions.py | 18 ++++- src/sentry/api/serializers/models/activity.py | 13 ++-- src/sentry/grouping/enhancer/matchers.py | 26 +++++-- src/sentry/mediators/mediator.py | 11 +-- src/sentry/utils/functional.py | 73 ------------------- tests/sentry/grouping/test_enhancer.py | 17 ++++- tests/sentry/utils/test_functional.py | 33 --------- 7 files changed, 64 insertions(+), 127 deletions(-) delete mode 100644 src/sentry/utils/functional.py delete mode 100644 tests/sentry/utils/test_functional.py diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py b/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py index ccfbff07aaa180..f5ae82ca2a24ca 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py @@ -1,3 +1,4 @@ +from django.utils.functional import empty from rest_framework.request import Request from rest_framework.response import Response @@ -11,7 +12,20 @@ from sentry.models.project import Project from sentry.models.user import User from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user -from sentry.utils.functional import extract_lazy_object + + +def _extract_lazy_object(lo): + """ + Unwrap a LazyObject and return the inner object. Whatever that may be. + + ProTip: This is relying on `django.utils.functional.empty`, which may + or may not be removed in the future. It's 100% undocumented. 
+ """ + if not hasattr(lo, "_wrapped"): + return lo + if lo._wrapped is empty: + lo._setup() + return lo._wrapped @region_silo_endpoint @@ -42,7 +56,7 @@ def post(self, request: Request, installation) -> Response: uri = data.pop("uri") try: - user = extract_lazy_object(request.user) + user = _extract_lazy_object(request.user) if isinstance(user, User): user = serialize_rpc_user(user) diff --git a/src/sentry/api/serializers/models/activity.py b/src/sentry/api/serializers/models/activity.py index ee5b5de9f2bb06..a78174789b43e1 100644 --- a/src/sentry/api/serializers/models/activity.py +++ b/src/sentry/api/serializers/models/activity.py @@ -1,5 +1,3 @@ -import functools - from sentry.api.serializers import Serializer, register, serialize from sentry.api.serializers.models.commit import CommitWithReleaseSerializer from sentry.models.activity import Activity @@ -9,7 +7,6 @@ from sentry.services.hybrid_cloud.user.serial import serialize_generic_user from sentry.services.hybrid_cloud.user.service import user_service from sentry.types.activity import ActivityType -from sentry.utils.functional import apply_values @register(Activity) @@ -65,9 +62,9 @@ def get_attrs(self, item_list, user): else: pull_requests = {} - groups = apply_values( - functools.partial(serialize, user=user), - Group.objects.in_bulk( + groups = { + k: serialize(v, user=user) + for k, v in Group.objects.in_bulk( { i.data["source_id"] for i in item_list @@ -78,8 +75,8 @@ def get_attrs(self, item_list, user): for i in item_list if i.type == ActivityType.UNMERGE_SOURCE.value } - ), - ) + ).items() + } return { item: { diff --git a/src/sentry/grouping/enhancer/matchers.py b/src/sentry/grouping/enhancer/matchers.py index 45c98b5af6f3e2..61472f1f10cff3 100644 --- a/src/sentry/grouping/enhancer/matchers.py +++ b/src/sentry/grouping/enhancer/matchers.py @@ -4,12 +4,28 @@ from sentry.stacktraces.functions import get_function_name_for_frame from sentry.stacktraces.platform import get_behavior_family_for_platform from sentry.utils import metrics -from sentry.utils.functional import cached from sentry.utils.glob import glob_match from sentry.utils.safe import get_path from .exceptions import InvalidEnhancerConfig + +def _cached(cache, function, *args, **kwargs): + """Calls ``function`` or retrieves its return value from the ``cache``. + + This is similar to ``functools.cache``, but uses a custom cache instead + of a global one. The cache can be shared between multiple functions. 
+ """ + key = (function, args, tuple(sorted(kwargs.items()))) + + if key in cache: + rv = cache[key] + else: + rv = cache[key] = function(*args) + + return rv + + MATCH_KEYS = { "path": "p", "function": "f", @@ -190,7 +206,7 @@ def _to_config_structure(self, version): def path_like_match(pattern, value): - """Stand-alone function for use with ``cached``""" + """Stand-alone function for use with ``_cached``""" if glob_match(value, pattern, ignorecase=False, doublestar=True, path_normalize=True): return True if not value.startswith(b"/") and glob_match( @@ -213,7 +229,7 @@ def _positive_frame_match(self, match_frame, exception_data, cache): if value is None: return False - return cached(cache, path_like_match, self._encoded_pattern, value) + return _cached(cache, path_like_match, self._encoded_pattern, value) class PackageMatch(PathLikeMatch): @@ -254,7 +270,7 @@ def _positive_frame_match(self, match_frame, exception_data, cache): if field == self._encoded_pattern: return True - return cached(cache, glob_match, field, self._encoded_pattern) + return _cached(cache, glob_match, field, self._encoded_pattern) class FunctionMatch(FrameFieldMatch): @@ -281,7 +297,7 @@ def matches_frame(self, frames, idx, exception_data, cache): def _positive_frame_match(self, frame_data, exception_data, cache): field = get_path(exception_data, *self.field_path) or "" - return cached(cache, glob_match, field, self._encoded_pattern) + return _cached(cache, glob_match, field, self._encoded_pattern) class ExceptionTypeMatch(ExceptionFieldMatch): diff --git a/src/sentry/mediators/mediator.py b/src/sentry/mediators/mediator.py index aa7bddef2ea07c..a661827401cf66 100644 --- a/src/sentry/mediators/mediator.py +++ b/src/sentry/mediators/mediator.py @@ -8,7 +8,6 @@ from django.utils.functional import cached_property import sentry -from sentry.utils.functional import compact from .param import Param @@ -232,13 +231,15 @@ def _default_logging(self): request_params = env.request.resolver_match.kwargs - return compact( - { + return { + k: v + for k, v in { "org": request_params.get("organization_slug"), "team": request_params.get("team_slug"), "project": request_params.get("project_slug"), - } - ) + }.items() + if v is not None + } @property def _logging_context(self): diff --git a/src/sentry/utils/functional.py b/src/sentry/utils/functional.py deleted file mode 100644 index 2da0dc45f43978..00000000000000 --- a/src/sentry/utils/functional.py +++ /dev/null @@ -1,73 +0,0 @@ -from django.utils.functional import empty - - -def extract_lazy_object(lo): - """ - Unwrap a LazyObject and return the inner object. Whatever that may be. - - ProTip: This is relying on `django.utils.functional.empty`, which may - or may not be removed in the future. It's 100% undocumented. - """ - if not hasattr(lo, "_wrapped"): - return lo - if lo._wrapped is empty: - lo._setup() - return lo._wrapped - - -def apply_values(function, mapping): - """\ - Applies ``function`` to a sequence containing all of the values in the - provided mapping, returing a new mapping with the values replaced with - the results of the provided function. - - >>> apply_values( - ... lambda values: map(u'{} fish'.format, values), - ... {1: 'red', 2: 'blue'}, - ... ) - {1: u'red fish', 2: u'blue fish'} - """ - if not mapping: - return {} - - keys, values = zip(*mapping.items()) - return dict(zip(keys, function(values))) - - -def compact(seq): - """ - Removes ``None`` values from various sequence-based data structures. - - dict: - Removes keys with a corresponding ``None`` value. 
- - list: - Removes ``None`` values. - - >>> compact({'foo': 'bar', 'baz': None}) - {'foo': 'bar'} - - >>> compact([1, None, 2]) - [1, 2] - """ - if isinstance(seq, dict): - return {k: v for k, v in seq.items() if v is not None} - - elif isinstance(seq, list): - return [k for k in seq if k is not None] - - -def cached(cache, function, *args, **kwargs): - """Calls ``function`` or retrieves its return value from the ``cache``. - - This is similar to ``functools.cache``, but uses a custom cache instead - of a global one. The cache can be shared between multiple functions. - """ - key = (function, args, tuple(sorted(kwargs.items()))) - - if key in cache: - rv = cache[key] - else: - rv = cache[key] = function(*args) - - return rv diff --git a/tests/sentry/grouping/test_enhancer.py b/tests/sentry/grouping/test_enhancer.py index f9fd43ac2a9ebc..d8ad80a2b3a53c 100644 --- a/tests/sentry/grouping/test_enhancer.py +++ b/tests/sentry/grouping/test_enhancer.py @@ -1,13 +1,14 @@ from __future__ import annotations from typing import Any +from unittest import mock import pytest from sentry.grouping.component import GroupingComponent from sentry.grouping.enhancer import Enhancements from sentry.grouping.enhancer.exceptions import InvalidEnhancerConfig -from sentry.grouping.enhancer.matchers import create_match_frame +from sentry.grouping.enhancer.matchers import _cached, create_match_frame def dump_obj(obj): @@ -494,3 +495,17 @@ def test_app_no_matches(frame): enhancements = Enhancements.from_config_string("app:no +app") enhancements.apply_modifications_to_frame([frame], "native", {}) assert frame.get("in_app") + + +def test_cached_with_kwargs(): + """Order of kwargs should not matter""" + + foo = mock.Mock() + + cache: dict[object, object] = {} + _cached(cache, foo, kw1=1, kw2=2) + assert foo.call_count == 1 + + # Call with different kwargs order - call_count is still one: + _cached(cache, foo, kw2=2, kw1=1) + assert foo.call_count == 1 diff --git a/tests/sentry/utils/test_functional.py b/tests/sentry/utils/test_functional.py deleted file mode 100644 index 378c1ba26503f5..00000000000000 --- a/tests/sentry/utils/test_functional.py +++ /dev/null @@ -1,33 +0,0 @@ -from __future__ import annotations - -from unittest import TestCase, mock - -from sentry.utils.functional import cached, compact - - -class CompactTest(TestCase): - def test_none(self): - assert compact({"foo": None, "bar": 1}) == {"bar": 1} - - def test_zero(self): - assert compact({"foo": 0}) == {"foo": 0} - - def test_false(self): - assert compact({"foo": False}) == {"foo": False} - - def test_empty_string(self): - assert compact({"foo": ""}) == {"foo": ""} - - -def test_cached_with_kwargs(): - """Order of kwargs should not matter""" - - foo = mock.Mock() - - cache: dict[object, object] = {} - cached(cache, foo, kw1=1, kw2=2) - assert foo.call_count == 1 - - # Call with different kwargs order - call_count is still one: - cached(cache, foo, kw2=2, kw1=1) - assert foo.call_count == 1 From 3cde9dfb37d4350127eeaaa880697cb567acd3bf Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Fri, 10 May 2024 14:16:42 -0400 Subject: [PATCH 287/376] ref: fix types for utils.locking (#70611) --- pyproject.toml | 3 +-- src/sentry/locks.py | 7 +++-- src/sentry/tasks/post_process.py | 9 +++++-- src/sentry/utils/locking/backends/__init__.py | 6 ++--- .../utils/locking/backends/migration.py | 16 +++++------ src/sentry/utils/locking/lock.py | 23 +++++++++++----- src/sentry/utils/locking/manager.py | 3 ++- 
src/sentry/utils/services.py | 27 ++++++++++++++++--- .../consumers/test_monitor_consumer.py | 4 --- 9 files changed, 64 insertions(+), 34 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 895ead1f7853a5..ca64efdea039f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -467,7 +467,6 @@ module = [ "sentry.utils.committers", "sentry.utils.distutils.commands.base", "sentry.utils.distutils.commands.build_assets", - "sentry.utils.locking.backends.migration", "sentry.utils.services", "sentry.utils.snowflake", "sentry.web.forms.accounts", @@ -621,7 +620,7 @@ module = [ "sentry.utils.imports", "sentry.utils.iterators", "sentry.utils.javascript", - "sentry.utils.locking.backends.redis", + "sentry.utils.locking.*", "sentry.utils.migrations", "sentry.utils.numbers", "sentry.utils.otp", diff --git a/src/sentry/locks.py b/src/sentry/locks.py index ad35daf6981b6b..d41b38392f60f0 100644 --- a/src/sentry/locks.py +++ b/src/sentry/locks.py @@ -1,6 +1,9 @@ from django.conf import settings +from sentry.utils.locking.backends import LockBackend from sentry.utils.locking.manager import LockManager -from sentry.utils.services import build_instance_from_options +from sentry.utils.services import build_instance_from_options_of_type -locks = LockManager(build_instance_from_options(settings.SENTRY_DEFAULT_LOCKS_BACKEND_OPTIONS)) +locks = LockManager( + build_instance_from_options_of_type(LockBackend, settings.SENTRY_DEFAULT_LOCKS_BACKEND_OPTIONS) +) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 504ae6c6d414be..817fc06f229684 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -30,12 +30,13 @@ from sentry.utils.cache import cache from sentry.utils.event_frames import get_sdk_name from sentry.utils.locking import UnableToAcquireLock +from sentry.utils.locking.backends import LockBackend from sentry.utils.locking.manager import LockManager from sentry.utils.retries import ConditionalRetryPolicy, exponential_delay from sentry.utils.safe import get_path, safe_execute from sentry.utils.sdk import bind_organization_context, set_current_event_project from sentry.utils.sdk_crashes.sdk_crash_detection_config import build_sdk_crash_detection_configs -from sentry.utils.services import build_instance_from_options +from sentry.utils.services import build_instance_from_options_of_type if TYPE_CHECKING: from sentry.eventstore.models import Event, GroupEvent @@ -48,7 +49,11 @@ logger = logging.getLogger(__name__) -locks = LockManager(build_instance_from_options(settings.SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS)) +locks = LockManager( + build_instance_from_options_of_type( + LockBackend, settings.SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS + ) +) ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT = 50 HIGHER_ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT = 200 diff --git a/src/sentry/utils/locking/backends/__init__.py b/src/sentry/utils/locking/backends/__init__.py index d64bde722ea7b3..0ac9660c144795 100644 --- a/src/sentry/utils/locking/backends/__init__.py +++ b/src/sentry/utils/locking/backends/__init__.py @@ -4,7 +4,7 @@ class LockBackend: ``sentry.utils.locking.Lock`` class. """ - def acquire(self, key, duration, routing_key=None): + def acquire(self, key: str, duration: int, routing_key: str | None = None) -> None: """ Acquire a lock, represented by the given key for the given duration (in seconds.) 
This method should attempt to acquire the lock once, in a @@ -18,13 +18,13 @@ def acquire(self, key, duration, routing_key=None): """ raise NotImplementedError - def release(self, key, routing_key=None): + def release(self, key: str, routing_key: str | None = None) -> None: """ Release a lock. The return value is not used. """ raise NotImplementedError - def locked(self, key, routing_key=None): + def locked(self, key: str, routing_key: str | None = None) -> bool: """ Check if a lock has been taken. """ diff --git a/src/sentry/utils/locking/backends/migration.py b/src/sentry/utils/locking/backends/migration.py index 2b2ab200367435..c7b80c92c01f2c 100644 --- a/src/sentry/utils/locking/backends/migration.py +++ b/src/sentry/utils/locking/backends/migration.py @@ -1,8 +1,8 @@ from collections.abc import Callable, Mapping -from typing import Any, Optional, Union, cast +from typing import Any, Optional, Union from sentry.utils.locking.backends import LockBackend -from sentry.utils.services import build_instance_from_options, resolve_callable +from sentry.utils.services import build_instance_from_options_of_type, resolve_callable SelectorFncType = Callable[[str, Optional[Union[str, int]], LockBackend, LockBackend], LockBackend] @@ -57,12 +57,10 @@ def __init__( backend_old_config: Mapping[str, Any], selector_func_path: str | SelectorFncType | None = None, ): - self.backend_new = cast(LockBackend, build_instance_from_options(backend_new_config)) - self.backend_old = cast(LockBackend, build_instance_from_options(backend_old_config)) + self.backend_new = build_instance_from_options_of_type(LockBackend, backend_new_config) + self.backend_old = build_instance_from_options_of_type(LockBackend, backend_old_config) self.selector_func: SelectorFncType = ( - cast(SelectorFncType, resolve_callable(selector_func_path)) - if selector_func_path - else _default_selector_func + resolve_callable(selector_func_path) if selector_func_path else _default_selector_func ) def _get_backend(self, key: str, routing_key: str | int | None) -> LockBackend: @@ -83,7 +81,7 @@ def acquire(self, key: str, duration: int, routing_key: str | None = None) -> No raise Exception(f"Could not set key: {key!r}") return backend.acquire(key=key, duration=duration, routing_key=routing_key) - def release(self, key, routing_key=None): + def release(self, key: str, routing_key: str | None = None) -> None: backend = self._get_backend(key=key, routing_key=routing_key) try: (self.backend_new if backend == self.backend_old else self.backend_old).release( @@ -93,7 +91,7 @@ def release(self, key, routing_key=None): pass backend.release(key=key, routing_key=routing_key) - def locked(self, key, routing_key=None): + def locked(self, key: str, routing_key: str | None = None) -> bool: return self.backend_old.locked(key=key, routing_key=routing_key) or self.backend_new.locked( key=key, routing_key=routing_key ) diff --git a/src/sentry/utils/locking/lock.py b/src/sentry/utils/locking/lock.py index d1429b62f25f53..579a47a016ee5b 100644 --- a/src/sentry/utils/locking/lock.py +++ b/src/sentry/utils/locking/lock.py @@ -1,24 +1,31 @@ +from __future__ import annotations + import logging import random import time +from collections.abc import Generator from contextlib import contextmanager +from typing import ContextManager from sentry.utils.locking import UnableToAcquireLock +from sentry.utils.locking.backends import LockBackend logger = logging.getLogger(__name__) class Lock: - def __init__(self, backend, key: str, duration: int, routing_key: str | None = None) 
-> None: + def __init__( + self, backend: LockBackend, key: str, duration: int, routing_key: str | None = None + ) -> None: self.backend = backend self.key = key self.duration = duration self.routing_key = routing_key - def __repr__(self): + def __repr__(self) -> str: return f"" - def acquire(self): + def acquire(self) -> ContextManager[None]: """ Attempt to acquire the lock. @@ -35,7 +42,7 @@ def acquire(self): ) from error @contextmanager - def releaser(): + def releaser() -> Generator[None, None, None]: try: yield finally: @@ -43,7 +50,9 @@ def releaser(): return releaser() - def blocking_acquire(self, initial_delay: float, timeout: float, exp_base=1.6): + def blocking_acquire( + self, initial_delay: float, timeout: float, exp_base: float = 1.6 + ) -> ContextManager[None]: """ Try to acquire the lock in a polling loop. @@ -70,7 +79,7 @@ def blocking_acquire(self, initial_delay: float, timeout: float, exp_base=1.6): raise UnableToAcquireLock(f"Unable to acquire {self!r} because of timeout") - def release(self): + def release(self) -> None: """ Attempt to release the lock. @@ -82,7 +91,7 @@ def release(self): except Exception as error: logger.warning("Failed to release %r due to error: %r", self, error, exc_info=True) - def locked(self): + def locked(self) -> bool: """ See if the lock has been taken somewhere else. """ diff --git a/src/sentry/utils/locking/manager.py b/src/sentry/utils/locking/manager.py index 266d8a45fab165..fb077a2dd8774d 100644 --- a/src/sentry/utils/locking/manager.py +++ b/src/sentry/utils/locking/manager.py @@ -1,9 +1,10 @@ from sentry.utils import metrics +from sentry.utils.locking.backends import LockBackend from sentry.utils.locking.lock import Lock class LockManager: - def __init__(self, backend): + def __init__(self, backend: LockBackend) -> None: self.backend = backend def get( diff --git a/src/sentry/utils/services.py b/src/sentry/utils/services.py index 6659a612a905af..297d1e67e914db 100644 --- a/src/sentry/utils/services.py +++ b/src/sentry/utils/services.py @@ -21,6 +21,8 @@ logger = logging.getLogger(__name__) +T = TypeVar("T") + class Service: __all__: Iterable[str] = () @@ -116,7 +118,10 @@ def expose(self, context: MutableMapping[str, Any]) -> None: context[key] = getattr(base_instance, key) -def resolve_callable(value: str | AnyCallable) -> AnyCallable: +CallableT = TypeVar("CallableT", bound=Callable[..., object]) + + +def resolve_callable(value: str | CallableT) -> CallableT: if callable(value): return value elif isinstance(value, str): @@ -385,9 +390,10 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> def build_instance_from_options( - options: Mapping[str, Any], - default_constructor: Callable[..., Service] | None = None, -) -> Service: + options: Mapping[str, object], + *, + default_constructor: Callable[..., object] | None = None, +) -> object: try: path = options["path"] except KeyError: @@ -401,6 +407,19 @@ def build_instance_from_options( return constructor(**options.get("options", {})) +def build_instance_from_options_of_type( + tp: type[T], + options: Mapping[str, object], + *, + default_constructor: Callable[..., T] | None = None, +) -> T: + ret = build_instance_from_options(options, default_constructor=default_constructor) + if isinstance(ret, tp): + return ret + else: + raise TypeError(f"expected built object of type {tp}, got {type(ret)}") + + class ServiceDelegator(Delegator, Service): """\ The backends are provided as mapping of backend name to configuration diff --git 
a/tests/sentry/monitors/consumers/test_monitor_consumer.py b/tests/sentry/monitors/consumers/test_monitor_consumer.py index 1eb40d18affdf6..d0fc419e0d3414 100644 --- a/tests/sentry/monitors/consumers/test_monitor_consumer.py +++ b/tests/sentry/monitors/consumers/test_monitor_consumer.py @@ -37,11 +37,7 @@ from sentry.monitors.types import CheckinItem from sentry.testutils.cases import TestCase from sentry.utils import json -from sentry.utils.locking.manager import LockManager from sentry.utils.outcomes import Outcome -from sentry.utils.services import build_instance_from_options - -locks = LockManager(build_instance_from_options(settings.SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS)) class MonitorConsumerTest(TestCase): From 05b36bccb3527a24bd76e50318ea532e233eb2e3 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Fri, 10 May 2024 11:31:14 -0700 Subject: [PATCH 288/376] cogs(replays): remove some logs and sample more on dom index logs (#70672) we log a lot in this file. https://cloudlogging.app.goo.gl/J6jw8U7i1LjGNQnCA lets: * add some sampling to some logs (canvas size) * increase sampling for some logs (large mutation) * remove slow click / multi click logs, as I don't think we need the data gathering from this anymore --- src/sentry/replays/usecases/ingest/dom_index.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/src/sentry/replays/usecases/ingest/dom_index.py b/src/sentry/replays/usecases/ingest/dom_index.py index ce9572ba3e6d2c..88c00ac090a545 100644 --- a/src/sentry/replays/usecases/ingest/dom_index.py +++ b/src/sentry/replays/usecases/ingest/dom_index.py @@ -128,7 +128,11 @@ def log_canvas_size( events: list[dict[str, Any]], ) -> None: for event in events: - if event.get("type") == 3 and event.get("data", {}).get("source") == 9: + if ( + event.get("type") == 3 + and event.get("data", {}).get("source") == 9 + and random.randint(0, 499) < 1 + ): logger.info( # Logging to the sentry.replays.slow_click namespace because # its the only one configured to use BigQuery at the moment. @@ -194,7 +198,7 @@ def get_user_actions( isinstance(payload, dict) and tag == "breadcrumb" and payload.get("category") == "replay.mutations" - and random.randint(0, 99) < 1 + and random.randint(0, 500) < 1 ): _handle_mutations_event(project_id, replay_id, event) @@ -420,17 +424,8 @@ def _handle_breadcrumb( log["replay_id"] = replay_id log["dom_tree"] = log.pop("message") - logger.info("sentry.replays.slow_click", extra=log) - return click - elif category == "ui.multiClick": - # Log the event for tracking. - log = event["data"].get("payload", {}).copy() - log["project_id"] = project_id - log["replay_id"] = replay_id - log["dom_tree"] = log.pop("message") - logger.info("sentry.replays.slow_click", extra=log) elif category == "ui.click": click = create_click_event( payload, replay_id, is_dead=False, is_rage=False, project_id=project_id From 280d63158196553a234d1566e95bfc403fdc8d5e Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Fri, 10 May 2024 14:43:22 -0400 Subject: [PATCH 289/376] feat(traces): Add dynamic tag keys lookup (#70674) ### Summary This is somewhat slow but will allow users to see custom span tag keys in the autocomplete. 
--- .../performance/traces/tracesSearchBar.tsx | 38 ++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/static/app/views/performance/traces/tracesSearchBar.tsx b/static/app/views/performance/traces/tracesSearchBar.tsx index 09b26cedd4500b..4d4bfc6d3c7b69 100644 --- a/static/app/views/performance/traces/tracesSearchBar.tsx +++ b/static/app/views/performance/traces/tracesSearchBar.tsx @@ -7,6 +7,7 @@ import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {TagCollection} from 'sentry/types'; import {DiscoverDatasets} from 'sentry/utils/discover/types'; +import {type ApiQueryKey, useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; import {SpanIndexedField} from 'sentry/views/starfish/types'; @@ -33,6 +34,41 @@ const getTracesSupportedTags = () => { return tags; }; +interface SpanFieldEntry { + key: string; + name: string; +} +type SpanFieldsResponse = SpanFieldEntry[]; + +const getDynamicSpanFieldsEndpoint = (orgSlug: string): ApiQueryKey => [ + `/organizations/${orgSlug}/spans/fields/?statsPeriod=1h`, +]; + +const useTracesSupportedTags = (): TagCollection => { + const organization = useOrganization(); + const staticTags = getTracesSupportedTags(); + + const dynamicTagQuery = useApiQuery( + getDynamicSpanFieldsEndpoint(organization.slug), + { + staleTime: 0, + retry: false, + } + ); + + if (dynamicTagQuery.isSuccess) { + const dynamicTags: TagCollection = Object.fromEntries( + dynamicTagQuery.data.map(entry => [entry.key, entry]) + ); + return { + ...dynamicTags, + ...staticTags, + }; + } + + return staticTags; +}; + export function TracesSearchBar({ queries, handleSearch, @@ -42,7 +78,7 @@ export function TracesSearchBar({ const organization = useOrganization(); const canAddMoreQueries = queries.length <= 2; const localQueries = queries.length ? queries : ['']; - const supportedTags = getTracesSupportedTags(); + const supportedTags = useTracesSupportedTags(); return ( From d35fbb083ff2f53ea5d27996cf2cc9cb0eca4d10 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 2024 15:03:57 -0400 Subject: [PATCH 290/376] ref(js): Remove unused `query` in ListLink (#70675) It's not used anywhere --- static/app/components/links/listLink.tsx | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/static/app/components/links/listLink.tsx b/static/app/components/links/listLink.tsx index e28c35c4e962f9..13ab3f08330480 100644 --- a/static/app/components/links/listLink.tsx +++ b/static/app/components/links/listLink.tsx @@ -2,7 +2,6 @@ import {Link as RouterLink} from 'react-router'; import styled from '@emotion/styled'; import classNames from 'classnames'; import type {LocationDescriptor} from 'history'; -import * as qs from 'query-string'; import useRouter from 'sentry/utils/useRouter'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; @@ -24,14 +23,12 @@ type Props = LinkProps & { * Should be should be supplied by the parent component */ isActive?: (location: LocationDescriptor, indexOnly?: boolean) => boolean; - query?: string; }; function ListLink({ children, className, isActive, - query, to, activeClassName = 'active', index = false, @@ -39,8 +36,7 @@ function ListLink({ ...props }: Props) { const router = useRouter(); - const queryData = query ? qs.parse(query) : undefined; - const targetLocation = typeof to === 'string' ? {pathname: to, query: queryData} : to; + const targetLocation = typeof to === 'string' ? 
{pathname: to} : to; const target = normalizeUrl(targetLocation); const active = isActive?.(target, index) ?? router.isActive(target, index); From 700c6ed7939d918b4f272da7b5dbc5fb62b4c5a9 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Fri, 10 May 2024 15:09:28 -0400 Subject: [PATCH 291/376] feat(traces): Fix visuals and duration tooltip (#70677) ### Summary This makes it a little more obvious a project is selected, and fixes the duration tooltip to use the real value of the breakdown. --- static/app/views/performance/traces/content.tsx | 7 ++++--- static/app/views/performance/traces/fieldRenderers.tsx | 10 ++++++++-- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index 53e9e30762b33b..7d9f7881c7b10c 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -379,6 +379,7 @@ export interface TraceResult { } interface TraceBreakdownBase { + duration: number; // Contains the accurate duration for display. Start and end may be quantized. end: number; opCategory: string | null; sdkName: string | null; @@ -529,7 +530,7 @@ const BreakdownPanelItem = styled(StyledPanelItem)<{highlightedSliceName: string ${p => p.highlightedSliceName ? `--highlightedSlice-${p.highlightedSliceName}-opacity: 1.0; - --highlightedSlice-${p.highlightedSliceName}-saturate: saturate(1.0); + --highlightedSlice-${p.highlightedSliceName}-saturate: saturate(1.0) contrast(1.0); --highlightedSlice-${p.highlightedSliceName}-transform: translateY(0px); ` : null} @@ -537,12 +538,12 @@ const BreakdownPanelItem = styled(StyledPanelItem)<{highlightedSliceName: string p.highlightedSliceName ? ` --defaultSlice-opacity: 1.0; - --defaultSlice-saturate: saturate(0.8); + --defaultSlice-saturate: saturate(0.7) contrast(0.9) brightness(1.2); --defaultSlice-transform: translateY(0px); ` : ` --defaultSlice-opacity: 1.0; - --defaultSlice-saturate: saturate(1.0); + --defaultSlice-saturate: saturate(1.0) contrast(1.0); --defaultSlice-transform: translateY(0px); `} `; diff --git a/static/app/views/performance/traces/fieldRenderers.tsx b/static/app/views/performance/traces/fieldRenderers.tsx index 29aa6e0cc4cb2f..82d52a4f108d7b 100644 --- a/static/app/views/performance/traces/fieldRenderers.tsx +++ b/static/app/views/performance/traces/fieldRenderers.tsx @@ -80,7 +80,7 @@ const RectangleTraceBreakdown = styled(RowRectangle)<{ ${p => ` transform: var(--hoveredSlice-${p.offset}-translateY, var(--highlightedSlice-${p.sliceName ?? 
''}-transform, var(--defaultSlice-transform, 1.0))); `} - transition: opacity,transform 0.2s cubic-bezier(0.4, 0, 0.2, 1); + transition: filter,opacity,transform 0.2s cubic-bezier(0.4, 0, 0.2, 1); `; export function TraceBreakdownRenderer({ @@ -107,6 +107,7 @@ export function TraceBreakdownRenderer({ sliceName={breakdown.project} sliceStart={breakdown.start} sliceEnd={breakdown.end} + sliceDurationReal={breakdown.duration} sliceSecondaryName={breakdown.sdkName} trace={trace} theme={theme} @@ -136,6 +137,7 @@ export function SpanBreakdownSliceRenderer({ sliceName, sliceStart, sliceEnd, + sliceDurationReal, sliceSecondaryName, onMouseEnter, offset, @@ -148,6 +150,7 @@ export function SpanBreakdownSliceRenderer({ theme: Theme; trace: TraceResult; offset?: number; + sliceDurationReal?: number; }) { const traceSliceSize = (trace.end - trace.start) / BREAKDOWN_NUM_SLICES; const traceDuration = BREAKDOWN_NUM_SLICES * traceSliceSize; @@ -184,7 +187,10 @@ export function SpanBreakdownSliceRenderer({ ({getShortenedSdkName(sliceSecondaryName)})
- +
} From 389cab2867baefbe02bbc3c1e23f11efb9a4b47e Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Fri, 10 May 2024 15:18:12 -0400 Subject: [PATCH 292/376] fix(replay): if a video replay has no gestures, hide the rrweb canvas (#70664) This should probably get removed eventually once all mobile replays have gestures Before: SCR-20240510-lgxp After: SCR-20240510-lgvh --- .../components/replays/videoReplayerWithInteractions.tsx | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/static/app/components/replays/videoReplayerWithInteractions.tsx b/static/app/components/replays/videoReplayerWithInteractions.tsx index 259e4bfa43f0c0..9c7719dcd8f860 100644 --- a/static/app/components/replays/videoReplayerWithInteractions.tsx +++ b/static/app/components/replays/videoReplayerWithInteractions.tsx @@ -99,6 +99,8 @@ export class VideoReplayerWithInteractions { } }); + const hasGestures = events?.some(e => e.type === 3); + this.replayer = new Replayer(eventsWithSnapshots, { root: root as Element, blockClass: 'sentry-block', @@ -112,6 +114,12 @@ export class VideoReplayerWithInteractions { skipInactive: false, speed: this.config.speed, }); + + if (!hasGestures) { + // If the replay has no gestures, we should hide the mouse + // @ts-expect-error private + this.replayer.mouse.classList.remove('replayer-mouse'); + } } public destroy() { From 233b99af911db7bd997ac9b79e7415427cb80167 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Fri, 10 May 2024 12:20:35 -0700 Subject: [PATCH 293/376] feat(feedback): flag for spam detection actions (#70676) would like to start taking action on spam detection for LA and soon EA, so make this a feature instead of an option so we can LA it. --- src/sentry/conf/server.py | 2 + src/sentry/features/base.py | 4 +- src/sentry/features/temporary.py | 1 + .../feedback/usecases/create_feedback.py | 10 +- .../feedback/usecases/test_create_feedback.py | 161 +++++++++--------- 5 files changed, 92 insertions(+), 86 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index e9f5149e8f941b..bea30ae27ba053 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1949,6 +1949,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:user-feedback-spam-filter-ui": False, # Enable User Feedback spam auto filtering feature ingest "organizations:user-feedback-spam-filter-ingest": False, + # Enable User Feedback spam auto filtering feature actions + "organizations:user-feedback-spam-filter-actions": False, # Enable User Feedback v2 UI "organizations:user-feedback-ui": False, # User Feedback Error Link Ingestion Changes diff --git a/src/sentry/features/base.py b/src/sentry/features/base.py index 213ce4d6e58ba5..35b61aad85d01c 100644 --- a/src/sentry/features/base.py +++ b/src/sentry/features/base.py @@ -96,4 +96,6 @@ class FeatureHandlerStrategy(Enum): REMOTE = 2 """Handle the feature using a remote flag management service""" OPTIONS = 3 - """Handle the feature using options""" + """Handle the feature using options. see https://develop.sentry.dev/feature-flags/#building-your-options-based-feature + for more information. 
+ """ diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 878487f44e3d39..4ad8a1641c32e1 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -254,6 +254,7 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:user-feedback-ingest", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) manager.add("organizations:user-feedback-replay-clip", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:user-feedback-spam-filter-ingest", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) + manager.add("organizations:user-feedback-spam-filter-actions", OrganizationFeature, FeatureHandlerStrategy.OPTIONS) manager.add("organizations:user-feedback-spam-filter-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:user-feedback-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:user-feedback-event-link-ingestion-changes", OrganizationFeature, FeatureHandlerStrategy.OPTIONS) diff --git a/src/sentry/feedback/usecases/create_feedback.py b/src/sentry/feedback/usecases/create_feedback.py index 16659e4c9bd58b..7347c8856adc82 100644 --- a/src/sentry/feedback/usecases/create_feedback.py +++ b/src/sentry/feedback/usecases/create_feedback.py @@ -8,7 +8,7 @@ import jsonschema -from sentry import features, options +from sentry import features from sentry.constants import DataCategory from sentry.eventstore.models import Event, GroupEvent from sentry.feedback.usecases.spam_detection import is_spam @@ -253,7 +253,7 @@ def create_feedback_issue(event, project_id: int, source: FeedbackCreationSource payload_type=PayloadType.OCCURRENCE, occurrence=occurrence, event_data=event_fixed ) if is_message_spam: - auto_ignore_spam_feedbacks(project_id, issue_fingerprint) + auto_ignore_spam_feedbacks(project, issue_fingerprint) metrics.incr( "feedback.create_feedback_issue.produced_occurrence", tags={"referrer": source.value}, @@ -351,14 +351,14 @@ def shim_to_feedback( ) -def auto_ignore_spam_feedbacks(project_id, issue_fingerprint): - if options.get("feedback.spam-detection-actions"): +def auto_ignore_spam_feedbacks(project, issue_fingerprint): + if features.has("organizations:user-feedback-spam-filter-actions", project.organization): metrics.incr("feedback.spam-detection-actions.set-ignored") produce_occurrence_to_kafka( payload_type=PayloadType.STATUS_CHANGE, status_change=StatusChangeMessage( fingerprint=issue_fingerprint, - project_id=project_id, + project_id=project.id, new_status=GroupStatus.RESOLVED, new_substatus=None, ), diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index 16067738a07dcf..27817576c26a3c 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -39,7 +39,6 @@ def llm_settings(set_sentry_option): "llm.usecases.options", {"spamdetection": {"provider": "openai", "options": {"model": "gpt-4-turbo-1.0"}}}, ), - set_sentry_option("feedback.spam-detection-actions", True), ): yield @@ -462,94 +461,96 @@ def test_create_feedback_spam_detection_adds_field( monkeypatch, feature_flag, ): - with Feature({"organizations:user-feedback-spam-filter-ingest": feature_flag}): - event = { - "project_id": default_project.id, - "request": { - "url": "https://sentry.sentry.io/feedback/?statsPeriod=14d", - "headers": { - "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36" + with Feature({"organizations:user-feedback-spam-filter-actions": True}): + + with Feature({"organizations:user-feedback-spam-filter-ingest": feature_flag}): + event = { + "project_id": default_project.id, + "request": { + "url": "https://sentry.sentry.io/feedback/?statsPeriod=14d", + "headers": { + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36" + }, }, - }, - "event_id": "56b08cf7852c42cbb95e4a6998c66ad6", - "timestamp": 1698255009.574, - "received": "2021-10-24T22:23:29.574000+00:00", - "environment": "prod", - "release": "frontend@daf1316f209d961443664cd6eb4231ca154db502", - "user": { - "ip_address": "72.164.175.154", - "email": "josh.ferge@sentry.io", - "id": 880461, - "isStaff": False, - "name": "Josh Ferge", - }, - "contexts": { - "feedback": { - "contact_email": "josh.ferge@sentry.io", + "event_id": "56b08cf7852c42cbb95e4a6998c66ad6", + "timestamp": 1698255009.574, + "received": "2021-10-24T22:23:29.574000+00:00", + "environment": "prod", + "release": "frontend@daf1316f209d961443664cd6eb4231ca154db502", + "user": { + "ip_address": "72.164.175.154", + "email": "josh.ferge@sentry.io", + "id": 880461, + "isStaff": False, "name": "Josh Ferge", - "message": input_message, - "replay_id": "3d621c61593c4ff9b43f8490a78ae18e", - "url": "https://sentry.sentry.io/feedback/?statsPeriod=14d", }, - }, - "breadcrumbs": [], - "platform": "javascript", - } - - def dummy_response(*args, **kwargs): - return ChatCompletion( - id="test", - choices=[ - Choice( - index=0, - message=ChatCompletionMessage( - content=( - "spam" - if "This is definitely spam" in kwargs["messages"][0]["content"] - else "not spam" + "contexts": { + "feedback": { + "contact_email": "josh.ferge@sentry.io", + "name": "Josh Ferge", + "message": input_message, + "replay_id": "3d621c61593c4ff9b43f8490a78ae18e", + "url": "https://sentry.sentry.io/feedback/?statsPeriod=14d", + }, + }, + "breadcrumbs": [], + "platform": "javascript", + } + + def dummy_response(*args, **kwargs): + return ChatCompletion( + id="test", + choices=[ + Choice( + index=0, + message=ChatCompletionMessage( + content=( + "spam" + if "This is definitely spam" in kwargs["messages"][0]["content"] + else "not spam" + ), + role="assistant", ), - role="assistant", - ), - finish_reason="stop", - ) - ], - created=time.time(), - model="gpt3.5-trubo", - object="chat.completion", - ) + finish_reason="stop", + ) + ], + created=time.time(), + model="gpt3.5-trubo", + object="chat.completion", + ) - mock_openai = Mock() - mock_openai().chat.completions.create = dummy_response - - monkeypatch.setattr("sentry.llm.providers.openai.OpenAI", mock_openai) - - create_feedback_issue( - event, default_project.id, FeedbackCreationSource.NEW_FEEDBACK_ENVELOPE - ) + mock_openai = Mock() + mock_openai().chat.completions.create = dummy_response - # Check if the 'is_spam' evidence in the Kafka message matches the expected result - is_spam_evidence = [ - evidence.value - for evidence in mock_produce_occurrence_to_kafka.call_args_list[0] - .kwargs["occurrence"] - .evidence_display - if evidence.name == "is_spam" - ] - found_is_spam = is_spam_evidence[0] if is_spam_evidence else None - assert ( - found_is_spam == expected_result - ), f"Expected {expected_result} but found {found_is_spam} for {input_message} and feature flag {feature_flag}" + monkeypatch.setattr("sentry.llm.providers.openai.OpenAI", mock_openai) - if expected_result and 
feature_flag: - assert ( - mock_produce_occurrence_to_kafka.call_args_list[1] - .kwargs["status_change"] - .new_status - == GroupStatus.RESOLVED + create_feedback_issue( + event, default_project.id, FeedbackCreationSource.NEW_FEEDBACK_ENVELOPE ) - if not (expected_result and feature_flag): - assert mock_produce_occurrence_to_kafka.call_count == 1 + # Check if the 'is_spam' evidence in the Kafka message matches the expected result + is_spam_evidence = [ + evidence.value + for evidence in mock_produce_occurrence_to_kafka.call_args_list[0] + .kwargs["occurrence"] + .evidence_display + if evidence.name == "is_spam" + ] + found_is_spam = is_spam_evidence[0] if is_spam_evidence else None + assert ( + found_is_spam == expected_result + ), f"Expected {expected_result} but found {found_is_spam} for {input_message} and feature flag {feature_flag}" + + if expected_result and feature_flag: + assert ( + mock_produce_occurrence_to_kafka.call_args_list[1] + .kwargs["status_change"] + .new_status + == GroupStatus.RESOLVED + ) + + if not (expected_result and feature_flag): + assert mock_produce_occurrence_to_kafka.call_count == 1 @django_db_all From 2da62970255b8f5eee56a960aac39b3c96cc8d77 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Fri, 10 May 2024 16:06:52 -0400 Subject: [PATCH 294/376] ref(js): Drop unused activeClassName (#70680) We never override this --- static/app/components/links/listLink.tsx | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/static/app/components/links/listLink.tsx b/static/app/components/links/listLink.tsx index 13ab3f08330480..420c9579c42023 100644 --- a/static/app/components/links/listLink.tsx +++ b/static/app/components/links/listLink.tsx @@ -13,10 +13,6 @@ type Props = LinkProps & { * Link target. We don't want to expose the ToLocationFunction on this component. */ to: LocationDescriptor; - /** - * The class to apply when the link is 'active' - */ - activeClassName?: string; disabled?: boolean; index?: boolean; /** @@ -30,7 +26,6 @@ function ListLink({ className, isActive, to, - activeClassName = 'active', index = false, disabled = false, ...props @@ -42,10 +37,7 @@ function ListLink({ const active = isActive?.(target, index) ?? router.isActive(target, index); return ( - + {children} From ef0db37472053df547edcf09e02efa92be833ba2 Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Fri, 10 May 2024 13:24:06 -0700 Subject: [PATCH 295/376] nit: remove deploy from activation triggers until we're ready to implement (#70682) --- static/app/views/alerts/rules/metric/ruleConditionsForm.tsx | 6 ------ 1 file changed, 6 deletions(-) diff --git a/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx b/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx index 1d36c56808f930..c954883ff180b4 100644 --- a/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx +++ b/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx @@ -431,7 +431,6 @@ class RuleConditionsForm extends PureComponent { ? null : onMonitorTypeSelect({ monitorType: MonitorType.CONTINUOUS, - activationCondition, }) } > @@ -447,7 +446,6 @@ class RuleConditionsForm extends PureComponent { ? 
null : onMonitorTypeSelect({ monitorType: MonitorType.ACTIVATED, - activationCondition, }) } > @@ -464,10 +462,6 @@ class RuleConditionsForm extends PureComponent { value: ActivationConditionType.RELEASE_CREATION, label: t('New Release'), }, - { - value: ActivationConditionType.DEPLOY_CREATION, - label: t('New Deploy'), - }, ]} required value={activationCondition} From 830365b0e446c2a1f50d02427fcf6fbfe25e4117 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Fri, 10 May 2024 16:26:57 -0400 Subject: [PATCH 296/376] fix(auth) Improve layout for auth link view (#70678) - Remove overflowing avatars - Add margin below action buttons - Improve sample data used in debug views. Refs HC-1186 --- src/sentry/templates/sentry/auth-confirm-link.html | 2 +- src/sentry/templates/sentry/partial/avatar.html | 3 ++- src/sentry/templatetags/sentry_avatars.py | 1 + src/sentry/web/frontend/debug/debug_auth_views.py | 5 +++-- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/sentry/templates/sentry/auth-confirm-link.html b/src/sentry/templates/sentry/auth-confirm-link.html index 50133d27d66602..50641b747eb804 100644 --- a/src/sentry/templates/sentry/auth-confirm-link.html +++ b/src/sentry/templates/sentry/auth-confirm-link.html @@ -37,7 +37,7 @@
-
+
{% if force_link or existing_user.email == identity.email %}

We're going to associate this {{ provider }} account with your Sentry account, which means all of your existing settings will stay intact.
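For the avatar.html hunk below: the change makes the gravatar and letter-avatar branches mutually exclusive. Previously the letter avatar always rendered and, for gravatar users, the gravatar image rendered as well, which produced the overflowing double avatar this patch removes. A minimal Python sketch of that control-flow change (the function names and return values are illustrative stand-ins, not the template API):

```python
# Hypothetical before/after sketch of the avatar.html branching.
def rendered_avatars_before(avatar_type: str) -> list[str]:
    rendered = ["letter_avatar_svg"]  # previously rendered unconditionally
    if avatar_type == "gravatar":
        rendered.append("gravatar_img")  # stacked on top of the letter avatar
    return rendered

def rendered_avatars_after(avatar_type: str) -> list[str]:
    # Exclusive branches: exactly one avatar renders.
    return ["gravatar_img"] if avatar_type == "gravatar" else ["letter_avatar_svg"]

assert rendered_avatars_before("gravatar") == ["letter_avatar_svg", "gravatar_img"]
assert rendered_avatars_after("gravatar") == ["gravatar_img"]
```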

diff --git a/src/sentry/templates/sentry/partial/avatar.html b/src/sentry/templates/sentry/partial/avatar.html index 0b4960840f88f4..6b3f89fb3acbd7 100644 --- a/src/sentry/templates/sentry/partial/avatar.html +++ b/src/sentry/templates/sentry/partial/avatar.html @@ -10,9 +10,10 @@ {% email_avatar display_name label size True %} {% endif %} {% else %} - {% letter_avatar_svg display_name label size %} {% if avatar_type == 'gravatar' %} + {% else %} + {% letter_avatar_svg display_name label size %} {% endif %} {% endif %} {% endif %} diff --git a/src/sentry/templatetags/sentry_avatars.py b/src/sentry/templatetags/sentry_avatars.py index 2f96a8d359c38c..857ec1def14f56 100644 --- a/src/sentry/templatetags/sentry_avatars.py +++ b/src/sentry/templatetags/sentry_avatars.py @@ -55,6 +55,7 @@ def avatar(user, size=36): email = user.email if user_id: email = user.user.email + return { "email": email, "user_id": user_id, diff --git a/src/sentry/web/frontend/debug/debug_auth_views.py b/src/sentry/web/frontend/debug/debug_auth_views.py index d23afd8b76be1d..99eb396a308e90 100644 --- a/src/sentry/web/frontend/debug/debug_auth_views.py +++ b/src/sentry/web/frontend/debug/debug_auth_views.py @@ -23,11 +23,12 @@ def get(self, request: HttpRequest) -> HttpResponse: class DebugAuthConfirmLink(View): def get(self, request: HttpRequest) -> HttpResponse: - auth_identity = {"id": "bar@example.com", "email": "bar@example.com"} + auth_identity = {"id": "bar@example.com", "email": "test1@example.com"} return render_to_response( "sentry/auth-confirm-link.html", context={ - "existing_user": User(email="foo@example.com"), + "provider": "Okta", + "existing_user": User(email="test1@example.com", avatar_type=2), "identity": auth_identity, "identity_display_name": auth_identity["email"], "identity_identifier": auth_identity["id"], From 89c7279655e5d0ff45d06cdfb7be18725c546776 Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Fri, 10 May 2024 13:29:28 -0700 Subject: [PATCH 297/376] chore(issues): Remove impossible branch (#70562) Since the first line of `process_event_and_issue_occurrence` reads from `event_data` this can't possibly ever be `None`. --- tests/sentry/issues/test_utils.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/sentry/issues/test_utils.py b/tests/sentry/issues/test_utils.py index 68226f15609120..32ad04ccee7eed 100644 --- a/tests/sentry/issues/test_utils.py +++ b/tests/sentry/issues/test_utils.py @@ -68,18 +68,17 @@ def build_occurrence(self, **overrides: Any) -> IssueOccurrence: return IssueOccurrence.from_dict(self.build_occurrence_data(**overrides)) def process_occurrence( - self, event_data: dict[str, Any] | None = None, **overrides + self, event_data: dict[str, Any], **overrides ) -> tuple[IssueOccurrence, GroupInfo | None]: """ Testutil to build and process occurrence data instead of going through Kafka. This ensures the occurrence data is well-formed. 
""" occurrence_data = self.build_occurrence_data(**overrides) - if event_data: - if "event_id" not in event_data: - event_data["event_id"] = occurrence_data["event_id"] - if "project_id" not in event_data: - event_data["project_id"] = occurrence_data["project_id"] + if "event_id" not in event_data: + event_data["event_id"] = occurrence_data["event_id"] + if "project_id" not in event_data: + event_data["project_id"] = occurrence_data["project_id"] return process_event_and_issue_occurrence(occurrence_data, event_data) From 5a883ee1ba7fe3aac70575559d292b8332ef8acd Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Fri, 10 May 2024 13:29:40 -0700 Subject: [PATCH 298/376] chore(issues): Fix some incorrect types in fingerprinting (#70564) --- .../grouping/fingerprinting/__init__.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/sentry/grouping/fingerprinting/__init__.py b/src/sentry/grouping/fingerprinting/__init__.py index b4578256d9592b..6cfedfdfa12531 100644 --- a/src/sentry/grouping/fingerprinting/__init__.py +++ b/src/sentry/grouping/fingerprinting/__init__.py @@ -175,8 +175,8 @@ class FingerprintingRules: def __init__( self, rules: Sequence[Rule], - changelog: Sequence[object] = None, - version: int = None, + changelog: Sequence[object] | None = None, + version: int | None = None, bases: Sequence[object] | None = None, ) -> None: if version is None: @@ -196,7 +196,7 @@ def iter_rules(self, include_builtin: bool = True) -> Generator[Rule, None, None def get_fingerprint_values_for_event(self, event: dict[str, object]) -> None | object: if not (self.bases or self.rules): - return + return None access = EventAccess(event) for rule in self.iter_rules(): new_values = rule.get_fingerprint_values_for_event_access(access) @@ -311,7 +311,7 @@ def __init__(self, key: str, pattern: str, negated: bool = False) -> None: self.negated = negated @property - def match_group(self) -> list[dict[str, str]]: + def match_group(self) -> str: if self.key == "message": return "toplevel" if self.key in ("logger", "level"): @@ -328,7 +328,7 @@ def match_group(self) -> list[dict[str, str]]: return "release" return "frames" - def matches(self, values: dict[str, object]) -> None: + def matches(self, values: dict[str, object]) -> bool: rv = self._positive_match(values) if self.negated: rv = not rv @@ -440,7 +440,7 @@ def get_fingerprint_values_for_event_access( if all(x.matches(values) for x in matchers): break else: - return + return None return self.fingerprint, self.attributes @@ -473,7 +473,7 @@ def from_json(cls, json: dict[str, object]) -> Rule: return cls._from_config_structure(json) @property - def text(self) -> list[dict[str, str]]: + def text(self) -> str: return ( '%s -> "%s" %s' % ( @@ -561,10 +561,10 @@ def visit_fp_attribute(self, _: NodeVisitorBase, children: Sequence[object]) -> raise InvalidFingerprintingConfig("Unknown attribute '%s'" % key) return (key, value) - def visit_quoted(self, node: NodeVisitorBase, _: Sequence[object]) -> None: + def visit_quoted(self, node: NodeVisitorBase, _: Sequence[object]) -> str: return unescape_string(node.text[1:-1]) - def visit_unquoted(self, node: NodeVisitorBase, _: Sequence[object]) -> None: + def visit_unquoted(self, node: NodeVisitorBase, _: Sequence[object]) -> str: return node.text visit_unquoted_no_comma = visit_unquoted @@ -572,10 +572,10 @@ def visit_unquoted(self, node: NodeVisitorBase, _: Sequence[object]) -> None: def generic_visit(self, _: NodeVisitorBase, children: 
Sequence[object]) -> None:
         return children

-    def visit_key(self, node: NodeVisitorBase, _: Sequence[object]) -> None:
+    def visit_key(self, node: NodeVisitorBase, _: Sequence[object]) -> str:
         return node.text

-    def visit_quoted_key(self, node: NodeVisitorBase, _: Sequence[object]) -> None:
+    def visit_quoted_key(self, node: NodeVisitorBase, _: Sequence[object]) -> str:
         # leading ! are used to indicate negation. make sure they don't appear.
         return node.match.groups()[0].lstrip("!")

From d17225a0c24b2be663177b8e8ac9be2d0ddf8c71 Mon Sep 17 00:00:00 2001
From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com>
Date: Fri, 10 May 2024 13:55:00 -0700
Subject: [PATCH 299/376] feat(assignee-selector): Create storybook component
 for new assignee selector trigger (#70561)

This PR creates a new component `<AssigneeBadge />` which will eventually
replace the current assignee selector trigger in the issue stream. This PR
does **not** make any changes to the selector; it merely creates the
component and a corresponding storybook entry so it can be iterated on
independently of any changes made to the issue stream. More details of the
project can be found
[here](https://github.com/getsentry/sentry/issues/69827) (#69827)

At a glance (as of 5/9): [screenshot]

Note that unlike the previous assignee selector, there is no longer a
"Suggested Assignee" state for this new assignee selector; there is only an
Assigned and an Unassigned state.

TODO:
- [x] Figure out what the loading state looks like
- [x] Chevron weight and style match border
- [x] Add tests

---
 .../app/components/assigneeBadge.stories.tsx | 100 +++++++++++
 static/app/components/assigneeBadge.tsx      | 162 ++++++++++++++++++
 2 files changed, 262 insertions(+)
 create mode 100644 static/app/components/assigneeBadge.stories.tsx
 create mode 100644 static/app/components/assigneeBadge.tsx

diff --git a/static/app/components/assigneeBadge.stories.tsx b/static/app/components/assigneeBadge.stories.tsx
new file mode 100644
index 00000000000000..7b78f2627cad5d
--- /dev/null
+++ b/static/app/components/assigneeBadge.stories.tsx
@@ -0,0 +1,100 @@
+import {Fragment, useState} from 'react';
+
+import {AssigneeBadge} from 'sentry/components/assigneeBadge';
+import storyBook from 'sentry/stories/storyBook';
+import type {Actor} from 'sentry/types';
+import {useUser} from 'sentry/utils/useUser';
+import {useUserTeams} from 'sentry/utils/useUserTeams';
+
+export default storyBook(AssigneeBadge, story => {
+  story('User Assignee', () => {
+    const user = useUser();
+    const [chevron1Toggle, setChevron1Toggle] = useState<'up' | 'down'>('down');
+    const [chevron2Toggle, setChevron2Toggle] = useState<'up' | 'down'>('down');
+    const userActor: Actor = {
+      type: 'user',
+      id: user.id,
+      name: user.name,
+      email: user.email,
+    };
+
+    return (
+

setChevron1Toggle(chevron1Toggle === 'up' ? 'down' : 'up')}> + +

+

setChevron2Toggle(chevron2Toggle === 'up' ? 'down' : 'up')}> + +

+
+ ); + }); + + story('Team Assignee', () => { + const {teams} = useUserTeams(); + const [chevron1Toggle, setChevron1Toggle] = useState<'up' | 'down'>('down'); + const [chevron2Toggle, setChevron2Toggle] = useState<'up' | 'down'>('down'); + + const teamActor: Actor = { + type: 'team', + id: teams[0].id, + name: teams[0].name, + }; + + return ( + +

setChevron1Toggle(chevron1Toggle === 'up' ? 'down' : 'up')}> + +

+

setChevron2Toggle(chevron2Toggle === 'up' ? 'down' : 'up')}> + +

+
+ ); + }); + + story('Unassigned', () => { + const [chevron1Toggle, setChevron1Toggle] = useState<'up' | 'down'>('down'); + const [chevron2Toggle, setChevron2Toggle] = useState<'up' | 'down'>('down'); + + return ( + +

setChevron1Toggle(chevron1Toggle === 'up' ? 'down' : 'up')}> + +

+

setChevron2Toggle(chevron2Toggle === 'up' ? 'down' : 'up')}> + +

+
+ ); + }); + + story('Loading', () => { + return ( + +

+ +

+

+ +

+
+ ); + }); +}); diff --git a/static/app/components/assigneeBadge.tsx b/static/app/components/assigneeBadge.tsx new file mode 100644 index 00000000000000..e589596cd0efee --- /dev/null +++ b/static/app/components/assigneeBadge.tsx @@ -0,0 +1,162 @@ +import {Fragment} from 'react'; +import styled from '@emotion/styled'; + +import ActorAvatar from 'sentry/components/avatar/actorAvatar'; +import Tag from 'sentry/components/badge/tag'; +import {Chevron} from 'sentry/components/chevron'; +import ExternalLink from 'sentry/components/links/externalLink'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import Placeholder from 'sentry/components/placeholder'; +import {Tooltip} from 'sentry/components/tooltip'; +import {t, tct} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Actor, SuggestedOwnerReason} from 'sentry/types'; +import {lightTheme as theme} from 'sentry/utils/theme'; + +type AssigneeBadgeProps = { + assignedTo?: Actor | undefined; + assignmentReason?: SuggestedOwnerReason; + chevronDirection?: 'up' | 'down'; + loading?: boolean; + showLabel?: boolean; +}; + +const AVATAR_SIZE = 16; + +export function AssigneeBadge({ + assignedTo, + assignmentReason, + showLabel = false, + chevronDirection = 'down', + loading = false, +}: AssigneeBadgeProps) { + const suggestedReasons: Record = { + suspectCommit: tct('Based on [commit:commit data]', { + commit: ( + + ), + }), + ownershipRule: t('Matching Issue Owners Rule'), + projectOwnership: t('Matching Issue Owners Rule'), + codeowners: t('Matching Codeowners Rule'), + }; + + const makeAssignedIcon = (actor: Actor) => { + return ( + + + {showLabel && ( +
{`${actor.type === 'team' ? '#' : ''}${actor.name}`}
+ )} + +
+ ); + }; + + const loadingIcon = ( + + + {showLabel && 'Loading...'} + + + ); + + const unassignedIcon = ( + + + {showLabel && Unassigned} + + + ); + + return loading ? ( + + ) : assignedTo ? ( + + {t('Assigned to ')} + {assignedTo.type === 'team' ? `#${assignedTo.name}` : assignedTo.name} + {assignmentReason && ( + {suggestedReasons[assignmentReason]} + )} + + } + > + + + ) : ( + +
{t('Unassigned')}
+ + {tct( + 'You can auto-assign issues by adding [issueOwners:Issue Owner rules].', + { + issueOwners: ( + + ), + } + )} + + + } + > + +
+ ); +} + +const StyledLoadingIndicator = styled(LoadingIndicator)` + display: inline-flex; + align-items: center; +`; + +const TooltipWrapper = styled('div')` + text-align: left; +`; + +const StyledTag = styled(Tag)` + span { + display: flex; + align-items: center; + gap: ${space(0.5)}; + } + & > div { + height: 24px; + padding: ${space(0.5)}; + } + color: ${p => p.theme.subText}; + cursor: pointer; +`; + +const TooltipSubtext = styled('div')` + color: ${p => p.theme.subText}; +`; + +const TooltipSubExternalLink = styled(ExternalLink)` + color: ${p => p.theme.subText}; + text-decoration: underline; + + :hover { + color: ${p => p.theme.subText}; + } +`; From 1caa7c55dabfac1d84d977b12c9c0214a225ee12 Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Fri, 10 May 2024 13:59:46 -0700 Subject: [PATCH 300/376] feat(api-idorslug): Updated Subset of Org, Project, SCIM, Release Endpoints to use `organization_id_or_slug` (#70673) --- .../api/endpoints/organization_region.py | 18 ++++++---- .../api/endpoints/project_key_details.py | 6 ++-- .../api/endpoints/project_symbol_sources.py | 8 ++--- .../release_threshold_status_index.py | 2 +- .../api/endpoints/shared_group_details.py | 12 +++---- .../source_map_debug_blue_thunder_edition.py | 2 +- src/sentry/api/urls.py | 32 ++++++++--------- .../organization_alert_rule_activations.py | 2 +- .../organization_alert_rule_index.py | 4 +-- .../issues/endpoints/source_map_debug.py | 2 +- src/sentry/lang/native/sources.py | 4 +-- .../endpoints/project_rule_group_history.py | 2 +- .../history/endpoints/project_rule_stats.py | 2 +- src/sentry/scim/endpoints/members.py | 16 ++++----- src/sentry/scim/endpoints/teams.py | 16 +++++---- .../apidocs/endpoints/projects/test_dsyms.py | 2 +- .../endpoints/scim/test_group_details.py | 2 +- .../endpoints/scim/test_group_index.py | 2 +- .../endpoints/scim/test_member_details.py | 4 +-- .../endpoints/scim/test_member_index.py | 2 +- .../lang/java/test_plugin.py | 4 +-- .../sentry/api/endpoints/test_debug_files.py | 36 +++++++++---------- .../endpoints/test_project_artifact_lookup.py | 18 +++++----- .../api/endpoints/test_project_key_details.py | 30 ++++++++-------- tests/sentry/models/test_debugfile.py | 2 +- tests/sentry/profiles/test_task.py | 2 +- tests/symbolicator/test_minidump_full.py | 2 +- tests/symbolicator/test_payload_full.py | 2 +- tests/symbolicator/test_unreal_full.py | 2 +- 29 files changed, 123 insertions(+), 115 deletions(-) diff --git a/src/sentry/api/endpoints/organization_region.py b/src/sentry/api/endpoints/organization_region.py index 4dfe32b99bd173..e3a847cbb6cc3a 100644 --- a/src/sentry/api/endpoints/organization_region.py +++ b/src/sentry/api/endpoints/organization_region.py @@ -54,22 +54,28 @@ class OrganizationRegionEndpoint(Endpoint): permission_classes = (OrganizationRegionEndpointPermissions,) def convert_args( - self, request: Request, organization_slug: str | None = None, *args: Any, **kwargs: Any + self, + request: Request, + organization_id_or_slug: int | str | None = None, + *args: Any, + **kwargs: Any, ) -> tuple[tuple[Any, ...], dict[str, Any]]: - if not organization_slug: + if not organization_id_or_slug: raise ResourceDoesNotExist try: # We don't use the lookup since OrganizationMapping uses a BigIntField for organization_id instead of a ForeignKey if ( id_or_slug_path_params_enabled( - self.convert_args.__qualname__, str(organization_slug) + self.convert_args.__qualname__, str(organization_id_or_slug) ) - and str(organization_slug).isdecimal() + and str(organization_id_or_slug).isdecimal() 
): - org_mapping = OrganizationMapping.objects.get(organization_id=organization_slug) + org_mapping = OrganizationMapping.objects.get( + organization_id=organization_id_or_slug + ) else: - org_mapping = OrganizationMapping.objects.get(slug=organization_slug) + org_mapping = OrganizationMapping.objects.get(slug=organization_id_or_slug) except OrganizationMapping.DoesNotExist: raise ResourceDoesNotExist diff --git a/src/sentry/api/endpoints/project_key_details.py b/src/sentry/api/endpoints/project_key_details.py index 955f554875dbf8..79feec83a2c4b6 100644 --- a/src/sentry/api/endpoints/project_key_details.py +++ b/src/sentry/api/endpoints/project_key_details.py @@ -40,7 +40,7 @@ class ProjectKeyDetailsEndpoint(ProjectEndpoint): @extend_schema( operation_id="Retrieve a Client Key", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.key_id("The ID of the client key"), ], @@ -68,7 +68,7 @@ def get(self, request: Request, project, key_id) -> Response: @extend_schema( operation_id="Update a Client Key", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.key_id("The ID of the key to update."), ], @@ -181,7 +181,7 @@ def put(self, request: Request, project, key_id) -> Response: @extend_schema( operation_id="Delete a Client Key", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.key_id("The ID of the key to delete."), ], diff --git a/src/sentry/api/endpoints/project_symbol_sources.py b/src/sentry/api/endpoints/project_symbol_sources.py index 0ac69a67c4b003..2471d11b078aaa 100644 --- a/src/sentry/api/endpoints/project_symbol_sources.py +++ b/src/sentry/api/endpoints/project_symbol_sources.py @@ -243,7 +243,7 @@ class ProjectSymbolSourcesEndpoint(ProjectEndpoint): @extend_schema( operation_id="Retrieve a Project's Symbol Sources", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.source_id( "The ID of the source to look up. 
If this is not provided, all sources are returned.", @@ -277,7 +277,7 @@ def get(self, request: Request, project: Project) -> Response: @extend_schema( operation_id="Delete a Symbol Source from a Project", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.source_id("The ID of the source to delete.", True), ], @@ -310,7 +310,7 @@ def delete(self, request: Request, project: Project) -> Response: @extend_schema( operation_id="Add a Symbol Source to a Project", - parameters=[GlobalParams.ORG_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG], request=SourceSerializer, responses={ 201: REDACTED_SOURCE_SCHEMA, @@ -350,7 +350,7 @@ def post(self, request: Request, project: Project) -> Response: @extend_schema( operation_id="Update a Project's Symbol Source", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ProjectParams.source_id("The ID of the source to update.", True), ], diff --git a/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py b/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py index a32fae2a69e49e..11685b32a74379 100644 --- a/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py +++ b/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py @@ -97,7 +97,7 @@ class ReleaseThresholdStatusIndexEndpoint(OrganizationReleasesBaseEndpoint, Envi @extend_schema( operation_id="Retrieve Statuses of Release Thresholds (Alpha)", - parameters=[GlobalParams.ORG_SLUG, ReleaseThresholdStatusIndexSerializer], + parameters=[GlobalParams.ORG_ID_OR_SLUG, ReleaseThresholdStatusIndexSerializer], request=None, responses={ 200: inline_sentry_response_serializer( diff --git a/src/sentry/api/endpoints/shared_group_details.py b/src/sentry/api/endpoints/shared_group_details.py index c0bde793da6b06..05e493f4754a30 100644 --- a/src/sentry/api/endpoints/shared_group_details.py +++ b/src/sentry/api/endpoints/shared_group_details.py @@ -22,7 +22,7 @@ class SharedGroupDetailsEndpoint(Endpoint, EnvironmentMixin): def get( self, request: Request, - organization_slug: int | str | None = None, + organization_id_or_slug: int | str | None = None, share_id: str | None = None, ) -> Response: """ @@ -45,13 +45,13 @@ def get( except Group.DoesNotExist: raise ResourceDoesNotExist - # Checks if the organization_slug (eventually renamed to organization_id_or_slug) matches the group organization's id or slug - if organization_slug: - if str(organization_slug).isdecimal(): - if int(organization_slug) != group.organization.id: + # Checks if the organization_id_or_slug matches the group organization's id or slug + if organization_id_or_slug: + if str(organization_id_or_slug).isdecimal(): + if int(organization_id_or_slug) != group.organization.id: raise ResourceDoesNotExist else: - if organization_slug != group.organization.slug: + if organization_id_or_slug != group.organization.slug: raise ResourceDoesNotExist if group.organization.flags.disable_shared_issues: diff --git a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py index f8fe44f70f9092..07bf87f9bf646b 100644 --- a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py +++ b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py @@ -135,7 +135,7 @@ class 
SourceMapDebugBlueThunderEditionEndpoint(ProjectEndpoint): @extend_schema( operation_id="Get Debug Information Related to Source Maps for a Given Event", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, EventParams.EVENT_ID, ], diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index b5fc3d8ba93822..183342307085f7 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -1125,7 +1125,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ), # Alert Rules re_path( - r"^(?P[^\/]+)/alert-rules/$", + r"^(?P[^\/]+)/alert-rules/$", OrganizationAlertRuleIndexEndpoint.as_view(), name="sentry-api-0-organization-alert-rules", ), @@ -1140,7 +1140,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-alert-rule-details", ), re_path( - r"^(?P[^\/]+)/alert-rules/(?P[^\/]+)/activations/$", + r"^(?P[^\/]+)/alert-rules/(?P[^\/]+)/activations/$", OrganizationAlertRuleActivationsEndpoint.as_view(), name="sentry-api-0-organization-alert-rule-activations", ), @@ -1807,7 +1807,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ), # TODO: also integrate release threshold status into the releases response? re_path( - r"^(?P[^\/]+)/release-threshold-statuses/$", + r"^(?P[^\/]+)/release-threshold-statuses/$", ReleaseThresholdStatusIndexEndpoint.as_view(), name="sentry-api-0-organization-release-threshold-statuses", ), @@ -1998,7 +1998,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-request-project-creation", ), re_path( - r"^(?P[^\/]+)/scim/v2/", + r"^(?P[^\/]+)/scim/v2/", include( [ re_path( @@ -2132,7 +2132,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-prompts-activity", ), re_path( - r"^(?P[^\/]+)/region/$", + r"^(?P[^\/]+)/region/$", OrganizationRegionEndpoint.as_view(), name="sentry-api-0-organization-region", ), @@ -2255,12 +2255,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-event-owners", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/source-map-debug/$", + r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/source-map-debug/$", SourceMapDebugEndpoint.as_view(), name="sentry-api-0-event-source-map-debug", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/source-map-debug-blue-thunder-edition/$", + r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/source-map-debug-blue-thunder-edition/$", SourceMapDebugBlueThunderEditionEndpoint.as_view(), name="sentry-api-0-event-source-map-debug-blue-thunder-edition", ), @@ -2270,12 +2270,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-event-actionable-items", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/files/dsyms/$", + r"^(?P[^\/]+)/(?P[^\/]+)/files/dsyms/$", DebugFilesEndpoint.as_view(), name="sentry-api-0-dsym-files", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/files/source-maps/$", + r"^(?P[^\/]+)/(?P[^\/]+)/files/source-maps/$", SourceMapsEndpoint.as_view(), name="sentry-api-0-source-maps", ), @@ -2300,7 +2300,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-unknown-dsym-files", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/files/dsyms/associate/$", + r"^(?P[^\/]+)/(?P[^\/]+)/files/dsyms/associate/$", AssociateDSymFilesEndpoint.as_view(), name="sentry-api-0-associate-dsym-files", ), @@ -2344,7 +2344,7 @@ def 
create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-keys", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/keys/(?P[^\/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/keys/(?P[^\/]+)/$", ProjectKeyDetailsEndpoint.as_view(), name="sentry-api-0-project-key-details", ), @@ -2438,7 +2438,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-release-file-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/artifact-lookup/$", + r"^(?P[^\/]+)/(?P[^\/]+)/artifact-lookup/$", ProjectArtifactLookupEndpoint.as_view(), name="sentry-api-0-project-artifact-lookup", ), @@ -2518,12 +2518,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-rule-actions", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/rules/(?P[^\/]+)/group-history/$", + r"^(?P[^\/]+)/(?P[^\/]+)/rules/(?P[^\/]+)/group-history/$", ProjectRuleGroupHistoryIndexEndpoint.as_view(), name="sentry-api-0-project-rule-group-history-index", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/rules/(?P[^\/]+)/stats/$", + r"^(?P[^\/]+)/(?P[^\/]+)/rules/(?P[^\/]+)/stats/$", ProjectRuleStatsIndexEndpoint.as_view(), name="sentry-api-0-project-rule-stats-index", ), @@ -2538,7 +2538,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-stats", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/symbol-sources/$", + r"^(?P[^\/]+)/(?P[^\/]+)/symbol-sources/$", ProjectSymbolSourcesEndpoint.as_view(), name="sentry-api-0-project-symbol-sources", ), @@ -3171,7 +3171,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ), # TODO: include in the /organizations/ route tree + remove old dupe once hybrid cloud launches re_path( - r"^organizations/(?P[^\/]+)/shared/(?:issues|groups)/(?P[^\/]+)/$", + r"^organizations/(?P[^\/]+)/shared/(?:issues|groups)/(?P[^\/]+)/$", SharedGroupDetailsEndpoint.as_view(), name="sentry-api-0-organization-shared-group-details", ), diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_activations.py b/src/sentry/incidents/endpoints/organization_alert_rule_activations.py index c76358f111a391..d48985b4d67991 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_activations.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_activations.py @@ -28,7 +28,7 @@ class OrganizationAlertRuleActivationsEndpoint(OrganizationAlertRuleEndpoint): @extend_schema( operation_id="Retrieve activations for an AlertRule", - parameters=[GlobalParams.ORG_SLUG, MetricAlertParams.METRIC_RULE_ID], + parameters=[GlobalParams.ORG_ID_OR_SLUG, MetricAlertParams.METRIC_RULE_ID], responses={ 200: inline_sentry_response_serializer( "ListAlertRuleActivations", list[AlertRuleActivationsResponse] diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py index 15d4933e6c161a..1a20bee1f500eb 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py @@ -422,7 +422,7 @@ class OrganizationAlertRuleIndexEndpoint(OrganizationEndpoint, AlertRuleIndexMix @extend_schema( operation_id="List an Organization's Metric Alert Rules", - parameters=[GlobalParams.ORG_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG], request=None, responses={ 200: inline_sentry_response_serializer( @@ -449,7 +449,7 @@ def get(self, request: Request, organization) -> Response: @extend_schema( operation_id="Create a Metric Alert Rule 
for an Organization", - parameters=[GlobalParams.ORG_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG], request=OrganizationAlertRuleIndexPostSerializer, responses={ 201: AlertRuleSerializer, diff --git a/src/sentry/issues/endpoints/source_map_debug.py b/src/sentry/issues/endpoints/source_map_debug.py index bb634f066be5a6..8b0f5669b38867 100644 --- a/src/sentry/issues/endpoints/source_map_debug.py +++ b/src/sentry/issues/endpoints/source_map_debug.py @@ -38,7 +38,7 @@ class SourceMapDebugEndpoint(ProjectEndpoint): @extend_schema( operation_id="Debug Issues Related to Source Maps for a Given Event", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, EventParams.EVENT_ID, EventParams.FRAME_IDX, diff --git a/src/sentry/lang/native/sources.py b/src/sentry/lang/native/sources.py index 746986c709a654..5aa357ed79399b 100644 --- a/src/sentry/lang/native/sources.py +++ b/src/sentry/lang/native/sources.py @@ -231,7 +231,7 @@ def get_internal_source(project: Project): reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ), @@ -261,7 +261,7 @@ def get_internal_artifact_lookup_source_url(project: Project): reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ), diff --git a/src/sentry/rules/history/endpoints/project_rule_group_history.py b/src/sentry/rules/history/endpoints/project_rule_group_history.py index 2504ee662952da..63f5e634934f20 100644 --- a/src/sentry/rules/history/endpoints/project_rule_group_history.py +++ b/src/sentry/rules/history/endpoints/project_rule_group_history.py @@ -63,7 +63,7 @@ class ProjectRuleGroupHistoryIndexEndpoint(RuleEndpoint): @extend_schema( operation_id="Retrieve a Group Firing History for an Issue Alert", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, IssueAlertParams.ISSUE_RULE_ID, ], diff --git a/src/sentry/rules/history/endpoints/project_rule_stats.py b/src/sentry/rules/history/endpoints/project_rule_stats.py index da7a8189555f5f..c3db13b255319c 100644 --- a/src/sentry/rules/history/endpoints/project_rule_stats.py +++ b/src/sentry/rules/history/endpoints/project_rule_stats.py @@ -46,7 +46,7 @@ class ProjectRuleStatsIndexEndpoint(RuleEndpoint): @extend_schema( operation_id="Retrieve Firing Starts for an Issue Alert Rule for a Given Time Range.", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, IssueAlertParams.ISSUE_RULE_ID, ], diff --git a/src/sentry/scim/endpoints/members.py b/src/sentry/scim/endpoints/members.py index 1cc2188ae7e844..ca3410827cd1a0 100644 --- a/src/sentry/scim/endpoints/members.py +++ b/src/sentry/scim/endpoints/members.py @@ -176,14 +176,14 @@ class OrganizationSCIMMemberDetails(SCIMEndpoint, OrganizationMemberEndpoint): def convert_args( self, request: Request, - organization_slug: str, + organization_id_or_slug: int | str, member_id: str = "me", *args: Any, **kwargs: Any, ) -> tuple[Any, Any]: try: args, kwargs = super().convert_args( - request, organization_slug, member_id, *args, **kwargs + request, organization_id_or_slug, member_id, *args, **kwargs ) return args, kwargs except ResourceDoesNotExist: @@ -226,7 +226,7 @@ def _should_delete_member(self, operation): @extend_schema( operation_id="Query an 
Individual Organization Member", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the member to query."), ], request=None, @@ -253,7 +253,7 @@ def get(self, request: Request, organization, member) -> Response: @extend_schema( operation_id="Update an Organization Member's Attributes", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the member to update."), ], request=SCIMPatchRequestSerializer, @@ -305,7 +305,7 @@ def patch(self, request: Request, organization, member): @extend_schema( operation_id="Delete an Organization Member via SCIM", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the member to delete."), ], responses={ @@ -334,7 +334,7 @@ def delete(self, request: Request, organization, member) -> Response: @extend_schema( operation_id="Update an Organization Member's Attributes", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.member_id("The ID of the member to update."), ], request=inline_serializer( @@ -443,7 +443,7 @@ class OrganizationSCIMMemberIndex(SCIMEndpoint): @extend_schema( operation_id="List an Organization's SCIM Members", - parameters=[GlobalParams.ORG_SLUG, SCIMQueryParamSerializer], + parameters=[GlobalParams.ORG_ID_OR_SLUG, SCIMQueryParamSerializer], responses={ 200: inline_sentry_response_serializer( "SCIMListResponseEnvelopeSCIMMemberIndexResponse", SCIMListMembersResponse @@ -500,7 +500,7 @@ def on_results(results): @extend_schema( operation_id="Provision a New Organization Member", - parameters=[GlobalParams.ORG_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG], request=inline_serializer( name="SCIMMemberProvision", fields={ diff --git a/src/sentry/scim/endpoints/teams.py b/src/sentry/scim/endpoints/teams.py index 1343744dcb7b12..4b2da06e9ff27b 100644 --- a/src/sentry/scim/endpoints/teams.py +++ b/src/sentry/scim/endpoints/teams.py @@ -165,7 +165,7 @@ class OrganizationSCIMTeamIndex(SCIMEndpoint): @extend_schema( operation_id="List an Organization's Paginated Teams", - parameters=[GlobalParams.ORG_SLUG, SCIMQueryParamSerializer], + parameters=[GlobalParams.ORG_ID_OR_SLUG, SCIMQueryParamSerializer], request=None, responses={ 200: inline_sentry_response_serializer( @@ -212,7 +212,7 @@ def on_results(results): @extend_schema( operation_id="Provision a New Team", - parameters=[GlobalParams.ORG_SLUG], + parameters=[GlobalParams.ORG_ID_OR_SLUG], request=inline_serializer( name="SCIMTeamRequestBody", fields={ @@ -302,8 +302,10 @@ class OrganizationSCIMTeamDetails(SCIMEndpoint, TeamDetailsEndpoint): } permission_classes = (OrganizationSCIMTeamPermission,) - def convert_args(self, request: Request, organization_slug: str, team_id, *args, **kwargs): - args, kwargs = super().convert_args(request, organization_slug) + def convert_args( + self, request: Request, organization_id_or_slug: int | str, team_id, *args, **kwargs + ): + args, kwargs = super().convert_args(request, organization_id_or_slug) try: kwargs["team"] = self._get_team(kwargs["organization"], team_id) except Team.DoesNotExist: @@ -322,7 +324,7 @@ def _get_team(self, organization, team_id): @extend_schema( operation_id="Query an Individual Team", - parameters=[SCIMParams.TEAM_ID, GlobalParams.ORG_SLUG], + parameters=[SCIMParams.TEAM_ID, GlobalParams.ORG_ID_OR_SLUG], request=None, responses={ 200: TeamSCIMSerializer, @@ -403,7 +405,7 @@ def _rename_team_operation(self, request: Request, new_name, team): 
@extend_schema( operation_id="Update a Team's Attributes", - parameters=[GlobalParams.ORG_SLUG, SCIMParams.TEAM_ID], + parameters=[GlobalParams.ORG_ID_OR_SLUG, SCIMParams.TEAM_ID], request=SCIMTeamPatchRequestSerializer, responses={ 204: RESPONSE_SUCCESS, @@ -474,7 +476,7 @@ def patch(self, request: Request, organization, team): @extend_schema( operation_id="Delete an Individual Team", - parameters=[GlobalParams.ORG_SLUG, SCIMParams.TEAM_ID], + parameters=[GlobalParams.ORG_ID_OR_SLUG, SCIMParams.TEAM_ID], responses={ 204: RESPONSE_SUCCESS, 401: RESPONSE_UNAUTHORIZED, diff --git a/tests/apidocs/endpoints/projects/test_dsyms.py b/tests/apidocs/endpoints/projects/test_dsyms.py index 0d67ad1c415422..dd51cfba5e769c 100644 --- a/tests/apidocs/endpoints/projects/test_dsyms.py +++ b/tests/apidocs/endpoints/projects/test_dsyms.py @@ -13,7 +13,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/apidocs/endpoints/scim/test_group_details.py b/tests/apidocs/endpoints/scim/test_group_details.py index ba1586398768b5..cd06027be2e5a8 100644 --- a/tests/apidocs/endpoints/scim/test_group_details.py +++ b/tests/apidocs/endpoints/scim/test_group_details.py @@ -15,7 +15,7 @@ def setUp(self): ) self.url = reverse( "sentry-api-0-organization-scim-team-details", - kwargs={"organization_slug": self.organization.slug, "team_id": self.team.id}, + kwargs={"organization_id_or_slug": self.organization.slug, "team_id": self.team.id}, ) def test_get(self): diff --git a/tests/apidocs/endpoints/scim/test_group_index.py b/tests/apidocs/endpoints/scim/test_group_index.py index 13ebbf1b836e2d..523e7567374180 100644 --- a/tests/apidocs/endpoints/scim/test_group_index.py +++ b/tests/apidocs/endpoints/scim/test_group_index.py @@ -12,7 +12,7 @@ def setUp(self): self.team = self.create_team(organization=self.organization, members=[self.user]) self.url = reverse( "sentry-api-0-organization-scim-team-index", - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) def test_get(self): diff --git a/tests/apidocs/endpoints/scim/test_member_details.py b/tests/apidocs/endpoints/scim/test_member_details.py index 586f376aa5c54a..182513491f6167 100644 --- a/tests/apidocs/endpoints/scim/test_member_details.py +++ b/tests/apidocs/endpoints/scim/test_member_details.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-scim-member-details", - kwargs={"organization_slug": self.organization.slug, "member_id": self.member.id}, + kwargs={"organization_id_or_slug": self.organization.slug, "member_id": self.member.id}, ) def test_get(self): @@ -28,7 +28,7 @@ def test_delete(self): def test_get_invalid(self): url = reverse( "sentry-api-0-organization-scim-member-details", - kwargs={"organization_slug": self.organization.slug, "member_id": 321}, + kwargs={"organization_id_or_slug": self.organization.slug, "member_id": 321}, ) response = self.client.get(url) assert response.status_code == 404 diff --git a/tests/apidocs/endpoints/scim/test_member_index.py b/tests/apidocs/endpoints/scim/test_member_index.py index 106fe6444ffa9c..3f1ce6e1cc0fd8 100644 --- a/tests/apidocs/endpoints/scim/test_member_index.py +++ b/tests/apidocs/endpoints/scim/test_member_index.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-scim-member-index", - 
kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) def test_get(self): diff --git a/tests/relay_integration/lang/java/test_plugin.py b/tests/relay_integration/lang/java/test_plugin.py index 945c1728523fa0..3def190f00c928 100644 --- a/tests/relay_integration/lang/java/test_plugin.py +++ b/tests/relay_integration/lang/java/test_plugin.py @@ -406,7 +406,7 @@ def upload_proguard_mapping(self, uuid, mapping_file_content): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -937,7 +937,7 @@ def test_error_on_resolving(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/sentry/api/endpoints/test_debug_files.py b/tests/sentry/api/endpoints/test_debug_files.py index 406a4b0b63a0a5..1349778cdae32f 100644 --- a/tests/sentry/api/endpoints/test_debug_files.py +++ b/tests/sentry/api/endpoints/test_debug_files.py @@ -46,7 +46,7 @@ def test_simple_proguard_upload(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -70,7 +70,7 @@ def test_associate_proguard_dsym(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -91,7 +91,7 @@ def test_associate_proguard_dsym(self): url = reverse( "sentry-api-0-associate-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -119,7 +119,7 @@ def test_associate_proguard_dsym_no_build(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -140,7 +140,7 @@ def test_associate_proguard_dsym_no_build(self): url = reverse( "sentry-api-0-associate-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -167,7 +167,7 @@ def test_dsyms_requests(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -182,7 +182,7 @@ def test_dsyms_requests(self): url = reverse( "sentry-api-0-associate-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -207,7 +207,7 @@ def test_dsyms_requests(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -283,7 +283,7 @@ def test_dsyms_search(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) 
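The effect of these kwarg renames, in one illustrative sketch (not part of the patch): with `organization_id_or_slug` in the route, the same endpoint resolves from either an organization id or a slug. The values below are made up; the `int | str` typing matches the `convert_args` signature change above.

# Illustrative only: either an organization id or a slug satisfies the
# renamed "organization_id_or_slug" route kwarg (typed int | str).
from django.urls import reverse

url_by_slug = reverse(
    "sentry-api-0-dsym-files",
    kwargs={"organization_id_or_slug": "my-org", "project_id_or_slug": "my-project"},
)
url_by_id = reverse(
    "sentry-api-0-dsym-files",
    kwargs={"organization_id_or_slug": 42, "project_id_or_slug": "my-project"},
)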
@@ -318,7 +318,7 @@ def test_dsyms_delete_as_team_admin(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -330,7 +330,7 @@ def test_dsyms_delete_as_team_admin(self): url = reverse( "sentry-api-0-associate-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -350,7 +350,7 @@ def test_dsyms_delete_as_team_admin(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -424,7 +424,7 @@ def test_source_maps(self): url = reverse( "sentry-api-0-source-maps", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -469,7 +469,7 @@ def test_source_maps_sorting(self): url = reverse( "sentry-api-0-source-maps", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -507,7 +507,7 @@ def test_source_maps_delete_archive(self): url = reverse( "sentry-api-0-source-maps", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -529,7 +529,7 @@ def test_source_maps_release_archive(self): url = reverse( "sentry-api-0-source-maps", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -548,7 +548,7 @@ def test_access_control(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -577,7 +577,7 @@ def test_access_control(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": other_org.slug, + "organization_id_or_slug": other_org.slug, "project_id_or_slug": other_project.slug, }, ) diff --git a/tests/sentry/api/endpoints/test_project_artifact_lookup.py b/tests/sentry/api/endpoints/test_project_artifact_lookup.py index b71c80d41a38b5..42d75984585f8b 100644 --- a/tests/sentry/api/endpoints/test_project_artifact_lookup.py +++ b/tests/sentry/api/endpoints/test_project_artifact_lookup.py @@ -142,7 +142,7 @@ def test_query_by_debug_ids(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -207,7 +207,7 @@ def test_query_by_url(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -247,7 +247,7 @@ def test_query_by_url_from_releasefiles(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -268,7 +268,7 @@ def test_query_by_url_from_legacy_bundle(self): url = 
reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -358,7 +358,7 @@ def test_query_by_url_and_dist_from_legacy_bundle(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -506,7 +506,7 @@ def test_renewal_with_debug_id(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -577,7 +577,7 @@ def test_renewal_with_url(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -637,7 +637,7 @@ def test_access_control(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -659,7 +659,7 @@ def test_access_control(self): url = reverse( "sentry-api-0-project-artifact-lookup", kwargs={ - "organization_slug": other_org.slug, + "organization_id_or_slug": other_org.slug, "project_id_or_slug": other_project.slug, }, ) diff --git a/tests/sentry/api/endpoints/test_project_key_details.py b/tests/sentry/api/endpoints/test_project_key_details.py index 662ba503ca9298..a8132e95b342c0 100644 --- a/tests/sentry/api/endpoints/test_project_key_details.py +++ b/tests/sentry/api/endpoints/test_project_key_details.py @@ -20,7 +20,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -37,7 +37,7 @@ def test_no_rate_limit(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -55,7 +55,7 @@ def test_unset_rate_limit(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -73,7 +73,7 @@ def test_remove_rate_limit(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -93,7 +93,7 @@ def test_simple_rate_limit(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -114,7 +114,7 @@ def test_rate_limit_change_data(self, mock_create_audit_entry): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, 
"project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -137,7 +137,7 @@ def test_deactivate(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -155,7 +155,7 @@ def test_default_browser_sdk_version(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -172,7 +172,7 @@ def test_set_browser_sdk_version(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -189,7 +189,7 @@ def test_default_dynamic_sdk_loader_options(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -210,7 +210,7 @@ def test_dynamic_sdk_loader_options(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -308,7 +308,7 @@ def test_use_case(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -329,7 +329,7 @@ def test_cannot_upgrade_to_internal(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -353,7 +353,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, @@ -370,7 +370,7 @@ def test_use_case(self): url = reverse( "sentry-api-0-project-key-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key_id": key.public_key, }, diff --git a/tests/sentry/models/test_debugfile.py b/tests/sentry/models/test_debugfile.py index 140bdf01d400e0..471a2dbbb5f000 100644 --- a/tests/sentry/models/test_debugfile.py +++ b/tests/sentry/models/test_debugfile.py @@ -241,7 +241,7 @@ def test_simple_cache_clear(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) diff --git a/tests/sentry/profiles/test_task.py b/tests/sentry/profiles/test_task.py index 2a646e57371378..36873137708230 100644 --- a/tests/sentry/profiles/test_task.py +++ b/tests/sentry/profiles/test_task.py @@ -659,7 +659,7 @@ def upload_proguard_mapping(self, uuid, mapping_file_content): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": 
self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/symbolicator/test_minidump_full.py b/tests/symbolicator/test_minidump_full.py index a12f59c51a1472..8d594d70cd67aa 100644 --- a/tests/symbolicator/test_minidump_full.py +++ b/tests/symbolicator/test_minidump_full.py @@ -46,7 +46,7 @@ def upload_symbols(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/symbolicator/test_payload_full.py b/tests/symbolicator/test_payload_full.py index fb6c2135d26b70..144f5c4f82e84f 100644 --- a/tests/symbolicator/test_payload_full.py +++ b/tests/symbolicator/test_payload_full.py @@ -105,7 +105,7 @@ def test_real_resolving(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/symbolicator/test_unreal_full.py b/tests/symbolicator/test_unreal_full.py index 1c2af561dafb05..7da423a79b288b 100644 --- a/tests/symbolicator/test_unreal_full.py +++ b/tests/symbolicator/test_unreal_full.py @@ -51,7 +51,7 @@ def upload_symbols(self): url = reverse( "sentry-api-0-dsym-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, }, ) From 121e1f3bd55582fa34ce4a2582246e4f312d720a Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Fri, 10 May 2024 14:17:00 -0700 Subject: [PATCH 301/376] feat(flagpole): Adds new options for flagpole migration checks (#70684) Renames existing flagpole options under new namespace. --- src/sentry/options/defaults.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 9114bbd1af8901..d15c34554f0dfe 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1791,8 +1791,11 @@ register("hybrid_cloud.disable_tombstone_cleanup", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE) # Flagpole Rollout -register("features", default={}, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("hybrid_cloud.flagpole_rollout_phase", default=0, flags=FLAG_AUTOMATOR_MODIFIABLE) +register("flagpole_features", default={}, flags=FLAG_AUTOMATOR_MODIFIABLE) +register("flagpole.rollout_phase", default=0, flags=FLAG_AUTOMATOR_MODIFIABLE) +register("flagpole.flagpole_only_features", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) +register("flagpole.feature_compare_list", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) + # Retry controls register("hybridcloud.regionsiloclient.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) From 8c9fbbdd2f86bd74bdc30cdc508d9f133b3e93d1 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Fri, 10 May 2024 14:21:42 -0700 Subject: [PATCH 302/376] ref(assignee-dropdown): Add optional trigger prop to assignee selector dropdown (#70686) This PR adds an optional trigger prop to the newly created `AssigneeSelectorDropdown` component, and adds comments to all props.
The default trigger is what is currently on the Issue Stream page. This will be needed for an upcoming change that swaps out the deprecated assignee dropdown in the issue details page with `AssigneeSelectorDropdown`, since the trigger there is a little different --- .../components/assigneeSelectorDropdown.tsx | 35 ++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/static/app/components/assigneeSelectorDropdown.tsx b/static/app/components/assigneeSelectorDropdown.tsx index 1abab00e3a7d15..37afd4d5eea15d 100644 --- a/static/app/components/assigneeSelectorDropdown.tsx +++ b/static/app/components/assigneeSelectorDropdown.tsx @@ -56,13 +56,45 @@ type AssignableTeam = { }; export interface AssigneeSelectorDropdownProps { + /** + * The group (issue) that the assignee selector is for + * TODO: generalize this for alerts + */ group: Group; + /** + * If true, there will be a loading indicator in the menu header. + */ loading: boolean; + /** + * Optional list of members to populate the dropdown with. + */ memberList?: User[]; + /** + * If true, the chevron to open the dropdown will not be shown + */ noDropdown?: boolean; + /** + * Callback for when an assignee is selected from the dropdown. + * The parent component should update the group with the new assignee + * in this callback. + */ onAssign?: (assignedActor: AssignableEntity | null) => void; + /** + * Callback for when the assignee is cleared + */ onClear?: (clearedAssignee: User | Actor) => void; + /** + * Optional list of suggested owners of the group + */ owners?: Omit[]; + /** + * Optional trigger for the assignee selector. If nothing passed in, + * the default trigger will be used + */ + trigger?: ( + props: Omit, 'children'>, + isOpen: boolean + ) => React.ReactNode; } export function AssigneeAvatar({ @@ -167,6 +199,7 @@ export default function AssigneeSelectorDropdown({ onAssign, onClear, owners, + trigger, }: AssigneeSelectorDropdownProps) { const memberLists = useLegacyStore(MemberListStore); const sessionUser = ConfigStore.get('user'); @@ -506,7 +539,7 @@ export default function AssigneeSelectorDropdown({ size="sm" onChange={handleSelect} options={makeAllOptions()} - trigger={makeTrigger} + trigger={trigger ??
makeTrigger} menuFooter={makeFooterInviteButton()} /> From 38bd6d48e02e6cade9e23845d4fa223ab4b9b5fe Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Fri, 10 May 2024 15:18:04 -0700 Subject: [PATCH 303/376] feat(billing): Add new data categories (#70403) --- static/app/constants/index.tsx | 22 +++++++++++++++++++--- static/app/types/core.tsx | 5 ++++- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx index 24595fea3b249f..84abbde5cee0bf 100644 --- a/static/app/constants/index.tsx +++ b/static/app/constants/index.tsx @@ -310,6 +310,14 @@ export const DATA_CATEGORY_INFO = { titleName: t('Monitor Check-Ins'), uid: 10, }, + [DataCategoryExact.SPAN]: { + name: DataCategoryExact.SPAN, + apiName: 'span', + plural: 'spans', + displayName: 'spans', + titleName: t('Spans'), + uid: 12, + }, [DataCategoryExact.MONITOR_SEAT]: { name: DataCategoryExact.MONITOR_SEAT, apiName: 'monitorSeat', @@ -318,12 +326,20 @@ export const DATA_CATEGORY_INFO = { titleName: t('Cron Monitors'), uid: 13, }, + [DataCategoryExact.PROFILE_DURATION]: { + name: DataCategoryExact.PROFILE_DURATION, + apiName: 'profileSeconds', + plural: 'profileSeconds', + displayName: 'profile seconds', + titleName: t('Profile Seconds'), + uid: 17, + }, [DataCategoryExact.METRIC_SECOND]: { name: DataCategoryExact.METRIC_SECOND, apiName: 'metricSecond', - plural: 'metrics', - displayName: 'metrics', - titleName: t('Metrics'), + plural: 'metricSeconds', + displayName: 'metric hours', + titleName: t('Metrics Hours'), uid: 19, }, } as const satisfies Record; diff --git a/static/app/types/core.tsx b/static/app/types/core.tsx index 8077bee8f9f522..573dce5b44a6f9 100644 --- a/static/app/types/core.tsx +++ b/static/app/types/core.tsx @@ -67,7 +67,6 @@ export type Choice = [ export type Choices = Choice[]; /** - * @deprecated in favour of `DataCategoryExact` and `DATA_CATEGORY_INFO`. * This legacy type used plurals which will cause compatibility issues when categories * become more complex, e.g. processed transactions, session replays. Instead, access these values * with `DATA_CATEGORY_INFO[category].plural`, where category is the `DataCategoryExact` enum value. 
@@ -80,6 +79,8 @@ export enum DataCategory { PROFILES = 'profiles', REPLAYS = 'replays', MONITOR_SEATS = 'monitorSeats', + PROFILE_DURATION = 'profileDuration', + SPAN = 'span', METRIC_SECOND = 'metricSecond', } @@ -98,6 +99,8 @@ export enum DataCategoryExact { TRANSACTION_INDEXED = 'transaction_indexed', MONITOR = 'monitor', MONITOR_SEAT = 'monitorSeat', + PROFILE_DURATION = 'profileDuration', + SPAN = 'span', METRIC_SECOND = 'metricSecond', } From e0820a95f4a26293efcd2de551e54c5e86f45997 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Fri, 10 May 2024 15:53:58 -0700 Subject: [PATCH 304/376] fix(revenue): Add back metrics for stats page (#70695) --- static/app/constants/index.tsx | 12 ++++++++++++ static/app/types/core.tsx | 5 +++++ static/app/views/organizationStats/index.tsx | 4 ++-- .../app/views/organizationStats/usageChart/index.tsx | 6 +++--- static/app/views/organizationStats/usageStatsOrg.tsx | 6 +++--- .../app/views/organizationStats/usageStatsPerMin.tsx | 2 +- 6 files changed, 26 insertions(+), 9 deletions(-) diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx index 84abbde5cee0bf..28c9dd96311b13 100644 --- a/static/app/constants/index.tsx +++ b/static/app/constants/index.tsx @@ -334,6 +334,18 @@ export const DATA_CATEGORY_INFO = { titleName: t('Profile Seconds'), uid: 17, }, + /** + * Used to display metrics on the stats page + */ + [DataCategoryExact.METRICS]: { + name: DataCategoryExact.METRICS, + apiName: 'metrics', + plural: 'metrics', + displayName: 'metrics', + titleName: t('Metrics'), + // Metrics has no uid, is only used on stats page + uid: -1, + }, [DataCategoryExact.METRIC_SECOND]: { name: DataCategoryExact.METRIC_SECOND, apiName: 'metricSecond', diff --git a/static/app/types/core.tsx b/static/app/types/core.tsx index 573dce5b44a6f9..403ced5ded2276 100644 --- a/static/app/types/core.tsx +++ b/static/app/types/core.tsx @@ -101,6 +101,11 @@ export enum DataCategoryExact { MONITOR_SEAT = 'monitorSeat', PROFILE_DURATION = 'profileDuration', SPAN = 'span', + /** + * Metrics does not actually exist as a data category, but is used on the stats page. + * See metricSecond instead. 
+ */ + METRICS = 'metrics', METRIC_SECOND = 'metricSecond', } diff --git a/static/app/views/organizationStats/index.tsx b/static/app/views/organizationStats/index.tsx index 9bdfe772a5b916..6912f98afdd15f 100644 --- a/static/app/views/organizationStats/index.tsx +++ b/static/app/views/organizationStats/index.tsx @@ -263,7 +263,7 @@ export class OrganizationStats extends Component { if (opt.value === DATA_CATEGORY_INFO.replay.plural) { return organization.features.includes('session-replay'); } - if (opt.value === DATA_CATEGORY_INFO.metricSecond.plural) { + if (opt.value === DATA_CATEGORY_INFO.metrics.plural) { return hasMetricStats(organization); } return true; @@ -321,7 +321,7 @@ export class OrganizationStats extends Component { if (opt.value === DATA_CATEGORY_INFO.replay.plural) { return organization.features.includes('session-replay'); } - if (opt.value === DATA_CATEGORY_INFO.metricSecond.plural) { + if (opt.value === DATA_CATEGORY_INFO.metrics.plural) { return hasMetricStats(organization); } return true; diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx index 07129efb28bae4..5b0c2435081be6 100644 --- a/static/app/views/organizationStats/usageChart/index.tsx +++ b/static/app/views/organizationStats/usageChart/index.tsx @@ -89,8 +89,8 @@ export const CHART_OPTIONS_DATACATEGORY: CategoryOption[] = [ yAxisMinInterval: 100, }, { - label: DATA_CATEGORY_INFO.metricSecond.titleName, - value: DATA_CATEGORY_INFO.metricSecond.plural, + label: DATA_CATEGORY_INFO.metrics.titleName, + value: DATA_CATEGORY_INFO.metrics.plural, disabled: false, yAxisMinInterval: 100, }, @@ -360,7 +360,7 @@ function UsageChartBody({ const filteredOptions = useMemo(() => { return categoryOptions.filter(option => { - if (option.value !== DATA_CATEGORY_INFO.metricSecond.plural) { + if (option.value !== DATA_CATEGORY_INFO.metrics.plural) { return true; } return ( diff --git a/static/app/views/organizationStats/usageStatsOrg.tsx b/static/app/views/organizationStats/usageStatsOrg.tsx index 67bebd2054c5ea..66b1cd1d50450b 100644 --- a/static/app/views/organizationStats/usageStatsOrg.tsx +++ b/static/app/views/organizationStats/usageStatsOrg.tsx @@ -136,7 +136,7 @@ class UsageStatsOrganization< { query: { ...this.endpointQuery, - category: DATA_CATEGORY_INFO.metricSecond.apiName, + category: DATA_CATEGORY_INFO.metrics.apiName, groupBy: ['outcome'], }, }, @@ -159,7 +159,7 @@ class UsageStatsOrganization< ...group, by: { ...group.by, - category: DATA_CATEGORY_INFO.metricSecond.apiName, + category: DATA_CATEGORY_INFO.metrics.apiName, }, }; }); @@ -345,7 +345,7 @@ class UsageStatsOrganization< filtered: { title: tct('Filtered [dataCategory]', {dataCategory: dataCategoryName}), help: - dataCategory === DATA_CATEGORY_INFO.metricSecond.plural + dataCategory === DATA_CATEGORY_INFO.metrics.plural ? 
tct( 'Filtered metrics were blocked due to your disabled metrics [settings: settings]', { diff --git a/static/app/views/organizationStats/usageStatsPerMin.tsx b/static/app/views/organizationStats/usageStatsPerMin.tsx index 49cad934d6fd64..d3dd2efbb03cb9 100644 --- a/static/app/views/organizationStats/usageStatsPerMin.tsx +++ b/static/app/views/organizationStats/usageStatsPerMin.tsx @@ -78,7 +78,7 @@ function UsageStatsPerMin({dataCategory, organization, projectIds}: Props) { }; // Metrics stats ingestion is delayed, so we can't show this for metrics right now - if (dataCategory === DATA_CATEGORY_INFO.metricSecond.plural) { + if (dataCategory === DATA_CATEGORY_INFO.metrics.plural) { return null; } From 7b497901805a5cd1bd60ce8019c9b12616b55dd8 Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Fri, 10 May 2024 18:50:25 -0700 Subject: [PATCH 305/376] fix(feedback): set spam issues to ignored (#70701) spam should be ignored not resolved --- src/sentry/feedback/usecases/create_feedback.py | 2 +- tests/sentry/feedback/usecases/test_create_feedback.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/feedback/usecases/create_feedback.py b/src/sentry/feedback/usecases/create_feedback.py index 7347c8856adc82..74d3b5d64cf788 100644 --- a/src/sentry/feedback/usecases/create_feedback.py +++ b/src/sentry/feedback/usecases/create_feedback.py @@ -359,7 +359,7 @@ def auto_ignore_spam_feedbacks(project, issue_fingerprint): status_change=StatusChangeMessage( fingerprint=issue_fingerprint, project_id=project.id, - new_status=GroupStatus.RESOLVED, + new_status=GroupStatus.IGNORED, # we use ignored in the UI for the spam tab new_substatus=None, ), ) diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py index 27817576c26a3c..006f7950f0733d 100644 --- a/tests/sentry/feedback/usecases/test_create_feedback.py +++ b/tests/sentry/feedback/usecases/test_create_feedback.py @@ -546,7 +546,7 @@ def dummy_response(*args, **kwargs): mock_produce_occurrence_to_kafka.call_args_list[1] .kwargs["status_change"] .new_status - == GroupStatus.RESOLVED + == GroupStatus.IGNORED ) if not (expected_result and feature_flag): From bdb1880c5549034fc52d7b9eda4b121e21fddafb Mon Sep 17 00:00:00 2001 From: Josh Ferge Date: Fri, 10 May 2024 18:52:38 -0700 Subject: [PATCH 306/376] fix(feedback): use flag for spam actions instead of option (#70700) missed this in https://github.com/getsentry/sentry/pull/70676 --- src/sentry/tasks/post_process.py | 6 +++--- tests/sentry/tasks/test_post_process.py | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 817fc06f229684..aa05b4494ca032 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -13,7 +13,7 @@ from django.utils import timezone from google.api_core.exceptions import ServiceUnavailable -from sentry import features, options, projectoptions +from sentry import features, projectoptions from sentry.exceptions import PluginError from sentry.issues.grouptype import GroupCategory from sentry.issues.issue_occurrence import IssueOccurrence @@ -1307,8 +1307,8 @@ def should_postprocess_feedback(job: PostProcessJob) -> bool: if not hasattr(event, "occurrence") or event.occurrence is None: return False - if event.occurrence.evidence_data.get("is_spam") is True and options.get( - "feedback.spam-detection-actions" + if event.occurrence.evidence_data.get("is_spam") is True and 
features.has( + "organizations:user-feedback-spam-filter-actions", job["event"].project.organization ): metrics.incr("feedback.spam-detection-actions.dont-send-notification") return False diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 86c14410fc2ed1..cc9affb3876afd 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -2841,11 +2841,12 @@ def create_event( group_event.occurrence = occurrence return group_event - @override_options({"feedback.spam-detection-actions": True}) def call_post_process_group( self, is_new, is_regression, is_new_group_environment, event, cache_key=None ): - with self.feature(FeedbackGroup.build_post_process_group_feature_name()): + with self.feature(FeedbackGroup.build_post_process_group_feature_name()), self.feature( + "organizations:user-feedback-spam-filter-actions" + ): post_process_group( is_new=is_new, is_regression=is_regression, From bedf1ff007b9dd164f41e6162d97f534ecb42ee9 Mon Sep 17 00:00:00 2001 From: Valery Brobbey Date: Fri, 10 May 2024 19:44:35 -0700 Subject: [PATCH 307/376] feat(notifications): add notification settings for new products (#70622) --- src/sentry/notifications/defaults.py | 3 +++ src/sentry/notifications/types.py | 15 +++++++++++++++ .../api/endpoints/test_notification_defaults.py | 3 +++ 3 files changed, 21 insertions(+) diff --git a/src/sentry/notifications/defaults.py b/src/sentry/notifications/defaults.py index 1f5d8257c1484e..5972c4b52dc869 100644 --- a/src/sentry/notifications/defaults.py +++ b/src/sentry/notifications/defaults.py @@ -19,6 +19,9 @@ NotificationSettingEnum.QUOTA_ATTACHMENTS: NotificationSettingsOptionEnum.ALWAYS, NotificationSettingEnum.QUOTA_REPLAYS: NotificationSettingsOptionEnum.ALWAYS, NotificationSettingEnum.QUOTA_MONITOR_SEATS: NotificationSettingsOptionEnum.ALWAYS, + NotificationSettingEnum.QUOTA_SPANS: NotificationSettingsOptionEnum.ALWAYS, + NotificationSettingEnum.QUOTA_PROFILE_DURATION: NotificationSettingsOptionEnum.ALWAYS, + NotificationSettingEnum.QUOTA_METRIC_SECONDS: NotificationSettingsOptionEnum.ALWAYS, NotificationSettingEnum.QUOTA_WARNINGS: NotificationSettingsOptionEnum.ALWAYS, NotificationSettingEnum.QUOTA_SPEND_ALLOCATIONS: NotificationSettingsOptionEnum.ALWAYS, NotificationSettingEnum.QUOTA_THRESHOLDS: NotificationSettingsOptionEnum.ALWAYS, diff --git a/src/sentry/notifications/types.py b/src/sentry/notifications/types.py index cac86119053930..427bafed54bf8b 100644 --- a/src/sentry/notifications/types.py +++ b/src/sentry/notifications/types.py @@ -27,6 +27,9 @@ class NotificationSettingEnum(ValueEqualityEnum): QUOTA_ATTACHMENTS = "quotaAttachments" QUOTA_REPLAYS = "quotaReplays" QUOTA_MONITOR_SEATS = "quotaMonitorSeats" + QUOTA_SPANS = "quotaSpans" + QUOTA_PROFILE_DURATION = "quotaProfileDuration" + QUOTA_METRIC_SECONDS = "quotaMetricSeconds" QUOTA_SPEND_ALLOCATIONS = "quotaSpendAllocations" SPIKE_PROTECTION = "spikeProtection" MISSING_MEMBERS = "missingMembers" @@ -111,6 +114,18 @@ class UserOptionsSettingsKey(Enum): NotificationSettingsOptionEnum.ALWAYS, NotificationSettingsOptionEnum.NEVER, }, + NotificationSettingEnum.QUOTA_SPANS: { + NotificationSettingsOptionEnum.ALWAYS, + NotificationSettingsOptionEnum.NEVER, + }, + NotificationSettingEnum.QUOTA_PROFILE_DURATION: { + NotificationSettingsOptionEnum.ALWAYS, + NotificationSettingsOptionEnum.NEVER, + }, + NotificationSettingEnum.QUOTA_METRIC_SECONDS: { + NotificationSettingsOptionEnum.ALWAYS, + NotificationSettingsOptionEnum.NEVER, + }, 
NotificationSettingEnum.QUOTA_WARNINGS: { NotificationSettingsOptionEnum.ALWAYS, NotificationSettingsOptionEnum.NEVER, diff --git a/tests/sentry/api/endpoints/test_notification_defaults.py b/tests/sentry/api/endpoints/test_notification_defaults.py index d9dda114b670b7..a6ec13089ef0e1 100644 --- a/tests/sentry/api/endpoints/test_notification_defaults.py +++ b/tests/sentry/api/endpoints/test_notification_defaults.py @@ -25,6 +25,9 @@ def test_basic(self): "quotaTransactions": "always", "quotaWarnings": "always", "quotaMonitorSeats": "always", + "quotaSpans": "always", + "quotaProfileDuration": "always", + "quotaMetricSeconds": "always", "reports": "always", "spikeProtection": "always", "workflow": "subscribe_only", From 72878c6dd2ff64b25cc69b7ade8a57eef4f16bb2 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Sat, 11 May 2024 15:32:23 -0400 Subject: [PATCH 308/376] fix(sidebar): fix quick start on mobile view (#70140) --- static/app/components/sidebar/onboardingStatus.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/static/app/components/sidebar/onboardingStatus.tsx b/static/app/components/sidebar/onboardingStatus.tsx index 559df28ec4015c..4d1f7cde984a19 100644 --- a/static/app/components/sidebar/onboardingStatus.tsx +++ b/static/app/components/sidebar/onboardingStatus.tsx @@ -11,6 +11,7 @@ import ProgressRing, { RingBar, RingText, } from 'sentry/components/progressRing'; +import {ExpandedContext} from 'sentry/components/sidebar/expandedContextProvider'; import {isDone} from 'sentry/components/sidebar/utils'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -46,6 +47,7 @@ export default function OnboardingStatus({ }; const onboardingContext = useContext(OnboardingContext); const {projects} = useProjects(); + const {shouldAccordionFloat} = useContext(ExpandedContext); if (!org.features?.includes('onboarding')) { return null; @@ -98,7 +100,7 @@ export default function OnboardingStatus({ size={38} barWidth={6} /> - {!collapsed && ( + {!shouldAccordionFloat && (
{label} From 7f0f0ff30c1e95865ac5c8cd821cb5c875096ba6 Mon Sep 17 00:00:00 2001 From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com> Date: Sun, 12 May 2024 20:27:08 -0400 Subject: [PATCH 310/376] feat(new-trace): Using new ui for all non-txn/span row types in traceview (#70533) Errors: [screenshot] Autogrouped: [screenshot] Co-authored-by: Abdullah Khan --- .../traceDrawer/details/error.tsx | 94 +++++++++---------- .../details/missingInstrumentation.tsx | 94 +++++++++++-------- .../traceDrawer/details/noData.tsx | 28 +++--- .../traceDrawer/details/parentAutogroup.tsx | 58 +++++++----- .../traceDrawer/details/siblingAutogroup.tsx | 60 +++++++----- .../details/span/sections/generalInfo.tsx | 4 +- .../traceDrawer/details/styles.tsx | 41 +++++--- .../traceDrawer/details/transaction/index.tsx | 7 +- .../transaction/sections/generalInfo.tsx | 2 +- .../details/transaction/sections/tags.tsx | 36 ------- 10 files changed, 218 insertions(+), 206 deletions(-) delete mode 100644 static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/tags.tsx diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/error.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/error.tsx index 0f6b6d573130f9..d6026e1bcdfb75 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/error.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/error.tsx @@ -3,7 +3,6 @@ import {useTheme} from '@emotion/react'; import styled from '@emotion/styled'; import {Button} from 'sentry/components/button'; -import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton'; import { getStacktrace, StackTracePreviewContent, @@ -13,11 +12,9 @@ import {generateIssueEventTarget} from 'sentry/components/quickTrace/utils'; import {t} from 'sentry/locale'; import type {EventError} from 'sentry/types'; import {useApiQuery} from 'sentry/utils/queryClient'; -import {useLocation} from 'sentry/utils/useLocation'; import {TraceIcons} from 'sentry/views/performance/newTraceDetails/icons'; import type {TraceTreeNodeDetailsProps} from 'sentry/views/performance/newTraceDetails/traceDrawer/tabs/traceTreeNodeDetails';
import {getTraceTabTitle} from 'sentry/views/performance/newTraceDetails/traceState/traceTabs'; -import {Row, Tags} from 'sentry/views/performance/traceDetails/styles'; import { makeTraceNodeBarColor, @@ -26,7 +23,7 @@ import { } from '../../traceModels/traceTree'; import {IssueList} from './issues/issues'; -import {TraceDrawerComponents} from './styles'; +import {type SectionCardKeyValueList, TraceDrawerComponents} from './styles'; export function ErrorNodeDetails({ node, @@ -34,7 +31,6 @@ export function ErrorNodeDetails({ onTabScrollToNode, onParentClick, }: TraceTreeNodeDetailsProps>) { - const location = useLocation(); const issues = useMemo(() => { return [...node.errors]; }, [node.errors]); @@ -59,6 +55,26 @@ export function ErrorNodeDetails({ const theme = useTheme(); const parentTransaction = node.parent_transaction; + const items: SectionCardKeyValueList = [ + { + key: 'title', + subject: t('Title'), + value: , + }, + ]; + + if (parentTransaction) { + items.push({ + key: 'parent_transaction', + subject: t('Parent Transaction'), + value: ( + onParentClick(parentTransaction)}> + {getTraceTabTitle(parentTransaction)} + + ), + }); + } + return isLoading ? ( ) : data ? ( @@ -92,44 +108,30 @@ export function ErrorNodeDetails({ - - - {stackTrace ? ( - - {t('Stack Trace')} - - - - - ) : ( - - {t('No stack trace has been reported with this error')} - - )} - - } - > - {node.value.title} - - {parentTransaction ? ( - - - onParentClick(parentTransaction)}> - {getTraceTabTitle(parentTransaction)} - - - - ) : null} - - + + + + ) : ( + t('No stack trace has been reported with this error') + ), + }, + ]} + title={t('Stack Trace')} + /> + + + + ) : null; } @@ -140,9 +142,3 @@ const StackTraceWrapper = styled('td')` border: 0; } `; - -const StackTraceTitle = styled('td')` - font-weight: 600; - font-size: 13px; - width: 175px; -`; diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/missingInstrumentation.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/missingInstrumentation.tsx index 8b58b54aef3df3..b8eeb18d16c2ad 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/missingInstrumentation.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/missingInstrumentation.tsx @@ -1,14 +1,13 @@ import {useTheme} from '@emotion/react'; -import {TransactionToProfileButton} from 'sentry/components/profiling/transactionToProfileButton'; import {IconSpan} from 'sentry/icons'; import {t} from 'sentry/locale'; import getDuration from 'sentry/utils/duration/getDuration'; +import {generateProfileFlamechartRouteWithQuery} from 'sentry/utils/profiling/routes'; import useProjects from 'sentry/utils/useProjects'; import {ProfilePreview} from 'sentry/views/performance/newTraceDetails/traceDrawer/details/profiling/profilePreview'; import type {TraceTreeNodeDetailsProps} from 'sentry/views/performance/newTraceDetails/traceDrawer/tabs/traceTreeNodeDetails'; import {getTraceTabTitle} from 'sentry/views/performance/newTraceDetails/traceState/traceTabs'; -import {Row} from 'sentry/views/performance/traceDetails/styles'; import {ProfileGroupProvider} from 'sentry/views/profiling/profileGroupProvider'; import {ProfileContext, ProfilesProvider} from 'sentry/views/profiling/profilesProvider'; @@ -17,7 +16,7 @@ import { type MissingInstrumentationNode, } from '../../traceModels/traceTree'; -import {TraceDrawerComponents} from './styles'; +import {type SectionCardKeyValueList, TraceDrawerComponents} from './styles'; export function 
MissingInstrumentationNodeDetails({ node, @@ -33,6 +32,54 @@ export function MissingInstrumentationNodeDetails({ const project = projects.find(proj => proj.slug === event?.projectSlug); const profileId = event?.contexts?.profile?.profile_id ?? null; + const items: SectionCardKeyValueList = [ + { + key: 'duration', + subject: t('Duration'), + value: getDuration(node.value.timestamp - node.value.start_timestamp, 2, true), + }, + { + key: 'previous_span', + subject: t('Previous Span'), + value: `${node.previous.value.op} - ${node.previous.value.description}`, + }, + { + key: 'next_span', + subject: t('Next Span'), + value: `${node.next.value.op} - ${node.next.value.description}`, + }, + ]; + + if (profileId && project?.slug) { + items.push({ + key: 'profile_id', + subject: 'Profile ID', + value: ( + + ), + }); + } + + if (parentTransaction) { + items.push({ + key: 'parent_transaction', + subject: t('Parent Transaction'), + value: ( + onParentClick(parentTransaction)}> + {getTraceTabTitle(parentTransaction)} + + ), + }); + } + return ( @@ -52,6 +99,7 @@ export function MissingInstrumentationNodeDetails({ onTabScrollToNode={onTabScrollToNode} /> + {event.projectSlug ? ( ) : null} - - - {parentTransaction ? ( - - - onParentClick(parentTransaction)}> - {getTraceTabTitle(parentTransaction)} - - - - ) : null} - - {getDuration(node.value.timestamp - node.value.start_timestamp, 2, true)} - - {profileId && project?.slug && ( - - {t('View Profile')} - - } - > - {profileId} - - )} - - {node.previous.value.op} - {node.previous.value.description} - - - {node.next.value.op} - {node.next.value.description} - - - + + ); } diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/noData.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/noData.tsx index 82e4959f8ef386..13efb47cab068b 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/noData.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/noData.tsx @@ -4,17 +4,30 @@ import {useTheme} from '@emotion/react'; import useFeedbackWidget from 'sentry/components/feedback/widget/useFeedbackWidget'; import {IconGroup} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; -import {TraceDrawerComponents} from 'sentry/views/performance/newTraceDetails/traceDrawer/details/styles'; +import { + type SectionCardKeyValueList, + TraceDrawerComponents, +} from 'sentry/views/performance/newTraceDetails/traceDrawer/details/styles'; import type {TraceTreeNodeDetailsProps} from 'sentry/views/performance/newTraceDetails/traceDrawer/tabs/traceTreeNodeDetails'; import { makeTraceNodeBarColor, type NoDataNode, } from 'sentry/views/performance/newTraceDetails/traceModels/traceTree'; -import {Row} from 'sentry/views/performance/traceDetails/styles'; export function NoDataDetails(props: TraceTreeNodeDetailsProps) { const theme = useTheme(); + const items: SectionCardKeyValueList = [ + { + key: 'data_quality', + subject: t('Data quality'), + value: tct( + 'The cause of missing data could be misconfiguration or lack of instrumentation. Send us [feedback] if you are having trouble figuring this out.', + {feedback: } + ), + }, + ]; + return ( @@ -34,16 +47,7 @@ export function NoDataDetails(props: TraceTreeNodeDetailsProps) { /> - - - - {tct( - 'The cause of missing data could be misconfiguration or lack of instrumentation. 
Send us [feedback] if you are having trouble figuring this out.', - {feedback: } - )} - - - + ); } diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/parentAutogroup.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/parentAutogroup.tsx index c5ba912d6f65c7..907ad2c256af21 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/parentAutogroup.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/parentAutogroup.tsx @@ -5,7 +5,6 @@ import {IconGroup} from 'sentry/icons'; import {t} from 'sentry/locale'; import type {TraceTreeNodeDetailsProps} from 'sentry/views/performance/newTraceDetails/traceDrawer/tabs/traceTreeNodeDetails'; import {getTraceTabTitle} from 'sentry/views/performance/newTraceDetails/traceState/traceTabs'; -import {Row} from 'sentry/views/performance/traceDetails/styles'; import { makeTraceNodeBarColor, @@ -13,7 +12,7 @@ import { } from '../../traceModels/traceTree'; import {IssueList} from './issues/issues'; -import {TraceDrawerComponents} from './styles'; +import {type SectionCardKeyValueList, TraceDrawerComponents} from './styles'; export function ParentAutogroupNodeDetails({ node, @@ -28,6 +27,38 @@ export function ParentAutogroupNodeDetails({ const parentTransaction = node.parent_transaction; + const items: SectionCardKeyValueList = [ + { + key: 'grouping_logic', + subject: t('Grouping Logic'), + value: t( + 'Chain of immediate and only children spans with the same operation as their parent.' + ), + }, + { + key: 'group_count', + subject: t('Group Count'), + value: node.groupCount, + }, + { + key: 'grouping_key', + subject: t('Grouping Key'), + value: `${t('Span Operation')} : ${node.value.op}`, + }, + ]; + + if (parentTransaction) { + items.push({ + key: 'parent_transaction', + subject: t('Parent Transaction'), + value: ( + onParentClick(parentTransaction)}> + {getTraceTabTitle(parentTransaction)} + + ), + }); + } + return ( @@ -50,28 +81,7 @@ export function ParentAutogroupNodeDetails({ - - - {parentTransaction ? ( - - - onParentClick(parentTransaction)}> - {getTraceTabTitle(parentTransaction)} - - - - ) : null} - - {t( - 'Chain of immediate and only children spans with the same operation as their parent.' 
- )} - - {node.groupCount} - - {t('Span Operation')} : {node.value.op} - - - + ); } diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/siblingAutogroup.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/siblingAutogroup.tsx index c537bb7910edd9..7fe97c2a8d6239 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/siblingAutogroup.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/siblingAutogroup.tsx @@ -5,7 +5,6 @@ import {IconGroup} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import type {TraceTreeNodeDetailsProps} from 'sentry/views/performance/newTraceDetails/traceDrawer/tabs/traceTreeNodeDetails'; import {getTraceTabTitle} from 'sentry/views/performance/newTraceDetails/traceState/traceTabs'; -import {Row} from 'sentry/views/performance/traceDetails/styles'; import { makeTraceNodeBarColor, @@ -13,7 +12,7 @@ import { } from '../../traceModels/traceTree'; import {IssueList} from './issues/issues'; -import {TraceDrawerComponents} from './styles'; +import {type SectionCardKeyValueList, TraceDrawerComponents} from './styles'; export function SiblingAutogroupNodeDetails({ node, @@ -28,6 +27,39 @@ export function SiblingAutogroupNodeDetails({ const parentTransaction = node.parent_transaction; + const items: SectionCardKeyValueList = [ + { + key: 'grouping_logic', + subject: t('Grouping Logic'), + value: t('5 or more sibling spans with the same operation and description.'), + }, + { + key: 'group_count', + subject: t('Group Count'), + value: node.groupCount, + }, + { + key: 'grouping_key', + subject: t('Grouping Key'), + value: tct('Span operation: [operation] and description: [description]', { + operation: node.value.op, + description: node.value.description, + }), + }, + ]; + + if (parentTransaction) { + items.push({ + key: 'parent_transaction', + subject: t('Parent Transaction'), + value: ( + onParentClick(parentTransaction)}> + {getTraceTabTitle(parentTransaction)} + + ), + }); + } + return ( @@ -50,29 +82,7 @@ export function SiblingAutogroupNodeDetails({ - - - {parentTransaction ? ( - - - onParentClick(parentTransaction)}> - {getTraceTabTitle(parentTransaction)} - - - - ) : null} - - {t('5 or more sibling spans with the same operation and description.')} - - {node.groupCount} - - {tct('Span operation: [operation] and description: [description]', { - operation: node.value.op, - description: node.value.description, - })} - - - + ); } diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/generalInfo.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/generalInfo.tsx index 0bda96d86f829d..0cae29b5af4383 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/generalInfo.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/span/sections/generalInfo.tsx @@ -80,7 +80,7 @@ export function GeneralInfo(props: GeneralnfoProps) { subject: t('Description'), value: span.op && span.hash ? 
( - ) : ( - + ), }); } diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx index 1ba85f773bb82a..193d1a9d670dba 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx @@ -15,9 +15,10 @@ import {Button} from 'sentry/components/button'; import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton'; import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; import {useIssueDetailsColumnCount} from 'sentry/components/events/eventTags/util'; +import NewTagsUI from 'sentry/components/events/eventTagsAndScreenshot/tags'; import {DataSection} from 'sentry/components/events/styles'; import FileSize from 'sentry/components/fileSize'; -import type {LazyRenderProps} from 'sentry/components/lazyRender'; +import {LazyRender, type LazyRenderProps} from 'sentry/components/lazyRender'; import Link from 'sentry/components/links/link'; import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse'; import Panel from 'sentry/components/panels/panel'; @@ -28,6 +29,7 @@ import {IconChevron, IconOpen} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {KeyValueListDataItem} from 'sentry/types'; +import type {Event} from 'sentry/types/event'; import type {Organization} from 'sentry/types/organization'; import {defined, formatBytesBase10} from 'sentry/utils'; import getDuration from 'sentry/utils/duration/getDuration'; @@ -508,6 +510,22 @@ const ActionsContainer = styled('div')` } `; +function EventTags({projectSlug, event}: {event: Event; projectSlug: string}) { + return ( + + + + + + ); +} + +const TagsWrapper = styled('div')` + h3 { + color: ${p => p.theme.textColor}; + } +`; + interface SectionCardContentConfig { disableErrors?: boolean; includeAliasInSubject?: boolean; @@ -631,7 +649,7 @@ const CardsColumn = styled('div')` grid-column: span 1; `; -function Description({ +function CardValueWithCopy({ value, linkTarget, linkText, @@ -641,27 +659,27 @@ function Description({ linkText?: string; }) { return ( - - + + {value} - - + + {linkTarget && linkTarget ? 
{linkText} : null} - + ); } -const StyledCopuToClipboardButton = styled(CopyToClipboardButton)` +const StyledCopyToClipboardButton = styled(CopyToClipboardButton)` transform: translateY(2px); `; -const DescriptionContainer = styled(FlexBox)` +const CardValueContainer = styled(FlexBox)` justify-content: space-between; gap: ${space(1)}; flex-wrap: wrap; `; -const DescriptionText = styled('span')` +const CardValueText = styled('span')` overflow-wrap: anywhere; `; @@ -731,7 +749,8 @@ const TraceDrawerComponents = { TableValueRow, IssuesLink, SectionCard, - Description, + CardValueWithCopy, + EventTags, SectionCardGroup, }; diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/index.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/index.tsx index e97b6e17477ef9..8f95084d517aab 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/index.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/index.tsx @@ -33,7 +33,6 @@ import {Measurements} from './sections/measurements'; import ReplayPreview from './sections/replayPreview'; import {Request} from './sections/request'; import {Sdk} from './sections/sdk'; -import {EventTags} from './sections/tags'; export const LAZY_RENDER_PROPS: Partial = { observerOptions: {rootMargin: '50px'}, @@ -149,11 +148,9 @@ export function TransactionNodeDetails({ /> ) : null} - diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/generalInfo.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/generalInfo.tsx index 690956a8a947bd..259b97b963e704 100644 --- a/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/generalInfo.tsx +++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/transaction/sections/generalInfo.tsx @@ -133,7 +133,7 @@ function GeneralInfo({ key: 'description', subject: t('Description'), value: ( - ; - organization: Organization; -}) { - return ( - - - - - - ); -} - -const TagsWrapper = styled('div')` - h3 { - color: ${p => p.theme.textColor}; - } -`; From 54b747341b9a859adbd945847ca04dec496be462 Mon Sep 17 00:00:00 2001 From: Simon Hellmayr Date: Mon, 13 May 2024 07:38:24 +0200 Subject: [PATCH 311/376] feat(metrics): enable querying of metric tag values with multiple project ids when using new meta tables (#70502) --- src/sentry/snuba/metrics_layer/query.py | 5 ++--- tests/snuba/test_metrics_layer.py | 28 +++++++++++++++++++++++-- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/src/sentry/snuba/metrics_layer/query.py b/src/sentry/snuba/metrics_layer/query.py index 30e0bbef1e15d4..c8a73f13476a40 100644 --- a/src/sentry/snuba/metrics_layer/query.py +++ b/src/sentry/snuba/metrics_layer/query.py @@ -634,7 +634,7 @@ def build_request(query: Query) -> Request: def fetch_metric_tag_values( org_id: int, - project_id: int, + project_ids: list[int], use_case_id: UseCaseID, mri: str, tag_key: str, @@ -662,7 +662,7 @@ def fetch_metric_tag_values( metric_id, tag_key_id = resolved conditions = [ - Condition(Column("project_id"), Op.EQ, project_id), + Condition(Column("project_id"), Op.IN, project_ids), Condition(Column("metric_id"), Op.EQ, metric_id), Condition(Column("tag_key"), Op.EQ, tag_key_id), Condition(Column("timestamp"), Op.GTE, datetime.now(UTC) - timedelta(days=90)), @@ -687,7 +687,6 @@ def fetch_metric_tag_values( query=tag_values_query, tenant_ids={ "organization_id": 
org_id, - "project_id": project_id, "referrer": "generic_metrics_meta_tag_values", }, ) diff --git a/tests/snuba/test_metrics_layer.py b/tests/snuba/test_metrics_layer.py index 537d097192c752..b8ea06dd602110 100644 --- a/tests/snuba/test_metrics_layer.py +++ b/tests/snuba/test_metrics_layer.py @@ -947,7 +947,7 @@ def test_fetch_metric_tag_keys(self) -> None: def test_fetch_metric_tag_values(self) -> None: tag_values = fetch_metric_tag_values( self.org_id, - self.project.id, + [self.project.id], UseCaseID.TRANSACTIONS, "g:transactions/test_gauge@none", "transaction", @@ -958,7 +958,7 @@ def test_fetch_metric_tag_values(self) -> None: def test_fetch_metric_tag_values_with_prefix(self) -> None: tag_values = fetch_metric_tag_values( self.org_id, - self.project.id, + [self.project.id], UseCaseID.TRANSACTIONS, "g:transactions/test_gauge@none", "status_code", @@ -966,3 +966,27 @@ def test_fetch_metric_tag_values_with_prefix(self) -> None: ) assert len(tag_values) == 1 assert tag_values == ["500"] + + def test_fetch_metric_tag_values_for_multiple_projects(self) -> None: + new_project = self.create_project(name="New Project") + self.store_metric( + self.org_id, + new_project.id, + "gauge", + "g:transactions/test_gauge@none", + {"status_code": "524"}, + self.ts(self.hour_ago + timedelta(minutes=10)), + 10, + UseCaseID.TRANSACTIONS, + ) + + tag_values = fetch_metric_tag_values( + self.org_id, + [self.project.id, new_project.id], + UseCaseID.TRANSACTIONS, + "g:transactions/test_gauge@none", + "status_code", + "5", + ) + assert len(tag_values) == 2 + assert tag_values == ["500", "524"] From d4f3c6a8e79d00551119596929cb11561bef8ba6 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 13 May 2024 10:42:06 +0200 Subject: [PATCH 312/376] perf(orjson): Remove unused options (#70133) The options * `integrations.slack.enable-orjson` * `auth.enable-orjson` * `backup.enable-orjson` * `event-manager.enable-orjson` * `eventstore.enable-orjson` * `flagpole.enable-orjson` introduced in https://github.com/getsentry/sentry/pull/69409 and https://github.com/getsentry/sentry/pull/69653 are already set to `1.0` on US. This removes them in favor of direct calls to `orjson`. It also removes some automatic fixtures that were only used to set those options to 0 in tests. 
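The change is mechanical at each call site: `json.loads_experimental("<option>", data)` becomes `orjson.loads(data)`, and `json.dumps_experimental("<option>", obj)` becomes `orjson.dumps(obj)` plus an explicit `.decode()` wherever the caller expects a `str`, since `orjson.dumps()` returns `bytes`. A minimal sketch of the pattern (illustrative only, not part of this diff; the `Point` namedtuple is a made-up example):

```python
import orjson
from collections import namedtuple

# orjson.dumps() returns bytes, so callers that need a str (Slack
# payloads, redis values, ...) gain an explicit .decode().
payload = orjson.dumps({"status": "pending"}).decode()
assert isinstance(payload, str)
assert orjson.loads(payload) == {"status": "pending"}

# orjson does not serialize tuple subclasses such as namedtuples on its
# own; they are routed through the `default` hook, which is why this
# patch adds the _serialize_named_tuple helper to idpmigration.py.
Point = namedtuple("Point", ["x", "y"])
encoded = orjson.dumps({"p": Point(1, 2)}, default=lambda o: list(o))
assert encoded == b'{"p":[1,2]}'
```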
--- src/flagpole/__init__.py | 6 ++--- src/sentry/auth/authenticators/u2f.py | 4 +-- src/sentry/auth/helper.py | 9 ++++--- src/sentry/auth/idpmigration.py | 16 +++++++---- src/sentry/auth/providers/fly/client.py | 4 +-- src/sentry/auth/providers/github/client.py | 4 +-- src/sentry/auth/providers/google/views.py | 4 +-- src/sentry/auth/providers/oauth2.py | 6 ++--- src/sentry/auth/superuser.py | 7 ++--- src/sentry/backup/crypto.py | 7 +++-- src/sentry/backup/exports.py | 5 ++-- src/sentry/backup/imports.py | 13 ++++----- src/sentry/event_manager.py | 10 +++---- src/sentry/eventstore/compressor.py | 6 ++--- src/sentry/eventstore/models.py | 4 +-- src/sentry/eventstore/reprocessing/redis.py | 9 +++---- .../slack/actions/notification.py | 16 +++++------ .../slack/message_builder/issues.py | 3 +-- .../message_builder/notifications/base.py | 9 +++---- .../notifications/daily_summary.py | 7 +---- .../slack/message_builder/prompt.py | 8 +++--- .../integrations/slack/notifications.py | 9 +++---- .../integrations/slack/requests/action.py | 27 ++++++------------- .../slack/requests/options_load.py | 15 +++-------- src/sentry/integrations/slack/service.py | 7 +++-- .../integrations/slack/utils/notifications.py | 4 +-- .../integrations/slack/utils/rule_status.py | 10 +++---- .../integrations/slack/webhooks/action.py | 25 +++++++---------- .../integrations/slack/webhooks/event.py | 8 +++--- .../slack/webhooks/options_load.py | 6 ++--- src/sentry/options/defaults.py | 1 + tests/flagpole/test_feature.py | 7 ----- .../sentry/auth/providers/fly/test_client.py | 7 ----- tests/sentry/event_manager/test_severity.py | 18 ++++++------- .../processing/test_redis_cluster.py | 9 ------- tests/sentry/eventstore/test_compressor.py | 9 ------- tests/sentry/runner/commands/test_backup.py | 3 ++- 37 files changed, 126 insertions(+), 196 deletions(-) diff --git a/src/flagpole/__init__.py b/src/flagpole/__init__.py index 1cb3b653223e3a..737b87354a6f87 100644 --- a/src/flagpole/__init__.py +++ b/src/flagpole/__init__.py @@ -64,11 +64,11 @@ from datetime import datetime from typing import Any +import orjson from pydantic import BaseModel, Field, ValidationError, constr from flagpole.conditions import Segment from flagpole.evaluation_context import ContextBuilder, EvaluationContext -from sentry.utils import json class InvalidFeatureFlagConfiguration(Exception): @@ -111,8 +111,8 @@ def from_feature_config_json( cls, name: str, config_json: str, context_builder: ContextBuilder | None = None ) -> Feature: try: - config_data_dict = json.loads_experimental("flagpole.enable-orjson", config_json) - except json.JSONDecodeError as decode_error: + config_data_dict = orjson.loads(config_json) + except orjson.JSONDecodeError as decode_error: raise InvalidFeatureFlagConfiguration("Invalid feature json provided") from decode_error if not isinstance(config_data_dict, dict): diff --git a/src/sentry/auth/authenticators/u2f.py b/src/sentry/auth/authenticators/u2f.py index 18947a2cf4e94b..827bdd105f8674 100644 --- a/src/sentry/auth/authenticators/u2f.py +++ b/src/sentry/auth/authenticators/u2f.py @@ -3,6 +3,7 @@ from time import time from urllib.parse import urlparse +import orjson from cryptography.exceptions import InvalidKey, InvalidSignature from django.http.request import HttpRequest from django.urls import reverse @@ -18,7 +19,6 @@ from sentry import options from sentry.auth.authenticators.base import EnrollmentStatus -from sentry.utils import json from sentry.utils.dates import to_datetime from sentry.utils.http import absolute_uri @@ 
-188,7 +188,7 @@ def get_registered_devices(self): return rv def try_enroll(self, enrollment_data, response_data, device_name=None, state=None): - data = json.loads_experimental("auth.enable-orjson", response_data) + data = orjson.loads(response_data) client_data = ClientData(websafe_decode(data["response"]["clientDataJSON"])) att_obj = base.AttestationObject(websafe_decode(data["response"]["attestationObject"])) binding = self.webauthn_registration_server.register_complete(state, client_data, att_obj) diff --git a/src/sentry/auth/helper.py b/src/sentry/auth/helper.py index f7d03882941509..b2153937339e71 100644 --- a/src/sentry/auth/helper.py +++ b/src/sentry/auth/helper.py @@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, cast from uuid import uuid4 +import orjson import sentry_sdk from django.conf import settings from django.contrib import messages @@ -51,7 +52,7 @@ ) from sentry.signals import sso_enabled, user_signup from sentry.tasks.auth import email_missing_links_control -from sentry.utils import auth, json, metrics +from sentry.utils import auth, metrics from sentry.utils.audit import create_audit_entry from sentry.utils.hashlib import md5_text from sentry.utils.http import absolute_uri @@ -433,9 +434,9 @@ def _post_login_redirect(self) -> HttpResponseRedirect: # add events that we can handle on the front end provider = self.auth_provider.provider if self.auth_provider else None params = { - "frontend_events": json.dumps_experimental( - "auth.enable-orjson", {"event_name": "Sign Up", "event_label": provider} - ) + "frontend_events": orjson.dumps( + {"event_name": "Sign Up", "event_label": provider} + ).decode() } url = add_params_to_url(url, params) response = HttpResponseRedirect(url) diff --git a/src/sentry/auth/idpmigration.py b/src/sentry/auth/idpmigration.py index fc263b1a797a5f..c4cd2909f7ef7f 100644 --- a/src/sentry/auth/idpmigration.py +++ b/src/sentry/auth/idpmigration.py @@ -4,6 +4,7 @@ from datetime import timedelta from typing import Any +import orjson from django.urls import reverse from rb.clients import LocalClient @@ -11,7 +12,7 @@ from sentry.models.authprovider import AuthProvider from sentry.models.user import User from sentry.services.hybrid_cloud.organization import RpcOrganization, organization_service -from sentry.utils import json, metrics, redis +from sentry.utils import metrics, redis from sentry.utils.email import MessageBuilder from sentry.utils.http import absolute_uri from sentry.utils.security import get_secure_token @@ -51,6 +52,13 @@ def get_redis_cluster() -> LocalClient: return redis.clusters.get("default").get_local_client_for_key(_REDIS_KEY) +# Helper function for serializing named tuples with orjson. 
+def _serialize_named_tuple(data: Any) -> Any: + if isinstance(data, tuple) and hasattr(data, "_asdict") and hasattr(data, "_fields"): + return list(data) + raise TypeError + + @dataclass class AccountConfirmLink: user: User @@ -105,7 +113,7 @@ def store_in_redis(self) -> None: cluster.setex( self.verification_key, int(_TTL.total_seconds()), - json.dumps_experimental("auth.enable-orjson", verification_value), + orjson.dumps(verification_value, default=_serialize_named_tuple).decode(), ) @@ -117,9 +125,7 @@ def get_verification_value_from_key(key: str) -> dict[str, Any] | None: metrics.incr("idpmigration.confirmation_failure", sample_rate=1.0) return None - verification_value: dict[str, Any] = json.loads_experimental( - "auth.enable-orjson", verification_str - ) + verification_value: dict[str, Any] = orjson.loads(verification_str) metrics.incr( "idpmigration.confirmation_success", tags={"provider": verification_value.get("provider")}, diff --git a/src/sentry/auth/providers/fly/client.py b/src/sentry/auth/providers/fly/client.py index 66cd77119ae2b8..cdc717cd0ab9a4 100644 --- a/src/sentry/auth/providers/fly/client.py +++ b/src/sentry/auth/providers/fly/client.py @@ -1,7 +1,7 @@ +import orjson from requests.exceptions import RequestException from sentry import http -from sentry.utils import json from .constants import ACCESS_TOKEN_URL @@ -35,7 +35,7 @@ def _request(self, path): raise FlyApiError(f"{e}", status=getattr(e, "status_code", 0)) if req.status_code < 200 or req.status_code >= 300: raise FlyApiError(req.content, status=req.status_code) - return json.loads_experimental("auth.enable-orjson", req.content) + return orjson.loads(req.content) def get_info(self): """ diff --git a/src/sentry/auth/providers/github/client.py b/src/sentry/auth/providers/github/client.py index f03f18cf35821a..b30af3d8444928 100644 --- a/src/sentry/auth/providers/github/client.py +++ b/src/sentry/auth/providers/github/client.py @@ -1,7 +1,7 @@ +import orjson from requests.exceptions import RequestException from sentry import http -from sentry.utils import json from .constants import API_DOMAIN @@ -35,7 +35,7 @@ def _request(self, path): raise GitHubApiError(f"{e}", status=getattr(e, "status_code", 0)) if req.status_code < 200 or req.status_code >= 300: raise GitHubApiError(req.content, status=req.status_code) - return json.loads_experimental("auth.enable-orjson", req.content) + return orjson.loads(req.content) def get_org_list(self): return self._request("/user/orgs") diff --git a/src/sentry/auth/providers/google/views.py b/src/sentry/auth/providers/google/views.py index 926939fcd59fa5..a39842450fd69d 100644 --- a/src/sentry/auth/providers/google/views.py +++ b/src/sentry/auth/providers/google/views.py @@ -1,10 +1,10 @@ import logging +import orjson from rest_framework.request import Request from rest_framework.response import Response from sentry.auth.view import AuthView, ConfigureView -from sentry.utils import json from sentry.utils.signing import urlsafe_b64decode from .constants import DOMAIN_BLOCKLIST, ERR_INVALID_DOMAIN, ERR_INVALID_RESPONSE @@ -34,7 +34,7 @@ def dispatch(self, request: Request, helper) -> Response: return helper.error(ERR_INVALID_RESPONSE) try: - payload = json.loads_experimental("auth.enable-orjson", payload) + payload = orjson.loads(payload) except Exception as exc: logger.exception("Unable to decode id_token payload: %s", exc) return helper.error(ERR_INVALID_RESPONSE) diff --git a/src/sentry/auth/providers/oauth2.py b/src/sentry/auth/providers/oauth2.py index 
e918dfec0471fd..c7d47af9b7df1d 100644 --- a/src/sentry/auth/providers/oauth2.py +++ b/src/sentry/auth/providers/oauth2.py @@ -6,6 +6,7 @@ from typing import Any from urllib.parse import parse_qsl, urlencode +import orjson from django.http import HttpResponse from rest_framework.request import Request @@ -13,7 +14,6 @@ from sentry.auth.provider import Provider from sentry.auth.view import AuthView from sentry.http import safe_urlopen, safe_urlread -from sentry.utils import json ERR_INVALID_STATE = "An error occurred while validating your request." @@ -93,7 +93,7 @@ def exchange_token(self, request: Request, helper, code): body = safe_urlread(req) if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): return dict(parse_qsl(body)) - return json.loads_experimental("auth.enable-orjson", body) + return orjson.loads(body) def dispatch(self, request: Request, helper) -> HttpResponse: error = request.GET.get("error") @@ -192,7 +192,7 @@ def refresh_identity(self, auth_identity): try: body = safe_urlread(req) - payload = json.loads_experimental("auth.enable-orjson", body) + payload = orjson.loads(body) except Exception: payload = {} diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py index 1525e76de55b67..6fd43cd6b1c068 100644 --- a/src/sentry/auth/superuser.py +++ b/src/sentry/auth/superuser.py @@ -16,6 +16,7 @@ from datetime import datetime, timedelta, timezone from typing import Any +import orjson from django.conf import settings from django.core.signing import BadSignature from django.http import HttpRequest @@ -29,7 +30,7 @@ from sentry.auth.elevated_mode import ElevatedMode, InactiveReason from sentry.auth.system import is_system_auth from sentry.services.hybrid_cloud.auth.model import RpcAuthState -from sentry.utils import json, metrics +from sentry.utils import metrics from sentry.utils.auth import has_completed_sso from sentry.utils.settings import is_self_hosted @@ -436,8 +437,8 @@ def enable_and_log_superuser_access(): else: try: # need to use json loads as the data is no longer in request.data - su_access_json = json.loads_experimental("auth.enable-orjson", request.body) - except json.JSONDecodeError: + su_access_json = orjson.loads(request.body) + except orjson.JSONDecodeError: metrics.incr( "superuser.failure", sample_rate=1.0, diff --git a/src/sentry/backup/crypto.py b/src/sentry/backup/crypto.py index bd69d849861e60..f0ff04153533a6 100644 --- a/src/sentry/backup/crypto.py +++ b/src/sentry/backup/crypto.py @@ -6,6 +6,7 @@ from functools import lru_cache from typing import IO, Any, NamedTuple +import orjson from cryptography.fernet import Fernet from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization @@ -135,9 +136,7 @@ def create_encrypted_export_tarball(json_export: Any, encryptor: Encryptor) -> i pem = encryptor.get_public_key_pem() data_encryption_key = Fernet.generate_key() backup_encryptor = Fernet(data_encryption_key) - encrypted_json_export = backup_encryptor.encrypt( - json.dumps_experimental("backup.enable-orjson", json_export).encode() - ) + encrypted_json_export = backup_encryptor.encrypt(orjson.dumps(json_export)) # Encrypt the newly minted DEK using asymmetric public key encryption. dek_encryption_key = serialization.load_pem_public_key(pem, default_backend()) @@ -301,7 +300,7 @@ def decrypt_data_encryption_key(self, unwrapped: UnwrappedEncryptedExportTarball gcp_kms_config_bytes = self.__fp.read() # Read the user supplied configuration into the proper format. 
- gcp_kms_config_json = json.loads_experimental("backup.enable-orjson", gcp_kms_config_bytes) + gcp_kms_config_json = orjson.loads(gcp_kms_config_bytes) try: crypto_key_version = CryptoKeyVersion(**gcp_kms_config_json) except TypeError: diff --git a/src/sentry/backup/exports.py b/src/sentry/backup/exports.py index 4c617d2bff81e4..fac6f298afae94 100644 --- a/src/sentry/backup/exports.py +++ b/src/sentry/backup/exports.py @@ -7,6 +7,8 @@ import json as builtin_json # noqa: S003 from typing import IO +import orjson + from sentry.backup.crypto import Encryptor, create_encrypted_export_tarball from sentry.backup.dependencies import ( PrimaryKeyMap, @@ -27,7 +29,6 @@ import_export_service, ) from sentry.silo.base import SiloMode -from sentry.utils import json as sentry_json __all__ = ( "ExportingError", @@ -134,7 +135,7 @@ def _export( # TODO(getsentry/team-ospo#190): Since the structure of this data is very predictable (an # array of serialized model objects), we could probably avoid re-ingesting the JSON string # as a future optimization. - for json_model in sentry_json.loads_experimental("backup.enable-orjson", result.json_data): + for json_model in orjson.loads(result.json_data): json_export.append(json_model) # If no `encryptor` argument was passed in, this is an unencrypted export, so we can just dump diff --git a/src/sentry/backup/imports.py b/src/sentry/backup/imports.py index 91c37e9811ae71..81cbb94abb4714 100644 --- a/src/sentry/backup/imports.py +++ b/src/sentry/backup/imports.py @@ -6,6 +6,7 @@ from typing import IO, Any from uuid import uuid4 +import orjson from django.core import serializers from django.db import DatabaseError, connections, router, transaction from django.db.models.base import Model @@ -167,7 +168,7 @@ def _import( # TODO(getsentry#team-ospo/190): Reading the entire export into memory as a string is quite # wasteful - in the future, we should explore chunking strategies to enable a smaller memory # footprint when processing super large (>100MB) exports. - content = ( + content: bytes | str = ( decrypt_encrypted_tarball(src, decryptor) if decryptor is not None else src.read().decode("utf-8") @@ -176,7 +177,7 @@ def _import( if len(DELETED_MODELS) > 0 or len(DELETED_FIELDS) > 0: # Parse the content JSON and remove fields and models that we have marked for deletion in the # function. - content_as_json = json.loads_experimental("backup.enable-orjson", content) # type: ignore[arg-type] + content_as_json = orjson.loads(content) shimmed_models = set(DELETED_FIELDS.keys()) for i, json_model in enumerate(content_as_json): @@ -189,7 +190,7 @@ def _import( del content_as_json[i] # Return the content to byte form, as that is what the Django deserializer expects. - content = json.dumps_experimental("backup.enable-orjson", content_as_json) + content = orjson.dumps(content_as_json) filters = [] if filter_by is not None: @@ -271,7 +272,7 @@ def _import( # NOT including the current instance. def yield_json_models(content) -> Iterator[tuple[NormalizedModelName, str, int]]: # TODO(getsentry#team-ospo/190): Better error handling for unparsable JSON. 
- models = json.loads_experimental("backup.enable-orjson", content) + models = orjson.loads(content) last_seen_model_name: NormalizedModelName | None = None batch: list[type[Model]] = [] num_current_model_instances_yielded = 0 @@ -281,7 +282,7 @@ def yield_json_models(content) -> Iterator[tuple[NormalizedModelName, str, int]] if last_seen_model_name is not None and len(batch) > 0: yield ( last_seen_model_name, - json.dumps_experimental("backup.enable-orjson", batch), + orjson.dumps(batch).decode(), num_current_model_instances_yielded, ) @@ -298,7 +299,7 @@ def yield_json_models(content) -> Iterator[tuple[NormalizedModelName, str, int]] if last_seen_model_name is not None and batch: yield ( last_seen_model_name, - json.dumps_experimental("backup.enable-orjson", batch), + orjson.dumps(batch).decode(), num_current_model_instances_yielded, ) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index 32e441e3775ca4..d06f453b1beef3 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -119,7 +119,7 @@ from sentry.types.activity import ActivityType from sentry.types.group import GroupSubStatus, PriorityLevel from sentry.usage_accountant import record -from sentry.utils import json, metrics +from sentry.utils import metrics from sentry.utils.cache import cache_key_for_event from sentry.utils.canonical import CanonicalKeyDict from sentry.utils.circuit_breaker import ( @@ -2437,13 +2437,11 @@ def _get_severity_score(event: Event) -> tuple[float, str]: response = severity_connection_pool.urlopen( "POST", "/v0/issues/severity-score", - body=json.dumps_experimental("event-manager.enable-orjson", payload), + body=orjson.dumps(payload), headers={"content-type": "application/json;charset=utf-8"}, timeout=timeout, ) - severity = json.loads_experimental( - "event-manager.enable-orjson", response.data - ).get("severity") + severity = orjson.loads(response.data).get("severity") reason = "ml" except MaxRetryError as e: logger.warning( @@ -2790,7 +2788,7 @@ def _materialize_event_metrics(jobs: Sequence[Job]) -> None: # Capture the actual size that goes into node store. 
event_metrics["bytes.stored.event"] = len( - json.dumps_experimental("event-manager.enable-orjson", dict(job["event"].data.items())) + orjson.dumps(dict(job["event"].data.items())).decode() ) for metric_name in ("flag.processing.error", "flag.processing.fatal"): diff --git a/src/sentry/eventstore/compressor.py b/src/sentry/eventstore/compressor.py index ad9e4a499c8519..c71c7ab9fb4f4c 100644 --- a/src/sentry/eventstore/compressor.py +++ b/src/sentry/eventstore/compressor.py @@ -10,7 +10,7 @@ import hashlib from typing import Any -from sentry.utils import json +import orjson _INTERFACES = {} @@ -62,9 +62,7 @@ def deduplicate(data): continue to_deduplicate, to_inline = interface.encode(data.pop(key)) - to_deduplicate_serialized = json.dumps_experimental( - "eventstore.enable-orjson", to_deduplicate - ).encode() + to_deduplicate_serialized = orjson.dumps(to_deduplicate) checksum = hashlib.md5(to_deduplicate_serialized).hexdigest() extra_keys[checksum] = to_deduplicate patchsets.append([key, checksum, to_inline]) diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py index 36f4fb99040c3d..51b90d58ee848b 100644 --- a/src/sentry/eventstore/models.py +++ b/src/sentry/eventstore/models.py @@ -9,6 +9,7 @@ from hashlib import md5 from typing import TYPE_CHECKING, Any, Optional, cast +import orjson import sentry_sdk from dateutil.parser import parse as parse_date from django.conf import settings @@ -25,7 +26,6 @@ from sentry.models.event import EventDict from sentry.snuba.events import Columns from sentry.spans.grouping.api import load_span_grouping_config -from sentry.utils import json from sentry.utils.canonical import CanonicalKeyView from sentry.utils.safe import get_path, trim from sentry.utils.strings import truncatechars @@ -519,7 +519,7 @@ def get_raw_data(self, for_stream: bool = False) -> Mapping[str, Any]: @property def size(self) -> int: - return len(json.dumps_experimental("eventstore.enable-orjson", dict(self.data))) + return len(orjson.dumps(dict(self.data)).decode()) def get_email_subject(self) -> str: template = self.project.get_option("mail:subject_template") diff --git a/src/sentry/eventstore/reprocessing/redis.py b/src/sentry/eventstore/reprocessing/redis.py index 4f21bf164634c9..cf945d06269224 100644 --- a/src/sentry/eventstore/reprocessing/redis.py +++ b/src/sentry/eventstore/reprocessing/redis.py @@ -3,10 +3,10 @@ from datetime import datetime from typing import Any +import orjson import redis from django.conf import settings -from sentry.utils import json from sentry.utils.dates import to_datetime from sentry.utils.redis import redis_clusters @@ -161,10 +161,9 @@ def start_reprocessing( self.redis.setex( _get_info_reprocessed_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL, - json.dumps_experimental( - "eventstore.enable-orjson", + orjson.dumps( {"dateCreated": date_created, "syncCount": sync_count, "totalEvents": event_count}, - ), + ).decode(), ) def get_pending(self, group_id: int) -> tuple[str | None, int]: @@ -177,4 +176,4 @@ def get_progress(self, group_id: int) -> dict[str, Any] | None: info = self.redis.get(_get_info_reprocessed_key(group_id)) if info is None: return None - return json.loads_experimental("eventstore.enable-orjson", info) + return orjson.loads(info) diff --git a/src/sentry/integrations/slack/actions/notification.py b/src/sentry/integrations/slack/actions/notification.py index 94b04abd8a4402..ca20db90bacbcb 100644 --- a/src/sentry/integrations/slack/actions/notification.py +++ 
b/src/sentry/integrations/slack/actions/notification.py @@ -4,6 +4,8 @@ from logging import Logger, getLogger from typing import Any +import orjson + from sentry import features from sentry.api.serializers.rest_framework.rule import ACTION_UUID_KEY from sentry.eventstore.models import GroupEvent @@ -29,7 +31,7 @@ from sentry.shared_integrations.exceptions import ApiError from sentry.shared_integrations.response import BaseApiResponse, MappingApiResponse from sentry.types.rules import RuleFuture -from sentry.utils import json, metrics +from sentry.utils import metrics _default_logger: Logger = getLogger(__name__) @@ -100,9 +102,7 @@ def send_notification(event: GroupEvent, futures: Sequence[RuleFuture]) -> None: "unfurl_media": False, } if payload_blocks := blocks.get("blocks"): - payload["blocks"] = json.dumps_experimental( - "integrations.slack.enable-orjson", payload_blocks - ) + payload["blocks"] = orjson.dumps(payload_blocks).decode() rule = rules[0] if rules else None rule_to_use = self.rule if self.rule else rule @@ -179,9 +179,7 @@ def send_notification(event: GroupEvent, futures: Sequence[RuleFuture]) -> None: "channel_name": self.get_option("channel"), } # temporarily log the payload so we can debug message failures - log_params["payload"] = json.dumps_experimental( - "integrations.slack.enable-orjson", payload - ) + log_params["payload"] = orjson.dumps(payload).decode() self.logger.info( "rule.fail.slack_post", @@ -251,9 +249,7 @@ def send_confirmation_notification( blocks = SlackRuleSaveEditMessageBuilder(rule=rule, new=new, changed=changed).build() payload = { "text": blocks.get("text"), - "blocks": json.dumps_experimental( - "integrations.slack.enable-orjson", blocks.get("blocks") - ), + "blocks": orjson.dumps(blocks.get("blocks")).decode(), "channel": channel, "unfurl_links": False, "unfurl_media": False, diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index 488066f36d3f9f..17e75d2086f133 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -59,7 +59,6 @@ from sentry.types.actor import Actor from sentry.types.group import SUBSTATUS_TO_STR from sentry.types.integrations import ExternalProviders -from sentry.utils import json STATUSES = {"resolved": "resolved", "ignored": "ignored", "unresolved": "re-opened"} SUPPORTED_COMMIT_PROVIDERS = ( @@ -692,6 +691,6 @@ def build(self, notification_uuid: str | None = None) -> SlackBlock: return self._build_blocks( *blocks, fallback_text=self.build_fallback_text(obj, project.slug), - block_id=json.dumps_experimental("integrations.slack.enable-orjson", block_id), + block_id=orjson.dumps(block_id).decode(), skip_fallback=self.skip_fallback, ) diff --git a/src/sentry/integrations/slack/message_builder/notifications/base.py b/src/sentry/integrations/slack/message_builder/notifications/base.py index d25d0fdfcd368b..9640fccef7077f 100644 --- a/src/sentry/integrations/slack/message_builder/notifications/base.py +++ b/src/sentry/integrations/slack/message_builder/notifications/base.py @@ -3,13 +3,14 @@ from collections.abc import Mapping from typing import Any +import orjson + from sentry.integrations.slack.message_builder import SlackBlock from sentry.integrations.slack.message_builder.base.block import BlockSlackMessageBuilder from sentry.integrations.slack.utils.escape import escape_slack_text from sentry.notifications.notifications.base import BaseNotification from sentry.types.actor 
import Actor from sentry.types.integrations import ExternalProviders -from sentry.utils import json class SlackNotificationsMessageBuilder(BlockSlackMessageBuilder): @@ -33,11 +34,7 @@ def build(self) -> SlackBlock: self.recipient, ExternalProviders.SLACK ) actions = self.notification.get_message_actions(self.recipient, ExternalProviders.SLACK) - callback_id = ( - json.dumps_experimental("integrations.slack.enable-orjson", callback_id_raw) - if callback_id_raw - else None - ) + callback_id = orjson.dumps(callback_id_raw).decode() if callback_id_raw else None first_block_text = "" if title_link: diff --git a/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py b/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py index 025d82ac276acf..2e175d56529b3f 100644 --- a/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py +++ b/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py @@ -17,7 +17,6 @@ from sentry.tasks.summaries.utils import COMPARISON_PERIOD from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders -from sentry.utils import json from sentry.utils.http import absolute_uri from .base import SlackNotificationsMessageBuilder @@ -192,11 +191,7 @@ def build(self) -> SlackBlock: text = subject callback_id_raw = self.notification.get_callback_data() - callback_id = ( - json.dumps_experimental("integrations.slack.enable-orjson", callback_id_raw) - if callback_id_raw - else None - ) + callback_id = orjson.dumps(callback_id_raw).decode() if callback_id_raw else None footer = self.notification.build_notification_footer( self.recipient, ExternalProviders.SLACK diff --git a/src/sentry/integrations/slack/message_builder/prompt.py b/src/sentry/integrations/slack/message_builder/prompt.py index 79b22eb05ad87f..211cd80a7f0d48 100644 --- a/src/sentry/integrations/slack/message_builder/prompt.py +++ b/src/sentry/integrations/slack/message_builder/prompt.py @@ -1,5 +1,6 @@ +import orjson + from sentry.integrations.slack.message_builder import SlackBody -from sentry.utils import json from .base.block import BlockSlackMessageBuilder @@ -13,11 +14,10 @@ def __init__(self, url: str) -> None: def build(self) -> SlackBody: return { - "blocks": json.dumps_experimental( - "integrations.slack.enable-orjson", + "blocks": orjson.dumps( [ self.get_markdown_block(LINK_IDENTITY_MESSAGE), self.get_action_block([("Link", self.url, "link"), ("Cancel", None, "ignore")]), ], - ) + ).decode() } diff --git a/src/sentry/integrations/slack/notifications.py b/src/sentry/integrations/slack/notifications.py index a4232b6b0c4a55..0bdcd242bc360e 100644 --- a/src/sentry/integrations/slack/notifications.py +++ b/src/sentry/integrations/slack/notifications.py @@ -5,6 +5,7 @@ from copy import copy from typing import Any +import orjson import sentry_sdk from sentry.integrations.mixins import NotifyBasicMixin @@ -21,7 +22,7 @@ from sentry.tasks.integrations.slack import post_message, post_message_control from sentry.types.actor import Actor from sentry.types.integrations import ExternalProviders -from sentry.utils import json, metrics +from sentry.utils import metrics logger = logging.getLogger("sentry.notifications") SLACK_TIMEOUT = 5 @@ -94,7 +95,7 @@ def _notify_recipient( "unfurl_links": False, "unfurl_media": False, "text": text if text else "", - "blocks": json.dumps_experimental("integrations.slack.enable-orjson", blocks), + "blocks": orjson.dumps(blocks).decode(), } callback_id = 
local_attachments.get("callback_id") if callback_id: @@ -102,9 +103,7 @@ def _notify_recipient( if isinstance(callback_id, str): payload["callback_id"] = callback_id else: - payload["callback_id"] = json.dumps_experimental( - "integrations.slack.enable-orjson", local_attachments.get("callback_id") - ) + payload["callback_id"] = orjson.dumps(local_attachments.get("callback_id")).decode() post_message_task = post_message if SiloMode.get_current_mode() == SiloMode.CONTROL: diff --git a/src/sentry/integrations/slack/requests/action.py b/src/sentry/integrations/slack/requests/action.py index 9aa08ecc7dfed5..df65d35dcb8496 100644 --- a/src/sentry/integrations/slack/requests/action.py +++ b/src/sentry/integrations/slack/requests/action.py @@ -2,12 +2,12 @@ from typing import Any +import orjson from django.utils.functional import cached_property from rest_framework import status from sentry.integrations.slack.requests.base import SlackRequest, SlackRequestError from sentry.models.group import Group -from sentry.utils import json class SlackActionRequest(SlackRequest): @@ -36,9 +36,7 @@ def callback_data(self) -> Any: - is_message: did the original message have a 'message' type """ if self.data.get("callback_id"): - return json.loads_experimental( - "integrations.slack.enable-orjson", self.data["callback_id"] - ) + return orjson.loads(self.data["callback_id"]) # XXX(CEO): can't really feature flag this but the block kit data is very different @@ -46,29 +44,22 @@ def callback_data(self) -> Any: # we don't do anything with it until the user hits "Submit" but we need to handle it anyway if self.data["type"] == "block_actions": if self.data.get("view"): - return json.loads_experimental( - "integrations.slack.enable-orjson", self.data["view"]["private_metadata"] - ) + return orjson.loads(self.data["view"]["private_metadata"]) elif self.data.get("container", {}).get( "is_app_unfurl" ): # for actions taken on interactive unfurls - return json.loads_experimental( - "integrations.slack.enable-orjson", + return orjson.loads( self.data["app_unfurl"]["blocks"][0]["block_id"], ) - return json.loads_experimental( - "integrations.slack.enable-orjson", self.data["message"]["blocks"][0]["block_id"] - ) + return orjson.loads(self.data["message"]["blocks"][0]["block_id"]) if self.data["type"] == "view_submission": - return json.loads_experimental( - "integrations.slack.enable-orjson", self.data["view"]["private_metadata"] - ) + return orjson.loads(self.data["view"]["private_metadata"]) for data in self.data["message"]["blocks"]: if data["type"] == "section" and len(data["block_id"]) > 5: - return json.loads_experimental("integrations.slack.enable-orjson", data["block_id"]) + return orjson.loads(data["block_id"]) # a bit hacky, you can only provide a block ID per block (not per entire message), # and if not provided slack generates a 5 char long one. 
our provided block_id is at least '{issue: }' # so we know it's longer than 5 chars @@ -85,9 +76,7 @@ def _validate_data(self) -> None: raise SlackRequestError(status=status.HTTP_400_BAD_REQUEST) try: - self._data = json.loads_experimental( - "integrations.slack.enable-orjson", self.data["payload"] - ) + self._data = orjson.loads(self.data["payload"]) except (KeyError, IndexError, TypeError, ValueError): raise SlackRequestError(status=status.HTTP_400_BAD_REQUEST) diff --git a/src/sentry/integrations/slack/requests/options_load.py b/src/sentry/integrations/slack/requests/options_load.py index 7f84430760f799..3e029987997a6b 100644 --- a/src/sentry/integrations/slack/requests/options_load.py +++ b/src/sentry/integrations/slack/requests/options_load.py @@ -2,11 +2,11 @@ from typing import Any +import orjson from rest_framework import status from sentry.integrations.slack.requests.base import SlackRequest, SlackRequestError from sentry.models.group import Group -from sentry.utils import json VALID_PAYLOAD_TYPES = ["block_suggestion"] @@ -20,16 +20,11 @@ class SlackOptionsLoadRequest(SlackRequest): def group_id(self) -> int: if self.data.get("container", {}).get("is_app_unfurl"): return int( - json.loads_experimental( - "integrations.slack.enable-orjson", + orjson.loads( self.data["app_unfurl"]["blocks"][0]["block_id"], )["issue"] ) - return int( - json.loads_experimental( - "integrations.slack.enable-orjson", self.data["message"]["blocks"][0]["block_id"] - )["issue"] - ) + return int(orjson.loads(self.data["message"]["blocks"][0]["block_id"])["issue"]) @property def substring(self) -> str: @@ -42,9 +37,7 @@ def _validate_data(self) -> None: raise SlackRequestError(status=status.HTTP_400_BAD_REQUEST) try: - self._data = json.loads_experimental( - "integrations.slack.enable-orjson", self.data["payload"] - ) + self._data = orjson.loads(self.data["payload"]) except (KeyError, IndexError, TypeError, ValueError): raise SlackRequestError(status=status.HTTP_400_BAD_REQUEST) diff --git a/src/sentry/integrations/slack/service.py b/src/sentry/integrations/slack/service.py index ea3181932d5c49..5a58f4aeaedd25 100644 --- a/src/sentry/integrations/slack/service.py +++ b/src/sentry/integrations/slack/service.py @@ -2,6 +2,8 @@ from logging import Logger, getLogger +import orjson + from sentry.integrations.repository import get_default_issue_alert_repository from sentry.integrations.repository.issue_alert import ( IssueAlertNotificationMessage, @@ -28,7 +30,6 @@ from sentry.notifications.notifications.activity.unresolved import UnresolvedActivityNotification from sentry.types.activity import ActivityType from sentry.types.integrations import ExternalProviderEnum -from sentry.utils import json _default_logger = getLogger(__name__) @@ -205,9 +206,7 @@ def _handle_parent_notification( ) payload.update(slack_payload) # TODO (Yash): Users should not have to remember to do this, interface should handle serializing the field - payload["blocks"] = json.dumps_experimental( - "integrations.slack.enable-orjson", payload.get("blocks") - ) + payload["blocks"] = orjson.dumps(payload.get("blocks")).decode() try: client.post("/chat.postMessage", data=payload, timeout=5) except Exception as err: diff --git a/src/sentry/integrations/slack/utils/notifications.py b/src/sentry/integrations/slack/utils/notifications.py index bffaaa65267571..9eb2dadc58f04e 100644 --- a/src/sentry/integrations/slack/utils/notifications.py +++ b/src/sentry/integrations/slack/utils/notifications.py @@ -3,6 +3,7 @@ from collections.abc import Mapping 
from typing import Any +import orjson import sentry_sdk from sentry import features @@ -21,7 +22,6 @@ from sentry.services.hybrid_cloud.integration import integration_service from sentry.shared_integrations.exceptions import ApiError from sentry.shared_integrations.response import BaseApiResponse, MappingApiResponse -from sentry.utils import json from . import logger @@ -63,7 +63,7 @@ def send_incident_alert_notification( payload = { "channel": channel, "text": text, - "attachments": json.dumps_experimental("integrations.slack.enable-orjson", [blocks]), + "attachments": orjson.dumps([blocks]).decode(), # Prevent duplicate unfurl # https://api.slack.com/reference/messaging/link-unfurling#no_unfurling_please "unfurl_links": False, diff --git a/src/sentry/integrations/slack/utils/rule_status.py b/src/sentry/integrations/slack/utils/rule_status.py index d34d2f92af0822..0d1117892eb6eb 100644 --- a/src/sentry/integrations/slack/utils/rule_status.py +++ b/src/sentry/integrations/slack/utils/rule_status.py @@ -3,9 +3,9 @@ from typing import Any, Union, cast from uuid import uuid4 +import orjson from django.conf import settings -from sentry.utils import json from sentry.utils.redis import redis_clusters SLACK_FAILED_MESSAGE = ( @@ -37,15 +37,13 @@ def set_value( def get_value(self) -> Any: key = self._get_redis_key() value = self.client.get(key) - return json.loads_experimental( - "integrations.slack.enable-orjson", cast(Union[str, bytes], value) - ) + return orjson.loads(cast(Union[str, bytes], value)) def _generate_uuid(self) -> str: return uuid4().hex def _set_initial_value(self) -> None: - value = json.dumps_experimental("integrations.slack.enable-orjson", {"status": "pending"}) + value = orjson.dumps({"status": "pending"}).decode() self.client.set(self._get_redis_key(), f"{value}", ex=60 * 60, nx=True) def _get_redis_key(self) -> str: @@ -65,4 +63,4 @@ def _format_value( elif status == "failed": value["error"] = SLACK_FAILED_MESSAGE - return json.dumps_experimental("integrations.slack.enable-orjson", value) + return orjson.dumps(value).decode() diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py index 081949139a12d3..ce21b68db8d7c2 100644 --- a/src/sentry/integrations/slack/webhooks/action.py +++ b/src/sentry/integrations/slack/webhooks/action.py @@ -3,6 +3,7 @@ from collections.abc import Mapping, MutableMapping, Sequence from typing import Any +import orjson import requests as requests_ import sentry_sdk from django.urls import reverse @@ -38,7 +39,6 @@ from sentry.services.hybrid_cloud.user import RpcUser from sentry.shared_integrations.exceptions import ApiError from sentry.types.integrations import ExternalProviderEnum -from sentry.utils import json from ..utils import logger @@ -197,9 +197,7 @@ def api_error( if view: private_metadata = view.get("private_metadata") if private_metadata: - data = json.loads_experimental( - "integrations.slack.enable-orjson", private_metadata - ) + data = orjson.loads(private_metadata) channel_id = data.get("channel_id") response_url = data.get("orig_response_url") @@ -400,7 +398,7 @@ def open_resolve_dialog(self, slack_request: SlackActionRequest, group: Group) - if use_block_kit and slack_request.data.get("channel"): callback_id["channel_id"] = slack_request.data["channel"]["id"] callback_id["rule"] = slack_request.callback_data.get("rule") - callback_id = json.dumps_experimental("integrations.slack.enable-orjson", callback_id) + callback_id = orjson.dumps(callback_id).decode() dialog = { 
"callback_id": callback_id, @@ -410,7 +408,7 @@ def open_resolve_dialog(self, slack_request: SlackActionRequest, group: Group) - } payload = { - "dialog": json.dumps_experimental("integrations.slack.enable-orjson", dialog), + "dialog": orjson.dumps(dialog).decode(), "trigger_id": slack_request.data["trigger_id"], } slack_client = SlackClient(integration_id=slack_request.integration.id) @@ -420,15 +418,13 @@ def open_resolve_dialog(self, slack_request: SlackActionRequest, group: Group) - modal_payload = self.build_resolve_modal_payload(callback_id) try: payload = { - "view": json.dumps_experimental( - "integrations.slack.enable-orjson", modal_payload - ), + "view": orjson.dumps(modal_payload).decode(), "trigger_id": slack_request.data["trigger_id"], } headers = {"content-type": "application/json; charset=utf-8"} slack_client.post( "/views.open", - data=json.dumps_experimental("integrations.slack.enable-orjson", payload), + data=orjson.dumps(payload).decode(), headers=headers, ) except ApiError as e: @@ -468,19 +464,19 @@ def open_archive_dialog(self, slack_request: SlackActionRequest, group: Group) - if slack_request.data.get("channel"): callback_id["channel_id"] = slack_request.data["channel"]["id"] - callback_id = json.dumps_experimental("integrations.slack.enable-orjson", callback_id) + callback_id = orjson.dumps(callback_id).decode() slack_client = SlackClient(integration_id=slack_request.integration.id) modal_payload = self.build_archive_modal_payload(callback_id) try: payload = { - "view": json.dumps_experimental("integrations.slack.enable-orjson", modal_payload), + "view": orjson.dumps(modal_payload).decode(), "trigger_id": slack_request.data["trigger_id"], } headers = {"content-type": "application/json; charset=utf-8"} slack_client.post( "/views.open", - data=json.dumps_experimental("integrations.slack.enable-orjson", payload), + data=orjson.dumps(payload), headers=headers, ) except ApiError as e: @@ -583,8 +579,7 @@ def _handle_group_actions( # use the original response_url to update the link attachment slack_client = SlackClient(integration_id=slack_request.integration.id) try: - private_metadata = json.loads_experimental( - "integrations.slack.enable-orjson", + private_metadata = orjson.loads( slack_request.data["view"]["private_metadata"], ) slack_client.post(private_metadata["orig_response_url"], data=body, json=True) diff --git a/src/sentry/integrations/slack/webhooks/event.py b/src/sentry/integrations/slack/webhooks/event.py index 000d1707a23e8b..d75f8efd73b09b 100644 --- a/src/sentry/integrations/slack/webhooks/event.py +++ b/src/sentry/integrations/slack/webhooks/event.py @@ -4,6 +4,7 @@ from collections.abc import Mapping, MutableMapping from typing import Any +import orjson from rest_framework.request import Request from rest_framework.response import Response @@ -21,7 +22,6 @@ from sentry.services.hybrid_cloud.integration import integration_service from sentry.services.hybrid_cloud.organization import organization_service from sentry.shared_integrations.exceptions import ApiError -from sentry.utils import json from sentry.utils.urls import parse_link from ..utils import logger @@ -156,9 +156,7 @@ def on_link_shared(self, request: Request, slack_request: SlackDMRequest) -> boo return True # Don't unfurl the same thing multiple times - seen_marker = hash( - json.dumps_experimental("integrations.slack.enable-orjson", (link_type, args)) - ) + seen_marker = hash(orjson.dumps((link_type, list(args))).decode()) if seen_marker in links_seen: continue @@ -194,7 +192,7 @@ def 
on_link_shared(self, request: Request, slack_request: SlackDMRequest) -> boo payload = { "channel": data["channel"], "ts": data["message_ts"], - "unfurls": json.dumps_experimental("integrations.slack.enable-orjson", results), + "unfurls": orjson.dumps(results).decode(), } client = SlackClient(integration_id=slack_request.integration.id) diff --git a/src/sentry/integrations/slack/webhooks/options_load.py b/src/sentry/integrations/slack/webhooks/options_load.py index 47db8329d83c01..beb34645cd9cf0 100644 --- a/src/sentry/integrations/slack/webhooks/options_load.py +++ b/src/sentry/integrations/slack/webhooks/options_load.py @@ -4,6 +4,7 @@ from collections.abc import Mapping, Sequence from typing import Any +import orjson from rest_framework import status from rest_framework.request import Request from rest_framework.response import Response @@ -16,7 +17,6 @@ from sentry.integrations.slack.requests.base import SlackRequestError from sentry.integrations.slack.requests.options_load import SlackOptionsLoadRequest from sentry.models.group import Group -from sentry.utils import json from ..utils import logger @@ -99,9 +99,7 @@ def post(self, request: Request) -> Response: extra={ "group_id": group.id if group else None, "organization_id": group.project.organization.id if group else None, - "request_data": json.dumps_experimental( - "integrations.slack.enable-orjson", slack_request.data - ), + "request_data": orjson.dumps(slack_request.data).decode(), }, ) return self.respond(status=status.HTTP_400_BAD_REQUEST) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index d15c34554f0dfe..32b45d099a0145 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -650,6 +650,7 @@ flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE, ) +# Currently unused `orjson` options register("integrations.slack.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) register("auth.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) register("backup.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) diff --git a/tests/flagpole/test_feature.py b/tests/flagpole/test_feature.py index 70978931ae2752..a013df7d0dc0c3 100644 --- a/tests/flagpole/test_feature.py +++ b/tests/flagpole/test_feature.py @@ -4,13 +4,6 @@ from flagpole import ContextBuilder, EvaluationContext, Feature, InvalidFeatureFlagConfiguration from flagpole.operators import OperatorKind -from sentry.testutils.helpers import override_options - - -@pytest.fixture(autouse=True) -def run_before_each(): - with override_options({"flagpole.enable-orjson": 0.0}): - yield class TestParseFeatureConfig: diff --git a/tests/sentry/auth/providers/fly/test_client.py b/tests/sentry/auth/providers/fly/test_client.py index 2f9fa5d8c0a460..fd12bf2f950577 100644 --- a/tests/sentry/auth/providers/fly/test_client.py +++ b/tests/sentry/auth/providers/fly/test_client.py @@ -5,13 +5,6 @@ from sentry.auth.providers.fly.client import FlyClient from sentry.auth.providers.fly.constants import ACCESS_TOKEN_URL -from sentry.testutils.helpers import override_options - - -@pytest.fixture(autouse=True) -def run_before_each(): - with override_options({"auth.enable-orjson": 0.0}): - yield @pytest.fixture diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py index 0d2703a304be91..504900bd4d546a 100644 --- a/tests/sentry/event_manager/test_severity.py +++ b/tests/sentry/event_manager/test_severity.py @@ -4,6 +4,7 @@ from typing import Any from unittest.mock import 
MagicMock, patch +import orjson from django.core.cache import cache from urllib3 import HTTPResponse from urllib3.exceptions import MaxRetryError @@ -21,7 +22,6 @@ from sentry.testutils.helpers.features import apply_feature_flag_on_cls from sentry.testutils.helpers.task_runner import TaskRunner from sentry.testutils.skips import requires_snuba -from sentry.utils import json pytestmark = [requires_snuba] @@ -37,7 +37,7 @@ def make_event(**kwargs) -> dict[str, Any]: class TestGetEventSeverity(TestCase): @patch( "sentry.event_manager.severity_connection_pool.urlopen", - return_value=HTTPResponse(body=json.dumps({"severity": 0.1231})), + return_value=HTTPResponse(body=orjson.dumps({"severity": 0.1231})), ) @patch("sentry.event_manager.logger.info") def test_error_event_simple( @@ -72,7 +72,7 @@ def test_error_event_simple( mock_urlopen.assert_called_with( "POST", "/v0/issues/severity-score", - body=json.dumps(payload), + body=orjson.dumps(payload), headers={"content-type": "application/json;charset=utf-8"}, timeout=0.2, ) @@ -92,7 +92,7 @@ def test_error_event_simple( @patch( "sentry.event_manager.severity_connection_pool.urlopen", - return_value=HTTPResponse(body=json.dumps({"severity": 0.1231})), + return_value=HTTPResponse(body=orjson.dumps({"severity": 0.1231})), ) @patch("sentry.event_manager.logger.info") def test_message_event_simple( @@ -120,7 +120,7 @@ def test_message_event_simple( mock_urlopen.assert_called_with( "POST", "/v0/issues/severity-score", - body=json.dumps(payload), + body=orjson.dumps(payload), headers={"content-type": "application/json;charset=utf-8"}, timeout=0.2, ) @@ -140,7 +140,7 @@ def test_message_event_simple( @patch( "sentry.event_manager.severity_connection_pool.urlopen", - return_value=HTTPResponse(body=json.dumps({"severity": 0.1231})), + return_value=HTTPResponse(body=orjson.dumps({"severity": 0.1231})), ) def test_uses_exception( self, @@ -160,13 +160,13 @@ def test_uses_exception( _get_severity_score(event) assert ( - json.loads(mock_urlopen.call_args.kwargs["body"])["message"] + orjson.loads(mock_urlopen.call_args.kwargs["body"])["message"] == "NopeError: Nopey McNopeface" ) @patch( "sentry.event_manager.severity_connection_pool.urlopen", - return_value=HTTPResponse(body=json.dumps({"severity": 0.1231})), + return_value=HTTPResponse(body=orjson.dumps({"severity": 0.1231})), ) def test_short_circuit_level( self, @@ -195,7 +195,7 @@ def test_short_circuit_level( @patch( "sentry.event_manager.severity_connection_pool.urlopen", - return_value=HTTPResponse(body=json.dumps({"severity": 0.1231})), + return_value=HTTPResponse(body=orjson.dumps({"severity": 0.1231})), ) @patch("sentry.event_manager.logger.warning") def test_unusable_event_title( diff --git a/tests/sentry/eventstore/processing/test_redis_cluster.py b/tests/sentry/eventstore/processing/test_redis_cluster.py index 5a9b9f00350529..7704f4591610f1 100644 --- a/tests/sentry/eventstore/processing/test_redis_cluster.py +++ b/tests/sentry/eventstore/processing/test_redis_cluster.py @@ -1,18 +1,9 @@ from datetime import datetime -import pytest - from sentry.eventstore.reprocessing.redis import RedisReprocessingStore -from sentry.testutils.helpers import override_options from sentry.testutils.helpers.redis import use_redis_cluster -@pytest.fixture(autouse=True) -def run_before_each(): - with override_options({"eventstore.enable-orjson": 0.0}): - yield - - @use_redis_cluster() def test_mark_event_reprocessed(): group_id = 5 diff --git a/tests/sentry/eventstore/test_compressor.py 
b/tests/sentry/eventstore/test_compressor.py index e5b022a52fd1db..908d559d1e126f 100644 --- a/tests/sentry/eventstore/test_compressor.py +++ b/tests/sentry/eventstore/test_compressor.py @@ -1,15 +1,6 @@ import copy -import pytest - from sentry.eventstore.compressor import assemble, deduplicate -from sentry.testutils.helpers import override_options - - -@pytest.fixture(autouse=True) -def run_before_each(): - with override_options({"eventstore.enable-orjson": 0.0}): - yield def _assert_roundtrip(data, assert_extra_keys=None): diff --git a/tests/sentry/runner/commands/test_backup.py b/tests/sentry/runner/commands/test_backup.py index 6587317c0f4bce..5ef46421995c65 100644 --- a/tests/sentry/runner/commands/test_backup.py +++ b/tests/sentry/runner/commands/test_backup.py @@ -8,6 +8,7 @@ from types import SimpleNamespace from unittest.mock import patch +import orjson import pytest from click.testing import CliRunner from google_crc32c import value as crc32c @@ -905,7 +906,7 @@ def test_import_unreadable_gcp_kms_config(self): str(gcp_kms_config_path), ], ) - assert isinstance(rv.exception, json.JSONDecodeError) + assert isinstance(rv.exception, orjson.JSONDecodeError) assert rv.exit_code == 1 def test_import_invalid_gcp_kms_config(self): From dd70023688b311b4d1062bd9fc18d4b768b6d191 Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Mon, 13 May 2024 11:12:38 +0200 Subject: [PATCH 313/376] fix: Bottom margin for Wizard page (#70640) Adds a bottom margin to this ![image (44)](https://github.com/getsentry/sentry/assets/363802/c1f0abf2-1fd9-4112-b505-7238e6c79f40) --- static/app/views/setupWizard/index.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/static/app/views/setupWizard/index.tsx b/static/app/views/setupWizard/index.tsx index 0213a09644a1e2..53fa41885efd9f 100644 --- a/static/app/views/setupWizard/index.tsx +++ b/static/app/views/setupWizard/index.tsx @@ -132,6 +132,7 @@ function SetupWizard({hash = false, organizations}: Props) { const MinWidthButtonBar = styled(ButtonBar)` width: min-content; margin-top: 20px; + margin-bottom: 20px; `; export default SetupWizard; From 0e4f003bcea9376175deedeaaa0eec10175d6533 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 06:41:32 -0400 Subject: [PATCH 314/376] ref: fix nodestore related BytesWarnings (#70732) failing with `-b` with: ``` _____ ProcessingIssueTest.test_with_release_dist_pair_and_previous_issue_without_release_dist ______ tests/sentry/models/test_processingissue.py:112: in test_with_release_dist_pair_and_previous_issue_without_release_dist raw_event = RawEvent.objects.create( src/sentry/silo/base.py:146: in override return original_method(*args, **kwargs) .venv/lib/python3.11/site-packages/django/db/models/manager.py:87: in manager_method return getattr(self.get_queryset(), name)(*args, **kwargs) src/sentry/silo/base.py:146: in override return original_method(*args, **kwargs) .venv/lib/python3.11/site-packages/django/db/models/query.py:679: in create obj.save(force_insert=True, using=self.db) src/sentry/silo/base.py:146: in override return original_method(*args, **kwargs) .venv/lib/python3.11/site-packages/django/db/models/base.py:822: in save self.save_base( src/sentry/silo/base.py:146: in override return original_method(*args, **kwargs) .venv/lib/python3.11/site-packages/django/db/models/base.py:909: in save_base updated = self._save_table( .venv/lib/python3.11/site-packages/django/db/models/base.py:1067: in _save_table results = self._do_insert( 
.venv/lib/python3.11/site-packages/django/db/models/base.py:1108: in _do_insert return manager._insert( .venv/lib/python3.11/site-packages/django/db/models/manager.py:87: in manager_method return getattr(self.get_queryset(), name)(*args, **kwargs) .venv/lib/python3.11/site-packages/django/db/models/query.py:1847: in _insert return query.get_compiler(using=using).execute_sql(returning_fields) .venv/lib/python3.11/site-packages/django/db/models/sql/compiler.py:1822: in execute_sql for sql, params in self.as_sql(): .venv/lib/python3.11/site-packages/django/db/models/sql/compiler.py:1745: in as_sql value_rows = [ .venv/lib/python3.11/site-packages/django/db/models/sql/compiler.py:1746: in [ .venv/lib/python3.11/site-packages/django/db/models/sql/compiler.py:1747: in self.prepare_value(field, self.pre_save_val(field, obj)) .venv/lib/python3.11/site-packages/django/db/models/sql/compiler.py:1686: in prepare_value return field.get_db_prep_save(value, connection=self.connection) .venv/lib/python3.11/site-packages/django/db/models/fields/__init__.py:1013: in get_db_prep_save return self.get_db_prep_value(value, connection=connection, prepared=False) .venv/lib/python3.11/site-packages/django/db/models/fields/__init__.py:1006: in get_db_prep_value value = self.get_prep_value(value) src/sentry/db/models/fields/node.py:245: in get_prep_value value.save() src/sentry/db/models/fields/node.py:159: in save nodestore.backend.set_subkeys(self.id, subkeys) src/sentry/utils/metrics.py:229: in inner return f(*args, **kwargs) .venv/lib/python3.11/site-packages/sentry_sdk/tracing_utils.py:669: in func_with_tracing return func(*args, **kwargs) src/sentry/nodestore/base.py:262: in set_subkeys self.set_bytes(item_id, bytes_data, ttl=ttl) src/sentry/nodestore/base.py:229: in set_bytes return self._set_bytes(item_id, data, ttl) src/sentry/nodestore/django/backend.py:56: in _set_bytes create_or_update(Node, id=id, values={"data": compress(data), "timestamp": timezone.now()}) src/sentry/db/models/query.py:151: in create_or_update affected = objects.filter(**kwargs).update(**values) .venv/lib/python3.11/site-packages/django/db/models/query.py:1476: in filter return self._filter_or_exclude(False, args, kwargs) .venv/lib/python3.11/site-packages/django/db/models/query.py:1494: in _filter_or_exclude clone._filter_or_exclude_inplace(negate, args, kwargs) .venv/lib/python3.11/site-packages/django/db/models/query.py:1501: in _filter_or_exclude_inplace self._query.add_q(Q(*args, **kwargs)) .venv/lib/python3.11/site-packages/django/db/models/sql/query.py:1613: in add_q clause, _ = self._add_q(q_object, self.used_aliases) .venv/lib/python3.11/site-packages/django/db/models/sql/query.py:1645: in _add_q child_clause, needed_inner = self.build_filter( .venv/lib/python3.11/site-packages/django/db/models/sql/query.py:1559: in build_filter condition = self.build_lookup(lookups, col, value) .venv/lib/python3.11/site-packages/django/db/models/sql/query.py:1389: in build_lookup lookup = lookup_class(lhs, rhs) .venv/lib/python3.11/site-packages/django/db/models/lookups.py:30: in __init__ self.rhs = self.get_prep_lookup() .venv/lib/python3.11/site-packages/django/db/models/lookups.py:364: in get_prep_lookup return super().get_prep_lookup() .venv/lib/python3.11/site-packages/django/db/models/lookups.py:88: in get_prep_lookup return self.lhs.output_field.get_prep_value(self.rhs) .venv/lib/python3.11/site-packages/django/db/models/fields/__init__.py:1292: in get_prep_value return self.to_python(value) 
.venv/lib/python3.11/site-packages/django/db/models/fields/__init__.py:1288: in to_python return str(value) E BytesWarning: str() on a bytes instance ``` --- src/sentry/db/models/fields/node.py | 2 +- tests/sentry/nodestore/django/test_backend.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/sentry/db/models/fields/node.py b/src/sentry/db/models/fields/node.py index a96c8a3719a801..3814f0a593f575 100644 --- a/src/sentry/db/models/fields/node.py +++ b/src/sentry/db/models/fields/node.py @@ -169,7 +169,7 @@ def __init__(self, *args, **kwargs): self.ref_func = kwargs.pop("ref_func", None) self.ref_version = kwargs.pop("ref_version", None) self.wrapper = kwargs.pop("wrapper", None) - self.id_func = kwargs.pop("id_func", lambda: b64encode(uuid4().bytes)) + self.id_func = kwargs.pop("id_func", lambda: b64encode(uuid4().bytes).decode()) super().__init__(*args, **kwargs) def contribute_to_class(self, cls, name): diff --git a/tests/sentry/nodestore/django/test_backend.py b/tests/sentry/nodestore/django/test_backend.py index a1aa306029d73a..415714bbd08eeb 100644 --- a/tests/sentry/nodestore/django/test_backend.py +++ b/tests/sentry/nodestore/django/test_backend.py @@ -53,13 +53,13 @@ def test_set(self): ) def test_delete(self): - node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33", data=b'{"foo": "bar"}') + node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33", data='{"foo": "bar"}') self.ns.delete(node.id) assert not Node.objects.filter(id=node.id).exists() def test_delete_multi(self): - node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33", data=b'{"foo": "bar"}') + node = Node.objects.create(id="d2502ebbd7df41ceba8d3275595cac33", data='{"foo": "bar"}') self.ns.delete_multi([node.id]) assert not Node.objects.filter(id=node.id).exists() @@ -69,11 +69,11 @@ def test_cleanup(self): cutoff = now - timedelta(days=1) node = Node.objects.create( - id="d2502ebbd7df41ceba8d3275595cac33", timestamp=now, data=b'{"foo": "bar"}' + id="d2502ebbd7df41ceba8d3275595cac33", timestamp=now, data='{"foo": "bar"}' ) node2 = Node.objects.create( - id="d2502ebbd7df41ceba8d3275595cac34", timestamp=cutoff, data=b'{"foo": "bar"}' + id="d2502ebbd7df41ceba8d3275595cac34", timestamp=cutoff, data='{"foo": "bar"}' ) self.ns.cleanup(cutoff) From f88583df4f844977f422f8b0edcc836928764a1b Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 06:43:05 -0400 Subject: [PATCH 315/376] ref: fix some bytes warnings in tests (#70734) these tests fail when run with `-b` --- .../api/endpoints/test_project_release_file_details.py | 6 +++--- tests/sentry/integrations/jira/test_sentry_issue_details.py | 4 ++-- tests/sentry/integrations/test_pipeline.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/sentry/api/endpoints/test_project_release_file_details.py b/tests/sentry/api/endpoints/test_project_release_file_details.py index 9bbc05f014afb5..a1d07ef155e75b 100644 --- a/tests/sentry/api/endpoints/test_project_release_file_details.py +++ b/tests/sentry/api/endpoints/test_project_release_file_details.py @@ -224,7 +224,7 @@ def test_update_archived(self): self.login_as(user=self.user) self.create_release_archive() - id = urlsafe_b64encode(b"_~/index.js") + id = urlsafe_b64encode(b"_~/index.js").decode() url = reverse( "sentry-api-0-project-release-file-details", @@ -302,10 +302,10 @@ def test_delete_archived(self): assert response.status_code == 204 assert self.release.count_artifacts() 
== 1 - response = self.client.delete(url(urlsafe_b64encode(b"invalid_id"))) + response = self.client.delete(url(urlsafe_b64encode(b"invalid_id").decode())) assert response.status_code == 404 assert self.release.count_artifacts() == 1 - response = self.client.delete(url(urlsafe_b64encode(b"_~/does_not_exist.js"))) + response = self.client.delete(url(urlsafe_b64encode(b"_~/does_not_exist.js").decode())) assert response.status_code == 404 assert self.release.count_artifacts() == 1 diff --git a/tests/sentry/integrations/jira/test_sentry_issue_details.py b/tests/sentry/integrations/jira/test_sentry_issue_details.py index 0d8cbc3ec11f24..b8d9116d56d203 100644 --- a/tests/sentry/integrations/jira/test_sentry_issue_details.py +++ b/tests/sentry/integrations/jira/test_sentry_issue_details.py @@ -106,7 +106,7 @@ def test_simple_get(self, mock_get_integration_from_request, mock_get_last_relea mock_get_integration_from_request.return_value = self.integration response = self.client.get(self.path) assert response.status_code == 200 - resp_content = str(response.content) + resp_content = response.content.decode() assert self.group.title in resp_content assert self.group.get_absolute_url() in resp_content assert self.first_seen.strftime("%b. %d, %Y") in resp_content @@ -137,7 +137,7 @@ def test_multiple_issues(self, mock_get_integration_from_request, mock_get_last_ response = self.client.get(self.path) assert response.status_code == 200 - resp_content = str(response.content) + resp_content = response.content.decode() group_url = self.group.get_absolute_url() new_group_url = new_group.get_absolute_url() diff --git a/tests/sentry/integrations/test_pipeline.py b/tests/sentry/integrations/test_pipeline.py index 415221d3e4d04f..b02a9f67af0aea 100644 --- a/tests/sentry/integrations/test_pipeline.py +++ b/tests/sentry/integrations/test_pipeline.py @@ -476,4 +476,4 @@ def test_different_user_same_external_id(self, *args): } resp = self.pipeline.finish_pipeline() assert not OrganizationIntegration.objects.filter(integration_id=integration.id) - assert "account is linked to a different Sentry user" in str(resp.content) + assert "account is linked to a different Sentry user" in resp.content.decode() From 7be978dfe5e804686610a4b785f8cfa691ef21bf Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Mon, 13 May 2024 12:57:41 +0200 Subject: [PATCH 316/376] ref(cogs): Remove and sample logs for dynamic sampling and on demand metrics (#70718) --- src/sentry/dynamic_sampling/__init__.py | 2 - src/sentry/dynamic_sampling/rules/base.py | 3 - src/sentry/dynamic_sampling/rules/logging.py | 120 --------- .../tasks/boost_low_volume_projects.py | 29 ++- .../tasks/boost_low_volume_transactions.py | 28 ++- .../dynamic_sampling/tasks/collect_orgs.py | 3 - src/sentry/dynamic_sampling/tasks/common.py | 19 +- .../tasks/custom_rule_notifications.py | 12 +- src/sentry/dynamic_sampling/tasks/logging.py | 81 ------ .../tasks/recalibrate_orgs.py | 52 ++-- src/sentry/dynamic_sampling/tasks/utils.py | 20 +- src/sentry/relay/config/metric_extraction.py | 47 +--- .../dynamic_sampling/tasks/test_utils.py | 11 +- tests/sentry/dynamic_sampling/test_logging.py | 237 ------------------ 14 files changed, 81 insertions(+), 583 deletions(-) delete mode 100644 src/sentry/dynamic_sampling/rules/logging.py delete mode 100644 tests/sentry/dynamic_sampling/test_logging.py diff --git a/src/sentry/dynamic_sampling/__init__.py b/src/sentry/dynamic_sampling/__init__.py index 3b3d841e2cb430..cf30cb43648fd5 100644 --- a/src/sentry/dynamic_sampling/__init__.py 
+++ b/src/sentry/dynamic_sampling/__init__.py @@ -9,7 +9,6 @@ ProjectBoostedReleases, ) from .rules.helpers.time_to_adoptions import LATEST_RELEASE_TTAS, Platform -from .rules.logging import should_log_rules_change from .rules.utils import ( DEFAULT_BIASES, RESERVED_IDS, @@ -28,7 +27,6 @@ "get_enabled_user_biases", "get_redis_client_for_ds", "get_rule_hash", - "should_log_rules_change", "RuleType", "ExtendedBoostedRelease", "ProjectBoostedReleases", diff --git a/src/sentry/dynamic_sampling/rules/base.py b/src/sentry/dynamic_sampling/rules/base.py index e96300d6632792..0153c94e69ef92 100644 --- a/src/sentry/dynamic_sampling/rules/base.py +++ b/src/sentry/dynamic_sampling/rules/base.py @@ -8,7 +8,6 @@ from sentry.db.models import Model from sentry.dynamic_sampling.rules.biases.base import Bias from sentry.dynamic_sampling.rules.combine import get_relay_biases_combinator -from sentry.dynamic_sampling.rules.logging import log_rules from sentry.dynamic_sampling.rules.utils import PolymorphicRule, RuleType, get_enabled_user_biases from sentry.dynamic_sampling.tasks.helpers.boost_low_volume_projects import ( get_boost_low_volume_projects_sample_rate, @@ -92,8 +91,6 @@ def _get_rules_of_enabled_biases( except Exception: logger.exception("Rule generator %s failed.", rule_type) - log_rules(project.organization.id, project.id, rules) - return rules diff --git a/src/sentry/dynamic_sampling/rules/logging.py b/src/sentry/dynamic_sampling/rules/logging.py deleted file mode 100644 index 312b002f10e2ee..00000000000000 --- a/src/sentry/dynamic_sampling/rules/logging.py +++ /dev/null @@ -1,120 +0,0 @@ -import logging - -import sentry_sdk - -from sentry.dynamic_sampling.rules.utils import ( - DecayingFn, - PolymorphicRule, - RuleType, - get_rule_hash, - get_rule_type, - get_sampling_value, -) - -logger = logging.getLogger("sentry.dynamic_sampling") - -# Maximum number of projects of which we track active releases. We need to bound this element in order to avoid out -# of memory errors. In case a single instance will receive a lot of requests from Relay, it will accumulate a lot of -# projects. -MAX_PROJECTS_IN_MEMORY = 1000 - -# Dictionary that contains a mapping between project_id -> active_rules where active_rules is rule_hash -> -# sample_rate. It is used to check whether the new set of rules has been actually changed, in order to log the -# change. This is a minor optimization to avoid flooding Google Cloud Logging with data. -# -# This data is stored in-memory for simplicity, however, it introduces a problem because Sentry is running in multiple -# instances without shared memory, therefore we might have different active rules on each instance. This will lead to -# some false positives, that is, rules are logged, but they are not changed. This can happen if a rule is marked as -# active by instance X, and then we receive it in instance Y that didn't see it before. -# -# If we find that this naive implementation generates too much data, we can always use a shared-memory architecture -# by using Redis for example. -active_rules: dict[int, dict[int, float]] = {} - - -def should_log_rules_change(project_id: int, rules: list[PolymorphicRule]) -> bool: - active_rules_per_project = active_rules.get(project_id, None) - new_rules_per_project = {} - - for rule in rules: - if (sampling_value := get_sampling_value(rule)) is not None: - # Here for simplicity we don't make a difference between sampling value type. 
In case we will end up in a - # situation in which rules change their sampling value type and not value, we will need to address this - # here. - _, value = sampling_value - new_rules_per_project[get_rule_hash(rule)] = value - - should_log = new_rules_per_project != active_rules_per_project - if should_log: - _delete_active_rule_if_limit(active_rules_per_project is None) - active_rules[project_id] = new_rules_per_project - - return should_log - - -def _delete_active_rule_if_limit(is_new_project: bool) -> None: - if is_new_project and len(active_rules) >= MAX_PROJECTS_IN_MEMORY: - active_rules.popitem() - - -def log_rules(org_id: int, project_id: int, rules: list[PolymorphicRule]) -> None: - try: - if should_log_rules_change(project_id, rules): - logger.info( - "rules_generator.generate_rules", - extra={ - "org_id": org_id, - "project_id": project_id, - "rules": _format_rules(rules), - }, - ) - except Exception as e: - # If there is any problem while generating the log message, we just silently fail and notify the error to - # Sentry. - sentry_sdk.capture_exception(e) - - -def _format_rules( - rules: list[PolymorphicRule], -) -> list[dict[str, list[str] | str | float | None]]: - formatted_rules = [] - - for rule in rules: - rule_type = get_rule_type(rule) - if (sampling_value := get_sampling_value(rule)) is not None: - value_type, value = sampling_value - formatted_rules.append( - { - "id": rule["id"], - "type": rule_type.value if rule_type else "unknown_rule_type", - "samplingValue": {"type": value_type, "value": value}, - **_extract_info_from_rule(rule_type, rule), # type: ignore[arg-type] - } - ) - - return formatted_rules # type: ignore[return-value] - - -def _extract_info_from_rule( - rule_type: RuleType, rule: PolymorphicRule -) -> dict[str, DecayingFn | list[str] | str | None]: - if rule_type == RuleType.BOOST_ENVIRONMENTS_RULE: - return {"environments": rule["condition"]["inner"][0]["value"]} # type: ignore[literal-required, typeddict-item] - elif rule_type == RuleType.BOOST_LATEST_RELEASES_RULE: - return { - "release": rule["condition"]["inner"][0]["value"], # type: ignore[literal-required, typeddict-item] - "environment": rule["condition"]["inner"][1]["value"], # type: ignore[literal-required, typeddict-item] - "decayingFn": rule["decayingFn"], # type: ignore[typeddict-item] - } - elif rule_type == RuleType.IGNORE_HEALTH_CHECKS_RULE: - return {"healthChecks": rule["condition"]["inner"][0]["value"]} # type: ignore[literal-required, typeddict-item] - elif rule_type == RuleType.BOOST_KEY_TRANSACTIONS_RULE: - return {"transactions": rule["condition"]["inner"][0]["value"]} # type: ignore[literal-required, typeddict-item] - elif rule_type == RuleType.BOOST_LOW_VOLUME_TRANSACTIONS_RULE: - inner_condition = rule["condition"]["inner"] # type: ignore[typeddict-item] - if isinstance(inner_condition, list) and len(inner_condition) > 0: - return {"transaction": rule["condition"]["inner"][0]["value"]} # type: ignore[literal-required, typeddict-item] - else: - return {} - else: - return {} diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py index 213fea7250e4f9..1581535f569b3f 100644 --- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py +++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py @@ -49,16 +49,13 @@ generate_boost_low_volume_projects_cache_key, ) from sentry.dynamic_sampling.tasks.helpers.sliding_window import get_sliding_window_org_sample_rate -from 
sentry.dynamic_sampling.tasks.logging import ( - log_project_with_zero_root_count, - log_sample_rate_source, - log_skipped_job, -) +from sentry.dynamic_sampling.tasks.logging import log_sample_rate_source from sentry.dynamic_sampling.tasks.task_context import TaskContext from sentry.dynamic_sampling.tasks.utils import ( dynamic_sampling_task, dynamic_sampling_task_with_context, has_dynamic_sampling, + sample_function, ) from sentry.models.organization import Organization from sentry.models.project import Project @@ -266,7 +263,6 @@ def adjust_sample_rates_of_projects( # If the org doesn't have dynamic sampling, we want to early return to avoid unnecessary work. if not has_dynamic_sampling(organization): - log_skipped_job(org_id, "boost_low_volume_projects") return # If we have the sliding window org sample rate, we use that or fall back to the blended sample rate in case of @@ -276,12 +272,24 @@ def adjust_sample_rates_of_projects( default_sample_rate=quotas.backend.get_blended_sample_rate(organization_id=org_id), ) if success: - log_sample_rate_source( - org_id, None, "boost_low_volume_projects", "sliding_window_org", sample_rate + sample_function( + function=log_sample_rate_source, + _sample_rate=0.1, + org_id=org_id, + project_id=None, + used_for="boost_low_volume_projects", + source="sliding_window_org", + sample_rate=sample_rate, ) else: - log_sample_rate_source( - org_id, None, "boost_low_volume_projects", "blended_sample_rate", sample_rate + sample_function( + function=log_sample_rate_source, + _sample_rate=0.1, + org_id=org_id, + project_id=None, + used_for="boost_low_volume_projects", + source="blended_sample_rate", + sample_rate=sample_rate, ) # If we didn't find any sample rate, it doesn't make sense to run the adjustment model. @@ -308,7 +316,6 @@ def adjust_sample_rates_of_projects( # for it, thus we consider it as having 0 transactions for the query's time window. if project_id not in projects_with_counts: projects_with_counts[project_id] = 0 - log_project_with_zero_root_count(org_id=org_id, project_id=project_id) projects = [] for project_id, count_per_root in projects_with_counts.items(): diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py index 61291956df6b26..f7699251fe37ae 100644 --- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py +++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py @@ -37,12 +37,13 @@ from sentry.dynamic_sampling.tasks.helpers.boost_low_volume_transactions import ( set_transactions_resampling_rates, ) -from sentry.dynamic_sampling.tasks.logging import log_sample_rate_source, log_skipped_job +from sentry.dynamic_sampling.tasks.logging import log_sample_rate_source from sentry.dynamic_sampling.tasks.task_context import DynamicSamplingLogState, TaskContext from sentry.dynamic_sampling.tasks.utils import ( dynamic_sampling_task, dynamic_sampling_task_with_context, has_dynamic_sampling, + sample_function, ) from sentry.models.organization import Organization from sentry.sentry_metrics import indexer @@ -167,7 +168,6 @@ def boost_low_volume_transactions_of_project(project_transactions: ProjectTransa # If the org doesn't have dynamic sampling, we want to early return to avoid unnecessary work. 
if not has_dynamic_sampling(organization): - log_skipped_job(org_id, "boost_low_volume_transactions") return # We try to use the sample rate that was individually computed for each project, but if we don't find it, we will @@ -178,16 +178,24 @@ def boost_low_volume_transactions_of_project(project_transactions: ProjectTransa error_sample_rate_fallback=quotas.backend.get_blended_sample_rate(organization_id=org_id), ) if success: - log_sample_rate_source( - org_id, - project_id, - "boost_low_volume_transactions", - "boost_low_volume_projects", - sample_rate, + sample_function( + function=log_sample_rate_source, + _sample_rate=0.1, + org_id=org_id, + project_id=project_id, + used_for="boost_low_volume_transactions", + source="boost_low_volume_projects", + sample_rate=sample_rate, ) else: - log_sample_rate_source( - org_id, project_id, "boost_low_volume_transactions", "blended_sample_rate", sample_rate + sample_function( + function=log_sample_rate_source, + _sample_rate=0.1, + org_id=org_id, + project_id=project_id, + used_for="boost_low_volume_transactions", + source="blended_sample_rate", + sample_rate=sample_rate, ) if sample_rate is None: diff --git a/src/sentry/dynamic_sampling/tasks/collect_orgs.py b/src/sentry/dynamic_sampling/tasks/collect_orgs.py index fbb308bbea75d1..5fdcbcd25c019a 100644 --- a/src/sentry/dynamic_sampling/tasks/collect_orgs.py +++ b/src/sentry/dynamic_sampling/tasks/collect_orgs.py @@ -3,7 +3,6 @@ from sentry import options from sentry.dynamic_sampling.tasks.common import GetActiveOrgs, TimedIterator, TimeoutException from sentry.dynamic_sampling.tasks.constants import MAX_PROJECTS_PER_QUERY, MAX_TASK_SECONDS -from sentry.dynamic_sampling.tasks.logging import log_task_execution, log_task_timeout from sentry.dynamic_sampling.tasks.task_context import TaskContext from sentry.dynamic_sampling.tasks.utils import dynamic_sampling_task from sentry.silo.base import SiloMode @@ -35,9 +34,7 @@ def collect_orgs() -> None: pass except TimeoutException: set_extra("context-data", context.to_dict()) - log_task_timeout(context) raise else: set_extra("context-data", context.to_dict()) capture_message("Collect orgs") - log_task_execution(context) diff --git a/src/sentry/dynamic_sampling/tasks/common.py b/src/sentry/dynamic_sampling/tasks/common.py index dfe08e3b3361e1..62db588be58fc4 100644 --- a/src/sentry/dynamic_sampling/tasks/common.py +++ b/src/sentry/dynamic_sampling/tasks/common.py @@ -24,8 +24,9 @@ from sentry.dynamic_sampling.rules.utils import OrganizationId from sentry.dynamic_sampling.tasks.constants import CHUNK_SIZE, MAX_ORGS_PER_QUERY, MAX_SECONDS from sentry.dynamic_sampling.tasks.helpers.sliding_window import extrapolate_monthly_volume -from sentry.dynamic_sampling.tasks.logging import log_extrapolated_monthly_volume, log_query_timeout +from sentry.dynamic_sampling.tasks.logging import log_extrapolated_monthly_volume from sentry.dynamic_sampling.tasks.task_context import DynamicSamplingLogState, TaskContext +from sentry.dynamic_sampling.tasks.utils import sample_function from sentry.sentry_metrics import indexer from sentry.sentry_metrics.use_case_id_registry import UseCaseID from sentry.snuba.dataset import Dataset, EntityKey @@ -577,12 +578,6 @@ def fetch_orgs_with_total_root_transactions_count( if not more_results: break - else: - log_query_timeout( - query="fetch_orgs_with_total_root_transactions_count", - offset=offset, - timeout_seconds=MAX_SECONDS, - ) return aggregated_projects @@ -679,8 +674,14 @@ def compute_sliding_window_sample_rate( return None # We want 
to log the monthly volume for observability purposes. - log_extrapolated_monthly_volume( - org_id, project_id, total_root_count, extrapolated_volume, window_size + sample_function( + function=log_extrapolated_monthly_volume, + _sample_rate=0.1, + org_id=org_id, + project_id=project_id, + volume=total_root_count, + extrapolated_volume=extrapolated_volume, + window_size=window_size, ) func_name = "get_transaction_sampling_tier_for_volume" diff --git a/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py b/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py index 40bb3afe0b91fe..2ac744edc64b46 100644 --- a/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py +++ b/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py @@ -10,7 +10,6 @@ from sentry.constants import ObjectStatus from sentry.dynamic_sampling.tasks.common import TimedIterator, to_context_iterator from sentry.dynamic_sampling.tasks.constants import MAX_TASK_SECONDS -from sentry.dynamic_sampling.tasks.logging import log_custom_rule_progress from sentry.dynamic_sampling.tasks.task_context import DynamicSamplingLogState, TaskContext from sentry.dynamic_sampling.tasks.utils import ( dynamic_sampling_task, @@ -98,16 +97,7 @@ def get_num_samples(rule: CustomDynamicSamplingRule) -> int: referrer="dynamic_sampling.tasks.custom_rule_notifications", ) - samples_count = result["data"][0]["count"] - log_custom_rule_progress( - org_id=rule.organization.id, - project_ids=[project.id for project in projects], - rule_id=rule.id, - samples_count=samples_count, - min_samples_count=MIN_SAMPLES_FOR_NOTIFICATION, - ) - - return samples_count + return result["data"][0]["count"] def send_notification(rule: CustomDynamicSamplingRule, num_samples: int) -> None: diff --git a/src/sentry/dynamic_sampling/tasks/logging.py b/src/sentry/dynamic_sampling/tasks/logging.py index f72ffd7fce2b9a..7875cafe303897 100644 --- a/src/sentry/dynamic_sampling/tasks/logging.py +++ b/src/sentry/dynamic_sampling/tasks/logging.py @@ -1,9 +1,4 @@ import logging -from collections.abc import Sequence -from typing import Any - -from sentry.dynamic_sampling.tasks.task_context import TaskContext -from sentry.utils import metrics logger = logging.getLogger(__name__) @@ -39,79 +34,3 @@ def log_sample_rate_source( "dynamic_sampling.sample_rate_source", extra=extra, ) - - -def log_task_timeout(context: TaskContext) -> None: - logger.error("dynamic_sampling.task_timeout", extra=context.to_dict()) - metrics.incr("dynamic_sampling.task_timeout", tags={"task_name": context.name}) - - -def log_task_execution(context: TaskContext) -> None: - logger.info( - "dynamic_sampling.task_execution", - extra=context.to_dict(), - ) - - -def log_query_timeout(query: str, offset: int, timeout_seconds: int) -> None: - logger.error( - "dynamic_sampling.query_timeout", - extra={"query": query, "offset": offset, "timeout_seconds": timeout_seconds}, - ) - - # We also want to collect a metric, in order to measure how many retries we are having. It may help us to spot - # possible problems on the Snuba end that affect query performance. 
- metrics.incr("dynamic_sampling.query_timeout", tags={"query": query}) - - -def log_skipped_job(org_id: int, job: str): - logger.info("dynamic_sampling.skipped_job", extra={"org_id": org_id, "job": job}) - - -def log_project_with_zero_root_count(org_id: int, project_id: int): - logger.info( - "dynamic_sampling.project_with_zero_root_count", - extra={"org_id": org_id, "project_id": project_id}, - ) - - -def log_recalibrate_org_error(org_id: int, error: str) -> None: - logger.info("dynamic_sampling.recalibrate_org_error", extra={"org_id": org_id, "error": error}) - - -def log_custom_rule_progress( - org_id: int, - project_ids: Sequence[int], - rule_id: int, - samples_count: int, - min_samples_count: int, -): - extra: dict[str, Any] = { - "org_id": org_id, - "rule_id": rule_id, - "samples_count": samples_count, - "min_samples_count": min_samples_count, - } - - if project_ids: - extra["project_ids"] = project_ids - - logger.info( - "dynamic_sampling.custom_rule_progress", - extra=extra, - ) - - -def log_recalibrate_org_state( - org_id: int, previous_factor: float, effective_sample_rate: float, target_sample_rate: float -) -> None: - logger.info( - "dynamic_sampling.recalibrate_org_state", - extra={ - "org_id": org_id, - "previous_factor": previous_factor, - "effective_sample_rate": effective_sample_rate, - "target_sample_rate": target_sample_rate, - "target_effective_ratio": target_sample_rate / effective_sample_rate, - }, - ) diff --git a/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py b/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py index e4bd3563229c70..a17aebd81ca4ef 100644 --- a/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py +++ b/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py @@ -16,17 +16,13 @@ set_guarded_adjusted_factor, ) from sentry.dynamic_sampling.tasks.helpers.sliding_window import get_sliding_window_org_sample_rate -from sentry.dynamic_sampling.tasks.logging import ( - log_recalibrate_org_error, - log_recalibrate_org_state, - log_sample_rate_source, - log_skipped_job, -) +from sentry.dynamic_sampling.tasks.logging import log_sample_rate_source from sentry.dynamic_sampling.tasks.task_context import TaskContext from sentry.dynamic_sampling.tasks.utils import ( dynamic_sampling_task, dynamic_sampling_task_with_context, has_dynamic_sampling, + sample_function, ) from sentry.models.organization import Organization from sentry.silo.base import SiloMode @@ -52,10 +48,6 @@ def recalibrate_orgs(context: TaskContext) -> None: for org_volume in org_volumes: if org_volume.is_valid_for_recalibration(): valid_orgs.append((org_volume.org_id, org_volume.total, org_volume.indexed)) - else: - log_recalibrate_org_error( - org_volume.org_id, "The organization is not valid for recalibration" - ) # We run an asynchronous job for recalibrating a batch of orgs whose size is specified in # `GetActiveOrgsVolumes`. @@ -92,7 +84,6 @@ def recalibrate_org(org_id: int, total: int, indexed: int) -> None: # If the org doesn't have dynamic sampling, we want to early return to avoid unnecessary work. 
if not has_dynamic_sampling(organization): - log_skipped_job(org_id, "recalibrate_orgs") return # If we have the sliding window org sample rate, we use that or fall back to the blended sample rate in case of @@ -102,20 +93,24 @@ def recalibrate_org(org_id: int, total: int, indexed: int) -> None: default_sample_rate=quotas.backend.get_blended_sample_rate(organization_id=org_id), ) if success: - log_sample_rate_source( - org_id, - None, - "recalibrate_orgs", - "sliding_window_org", - target_sample_rate, + sample_function( + function=log_sample_rate_source, + _sample_rate=0.1, + org_id=org_id, + project_id=None, + used_for="recalibrate_orgs", + source="sliding_window_org", + sample_rate=target_sample_rate, ) else: - log_sample_rate_source( - org_id, - None, - "recalibrate_orgs", - "blended_sample_rate", - target_sample_rate, + sample_function( + function=log_sample_rate_source, + _sample_rate=0.1, + org_id=org_id, + project_id=None, + used_for="recalibrate_orgs", + source="blended_sample_rate", + sample_rate=target_sample_rate, ) # If we didn't find any sample rate, we can't recalibrate the organization. @@ -128,25 +123,20 @@ def recalibrate_org(org_id: int, total: int, indexed: int) -> None: # We get the previous factor that was used for the recalibration. previous_factor = get_adjusted_factor(org_id) - log_recalibrate_org_state(org_id, previous_factor, effective_sample_rate, target_sample_rate) - # We want to compute the new adjusted factor. adjusted_factor = compute_adjusted_factor( previous_factor, effective_sample_rate, target_sample_rate ) if adjusted_factor is None: - log_recalibrate_org_error(org_id, "The adjusted factor can't be computed") + sentry_sdk.capture_message( + "The adjusted factor for org recalibration could not be computed" + ) return if adjusted_factor < MIN_REBALANCE_FACTOR or adjusted_factor > MAX_REBALANCE_FACTOR: # In case the new factor would result into too much recalibration, we want to remove it from cache, # effectively removing the generated rule. delete_adjusted_factor(org_id) - log_recalibrate_org_error( - org_id, - f"The adjusted factor {adjusted_factor} outside of the acceptable range [{MIN_REBALANCE_FACTOR}.." - f"{MAX_REBALANCE_FACTOR}]", - ) return # At the end we set the adjusted factor. diff --git a/src/sentry/dynamic_sampling/tasks/utils.py b/src/sentry/dynamic_sampling/tasks/utils.py index c44acf0526c27c..dafe4b688d1648 100644 --- a/src/sentry/dynamic_sampling/tasks/utils.py +++ b/src/sentry/dynamic_sampling/tasks/utils.py @@ -1,13 +1,20 @@ from functools import wraps +from random import random from sentry import features -from sentry.dynamic_sampling.tasks.common import TimeoutException -from sentry.dynamic_sampling.tasks.logging import log_task_execution, log_task_timeout from sentry.dynamic_sampling.tasks.task_context import TaskContext from sentry.models.organization import Organization from sentry.utils import metrics +def sample_function(function, _sample_rate: float = 1.0, **kwargs): + """ + Calls the supplied function with a uniform probability of `_sample_rate`. + """ + if _sample_rate >= 1.0 or 0.0 <= random() <= _sample_rate: + function(**kwargs) + + def has_dynamic_sampling(organization: Organization | None) -> bool: # If an organization can't be fetched, we will assume it has no dynamic sampling. if organization is None: @@ -32,14 +39,7 @@ def _wrapper(): # We will count how much it takes to run the function. 
with metrics.timer(task_name, sample_rate=1.0): context = TaskContext(task_name, max_task_execution) - - try: - func(context=context) - except TimeoutException: - log_task_timeout(context) - raise - else: - log_task_execution(context) + func(context=context) return _wrapper diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index cb29fe2d70ed6c..65f1c0041e6b56 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -4,7 +4,7 @@ from collections.abc import Sequence from dataclasses import dataclass from datetime import timedelta -from typing import Any, Literal, TypedDict +from typing import Any, TypedDict import sentry_sdk from celery.exceptions import SoftTimeLimitExceeded @@ -184,15 +184,6 @@ def _get_alert_metric_specs( if results := _convert_snuba_query_to_metrics(project, alert_snuba_query, prefilling): for spec in results: - _log_on_demand_metric_spec( - project_id=project.id, - spec_for="alert", - spec=spec, - id=alert.id, - field=alert_snuba_query.aggregate, - query=alert_snuba_query.query, - prefilling=prefilling, - ) metrics.incr( "on_demand_metrics.on_demand_spec.for_alert", tags={"prefilling": prefilling}, @@ -502,15 +493,6 @@ def _generate_metric_specs( organization_bulk_query_cache=organization_bulk_query_cache, ): for spec in results: - _log_on_demand_metric_spec( - project_id=project.id, - spec_for="widget", - spec=spec, - id=widget_query.id, - field=aggregate, - query=widget_query.conditions, - prefilling=prefilling, - ) metrics.incr( "on_demand_metrics.on_demand_spec.for_widget", tags={"prefilling": prefilling}, @@ -820,33 +802,6 @@ def _convert_aggregate_and_query_to_metrics( return metric_specs_and_hashes -def _log_on_demand_metric_spec( - project_id: int, - spec_for: Literal["alert", "widget"], - spec: HashedMetricSpec, - id: int, - field: str, - query: str, - prefilling: bool, -) -> None: - spec_query_hash, spec_dict, spec_version = spec - - logger.info( - "on_demand_metrics.on_demand_metric_spec", - extra={ - "project_id": project_id, - f"{spec_for}.id": id, - f"{spec_for}.field": field, - f"{spec_for}.query": query, - "spec_for": spec_for, - "spec_query_hash": spec_query_hash, - "spec": spec_dict, - "spec_version": spec_version, - "prefilling": prefilling, - }, - ) - - # CONDITIONAL TAGGING diff --git a/tests/sentry/dynamic_sampling/tasks/test_utils.py b/tests/sentry/dynamic_sampling/tasks/test_utils.py index bada3d1fa242d3..44b0a87425872f 100644 --- a/tests/sentry/dynamic_sampling/tasks/test_utils.py +++ b/tests/sentry/dynamic_sampling/tasks/test_utils.py @@ -1,5 +1,3 @@ -from unittest.mock import patch - import pytest from sentry.dynamic_sampling.tasks.common import TimeoutException @@ -18,23 +16,18 @@ def inner(context: TaskContext): inner() -@patch("sentry.dynamic_sampling.tasks.utils.log_task_execution") -def test_log_dynamic_sampling_task_with_context(log_task_execution): +def test_log_dynamic_sampling_task_with_context(): @dynamic_sampling_task_with_context(max_task_execution=100) def inner(context: TaskContext): pass inner() - log_task_execution.assert_called_once() -@patch("sentry.dynamic_sampling.tasks.utils.log_task_timeout") -def test_timeout_dynamic_sampling_task_with_context(log_task_timeout): +def test_timeout_dynamic_sampling_task_with_context(): @dynamic_sampling_task_with_context(max_task_execution=100) def inner(context: TaskContext): raise TimeoutException(context) with pytest.raises(TimeoutException): inner() - - 
log_task_timeout.assert_called_once() diff --git a/tests/sentry/dynamic_sampling/test_logging.py b/tests/sentry/dynamic_sampling/test_logging.py deleted file mode 100644 index e2f6d312e03f46..00000000000000 --- a/tests/sentry/dynamic_sampling/test_logging.py +++ /dev/null @@ -1,237 +0,0 @@ -from unittest.mock import patch - -from sentry.dynamic_sampling import get_rule_hash, should_log_rules_change - - -@patch( - "sentry.dynamic_sampling.rules.logging.active_rules", - new={ - 1: { - get_rule_hash( - { - "condition": {"inner": [], "op": "and"}, - "id": 1000, - "samplingValue": {"type": "sampleRate", "value": 0.1}, - "type": "trace", - }, - ): 0.1 - } - }, -) -def test_should_not_log_rules_if_unchanged(): - new_rules = [ - { - "condition": {"inner": [], "op": "and"}, - "id": 1000, - "samplingValue": {"type": "sampleRate", "value": 0.1}, - "type": "trace", - }, - ] - - assert not should_log_rules_change(1, new_rules) # type: ignore[arg-type] - - -@patch( - "sentry.dynamic_sampling.rules.logging.active_rules", - new={ - 1: { - get_rule_hash( - { - "condition": {"inner": [], "op": "and"}, - "id": 1000, - "samplingValue": {"type": "sampleRate", "value": 0.1}, - "type": "trace", - }, - ): 0.1 - } - }, -) -def test_should_not_log_rules_if_unchanged_and_different_order(): - new_rules = [ - { - "samplingValue": {"type": "sampleRate", "value": 0.1}, - "condition": {"op": "and", "inner": []}, - "id": 1000, - "type": "trace", - "active": True, - }, - ] - - assert not should_log_rules_change(1, new_rules) # type: ignore[arg-type] - - -@patch( - "sentry.dynamic_sampling.rules.logging.active_rules", - new={ - 1: { - get_rule_hash( - { - "samplingValue": {"type": "sampleRate", "value": 1.0}, - "type": "trace", - "condition": { - "op": "or", - "inner": [ - { - "op": "glob", - "name": "trace.environment", - "value": ["*dev*", "*test*"], - } - ], - }, - "id": 1001, - }, - ): 1.0 - } - }, -) -def test_should_log_rules_if_new_rule_added(): - new_rules = [ - { - "samplingValue": {"type": "sampleRate", "value": 1.0}, - "type": "trace", - "condition": { - "op": "or", - "inner": [ - { - "op": "glob", - "name": "trace.environment", - "value": ["*dev*", "*test*"], - } - ], - }, - "active": True, - "id": 1001, - }, - { - "samplingValue": {"type": "sampleRate", "value": 0.5}, - "type": "trace", - "active": True, - "condition": { - "op": "and", - "inner": [ - {"op": "eq", "name": "trace.release", "value": ["1.0"]}, - {"op": "eq", "name": "trace.environment", "value": "dev"}, - ], - }, - "id": 1501, - "timeRange": {"start": "2022-10-21 18:50:25+00:00", "end": "2022-10-21 20:03:03+00:00"}, - }, - ] - - assert should_log_rules_change(1, new_rules) # type: ignore[arg-type] - - -@patch( - "sentry.dynamic_sampling.rules.logging.active_rules", - new={ - 1: { - get_rule_hash( - { - "samplingValue": {"type": "sampleRate", "value": 0.7}, - "type": "trace", - "condition": { - "op": "or", - "inner": [ - { - "op": "glob", - "name": "trace.environment", - "value": ["*dev*", "*test*"], - } - ], - }, - "id": 1001, - }, - ): 0.7 - } - }, -) -def test_should_log_rules_if_same_rule_has_different_sample_rate(): - new_rules = [ - { - "samplingValue": {"type": "sampleRate", "value": 0.5}, - "type": "trace", - "condition": { - "op": "or", - "inner": [ - { - "op": "glob", - "name": "trace.environment", - "value": ["*dev*", "*test*"], - } - ], - }, - "active": True, - "id": 1001, - }, - ] - - assert should_log_rules_change(1, new_rules) # type: ignore[arg-type] - - -@patch( - "sentry.dynamic_sampling.rules.logging.active_rules", - new={ - 1: { 
- get_rule_hash( - { - "samplingValue": {"type": "sampleRate", "value": 0.7}, - "type": "trace", - "condition": { - "op": "or", - "inner": [ - { - "op": "glob", - "name": "trace.environment", - "value": ["*dev*", "*test*"], - } - ], - }, - "id": 1001, - }, - ): 0.7, - get_rule_hash( - { - "samplingValue": {"type": "sampleRate", "value": 0.5}, - "type": "trace", - "condition": { - "op": "and", - "inner": [ - {"op": "eq", "name": "trace.release", "value": ["1.0"]}, # type: ignore[typeddict-item] - { - "op": "eq", # type: ignore[typeddict-item] - "name": "trace.environment", - "value": ["dev"], - }, - ], - }, - "id": 1501, - "timeRange": { - "start": "2022-10-21 18:50:25+00:00", - "end": "2022-10-21 20:03:03+00:00", - }, - }, - ): 0.5, - } - }, -) -def test_should_log_rules_if_rule_is_deleted(): - new_rules = [ - { - "samplingValue": {"type": "sampleRate", "value": 0.7}, - "type": "trace", - "condition": { - "op": "or", - "inner": [ - { - "op": "glob", - "name": "trace.environment", - "value": ["*dev*", "*test*"], - } - ], - }, - "active": True, - "id": 1001, - }, - ] - - assert should_log_rules_change(1, new_rules) # type: ignore[arg-type] From 8510452a60b00a9c8bb19f0c127fbb3d237f45c4 Mon Sep 17 00:00:00 2001 From: Riccardo Busetti Date: Mon, 13 May 2024 13:35:28 +0200 Subject: [PATCH 317/376] ref(dynamic-sampling): Remove unused task that was used to measure execution time (#70735) --- src/sentry/conf/server.py | 6 --- .../dynamic_sampling/tasks/collect_orgs.py | 40 ------------------- src/sentry/options/defaults.py | 3 -- src/sentry/utils/sdk.py | 1 - 4 files changed, 50 deletions(-) delete mode 100644 src/sentry/dynamic_sampling/tasks/collect_orgs.py diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index bea30ae27ba053..a7d5784d008ed0 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -793,7 +793,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "sentry.tasks.weekly_escalating_forecast", "sentry.tasks.auto_ongoing_issues", "sentry.tasks.check_am2_compatibility", - "sentry.dynamic_sampling.tasks.collect_orgs", "sentry.tasks.statistical_detectors", "sentry.debug_files.tasks", "sentry.tasks.on_demand_metrics", @@ -1210,11 +1209,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: # 9:00 PDT, 12:00 EDT, 16:00 UTC "schedule": crontab(minute="0", hour="16"), }, - "dynamic-sampling-collect-orgs": { - "task": "sentry.dynamic_sampling.tasks.collect_orgs", - # Run every 20 minutes - "schedule": crontab(minute="*/20"), - }, "statistical-detectors-detect-regressions": { "task": "sentry.tasks.statistical_detectors.run_detection", # Run every 1 hour diff --git a/src/sentry/dynamic_sampling/tasks/collect_orgs.py b/src/sentry/dynamic_sampling/tasks/collect_orgs.py deleted file mode 100644 index 5fdcbcd25c019a..00000000000000 --- a/src/sentry/dynamic_sampling/tasks/collect_orgs.py +++ /dev/null @@ -1,40 +0,0 @@ -from sentry_sdk import capture_message, set_extra - -from sentry import options -from sentry.dynamic_sampling.tasks.common import GetActiveOrgs, TimedIterator, TimeoutException -from sentry.dynamic_sampling.tasks.constants import MAX_PROJECTS_PER_QUERY, MAX_TASK_SECONDS -from sentry.dynamic_sampling.tasks.task_context import TaskContext -from sentry.dynamic_sampling.tasks.utils import dynamic_sampling_task -from sentry.silo.base import SiloMode -from sentry.tasks.base import instrumented_task - - -@instrumented_task( - name="sentry.dynamic_sampling.tasks.collect_orgs", - queue="dynamicsampling", - default_retry_delay=5, - max_retries=5, - soft_time_limit=2 * 60 * 
60,
-    time_limit=2 * 60 * 60 + 5,
-    silo_mode=SiloMode.REGION,
-)
-@dynamic_sampling_task
-def collect_orgs() -> None:
-    enabled = options.get("dynamic-sampling.tasks.collect_orgs") or False
-
-    if not enabled:
-        return
-
-    context = TaskContext("sentry.dynamic-sampling.tasks.collect_orgs", MAX_TASK_SECONDS)
-    iterator_name = GetActiveOrgs.__name__
-    try:
-        for orgs in TimedIterator(
-            context, GetActiveOrgs(max_projects=MAX_PROJECTS_PER_QUERY), iterator_name
-        ):
-            pass
-    except TimeoutException:
-        set_extra("context-data", context.to_dict())
-        raise
-    else:
-        set_extra("context-data", context.to_dict())
-        capture_message("Collect orgs")
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 32b45d099a0145..3f517c341d8118 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -1872,9 +1872,6 @@
 # Killswitch for monitor check-ins
 register("crons.organization.disable-check-in", type=Sequence, default=[])
 
-# Turns on and off the running for dynamic sampling collect_orgs.
-register("dynamic-sampling.tasks.collect_orgs", default=False, flags=FLAG_MODIFIABLE_BOOL)
-
 # Sets the timeout for webhooks
 register(
     "sentry-apps.webhook.timeout.sec",
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index 8111271c109bcb..fabebb8e463069 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -80,7 +80,6 @@
     "sentry.dynamic_sampling.tasks.boost_low_volume_transactions": 0.2,
     "sentry.dynamic_sampling.tasks.recalibrate_orgs": 0.2,
     "sentry.dynamic_sampling.tasks.sliding_window_org": 0.2,
-    "sentry.dynamic_sampling.tasks.collect_orgs": 0.2,
     "sentry.dynamic_sampling.tasks.custom_rule_notifications": 0.2,
     "sentry.dynamic_sampling.tasks.clean_custom_rule_notifications": 0.2,
 }

From f80216c4e8691379339744bf6fae010cb2d7f1cc Mon Sep 17 00:00:00 2001
From: Armen Zambrano G <44410+armenzg@users.noreply.github.com>
Date: Mon, 13 May 2024 08:12:26 -0400
Subject: [PATCH 318/376] feat(related_issues): Create separate sections for
 each relation type (#70648)

In #70504 I added support for the `type` parameter, which allows getting the
data for same-root issues vs trace-connected issues separately. This UI change
allows each related issues section to fetch and render the data independently,
thus making the page load faster. These changes are required to support
fetching trace-connected issues from the Issue Details page without also
fetching same-root-connected data.
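
For illustration, the UI now issues one request per relation type instead of a
single combined request. A minimal sketch of the two calls (the host, auth
token, and `/api/0` prefix below are assumptions; the path, the `type` values,
and the response shape come from this change):

```python
# Sketch only: host, token, and issue id are placeholders.
import requests

BASE = "https://sentry.example.com/api/0"  # assumed URL prefix
HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth

for relation_type in ("same_root_cause", "trace_connected"):
    resp = requests.get(
        f"{BASE}/issues/12345678/related-issues/",
        params={"type": relation_type},
        headers=HEADERS,
    )
    payload = resp.json()
    # e.g. {"type": "trace_connected", "data": [15, 20],
    #       "meta": {"event_id": "abcd", "trace_id": "1234"}}
    # ("meta" is only populated for trace-connected results)
    print(payload["type"], payload["data"])
```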
--- .../groupRelatedIssues/index.spec.tsx | 78 +++---- .../issueDetails/groupRelatedIssues/index.tsx | 199 +++++++++--------- 2 files changed, 133 insertions(+), 144 deletions(-) diff --git a/static/app/views/issueDetails/groupRelatedIssues/index.spec.tsx b/static/app/views/issueDetails/groupRelatedIssues/index.spec.tsx index a78b8edac8a61a..f77f77d0b6cdea 100644 --- a/static/app/views/issueDetails/groupRelatedIssues/index.spec.tsx +++ b/static/app/views/issueDetails/groupRelatedIssues/index.spec.tsx @@ -6,7 +6,8 @@ import {render, screen} from 'sentry-test/reactTestingLibrary'; import {GroupRelatedIssues} from 'sentry/views/issueDetails/groupRelatedIssues'; describe('Related Issues View', function () { - let relatedIssuesMock: jest.Mock; + let sameRootIssuesMock: jest.Mock; + let traceIssuesMock: jest.Mock; let issuesMock: jest.Mock; const router = RouterFixture(); @@ -21,44 +22,20 @@ describe('Related Issues View', function () { const params = {groupId: groupId}; const errorType = 'RuntimeError'; const noData = { - data: [ - { - type: 'same_root_cause', - data: [], - }, - { - type: 'trace_connected', - data: [], - }, - ], + type: 'irrelevant', + data: [], }; const onlySameRootData = { - data: [ - { - type: 'same_root_cause', - data: [group1, group2], - }, - { - type: 'trace_connected', - data: [], - }, - ], + type: 'same_root_cause', + data: [group1, group2], }; const onlyTraceConnectedData = { - data: [ - { - type: 'same_root_cause', - data: [], - }, - { - type: 'trace_connected', - data: [group1, group2], - meta: { - event_id: 'abcd', - trace_id: '1234', - }, - }, - ], + type: 'trace_connected', + data: [group1, group2], + meta: { + event_id: 'abcd', + trace_id: '1234', + }, }; const issuesData = [ { @@ -99,8 +76,12 @@ describe('Related Issues View', function () { }); it('renders with no data', async function () { - relatedIssuesMock = MockApiClient.addMockResponse({ - url: `/issues/${groupId}/related-issues/`, + sameRootIssuesMock = MockApiClient.addMockResponse({ + url: `/issues/${groupId}/related-issues/?type=same_root_cause`, + body: noData, + }); + traceIssuesMock = MockApiClient.addMockResponse({ + url: `/issues/${groupId}/related-issues/?type=trace_connected`, body: noData, }); render( @@ -121,14 +102,19 @@ describe('Related Issues View', function () { await screen.findByText('No trace-connected related issues were found.') ).toBeInTheDocument(); - expect(relatedIssuesMock).toHaveBeenCalled(); + expect(sameRootIssuesMock).toHaveBeenCalled(); + expect(traceIssuesMock).toHaveBeenCalled(); }); it('renders with same root issues', async function () { - relatedIssuesMock = MockApiClient.addMockResponse({ - url: `/issues/${groupId}/related-issues/`, + sameRootIssuesMock = MockApiClient.addMockResponse({ + url: `/issues/${groupId}/related-issues/?type=same_root_cause`, body: onlySameRootData, }); + MockApiClient.addMockResponse({ + url: `/issues/${groupId}/related-issues/?type=trace_connected`, + body: [], + }); issuesMock = MockApiClient.addMockResponse({ url: orgIssuesEndpoint, body: issuesData, @@ -149,7 +135,7 @@ describe('Related Issues View', function () { expect(await screen.findByText(`EARTH-${group1}`)).toBeInTheDocument(); expect(await screen.findByText(`EARTH-${group2}`)).toBeInTheDocument(); - expect(relatedIssuesMock).toHaveBeenCalled(); + expect(sameRootIssuesMock).toHaveBeenCalled(); expect(issuesMock).toHaveBeenCalled(); expect( await screen.findByText('No trace-connected related issues were found.') @@ -163,8 +149,12 @@ describe('Related Issues View', function () { }); 
it('renders with trace connected issues', async function () { - relatedIssuesMock = MockApiClient.addMockResponse({ - url: `/issues/${groupId}/related-issues/`, + MockApiClient.addMockResponse({ + url: `/issues/${groupId}/related-issues/?type=same_root_cause`, + body: [], + }); + traceIssuesMock = MockApiClient.addMockResponse({ + url: `/issues/${groupId}/related-issues/?type=trace_connected`, body: onlyTraceConnectedData, }); issuesMock = MockApiClient.addMockResponse({ @@ -186,7 +176,7 @@ describe('Related Issues View', function () { expect(await screen.findByText(`EARTH-${group1}`)).toBeInTheDocument(); expect(await screen.findByText(`EARTH-${group2}`)).toBeInTheDocument(); - expect(relatedIssuesMock).toHaveBeenCalled(); + expect(traceIssuesMock).toHaveBeenCalled(); expect(issuesMock).toHaveBeenCalled(); expect( await screen.findByText('No same-root-cause related issues were found.') diff --git a/static/app/views/issueDetails/groupRelatedIssues/index.tsx b/static/app/views/issueDetails/groupRelatedIssues/index.tsx index 0b94bf16281a9c..5364e4b740e119 100644 --- a/static/app/views/issueDetails/groupRelatedIssues/index.tsx +++ b/static/app/views/issueDetails/groupRelatedIssues/index.tsx @@ -19,59 +19,109 @@ type RouteParams = { type Props = RouteComponentProps; type RelatedIssuesResponse = { - data: [ - { - data: number[]; - meta: { - event_id: string; - trace_id: string; - }; - type: string; - }, - ]; + data: number[]; + meta: { + event_id: string; + trace_id: string; + }; + type: string; }; +interface RelatedIssuesSectionProps { + groupId: string; + orgSlug: string; + relationType: string; +} + function GroupRelatedIssues({params}: Props) { const {groupId} = params; const organization = useOrganization(); const orgSlug = organization.slug; + return ( + + + + + ); +} + +function RelatedIssuesSection({ + groupId, + orgSlug, + relationType, +}: RelatedIssuesSectionProps) { // Fetch the list of related issues const { isLoading, isError, data: relatedIssues, refetch, - } = useApiQuery([`/issues/${groupId}/related-issues/`], { - staleTime: 0, - }); - - let traceMeta = { - trace_id: '', - event_id: '', - }; - const { - same_root_cause: sameRootCauseIssues = [], - trace_connected: traceConnectedIssues = [], - } = (relatedIssues?.data ?? []).reduce( - (mapping, item) => { - if (item.type === 'trace_connected') { - traceMeta = {...item.meta}; - } - const issuesList = item.data; - mapping[item.type] = issuesList; - return mapping; - }, - {same_root_cause: [], trace_connected: []} + } = useApiQuery( + [`/issues/${groupId}/related-issues/?type=${relationType}`], + { + staleTime: 0, + } ); + const traceMeta = relationType === 'trace_connected' ? relatedIssues?.meta : undefined; + const issues = relatedIssues?.data ?? []; + const query = `issue.id:[${issues}]`; // project=-1 allows ensuring that the query will show issues from any projects for the org // This is important for traces since issues can be for any project in the org const baseUrl = `/organizations/${orgSlug}/issues/?project=-1`; + let title; + let linkToTrace; + let openIssuesButton; + if (relationType === 'trace_connected' && traceMeta) { + title = t('Issues in the same trace'); + linkToTrace = ( + + {t('These issues were all found within ')} + + {t('this trace')} + + . + + ); + openIssuesButton = ( + + {t('Open in Issues')} + + ); + } else { + title = t('Issues caused by the same root cause'); + openIssuesButton = ( + + {t('Open in Issues')} + + ); + } return ( - + + {title} {isLoading ? ( ) : isError ? 
( @@ -79,77 +129,26 @@ function GroupRelatedIssues({params}: Props) { message={t('Unable to load related issues, please try again later')} onRetry={refetch} /> - ) : ( + ) : issues.length > 0 ? ( -
- - {t('Issues caused by the same root cause')} - {sameRootCauseIssues.length > 0 ? ( -
- -
- - {t('Open in Issues')} - - - -
- ) : ( - {t('No same-root-cause related issues were found.')} - )} - -
-
- - {t('Issues in the same trace')} - {traceConnectedIssues.length > 0 ? ( -
- - - {t('These issues were all found within ')} - - {t('this trace')} - - . - - - {t('Open in Issues')} - - - -
- ) : ( - {t('No trace-connected related issues were found.')} - )} -
-
+ + {linkToTrace ?? null} + {openIssuesButton ?? null} + + + ) : relationType === 'trace_connected' ? ( + {t('No trace-connected related issues were found.')} + ) : ( + {t('No same-root-cause related issues were found.')} )} - +
); } From 31ce316cc553f57645c981dfa48fadf735434afd Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Mon, 13 May 2024 08:12:50 -0400 Subject: [PATCH 319/376] deprecate(related_issues): Drop support for doing all types of related issues (#70606) This is a follow-up to #70504. Once the UI stops using this approach we will merge this change. --- src/sentry/api/endpoints/issues/related_issues.py | 14 +++----------- src/sentry/issues/related/__init__.py | 11 ----------- 2 files changed, 3 insertions(+), 22 deletions(-) diff --git a/src/sentry/api/endpoints/issues/related_issues.py b/src/sentry/api/endpoints/issues/related_issues.py index 6a1104e73f27fb..d5606ab2a9a35d 100644 --- a/src/sentry/api/endpoints/issues/related_issues.py +++ b/src/sentry/api/endpoints/issues/related_issues.py @@ -5,7 +5,6 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.group import GroupEndpoint -from sentry.issues.related import find_related_issues # To be deprecated from sentry.issues.related import RELATED_ISSUES_ALGORITHMS from sentry.models.group import Group from sentry.types.ratelimit import RateLimit, RateLimitCategory @@ -35,13 +34,6 @@ def get(self, request: Request, group: Group) -> Response: :pparam Group group: the group object """ # The type of related issues to retrieve. Can be either `same_root_cause` or `trace_connected`. - related_type = request.query_params.get("type") - related_issues: list[dict[str, str | list[int] | dict[str, str]]] = [] - - if related_type in RELATED_ISSUES_ALGORITHMS: - data, meta = RELATED_ISSUES_ALGORITHMS[related_type](group) - return Response({"type": related_type, "data": data, "meta": meta}) - else: - # XXX: We will be deprecating this approach soon - related_issues = find_related_issues(group) - return Response({"data": [related_set for related_set in related_issues]}) + related_type = request.query_params["type"] + data, meta = RELATED_ISSUES_ALGORITHMS[related_type](group) + return Response({"type": related_type, "data": data, "meta": meta}) diff --git a/src/sentry/issues/related/__init__.py b/src/sentry/issues/related/__init__.py index c5771783e68973..51d26537e00d66 100644 --- a/src/sentry/issues/related/__init__.py +++ b/src/sentry/issues/related/__init__.py @@ -1,7 +1,5 @@ """This module exports a function to find related issues. 
It groups them by type.""" -from sentry.models.group import Group - from .same_root_cause import same_root_cause_analysis from .trace_connected import trace_connected_analysis @@ -11,12 +9,3 @@ "same_root_cause": same_root_cause_analysis, "trace_connected": trace_connected_analysis, } - - -def find_related_issues(group: Group) -> list[dict[str, str | list[int] | dict[str, str]]]: - related_issues: list[dict[str, str | list[int] | dict[str, str]]] = [] - for key, func in RELATED_ISSUES_ALGORITHMS.items(): - data, meta = func(group) - related_issues.append({"type": key, "data": data, "meta": meta}) - - return related_issues From 59cd05d076d0d218ba849238a53ac692b8b7b857 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 09:06:03 -0400 Subject: [PATCH 320/376] ref: fix BytesWarnings in data_export tests (#70731) previously failing (`-b`) with: ``` _______________________________ ExportedDataTest.test_email_failure ________________________________ tests/sentry/data_export/test_models.py:157: in test_email_failure self.data_export.email_failure(self.TEST_STRING) src/sentry/data_export/models.py:134: in email_failure msg.send_async([user.email]) src/sentry/utils/email/message_builder.py:241: in send_async messages = self.get_built_messages(to, cc=cc, bcc=bcc) src/sentry/utils/email/message_builder.py:207: in get_built_messages results = [ src/sentry/utils/email/message_builder.py:208: in self.build(to=email, reply_to=send_to, cc=cc, bcc=bcc) for email in send_to if email src/sentry/utils/email/message_builder.py:185: in build body=self.__render_text_body(), src/sentry/utils/email/message_builder.py:136: in __render_text_body body: str = render_to_string(self.template, self.context) src/sentry/web/helpers.py:29: in render_to_string rendered = loader.render_to_string(template, context=context, request=request) .venv/lib/python3.11/site-packages/django/template/loader.py:62: in render_to_string return template.render(context, request) .venv/lib/python3.11/site-packages/django/template/backends/django.py:61: in render return self.template.render(context) .venv/lib/python3.11/site-packages/django/template/base.py:171: in render return self._render(context) .venv/lib/python3.11/site-packages/django/test/utils.py:111: in instrumented_test_render return self.nodelist.render(context) .venv/lib/python3.11/site-packages/django/template/base.py:1000: in render return SafeString("".join([node.render_annotated(context) for node in self])) .venv/lib/python3.11/site-packages/django/template/base.py:1000: in return SafeString("".join([node.render_annotated(context) for node in self])) .venv/lib/python3.11/site-packages/django/template/base.py:961: in render_annotated return self.render(context) .venv/lib/python3.11/site-packages/django/template/base.py:1065: in render return render_value_in_context(output, context) .venv/lib/python3.11/site-packages/django/template/base.py:1042: in render_value_in_context value = str(value) E BytesWarning: str() on a bytes instance ``` --- src/sentry/data_export/models.py | 13 ++++++------- tests/sentry/data_export/test_models.py | 6 +++--- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/src/sentry/data_export/models.py b/src/sentry/data_export/models.py index 5d7c1ead33f026..8f5a4f5b7df7a8 100644 --- a/src/sentry/data_export/models.py +++ b/src/sentry/data_export/models.py @@ -4,7 +4,6 @@ from django.db import models, router, transaction from django.urls import reverse from django.utils import 
timezone -from django.utils.encoding import force_str from sentry.backup.scopes import RelocationScope from sentry.db.models import ( @@ -43,7 +42,7 @@ class ExportedData(Model): query_info = JSONField() @property - def status(self): + def status(self) -> ExportStatus: if self.date_finished is None: return ExportStatus.Early elif self.date_expired is not None and self.date_expired < timezone.now(): @@ -58,16 +57,16 @@ def payload(self): return payload @property - def file_name(self): + def file_name(self) -> str: date = self.date_added.strftime("%Y-%B-%d") export_type = ExportQueryType.as_str(self.query_type) # Example: Discover_2020-July-21_27.csv return f"{export_type}_{date}_{self.id}.csv" @staticmethod - def format_date(date): + def format_date(date) -> str | None: # Example: 12:21 PM on July 21, 2020 (UTC) - return None if date is None else force_str(date.strftime("%-I:%M %p on %B %d, %Y (%Z)")) + return None if date is None else date.strftime("%-I:%M %p on %B %d, %Y (%Z)") def delete_file(self): file = self._get_file() @@ -85,7 +84,7 @@ def finalize_upload(self, file, expiration=DEFAULT_EXPIRATION): self.update(file_id=file.id, date_finished=current_time, date_expired=expire_time) transaction.on_commit(lambda: self.email_success(), router.db_for_write(ExportedData)) - def email_success(self): + def email_success(self) -> None: from sentry.utils.email import MessageBuilder user_email = None @@ -113,7 +112,7 @@ def email_success(self): if user_email is not None: msg.send_async([user_email]) - def email_failure(self, message): + def email_failure(self, message: str) -> None: from sentry.utils.email import MessageBuilder user = user_service.get_user(user_id=self.user_id) diff --git a/tests/sentry/data_export/test_models.py b/tests/sentry/data_export/test_models.py index f2db7d3e58919b..fbac266f8e88df 100644 --- a/tests/sentry/data_export/test_models.py +++ b/tests/sentry/data_export/test_models.py @@ -154,19 +154,19 @@ def test_email_success_content(self, builder): def test_email_failure(self): with self.tasks(): - self.data_export.email_failure(self.TEST_STRING) + self.data_export.email_failure("failed to export data!") assert len(mail.outbox) == 1 assert not ExportedData.objects.filter(id=self.data_export.id).exists() @patch("sentry.utils.email.MessageBuilder") def test_email_failure_content(self, builder): with self.tasks(): - self.data_export.email_failure(self.TEST_STRING) + self.data_export.email_failure("failed to export data!") expected_email_args = { "subject": "We couldn't export your data.", "context": { "creation": ExportedData.format_date(date=self.data_export.date_added), - "error_message": self.TEST_STRING, + "error_message": "failed to export data!", "payload": json.dumps(self.data_export.payload), }, "type": "organization.export-data", From 2d014766abebde743b1b26acdda2f03d0aad8be2 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 09:06:34 -0400 Subject: [PATCH 321/376] ref: remove SENTRY_LIGHT_BUILD (#70737) it was replaced by tools.fast_editable --- scripts/lib.sh | 3 --- setup.py | 35 ++++++++++++++--------------------- 2 files changed, 14 insertions(+), 24 deletions(-) diff --git a/scripts/lib.sh b/scripts/lib.sh index 2cc8be59c87a19..859924edacb53a 100755 --- a/scripts/lib.sh +++ b/scripts/lib.sh @@ -99,9 +99,6 @@ install-py-dev() { pip-install -r requirements-dev-frozen.txt - # SENTRY_LIGHT_BUILD=1 disables webpacking during setup.py. 
- # Webpacked assets are only necessary for devserver (which does it lazily anyways) - # and acceptance tests, which webpack automatically if run. python3 -m tools.fast_editable --path . } diff --git a/setup.py b/setup.py index 936a40d745c95e..cdbfea2b619e8b 100644 --- a/setup.py +++ b/setup.py @@ -18,19 +18,14 @@ from sentry.utils.distutils.commands.build_integration_docs import BuildIntegrationDocsCommand from sentry.utils.distutils.commands.build_js_sdk_registry import BuildJsSdkRegistryCommand -IS_LIGHT_BUILD = os.environ.get("SENTRY_LIGHT_BUILD") == "1" - class SentrySDistCommand(SDistCommand): - # If we are not a light build we want to also execute build_assets as - # part of our source build pipeline. - if not IS_LIGHT_BUILD: - sub_commands = [ - *SDistCommand.sub_commands, - ("build_integration_docs", None), - ("build_assets", None), - ("build_js_sdk_registry", None), - ] + sub_commands = [ + *SDistCommand.sub_commands, + ("build_integration_docs", None), + ("build_assets", None), + ("build_js_sdk_registry", None), + ] class SentryBuildCommand(BuildCommand): @@ -39,20 +34,18 @@ def run(self): logging.getLogger("sentry").setLevel(logging.WARNING) - if not IS_LIGHT_BUILD: - self.run_command("build_integration_docs") - self.run_command("build_assets") - self.run_command("build_js_sdk_registry") - BuildCommand.run(self) + self.run_command("build_integration_docs") + self.run_command("build_assets") + self.run_command("build_js_sdk_registry") + super().run() class SentryDevelopCommand(DevelopCommand): def run(self): - DevelopCommand.run(self) - if not IS_LIGHT_BUILD: - self.run_command("build_integration_docs") - self.run_command("build_assets") - self.run_command("build_js_sdk_registry") + super().run() + self.run_command("build_integration_docs") + self.run_command("build_assets") + self.run_command("build_js_sdk_registry") cmdclass = { From 5ce6705cd4e776caa7ea47a9cf1424133d105b4d Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Mon, 13 May 2024 15:11:55 +0200 Subject: [PATCH 322/376] Fully enable `reprocessing-v2` (backend) (#69204) This fully enables the feature by simply removing all the checks and early returns for the feature flag. 
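For reference, the guard being deleted looked like this in both endpoints (lifted from the removed lines in the diff below; `group_reprocessing.py` had the same shape with `group.project.organization`), so the handlers now fall straight through to the real work:

```python
# Early-return feature gate removed from the reprocessing endpoints:
if not features.has(
    "organizations:reprocessing-v2", project.organization, actor=request.user
):
    return self.respond(
        {"error": "This project does not have the reprocessing v2 feature"},
        status=404,
    )
```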
--- Split out from https://github.com/getsentry/sentry/pull/68412 --- src/sentry/api/endpoints/event_reprocessable.py | 9 --------- src/sentry/api/endpoints/group_reprocessing.py | 9 --------- tests/sentry/api/endpoints/test_event_reprocessable.py | 9 ++++----- tests/sentry/tasks/test_reprocessing2.py | 4 +--- tests/symbolicator/test_minidump_full.py | 9 ++++----- 5 files changed, 9 insertions(+), 31 deletions(-) diff --git a/src/sentry/api/endpoints/event_reprocessable.py b/src/sentry/api/endpoints/event_reprocessable.py index 9358add9776c5f..7a06b4ebc33d67 100644 --- a/src/sentry/api/endpoints/event_reprocessable.py +++ b/src/sentry/api/endpoints/event_reprocessable.py @@ -2,7 +2,6 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -56,14 +55,6 @@ def get(self, request: Request, project, event_id) -> Response: :auth: required """ - if not features.has( - "organizations:reprocessing-v2", project.organization, actor=request.user - ): - return self.respond( - {"error": "This project does not have the reprocessing v2 feature"}, - status=404, - ) - try: pull_event_data(project.id, event_id) except CannotReprocess as e: diff --git a/src/sentry/api/endpoints/group_reprocessing.py b/src/sentry/api/endpoints/group_reprocessing.py index d69b196617f2ca..ba62cc747e5699 100644 --- a/src/sentry/api/endpoints/group_reprocessing.py +++ b/src/sentry/api/endpoints/group_reprocessing.py @@ -1,7 +1,6 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import features from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases import GroupEndpoint @@ -27,14 +26,6 @@ def post(self, request: Request, group) -> Response: :auth: required """ - if not features.has( - "organizations:reprocessing-v2", group.project.organization, actor=request.user - ): - return self.respond( - {"error": "This project does not have the reprocessing v2 feature"}, - status=404, - ) - max_events = request.data.get("maxEvents") if max_events: max_events = int(max_events) diff --git a/tests/sentry/api/endpoints/test_event_reprocessable.py b/tests/sentry/api/endpoints/test_event_reprocessable.py index 11b18218c06300..296b6c458229d2 100644 --- a/tests/sentry/api/endpoints/test_event_reprocessable.py +++ b/tests/sentry/api/endpoints/test_event_reprocessable.py @@ -17,8 +17,7 @@ def test_simple(self): ) path = f"/api/0/projects/{event1.project.organization.slug}/{event1.project.slug}/events/{event1.event_id}/reprocessable/" - with self.feature("organizations:reprocessing-v2"): - response = self.client.get(path, format="json") - assert response.status_code == 200 - assert not response.data["reprocessable"] - assert response.data["reason"] == "unprocessed_event.not_found" + response = self.client.get(path, format="json") + assert response.status_code == 200 + assert not response.data["reprocessable"] + assert response.data["reason"] == "unprocessed_event.not_found" diff --git a/tests/sentry/tasks/test_reprocessing2.py b/tests/sentry/tasks/test_reprocessing2.py index 39dc4aca58b603..29524f818fb86f 100644 --- a/tests/sentry/tasks/test_reprocessing2.py +++ b/tests/sentry/tasks/test_reprocessing2.py @@ -26,7 +26,6 @@ from sentry.tasks.reprocessing2 import finish_reprocessing, reprocess_group from 
sentry.tasks.store import preprocess_event from sentry.testutils.cases import TestCase -from sentry.testutils.helpers import Feature from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.skips import requires_snuba @@ -62,8 +61,7 @@ def _create_user_report(evt): def reprocessing_feature(settings): settings.SENTRY_REPROCESSING_PAGE_SIZE = 1 - with Feature({"organizations:reprocessing-v2": True}): - yield + yield @pytest.fixture diff --git a/tests/symbolicator/test_minidump_full.py b/tests/symbolicator/test_minidump_full.py index 8d594d70cd67aa..c0d75601f8e5e0 100644 --- a/tests/symbolicator/test_minidump_full.py +++ b/tests/symbolicator/test_minidump_full.py @@ -36,8 +36,9 @@ class SymbolicatorMinidumpIntegrationTest(RelayStoreHelper, TransactionTestCase) def initialize(self, live_server, reset_snuba): self.project.update_option("sentry:builtin_symbol_sources", []) - with patch("sentry.auth.system.is_internal_ip", return_value=True), self.options( - {"system.url-prefix": live_server.url} + with ( + patch("sentry.auth.system.is_internal_ip", return_value=True), + self.options({"system.url-prefix": live_server.url}), ): # Run test case yield @@ -162,9 +163,7 @@ def test_reprocessing(self): self.project.update_option("sentry:store_crash_reports", STORE_CRASH_REPORTS_ALL) - features = dict(self._FEATURES) - features["organizations:reprocessing-v2"] = True - with self.feature(features): + with self.feature(self._FEATURES): with open(get_fixture_path("native", "windows.dmp"), "rb") as f: event = self.post_and_retrieve_minidump( {"upload_file_minidump": f}, {"sentry[logger]": "test-logger"} From 6f4ceed029e7e80cf305f676fee6e5d2e97d3bf1 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 09:14:09 -0400 Subject: [PATCH 323/376] ref: use repr(...) instead of str(...) 
for bytes values in metrics error log (#70733)

fixes a `BytesWarning` emitted when the tests are run with `python -b`
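For context, `-b` makes CPython emit a `BytesWarning` whenever `str()` is applied to a `bytes` value, while `repr()` produces the same human-readable form without the warning. A minimal sketch (hypothetical standalone `demo.py`, not part of this change):

```python
# Run as: python -b demo.py
payload = b"not-valid-json"

print(repr(payload))  # b'not-valid-json' -- explicit conversion, no warning
print(str(payload))   # same text, but emits "BytesWarning: str() on a bytes instance" under -b
```

---
 src/sentry/sentry_metrics/consumers/indexer/batch.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/sentry/sentry_metrics/consumers/indexer/batch.py b/src/sentry/sentry_metrics/consumers/indexer/batch.py
index 92e1e15f9ce4be..7dcb54b97300c3 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/batch.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/batch.py
@@ -152,7 +152,7 @@ def _extract_messages(self) -> None:
                 self.invalid_msg_meta.add(broker_meta)
                 logger.exception(
                     str(e),
-                    extra={"payload_value": str(msg.payload.value)},
+                    extra={"payload_value": repr(msg.payload.value)},
                 )

         for namespace, cnt in skipped_msgs_cnt.items():
@@ -172,7 +172,7 @@ def _extract_message(
         except orjson.JSONDecodeError:
             logger.exception(
                 "process_messages.invalid_json",
-                extra={"payload_value": str(msg.payload.value)},
+                extra={"payload_value": repr(msg.payload.value)},
             )
             raise
@@ -186,7 +186,7 @@ def _extract_message(
                 raise
             logger.warning(
                 "process_messages.invalid_schema",
-                extra={"payload_value": str(msg.payload.value)},
+                extra={"payload_value": repr(msg.payload.value)},
                 exc_info=True,
             )

From 9f8895c6b80406fd1f3ec0fe3790a963612acd88 Mon Sep 17 00:00:00 2001
From: Simon Hellmayr
Date: Mon, 13 May 2024 15:20:44 +0200
Subject: [PATCH 324/376] feat(metrics): use new meta tables for metrics meta
 queries in metrics API (#70420)

---
 .../api/endpoints/organization_metrics.py     |  67 +++---
 .../querying/metadata/__init__.py             |  11 +-
 .../querying/metadata/metrics.py              |  97 +++++++++
 .../sentry_metrics/querying/metadata/tags.py  |  60 ++++++
 .../sentry_metrics/querying/metadata/utils.py |  13 ++
 .../test_organization_metrics_details.py      |  50 ++---
 .../test_organization_metrics_metadata.py     | 179 +++++++++++++++++
 .../test_organization_metrics_tag_details.py  | 190 ++----------------
 ...est_organization_metrics_tag_details_v2.py | 178 ++++++++++++++++
 .../querying/metadata/__init__.py             |   0
 .../querying/metadata/test_metrics.py         |  19 ++
 11 files changed, 624 insertions(+), 240 deletions(-)
 create mode 100644 src/sentry/sentry_metrics/querying/metadata/metrics.py
 create mode 100644 src/sentry/sentry_metrics/querying/metadata/tags.py
 create mode 100644 src/sentry/sentry_metrics/querying/metadata/utils.py
 create mode 100644 tests/sentry/api/endpoints/test_organization_metrics_metadata.py
 create mode 100644 tests/sentry/api/endpoints/test_organization_metrics_tag_details_v2.py
 create mode 100644 tests/sentry/sentry_metrics/querying/metadata/__init__.py
 create mode 100644 tests/sentry/sentry_metrics/querying/metadata/test_metrics.py

diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py
index 25931f7c7e9f67..722f85937b5033 100644
--- a/src/sentry/api/endpoints/organization_metrics.py
+++ b/src/sentry/api/endpoints/organization_metrics.py
@@ -2,7 +2,7 @@
 from datetime import datetime, timedelta, timezone

 from rest_framework import serializers
-from rest_framework.exceptions import ParseError
+from rest_framework.exceptions import NotFound, ParseError
 from rest_framework.request import Request
 from rest_framework.response import Response

@@ -36,7 +36,13 @@
     LatestReleaseNotFoundError,
     MetricsQueryExecutionError,
 )
-from sentry.sentry_metrics.querying.metadata import MetricCodeLocations, get_metric_code_locations
+from sentry.sentry_metrics.querying.metadata import (
+    MetricCodeLocations,
+    convert_metric_names_to_mris,
+    get_metric_code_locations,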
get_metrics_meta, + get_tag_values, +) from sentry.sentry_metrics.querying.samples_list import get_sample_list_executor_cls from sentry.sentry_metrics.querying.types import QueryOrder, QueryType from sentry.sentry_metrics.use_case_id_registry import ( @@ -45,14 +51,7 @@ get_use_case_id_api_access, ) from sentry.sentry_metrics.utils import string_to_use_case_id -from sentry.snuba.metrics import ( - QueryDefinition, - get_all_tags, - get_metrics_meta, - get_series, - get_single_metric_info, - get_tag_values, -) +from sentry.snuba.metrics import QueryDefinition, get_all_tags, get_series, get_single_metric_info from sentry.snuba.metrics.naming_layer.mri import is_mri from sentry.snuba.metrics.utils import DerivedMetricException, DerivedMetricParseException from sentry.snuba.referrer import Referrer @@ -155,10 +154,8 @@ def get(self, request: Request, organization) -> Response: if not projects: raise InvalidParams("You must supply at least one project to see its metrics") - start, end = get_date_range_from_params(request.GET) - metrics = get_metrics_meta( - projects=projects, use_case_ids=get_use_case_ids(request), start=start, end=end + organization=organization, projects=projects, use_case_ids=get_use_case_ids(request) ) return Response(metrics, status=200) @@ -246,25 +243,45 @@ class OrganizationMetricsTagDetailsEndpoint(OrganizationEndpoint): def get(self, request: Request, organization, tag_name) -> Response: metric_names = request.GET.getlist("metric") or [] + if len(metric_names) > 1: + raise ParseError( + "Please supply only a single metric name. Specifying multiple metric names is not supported for this endpoint." + ) projects = self.get_projects(request, organization) if not projects: raise InvalidParams("You must supply at least one project to see the tag values") - start, end = get_date_range_from_params(request.GET) - try: - tag_values = get_tag_values( - projects=projects, - tag_name=tag_name, - metric_names=metric_names, - use_case_id=get_use_case_id(request), - start=start, - end=end, - ) - except (InvalidParams, DerivedMetricParseException) as exc: + mris = convert_metric_names_to_mris(metric_names) + tag_values: set[str] = set() + for mri in mris: + mri_tag_values = get_tag_values( + organization=organization, + projects=projects, + use_case_ids=[get_use_case_id(request)], + mri=mri, + tag_key=tag_name, + ) + tag_values = tag_values.union(mri_tag_values) + + except InvalidParams: + raise NotFound(self._generate_not_found_message(metric_names, tag_name)) + + except DerivedMetricParseException as exc: raise ParseError(str(exc)) - return Response(tag_values, status=200) + tag_values_formatted = [{"key": tag_name, "value": tag_value} for tag_value in tag_values] + + if len(tag_values_formatted) > 0: + return Response(tag_values_formatted, status=200) + else: + raise NotFound(self._generate_not_found_message(metric_names, tag_name)) + + def _generate_not_found_message(self, metric_names: list[str], tag_name: str) -> str: + if len(metric_names) > 0: + return f"No data found for metric: {metric_names[0]} and tag: {tag_name}" + else: + return f"No data found for tag: {tag_name}" @region_silo_endpoint diff --git a/src/sentry/sentry_metrics/querying/metadata/__init__.py b/src/sentry/sentry_metrics/querying/metadata/__init__.py index 715b3addf7f1e6..57c86bd36b527c 100644 --- a/src/sentry/sentry_metrics/querying/metadata/__init__.py +++ b/src/sentry/sentry_metrics/querying/metadata/__init__.py @@ -1,3 +1,12 @@ +from .metrics import get_metrics_meta from .metrics_code_locations import 
MetricCodeLocations, get_metric_code_locations +from .tags import get_tag_values +from .utils import convert_metric_names_to_mris -__all__ = ["MetricCodeLocations", "get_metric_code_locations"] +__all__ = [ + "MetricCodeLocations", + "convert_metric_names_to_mris", + "get_metric_code_locations", + "get_metrics_meta", + "get_tag_values", +] diff --git a/src/sentry/sentry_metrics/querying/metadata/metrics.py b/src/sentry/sentry_metrics/querying/metadata/metrics.py new file mode 100644 index 00000000000000..21d4ec5f0fbb47 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/metadata/metrics.py @@ -0,0 +1,97 @@ +from collections import defaultdict +from collections.abc import Sequence + +from sentry.models.organization import Organization +from sentry.models.project import Project +from sentry.sentry_metrics.use_case_id_registry import UseCaseID +from sentry.snuba.metrics import parse_mri +from sentry.snuba.metrics.datasource import ( + _build_metric_meta, + get_metrics_blocking_state_of_projects, +) +from sentry.snuba.metrics.utils import BlockedMetric, MetricMeta +from sentry.snuba.metrics_layer.query import fetch_metric_mris + + +def get_metrics_meta( + organization: Organization, + projects: Sequence[Project], + use_case_ids: Sequence[UseCaseID], +) -> Sequence[MetricMeta]: + if not projects: + return [] + + metrics_metas = [] + + for use_case_id in use_case_ids: + stored_metrics = get_available_mris(organization, projects, use_case_id) + metrics_blocking_state = ( + get_metrics_blocking_state_of_projects(projects) + if UseCaseID.CUSTOM in use_case_ids + else {} + ) + + for metric_mri, project_ids in stored_metrics.items(): + parsed_mri = parse_mri(metric_mri) + + if parsed_mri is None: + continue + + blocking_status = [] + if (metric_blocking := metrics_blocking_state.get(metric_mri)) is not None: + blocking_status = [ + BlockedMetric( + isBlocked=is_blocked, blockedTags=blocked_tags, projectId=project_id + ) + for is_blocked, blocked_tags, project_id in metric_blocking + ] + # We delete the metric so that in the next steps we can just merge the remaining blocked metrics that are + # not stored. + del metrics_blocking_state[metric_mri] + + metrics_metas.append(_build_metric_meta(parsed_mri, project_ids, blocking_status)) + + for metric_mri, metric_blocking in metrics_blocking_state.items(): + parsed_mri = parse_mri(metric_mri) + if parsed_mri is None: + continue + + metrics_metas.append( + _build_metric_meta( + parsed_mri, + [], + [ + BlockedMetric( + isBlocked=is_blocked, blockedTags=blocked_tags, projectId=project_id + ) + for is_blocked, blocked_tags, project_id in metric_blocking + ], + ) + ) + + return metrics_metas + + +def get_available_mris( + organization: Organization, projects: Sequence[Project], use_case_id: UseCaseID +) -> dict[str, list[int]]: + """ + Returns a dictionary containing the Metrics MRIs available as keys, and the corresponding + list of project_ids in which the MRI is available as values. 
+    """
+    project_ids = [project.id for project in projects]
+    project_id_to_mris = fetch_metric_mris(organization.id, project_ids, use_case_id)
+    mris_to_project_ids = _convert_to_mris_to_project_ids_mapping(project_id_to_mris)
+
+    return mris_to_project_ids
+
+
+def _convert_to_mris_to_project_ids_mapping(project_id_to_mris: dict[int, list[str]]):
+    mris_to_project_ids: dict[str, list[int]] = defaultdict(list)
+
+    # Group each project id under every MRI it reports.
+    for project_id, mris in project_id_to_mris.items():
+        for mri in mris:
+            mris_to_project_ids[mri].append(project_id)
+
+    return mris_to_project_ids
diff --git a/src/sentry/sentry_metrics/querying/metadata/tags.py b/src/sentry/sentry_metrics/querying/metadata/tags.py
new file mode 100644
index 00000000000000..d0b3c054a09a21
--- /dev/null
+++ b/src/sentry/sentry_metrics/querying/metadata/tags.py
@@ -0,0 +1,60 @@
+from collections.abc import Sequence
+from dataclasses import dataclass
+
+from sentry.models.organization import Organization
+from sentry.models.project import Project
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
+from sentry.snuba.metrics_layer.query import fetch_metric_tag_keys, fetch_metric_tag_values
+
+
+@dataclass
+class TagValue:
+    key: str
+    value: str
+
+    def __hash__(self):
+        return hash((self.key, self.value))
+
+
+def get_tag_keys(
+    organization: Organization,
+    projects: Sequence[Project],
+    use_case_ids: Sequence[UseCaseID],
+    mris: list[str],
+) -> list[str]:
+    """
+    Get all available tag keys for the given MRIs, projects, and use_case_ids.
+    Returns a sorted list of strings representing the tag keys for a list of MRIs.
+    """
+    all_tag_keys: set[str] = set()
+    project_ids = [project.id for project in projects]
+    for mri in mris:
+        for use_case_id in use_case_ids:
+            tag_keys_per_project = fetch_metric_tag_keys(
+                organization.id, project_ids, use_case_id, mri
+            )
+            for tag_keys in tag_keys_per_project.values():
+                all_tag_keys = all_tag_keys.union(tag_keys)
+
+    return sorted(all_tag_keys)
+
+
+def get_tag_values(
+    organization: Organization,
+    projects: Sequence[Project],
+    use_case_ids: Sequence[UseCaseID],
+    mri: str,
+    tag_key: str,
+) -> list[str]:
+    """
+    Get all available tag values for an MRI and tag key from metrics.
+ """ + project_ids = [project.id for project in projects] + tag_values: set[str] = set() + for use_case_id in use_case_ids: + use_case_tag_values = fetch_metric_tag_values( + organization.id, project_ids, use_case_id, mri, tag_key + ) + tag_values = tag_values.union(use_case_tag_values) + + return list(tag_values) diff --git a/src/sentry/sentry_metrics/querying/metadata/utils.py b/src/sentry/sentry_metrics/querying/metadata/utils.py new file mode 100644 index 00000000000000..cc7df54f7e8b8e --- /dev/null +++ b/src/sentry/sentry_metrics/querying/metadata/utils.py @@ -0,0 +1,13 @@ +from sentry.snuba.metrics import get_mri +from sentry.snuba.metrics.naming_layer.mri import is_mri + + +def convert_metric_names_to_mris(metric_names: list[str]) -> list[str]: + mris: list[str] = [] + for metric_name in metric_names or (): + if is_mri(metric_name): + mris.append(metric_name) + else: + mris.append(get_mri(metric_name)) + + return mris diff --git a/tests/sentry/api/endpoints/test_organization_metrics_details.py b/tests/sentry/api/endpoints/test_organization_metrics_details.py index d79ef41bc71365..ad19524fbcafc3 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics_details.py +++ b/tests/sentry/api/endpoints/test_organization_metrics_details.py @@ -1,5 +1,3 @@ -from datetime import timedelta -from unittest import mock from unittest.mock import patch import pytest @@ -84,7 +82,9 @@ def test_metrics_details_with_public_use_case(self, get_metrics_meta): self.organization.slug, project=[self.project.id], useCase=UseCaseID.SESSIONS.value ) get_metrics_meta.assert_called_once_with( - projects=[self.project], use_case_ids=[UseCaseID.SESSIONS], start=mock.ANY, end=mock.ANY + organization=self.organization, + projects=[self.project], + use_case_ids=[UseCaseID.SESSIONS], ) get_metrics_meta.reset_mock() @@ -98,7 +98,9 @@ def test_metrics_details_with_public_use_case(self, get_metrics_meta): self.organization.slug, project=[self.project.id], useCase=UseCaseID.SESSIONS.value ) get_metrics_meta.assert_called_once_with( - projects=[self.project], use_case_ids=[UseCaseID.SESSIONS], start=mock.ANY, end=mock.ANY + organization=self.organization, + projects=[self.project], + use_case_ids=[UseCaseID.SESSIONS], ) @patch("sentry.api.endpoints.organization_metrics.get_metrics_meta") @@ -110,10 +112,9 @@ def test_metrics_details_with_private_use_case(self, get_metrics_meta): self.organization.slug, project=[self.project.id], useCase=UseCaseID.METRIC_STATS.value ) get_metrics_meta.assert_called_once_with( + organization=self.organization, projects=[self.project], use_case_ids=[UseCaseID.METRIC_STATS], - start=mock.ANY, - end=mock.ANY, ) get_metrics_meta.reset_mock() @@ -146,7 +147,7 @@ def test_metrics_details_default_use_cases(self, get_metrics_meta): self.login_as(user=self.user, superuser=True) self.get_success_response(self.organization.slug, project=[self.project.id]) get_metrics_meta.assert_called_once_with( - projects=[self.project], use_case_ids=all_use_case_ids, start=mock.ANY, end=mock.ANY + organization=self.organization, projects=[self.project], use_case_ids=all_use_case_ids ) get_metrics_meta.reset_mock() @@ -155,7 +156,9 @@ def test_metrics_details_default_use_cases(self, get_metrics_meta): self.login_as(user=self.user, staff=True) self.get_success_response(self.organization.slug, project=[self.project.id]) get_metrics_meta.assert_called_once_with( - projects=[self.project], use_case_ids=public_use_case_ids, start=mock.ANY, end=mock.ANY + organization=self.organization, + projects=[self.project], + 
use_case_ids=public_use_case_ids, ) get_metrics_meta.reset_mock() @@ -168,7 +171,9 @@ def test_metrics_details_default_use_cases(self, get_metrics_meta): self.login_as(user=normal_user) self.get_success_response(self.organization.slug, project=[self.project.id]) get_metrics_meta.assert_called_once_with( - projects=[self.project], use_case_ids=public_use_case_ids, start=mock.ANY, end=mock.ANY + organization=self.organization, + projects=[self.project], + use_case_ids=public_use_case_ids, ) def test_metrics_details_for_custom_metrics(self): @@ -215,30 +220,3 @@ def test_metrics_details_for_custom_metrics(self): assert data[2]["blockingStatus"] == [ {"isBlocked": True, "blockedTags": [], "projectId": project_1.id} ] - - def test_metrics_details_with_date_range(self): - metrics = ( - ("c:custom/clicks_1@none", 0), - ("c:custom/clicks_2@none", 1), - ("c:custom/clicks_3@none", 7), - ) - for mri, days in metrics: - self.store_metric( - self.project.organization.id, - self.project.id, - "counter", - mri, - {"transaction": "/hello"}, - int((self.now - timedelta(days=days)).timestamp()), - 10, - UseCaseID.CUSTOM, - ) - - for stats_period, expected_count in (("1d", 1), ("2d", 2), ("2w", 3)): - response = self.get_success_response( - self.organization.slug, - project=self.project.id, - useCase="custom", - statsPeriod=stats_period, - ) - assert len(response.data) == expected_count diff --git a/tests/sentry/api/endpoints/test_organization_metrics_metadata.py b/tests/sentry/api/endpoints/test_organization_metrics_metadata.py new file mode 100644 index 00000000000000..b678c21ae91d71 --- /dev/null +++ b/tests/sentry/api/endpoints/test_organization_metrics_metadata.py @@ -0,0 +1,179 @@ +from datetime import timedelta + +import pytest + +from sentry.sentry_metrics.use_case_id_registry import UseCaseID +from sentry.snuba.metrics.naming_layer import TransactionMRI +from sentry.testutils.cases import MetricsAPIBaseTestCase +from sentry.testutils.helpers.datetime import freeze_time + +pytestmark = pytest.mark.sentry_metrics + + +@freeze_time(MetricsAPIBaseTestCase.MOCK_DATETIME) +class OrganizationMetricsTagValues(MetricsAPIBaseTestCase): + method = "get" + endpoint = "sentry-api-0-organization-metrics-tag-details" + + def setUp(self): + super().setUp() + self.login_as(self.user) + + release_1 = self.create_release( + project=self.project, version="1.0", date_added=MetricsAPIBaseTestCase.MOCK_DATETIME + ) + release_2 = self.create_release( + project=self.project, + version="2.0", + date_added=MetricsAPIBaseTestCase.MOCK_DATETIME + timedelta(minutes=5), + ) + + # Use Case: TRANSACTIONS + for value, transaction, platform, env, release, time in ( + (1, "/hello", "android", "prod", release_1.version, self.now()), + (6, "/hello", "ios", "dev", release_2.version, self.now()), + (5, "/world", "windows", "prod", release_1.version, self.now() + timedelta(minutes=30)), + (3, "/hello", "ios", "dev", release_2.version, self.now() + timedelta(hours=1)), + (2, "/hello", "android", "dev", release_1.version, self.now() + timedelta(hours=1)), + ( + 4, + "/world", + "windows", + "prod", + release_2.version, + self.now() + timedelta(hours=1, minutes=30), + ), + ): + self.store_metric( + self.project.organization.id, + self.project.id, + "distribution", + TransactionMRI.DURATION.value, + { + "transaction": transaction, + "platform": platform, + "environment": env, + "release": release, + }, + self.now().timestamp(), + value, + UseCaseID.TRANSACTIONS, + ) + # Use Case: CUSTOM + for value, release, tag_value, time in ( + (1, 
release_1.version, "tag_value_1", self.now()), + (1, release_1.version, "tag_value_1", self.now()), + (1, release_1.version, "tag_value_2", self.now() - timedelta(days=40)), + (1, release_2.version, "tag_value_3", self.now() - timedelta(days=50)), + (1, release_2.version, "tag_value_4", self.now() - timedelta(days=60)), + ): + self.store_metric( + self.project.organization.id, + self.project.id, + "distribution", + "d:custom/my_test_metric@percent", + { + "transaction": "/hello", + "platform": "platform", + "environment": "prod", + "release": release, + "mytag": tag_value, + }, + self.now().timestamp(), + value, + UseCaseID.CUSTOM, + ) + + self.prod_env = self.create_environment(name="prod", project=self.project) + self.dev_env = self.create_environment(name="dev", project=self.project) + + def now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + + def test_tag_details_for_transactions_use_case(self): + response = self.get_success_response( + self.project.organization.slug, + "transaction", + metric=["d:transactions/duration@millisecond"], + project=[self.project.id], + useCase="transactions", + ) + assert sorted(response.data, key=lambda x: x["value"]) == [ + {"key": "transaction", "value": "/hello"}, + {"key": "transaction", "value": "/world"}, + ] + + def test_tag_details_for_custom_use_case(self): + response = self.get_success_response( + self.project.organization.slug, + "mytag", + metric=["d:custom/my_test_metric@percent"], + project=[self.project.id], + useCase="custom", + ) + assert sorted(response.data, key=lambda x: x["value"]) == [ + {"key": "mytag", "value": "tag_value_1"}, + {"key": "mytag", "value": "tag_value_2"}, + {"key": "mytag", "value": "tag_value_3"}, + {"key": "mytag", "value": "tag_value_4"}, + ] + + def test_non_existing_tag_for_transactions_use_case(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=["d:transactions/duration@millisecond"], + project=[self.project.id], + useCase="transactions", + ) + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: d:transactions/duration@millisecond and tag: my_non_existent_tag" + ) + + def test_non_existing_tag_for_custom_use_case(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=["d:custom/my_test_metric@percent"], + project=[self.project.id], + useCase="custom", + ) + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: d:custom/my_test_metric@percent and tag: my_non_existent_tag" + ) + + def test_tag_details_for_non_existent_metric(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=["d:custom/my_non_existent_test_metric@percent"], + project=[self.project.id], + useCase="custom", + ) + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: d:custom/my_non_existent_test_metric@percent and tag: my_non_existent_tag" + ) + + # fix this + def test_tag_details_for_multiple_supplied_metrics(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=[ + "d:custom/my_test_metric@percent", + "d:transactions/duration@millisecond", + ], + project=[self.project.id], + useCase="custom", + ) + + assert ( + response.json()["detail"] + == "Please supply only a single metric name. 
Specifying multiple metric names is not supported for this endpoint." + ) diff --git a/tests/sentry/api/endpoints/test_organization_metrics_tag_details.py b/tests/sentry/api/endpoints/test_organization_metrics_tag_details.py index 10cc6d2a1ca3e3..33beffe73442c3 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics_tag_details.py +++ b/tests/sentry/api/endpoints/test_organization_metrics_tag_details.py @@ -1,19 +1,10 @@ import time -from datetime import datetime, timedelta -from unittest.mock import patch import pytest from sentry.sentry_metrics import indexer from sentry.sentry_metrics.use_case_id_registry import UseCaseID -from sentry.snuba.metrics.naming_layer import get_mri -from sentry.snuba.metrics.naming_layer.public import SessionMetricKey from sentry.testutils.cases import MetricsAPIBaseTestCase, OrganizationMetricsIntegrationTestCase -from sentry.testutils.helpers.datetime import freeze_time -from tests.sentry.api.endpoints.test_organization_metrics import ( - MOCKED_DERIVED_METRICS, - mocked_mri_resolver, -) pytestmark = pytest.mark.sentry_metrics @@ -32,98 +23,20 @@ def now(self): def test_unknown_tag(self): _indexer_record(self.organization.id, "bar") - response = self.get_success_response(self.project.organization.slug, "bar") - assert response.data == [] + response = self.get_response(self.project.organization.slug, "bar") + assert response.status_code == 404 + assert response.json()["detail"] == "No data found for tag: bar" def test_non_existing_tag(self): response = self.get_response(self.project.organization.slug, "bar") - assert response.status_code == 400 + assert response.status_code == 404 + assert response.json()["detail"] == "No data found for tag: bar" - @patch("sentry.snuba.metrics.datasource.get_mri", mocked_mri_resolver(["bad"], get_mri)) - def test_non_existing_filter(self): + def test_non_existing_metric_name(self): _indexer_record(self.organization.id, "bar") response = self.get_response(self.project.organization.slug, "bar", metric="bad") - assert response.status_code == 200 - assert response.data == [] - - @patch( - "sentry.snuba.metrics.datasource.get_mri", - mocked_mri_resolver(["metric1", "metric2", "metric3", "random_tag"], get_mri), - ) - def test_metric_tag_details(self): - response = self.get_success_response( - self.organization.slug, - "tag1", - ) - assert response.data == [ - {"key": "tag1", "value": "value1"}, - {"key": "tag1", "value": "value2"}, - ] - - # When single metric_name is supplied, get only tag values for that metric: - response = self.get_success_response( - self.organization.slug, - "tag1", - metric=["metric1"], - ) - assert response.data == [ - {"key": "tag1", "value": "value1"}, - ] - - # When metric names are supplied, get intersection of tags: - response = self.get_success_response( - self.organization.slug, - "tag1", - metric=["metric1", "metric2"], - ) - assert response.data == [] - - # We need to ensure that if the tag is present in the indexer but has no values in the - # dataset, the intersection of it and other tags should not yield any results - _indexer_record(self.organization.id, "random_tag") - response = self.get_success_response( - self.organization.slug, - "tag1", - metric=["metric1", "random_tag"], - ) - assert response.data == [] - - def test_tag_values_for_session_status_tag(self): - self.store_session( - self.build_session( - project_id=self.project.id, - started=(time.time() // 60) * 60, - status="ok", - release="foobar", - errors=2, - ) - ) - response = self.get_response( - self.organization.slug, 
- "session.status", - ) - assert response.data["detail"] == "Tag name session.status is an unallowed tag" - - @freeze_time((datetime.now() - timedelta(hours=1)).replace(minute=30)) - def test_tag_values_for_derived_metrics(self): - self.store_session( - self.build_session( - project_id=self.project.id, - started=(time.time() // 60) * 60, - status="ok", - release="foobar", - errors=2, - ) - ) - response = self.get_response( - self.organization.slug, - "release", - metric=[ - SessionMetricKey.CRASH_FREE_RATE.value, - SessionMetricKey.ALL.value, - ], - ) - assert response.data == [{"key": "release", "value": "foobar"}] + assert response.status_code == 404 + assert response.json()["detail"] == "No data found for metric: bad and tag: bar" def test_metric_not_in_naming_layer(self): self.store_session( @@ -141,87 +54,8 @@ def test_metric_not_in_naming_layer(self): "release", metric=["session.abnormal_and_crashed"], ) - assert response.data == [] - - @freeze_time((datetime.now() - timedelta(hours=1)).replace(minute=30)) - def test_tag_values_for_composite_derived_metrics(self): - self.store_session( - self.build_session( - project_id=self.project.id, - started=(time.time() // 60) * 60, - status="ok", - release="foobar@2.0", - errors=2, - ) - ) - response = self.get_success_response( - self.organization.slug, - "release", - metric=[SessionMetricKey.HEALTHY.value], - ) - assert response.data == [{"key": "release", "value": "foobar@2.0"}] - - def test_tag_not_available_in_the_indexer(self): - response = self.get_response( - self.organization.slug, - "random_foo_tag", - metric=[SessionMetricKey.HEALTHY.value], - ) - assert response.status_code == 400 - assert response.json()["detail"] == "Tag random_foo_tag is not available in the indexer" - - @freeze_time((datetime.now() - timedelta(hours=1)).replace(minute=30)) - @patch("sentry.snuba.metrics.fields.base.DERIVED_METRICS", MOCKED_DERIVED_METRICS) - @patch("sentry.snuba.metrics.datasource.get_mri") - @patch("sentry.snuba.metrics.datasource.get_derived_metrics") - def test_incorrectly_setup_derived_metric(self, mocked_derived_metrics, mocked_mri): - mocked_derived_metrics.return_value = MOCKED_DERIVED_METRICS - mocked_mri.return_value = "crash_free_fake" - self.store_session( - self.build_session( - project_id=self.project.id, - started=(time.time() // 60) * 60, - status="ok", - release="foobar", - errors=2, - ) - ) - response = self.get_response( - self.organization.slug, - "release", - metric=["crash_free_fake"], + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: session.abnormal_and_crashed and tag: release" ) - assert response.json()["detail"] == ( - "The following metrics {'crash_free_fake'} cannot be computed from single entities. 
" - "Please revise the definition of these singular entity derived metrics" - ) - - def test_metric_tag_details_with_date_range(self): - mri = "c:custom/clicks@none" - transactions = ( - ("/hello", 0), - ("/world", 1), - ("/foo", 7), - ) - for transaction, days in transactions: - self.store_metric( - self.project.organization.id, - self.project.id, - "counter", - mri, - {"transaction": transaction}, - int((self.now - timedelta(days=days)).timestamp()), - 10, - UseCaseID.CUSTOM, - ) - - for stats_period, expected_count in (("1d", 1), ("2d", 2), ("2w", 3)): - response = self.get_success_response( - self.organization.slug, - "transaction", - metric=[mri], - project=self.project.id, - useCase="custom", - statsPeriod=stats_period, - ) - assert len(response.data) == expected_count diff --git a/tests/sentry/api/endpoints/test_organization_metrics_tag_details_v2.py b/tests/sentry/api/endpoints/test_organization_metrics_tag_details_v2.py new file mode 100644 index 00000000000000..1cb32ee1e8a61e --- /dev/null +++ b/tests/sentry/api/endpoints/test_organization_metrics_tag_details_v2.py @@ -0,0 +1,178 @@ +from datetime import timedelta + +import pytest + +from sentry.sentry_metrics.use_case_id_registry import UseCaseID +from sentry.snuba.metrics.naming_layer import TransactionMRI +from sentry.testutils.cases import MetricsAPIBaseTestCase +from sentry.testutils.helpers.datetime import freeze_time + +pytestmark = pytest.mark.sentry_metrics + + +@freeze_time(MetricsAPIBaseTestCase.MOCK_DATETIME) +class OrganizationMetricsTagValues(MetricsAPIBaseTestCase): + method = "get" + endpoint = "sentry-api-0-organization-metrics-tag-details" + + def setUp(self): + super().setUp() + self.login_as(self.user) + + release_1 = self.create_release( + project=self.project, version="1.0", date_added=MetricsAPIBaseTestCase.MOCK_DATETIME + ) + release_2 = self.create_release( + project=self.project, + version="2.0", + date_added=MetricsAPIBaseTestCase.MOCK_DATETIME + timedelta(minutes=5), + ) + + # Use Case: TRANSACTIONS + for value, transaction, platform, env, release, time in ( + (1, "/hello", "android", "prod", release_1.version, self.now()), + (6, "/hello", "ios", "dev", release_2.version, self.now()), + (5, "/world", "windows", "prod", release_1.version, self.now() + timedelta(minutes=30)), + (3, "/hello", "ios", "dev", release_2.version, self.now() + timedelta(hours=1)), + (2, "/hello", "android", "dev", release_1.version, self.now() + timedelta(hours=1)), + ( + 4, + "/world", + "windows", + "prod", + release_2.version, + self.now() + timedelta(hours=1, minutes=30), + ), + ): + self.store_metric( + self.project.organization.id, + self.project.id, + "distribution", + TransactionMRI.DURATION.value, + { + "transaction": transaction, + "platform": platform, + "environment": env, + "release": release, + }, + self.now().timestamp(), + value, + UseCaseID.TRANSACTIONS, + ) + # Use Case: CUSTOM + for value, release, tag_value, time in ( + (1, release_1.version, "tag_value_1", self.now()), + (1, release_1.version, "tag_value_1", self.now()), + (1, release_1.version, "tag_value_2", self.now() - timedelta(days=40)), + (1, release_2.version, "tag_value_3", self.now() - timedelta(days=50)), + (1, release_2.version, "tag_value_4", self.now() - timedelta(days=60)), + ): + self.store_metric( + self.project.organization.id, + self.project.id, + "distribution", + "d:custom/my_test_metric@percent", + { + "transaction": "/hello", + "platform": "platform", + "environment": "prod", + "release": release, + "mytag": tag_value, + }, + 
self.now().timestamp(), + value, + UseCaseID.CUSTOM, + ) + + self.prod_env = self.create_environment(name="prod", project=self.project) + self.dev_env = self.create_environment(name="dev", project=self.project) + + def now(self): + return MetricsAPIBaseTestCase.MOCK_DATETIME + + def test_tag_details_for_transactions_use_case(self): + response = self.get_success_response( + self.project.organization.slug, + "transaction", + metric=["d:transactions/duration@millisecond"], + project=[self.project.id], + useCase="transactions", + ) + assert sorted(response.data, key=lambda x: x["value"]) == [ + {"key": "transaction", "value": "/hello"}, + {"key": "transaction", "value": "/world"}, + ] + + def test_tag_details_for_custom_use_case(self): + response = self.get_success_response( + self.project.organization.slug, + "mytag", + metric=["d:custom/my_test_metric@percent"], + project=[self.project.id], + useCase="custom", + ) + assert sorted(response.data, key=lambda x: x["value"]) == [ + {"key": "mytag", "value": "tag_value_1"}, + {"key": "mytag", "value": "tag_value_2"}, + {"key": "mytag", "value": "tag_value_3"}, + {"key": "mytag", "value": "tag_value_4"}, + ] + + def test_non_existing_tag_for_transactions_use_case(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=["d:transactions/duration@millisecond"], + project=[self.project.id], + useCase="transactions", + ) + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: d:transactions/duration@millisecond and tag: my_non_existent_tag" + ) + + def test_non_existing_tag_for_custom_use_case(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=["d:custom/my_test_metric@percent"], + project=[self.project.id], + useCase="custom", + ) + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: d:custom/my_test_metric@percent and tag: my_non_existent_tag" + ) + + def test_tag_details_for_non_existent_metric(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=["d:custom/my_non_existent_test_metric@percent"], + project=[self.project.id], + useCase="custom", + ) + assert response.status_code == 404 + assert ( + response.json()["detail"] + == "No data found for metric: d:custom/my_non_existent_test_metric@percent and tag: my_non_existent_tag" + ) + + def test_tag_details_for_multiple_supplied_metrics(self): + response = self.get_error_response( + self.project.organization.slug, + "my_non_existent_tag", + metric=[ + "d:custom/my_test_metric@percent", + "d:transactions/duration@millisecond", + ], + project=[self.project.id], + useCase="custom", + ) + assert response.status_code == 400 + assert ( + response.json()["detail"] + == "Please supply only a single metric name. Specifying multiple metric names is not supported for this endpoint." 
+ ) diff --git a/tests/sentry/sentry_metrics/querying/metadata/__init__.py b/tests/sentry/sentry_metrics/querying/metadata/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tests/sentry/sentry_metrics/querying/metadata/test_metrics.py b/tests/sentry/sentry_metrics/querying/metadata/test_metrics.py new file mode 100644 index 00000000000000..60644dde7bbbe7 --- /dev/null +++ b/tests/sentry/sentry_metrics/querying/metadata/test_metrics.py @@ -0,0 +1,19 @@ +from sentry.sentry_metrics.querying.metadata.metrics import _convert_to_mris_to_project_ids_mapping + + +def test_convert_to_mris_to_project_ids_mapping(): + project_id_to_mris = { + 1: ["metric1", "metric2", "metric3"], + 2: ["metric1", "metric4", "metric5", "metric6"], + 3: ["metric1", "metric6"], + } + expected = { + "metric1": [1, 2, 3], + "metric2": [1], + "metric3": [1], + "metric4": [2], + "metric5": [2], + "metric6": [2, 3], + } + + assert _convert_to_mris_to_project_ids_mapping(project_id_to_mris) == expected From 1ef0c6af5c90f040d822c42b0f5de86244f32114 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 09:52:44 -0400 Subject: [PATCH 325/376] ref: move integration docs / sdk registry building to sentry.build (#70743) I'm planning to kill the setuptools-based build which is blocking our upgrade to python 3.12 -- this is the first step --- pyproject.toml | 1 + scripts/lib.sh | 2 +- setup.cfg | 3 +- src/sentry/build/__init__.py | 0 src/sentry/build/_download.py | 17 ++ src/sentry/build/_integration_docs.py | 117 ++++++++++++++ src/sentry/build/_js_sdk_registry.py | 33 ++++ src/sentry/runner/commands/repair.py | 30 +--- .../commands/build_integration_docs.py | 15 +- .../commands/build_js_sdk_registry.py | 38 +---- src/sentry/utils/integrationdocs.py | 148 +----------------- tests/sentry/utils/test_integrationdocs.py | 24 +-- 12 files changed, 187 insertions(+), 241 deletions(-) create mode 100644 src/sentry/build/__init__.py create mode 100644 src/sentry/build/_download.py create mode 100644 src/sentry/build/_integration_docs.py create mode 100644 src/sentry/build/_js_sdk_registry.py diff --git a/pyproject.toml b/pyproject.toml index ca64efdea039f5..f7d88bd56c7d45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -556,6 +556,7 @@ module = [ "sentry.api.helpers.source_map_helper", "sentry.buffer.base", "sentry.buffer.redis", + "sentry.build.*", "sentry.eventstore.reprocessing.redis", "sentry.issues", "sentry.issues.analytics", diff --git a/scripts/lib.sh b/scripts/lib.sh index 859924edacb53a..2922b518aa0454 100755 --- a/scripts/lib.sh +++ b/scripts/lib.sh @@ -188,7 +188,7 @@ create-superuser() { build-platform-assets() { echo "--> Building platform assets" - echo "from sentry.utils.integrationdocs import sync_docs; sync_docs(quiet=True)" | sentry exec + python3 -m sentry.build._integration_docs # make sure this didn't silently do nothing test -f src/sentry/integration-docs/android.json } diff --git a/setup.cfg b/setup.cfg index 8151da3c9ccf05..580bed09e41147 100644 --- a/setup.cfg +++ b/setup.cfg @@ -97,9 +97,10 @@ extend-ignore = E203,E501,E402,E731,B007,B009,B010,B011,B020,B023,B024,B026,B027 per-file-ignores = # these scripts must have minimal dependencies so opt out of the usual sentry rules - tools/*: S .github/*: S devenv/sync.py: S + src/sentry/build/*: S + tools/*: S # testing the options manager itself src/sentry/testutils/helpers/options.py, tests/sentry/options/test_manager.py: S011 diff --git 
a/src/sentry/build/__init__.py b/src/sentry/build/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/build/_download.py b/src/sentry/build/_download.py new file mode 100644 index 00000000000000..7d535a400b8240 --- /dev/null +++ b/src/sentry/build/_download.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +import time +import urllib.request +from typing import IO + + +def urlopen_with_retries(url: str, timeout: int = 5, retries: int = 10) -> IO[bytes]: + for i in range(retries): + try: + return urllib.request.urlopen(url, timeout=timeout) + except Exception: + if i == retries - 1: + raise + time.sleep(i * 0.01) + else: + raise AssertionError("unreachable") diff --git a/src/sentry/build/_integration_docs.py b/src/sentry/build/_integration_docs.py new file mode 100644 index 00000000000000..879dc0d36a5c93 --- /dev/null +++ b/src/sentry/build/_integration_docs.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +import argparse +import concurrent.futures +import json +import multiprocessing +import os.path + +from sentry.build._download import urlopen_with_retries + +_INTEGRATION_DOCS_URL = os.environ.get("INTEGRATION_DOCS_URL", "https://docs.sentry.io/_platforms/") + +_HERE = os.path.dirname(os.path.abspath(__file__)) +_TARGET = os.path.join(_HERE, "..", "integration-docs") + + +def _integration_id(platform_id: str, integration_id: str) -> str: + if integration_id == "_self": + return platform_id + return f"{platform_id}-{integration_id}" + + +def _dump_doc(dest: str, path: str, data: object) -> None: + expected_commonpath = os.path.realpath(dest) + doc_path = os.path.join(dest, f"{path}.json") + doc_real_path = os.path.realpath(doc_path) + + if expected_commonpath != os.path.commonpath([expected_commonpath, doc_real_path]): + raise AssertionError("illegal path access") + + directory = os.path.dirname(doc_path) + os.makedirs(directory, exist_ok=True) + with open(doc_path, "w", encoding="utf-8") as f: + f.write(json.dumps(data, indent=2)) + f.write("\n") + + +def _sync_one( + dest: str, platform_id: str, integration_id: str, path: str, quiet: bool = False +) -> None: + if not quiet: + print(f" syncing documentation for {platform_id}.{integration_id} integration") + + data = json.load(urlopen_with_retries(f"{_INTEGRATION_DOCS_URL}{path}")) + + key = _integration_id(platform_id, integration_id) + + _dump_doc( + dest, + key, + { + "id": key, + "name": data["name"], + "html": data["body"], + "link": data["doc_link"], + "wizard_setup": data.get("wizard_setup", None), + }, + ) + + +def _sync_docs(dest: str, *, quiet: bool = False) -> None: + if not quiet: + print("syncing documentation (platform index)") + data = json.load(urlopen_with_retries(f"{_INTEGRATION_DOCS_URL}_index.json")) + platform_list = [] + for platform_id, integrations in data["platforms"].items(): + platform_list.append( + { + "id": platform_id, + "name": integrations["_self"]["name"], + "integrations": [ + { + "id": _integration_id(platform_id, i_id), + "name": i_data["name"], + "type": i_data["type"], + "link": i_data["doc_link"], + } + for i_id, i_data in sorted(integrations.items(), key=lambda x: x[1]["name"]) + ], + } + ) + + platform_list.sort(key=lambda x: x["name"]) + + _dump_doc(dest, "_platforms", {"platforms": platform_list}) + + # This value is derived from https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor + MAX_THREADS = 32 + thread_count = min(len(data["platforms"]), multiprocessing.cpu_count() * 5, MAX_THREADS) + with 
concurrent.futures.ThreadPoolExecutor(thread_count) as exe: + for future in concurrent.futures.as_completed( + exe.submit( + _sync_one, + dest, + platform_id, + integration_id, + integration["details"], + quiet=quiet, + ) + for platform_id, platform_data in data["platforms"].items() + for integration_id, integration in platform_data.items() + ): + future.result() # needed to trigger exceptions + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--dest", default=_TARGET) + parser.add_argument("--quiet", action="store_true") + args = parser.parse_args() + + _sync_docs(args.dest, quiet=args.quiet) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/src/sentry/build/_js_sdk_registry.py b/src/sentry/build/_js_sdk_registry.py new file mode 100644 index 00000000000000..c11014ea22f44d --- /dev/null +++ b/src/sentry/build/_js_sdk_registry.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +import argparse +import json +import os.path + +from sentry.build._download import urlopen_with_retries + +_HERE = os.path.dirname(os.path.abspath(__file__)) +_TARGET = os.path.join(_HERE, "..", "loader") + + +def _download(dest: str) -> None: + resp = urlopen_with_retries( + "https://release-registry.services.sentry.io/sdks/sentry.javascript.browser/versions" + ) + data = json.load(resp) + with open(os.path.join(dest, "_registry.json"), "w", encoding="UTF-8") as f: + json.dump(data, f, indent=2) + + +def main() -> int: # convenience to debug with `python -m ...` + parser = argparse.ArgumentParser() + parser.add_argument("--dest", default=_TARGET) + args = parser.parse_args() + + _download(args.dest) + + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/src/sentry/runner/commands/repair.py b/src/sentry/runner/commands/repair.py index 1f03dd615e6a32..658ce5d22ebea9 100644 --- a/src/sentry/runner/commands/repair.py +++ b/src/sentry/runner/commands/repair.py @@ -1,4 +1,3 @@ -import os from collections.abc import Generator from contextlib import contextmanager @@ -24,21 +23,6 @@ def catchable_atomic() -> Generator[None, None, None]: pass -def sync_docs() -> None: - click.echo("Forcing documentation sync") - from sentry.utils.integrationdocs import DOC_FOLDER, sync_docs - - if os.access(DOC_FOLDER, os.W_OK): - try: - sync_docs() - except Exception as e: - click.echo(" - skipping, failure: %s" % e) - elif os.path.isdir(DOC_FOLDER): - click.echo(" - skipping, path cannot be written to: %r" % DOC_FOLDER) - else: - click.echo(" - skipping, path does not exist: %r" % DOC_FOLDER) - - @region_silo_function def create_missing_dsns() -> None: from sentry.models.project import Project @@ -71,24 +55,14 @@ def fix_group_counters() -> None: @click.command() -@click.option( - "--with-docs/--without-docs", - default=False, - help="Synchronize and repair embedded documentation. This " "is disabled by default.", -) @configuration -def repair(with_docs: bool) -> None: +def repair() -> None: """Attempt to repair any invalid data. This by default will correct some common issues like projects missing - DSNs or counters desynchronizing. Optionally it can also synchronize - the current client documentation from the Sentry documentation server - (--with-docs). + DSNs or counters desynchronizing. 
""" - if with_docs: - sync_docs() - try: create_missing_dsns() fix_group_counters() diff --git a/src/sentry/utils/distutils/commands/build_integration_docs.py b/src/sentry/utils/distutils/commands/build_integration_docs.py index 39a48ee0dfbc0e..0afc68946bd565 100644 --- a/src/sentry/utils/distutils/commands/build_integration_docs.py +++ b/src/sentry/utils/distutils/commands/build_integration_docs.py @@ -1,7 +1,8 @@ import logging -import os.path -from .base import ROOT, BaseBuildCommand +from sentry.build._integration_docs import _TARGET, _sync_docs + +from .base import BaseBuildCommand log = logging.getLogger(__name__) @@ -10,13 +11,9 @@ class BuildIntegrationDocsCommand(BaseBuildCommand): description = "build integration docs" def get_dist_paths(self): - return [ - # Also see sentry.utils.integrationdocs.DOC_FOLDER - os.path.join(ROOT, "src", "sentry", "integration-docs") - ] + # Also see sentry.utils.integrationdocs.DOC_FOLDER + return [_TARGET] def _build(self): - from sentry.utils.integrationdocs import sync_docs - log.info("downloading integration docs") - sync_docs() + _sync_docs(_TARGET) diff --git a/src/sentry/utils/distutils/commands/build_js_sdk_registry.py b/src/sentry/utils/distutils/commands/build_js_sdk_registry.py index 4958d4388c9a87..be41e65ec572b7 100644 --- a/src/sentry/utils/distutils/commands/build_js_sdk_registry.py +++ b/src/sentry/utils/distutils/commands/build_js_sdk_registry.py @@ -1,49 +1,15 @@ -# NOTE: This is run external to sentry as well as part of the setup -# process. Thus we do not want to import non stdlib things here. - -import json # NOQA - -# Import the stdlib json instead of sentry.utils.json, since this command is -# run in setup.py import logging -import os -from urllib.request import urlopen -import sentry +from sentry.build._js_sdk_registry import _TARGET, _download from .base import BaseBuildCommand log = logging.getLogger(__name__) -JS_SDK_REGISTRY_URL = ( - "https://release-registry.services.sentry.io/sdks/sentry.javascript.browser/versions" -) -LOADER_FOLDER = os.path.abspath(os.path.join(os.path.dirname(sentry.__file__), "loader")) - - -def dump_registry(path, data): - fn = os.path.join(LOADER_FOLDER, path + ".json") - directory = os.path.dirname(fn) - os.makedirs(directory, exist_ok=True) - with open(fn, "w", encoding="utf-8") as f: - f.write(json.dumps(data, indent=2)) - f.write("\n") - - -def sync_registry(): - body = urlopen(JS_SDK_REGISTRY_URL).read().decode("utf-8") - data = json.loads(body) - dump_registry("_registry", data) - class BuildJsSdkRegistryCommand(BaseBuildCommand): description = "build js sdk registry" def run(self): log.info("downloading js sdk information from the release registry") - try: - sync_registry() - except Exception: - log.exception( - "error occurred while trying to fetch js sdk information from the registry" - ) + _download(_TARGET) diff --git a/src/sentry/utils/integrationdocs.py b/src/sentry/utils/integrationdocs.py index aa7c83d3a090bc..eef240f92f4585 100644 --- a/src/sentry/utils/integrationdocs.py +++ b/src/sentry/utils/integrationdocs.py @@ -1,42 +1,11 @@ from __future__ import annotations -import concurrent.futures - -# Import the stdlib json instead of sentry.utils.json, since this command is -# run at build time -import json # noqa: S003 -import logging -import multiprocessing import os -import sys -import time -from typing import IO, Any, TypedDict -from urllib.request import urlopen - -import sentry - -# NOTE: This is run external to sentry as well as part of the setup -# process. 
Thus we do not want to import non stdlib things here. - +from typing import Any -class Integration(TypedDict): - key: str - type: str - details: str - doc_link: str - name: str - aliases: list[str] - categories: list[str] +import orjson - -class Platform(TypedDict): - id: str - name: str - integrations: list[dict[str, str]] - - -INTEGRATION_DOCS_URL = os.environ.get("INTEGRATION_DOCS_URL", "https://docs.sentry.io/_platforms/") -BASE_URL = INTEGRATION_DOCS_URL + "{}" +import sentry DOC_FOLDER = os.path.abspath(os.path.join(os.path.dirname(sentry.__file__), "integration-docs")) @@ -53,32 +22,10 @@ class SuspiciousDocPathOperation(Exception): Example: https://github.com/getsentry/raven-js/blob/master/docs/sentry-doc-config.json -Once the docs have been deployed, you can run `sentry repair --with-docs` to pull down +Once the docs have been deployed, you can run `make build-platform-assets` to pull down the latest list of integrations and serve them in your local Sentry install. """ -logger = logging.getLogger("sentry") - - -def echo(what: str) -> None: - sys.stdout.write(what + "\n") - sys.stdout.flush() - - -def dump_doc(path: str, data: dict[str, Any]) -> None: - expected_commonpath = os.path.realpath(DOC_FOLDER) - doc_path = os.path.join(DOC_FOLDER, f"{path}.json") - doc_real_path = os.path.realpath(doc_path) - - if expected_commonpath != os.path.commonpath([expected_commonpath, doc_real_path]): - raise SuspiciousDocPathOperation("illegal path access") - - directory = os.path.dirname(doc_path) - os.makedirs(directory, exist_ok=True) - with open(doc_path, "w", encoding="utf-8") as f: - f.write(json.dumps(data, indent=2)) - f.write("\n") - def load_doc(path: str) -> dict[str, Any] | None: expected_commonpath = os.path.realpath(DOC_FOLDER) @@ -90,91 +37,6 @@ def load_doc(path: str) -> dict[str, Any] | None: try: with open(doc_path, encoding="utf-8") as f: - return json.load(f) + return orjson.loads(f.read()) except OSError: return None - - -def get_integration_id(platform_id: str, integration_id: str) -> str: - if integration_id == "_self": - return platform_id - return f"{platform_id}-{integration_id}" - - -def urlopen_with_retries(url: str, timeout: int = 5, retries: int = 10) -> IO[bytes]: - for i in range(retries): - try: - return urlopen(url, timeout=timeout) - except Exception: - if i == retries - 1: - raise - time.sleep(i * 0.01) - else: - raise AssertionError("unreachable") - - -def sync_docs(quiet: bool = False) -> None: - if not quiet: - echo("syncing documentation (platform index)") - data: dict[str, dict[str, dict[str, Integration]]] - data = json.load(urlopen_with_retries(BASE_URL.format("_index.json"))) - platform_list: list[Platform] = [] - for platform_id, integrations in data["platforms"].items(): - platform_list.append( - { - "id": platform_id, - "name": integrations["_self"]["name"], - "integrations": [ - { - "id": get_integration_id(platform_id, i_id), - "name": i_data["name"], - "type": i_data["type"], - "link": i_data["doc_link"], - } - for i_id, i_data in sorted(integrations.items(), key=lambda x: x[1]["name"]) - ], - } - ) - - platform_list.sort(key=lambda x: x["name"]) - - dump_doc("_platforms", {"platforms": platform_list}) - - # This value is derived from https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor - MAX_THREADS = 32 - thread_count = min(len(data["platforms"]), multiprocessing.cpu_count() * 5, MAX_THREADS) - with concurrent.futures.ThreadPoolExecutor(thread_count) as exe: - for future in concurrent.futures.as_completed( - exe.submit( 
- sync_integration_docs, - platform_id, - integration_id, - integration["details"], - quiet, - ) - for platform_id, platform_data in data["platforms"].items() - for integration_id, integration in platform_data.items() - ): - future.result() # needed to trigger exceptions - - -def sync_integration_docs( - platform_id: str, integration_id: str, path: str, quiet: bool = False -) -> None: - if not quiet: - echo(f" syncing documentation for {platform_id}.{integration_id} integration") - - data = json.load(urlopen_with_retries(BASE_URL.format(path))) - - key = get_integration_id(platform_id, integration_id) - - dump_doc( - key, - { - "id": key, - "name": data["name"], - "html": data["body"], - "link": data["doc_link"], - "wizard_setup": data.get("wizard_setup", None), - }, - ) diff --git a/tests/sentry/utils/test_integrationdocs.py b/tests/sentry/utils/test_integrationdocs.py index 47be2e29f75bf8..67bbbef0b735eb 100644 --- a/tests/sentry/utils/test_integrationdocs.py +++ b/tests/sentry/utils/test_integrationdocs.py @@ -1,6 +1,6 @@ import pytest -from sentry.utils.integrationdocs import SuspiciousDocPathOperation, dump_doc, load_doc +from sentry.utils.integrationdocs import SuspiciousDocPathOperation, load_doc @pytest.mark.parametrize( @@ -21,25 +21,3 @@ def test_path_traversal_attempt_on_load_doc_raises_exception(path): (msg,) = excinfo.value.args assert msg == "illegal path access" - - -@pytest.mark.parametrize( - "path", - [ - "/", - "/..", - "//....", - "/%5c..", - "../", - "../../", - "../../../etc/passwd", - ], -) -def test_path_traversal_attempt_on_dump_doc_raises_exception(path): - data = {"foo": "bar", "baz": 1234} - - with pytest.raises(SuspiciousDocPathOperation) as excinfo: - dump_doc(path, data) - - (msg,) = excinfo.value.args - assert msg == "illegal path access" From 3360365439eb1f962c46ab88f388446c1c826ef9 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Mon, 13 May 2024 09:55:06 -0400 Subject: [PATCH 326/376] ref: enable bytes warnings in test (#69910) this prevents doing silly things like `str(b'some bytes')`, formatting a bytes directly (without `!r`) etc. -- I've cleaned up all the existing problems in other PRs (originally noticed due to a bunch of GCP logspam!) --- Makefile | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index f4cc5cdd91b152..c7b4b9698311ff 100644 --- a/Makefile +++ b/Makefile @@ -91,7 +91,7 @@ fetch-release-registry: run-acceptance: @echo "--> Running acceptance tests" - pytest tests/acceptance --cov . --cov-report="xml:.artifacts/acceptance.coverage.xml" --json-report --json-report-file=".artifacts/pytest.acceptance.json" --json-report-omit=log + python3 -b -m pytest tests/acceptance --cov . 
--cov-report="xml:.artifacts/acceptance.coverage.xml" --json-report --json-report-file=".artifacts/pytest.acceptance.json" --json-report-omit=log @echo "" test-cli: create-db @@ -130,7 +130,7 @@ COV_ARGS = --cov-report="xml:.artifacts/python.coverage.xml" test-python-ci: @echo "--> Running CI Python tests" - pytest \ + python3 -b -m pytest \ tests \ --ignore tests/acceptance \ --ignore tests/apidocs \ @@ -151,7 +151,7 @@ test-monolith-dbs: @echo "--> Running CI Python tests (SENTRY_USE_MONOLITH_DBS=1)" SENTRY_LEGACY_TEST_SUITE=1 \ SENTRY_USE_MONOLITH_DBS=1 \ - pytest \ + python3 -b -m pytest \ tests/sentry/backup/test_exhaustive.py \ tests/sentry/backup/test_exports.py \ tests/sentry/backup/test_imports.py \ @@ -168,16 +168,16 @@ test-monolith-dbs: test-tools: @echo "--> Running tools tests" @# bogus configuration to force vanilla pytest - python3 -m pytest -c setup.cfg --confcutdir tests/tools tests/tools -vv --cov=tools --cov=tests/tools --cov-report="xml:.artifacts/tools.coverage.xml" + python3 -b -m pytest -c setup.cfg --confcutdir tests/tools tests/tools -vv --cov=tools --cov=tests/tools --cov-report="xml:.artifacts/tools.coverage.xml" @echo "" # JavaScript relay tests are meant to be run within Symbolicator test suite, as they are parametrized to verify both processing pipelines during migration process. # Running Locally: Run `sentry devservices up kafka` before starting these tests test-symbolicator: @echo "--> Running symbolicator tests" - pytest tests/symbolicator -vv --cov . --cov-report="xml:.artifacts/symbolicator.coverage.xml" - pytest tests/relay_integration/lang/javascript/ -vv -m symbolicator - pytest tests/relay_integration/lang/java/ -vv -m symbolicator + python3 -b -m pytest tests/symbolicator -vv --cov . --cov-report="xml:.artifacts/symbolicator.coverage.xml" + python3 -b -m pytest tests/relay_integration/lang/javascript/ -vv -m symbolicator + python3 -b -m pytest tests/relay_integration/lang/java/ -vv -m symbolicator @echo "" test-acceptance: node-version-check @@ -188,7 +188,7 @@ test-acceptance: node-version-check # XXX: this is called by `getsentry/relay` test-relay-integration: @echo "--> Running Relay integration tests" - pytest \ + python3 -b -m pytest \ tests/relay_integration \ tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py \ -vv --cov . --cov-report="xml:.artifacts/relay.coverage.xml" @@ -196,7 +196,7 @@ test-relay-integration: test-api-docs: build-api-docs yarn run validate-api-examples - pytest tests/apidocs + python3 -b -m pytest tests/apidocs @echo "" review-python-snapshots: From 70046f19f9f23ec811cfea5797b34e64a7bf45ab Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 13 May 2024 16:19:28 +0200 Subject: [PATCH 327/376] ref(orjson): Delete unused options (#70744) These options' usages were removed in https://github.com/getsentry/sentry/pull/70133 and https://github.com/getsentry/sentry/pull/70450. 
ref: https://github.com/getsentry/sentry/issues/68903 --- src/sentry/options/defaults.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 3f517c341d8118..8619a4fe0116f6 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -650,15 +650,6 @@ flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE, ) -# Currently unused `orjson` options -register("integrations.slack.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("auth.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("backup.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("event-manager.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("eventstore.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("flagpole.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) -register("relay.enable-orjson", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) - # Kafka Publisher register("kafka-publisher.raw-event-sample-rate", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) From e8e01b01e8ebe7d7985736623041cbaa53c5fd28 Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Mon, 13 May 2024 07:20:37 -0700 Subject: [PATCH 328/376] feat(crons): Add deletion apis for processing errors (#70639) This adds apis at both the monitor and project level for deleting checkin processing errors. --- src/sentry/api/urls.py | 8 +++ src/sentry/apidocs/parameters.py | 7 +++ .../project_processing_errors_details.py | 60 +++++++++++++++++++ src/sentry/monitors/processing_errors.py | 44 +++++++++++--- .../test_project_processing_errors_details.py | 45 ++++++++++++++ .../sentry/monitors/test_processing_errors.py | 28 +++++++-- 6 files changed, 180 insertions(+), 12 deletions(-) create mode 100644 src/sentry/monitors/endpoints/project_processing_errors_details.py create mode 100644 tests/sentry/monitors/endpoints/test_project_processing_errors_details.py diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 183342307085f7..ea7ffd3e66cd45 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -651,6 +651,9 @@ from ..monitors.endpoints.organization_monitor_processing_errors_index import ( OrganizationMonitorProcessingErrorsIndexEndpoint, ) +from ..monitors.endpoints.project_processing_errors_details import ( + ProjectProcessingErrorsDetailsEndpoint, +) # issues endpoints are available both top level (by numerical ID) as well as coupled # to the organization (and queryable via short ID) @@ -2759,6 +2762,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectMonitorEnvironmentDetailsEndpoint.as_view(), name="sentry-api-0-project-monitor-environment-details", ), + re_path( + r"^(?P[^\/]+)/(?P[^\/]+)/processing-errors/(?P[^\/]+)/$", + ProjectProcessingErrorsDetailsEndpoint.as_view(), + name="sentry-api-0-project-processing-errors-details", + ), re_path( r"^(?P[^\/]+)/(?P[^\/]+)/monitors/(?P[^\/]+)/processing-errors/$", ProjectMonitorProcessingErrorsIndexEndpoint.as_view(), diff --git a/src/sentry/apidocs/parameters.py b/src/sentry/apidocs/parameters.py index e9d553324d64e1..bbc94baf76bbbf 100644 --- a/src/sentry/apidocs/parameters.py +++ b/src/sentry/apidocs/parameters.py @@ -231,6 +231,13 @@ class MonitorParams: type=str, description="The owner of the monitor, in the format `user:id` or `team:id`. 
May be specified multiple times.", ) + PROCESSING_ERROR_ID = OpenApiParameter( + name="processing_error_id", + location="path", + required=False, + type=OpenApiTypes.UUID, + description="The id of the processing error.", + ) class EventParams: diff --git a/src/sentry/monitors/endpoints/project_processing_errors_details.py b/src/sentry/monitors/endpoints/project_processing_errors_details.py new file mode 100644 index 00000000000000..3a85add3a80eca --- /dev/null +++ b/src/sentry/monitors/endpoints/project_processing_errors_details.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from uuid import UUID + +from drf_spectacular.utils import extend_schema +from rest_framework.exceptions import ValidationError +from rest_framework.permissions import BasePermission +from rest_framework.request import Request +from rest_framework.response import Response + +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import region_silo_endpoint +from sentry.api.bases import ProjectEndpoint +from sentry.apidocs.constants import ( + RESPONSE_FORBIDDEN, + RESPONSE_NO_CONTENT, + RESPONSE_NOT_FOUND, + RESPONSE_UNAUTHORIZED, +) +from sentry.apidocs.parameters import GlobalParams, MonitorParams +from sentry.models.project import Project +from sentry.monitors.endpoints.base import ProjectMonitorPermission +from sentry.monitors.processing_errors import CheckinProcessErrorsManager, InvalidProjectError + + +@region_silo_endpoint +@extend_schema(tags=["Crons"]) +class ProjectProcessingErrorsDetailsEndpoint(ProjectEndpoint): + permission_classes: tuple[type[BasePermission], ...] = (ProjectMonitorPermission,) + + publish_status = { + "DELETE": ApiPublishStatus.PRIVATE, + } + owner = ApiOwner.CRONS + + @extend_schema( + operation_id="Delete a processing error for a Monitor", + parameters=[ + GlobalParams.ORG_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + MonitorParams.PROCESSING_ERROR_ID, + ], + responses={ + 204: RESPONSE_NO_CONTENT, + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def delete(self, request: Request, project: Project, uuid: str) -> Response: + try: + parsed_uuid = UUID(uuid) + except ValueError: + raise ValidationError("Invalid UUID") + try: + CheckinProcessErrorsManager().delete(project, parsed_uuid) + except InvalidProjectError: + raise ValidationError("Invalid uuid for project") + return self.respond(status=204) diff --git a/src/sentry/monitors/processing_errors.py b/src/sentry/monitors/processing_errors.py index ef152be9ff25b5..4956d535903fa7 100644 --- a/src/sentry/monitors/processing_errors.py +++ b/src/sentry/monitors/processing_errors.py @@ -5,6 +5,7 @@ import uuid from datetime import timedelta from enum import Enum +from itertools import chain from typing import Any, TypedDict from django.conf import settings @@ -126,11 +127,17 @@ def __eq__(self, other): return False +class InvalidProjectError(Exception): + pass + + class CheckinProcessErrorsManager: def _get_cluster(self) -> RedisCluster[str] | StrictRedis[str]: return redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) - def store(self, error: CheckinProcessingError, monitor: Monitor | None): + def _get_entity_identifier_from_error( + self, error: CheckinProcessingError, monitor: Monitor | None = None + ) -> str: if monitor is None: # Attempt to get the monitor from the checkin info if we failed to retrieve it during ingestion try: @@ -145,8 +152,12 @@ def store(self, error: CheckinProcessingError, monitor: 
Monitor | None): else: entity_identifier = self.build_project_identifier(error.checkin.message["project_id"]) + return entity_identifier + + def store(self, error: CheckinProcessingError, monitor: Monitor | None): + entity_identifier = self._get_entity_identifier_from_error(error, monitor) error_set_key = self.build_set_identifier(entity_identifier) - error_key = self.build_error_identifier(entity_identifier, error.id) + error_key = self.build_error_identifier(error.id) serialized_error = json.dumps(error.to_dict()) redis_client = self._get_cluster() pipeline = redis_client.pipeline(transaction=False) @@ -160,8 +171,8 @@ def store(self, error: CheckinProcessingError, monitor: Monitor | None): def build_set_identifier(self, entity_identifier: str) -> str: return f"monitors.processing_errors_set.{entity_identifier}" - def build_error_identifier(self, entity_identifier: str, uuid: uuid.UUID) -> str: - return f"monitors.processing_errors.{entity_identifier}.{uuid.hex}" + def build_error_identifier(self, uuid: uuid.UUID) -> str: + return f"monitors.processing_errors.{uuid.hex}" def build_monitor_identifier(self, monitor: Monitor) -> str: return f"monitor:{monitor.id}" @@ -177,15 +188,28 @@ def get_for_projects(self, projects: list[Project]) -> list[CheckinProcessingErr [self.build_project_identifier(project.id) for project in projects] ) + def delete(self, project: Project, uuid: uuid.UUID): + error_identifier = self.build_error_identifier(uuid) + redis = self._get_cluster() + raw_error = redis.get(error_identifier) + if raw_error is None: + return + error = CheckinProcessingError.from_dict(json.loads(raw_error)) + if error.checkin.message["project_id"] != project.id: + # TODO: Better exception class + raise InvalidProjectError() + + entity_identifier = self._get_entity_identifier_from_error(error) + self._delete_for_entity(entity_identifier, uuid) + def _get_for_entities(self, entity_identifiers: list[str]) -> list[CheckinProcessingError]: redis = self._get_cluster() pipeline = redis.pipeline() for identifier in entity_identifiers: pipeline.zrange(self.build_set_identifier(identifier), 0, MAX_ERRORS_PER_SET, desc=True) error_identifiers = [ - self.build_error_identifier(entity_identifier, uuid.UUID(error_identifier)) - for entity_identifier, error_identifiers in zip(entity_identifiers, pipeline.execute()) - for error_identifier in error_identifiers + self.build_error_identifier(uuid.UUID(error_identifier)) + for error_identifier in chain(*pipeline.execute()) ] errors = [ CheckinProcessingError.from_dict(json.loads(raw_error)) @@ -195,6 +219,12 @@ def _get_for_entities(self, entity_identifiers: list[str]) -> list[CheckinProces errors.sort(key=lambda error: error.checkin.ts.timestamp(), reverse=True) return errors + def _delete_for_entity(self, entity_identifier: str, uuid: uuid.UUID) -> None: + pipeline = self._get_cluster().pipeline() + pipeline.zrem(self.build_set_identifier(entity_identifier), uuid.hex) + pipeline.delete(self.build_error_identifier(uuid)) + pipeline.execute() + def handle_processing_errors(item: CheckinItem, error: CheckinValidationError): try: diff --git a/tests/sentry/monitors/endpoints/test_project_processing_errors_details.py b/tests/sentry/monitors/endpoints/test_project_processing_errors_details.py new file mode 100644 index 00000000000000..06f161fbdef43f --- /dev/null +++ b/tests/sentry/monitors/endpoints/test_project_processing_errors_details.py @@ -0,0 +1,45 @@ +from sentry.monitors.processing_errors import ( + CheckinProcessErrorsManager, + ProcessingError, + 
ProcessingErrorType, +) +from sentry.monitors.testutils import build_checkin_processing_error +from sentry.testutils.cases import APITestCase, MonitorTestCase + + +class ProjectProcessingErrorsDetailsEndpointTest(MonitorTestCase, APITestCase): + endpoint = "sentry-api-0-project-processing-errors-details" + method = "delete" + + def setUp(self): + super().setUp() + self.login_as(user=self.user) + + def test_empty(self): + self.get_error_response(self.organization.slug, self.project.slug, "hi") + + def test(self): + manager = CheckinProcessErrorsManager() + monitor_error = build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.CHECKIN_INVALID_GUID, {"guid": "bad"})], + message_overrides={"project_id": self.project.id}, + ) + + manager.store(monitor_error, None) + assert len(manager.get_for_projects([self.project])) == 1 + self.get_success_response(self.organization.slug, self.project.slug, monitor_error.id) + assert len(manager.get_for_projects([self.project])) == 0 + + def test_invalid_project(self): + manager = CheckinProcessErrorsManager() + monitor_error = build_checkin_processing_error( + [ProcessingError(ProcessingErrorType.CHECKIN_INVALID_GUID, {"guid": "bad"})], + message_overrides={"project_id": self.project.id}, + ) + unrelated_project = self.create_project() + manager.store(monitor_error, None) + assert len(manager.get_for_projects([self.project])) == 1 + self.get_error_response( + self.organization.slug, unrelated_project.slug, monitor_error.id, status_code=400 + ) + assert len(manager.get_for_projects([self.project])) == 1 diff --git a/tests/sentry/monitors/test_processing_errors.py b/tests/sentry/monitors/test_processing_errors.py index b2a6576406260f..d5e916f902f571 100644 --- a/tests/sentry/monitors/test_processing_errors.py +++ b/tests/sentry/monitors/test_processing_errors.py @@ -129,15 +129,33 @@ def test_get_missing_data(self): for processing_error in processing_errors: manager.store(processing_error, monitor) redis = manager._get_cluster() - redis.delete( - manager.build_error_identifier( - manager.build_monitor_identifier(monitor), processing_errors[0].id - ) - ) + redis.delete(manager.build_error_identifier(processing_errors[0].id)) fetched_processing_error = manager.get_for_monitor(monitor) assert len(fetched_processing_error) == 1 self.assert_processing_errors_equal(processing_errors[1], fetched_processing_error[0]) + def test_delete_for_monitor(self): + manager = CheckinProcessErrorsManager() + monitor = self.create_monitor() + processing_error = build_checkin_processing_error( + message_overrides={"project_id": self.project.id}, + payload_overrides={"monitor_slug": monitor.slug}, + ) + manager.store(processing_error, monitor) + assert len(manager.get_for_monitor(monitor)) == 1 + manager.delete(self.project, processing_error.id) + assert len(manager.get_for_monitor(monitor)) == 0 + + def test_delete_for_project(self): + manager = CheckinProcessErrorsManager() + processing_error = build_checkin_processing_error( + message_overrides={"project_id": self.project.id}, + ) + manager.store(processing_error, None) + assert len(manager.get_for_projects([self.project])) == 1 + manager.delete(self.project, processing_error.id) + assert len(manager.get_for_projects([self.project])) == 0 + class HandleProcessingErrorsTest(TestCase): def test(self): From 590910fd5a8062a5ee2f775eea56f1223c7bd695 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 13 May 2024 14:41:08 +0000 Subject: [PATCH 329/376] Revert "deprecate(related_issues): Drop support for doing all 
types of related issues (#70606)" This reverts commit 31ce316cc553f57645c981dfa48fadf735434afd. Co-authored-by: armenzg <44410+armenzg@users.noreply.github.com> --- src/sentry/api/endpoints/issues/related_issues.py | 14 +++++++++++--- src/sentry/issues/related/__init__.py | 11 +++++++++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/src/sentry/api/endpoints/issues/related_issues.py b/src/sentry/api/endpoints/issues/related_issues.py index d5606ab2a9a35d..6a1104e73f27fb 100644 --- a/src/sentry/api/endpoints/issues/related_issues.py +++ b/src/sentry/api/endpoints/issues/related_issues.py @@ -5,6 +5,7 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.group import GroupEndpoint +from sentry.issues.related import find_related_issues # To be deprecated from sentry.issues.related import RELATED_ISSUES_ALGORITHMS from sentry.models.group import Group from sentry.types.ratelimit import RateLimit, RateLimitCategory @@ -34,6 +35,13 @@ def get(self, request: Request, group: Group) -> Response: :pparam Group group: the group object """ # The type of related issues to retrieve. Can be either `same_root_cause` or `trace_connected`. - related_type = request.query_params["type"] - data, meta = RELATED_ISSUES_ALGORITHMS[related_type](group) - return Response({"type": related_type, "data": data, "meta": meta}) + related_type = request.query_params.get("type") + related_issues: list[dict[str, str | list[int] | dict[str, str]]] = [] + + if related_type in RELATED_ISSUES_ALGORITHMS: + data, meta = RELATED_ISSUES_ALGORITHMS[related_type](group) + return Response({"type": related_type, "data": data, "meta": meta}) + else: + # XXX: We will be deprecating this approach soon + related_issues = find_related_issues(group) + return Response({"data": [related_set for related_set in related_issues]}) diff --git a/src/sentry/issues/related/__init__.py b/src/sentry/issues/related/__init__.py index 51d26537e00d66..c5771783e68973 100644 --- a/src/sentry/issues/related/__init__.py +++ b/src/sentry/issues/related/__init__.py @@ -1,5 +1,7 @@ """This module exports a function to find related issues. It groups them by type.""" +from sentry.models.group import Group + from .same_root_cause import same_root_cause_analysis from .trace_connected import trace_connected_analysis @@ -9,3 +11,12 @@ "same_root_cause": same_root_cause_analysis, "trace_connected": trace_connected_analysis, } + + +def find_related_issues(group: Group) -> list[dict[str, str | list[int] | dict[str, str]]]: + related_issues: list[dict[str, str | list[int] | dict[str, str]]] = [] + for key, func in RELATED_ISSUES_ALGORITHMS.items(): + data, meta = func(group) + related_issues.append({"type": key, "data": data, "meta": meta}) + + return related_issues From d99e2d3b8a133ae0b79b11774885888c47b258fd Mon Sep 17 00:00:00 2001 From: Sigrid Huemer <32902192+s1gr1d@users.noreply.github.com> Date: Mon, 13 May 2024 16:49:43 +0200 Subject: [PATCH 330/376] ref(onboarding): Remove unsupported serverlesscloud (frontend) (#70719) Remove already hidden onboarding guide for Serverless Cloud. 
This package does not exist anymore: https://www.serverless.com/blog/serverless-cloud-spins-off-as-ampt ref https://github.com/getsentry/sentry-docs/issues/7718 --- .../onboarding/frameworkSuggestionModal.tsx | 1 - static/app/data/platformCategories.tsx | 1 - static/app/data/platforms.tsx | 8 -- .../node/serverlesscloud.spec.tsx | 15 --- .../node/serverlesscloud.tsx | 110 ------------------ static/app/types/project.tsx | 1 - 6 files changed, 136 deletions(-) delete mode 100644 static/app/gettingStartedDocs/node/serverlesscloud.spec.tsx delete mode 100644 static/app/gettingStartedDocs/node/serverlesscloud.tsx diff --git a/static/app/components/onboarding/frameworkSuggestionModal.tsx b/static/app/components/onboarding/frameworkSuggestionModal.tsx index 55f6eec274eccd..d451fa16327e26 100644 --- a/static/app/components/onboarding/frameworkSuggestionModal.tsx +++ b/static/app/components/onboarding/frameworkSuggestionModal.tsx @@ -68,7 +68,6 @@ const topNodeFrameworks: PlatformKey[] = [ 'node-express', 'node-awslambda', 'node-gcpfunctions', - 'node-serverlesscloud', 'node-koa', ]; diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index 86e44b71aafd0a..11206b7e174378 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -429,7 +429,6 @@ export const feedbackWebApiPlatforms: readonly PlatformKey[] = [ 'node-azurefunctions', 'node-connect', 'node-gcpfunctions', - 'node-serverlesscloud', 'minidump', 'python-asgi', 'python-awslambda', diff --git a/static/app/data/platforms.tsx b/static/app/data/platforms.tsx index 6749dfa83dc7a7..96a9a38e024a1c 100644 --- a/static/app/data/platforms.tsx +++ b/static/app/data/platforms.tsx @@ -410,14 +410,6 @@ export const platforms: PlatformIntegration[] = [ language: 'node', link: 'https://docs.sentry.io/platforms/javascript/guides/koa/', }, - { - // this isn't supported anymore, not in docs, nor in project selector - id: 'node-serverlesscloud', - name: 'Serverless (Node)', - type: 'framework', - language: 'node', - link: 'https://docs.sentry.io/platforms/javascript/guides/serverless-cloud/', - }, { id: 'php', name: 'PHP', diff --git a/static/app/gettingStartedDocs/node/serverlesscloud.spec.tsx b/static/app/gettingStartedDocs/node/serverlesscloud.spec.tsx deleted file mode 100644 index 1c3511d58502ce..00000000000000 --- a/static/app/gettingStartedDocs/node/serverlesscloud.spec.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import {renderWithOnboardingLayout} from 'sentry-test/onboarding/renderWithOnboardingLayout'; -import {screen} from 'sentry-test/reactTestingLibrary'; - -import docs from './serverlesscloud'; - -describe('serverlesscloud onboarding docs', function () { - it('renders docs correctly', function () { - renderWithOnboardingLayout(docs); - - // Renders main headings - expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument(); - expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument(); - expect(screen.getByRole('heading', {name: 'Verify'})).toBeInTheDocument(); - }); -}); diff --git a/static/app/gettingStartedDocs/node/serverlesscloud.tsx b/static/app/gettingStartedDocs/node/serverlesscloud.tsx deleted file mode 100644 index 062cfab45c82df..00000000000000 --- a/static/app/gettingStartedDocs/node/serverlesscloud.tsx +++ /dev/null @@ -1,110 +0,0 @@ -import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; -import type { - Docs, - DocsParams, - OnboardingConfig, -} from 
'sentry/components/onboarding/gettingStartedDoc/types'; -import {CrashReportWebApiOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; -import {t, tct} from 'sentry/locale'; - -type Params = DocsParams; - -const getSdkSetupSnippet = (params: Params) => ` -const api = require("@serverless/cloud"); -const Sentry = require('@sentry/node'); - -// or using ESM -// import api from "@serverless/cloud"; -// import * as Sentry from "@sentry/node"; - -Sentry.init({ -dsn: "${params.dsn}", -integrations: [ -// enable HTTP calls tracing -new Sentry.Integrations.Http({ tracing: true }), -// enable Express.js middleware tracing -new Sentry.Integrations.Express({ app }), -// Automatically instrument Node.js libraries and frameworks -...Sentry.autoDiscoverNodePerformanceMonitoringIntegrations(), -], -environment: params.INSTANCE_NAME, -// Performance Monitoring -// Capture 100% of the transactions -tracesSampleRate: 1.0, -}); - -// RequestHandler creates a separate execution context, so that all -// transactions/spans/breadcrumbs are isolated across requests -api.use(Sentry.Handlers.requestHandler()); -// TracingHandler creates a trace for every incoming request -api.use(Sentry.Handlers.tracingHandler()); - -// All controllers should live here -api.get("/", function rootHandler(req, res) { -res.end("Hello world!"); -}); - -// The error handler must be before any other error middleware and after all controllers -api.use(Sentry.Handlers.errorHandler()); - -// Optional fallthrough error handler -api.use(function onError(err, req, res, next) { -// The error id is attached to \`res.sentry\` to be returned -// and optionally displayed to the user for support. -res.statusCode = 500; -res.end(res.sentry + "\\n"); -});`; - -const getVerifySnippet = () => ` -api.get("/debug-sentry", function mainHandler(req, res) { - throw new Error("My first Sentry error!"); -}); -`; - -const onboarding: OnboardingConfig = { - install: () => [ - { - type: StepType.INSTALL, - description: tct('Add [code:@sentry/node] as a dependency:', {code: }), - configurations: [ - { - language: 'bash', - code: `cloud install @sentry/node:`, - }, - ], - }, - ], - configure: params => [ - { - type: StepType.CONFIGURE, - description: t('Sentry should be initialized as early in your app as possible.'), - configurations: [ - { - language: 'javascript', - code: getSdkSetupSnippet(params), - }, - ], - }, - ], - verify: () => [ - { - type: StepType.VERIFY, - description: t( - "This snippet contains an intentional error and can be used as a test to make sure that everything's working as expected." 
- ), - configurations: [ - { - language: 'javascript', - code: getVerifySnippet(), - }, - ], - }, - ], -}; - -const docs: Docs = { - onboarding, - crashReportOnboarding: CrashReportWebApiOnboarding, -}; - -export default docs; diff --git a/static/app/types/project.tsx b/static/app/types/project.tsx index fb996be42f4e7d..119a5cb7494fb9 100644 --- a/static/app/types/project.tsx +++ b/static/app/types/project.tsx @@ -228,7 +228,6 @@ export type PlatformKey = | 'node-koa' | 'node-nodeawslambda' | 'node-nodegcpfunctions' - | 'node-serverlesscloud' | 'objc' | 'other' | 'perl' From 12e9454353ca60e729aed40cad040e72d78d7f17 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 13 May 2024 10:56:20 -0400 Subject: [PATCH 331/376] ref(crons): Don't use relative imports in api/urls.py (#70753) --- src/sentry/api/urls.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index ea7ffd3e66cd45..0e7ce0fffd3784 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -132,6 +132,9 @@ from sentry.monitors.endpoints.organization_monitor_index_stats import ( OrganizationMonitorIndexStatsEndpoint, ) +from sentry.monitors.endpoints.organization_monitor_processing_errors_index import ( + OrganizationMonitorProcessingErrorsIndexEndpoint, +) from sentry.monitors.endpoints.organization_monitor_schedule_sample_data import ( OrganizationMonitorScheduleSampleDataEndpoint, ) @@ -150,6 +153,9 @@ ProjectMonitorProcessingErrorsIndexEndpoint, ) from sentry.monitors.endpoints.project_monitor_stats import ProjectMonitorStatsEndpoint +from sentry.monitors.endpoints.project_processing_errors_details import ( + ProjectProcessingErrorsDetailsEndpoint, +) from sentry.replays.endpoints.organization_replay_count import OrganizationReplayCountEndpoint from sentry.replays.endpoints.organization_replay_details import OrganizationReplayDetailsEndpoint from sentry.replays.endpoints.organization_replay_events_meta import ( @@ -648,12 +654,6 @@ __all__ = ("urlpatterns",) -from ..monitors.endpoints.organization_monitor_processing_errors_index import ( - OrganizationMonitorProcessingErrorsIndexEndpoint, -) -from ..monitors.endpoints.project_processing_errors_details import ( - ProjectProcessingErrorsDetailsEndpoint, -) # issues endpoints are available both top level (by numerical ID) as well as coupled # to the organization (and queryable via short ID) From ce6c36fd6c0a4c5fe2e3458c40fb207cede27e2b Mon Sep 17 00:00:00 2001 From: Billy Vong Date: Mon, 13 May 2024 12:28:43 -0230 Subject: [PATCH 332/376] test: skip flakey test -> newTraceDetails/trace.spec.tsx (#70757) See https://github.com/getsentry/sentry/actions/runs/9062640474/job/24896948589#step:6:609 --- static/app/views/performance/newTraceDetails/trace.spec.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/static/app/views/performance/newTraceDetails/trace.spec.tsx b/static/app/views/performance/newTraceDetails/trace.spec.tsx index 908284d9b5406b..8b1e9f4d21394d 100644 --- a/static/app/views/performance/newTraceDetails/trace.spec.tsx +++ b/static/app/views/performance/newTraceDetails/trace.spec.tsx @@ -726,7 +726,9 @@ describe('trace view', () => { await userEvent.keyboard('{arrowup}'); await waitFor(() => expect(rows[0]).toHaveFocus()); }); - it('arrow right expands row and fetches data', async () => { + // this is flakey + // eslint-disable-next-line jest/no-disabled-tests + it.skip('arrow right expands row and fetches data', async () => { const {virtualizedContainer} = 
await keyboardNavigationTestSetup(); const rows = virtualizedContainer.querySelectorAll(VISIBLE_TRACE_ROW_SELECTOR); From 367fdb4af73cf158e12edd07993267ad54b59532 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Mon, 13 May 2024 11:00:14 -0400 Subject: [PATCH 333/376] ref(perf): Improve `SpanDescriptionLink` (#70755) ## Changes - Remove `extractRoute` helper It _always_ returns `null` because the Starfish routes are gone, so it's pointless. - `SpanDescriptionLink` --> `SpanGroupDetailsLink` More descriptive! The component is, specifically, a link to a span's group details page. This is different from other description links, like when span descriptions link to a domain page. - Remove unused parameters The `endpoint` and `endpointMethod` parameters are left over from early Starfish days. Nothing provides or checks those parameters anymore. --- ...iptionLink.tsx => spanGroupDetailsLink.tsx} | 18 ++---------------- .../tableCells/spanDescriptionCell.tsx | 10 ++-------- .../app/views/starfish/utils/extractRoute.tsx | 11 ----------- 3 files changed, 4 insertions(+), 35 deletions(-) rename static/app/views/starfish/components/{spanDescriptionLink.tsx => spanGroupDetailsLink.tsx} (75%) delete mode 100644 static/app/views/starfish/utils/extractRoute.tsx diff --git a/static/app/views/starfish/components/spanDescriptionLink.tsx b/static/app/views/starfish/components/spanGroupDetailsLink.tsx similarity index 75% rename from static/app/views/starfish/components/spanDescriptionLink.tsx rename to static/app/views/starfish/components/spanGroupDetailsLink.tsx index 973da80f10bb74..ed4d85791d7022 100644 --- a/static/app/views/starfish/components/spanDescriptionLink.tsx +++ b/static/app/views/starfish/components/spanGroupDetailsLink.tsx @@ -6,7 +6,6 @@ import useOrganization from 'sentry/utils/useOrganization'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/textAlign'; import {SpanMetricsField} from 'sentry/views/starfish/types'; -import {extractRoute} from 'sentry/views/starfish/utils/extractRoute'; import {useRoutingContext} from 'sentry/views/starfish/utils/routingContext'; const {SPAN_OP} = SpanMetricsField; @@ -14,20 +13,11 @@ const {SPAN_OP} = SpanMetricsField; interface Props { description: React.ReactNode; projectId: number; - endpoint?: string; - endpointMethod?: string; group?: string; spanOp?: string; } -export function SpanDescriptionLink({ - group, - projectId, - endpoint, - endpointMethod, - spanOp, - description, -}: Props) { +export function SpanGroupDetailsLink({group, projectId, spanOp, description}: Props) { const location = useLocation(); const organization = useOrganization(); const routingContext = useRoutingContext(); @@ -35,8 +25,6 @@ export function SpanDescriptionLink({ const queryString = { ...location.query, project: projectId, - endpoint, - endpointMethod, ...(spanOp ? {[SPAN_OP]: spanOp} : {}), }; @@ -45,9 +33,7 @@ export function SpanDescriptionLink({ {group ? 
( {description} diff --git a/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx b/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx index d055111e481b8a..63e75dd0fa2087 100644 --- a/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx +++ b/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx @@ -5,7 +5,7 @@ import {Hovercard} from 'sentry/components/hovercard'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {FullSpanDescription} from 'sentry/views/starfish/components/fullSpanDescription'; -import {SpanDescriptionLink} from 'sentry/views/starfish/components/spanDescriptionLink'; +import {SpanGroupDetailsLink} from 'sentry/views/starfish/components/spanGroupDetailsLink'; import {ModuleName, SpanMetricsField} from 'sentry/views/starfish/types'; import {SQLishFormatter} from 'sentry/views/starfish/utils/sqlish/SQLishFormatter'; @@ -17,8 +17,6 @@ interface Props { description: string; moduleName: ModuleName; projectId: number; - endpoint?: string; - endpointMethod?: string; group?: string; spanOp?: string; } @@ -28,8 +26,6 @@ export function SpanDescriptionCell({ group, moduleName, spanOp, - endpoint, - endpointMethod, projectId, }: Props) { const formatterDescription = useMemo(() => { @@ -45,12 +41,10 @@ export function SpanDescriptionCell({ } const descriptionLink = ( - ); diff --git a/static/app/views/starfish/utils/extractRoute.tsx b/static/app/views/starfish/utils/extractRoute.tsx deleted file mode 100644 index 41b868ffa844f2..00000000000000 --- a/static/app/views/starfish/utils/extractRoute.tsx +++ /dev/null @@ -1,11 +0,0 @@ -import type {Location} from 'history'; - -export function extractRoute(location: Location) { - if (location.pathname.match(/^\/starfish\/api\//)) { - return 'api'; - } - if (location.pathname.match(/^\/starfish\/database\//)) { - return 'database'; - } - return null; -} From 0a9775bbe618045b6db2aad10079708d105f0a8d Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Mon, 13 May 2024 11:11:51 -0400 Subject: [PATCH 334/376] chore(perf): Remove frontend Starfish routes (#70758) These are not in use anymore anywhere. 
--- static/app/components/sidebar/sidebarItem.tsx | 3 +-- static/app/routes.tsx | 22 ------------------- .../views/starfish/utils/routingContext.tsx | 2 +- 3 files changed, 2 insertions(+), 25 deletions(-) diff --git a/static/app/components/sidebar/sidebarItem.tsx b/static/app/components/sidebar/sidebarItem.tsx index ee6f9e94a9b993..7709a3d2ff3534 100644 --- a/static/app/components/sidebar/sidebarItem.tsx +++ b/static/app/components/sidebar/sidebarItem.tsx @@ -295,8 +295,7 @@ export function isItemActive( location.pathname.includes('/alerts/') && !location.pathname.startsWith('/settings/')) || (item?.label === 'Releases' && location.pathname.includes('/release-thresholds/')) || - (item?.label === 'Performance' && location.pathname.includes('/performance/')) || - (item?.label === 'Starfish' && location.pathname.includes('/starfish/')) + (item?.label === 'Performance' && location.pathname.includes('/performance/')) ); } diff --git a/static/app/routes.tsx b/static/app/routes.tsx index 014aedee9ea37a..403f9a1a203816 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -1669,27 +1669,6 @@ function buildRoutes() { ); - const starfishRoutes = ( - import('sentry/views/starfish'))} - withOrgPath - > - - - import('sentry/views/performance/mobile/appStarts'))} - /> - import('sentry/views/performance/mobile/appStarts/screenSummary') - )} - /> - - - ); - const userFeedbackRoutes = ( (DEFAULT_VALUE); From 94e1d96a84f91dc937b357958bf5630eeada185d Mon Sep 17 00:00:00 2001 From: Matej Minar Date: Mon, 13 May 2024 17:28:59 +0200 Subject: [PATCH 335/376] chore(metrics): Remove metrics-stats flag on frontend (#70746) Relates to https://github.com/getsentry/sentry/issues/70724 --- static/app/utils/metrics/features.tsx | 6 ------ static/app/views/organizationStats/index.tsx | 6 +++--- static/app/views/organizationStats/usageChart/index.tsx | 4 +--- static/app/views/organizationStats/usageStatsOrg.tsx | 4 ++-- 4 files changed, 6 insertions(+), 14 deletions(-) diff --git a/static/app/utils/metrics/features.tsx b/static/app/utils/metrics/features.tsx index b37fecefb7197d..7819a9d6884fa5 100644 --- a/static/app/utils/metrics/features.tsx +++ b/static/app/utils/metrics/features.tsx @@ -16,12 +16,6 @@ export function hasCustomMetrics(organization: Organization) { ); } -export function hasMetricStats(organization: Organization) { - return ( - hasCustomMetrics(organization) && organization.features.includes('metrics-stats') - ); -} - /** * Returns the forceMetricsLayer query param for the alert * wrapped in an object so it can be spread into existing query params diff --git a/static/app/views/organizationStats/index.tsx b/static/app/views/organizationStats/index.tsx index 6912f98afdd15f..6862ebf2210ad7 100644 --- a/static/app/views/organizationStats/index.tsx +++ b/static/app/views/organizationStats/index.tsx @@ -35,7 +35,7 @@ import type { PageFilters, Project, } from 'sentry/types'; -import {hasMetricStats} from 'sentry/utils/metrics/features'; +import {hasCustomMetrics} from 'sentry/utils/metrics/features'; import withOrganization from 'sentry/utils/withOrganization'; import withPageFilters from 'sentry/utils/withPageFilters'; import HeaderTabs from 'sentry/views/organizationStats/header'; @@ -264,7 +264,7 @@ export class OrganizationStats extends Component { return organization.features.includes('session-replay'); } if (opt.value === DATA_CATEGORY_INFO.metrics.plural) { - return hasMetricStats(organization); + return hasCustomMetrics(organization); } return true; }); @@ -322,7 +322,7 @@ export class 
OrganizationStats extends Component { return organization.features.includes('session-replay'); } if (opt.value === DATA_CATEGORY_INFO.metrics.plural) { - return hasMetricStats(organization); + return hasCustomMetrics(organization); } return true; }); diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx index 5b0c2435081be6..9f5ace365410aa 100644 --- a/static/app/views/organizationStats/usageChart/index.tsx +++ b/static/app/views/organizationStats/usageChart/index.tsx @@ -363,9 +363,7 @@ function UsageChartBody({ if (option.value !== DATA_CATEGORY_INFO.metrics.plural) { return true; } - return ( - hasCustomMetrics(organization) && organization.features.includes('metrics-stats') - ); + return hasCustomMetrics(organization); }); }, [organization, categoryOptions]); diff --git a/static/app/views/organizationStats/usageStatsOrg.tsx b/static/app/views/organizationStats/usageStatsOrg.tsx index 66b1cd1d50450b..50a932ccf82d2a 100644 --- a/static/app/views/organizationStats/usageStatsOrg.tsx +++ b/static/app/views/organizationStats/usageStatsOrg.tsx @@ -22,7 +22,7 @@ import {space} from 'sentry/styles/space'; import type {DataCategoryInfo, IntervalPeriod, Organization} from 'sentry/types'; import {Outcome} from 'sentry/types'; import {parsePeriodToHours} from 'sentry/utils/dates'; -import {hasMetricStats} from 'sentry/utils/metrics/features'; +import {hasCustomMetrics} from 'sentry/utils/metrics/features'; import { FORMAT_DATETIME_DAILY, @@ -128,7 +128,7 @@ class UsageStatsOrganization< // Metric stats are not reported when grouping by category, so we make a separate request // and combine the results get metricsEndpoint(): [string, string, {query: Record}][] { - if (hasMetricStats(this.props.organization)) { + if (hasCustomMetrics(this.props.organization)) { return [ [ 'metricOrgStats', From 9475d1c9e587585efb8122bdc60de06b8ffb5bca Mon Sep 17 00:00:00 2001 From: Mark Story Date: Mon, 13 May 2024 11:38:15 -0400 Subject: [PATCH 336/376] feat: Add secondary database and update router (#69697) We've recently moved crons to a dedicated primary in saas. This has resulted in a tombstone issue as the joins used for tombstones don't work in the crons database. Reproducing this issue in the test suite has been challenging because in tests monitor tables and tombstone tables end up in the same connection. By adding an additional database, connection and a new db router we're able to more closely represent how the application operates in tests. 
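As background for the router change below: Django consults each class
listed in DATABASE_ROUTERS when deciding which connection a query,
relation, or migration should use. A minimal sketch of that protocol
(hypothetical names only -- the actual change layers this onto the
existing SiloRouter by overriding its connection resolution rather than
implementing these methods directly):

    class SecondaryTableRouter:
        # Tables pinned to the "secondary" connection.
        secondary_tables = {"sentry_monitor", "sentry_monitorcheckin"}

        def db_for_read(self, model, **hints):
            if model._meta.db_table in self.secondary_tables:
                return "secondary"
            return None  # no opinion; defer to the next router

        def db_for_write(self, model, **hints):
            return self.db_for_read(model, **hints)

        def allow_relation(self, obj1, obj2, **hints):
            # A queryset runs on a single connection, so joins cannot
            # span databases -- the tombstone failure mode reproduced
            # by this patch's test setup.
            return None

        def allow_migrate(self, db, app_label, model_name=None, **hints):
            return None

    # settings.py (hypothetical path)
    DATABASE_ROUTERS = ["path.to.SecondaryTableRouter"]

Because returning None defers the decision to the next router in the
list, a router like this composes with the silo routing that the test
settings already configure.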
--- .github/workflows/codecov_ats.yml | 3 +- scripts/lib.sh | 4 +- src/sentry/db/router.py | 19 +++++ .../monitors/consumers/monitor_consumer.py | 4 +- .../endpoints/base_monitor_details.py | 79 ++++++++++--------- .../hybrid_cloud/import_export/impl.py | 39 ++++----- src/sentry/tasks/deletion/hybrid_cloud.py | 1 + src/sentry/testutils/pytest/sentry.py | 6 +- .../api/endpoints/test_project_index.py | 2 + tests/sentry/deletions/test_apiapplication.py | 2 + tests/sentry/deletions/test_release.py | 2 + .../test_sentry_app_installations.py | 2 + tests/sentry/models/test_user.py | 6 ++ .../test_base_monitor_checkin_attachment.py | 7 +- .../tasks/deletion/test_hybrid_cloud.py | 20 +++-- tests/sentry/tasks/test_groupowner.py | 2 + 16 files changed, 132 insertions(+), 66 deletions(-) diff --git a/.github/workflows/codecov_ats.yml b/.github/workflows/codecov_ats.yml index cac817f938ebfa..2300d381df0cd6 100644 --- a/.github/workflows/codecov_ats.yml +++ b/.github/workflows/codecov_ats.yml @@ -132,8 +132,7 @@ jobs: "--ignore=tests/sentry/post_process_forwarder", "--ignore=tests/sentry/snuba", "--ignore=tests/sentry/search/events", - "--ignore=tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py", - "--ignore=tests/sentry/region_to_control/test_region_to_control_kafka.py" + "--ignore=tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py" ]' > .artifacts/codecov_ats/tests_to_skip.json echo '[]' > .artifacts/codecov_ats/tests_to_run.json # If we reached this point it means that ATS failed with some error diff --git a/scripts/lib.sh b/scripts/lib.sh index 2922b518aa0454..84642c743b00ec 100755 --- a/scripts/lib.sh +++ b/scripts/lib.sh @@ -165,9 +165,10 @@ create-db() { container_name=${POSTGRES_CONTAINER:-sentry_postgres} echo "--> Creating 'sentry' database" docker exec "${container_name}" createdb -h 127.0.0.1 -U postgres -E utf-8 sentry || true - echo "--> Creating 'control' and 'region' database" + echo "--> Creating 'control', 'region' and 'secondary' database" docker exec "${container_name}" createdb -h 127.0.0.1 -U postgres -E utf-8 control || true docker exec "${container_name}" createdb -h 127.0.0.1 -U postgres -E utf-8 region || true + docker exec "${container_name}" createdb -h 127.0.0.1 -U postgres -E utf-8 secondary || true } apply-migrations() { @@ -224,6 +225,7 @@ drop-db() { echo "--> Dropping 'control' and 'region' database" docker exec "${container_name}" dropdb --if-exists -h 127.0.0.1 -U postgres control docker exec "${container_name}" dropdb --if-exists -h 127.0.0.1 -U postgres region + docker exec "${container_name}" dropdb --if-exists -h 127.0.0.1 -U postgres secondary } reset-db() { diff --git a/src/sentry/db/router.py b/src/sentry/db/router.py index 819c108b066887..b9ce14aed31b3f 100644 --- a/src/sentry/db/router.py +++ b/src/sentry/db/router.py @@ -207,3 +207,22 @@ def allow_migrate(self, db, app_label, model=None, **hints): # Assume migrations with no model routing or hints need to run on # the default database. 
return db == "default" + + +class TestSiloMultiDatabaseRouter(SiloRouter): + """Silo router used in CI""" + + secondary_db_models = { + "sentry_monitor", + "sentry_monitorcheckin", + "sentry_monitorlocation", + "sentry_monitorenvironment", + "sentry_monitorincident", + "sentry_monitorenvbrokendetection", + } + + def _resolve_silo_connection(self, silo_modes: Iterable[SiloMode], table: str) -> str | None: + connection = super()._resolve_silo_connection(silo_modes=silo_modes, table=table) + if table in self.secondary_db_models: + return "secondary" + return connection diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index a139a6369c8c8c..4822f107d422ca 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -25,6 +25,7 @@ from sentry import quotas, ratelimits from sentry.conf.types.kafka_definition import Topic, get_topic_codec from sentry.constants import DataCategory, ObjectStatus +from sentry.db.postgres.transactions import in_test_hide_transaction_boundary from sentry.killswitches import killswitch_matches_context from sentry.models.project import Project from sentry.monitors.clock_dispatch import try_monitor_clock_tick @@ -822,7 +823,8 @@ def _process_checkin(item: CheckinItem, txn: Transaction | Span): ) else: txn.set_tag("outcome", "create_new_checkin") - signal_first_checkin(project, monitor) + with in_test_hide_transaction_boundary(): + signal_first_checkin(project, monitor) metrics.incr( "monitors.checkin.result", tags={**metric_kwargs, "status": "created_new_checkin"}, diff --git a/src/sentry/monitors/endpoints/base_monitor_details.py b/src/sentry/monitors/endpoints/base_monitor_details.py index 80c4269000e7d2..e2115903d0fdd6 100644 --- a/src/sentry/monitors/endpoints/base_monitor_details.py +++ b/src/sentry/monitors/endpoints/base_monitor_details.py @@ -176,13 +176,15 @@ def delete_monitor(self, request: Request, project: Project, monitor: Monitor) - Delete a monitor or monitor environments. 
""" environment_names = request.query_params.getlist("environment") + env_ids = None + if environment_names: + env_ids = list( + Environment.objects.filter( + organization_id=project.organization_id, name__in=environment_names + ).values_list("id", flat=True) + ) with transaction.atomic(router.db_for_write(MonitorEnvironment)): - if environment_names: - env_ids = list( - Environment.objects.filter( - organization_id=project.organization_id, name__in=environment_names - ).values_list("id", flat=True) - ) + if env_ids: monitor_objects = ( MonitorEnvironment.objects.filter( environment_id__in=env_ids, monitor_id=monitor.id @@ -199,8 +201,10 @@ def delete_monitor(self, request: Request, project: Project, monitor: Monitor) - MonitorStatus.DELETION_IN_PROGRESS, ] ) + .select_related("monitor") ) event = audit_log.get_event_id("MONITOR_ENVIRONMENT_REMOVE") + issue_alert_rule_id = None else: monitor_objects = Monitor.objects.filter(id=monitor.id).exclude( status__in=[ @@ -213,36 +217,6 @@ def delete_monitor(self, request: Request, project: Project, monitor: Monitor) - # Mark rule for deletion if present and monitor is being deleted monitor = monitor_objects.first() issue_alert_rule_id = monitor.config.get("alert_rule_id") if monitor else None - if issue_alert_rule_id: - rule = ( - Rule.objects.filter( - project_id=monitor.project_id, - id=issue_alert_rule_id, - ) - .exclude( - status__in=[ - ObjectStatus.PENDING_DELETION, - ObjectStatus.DELETION_IN_PROGRESS, - ] - ) - .first() - ) - if rule: - rule.update(status=ObjectStatus.PENDING_DELETION) - RuleActivity.objects.create( - rule=rule, user_id=request.user.id, type=RuleActivityType.DELETED.value - ) - scheduled_rule = RegionScheduledDeletion.schedule( - rule, days=0, actor=request.user - ) - self.create_audit_entry( - request=request, - organization=project.organization, - target_object=rule.id, - event=audit_log.get_event_id("RULE_REMOVE"), - data=rule.get_audit_log_data(), - transaction_id=scheduled_rule, - ) # create copy of queryset as update will remove objects monitor_objects_list = list(monitor_objects) @@ -258,6 +232,8 @@ def delete_monitor(self, request: Request, project: Project, monitor: Monitor) - quotas.backend.update_monitor_slug(monitor.slug, new_slug, monitor.project_id) monitor_object.update(slug=new_slug) + with transaction.atomic(router.db_for_write(Rule)): + for monitor_object in monitor_objects_list: schedule = RegionScheduledDeletion.schedule( monitor_object, days=0, actor=request.user ) @@ -269,5 +245,36 @@ def delete_monitor(self, request: Request, project: Project, monitor: Monitor) - data=monitor_object.get_audit_log_data(), transaction_id=schedule.guid, ) + # Mark rule for deletion if present and monitor is being deleted + if issue_alert_rule_id: + rule = ( + Rule.objects.filter( + project_id=monitor.project_id, + id=issue_alert_rule_id, + ) + .exclude( + status__in=[ + ObjectStatus.PENDING_DELETION, + ObjectStatus.DELETION_IN_PROGRESS, + ] + ) + .first() + ) + if rule: + rule.update(status=ObjectStatus.PENDING_DELETION) + RuleActivity.objects.create( + rule=rule, user_id=request.user.id, type=RuleActivityType.DELETED.value + ) + scheduled_rule = RegionScheduledDeletion.schedule( + rule, days=0, actor=request.user + ) + self.create_audit_entry( + request=request, + organization=project.organization, + target_object=rule.id, + event=audit_log.get_event_id("RULE_REMOVE"), + data=rule.get_audit_log_data(), + transaction_id=scheduled_rule, + ) return self.respond(status=202) diff --git 
a/src/sentry/services/hybrid_cloud/import_export/impl.py b/src/sentry/services/hybrid_cloud/import_export/impl.py index c03b6197409dc5..458e0ad231c8a9 100644 --- a/src/sentry/services/hybrid_cloud/import_export/impl.py +++ b/src/sentry/services/hybrid_cloud/import_export/impl.py @@ -27,6 +27,7 @@ from sentry.backup.helpers import EXCLUDED_APPS, DatetimeSafeDjangoJSONEncoder, Filter, ImportFlags from sentry.backup.scopes import ExportScope from sentry.db.models.base import BaseModel +from sentry.db.postgres.transactions import in_test_hide_transaction_boundary from sentry.models.importchunk import ControlImportChunk, RegionImportChunk from sentry.models.user import User from sentry.models.userpermission import UserPermission @@ -171,25 +172,25 @@ def import_by_model( } try: + # It's possible that this write has already occurred, and we are simply retrying + # because the response got lost in transit. If so, just re-use that reply. We do + # this in the transaction because, while `import_by_model` is generally called in a + # sequential manner, cases like timeouts or long queues may cause a previous call to + # still be active when the next one is made. We'll check once here for an existing + # copy of this (uniquely identifiable) import chunk here to short circuit and avoid + # doing frivolous work. However, this doesn't fully solve our data race error, as it + # is possible that another runaway process makes the colliding write while we're + # building our transaction. Thus, we'll check `get_existing_import_chunk()` again if + # we catch an `IntegrityError` below. + existing_import_chunk = get_existing_import_chunk( + batch_model_name, import_flags, import_chunk_type, min_ordinal + ) + if existing_import_chunk is not None: + logger.info("import_by_model.already_imported", extra=extra) + return existing_import_chunk + using = router.db_for_write(model) with transaction.atomic(using=using): - # It's possible that this write has already occurred, and we are simply retrying - # because the response got lost in transit. If so, just re-use that reply. We do - # this in the transaction because, while `import_by_model` is generally called in a - # sequential manner, cases like timeouts or long queues may cause a previous call to - # still be active when the next one is made. We'll check once here for an existing - # copy of this (uniquely identifiable) import chunk here to short circuit and avoid - # doing frivolous work. However, this doesn't fully solve our data race error, as it - # is possible that another runaway process makes the colliding write while we're - # building our transaction. Thus, we'll check `get_existing_import_chunk()` again if - # we catch an `IntegrityError` below. - existing_import_chunk = get_existing_import_chunk( - batch_model_name, import_flags, import_chunk_type, min_ordinal - ) - if existing_import_chunk is not None: - logger.info("import_by_model.already_imported", extra=extra) - return existing_import_chunk - ok_relocation_scopes = import_scope.value out_pk_map = PrimaryKeyMap() min_old_pk = 0 @@ -339,7 +340,9 @@ def import_by_model( if import_chunk_type == ControlImportChunk: ControlImportChunk(**import_chunk_args).save() else: - RegionImportChunk(**import_chunk_args).save() + # XXX: Monitors and Files are stored in non-default connections in saas. 
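The comment hoisted to the top of this hunk describes the retry-safety scheme: look for an existing, uniquely identifiable import chunk before opening the transaction, then look again if the write collides anyway. A condensed sketch of that check, write, re-check-on-conflict pattern; `find_existing` stands in for `get_existing_import_chunk()` and everything else here is illustrative rather than the real implementation:

    from django.db import IntegrityError, router, transaction

    def write_import_chunk(model, import_chunk_args, find_existing):
        # Fast path: a previous call whose response was lost in transit may
        # have already committed this uniquely identifiable chunk; reuse it.
        existing = find_existing()
        if existing is not None:
            return existing
        try:
            with transaction.atomic(using=router.db_for_write(model)):
                return model.objects.create(**import_chunk_args)
        except IntegrityError:
            # A concurrent runaway process made the colliding write while
            # this transaction was being built; its row is the reply we want.
            existing = find_existing()
            if existing is None:
                raise  # the conflict came from something else; surface it
            return existing

The second lookup is what closes the race the comment calls out: the pre-transaction check only short-circuits frivolous work, it cannot prevent the collision.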
+ with in_test_hide_transaction_boundary(): + RegionImportChunk(**import_chunk_args).save() logger.info("import_by_model.successfully_imported", extra=extra) return RpcImportOk( diff --git a/src/sentry/tasks/deletion/hybrid_cloud.py b/src/sentry/tasks/deletion/hybrid_cloud.py index ba1469fc2d5cb4..f6cf0eff44e4ac 100644 --- a/src/sentry/tasks/deletion/hybrid_cloud.py +++ b/src/sentry/tasks/deletion/hybrid_cloud.py @@ -405,6 +405,7 @@ def get_ids_cross_db_for_row_watermark( field: HybridCloudForeignKey, row_watermark_batch: WatermarkBatch, ) -> tuple[list[int], datetime.datetime]: + oldest_seen = timezone.now() model_object_id_pairs = model.objects.filter( id__lte=row_watermark_batch.up, id__gt=row_watermark_batch.low diff --git a/src/sentry/testutils/pytest/sentry.py b/src/sentry/testutils/pytest/sentry.py index ad567c757c6320..de6e7eccf41077 100644 --- a/src/sentry/testutils/pytest/sentry.py +++ b/src/sentry/testutils/pytest/sentry.py @@ -50,7 +50,11 @@ def configure_split_db() -> None: # silo database is the 'default' elsewhere in application logic. settings.DATABASES["default"]["NAME"] = "region" - settings.DATABASE_ROUTERS = ("sentry.db.router.SiloRouter",) + # Add a connection for the secondary db + settings.DATABASES["secondary"] = settings.DATABASES["default"].copy() + settings.DATABASES["secondary"]["NAME"] = "secondary" + + settings.DATABASE_ROUTERS = ("sentry.db.router.TestSiloMultiDatabaseRouter",) def get_default_silo_mode_for_test_cases() -> SiloMode: diff --git a/tests/sentry/api/endpoints/test_project_index.py b/tests/sentry/api/endpoints/test_project_index.py index 7b1abf5ce442c2..6cbb0721beb66e 100644 --- a/tests/sentry/api/endpoints/test_project_index.py +++ b/tests/sentry/api/endpoints/test_project_index.py @@ -15,6 +15,7 @@ schedule_hybrid_cloud_foreign_key_jobs_control, ) from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode @@ -249,6 +250,7 @@ def test_valid_with_public_integration(self): assert self.project.name.encode("utf-8") in response.content @responses.activate + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_deleted_token_with_public_integration(self): token = self.get_installed_unpublished_sentry_app_access_token() diff --git a/tests/sentry/deletions/test_apiapplication.py b/tests/sentry/deletions/test_apiapplication.py index 4cc9753fff038a..bd15e43a2a4aa7 100644 --- a/tests/sentry/deletions/test_apiapplication.py +++ b/tests/sentry/deletions/test_apiapplication.py @@ -7,6 +7,7 @@ from sentry.tasks.deletion.hybrid_cloud import schedule_hybrid_cloud_foreign_key_jobs from sentry.tasks.deletion.scheduled import run_scheduled_deletions_control from sentry.testutils.cases import TransactionTestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, control_silo_test @@ -14,6 +15,7 @@ @control_silo_test class DeleteApiApplicationTest(TransactionTestCase, HybridCloudTestMixin): + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_simple(self): app = ApiApplication.objects.create( owner=self.user, status=ApiApplicationStatus.pending_deletion diff --git a/tests/sentry/deletions/test_release.py b/tests/sentry/deletions/test_release.py index fdb10d2ab3c958..84331a2db2b7ed 100644 
--- a/tests/sentry/deletions/test_release.py +++ b/tests/sentry/deletions/test_release.py @@ -9,6 +9,7 @@ from sentry.tasks.deletion.scheduled import run_scheduled_deletions from sentry.testutils.cases import TransactionTestCase from sentry.testutils.helpers import TaskRunner +from sentry.testutils.helpers.options import override_options from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode @@ -36,6 +37,7 @@ def test_simple(self): assert Environment.objects.filter(id=env.id).exists() assert Project.objects.filter(id=project.id).exists() + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_cascade_from_user(self): org = self.create_organization() project = self.create_project(organization=org) diff --git a/tests/sentry/deletions/test_sentry_app_installations.py b/tests/sentry/deletions/test_sentry_app_installations.py index 0fbdede963f983..188bbda77a53b6 100644 --- a/tests/sentry/deletions/test_sentry_app_installations.py +++ b/tests/sentry/deletions/test_sentry_app_installations.py @@ -15,6 +15,7 @@ from sentry.silo.safety import unguarded_write from sentry.tasks.deletion.hybrid_cloud import schedule_hybrid_cloud_foreign_key_jobs from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, control_silo_test @@ -69,6 +70,7 @@ def test_deletes_installation_provider(self): assert not SentryAppInstallationForProvider.objects.filter() + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_deletes_service_hooks(self): hook = self.create_service_hook( application=self.sentry_app.application, diff --git a/tests/sentry/models/test_user.py b/tests/sentry/models/test_user.py index 16a6ba033eb2b3..6814a51e70a2af 100644 --- a/tests/sentry/models/test_user.py +++ b/tests/sentry/models/test_user.py @@ -7,6 +7,7 @@ from sentry.silo.base import SiloMode from sentry.tasks.deletion.hybrid_cloud import schedule_hybrid_cloud_foreign_key_jobs from sentry.testutils.cases import TestCase +from sentry.testutils.helpers.options import override_options from sentry.testutils.hybrid_cloud import HybridCloudTestMixin from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode, assume_test_silo_mode_of, control_silo_test @@ -43,6 +44,7 @@ def user_tombstone_exists(self, user_id: int) -> bool: def get_user_saved_search_count(self) -> int: return SavedSearch.objects.filter(owner_id=self.user_id).count() + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_simple(self): assert not self.user_tombstone_exists(user_id=self.user_id) with outbox_runner(): @@ -59,6 +61,7 @@ def test_simple(self): # Ensure they are all now gone. 
assert self.get_user_saved_search_count() == 0 + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_unrelated_saved_search_is_not_deleted(self): another_user = self.create_user() self.create_member(user=another_user, organization=self.organization) @@ -74,6 +77,7 @@ def test_unrelated_saved_search_is_not_deleted(self): with assume_test_silo_mode(SiloMode.REGION): assert SavedSearch.objects.filter(owner_id=another_user.id).exists() + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_cascades_to_multiple_regions(self): eu_org = self.create_organization(region=_TEST_REGIONS[1]) self.create_member(user=self.user, organization=eu_org) @@ -87,6 +91,7 @@ def test_cascades_to_multiple_regions(self): schedule_hybrid_cloud_foreign_key_jobs() assert self.get_user_saved_search_count() == 0 + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_deletions_create_tombstones_in_regions_for_user_with_no_orgs(self): # Create a user with no org memberships user_to_delete = self.create_user("foo@example.com") @@ -96,6 +101,7 @@ def test_deletions_create_tombstones_in_regions_for_user_with_no_orgs(self): assert self.user_tombstone_exists(user_id=user_id) + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_cascades_to_regions_even_if_user_ownership_revoked(self): eu_org = self.create_organization(region=_TEST_REGIONS[1]) self.create_member(user=self.user, organization=eu_org) diff --git a/tests/sentry/monitors/endpoints/test_base_monitor_checkin_attachment.py b/tests/sentry/monitors/endpoints/test_base_monitor_checkin_attachment.py index b3ad363fb03201..27424339136dcd 100644 --- a/tests/sentry/monitors/endpoints/test_base_monitor_checkin_attachment.py +++ b/tests/sentry/monitors/endpoints/test_base_monitor_checkin_attachment.py @@ -1,8 +1,10 @@ from django.core.files.base import ContentFile +from sentry.db.postgres.transactions import in_test_hide_transaction_boundary from sentry.models.files import File from sentry.monitors.models import CheckInStatus, MonitorCheckIn from sentry.testutils.cases import MonitorTestCase +from sentry.testutils.helpers.options import override_options class BaseMonitorCheckInAttachmentEndpointTest(MonitorTestCase): @@ -47,6 +49,7 @@ def test_download_no_file(self): ) assert resp.data["detail"] == "Check-in has no attachment" + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_delete_cascade(self): file = self.create_file(name="log.txt", type="checkin.attachment") file.putfile(ContentFile(b"some data!")) @@ -62,6 +65,8 @@ def test_delete_cascade(self): attachment_id=file.id, ) - checkin.delete() + # checkin has a post_delete signal that removes files. 
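The new comment explains why the delete moves inside `in_test_hide_transaction_boundary()`: deleting a check-in fires a Django `post_delete` receiver that reaches into the file store, a write outside the current database transaction. A minimal sketch of that kind of receiver; `MonitorCheckIn`, `File`, and `attachment_id` are the names this test itself uses, but the hook body is an assumption since the real signal implementation is not shown in this patch:

    from django.db.models.signals import post_delete
    from django.dispatch import receiver

    from sentry.models.files import File
    from sentry.monitors.models import MonitorCheckIn

    @receiver(post_delete, sender=MonitorCheckIn)
    def cleanup_checkin_attachment(sender, instance, **kwargs):
        # Fires after the check-in row is deleted. The attachment lives in
        # the file store, outside the transaction that deleted the row,
        # which is why the test hides the transaction boundary around it.
        if instance.attachment_id:
            File.objects.filter(id=instance.attachment_id).delete()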
+ with in_test_hide_transaction_boundary(): + checkin.delete() assert not File.objects.filter(type="checkin.attachment").exists() diff --git a/tests/sentry/tasks/deletion/test_hybrid_cloud.py b/tests/sentry/tasks/deletion/test_hybrid_cloud.py index 00de30117b89be..16da4bafb043a9 100644 --- a/tests/sentry/tasks/deletion/test_hybrid_cloud.py +++ b/tests/sentry/tasks/deletion/test_hybrid_cloud.py @@ -146,6 +146,7 @@ def setup_deletable_objects( @django_db_all +@override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_region_processing(task_runner): reset_watermarks() @@ -224,6 +225,7 @@ def setup_deletion_test(): @django_db_all +@override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_cascade_deletion_behavior(task_runner): data = setup_deletion_test() integration = data["integration"] @@ -245,6 +247,7 @@ def test_cascade_deletion_behavior(task_runner): @django_db_all +@override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_do_nothing_deletion_behavior(task_runner): data = setup_deletion_test() integration = data["integration"] @@ -268,6 +271,7 @@ def test_do_nothing_deletion_behavior(task_runner): @django_db_all +@override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def test_set_null_deletion_behavior(task_runner): data = setup_deletion_test() user = data["user"] @@ -324,10 +328,12 @@ def setup_cross_db_deletion_data( ) -# TODO(Gabe): Enable this test when the multi-db test changes land @region_silo_test -@pytest.mark.skip class TestCrossDatabaseTombstoneCascadeBehavior(TestCase): + def setUp(self) -> None: + super().setUp() + reset_watermarks() + def assert_monitors_unchanged(self, unaffected_data: list[dict]): for u_data in unaffected_data: u_user, u_monitor = itemgetter("user", "monitor")(u_data) @@ -411,8 +417,9 @@ def test_deletion_row_after_tombstone(self): affected_monitors = [monitor] + user_id = user.id with assume_test_silo_mode_of(User), outbox_runner(): - User.objects.get(id=user.id).delete() + User.objects.get(id=user_id).delete() assert Monitor.objects.filter(id=monitor.id).exists() assert monitor.owner_user_id == user.id @@ -424,15 +431,16 @@ def test_deletion_row_after_tombstone(self): # Same as previous test, but this time with monitors created after # the tombstone has been processed + start_id = monitor.id + 10 affected_monitors.extend( [ Monitor.objects.create( - id=10 + i * 2, # Ensure that each monitor is in its own batch + id=start_id + i * 2, # Ensure that each monitor is in its own batch organization_id=organization.id, project_id=project.id, slug=f"test-monitor-{i}", - name="Test Monitor", - owner_user_id=user.id, + name=f"Row After Tombstone {i}", + owner_user_id=user_id, ) for i in range(4) ] diff --git a/tests/sentry/tasks/test_groupowner.py b/tests/sentry/tasks/test_groupowner.py index 5c2e0205e5380e..3e823e8eaa6abd 100644 --- a/tests/sentry/tasks/test_groupowner.py +++ b/tests/sentry/tasks/test_groupowner.py @@ -11,6 +11,7 @@ from sentry.testutils.cases import TestCase from sentry.testutils.helpers import TaskRunner from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.testutils.helpers.options import override_options from sentry.testutils.outbox import outbox_runner from sentry.testutils.silo import assume_test_silo_mode from sentry.testutils.skips import requires_snuba @@ -100,6 +101,7 @@ def test_simple(self): type=GroupOwnerType.SUSPECT_COMMIT.value, ) + @override_options({"hybrid_cloud.allow_cross_db_tombstones": True}) def 
test_user_deletion_cascade(self): other_user = self.create_user() group = self.create_group() From 89be379877a3ef149be365830f25883390f0499d Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 13 May 2024 11:39:06 -0400 Subject: [PATCH 337/376] ref(ui): Add margin to "Need help with your account" (#70760) --- src/sentry/templates/sentry/bases/auth.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/templates/sentry/bases/auth.html b/src/sentry/templates/sentry/bases/auth.html index 3b052499f6a43e..632f3270d776a7 100644 --- a/src/sentry/templates/sentry/bases/auth.html +++ b/src/sentry/templates/sentry/bases/auth.html @@ -18,7 +18,7 @@ {% block auth_container %}
{% block auth_main %}{% endblock %}
-
+
{% blocktrans %} Need help with your account? Click here to check out our help center. {% endblocktrans %} From 79871b806059d17c3014ae0e9c235c544dac4da3 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Mon, 13 May 2024 11:40:43 -0400 Subject: [PATCH 338/376] chore(perf): Remove backend Starfish routes (#70759) These are not in use anywhere. --- src/sentry/web/urls.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/sentry/web/urls.py b/src/sentry/web/urls.py index 07ce17bbff66de..fde08d4ad3a53c 100644 --- a/src/sentry/web/urls.py +++ b/src/sentry/web/urls.py @@ -719,12 +719,6 @@ react_page_view, name="performance", ), - # Starfish - re_path( - r"^starfish/", - react_page_view, - name="starfish", - ), # Profiling re_path( r"^profiling/", From c77c851aac754ac9647f0097f9e9c4d0e5145421 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 13 May 2024 11:56:09 -0400 Subject: [PATCH 339/376] fix(wizard): Avoid using Link components (#70765) Links should be rendered within a react router context; this page is not. --- static/app/views/setupWizard/index.tsx | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/static/app/views/setupWizard/index.tsx b/static/app/views/setupWizard/index.tsx index 53fa41885efd9f..f9aaa1239a295e 100644 --- a/static/app/views/setupWizard/index.tsx +++ b/static/app/views/setupWizard/index.tsx @@ -1,7 +1,7 @@ import {useCallback, useEffect, useMemo, useRef, useState} from 'react'; import styled from '@emotion/styled'; -import {Button} from 'sentry/components/button'; +import {LinkButton} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {ThemeAndStyleProvider} from 'sentry/components/themeAndStyleProvider'; @@ -103,16 +103,16 @@ function SetupWizard({hash = false, organizations}: Props) {
{t('Return to your terminal to complete your setup')}
- - +
)} From ae8423115db17b2458084684f9f9ef70559f9f81 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Mon, 13 May 2024 12:00:19 -0400 Subject: [PATCH 340/376] ref(screenloads): Remove old StarfishPageFiltersContainer (#70766) This component isn't necessary anymore. --- .../mobile/screenload/screenLoadSpans/index.tsx | 6 +++--- .../components/starfishPageFiltersContainer.tsx | 13 ------------- 2 files changed, 3 insertions(+), 16 deletions(-) delete mode 100644 static/app/views/starfish/components/starfishPageFiltersContainer.tsx diff --git a/static/app/views/performance/mobile/screenload/screenLoadSpans/index.tsx b/static/app/views/performance/mobile/screenload/screenLoadSpans/index.tsx index cdee35003e3b06..d5ba9d3279bd8b 100644 --- a/static/app/views/performance/mobile/screenload/screenLoadSpans/index.tsx +++ b/static/app/views/performance/mobile/screenload/screenLoadSpans/index.tsx @@ -11,6 +11,7 @@ import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidg import * as Layout from 'sentry/components/layouts/thirds'; import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; +import PageFiltersContainer from 'sentry/components/organizations/pageFilters/container'; import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -41,7 +42,6 @@ import { ReleaseComparisonSelector, SECONDARY_RELEASE_ALIAS, } from 'sentry/views/starfish/components/releaseSelector'; -import {StarfishPageFiltersContainer} from 'sentry/views/starfish/components/starfishPageFiltersContainer'; import {SpanMetricsField} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; @@ -124,7 +124,7 @@ function ScreenLoadSpans() { - + @@ -176,7 +176,7 @@ function ScreenLoadSpans() { referrer="api.starfish.mobile-screen-totals" /> - + - {children} - - ); -} From b6f490a0ea35322eb4c34baea7b02130fb81395a Mon Sep 17 00:00:00 2001 From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com> Date: Mon, 13 May 2024 18:09:01 +0200 Subject: [PATCH 341/376] chore(icons): Add more options for iconLock (#70482) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this pr updates the options for `iconLock` so that it is never solid and the only choice is whether it is locked. 
The old prop is kept since it is still used in `getsentry`; it will be removed in a future PR. --- .../components/events/interfaces/threads.tsx | 2 +- .../app/components/onboardingWizard/task.tsx | 2 +- .../components/organizations/headerItem.tsx | 2 +- .../pageFilters/pageFilterPinButton.tsx | 2 +- .../pageFilters/pageFilterPinIndicator.tsx | 2 +- static/app/icons/iconLock.tsx | 40 ++++++++++--------- .../organizationAuth/providerItem.tsx | 2 +- .../organizationSettingsForm.tsx | 2 +- static/app/views/settings/settingsIndex.tsx | 2 +- 9 files changed, 30 insertions(+), 26 deletions(-) diff --git a/static/app/components/events/interfaces/threads.tsx b/static/app/components/events/interfaces/threads.tsx index d72f4bff9cce92..ec57827b68dc51 100644 --- a/static/app/components/events/interfaces/threads.tsx +++ b/static/app/components/events/interfaces/threads.tsx @@ -64,7 +64,7 @@ export function getThreadStateIcon(state: ThreadStates | undefined) { } switch (state) { case ThreadStates.BLOCKED: - return ; + return ; case ThreadStates.TIMED_WAITING: return ; case ThreadStates.WAITING: diff --git a/static/app/components/onboardingWizard/task.tsx b/static/app/components/onboardingWizard/task.tsx index 13203471230044..e67eeb2cbe82ec 100644 --- a/static/app/components/onboardingWizard/task.tsx +++ b/static/app/components/onboardingWizard/task.tsx @@ -129,7 +129,7 @@ function Task(props: Props) { requisite: task.requisiteTasks[0].title, })} > - + ); diff --git a/static/app/components/organizations/headerItem.tsx b/static/app/components/organizations/headerItem.tsx index 62e85539edbfba..dd048a13d0f332 100644 --- a/static/app/components/organizations/headerItem.tsx +++ b/static/app/components/organizations/headerItem.tsx @@ -89,7 +89,7 @@ function HeaderItem({ )} {locked && ( - + )} diff --git a/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx b/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx index 97ff30b5e02757..b0ce04bf0d72f6 100644 --- a/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx +++ b/static/app/components/organizations/pageFilters/pageFilterPinButton.tsx @@ -38,7 +38,7 @@ function PageFilterPinButton({organization, filter, size, className}: Props) { size={size} pinned={pinned} borderless={size === 'zero'} - icon={} + icon={} title={t("Once locked, Sentry will remember this filter's value across pages.")} tooltipProps={{delay: 500}} > diff --git a/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx b/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx index f51c83506372ca..f949913b0bb8d8 100644 --- a/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx +++ b/static/app/components/organizations/pageFilters/pageFilterPinIndicator.tsx @@ -20,7 +20,7 @@ function PageFilterPinIndicator({children, filter}: Props) { {children} {pinned && ( - + )} diff --git a/static/app/icons/iconLock.tsx b/static/app/icons/iconLock.tsx index b650c2cb665af7..06a2c733bbff85 100644 --- a/static/app/icons/iconLock.tsx +++ b/static/app/icons/iconLock.tsx @@ -5,26 +5,30 @@ import {SvgIcon} from './svgIcon'; interface Props extends SVGIconProps { isSolid?: boolean; + locked?: boolean; } -const IconLock = forwardRef(({isSolid = false, ...props}, ref) => { - return ( - - {isSolid ? 
( - - - - - ) : ( - - - - - - )} - - ); -}); +const IconLock = forwardRef( + ({locked = false, isSolid = false, ...props}, ref) => { + return ( + + {locked || isSolid ? ( + + + + + + ) : ( + + + + + + )} + + ); + } +); IconLock.displayName = 'IconLock'; diff --git a/static/app/views/settings/organizationAuth/providerItem.tsx b/static/app/views/settings/organizationAuth/providerItem.tsx index 567167afac3884..400dba39e21924 100644 --- a/static/app/views/settings/organizationAuth/providerItem.tsx +++ b/static/app/views/settings/organizationAuth/providerItem.tsx @@ -196,7 +196,7 @@ function LockedFeature({provider, features, className}: LockedFeatureProps) { /> } > - }> + }> {t('disabled')} diff --git a/static/app/views/settings/organizationGeneralSettings/organizationSettingsForm.tsx b/static/app/views/settings/organizationGeneralSettings/organizationSettingsForm.tsx index 091cb370de28bb..8635a33f50b6a6 100644 --- a/static/app/views/settings/organizationGeneralSettings/organizationSettingsForm.tsx +++ b/static/app/views/settings/organizationGeneralSettings/organizationSettingsForm.tsx @@ -71,7 +71,7 @@ function OrganizationSettingsForm({initialData, onSave}: Props) { /> } > - }> + }> {t('disabled')} diff --git a/static/app/views/settings/settingsIndex.tsx b/static/app/views/settings/settingsIndex.tsx index 8ed487509af109..860e178f46931e 100644 --- a/static/app/views/settings/settingsIndex.tsx +++ b/static/app/views/settings/settingsIndex.tsx @@ -210,7 +210,7 @@ function SettingsIndex({organization, ...props}: SettingsIndexProps) { - + {t('API Keys')} From 877618339dd76539ddc2f2ac31ee439ec65884f6 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Mon, 13 May 2024 12:17:55 -0400 Subject: [PATCH 342/376] feat(insights): fetch equal hit/miss samples in cache sidebar (#70763) Always fetch 5 hit and 5 miss samples as otherwise in a low cache miss scenario, you'll potentially have only misses. 
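Whichever outcome a single sorted query happens to favor, the failure mode is the same: one LIMIT-10 request can come back entirely one-sided. A rough, hypothetical simulation (the 2% miss rate and counts are made up, not measured) of why the two half-sized queries in this patch always surface both outcomes whenever both exist:

    import random

    random.seed(0)
    # 1,000 cache spans, 2% of which are misses (hypothetical traffic mix).
    spans = [{"cache.hit": random.random() >= 0.02} for _ in range(1_000)]

    # Before: one query over all cache spans with a limit of 10; the sample
    # can easily contain only one kind of outcome.
    mixed_sample = random.sample(spans, 10)
    print(sum(s["cache.hit"] for s in mixed_sample), "hits out of 10")

    # After: two queries filtered on cache.hit, limit 5 each, then combined,
    # mirroring cacheHitSamples + cacheMissSamples in the diff below.
    hit_sample = [s for s in spans if s["cache.hit"]][:5]
    miss_sample = [s for s in spans if not s["cache.hit"]][:5]
    print(len(hit_sample), "hits and", len(miss_sample), "misses")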
![image](https://github.com/getsentry/sentry/assets/44422760/5b944b8a-6684-4188-8037-64b8fabc3b71) --- .../cache/samplePanel/samplePanel.tsx | 52 +++++++++++++++---- 1 file changed, 41 insertions(+), 11 deletions(-) diff --git a/static/app/views/performance/cache/samplePanel/samplePanel.tsx b/static/app/views/performance/cache/samplePanel/samplePanel.tsx index 1359c32ada6643..43c533aead2dd1 100644 --- a/static/app/views/performance/cache/samplePanel/samplePanel.tsx +++ b/static/app/views/performance/cache/samplePanel/samplePanel.tsx @@ -107,11 +107,11 @@ export function CacheSamplePanel() { }; const { - data: cacheSpanSamplesData, - isFetching: isCacheSpanSamplesFetching, - refetch: refetchSpanSamples, + data: cacheHitSamples, + isFetching: isCacheHitsFetching, + refetch: refetchCacheHits, } = useIndexedSpans({ - search: MutableSearch.fromQueryObject(sampleFilters).addFreeText('has:cache.hit'), + search: MutableSearch.fromQueryObject({...sampleFilters, 'cache.hit': 'true'}), fields: [ SpanIndexedField.PROJECT, SpanIndexedField.TRACE, @@ -124,24 +124,49 @@ export function CacheSamplePanel() { SpanIndexedField.CACHE_ITEM_SIZE, ], sorts: [SPAN_SAMPLES_SORT], - limit: SPAN_SAMPLE_LIMIT, + limit: SPAN_SAMPLE_LIMIT / 2, enabled: isPanelOpen, referrer: Referrer.SAMPLES_CACHE_SPAN_SAMPLES, }); + const { + data: cacheMissSamples, + isFetching: isCacheMissesFetching, + refetch: refetchCacheMisses, + } = useIndexedSpans({ + search: MutableSearch.fromQueryObject({...sampleFilters, 'cache.hit': 'false'}), + fields: [ + SpanIndexedField.PROJECT, + SpanIndexedField.TRACE, + SpanIndexedField.TRANSACTION_ID, + SpanIndexedField.ID, + SpanIndexedField.TIMESTAMP, + SpanIndexedField.SPAN_DESCRIPTION, + SpanIndexedField.CACHE_HIT, + SpanIndexedField.SPAN_OP, + SpanIndexedField.CACHE_ITEM_SIZE, + ], + sorts: [SPAN_SAMPLES_SORT], + limit: SPAN_SAMPLE_LIMIT / 2, + enabled: isPanelOpen, + referrer: Referrer.SAMPLES_CACHE_SPAN_SAMPLES, + }); + + const cacheSamples = [...(cacheHitSamples || []), ...(cacheMissSamples || [])]; + const { data: transactionData, error: transactionError, isFetching: isFetchingTransactions, } = useTransactions( - cacheSpanSamplesData?.map(span => span['transaction.id']) || [], + cacheSamples?.map(span => span['transaction.id']) || [], Referrer.SAMPLES_CACHE_SPAN_SAMPLES ); const transactionDurationsMap = keyBy(transactionData, 'id'); const spansWithDuration = - cacheSpanSamplesData?.map(span => ({ + cacheSamples?.map(span => ({ ...span, 'transaction.duration': transactionDurationsMap[span['transaction.id']]?.['transaction.duration'], @@ -161,6 +186,11 @@ export function CacheSamplePanel() { }); }; + const handleRefetch = () => { + refetchCacheHits(); + refetchCacheMisses(); + }; + return ( @@ -290,7 +320,9 @@ export function CacheSamplePanel() { }, units: {[SpanIndexedField.CACHE_ITEM_SIZE]: 'byte'}, }} - isLoading={isCacheSpanSamplesFetching || isFetchingTransactions} + isLoading={ + isCacheHitsFetching || isCacheMissesFetching || isFetchingTransactions + } highlightedSpanId={highlightedSpanId} onSampleMouseOver={sample => setHighlightedSpanId(sample.span_id)} onSampleMouseOut={() => setHighlightedSpanId(undefined)} @@ -301,9 +333,7 @@ export function CacheSamplePanel() { - + From cb501a3c6a44cb82d56b51b1ff20c8c57fe26780 Mon Sep 17 00:00:00 2001 From: Armen Zambrano G <44410+armenzg@users.noreply.github.com> Date: Mon, 13 May 2024 12:19:39 -0400 Subject: [PATCH 343/376] feat(related_issues): Issue Details Related Issues flag (#70738) This flag will control showing related issues on the 
Issue Details page. --- src/sentry/conf/server.py | 2 ++ src/sentry/features/temporary.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index a7d5784d008ed0..930fd60b18657a 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1794,6 +1794,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:related-events": False, # Enable related issues feature "organizations:related-issues": False, + # Enable related issues in issue details page + "organizations:related-issues-issue-details-page": False, # Enable usage of external relays, for use with Relay. See # https://github.com/getsentry/relay. "organizations:relay": True, diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 4ad8a1641c32e1..30271366d96c9d 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -181,6 +181,7 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:project-stats", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:related-events", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) manager.add("organizations:related-issues", OrganizationFeature, FeatureHandlerStrategy.REMOTE) + manager.add("organizations:related-issues-issue-details-page", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:relay-cardinality-limiter", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) manager.add("organizations:release-comparison-performance", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:releases-v2", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From dfce1cf811e986477e81365d06cfac6005c1431c Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Mon, 13 May 2024 09:26:40 -0700 Subject: [PATCH 344/376] chore(issues): Simple type improvements (#70697) Another quick batch of followups to #69828. 
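Most of the patch below is mechanical: test methods gain a `-> None` return annotation and arguments injected by `@patch` gain a `MagicMock` annotation, and the touched modules are added to a typing-related module list in pyproject.toml. A condensed, hypothetical test showing the shape of the change; the names mirror the diff but the body is illustrative:

    from unittest.mock import MagicMock, patch

    class ExampleTest:
        # The return annotation and the MagicMock annotation are the entire
        # change; runtime behavior is untouched, mypy just gains coverage.
        @patch("sentry.eventstream.backend")
        def test_delete_by_id(self, mock_eventstream: MagicMock) -> None:
            mock_eventstream.start_delete_groups.return_value = {"state": "s1"}
            assert mock_eventstream.start_delete_groups() == {"state": "s1"}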
--- pyproject.toml | 8 +++ src/sentry/issues/attributes.py | 20 ++++--- .../endpoints/organization_group_index.py | 2 +- .../issues/endpoints/organization_searches.py | 5 +- .../issues/endpoints/source_map_debug.py | 2 +- src/sentry/issues/escalating.py | 4 +- .../test_organization_group_index.py | 60 ++++++++++--------- .../endpoints/test_project_stacktrace_link.py | 45 +++++++------- tests/sentry/issues/test_attributes.py | 17 +++--- tests/sentry/issues/test_escalating.py | 14 ++--- tests/sentry/issues/test_issue_velocity.py | 38 ++++++------ tests/sentry/issues/test_priority.py | 6 +- tests/sentry/issues/test_producer.py | 12 ++-- tests/sentry/issues/test_run.py | 22 ++++--- 14 files changed, 146 insertions(+), 109 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f7d88bd56c7d45..694a2250a6de57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -566,6 +566,7 @@ module = [ "sentry.issues.endpoints.group_events", "sentry.issues.endpoints.organization_activity", "sentry.issues.endpoints.organization_release_previous_commits", + "sentry.issues.endpoints.organization_searches", "sentry.issues.endpoints.project_stacktrace_link", "sentry.issues.escalating_group_forecast", "sentry.issues.escalating_issues_alg", @@ -643,17 +644,24 @@ module = [ "tests.sentry.issues.endpoints.test_actionable_items", "tests.sentry.issues.endpoints.test_organization_activity", "tests.sentry.issues.endpoints.test_organization_searches", + "tests.sentry.issues.endpoints.test_project_stacktrace_link", "tests.sentry.issues.endpoints.test_source_map_debug", + "tests.sentry.issues.test_attributes", + "tests.sentry.issues.test_escalating", "tests.sentry.issues.test_escalating_issues_alg", "tests.sentry.issues.test_group_attributes_dataset", "tests.sentry.issues.test_grouptype", "tests.sentry.issues.test_ignored", "tests.sentry.issues.test_ingest", "tests.sentry.issues.test_issue_occurrence", + "tests.sentry.issues.test_issue_velocity", "tests.sentry.issues.test_json_schemas", "tests.sentry.issues.test_merge", "tests.sentry.issues.test_occurrence_consumer", "tests.sentry.issues.test_ongoing", + "tests.sentry.issues.test_priority", + "tests.sentry.issues.test_producer", + "tests.sentry.issues.test_run", "tests.sentry.issues.test_search_issues_dataset", "tests.sentry.issues.test_status_change", "tests.sentry.issues.test_status_change_consumer", diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py index a883ddcb36defc..6e5a06960bc54b 100644 --- a/src/sentry/issues/attributes.py +++ b/src/sentry/issues/attributes.py @@ -211,7 +211,7 @@ def _bulk_retrieve_snapshot_values( @receiver( post_save, sender=Group, dispatch_uid="post_save_log_group_attributes_changed", weak=False ) -def post_save_log_group_attributes_changed(instance, sender, created, *args, **kwargs): +def post_save_log_group_attributes_changed(instance, sender, created, *args, **kwargs) -> None: try: if created: _log_group_attributes_changed(Operation.CREATED, "group", None) @@ -224,7 +224,7 @@ def post_save_log_group_attributes_changed(instance, sender, created, *args, **k @receiver(post_update, sender=Group, dispatch_uid="post_update_group", weak=False) -def post_update_group(sender, updated_fields, model_ids, *args, **kwargs): +def post_update_group(sender, updated_fields, model_ids, *args, **kwargs) -> None: try: updated_fields = process_update_fields(updated_fields) if updated_fields: @@ -233,7 +233,7 @@ def post_update_group(sender, updated_fields, model_ids, *args, **kwargs): logger.exception("failed to log group 
attributes after group_owner updated") -def process_update_fields(updated_fields): +def process_update_fields(updated_fields) -> set[str]: if not updated_fields: # we have no guarantees update_fields is used everywhere save() is called # we'll need to assume any of the attributes are updated in that case @@ -247,7 +247,7 @@ def process_update_fields(updated_fields): @issue_deleted.connect(weak=False) -def on_issue_deleted_log_deleted(group, user, delete_type, **kwargs): +def on_issue_deleted_log_deleted(group, user, delete_type, **kwargs) -> None: try: _log_group_attributes_changed(Operation.DELETED, "group", "all") send_snapshot_values(None, group, True) @@ -256,7 +256,7 @@ def on_issue_deleted_log_deleted(group, user, delete_type, **kwargs): @issue_assigned.connect(weak=False) -def on_issue_assigned_log_group_assignee_attributes_changed(project, group, user, **kwargs): +def on_issue_assigned_log_group_assignee_attributes_changed(project, group, user, **kwargs) -> None: try: _log_group_attributes_changed(Operation.UPDATED, "group_assignee", "all") send_snapshot_values(None, group, False) @@ -265,7 +265,9 @@ def on_issue_assigned_log_group_assignee_attributes_changed(project, group, user @issue_unassigned.connect(weak=False) -def on_issue_unassigned_log_group_assignee_attributes_changed(project, group, user, **kwargs): +def on_issue_unassigned_log_group_assignee_attributes_changed( + project, group, user, **kwargs +) -> None: try: _log_group_attributes_changed(Operation.DELETED, "group_assignee", "all") send_snapshot_values(None, group, False) @@ -276,7 +278,9 @@ def on_issue_unassigned_log_group_assignee_attributes_changed(project, group, us @receiver( post_save, sender=GroupOwner, dispatch_uid="post_save_log_group_owner_changed", weak=False ) -def post_save_log_group_owner_changed(instance, sender, created, update_fields, *args, **kwargs): +def post_save_log_group_owner_changed( + instance, sender, created, update_fields, *args, **kwargs +) -> None: try: _log_group_attributes_changed( Operation.CREATED if created else Operation.UPDATED, "group_owner", "all" @@ -289,7 +293,7 @@ def post_save_log_group_owner_changed(instance, sender, created, update_fields, @receiver( post_delete, sender=GroupOwner, dispatch_uid="post_delete_log_group_owner_changed", weak=False ) -def post_delete_log_group_owner_changed(instance, sender, *args, **kwargs): +def post_delete_log_group_owner_changed(instance, sender, *args, **kwargs) -> None: try: _log_group_attributes_changed(Operation.DELETED, "group_owner", "all") send_snapshot_values(instance.group_id, None, False) diff --git a/src/sentry/issues/endpoints/organization_group_index.py b/src/sentry/issues/endpoints/organization_group_index.py index 2c4c3f4455c74b..77fbd252d151b7 100644 --- a/src/sentry/issues/endpoints/organization_group_index.py +++ b/src/sentry/issues/endpoints/organization_group_index.py @@ -177,7 +177,7 @@ def _search( result = inbox_search(**query_kwargs) else: - def use_group_snuba_dataset(): + def use_group_snuba_dataset() -> bool: # if useGroupSnubaDataset we consider using the snuba dataset if not request.GET.get("useGroupSnubaDataset"): return False diff --git a/src/sentry/issues/endpoints/organization_searches.py b/src/sentry/issues/endpoints/organization_searches.py index 2af593e6766bf3..f4b7af4a4d419b 100644 --- a/src/sentry/issues/endpoints/organization_searches.py +++ b/src/sentry/issues/endpoints/organization_searches.py @@ -13,6 +13,7 @@ OrganizationSearchAdminSerializer, OrganizationSearchMemberSerializer, ) +from 
sentry.models.organization import Organization from sentry.models.savedsearch import SavedSearch, Visibility from sentry.models.search_common import SearchType @@ -26,7 +27,7 @@ class OrganizationSearchesEndpoint(OrganizationEndpoint): owner = ApiOwner.ISSUES permission_classes = (OrganizationSearchPermission,) - def get(self, request: Request, organization) -> Response: + def get(self, request: Request, organization: Organization) -> Response: """ List an Organization's saved searches ````````````````````````````````````` @@ -60,7 +61,7 @@ def get(self, request: Request, organization) -> Response: return Response(serialize(list(query), request.user)) - def post(self, request: Request, organization) -> Response: + def post(self, request: Request, organization: Organization) -> Response: serializer: BaseOrganizationSearchSerializer if request.access.has_scope("org:write"): serializer = OrganizationSearchAdminSerializer(data=request.data) diff --git a/src/sentry/issues/endpoints/source_map_debug.py b/src/sentry/issues/endpoints/source_map_debug.py index 8b0f5669b38867..fa07de1c795f40 100644 --- a/src/sentry/issues/endpoints/source_map_debug.py +++ b/src/sentry/issues/endpoints/source_map_debug.py @@ -80,7 +80,7 @@ def get(self, request: Request, project: Project, event_id: str) -> Response: return self._create_response(issue, data) - def _create_response(self, issue=None, data=None): + def _create_response(self, issue=None, data=None) -> Response: errors_list = [] if issue: response = SourceMapProcessingIssue(issue, data=data).get_api_context() diff --git a/src/sentry/issues/escalating.py b/src/sentry/issues/escalating.py index 2f59d8756dd5da..d893bc5ea4311a 100644 --- a/src/sentry/issues/escalating.py +++ b/src/sentry/issues/escalating.py @@ -205,7 +205,7 @@ def _query_metrics_with_pagination( end_date: datetime, all_results: list[GroupsCountResponse], category: GroupCategory | None = None, -): +) -> None: """ Paginates Snuba metric queries for event counts for the given list of project ids and groups ids in a time range. @@ -296,7 +296,7 @@ def _generate_generic_metrics_backend_query( end_date: datetime, offset: int, category: GroupCategory | None = None, -): +) -> MetricsQuery: """ This function generates a query to fetch the hourly events for a group_id through the Generic Metrics Backend. 
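The escalating.py hunks above only add annotations, but the annotated function's docstring describes an offset-pagination loop over Snuba metrics queries. A condensed, hypothetical sketch of that looping shape; the real query construction lives in `_generate_generic_metrics_backend_query`, which is not shown in full here, so `run_query` and the accumulation details are assumptions:

    def paginate_event_counts(run_query, page_size: int) -> list[dict]:
        # Keep advancing the offset until a page comes back short,
        # accumulating hourly event-count rows along the way.
        all_results: list[dict] = []
        offset = 0
        while True:
            page = run_query(offset=offset, limit=page_size)
            all_results.extend(page)
            if len(page) < page_size:
                return all_results
            offset += page_size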
diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py index d0a837479cacb2..f9b6646b8d3903 100644 --- a/tests/sentry/issues/endpoints/test_organization_group_index.py +++ b/tests/sentry/issues/endpoints/test_organization_group_index.py @@ -1,7 +1,7 @@ import functools from datetime import UTC, datetime, timedelta from time import sleep -from unittest.mock import Mock, call, patch +from unittest.mock import MagicMock, Mock, call, patch from uuid import uuid4 from dateutil.parser import parse as parse_datetime @@ -121,7 +121,7 @@ def test_query_for_archived(self) -> None: "sentry.search.snuba.executors.GroupAttributesPostgresSnubaQueryExecutor.query", side_effect=GroupAttributesPostgresSnubaQueryExecutor.query, ) - def test_sort_by_trends(self, mock_query) -> None: + def test_sort_by_trends(self, mock_query: MagicMock) -> None: group = self.store_event( data={ "timestamp": iso_format(before_now(seconds=10)), @@ -812,7 +812,7 @@ def test_date_range(self) -> None: assert len(response.data) == 0 @patch("sentry.analytics.record") - def test_advanced_search_errors(self, mock_record) -> None: + def test_advanced_search_errors(self, mock_record: MagicMock) -> None: self.login_as(user=self.user) response = self.get_response(sort_by="date", query="!has:user") assert response.status_code == 200, response.data @@ -839,7 +839,7 @@ def test_advanced_search_errors(self, mock_record) -> None: # the orderby being sent to snuba for a certain call. This function has a simple # return value and can be used to set variables in the snuba payload. @patch("sentry.utils.snuba.get_query_params_to_update_for_projects") - def test_assigned_to_pagination(self, patched_params_update) -> None: + def test_assigned_to_pagination(self, patched_params_update: MagicMock) -> None: old_sample_size = options.get("snuba.search.hits-sample-size") assert options.set("snuba.search.hits-sample-size", 1) @@ -2376,7 +2376,7 @@ def test_query_status_and_substatus_nonoverlapping(self) -> None: side_effect=GroupAttributesPostgresSnubaQueryExecutor.query, autospec=True, ) - def test_use_group_snuba_dataset(self, mock_query) -> None: + def test_use_group_snuba_dataset(self, mock_query: MagicMock) -> None: self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2424,7 +2424,7 @@ def test_snuba_order_by_first_seen_of_issue(self) -> None: autospec=True, ) @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_order_by_freq(self, mock_query) -> None: + def test_snuba_order_by_freq(self, mock_query: MagicMock) -> None: event1 = self.store_event( data={"timestamp": iso_format(before_now(seconds=3)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -2457,7 +2457,7 @@ def test_snuba_order_by_freq(self, mock_query) -> None: autospec=True, ) @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_order_by_user_count(self, mock_query) -> None: + def test_snuba_order_by_user_count(self, mock_query: MagicMock) -> None: user1 = { "email": "foo@example.com", } @@ -2796,7 +2796,7 @@ def test_snuba_unsupported_filters(self) -> None: autospec=True, ) @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_query_title(self, mock_query) -> None: + def test_snuba_query_title(self, mock_query: MagicMock) -> None: self.project = self.create_project(organization=self.organization) event1 = self.store_event( 
data={"fingerprint": ["group-1"], "message": "MyMessage"}, @@ -2825,7 +2825,7 @@ def test_snuba_query_title(self, mock_query) -> None: ) @override_options({"issues.group_attributes.send_kafka": True}) @with_feature("organizations:issue-platform") - def test_snuba_perf_issue(self, mock_query) -> None: + def test_snuba_perf_issue(self, mock_query: MagicMock) -> None: self.project = self.create_project(organization=self.organization) # create a performance issue _, _, group_info = self.store_search_issue( @@ -2888,7 +2888,9 @@ def test_snuba_perf_issue(self, mock_query) -> None: @with_feature("organizations:issue-platform") @with_feature(PerformanceRenderBlockingAssetSpanGroupType.build_visible_feature_name()) @with_feature(PerformanceNPlusOneGroupType.build_visible_feature_name()) - def test_snuba_type_and_category(self, mock_query, mock_should_create_group) -> None: + def test_snuba_type_and_category( + self, mock_query: MagicMock, mock_should_create_group: MagicMock + ) -> None: self.project = self.create_project(organization=self.organization) # create a render blocking issue _, _, group_info = self.store_search_issue( @@ -3072,7 +3074,7 @@ def test_find_error_by_message_with_snuba_only_search(self) -> None: assert any(int(issue["id"]) == event2.group.id for issue in issues) @override_options({"issues.group_attributes.send_kafka": True}) - def test_first_seen_and_last_seen_filters(self): + def test_first_seen_and_last_seen_filters(self) -> None: self.login_as(user=self.user) project = self.project # Create 4 issues at different times @@ -3128,7 +3130,7 @@ def test_first_seen_and_last_seen_filters(self): assert len(response.data) == 0 @override_options({"issues.group_attributes.send_kafka": True}) - def test_filter_by_bookmarked_by(self): + def test_filter_by_bookmarked_by(self) -> None: self.login_as(user=self.user) project = self.project user2 = self.create_user(email="user2@example.com") @@ -3171,7 +3173,7 @@ def test_filter_by_bookmarked_by(self): assert int(response.data[0]["id"]) == group2.id @override_options({"issues.group_attributes.send_kafka": True}) - def test_filter_by_linked(self): + def test_filter_by_linked(self) -> None: self.login_as(user=self.user) project = self.project @@ -3212,7 +3214,7 @@ def test_filter_by_linked(self): assert int(response.data[0]["id"]) == group2.id @override_options({"issues.group_attributes.send_kafka": True}) - def test_filter_by_subscribed_by(self): + def test_filter_by_subscribed_by(self) -> None: self.login_as(user=self.user) project = self.project @@ -3255,7 +3257,7 @@ def test_filter_by_subscribed_by(self): assert len(response.data) == 0 @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_search_lookup_by_regressed_in_release(self): + def test_snuba_search_lookup_by_regressed_in_release(self) -> None: self.login_as(self.user) project = self.project release = self.create_release() @@ -3274,7 +3276,7 @@ def test_snuba_search_lookup_by_regressed_in_release(self): assert [int(issue["id"]) for issue in issues] == [event.group.id] @override_options({"issues.group_attributes.send_kafka": True}) - def test_lookup_by_release_build(self): + def test_lookup_by_release_build(self) -> None: for i in range(3): j = 119 + i @@ -3301,7 +3303,7 @@ def test_lookup_by_release_build(self): assert len(issues) == 0 @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_search_lookup_by_stack_filename(self): + def test_snuba_search_lookup_by_stack_filename(self) -> None: self.login_as(self.user) project = 
self.project event = self.store_event( @@ -3366,7 +3368,7 @@ def test_snuba_search_lookup_by_stack_filename(self): assert len(issues) == 0 @override_options({"issues.group_attributes.send_kafka": True}) - def test_error_main_thread_condition(self): + def test_error_main_thread_condition(self) -> None: self.login_as(user=self.user) project = self.project # Simulate sending an event with main_thread set to true @@ -3421,7 +3423,7 @@ def test_error_main_thread_condition(self): assert int(issues[0]["id"]) == event2.group.id @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_heavy_search_aggregate_stats_regression_test(self): + def test_snuba_heavy_search_aggregate_stats_regression_test(self) -> None: self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, @@ -3439,7 +3441,7 @@ def test_snuba_heavy_search_aggregate_stats_regression_test(self): assert len(response.data) == 1 @override_options({"issues.group_attributes.send_kafka": True}) - def test_snuba_heavy_search_inbox_search(self): + def test_snuba_heavy_search_inbox_search(self) -> None: self.store_event( data={ "timestamp": iso_format(before_now(seconds=200)), @@ -3499,7 +3501,7 @@ def get_response(self, *args, **kwargs): org = args[0] return super().get_response(org, **kwargs) - def assertNoResolution(self, group) -> None: + def assertNoResolution(self, group: Group) -> None: assert not GroupResolution.objects.filter(group=group).exists() def test_global_resolve(self) -> None: @@ -3612,7 +3614,7 @@ def test_bulk_resolve(self) -> None: assert len(response.data) == 0 @patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound") - def test_resolve_with_integration(self, mock_sync_status_outbound) -> None: + def test_resolve_with_integration(self, mock_sync_status_outbound: MagicMock) -> None: self.login_as(user=self.user) org = self.organization @@ -3669,7 +3671,7 @@ def test_resolve_with_integration(self, mock_sync_status_outbound) -> None: assert len(response.data) == 0 @patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound") - def test_set_unresolved_with_integration(self, mock_sync_status_outbound) -> None: + def test_set_unresolved_with_integration(self, mock_sync_status_outbound: MagicMock) -> None: release = self.create_release(project=self.project, version="abc") group = self.create_group(status=GroupStatus.RESOLVED) with assume_test_silo_mode(SiloMode.CONTROL): @@ -4610,7 +4612,9 @@ def test_set_has_seen(self) -> None: @patch("sentry.issues.merge.uuid4") @patch("sentry.issues.merge.merge_groups") @patch("sentry.eventstream.backend") - def test_merge(self, mock_eventstream, merge_groups, mock_uuid4) -> None: + def test_merge( + self, mock_eventstream: MagicMock, merge_groups: MagicMock, mock_uuid4: MagicMock + ) -> None: eventstream_state = object() mock_eventstream.start_merge = Mock(return_value=eventstream_state) @@ -4644,7 +4648,9 @@ def test_merge(self, mock_eventstream, merge_groups, mock_uuid4) -> None: @patch("sentry.issues.merge.uuid4") @patch("sentry.issues.merge.merge_groups") @patch("sentry.eventstream.backend") - def test_merge_performance_issues(self, mock_eventstream, merge_groups, mock_uuid4) -> None: + def test_merge_performance_issues( + self, mock_eventstream: MagicMock, merge_groups: MagicMock, mock_uuid4: MagicMock + ) -> None: eventstream_state = object() mock_eventstream.start_merge = Mock(return_value=eventstream_state) @@ -4885,7 +4891,7 @@ def 
get_response(self, *args, **kwargs): return super().get_response(org, **kwargs) @patch("sentry.eventstream.backend") - def test_delete_by_id(self, mock_eventstream) -> None: + def test_delete_by_id(self, mock_eventstream: MagicMock) -> None: eventstream_state = {"event_stream_state": uuid4()} mock_eventstream.start_delete_groups = Mock(return_value=eventstream_state) @@ -4958,7 +4964,7 @@ def test_delete_by_id(self, mock_eventstream) -> None: assert GroupHash.objects.filter(group_id=group4.id).exists() @patch("sentry.eventstream.backend") - def test_delete_performance_issue_by_id(self, mock_eventstream) -> None: + def test_delete_performance_issue_by_id(self, mock_eventstream: MagicMock) -> None: eventstream_state = {"event_stream_state": uuid4()} mock_eventstream.start_delete_groups = Mock(return_value=eventstream_state) diff --git a/tests/sentry/issues/endpoints/test_project_stacktrace_link.py b/tests/sentry/issues/endpoints/test_project_stacktrace_link.py index 5505e1a54d72ff..8e3a6e8e077d10 100644 --- a/tests/sentry/issues/endpoints/test_project_stacktrace_link.py +++ b/tests/sentry/issues/endpoints/test_project_stacktrace_link.py @@ -1,9 +1,10 @@ from collections.abc import Mapping from typing import Any -from unittest.mock import PropertyMock, patch +from unittest.mock import MagicMock, PropertyMock, patch from sentry.integrations.example.integration import ExampleIntegration from sentry.models.integrations.integration import Integration +from sentry.models.integrations.repository_project_path_config import RepositoryProjectPathConfig from sentry.silo.base import SiloMode from sentry.testutils.cases import APITestCase from sentry.testutils.silo import assume_test_silo_mode @@ -76,7 +77,7 @@ def serialized_integration(integration: Integration) -> Mapping[str, Any]: class BaseProjectStacktraceLink(APITestCase): endpoint = "sentry-api-0-project-stacktrace-link" - def setUp(self): + def setUp(self) -> None: with assume_test_silo_mode(SiloMode.CONTROL): self.integration, self.oi = self.create_provider_integration_for( self.organization, self.user, provider="example", name="Example" @@ -92,7 +93,9 @@ def setUp(self): self.login_as(self.user) - def expected_configurations(self, code_mapping) -> Mapping[str, Any]: + def expected_configurations( + self, code_mapping: RepositoryProjectPathConfig + ) -> Mapping[str, Any]: return { "automaticallyGenerated": code_mapping.automatically_generated, "defaultBranch": "master", @@ -111,7 +114,7 @@ def expected_configurations(self, code_mapping) -> Mapping[str, Any]: class ProjectStacktraceLinkTest(BaseProjectStacktraceLink): endpoint = "sentry-api-0-project-stacktrace-link" - def setUp(self): + def setUp(self) -> None: BaseProjectStacktraceLink.setUp(self) self.code_mapping1 = self.create_code_mapping( organization_integration=self.oi, @@ -131,14 +134,14 @@ def setUp(self): self.filepath = "usr/src/getsentry/src/sentry/src/sentry/utils/safe.py" - def test_no_filepath(self): + def test_no_filepath(self) -> None: """The file query search is missing""" response = self.get_error_response( self.organization.slug, self.project.slug, status_code=400 ) assert response.data == {"detail": "Filepath is required"} - def test_no_configs(self): + def test_no_configs(self) -> None: """No code mappings have been set for this project""" # new project that has no configurations set up for it project = self.create_project( @@ -157,7 +160,7 @@ def test_no_configs(self): "error": "no_code_mappings_for_project", } - def test_file_not_found_error(self): + def 
test_file_not_found_error(self) -> None: """File matches code mapping but it cannot be found in the source repository.""" response = self.get_success_response( self.organization.slug, self.project.slug, qs_params={"file": self.filepath} @@ -171,7 +174,7 @@ def test_file_not_found_error(self): == f"https://example.com/{self.repo.name}/blob/master/src/sentry/src/sentry/utils/safe.py" ) - def test_stack_root_mismatch_error(self): + def test_stack_root_mismatch_error(self) -> None: """Looking for a stacktrace file path that will not match any code mappings""" response = self.get_success_response( self.organization.slug, self.project.slug, qs_params={"file": "wrong/file/path"} @@ -181,7 +184,7 @@ def test_stack_root_mismatch_error(self): assert response.data["error"] == "stack_root_mismatch" assert response.data["integrations"] == [serialized_integration(self.integration)] - def test_config_and_source_url(self): + def test_config_and_source_url(self) -> None: """Having a different source url should also work""" with patch.object( ExampleIntegration, "get_stacktrace_link", return_value="https://sourceurl.com/" @@ -194,7 +197,7 @@ def test_config_and_source_url(self): assert response.data["integrations"] == [serialized_integration(self.integration)] @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_file_no_stack_root_match(self, mock_integration): + def test_file_no_stack_root_match(self, mock_integration: MagicMock) -> None: # Pretend that the file was not found in the repository mock_integration.return_value = None @@ -212,7 +215,9 @@ def test_file_no_stack_root_match(self, mock_integration): @patch("sentry.analytics.record") @patch("sentry.integrations.utils.stacktrace_link.Timer") @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_timer_duration_for_analytics(self, mock_integration, mock_timer, mock_record): + def test_timer_duration_for_analytics( + self, mock_integration: MagicMock, mock_timer: MagicMock, mock_record: MagicMock + ) -> None: mock_integration.return_value = "https://github.com/" mock_duration = PropertyMock(return_value=5) type(mock_timer.return_value.__enter__.return_value).duration = mock_duration @@ -251,7 +256,7 @@ def test_timer_duration_for_analytics(self, mock_integration, mock_timer, mock_r class ProjectStacktraceLinkTestMobile(BaseProjectStacktraceLink): - def setUp(self): + def setUp(self) -> None: BaseProjectStacktraceLink.setUp(self) self.android_code_mapping = self.create_code_mapping( organization_integration=self.oi, @@ -283,7 +288,7 @@ def setUp(self): ) @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_munge_android_worked(self, mock_integration): + def test_munge_android_worked(self, mock_integration: MagicMock) -> None: file_path = "src/getsentry/file.java" mock_integration.side_effect = [f"{example_base_url}/{file_path}"] response = self.get_success_response( @@ -299,7 +304,7 @@ def test_munge_android_worked(self, mock_integration): assert response.data["sourceUrl"] == f"{example_base_url}/{file_path}" @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_munge_android_failed_stack_root_mismatch(self, mock_integration): + def test_munge_android_failed_stack_root_mismatch(self, mock_integration: MagicMock) -> None: """ Returns a stack_root_mismatch if module doesn't match stack root """ @@ -321,7 +326,7 @@ def test_munge_android_failed_stack_root_mismatch(self, mock_integration): assert response.data["integrations"] == [serialized_integration(self.integration)] 
@patch.object(ExampleIntegration, "get_stacktrace_link") - def test_cocoa_abs_path_success(self, mock_integration): + def test_cocoa_abs_path_success(self, mock_integration: MagicMock) -> None: """ Cocoa events with code mappings referencing the abs_path should apply correctly. """ @@ -344,7 +349,7 @@ def test_cocoa_abs_path_success(self, mock_integration): assert response.data["sourceUrl"] == f"{example_base_url}/src/{filename}" @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_cocoa_filename_success(self, mock_integration): + def test_cocoa_filename_success(self, mock_integration: MagicMock) -> None: """ Cocoa events with code mappings that match the file should apply correctly. """ @@ -367,7 +372,7 @@ def test_cocoa_filename_success(self, mock_integration): assert response.data["sourceUrl"] == f"{example_base_url}/src/{filename}" @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_cocoa_failed_stack_root_mismatch(self, mock_integration): + def test_cocoa_failed_stack_root_mismatch(self, mock_integration: MagicMock) -> None: """ Should return stack_root_mismatch if stack root doesn't match file or abs_path """ @@ -390,7 +395,7 @@ def test_cocoa_failed_stack_root_mismatch(self, mock_integration): assert response.data["integrations"] == [serialized_integration(self.integration)] @patch.object(ExampleIntegration, "get_stacktrace_link") - def test_munge_flutter_worked(self, mock_integration): + def test_munge_flutter_worked(self, mock_integration: MagicMock) -> None: file_path = "a/b/main.dart" mock_integration.side_effect = [f"{example_base_url}/{file_path}"] response = self.get_success_response( @@ -409,7 +414,7 @@ def test_munge_flutter_worked(self, mock_integration): class ProjectStacktraceLinkTestMultipleMatches(BaseProjectStacktraceLink): - def setUp(self): + def setUp(self) -> None: BaseProjectStacktraceLink.setUp(self) # Created by the user, not well defined stack root self.code_mapping1 = self.create_code_mapping( @@ -466,7 +471,7 @@ def setUp(self): self.filepath = "usr/src/getsentry/src/sentry/src/sentry/utils/safe.py" - def test_multiple_code_mapping_matches(self): + def test_multiple_code_mapping_matches(self) -> None: with patch.object( ExampleIntegration, "get_stacktrace_link", diff --git a/tests/sentry/issues/test_attributes.py b/tests/sentry/issues/test_attributes.py index 81f30f32fa38ef..eb4e25cca6ac58 100644 --- a/tests/sentry/issues/test_attributes.py +++ b/tests/sentry/issues/test_attributes.py @@ -1,3 +1,4 @@ +from collections.abc import Sequence from datetime import timedelta from unittest.mock import patch @@ -140,13 +141,13 @@ def test_bulk_retrieve_snapshot_values_group_owner(self) -> None: class PostSaveLogGroupAttributesChangedTest(TestCase): - def test(self): + def test(self) -> None: self.run_attr_test(self.group, [], "all") self.run_attr_test(self.group, ["status"], "status") self.run_attr_test(self.group, ["status", "last_seen"], "status") self.run_attr_test(self.group, ["status", "substatus"], "status-substatus") - def run_attr_test(self, group, update_fields, expected_str): + def run_attr_test(self, group: Group, update_fields: Sequence[str], expected_str: str) -> None: with patch( "sentry.issues.attributes._log_group_attributes_changed" ) as _log_group_attributes_changed, patch( @@ -161,7 +162,7 @@ def run_attr_test(self, group, update_fields, expected_str): ) send_snapshot_values.assert_called_with(None, group, False) - def test_new(self): + def test_new(self) -> None: with patch( 
"sentry.issues.attributes._log_group_attributes_changed" ) as _log_group_attributes_changed, patch( @@ -172,7 +173,7 @@ def test_new(self): send_snapshot_values.assert_called_with(None, new_group, False) - def test_model_update(self): + def test_model_update(self) -> None: with patch( "sentry.issues.attributes._log_group_attributes_changed" ) as _log_group_attributes_changed, patch( @@ -184,11 +185,11 @@ def test_model_update(self): class PostUpdateLogGroupAttributesChangedTest(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.group_2 = self.create_group() - def test(self): + def test(self) -> None: self.run_attr_test([self.group, self.group_2], {"status": GroupStatus.RESOLVED}, "status") self.run_attr_test( [self.group, self.group_2], @@ -196,7 +197,9 @@ def test(self): "status-substatus", ) - def run_attr_test(self, groups, update_fields, expected_str): + def run_attr_test( + self, groups: list[Group], update_fields: dict[str, int], expected_str: str + ) -> None: groups.sort(key=lambda g: g.id) with patch( "sentry.issues.attributes._log_group_attributes_changed" diff --git a/tests/sentry/issues/test_escalating.py b/tests/sentry/issues/test_escalating.py index e2f4b3f1c0b135..b67d270a52904b 100644 --- a/tests/sentry/issues/test_escalating.py +++ b/tests/sentry/issues/test_escalating.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta from typing import Any from unittest import mock -from unittest.mock import patch +from unittest.mock import MagicMock, patch from uuid import uuid4 import pytest @@ -95,7 +95,7 @@ def _create_hourly_bucket(self, count: int, event: Event) -> GroupsCountResponse @with_feature("organizations:escalating-issues-v2") @mock.patch("sentry.issues.escalating.logger") - def test_query_single_group(self, mock_logger) -> None: + def test_query_single_group(self, mock_logger: MagicMock) -> None: event = self._create_events_for_group() assert query_groups_past_counts(Group.objects.all()) == [ self._create_hourly_bucket(1, event) @@ -105,7 +105,7 @@ def test_query_single_group(self, mock_logger) -> None: @with_feature("organizations:escalating-issues-v2") @freeze_time(TIME_YESTERDAY) @mock.patch("sentry.issues.escalating.logger") - def test_query_different_group_categories(self, mock_logger) -> None: + def test_query_different_group_categories(self, mock_logger: MagicMock) -> None: from django.utils import timezone timestamp = timezone.now() - timedelta(minutes=1) @@ -165,7 +165,7 @@ def test_query_different_group_categories(self, mock_logger) -> None: @mock.patch("sentry.issues.escalating.ELEMENTS_PER_SNUBA_METRICS_QUERY", new=4) @mock.patch("sentry.issues.escalating.ELEMENTS_PER_SNUBA_PAGE", new=4) @mock.patch("sentry.issues.escalating.logger") - def test_pagination(self, mock_logger) -> None: + def test_pagination(self, mock_logger: MagicMock) -> None: events = [] for i in range(20): event = self._create_events_for_group(count=1, hours_ago=2, group=f"group-{i}") @@ -205,7 +205,7 @@ def test_query_optimization(self) -> None: @with_feature("organizations:escalating-issues-v2") @mock.patch("sentry.issues.escalating.logger") - def test_query_multiple_projects(self, mock_logger) -> None: + def test_query_multiple_projects(self, mock_logger: MagicMock) -> None: proj_x = self.create_project(organization=self.project.organization) proj_y = self.create_project(organization=self.project.organization) @@ -228,7 +228,7 @@ def test_query_multiple_projects(self, mock_logger) -> None: @with_feature("organizations:escalating-issues-v2") 
@mock.patch("sentry.issues.escalating.logger") - def test_query_different_orgs(self, mock_logger) -> None: + def test_query_different_orgs(self, mock_logger: MagicMock) -> None: proj_a = self.create_project(organization=self.project.organization) org_b = self.create_organization() proj_b = self.create_project(organization=org_b) @@ -259,7 +259,7 @@ def test_datetime_number_of_days() -> None: class DailyGroupCountsEscalating(BaseGroupCounts): def save_mock_escalating_group_forecast( - self, group: Group, forecast_values=list[int], date_added=datetime + self, group: Group, forecast_values: list[int], date_added: datetime ) -> None: """Save mock data for escalating group forecast in nodestore""" escalating_forecast = EscalatingGroupForecast( diff --git a/tests/sentry/issues/test_issue_velocity.py b/tests/sentry/issues/test_issue_velocity.py index 43aa535a510cd3..03a585320af4ce 100644 --- a/tests/sentry/issues/test_issue_velocity.py +++ b/tests/sentry/issues/test_issue_velocity.py @@ -1,6 +1,6 @@ import math from datetime import datetime, timedelta -from unittest.mock import patch +from unittest.mock import MagicMock, patch from django.utils import timezone @@ -25,12 +25,12 @@ @freeze_time() class IssueVelocityTests(TestCase, SnubaTestCase): - def setUp(self): + def setUp(self) -> None: self.now = timezone.now() self.utcnow = datetime.utcnow() super().setUp() - def test_calculation_simple(self): + def test_calculation_simple(self) -> None: """ Tests threshold calculation for a single issue with the minimum number of events in the past week. @@ -56,7 +56,7 @@ def test_calculation_simple(self): threshold = calculate_threshold(self.project) assert threshold == 2 / WEEK_IN_HOURS - def test_calculation_multiple_issues(self): + def test_calculation_multiple_issues(self) -> None: """ Tests that we receive the approximate 90th percentile for multiple issues older than a week with multiple events in the past week. @@ -91,7 +91,7 @@ def test_calculation_multiple_issues(self): assert actual_threshold is not None assert math.isclose(expected_threshold, actual_threshold, abs_tol=10**-3) - def test_calculation_for_issues_first_seen_recently(self): + def test_calculation_for_issues_first_seen_recently(self) -> None: """ Tests that issues first seen within the past week use the difference in hours between now and when they were first seen to calculate frequency instead of the full week in hours. @@ -108,7 +108,7 @@ def test_calculation_for_issues_first_seen_recently(self): threshold = calculate_threshold(self.project) assert threshold == 2 / 24 - def test_calculation_excludes_issues_with_only_one_event_in_past_week(self): + def test_calculation_excludes_issues_with_only_one_event_in_past_week(self) -> None: """ Tests that issues with only one event in the past week are excluded from the calculation. """ @@ -134,7 +134,7 @@ def test_calculation_excludes_issues_with_only_one_event_in_past_week(self): assert threshold is not None assert math.isnan(threshold) - def test_calculation_excludes_issues_newer_than_an_hour(self): + def test_calculation_excludes_issues_newer_than_an_hour(self) -> None: """ Tests that issues that were first seen within the past hour are excluded from the calculation. 
""" @@ -161,7 +161,7 @@ def test_calculation_excludes_issues_newer_than_an_hour(self): assert math.isnan(threshold) @patch("sentry.issues.issue_velocity.update_threshold") - def test_get_latest_threshold_simple(self, mock_update): + def test_get_latest_threshold_simple(self, mock_update: MagicMock) -> None: """ Tests that we get the last threshold stored when the stale date has not passed yet. """ @@ -173,7 +173,7 @@ def test_get_latest_threshold_simple(self, mock_update): assert threshold == 0.1 @patch("sentry.issues.issue_velocity.update_threshold") - def test_get_latest_threshold_outdated(self, mock_update): + def test_get_latest_threshold_outdated(self, mock_update: MagicMock) -> None: """ Tests that we update the threshold when the stale date has passed. """ @@ -187,7 +187,7 @@ def test_get_latest_threshold_outdated(self, mock_update): assert get_latest_threshold(self.project) == 1.5 @patch("sentry.issues.issue_velocity.update_threshold") - def test_get_latest_threshold_when_none_saved(self, mock_update): + def test_get_latest_threshold_when_none_saved(self, mock_update: MagicMock) -> None: """ Tests that we update the threshold when it is non-existent. """ @@ -195,7 +195,7 @@ def test_get_latest_threshold_when_none_saved(self, mock_update): assert get_latest_threshold(self.project) == 10.7 @patch("sentry.issues.issue_velocity.update_threshold") - def test_get_latest_threshold_locked(self, mock_update): + def test_get_latest_threshold_locked(self, mock_update: MagicMock) -> None: """ Tests that we return the stale threshold when another process has the lock. """ @@ -217,7 +217,7 @@ def test_get_latest_threshold_locked(self, mock_update): assert threshold == 0.7 @patch("sentry.issues.issue_velocity.update_threshold") - def test_get_latest_threshold_locked_no_stale(self, mock_update): + def test_get_latest_threshold_locked_no_stale(self, mock_update: MagicMock) -> None: """ Tests that we return 0 when another process has the lock and there is no stale value. """ @@ -232,7 +232,7 @@ def test_get_latest_threshold_locked_no_stale(self, mock_update): assert threshold == 0 @patch("sentry.issues.issue_velocity.calculate_threshold") - def test_update_threshold_simple(self, mock_calculation): + def test_update_threshold_simple(self, mock_calculation: MagicMock) -> None: """ Tests that we save the newly calculated threshold at the default TTL and return it. """ @@ -248,7 +248,7 @@ def test_update_threshold_simple(self, mock_calculation): assert redis_client.ttl("date-key") == DEFAULT_TTL @patch("sentry.issues.issue_velocity.calculate_threshold") - def test_update_threshold_with_stale(self, mock_calculation): + def test_update_threshold_with_stale(self, mock_calculation: MagicMock) -> None: """ Tests that we return the stale threshold if the calculation method returns None. """ @@ -259,7 +259,7 @@ def test_update_threshold_with_stale(self, mock_calculation): assert update_threshold(self.project, "threshold-key", "date-key", 0.5) == 0.5 @patch("sentry.issues.issue_velocity.calculate_threshold") - def test_update_threshold_none(self, mock_calculation): + def test_update_threshold_none(self, mock_calculation: MagicMock) -> None: """ Tests that we return 0 if the calculation method returns None and we don't have a stale threshold. 
@@ -268,7 +268,7 @@ def test_update_threshold_none(self, mock_calculation): assert update_threshold(self.project, "threshold-key", "date-key") == 0 @patch("sentry.issues.issue_velocity.calculate_threshold") - def test_update_threshold_nan(self, mock_calculation): + def test_update_threshold_nan(self, mock_calculation: MagicMock) -> None: """ Tests that we return 0 and save a threshold for the default TTL if the calculation returned NaN. """ @@ -281,7 +281,7 @@ def test_update_threshold_nan(self, mock_calculation): assert datetime.fromisoformat(stored_date) == self.utcnow assert redis_client.ttl("threshold-key") == DEFAULT_TTL - def test_fallback_to_stale(self): + def test_fallback_to_stale(self) -> None: """ Tests that we return the stale threshold and maintain its TTL, and update the stale date to make the threshold usable for the next ten minutes as a fallback. @@ -302,7 +302,7 @@ def test_fallback_to_stale(self): assert redis_client.ttl("threshold-key") == 86400 assert redis_client.ttl("date-key") == 86400 - def test_fallback_to_zero(self): + def test_fallback_to_zero(self) -> None: """ Tests that we return 0 and store it in Redis for the next ten minutes as a fallback if we do not have a stale threshold. @@ -320,7 +320,7 @@ def test_fallback_to_zero(self): assert redis_client.ttl("threshold-key") == FALLBACK_TTL assert redis_client.ttl("date-key") == FALLBACK_TTL - def test_fallback_to_stale_zero_ttl(self): + def test_fallback_to_stale_zero_ttl(self) -> None: """ Tests that we return 0 and store it in Redis for the next ten minutes as a fallback if our stale threshold has a TTL <= 0. diff --git a/tests/sentry/issues/test_priority.py b/tests/sentry/issues/test_priority.py index f70fb7d124dda4..0cc272867cae66 100644 --- a/tests/sentry/issues/test_priority.py +++ b/tests/sentry/issues/test_priority.py @@ -6,7 +6,7 @@ auto_update_priority, ) from sentry.models.activity import Activity -from sentry.models.group import GroupStatus +from sentry.models.group import Group, GroupStatus from sentry.models.grouphistory import GroupHistory, GroupHistoryStatus from sentry.testutils.cases import TestCase from sentry.testutils.helpers.datetime import before_now @@ -17,7 +17,9 @@ @apply_feature_flag_on_cls("projects:issue-priority") class TestUpdatesPriority(TestCase): - def assert_activity_grouphistory_set(self, group, priority, reason) -> None: + def assert_activity_grouphistory_set( + self, group: Group, priority: PriorityLevel, reason: PriorityChangeReason + ) -> None: activity = ( Activity.objects.filter(group=group, type=ActivityType.SET_PRIORITY.value) .order_by("-datetime") diff --git a/tests/sentry/issues/test_producer.py b/tests/sentry/issues/test_producer.py index 5b4500b0c2bbad..2cce290c55b1d1 100644 --- a/tests/sentry/issues/test_producer.py +++ b/tests/sentry/issues/test_producer.py @@ -94,7 +94,7 @@ def test_with_only_occurrence(self) -> None: @patch("sentry.issues.producer._occurrence_producer.produce") @override_settings(SENTRY_EVENTSTREAM="sentry.eventstream.kafka.KafkaEventStream") def test_payload_sent_to_kafka_with_partition_key( - self, mock_produce, mock_prepare_occurrence_message + self, mock_produce: MagicMock, mock_prepare_occurrence_message: MagicMock ) -> None: occurrence = self.build_occurrence(project_id=self.project.id, fingerprint=["group-1"]) produce_occurrence_to_kafka( @@ -117,7 +117,7 @@ def test_payload_sent_to_kafka_with_partition_key( @patch("sentry.issues.producer._occurrence_producer.produce") 
@override_settings(SENTRY_EVENTSTREAM="sentry.eventstream.kafka.KafkaEventStream") def test_payload_sent_to_kafka_with_partition_key_no_fingerprint( - self, mock_produce, mock_prepare_occurrence_message + self, mock_produce: MagicMock, mock_prepare_occurrence_message: MagicMock ) -> None: occurrence = self.build_occurrence(project_id=self.project.id, fingerprint=[]) produce_occurrence_to_kafka( @@ -136,7 +136,7 @@ def test_payload_sent_to_kafka_with_partition_key_no_fingerprint( @patch("sentry.issues.producer._occurrence_producer.produce") @override_settings(SENTRY_EVENTSTREAM="sentry.eventstream.kafka.KafkaEventStream") def test_payload_sent_to_kafka_with_partition_key_no_occurrence( - self, mock_produce, mock_prepare_occurrence_message + self, mock_produce: MagicMock, mock_prepare_occurrence_message: MagicMock ) -> None: produce_occurrence_to_kafka( payload_type=PayloadType.OCCURRENCE, @@ -150,7 +150,7 @@ def test_payload_sent_to_kafka_with_partition_key_no_occurrence( class TestProduceOccurrenceForStatusChange(TestCase, OccurrenceTestMixin): - def setUp(self): + def setUp(self) -> None: self.fingerprint = ["group-1"] self.event = self.store_event( data={ @@ -253,7 +253,7 @@ def test_with_status_change_archived(self) -> None: status=STRING_TO_STATUS_LOOKUP[gh_status], ).exists() - def test_with_status_change_unresolved(self): + def test_with_status_change_unresolved(self) -> None: # We modify a single group through different substatuses that are supported in the UI # to ensure the status change is processed correctly. self.group.update(status=GroupStatus.IGNORED, substatus=GroupSubStatus.UNTIL_ESCALATING) @@ -361,7 +361,7 @@ def test_invalid_hashes(self, mock_metrics_incr: MagicMock) -> None: assert group.status == initial_status assert group.substatus == initial_substatus - def test_generate_status_changes_id(self): + def test_generate_status_changes_id(self) -> None: status_change_1 = StatusChangeMessage( fingerprint=["status-change-1"], project_id=self.project.id, diff --git a/tests/sentry/issues/test_run.py b/tests/sentry/issues/test_run.py index d3feab74f56eb6..72a1a2ee6698c4 100644 --- a/tests/sentry/issues/test_run.py +++ b/tests/sentry/issues/test_run.py @@ -1,4 +1,6 @@ +from collections.abc import Mapping, MutableMapping from datetime import datetime +from typing import Any from unittest import mock from arroyo.backends.kafka import KafkaPayload @@ -20,13 +22,15 @@ # need to shut down the connections in the thread for tests to pass -def process_occurrence_group_with_shutdown(*args, **kwargs): - process_occurrence_group(*args, **kwargs) +def process_occurrence_group_with_shutdown(items: list[Mapping[str, Any]]) -> None: + process_occurrence_group(items) close_old_connections() class TestOccurrenceConsumer(TestCase, OccurrenceTestMixin): - def build_mock_message(self, data, topic=None): + def build_mock_message( + self, data: MutableMapping[str, Any] | None, topic: ArroyoTopic | None = None + ) -> mock.Mock: message = mock.Mock() message.value.return_value = json.dumps(data) if topic: @@ -35,7 +39,7 @@ def build_mock_message(self, data, topic=None): @with_feature("organizations:profile-file-io-main-thread-ingest") @mock.patch("sentry.issues.occurrence_consumer.save_issue_occurrence") - def test_saves_issue_occurrence(self, mock_save_issue_occurrence): + def test_saves_issue_occurrence(self, mock_save_issue_occurrence: mock.MagicMock) -> None: topic = ArroyoTopic(get_topic_definition(Topic.INGEST_OCCURRENCES)["real_topic_name"]) partition_1 = Partition(topic, 0) partition_2 = 
Partition(topic, 1) @@ -107,7 +111,9 @@ def test_saves_issue_occurrence(self, mock_save_issue_occurrence): class TestBatchedOccurrenceConsumer(TestCase, OccurrenceTestMixin): - def build_mock_message(self, data, topic=None): + def build_mock_message( + self, data: MutableMapping[str, Any] | None, topic: ArroyoTopic | None = None + ) -> mock.Mock: message = mock.Mock() message.value.return_value = json.dumps(data) if topic: @@ -122,8 +128,10 @@ def build_mock_message(self, data, topic=None): ) @mock.patch("sentry.issues.occurrence_consumer.save_issue_occurrence") def test_saves_issue_occurrence( - self, mock_save_issue_occurrence, mock_process_occurrence_group - ): + self, + mock_save_issue_occurrence: mock.MagicMock, + mock_process_occurrence_group: mock.MagicMock, + ) -> None: topic = ArroyoTopic(get_topic_definition(Topic.INGEST_OCCURRENCES)["real_topic_name"]) partition_1 = Partition(topic, 0) partition_2 = Partition(topic, 1) From d2019f479c959c1a6b8c124efa3130049711f6a9 Mon Sep 17 00:00:00 2001 From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com> Date: Mon, 13 May 2024 12:35:02 -0400 Subject: [PATCH 345/376] feat(insights): add cache miss rate chart to sidebar (#70767) You might want to investigate a drop in miss rate scoped to a transaction, so to cover that case we're adding a transaction miss rate chart in the sidebar. ![image](https://github.com/getsentry/sentry/assets/44422760/706596cc-3f2a-48bf-a82d-0b3fb1c51721) --- .../app/views/performance/cache/referrers.ts | 1 + .../cache/samplePanel/samplePanel.tsx | 69 +++++++++++-------- 2 files changed, 42 insertions(+), 28 deletions(-) diff --git a/static/app/views/performance/cache/referrers.ts b/static/app/views/performance/cache/referrers.ts index 167502fdf0476f..b565f5f74de361 100644 --- a/static/app/views/performance/cache/referrers.ts +++ b/static/app/views/performance/cache/referrers.ts @@ -9,4 +9,5 @@ export enum Referrer { SAMPLES_CACHE_TRANSACTION_DURATION = 'api.performance.cache.samples-cache-transaction-duration', SAMPLES_CACHE_SPAN_SAMPLES = 'api.performance.cache.samples-cache-span-samples', SAMPLES_CACHE_SPAN_SAMPLES_TRANSACTION_DURATION = 'api.performance.cache.samples-cache-span-samples', + SAMPLES_CACHE_HIT_MISS_CHART = 'api.performance.cache.samples-cache-hit-miss-chart', } diff --git a/static/app/views/performance/cache/samplePanel/samplePanel.tsx b/static/app/views/performance/cache/samplePanel/samplePanel.tsx index 43c533aead2dd1..f5523a8883ea2c 100644 --- a/static/app/views/performance/cache/samplePanel/samplePanel.tsx +++ b/static/app/views/performance/cache/samplePanel/samplePanel.tsx @@ -17,6 +17,7 @@ import useOrganization from 'sentry/utils/useOrganization'; import useProjects from 'sentry/utils/useProjects'; import useRouter from 'sentry/utils/useRouter'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {CacheHitMissChart} from 'sentry/views/performance/cache/charts/hitMissChart'; import {Referrer} from 'sentry/views/performance/cache/referrers'; import {TransactionDurationChart} from 'sentry/views/performance/cache/samplePanel/charts/transactionDurationChart'; import {BASE_FILTERS} from 'sentry/views/performance/cache/settings'; @@ -27,6 +28,7 @@ import * as ModuleLayout from 'sentry/views/performance/moduleLayout'; import DetailPanel from 'sentry/views/starfish/components/detailPanel'; import {getTimeSpentExplanation} from 'sentry/views/starfish/components/tableCells/timeSpentCell'; import {useMetrics, useSpanMetrics} from 
'sentry/views/starfish/queries/useDiscover'; +import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useDiscoverSeries'; import {useIndexedSpans} from 'sentry/views/starfish/queries/useIndexedSpans'; import {useTransactions} from 'sentry/views/starfish/queries/useTransactions'; import { @@ -72,6 +74,14 @@ export function CacheSamplePanel() { 'project.id': query.project, }; + const {data: cacheHitRateData, isLoading: isCacheHitRateLoading} = useSpanMetricsSeries( + { + search: MutableSearch.fromQueryObject(filters satisfies SpanMetricsQueryFilters), + yAxis: [`${SpanFunction.CACHE_MISS_RATE}()`], + }, + Referrer.SAMPLES_CACHE_HIT_MISS_CHART + ); + const {data: cacheTransactionMetrics, isFetching: areCacheTransactionMetricsFetching} = useSpanMetrics( { @@ -280,35 +290,38 @@ export function CacheSamplePanel() { /> - - - - + + + + { + const firstHighlight = highlights[0]; + + if (!firstHighlight) { + setHighlightedSpanId(undefined); + return; } - highlightedSpanId={highlightedSpanId} - onHighlight={highlights => { - const firstHighlight = highlights[0]; - - if (!firstHighlight) { - setHighlightedSpanId(undefined); - return; - } - - const sample = findSampleFromDataPoint<(typeof spansWithDuration)[0]>( - firstHighlight.dataPoint, - spansWithDuration, - 'transaction.duration' - ); - setHighlightedSpanId(sample?.span_id); - }} - /> - - + + const sample = findSampleFromDataPoint<(typeof spansWithDuration)[0]>( + firstHighlight.dataPoint, + spansWithDuration, + 'transaction.duration' + ); + setHighlightedSpanId(sample?.span_id); + }} + /> + Date: Mon, 13 May 2024 12:39:06 -0400 Subject: [PATCH 346/376] fix(ai-monitoring): Null pointer for AI-related issues (#70773) https://sentry.sentry.io/issues/5336783230/?referrer=github-pr-bot --- .../groupEventDetails/groupEventDetailsContent.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx b/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx index f79c06c142bc39..e46737823d07eb 100644 --- a/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx +++ b/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx @@ -143,8 +143,9 @@ function DefaultGroupEventDetailsContent({ ?.filter((x): x is EntryException => x.type === EntryType.EXCEPTION) .flatMap(x => x.data.values ?? []) .some(({value}) => { - const lowerText = value.toLowerCase(); + const lowerText = value?.toLowerCase(); return ( + lowerText && (lowerText.includes('api key') || lowerText.includes('429')) && (lowerText.includes('openai') || lowerText.includes('anthropic') || From 5d320d2b930c1e044c715200523cdd4d6e157eb0 Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Mon, 13 May 2024 09:47:24 -0700 Subject: [PATCH 347/376] feat(api-idorslug): Updated Subset of Replay Endpoints and API Docs Tests to use `organization_id_or_slug` (#70711) A subset of changes from https://github.com/getsentry/sentry/pull/70081! 
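For review context, the renamed kwarg implies a simple disambiguation rule: a purely numeric path segment is treated as an organization ID, anything else as a slug. Below is a minimal sketch of that rule only — the helper name is hypothetical and this is not the resolver this patch ships.

```python
def split_org_id_or_slug(organization_id_or_slug: str) -> tuple[int | None, str | None]:
    """Interpret a URL kwarg that may carry either an ID or a slug.

    Assumption (for illustration): purely numeric segments are primary
    keys, so an all-digit slug would resolve as an ID under this rule.
    """
    if organization_id_or_slug.isdecimal():
        return int(organization_id_or_slug), None
    return None, organization_id_or_slug


# Both forms of the kwarg resolve cleanly:
assert split_org_id_or_slug("1234") == (1234, None)
assert split_org_id_or_slug("my-org") == (None, "my-org")
```

Keeping a single permissive `[^\/]+` URL pattern and deciding ID-vs-slug in the lookup (rather than two separate routes) means existing slug URLs keep working while numeric IDs become valid, which is why the tests below only rename the `kwargs` key.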
--- src/sentry/api/urls.py | 98 +++++++-------- src/sentry/feedback/blueprints/api.md | 4 +- .../organization_alert_rule_details.py | 6 +- src/sentry/replays/blueprints/api.md | 22 ++-- .../endpoints/organization_replay_count.py | 2 +- .../endpoints/organization_replay_details.py | 2 +- .../endpoints/organization_replay_index.py | 2 +- .../organization_replay_selector_index.py | 2 +- .../endpoints/project_replay_clicks_index.py | 2 +- .../endpoints/project_replay_details.py | 2 +- ...roject_replay_recording_segment_details.py | 2 +- .../project_replay_recording_segment_index.py | 2 +- .../endpoints/project_replay_video_details.py | 2 +- .../endpoints/project_replay_viewed_by.py | 2 +- src/sentry/testutils/cases.py | 11 +- .../events/test_project_event_details.py | 2 +- .../events/test_project_tagkey_values.py | 2 +- .../organizations/test_event_id_lookup.py | 2 +- .../organizations/test_org_details.py | 2 +- .../endpoints/organizations/test_org_repos.py | 2 +- .../organizations/test_repo_commits.py | 2 +- .../endpoints/organizations/test_shortid.py | 2 +- .../endpoints/projects/test_project_stats.py | 2 +- .../projects/test_service_hook_details.py | 2 +- .../endpoints/projects/test_service_hooks.py | 2 +- .../endpoints/projects/test_tag_values.py | 2 +- .../apidocs/endpoints/projects/test_users.py | 2 +- .../endpoints/releases/test_deploys.py | 5 +- .../test_organization_release_commit_files.py | 5 +- .../test_organization_release_commits.py | 5 +- .../test_organization_release_details.py | 2 +- .../test_organization_release_file_details.py | 2 +- .../test_organization_release_files.py | 5 +- .../releases/test_organization_releases.py | 2 +- .../releases/test_project_release_commits.py | 2 +- .../test_project_release_file_details.py | 2 +- .../releases/test_project_release_files.py | 2 +- .../test_release_threshold.py | 6 +- .../test_release_threshold_details.py | 28 ++--- .../test_organization_release_commits.py | 5 +- .../test_organization_release_details.py | 119 ++++++++++++------ .../test_organization_release_file_details.py | 10 +- .../test_organization_release_files.py | 50 ++++++-- .../test_organization_release_meta.py | 8 +- ...t_organization_release_previous_commits.py | 12 +- .../endpoints/test_organization_releases.py | 104 ++++++++++----- .../endpoints/test_organization_shortid.py | 2 +- .../test_project_release_file_details.py | 14 +-- .../endpoints/test_project_release_files.py | 26 ++-- .../api/endpoints/test_project_stats.py | 4 +- .../endpoints/test_project_tagkey_details.py | 6 +- .../endpoints/test_project_tagkey_values.py | 8 +- .../api/endpoints/test_release_deploys.py | 18 ++- tests/sentry/api/endpoints/test_sudo.py | 4 +- .../replays/test_organization_replay_count.py | 2 +- .../test_organization_replay_events_meta.py | 2 +- .../endpoints/test_organization_eventid.py | 10 +- .../endpoints/test_project_event_details.py | 24 ++-- .../api/endpoints/test_project_events.py | 10 +- 59 files changed, 416 insertions(+), 273 deletions(-) diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 0e7ce0fffd3784..2e72830157bf80 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -1118,7 +1118,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organizations", ), re_path( - r"^(?P[^\/]+)/$", + r"^(?P[^\/]+)/$", OrganizationDetailsEndpoint.as_view(), name="sentry-api-0-organization-details", ), @@ -1138,7 +1138,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: 
name="sentry-api-0-organization-alert-rule-available-actions", ), re_path( - r"^(?P[^\/]+)/alert-rules/(?P[^\/]+)/$", + r"^(?P[^\/]+)/alert-rules/(?P[^\/]+)/$", OrganizationAlertRuleDetailsEndpoint.as_view(), name="sentry-api-0-organization-alert-rule-details", ), @@ -1294,12 +1294,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-dashboard-visit", ), re_path( - r"^(?P[^\/]+)/shortids/(?P[^\/]+)/$", + r"^(?P[^\/]+)/shortids/(?P[^\/]+)/$", ShortIdLookupEndpoint.as_view(), name="sentry-api-0-short-id-lookup", ), re_path( - r"^(?P[^\/]+)/eventids/(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$", + r"^(?P[^\/]+)/eventids/(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$", EventIdLookupEndpoint.as_view(), name="sentry-api-0-event-id-lookup", ), @@ -1774,17 +1774,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-sent-first-event", ), re_path( - r"^(?P[^\/]+)/repos/$", + r"^(?P[^\/]+)/repos/$", OrganizationRepositoriesEndpoint.as_view(), name="sentry-api-0-organization-repositories", ), re_path( - r"^(?P[^\/]+)/repos/(?P[^\/]+)/$", + r"^(?P[^\/]+)/repos/(?P[^\/]+)/$", OrganizationRepositoryDetailsEndpoint.as_view(), name="sentry-api-0-organization-repository-details", ), re_path( - r"^(?P[^\/]+)/repos/(?P[^\/]+)/commits/$", + r"^(?P[^\/]+)/repos/(?P[^\/]+)/commits/$", OrganizationRepositoryCommitsEndpoint.as_view(), name="sentry-api-0-organization-repository-commits", ), @@ -1799,12 +1799,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-plugins-configs", ), re_path( - r"^(?P[^\/]+)/releases/$", + r"^(?P[^\/]+)/releases/$", OrganizationReleasesEndpoint.as_view(), name="sentry-api-0-organization-releases", ), re_path( - r"^(?P[^\/]+)/release-thresholds/$", + r"^(?P[^\/]+)/release-thresholds/$", ReleaseThresholdIndexEndpoint.as_view(), name="sentry-api-0-organization-release-thresholds", ), @@ -1815,52 +1815,52 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-release-threshold-statuses", ), re_path( - r"^(?P[^\/]+)/releases/stats/$", + r"^(?P[^\/]+)/releases/stats/$", OrganizationReleasesStatsEndpoint.as_view(), name="sentry-api-0-organization-releases-stats", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/$", OrganizationReleaseDetailsEndpoint.as_view(), name="sentry-api-0-organization-release-details", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/meta/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/meta/$", OrganizationReleaseMetaEndpoint.as_view(), name="sentry-api-0-organization-release-meta", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/assemble/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/assemble/$", OrganizationReleaseAssembleEndpoint.as_view(), name="sentry-api-0-organization-release-assemble", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/files/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/files/$", OrganizationReleaseFilesEndpoint.as_view(), name="sentry-api-0-organization-release-files", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/files/(?P[^/]+)/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/files/(?P[^/]+)/$", OrganizationReleaseFileDetailsEndpoint.as_view(), name="sentry-api-0-organization-release-file-details", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/commitfiles/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/commitfiles/$", CommitFileChangeEndpoint.as_view(), name="sentry-api-0-release-commitfilechange", ), re_path( - 
r"^(?P[^\/]+)/releases/(?P[^/]+)/deploys/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/deploys/$", ReleaseDeploysEndpoint.as_view(), name="sentry-api-0-organization-release-deploys", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/commits/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/commits/$", OrganizationReleaseCommitsEndpoint.as_view(), name="sentry-api-0-organization-release-commits", ), re_path( - r"^(?P[^\/]+)/releases/(?P[^/]+)/previous-with-commits/$", + r"^(?P[^\/]+)/releases/(?P[^/]+)/previous-with-commits/$", OrganizationReleasePreviousCommitsEndpoint.as_view(), name="sentry-api-0-organization-release-previous-with-commits", ), @@ -1971,32 +1971,32 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-relay-usage", ), re_path( - r"^(?P[^\/]+)/replays/$", + r"^(?P[^\/]+)/replays/$", OrganizationReplayIndexEndpoint.as_view(), name="sentry-api-0-organization-replay-index", ), re_path( - r"^(?P[^\/]+)/replay-selectors/$", + r"^(?P[^\/]+)/replay-selectors/$", OrganizationReplaySelectorIndexEndpoint.as_view(), name="sentry-api-0-organization-replay-selectors-index", ), re_path( - r"^(?P[^\/]+)/replay-count/$", + r"^(?P[^\/]+)/replay-count/$", OrganizationReplayCountEndpoint.as_view(), name="sentry-api-0-organization-replay-count", ), re_path( - r"^(?P[^/]+)/replays/(?P[\w-]+)/$", + r"^(?P[^/]+)/replays/(?P[\w-]+)/$", OrganizationReplayDetailsEndpoint.as_view(), name="sentry-api-0-organization-replay-details", ), re_path( - r"^(?P[^\/]+)/replays-events-meta/$", + r"^(?P[^\/]+)/replays-events-meta/$", OrganizationReplayEventsMetaEndpoint.as_view(), name="sentry-api-0-organization-replay-events-meta", ), re_path( - r"^(?P[^\/]+)/request-project-creation/$", + r"^(?P[^\/]+)/request-project-creation/$", OrganizationRequestProjectCreation.as_view(), name="sentry-api-0-organization-request-project-creation", ), @@ -2203,12 +2203,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-platform-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/$", + r"^(?P[^\/]+)/(?P[^\/]+)/events/$", ProjectEventsEndpoint.as_view(), name="sentry-api-0-project-events", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P(?:\d+|[A-Fa-f0-9]{32}))/$", + r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P(?:\d+|[A-Fa-f0-9]{32}))/$", ProjectEventDetailsEndpoint.as_view(), name="sentry-api-0-project-event-details", ), @@ -2248,7 +2248,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-event-file-committers", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/json/$", + r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/json/$", EventJsonEndpoint.as_view(), name="sentry-api-0-event-json", ), @@ -2318,17 +2318,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-filters-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/hooks/$", + r"^(?P[^\/]+)/(?P[^\/]+)/hooks/$", ProjectServiceHooksEndpoint.as_view(), name="sentry-api-0-service-hooks", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/hooks/(?P[^\/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/hooks/(?P[^\/]+)/$", ProjectServiceHookDetailsEndpoint.as_view(), name="sentry-api-0-project-service-hook-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/hooks/(?P[^\/]+)/stats/$", + r"^(?P[^\/]+)/(?P[^\/]+)/hooks/(?P[^\/]+)/stats/$", ProjectServiceHookStatsEndpoint.as_view(), ), re_path( @@ -2371,12 +2371,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: 
name="sentry-api-0-project-releases", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/release-thresholds/$", + r"^(?P[^\/]+)/(?P[^\/]+)/release-thresholds/$", ReleaseThresholdEndpoint.as_view(), name="sentry-api-0-project-release-thresholds", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/release-thresholds/(?P[^/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/release-thresholds/(?P[^/]+)/$", ReleaseThresholdDetailsEndpoint.as_view(), name="sentry-api-0-project-release-thresholds-details", ), @@ -2401,7 +2401,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-release-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/releases/(?P[^/]+)/commits/$", + r"^(?P[^\/]+)/(?P[^\/]+)/releases/(?P[^/]+)/commits/$", ProjectReleaseCommitsEndpoint.as_view(), name="sentry-api-0-project-release-commits", ), @@ -2431,12 +2431,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-artifact-bundle-file-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/releases/(?P[^/]+)/files/$", + r"^(?P[^\/]+)/(?P[^\/]+)/releases/(?P[^/]+)/files/$", ProjectReleaseFilesEndpoint.as_view(), name="sentry-api-0-project-release-files", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/releases/(?P[^/]+)/files/(?P[^/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/releases/(?P[^/]+)/files/(?P[^/]+)/$", ProjectReleaseFileDetailsEndpoint.as_view(), name="sentry-api-0-project-release-file-details", ), @@ -2451,37 +2451,37 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-rules", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/$", ProjectReplayDetailsEndpoint.as_view(), name="sentry-api-0-project-replay-details", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/viewed-by/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/viewed-by/$", ProjectReplayViewedByEndpoint.as_view(), name="sentry-api-0-project-replay-viewed-by", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/accessibility-issues/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/accessibility-issues/$", ProjectReplayAccessibilityIssuesEndpoint.as_view(), name="sentry-api-0-project-replay-accessibility-issues", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/clicks/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/clicks/$", ProjectReplayClicksIndexEndpoint.as_view(), name="sentry-api-0-project-replay-clicks-index", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/recording-segments/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/recording-segments/$", ProjectReplayRecordingSegmentIndexEndpoint.as_view(), name="sentry-api-0-project-replay-recording-segment-index", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/recording-segments/(?P\d+)/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/recording-segments/(?P\d+)/$", ProjectReplayRecordingSegmentDetailsEndpoint.as_view(), name="sentry-api-0-project-replay-recording-segment-details", ), re_path( - r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/videos/(?P\d+)/$", + r"^(?P[^/]+)/(?P[^\/]+)/replays/(?P[\w-]+)/videos/(?P\d+)/$", ProjectReplayVideoDetailsEndpoint.as_view(), name="sentry-api-0-project-replay-video-details", ), @@ -2531,12 +2531,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-rule-stats-index", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/rule-task/(?P[^\/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/rule-task/(?P[^\/]+)/$", ProjectRuleTaskDetailsEndpoint.as_view(), 
name="sentry-api-0-project-rule-task-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/stats/$", + r"^(?P[^\/]+)/(?P[^\/]+)/stats/$", ProjectStatsEndpoint.as_view(), name="sentry-api-0-project-stats", ), @@ -2546,17 +2546,17 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-symbol-sources", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/tags/$", + r"^(?P[^\/]+)/(?P[^\/]+)/tags/$", ProjectTagsEndpoint.as_view(), name="sentry-api-0-project-tags", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/tags/(?P[^/]+)/$", + r"^(?P[^\/]+)/(?P[^\/]+)/tags/(?P[^/]+)/$", ProjectTagKeyDetailsEndpoint.as_view(), name="sentry-api-0-project-tagkey-details", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/tags/(?P[^/]+)/values/$", + r"^(?P[^\/]+)/(?P[^\/]+)/tags/(?P[^/]+)/values/$", ProjectTagKeyValuesEndpoint.as_view(), name="sentry-api-0-project-tagkey-values", ), @@ -2576,7 +2576,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-project-transfer", ), re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/users/$", + r"^(?P[^\/]+)/(?P[^\/]+)/users/$", ProjectUsersEndpoint.as_view(), name="sentry-api-0-project-users", ), diff --git a/src/sentry/feedback/blueprints/api.md b/src/sentry/feedback/blueprints/api.md index 76d2ea07d50261..590df12679d6b7 100644 --- a/src/sentry/feedback/blueprints/api.md +++ b/src/sentry/feedback/blueprints/api.md @@ -14,7 +14,7 @@ returns a response _will_ document the full interchange format. Clients may opt to restrict response data or provide a subset of the request data. The API may or may not accept partial payloads. -## Feedback Index [/organizations//feedback/] +## Feedback Index [/organizations//feedback/] - Parameters @@ -155,7 +155,7 @@ Retrieve a collection of feedback items. 
} ``` -## Feedback [/projects///feedback//] +## Feedback [/projects///feedback//] - Model diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_details.py b/src/sentry/incidents/endpoints/organization_alert_rule_details.py index 29985aae28f2e5..79e613c97336c5 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_details.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_details.py @@ -335,7 +335,7 @@ def wrapper(self, request: Request, organization, alert_rule): @extend_schema( operation_id="Retrieve a Metric Alert Rule for an Organization", - parameters=[GlobalParams.ORG_SLUG, MetricAlertParams.METRIC_RULE_ID], + parameters=[GlobalParams.ORG_ID_OR_SLUG, MetricAlertParams.METRIC_RULE_ID], responses={ 200: AlertRuleSerializer, 401: RESPONSE_UNAUTHORIZED, @@ -360,7 +360,7 @@ def get(self, request: Request, organization, alert_rule) -> Response: @extend_schema( operation_id="Update a Metric Alert Rule", - parameters=[GlobalParams.ORG_SLUG, MetricAlertParams.METRIC_RULE_ID], + parameters=[GlobalParams.ORG_ID_OR_SLUG, MetricAlertParams.METRIC_RULE_ID], request=OrganizationAlertRuleDetailsPutSerializer, responses={ 200: AlertRuleSerializer, @@ -391,7 +391,7 @@ def put(self, request: Request, organization, alert_rule) -> Response: @extend_schema( operation_id="Delete a Metric Alert Rule", - parameters=[GlobalParams.ORG_SLUG, MetricAlertParams.METRIC_RULE_ID], + parameters=[GlobalParams.ORG_ID_OR_SLUG, MetricAlertParams.METRIC_RULE_ID], responses={ 202: RESPONSE_ACCEPTED, 401: RESPONSE_UNAUTHORIZED, diff --git a/src/sentry/replays/blueprints/api.md b/src/sentry/replays/blueprints/api.md index bd3c5a67b0b213..1e58fa793cca52 100644 --- a/src/sentry/replays/blueprints/api.md +++ b/src/sentry/replays/blueprints/api.md @@ -11,7 +11,7 @@ Host: https://sentry.io/api/0 This document is structured by resource with each resource having actions that can be performed against it. Every action that either accepts a request or returns a response WILL document the full interchange format. Clients may opt to restrict response data or provide a subset of the request data. The API may or may not accept partial payloads. -## Replays [/organizations//replays/] +## Replays [/organizations//replays/] - Parameters @@ -173,7 +173,7 @@ Retrieve a collection of replays. } ``` -## Replay [/projects///replays//] +## Replay [/projects///replays//] - Parameters - field (optional, string) @@ -244,7 +244,7 @@ Deletes a replay instance. - Response 204 -## Replay Accessibility Issues [/projects///replays//accessibility-issues] +## Replay Accessibility Issues [/projects///replays//accessibility-issues] This resource does not accept any URI parameters and is not paginated. Responses are ingested whole. @@ -336,7 +336,7 @@ IssueElementAlternative Type: } ``` -## Replay Selectors [/organizations//replay-selectors/] +## Replay Selectors [/organizations//replay-selectors/] - Parameters @@ -411,7 +411,7 @@ Retrieve a collection of selectors. } ``` -## Replay Recording Segments [/projects///replays//recording-segments/] +## Replay Recording Segments [/projects///replays//recording-segments/] - Parameters - per_page @@ -463,7 +463,7 @@ With download query argument, rrweb events JSON ] ``` -## Replay Recording Segment [/projects///replays//recording-segments//] +## Replay Recording Segment [/projects///replays//recording-segments//] - Parameters - download - Instruct the API to return a streaming bytes response. @@ -493,7 +493,7 @@ With download query argument. 
Content-Type application/octet-stream -## Replay Video [/projects///replays//videos//] +## Replay Video [/projects///replays//videos//] ### Fetch Replay Video [GET] @@ -505,7 +505,7 @@ Returns the bytes of a replay-segment video. \x00\x00\x00 ``` -## Replay Tag Keys [/projects///replays/tags/] +## Replay Tag Keys [/projects///replays/tags/] ### Fetch Tag Keys [GET] @@ -529,7 +529,7 @@ Retrieve a collection of tag keys associated with the replays dataset. ] ``` -## Replay Tag Values [/projects///replays/tags//values/] +## Replay Tag Values [/projects///replays/tags//values/] ### Fetch Tag Values [GET] @@ -559,7 +559,7 @@ Retrieve a collection of tag values associated with a tag key on the replays dat ] ``` -## Replay Click [/projects///replays//clicks/] +## Replay Click [/projects///replays//clicks/] Parameters: @@ -614,7 +614,7 @@ Retrieve a collection of click events associated with a replay. } ``` -## Replay Viewed By [/projects///replays//viewed-by/] +## Replay Viewed By [/projects///replays//viewed-by/] ### Fetch Replay Viewed By [GET] diff --git a/src/sentry/replays/endpoints/organization_replay_count.py b/src/sentry/replays/endpoints/organization_replay_count.py index 2d492e8480fe84..93d0bdeb494508 100644 --- a/src/sentry/replays/endpoints/organization_replay_count.py +++ b/src/sentry/replays/endpoints/organization_replay_count.py @@ -60,7 +60,7 @@ class OrganizationReplayCountEndpoint(OrganizationEventsV2EndpointBase): parameters=[ GlobalParams.END, GlobalParams.ENVIRONMENT, - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.START, GlobalParams.STATS_PERIOD, OrganizationParams.PROJECT, diff --git a/src/sentry/replays/endpoints/organization_replay_details.py b/src/sentry/replays/endpoints/organization_replay_details.py index 00f074fba83ec5..51656f5abc684b 100644 --- a/src/sentry/replays/endpoints/organization_replay_details.py +++ b/src/sentry/replays/endpoints/organization_replay_details.py @@ -36,7 +36,7 @@ class OrganizationReplayDetailsEndpoint(OrganizationEndpoint): @extend_schema( operation_id="Retrieve a Replay Instance", - parameters=[GlobalParams.ORG_SLUG, ReplayParams.REPLAY_ID, ReplayValidator], + parameters=[GlobalParams.ORG_ID_OR_SLUG, ReplayParams.REPLAY_ID, ReplayValidator], responses={ 200: inline_sentry_response_serializer("GetReplay", ReplayDetailsResponse), 400: RESPONSE_BAD_REQUEST, diff --git a/src/sentry/replays/endpoints/organization_replay_index.py b/src/sentry/replays/endpoints/organization_replay_index.py index 18692184936e3f..01f5cacbf83a26 100644 --- a/src/sentry/replays/endpoints/organization_replay_index.py +++ b/src/sentry/replays/endpoints/organization_replay_index.py @@ -34,7 +34,7 @@ class OrganizationReplayIndexEndpoint(OrganizationEndpoint): @extend_schema( operation_id="List an Organization's Replays", - parameters=[GlobalParams.ORG_SLUG, ReplayValidator], + parameters=[GlobalParams.ORG_ID_OR_SLUG, ReplayValidator], responses={ 200: inline_sentry_response_serializer("ListReplays", list[ReplayDetailsResponse]), 400: RESPONSE_BAD_REQUEST, diff --git a/src/sentry/replays/endpoints/organization_replay_selector_index.py b/src/sentry/replays/endpoints/organization_replay_selector_index.py index 8944b82c948923..8ca259004b7895 100644 --- a/src/sentry/replays/endpoints/organization_replay_selector_index.py +++ b/src/sentry/replays/endpoints/organization_replay_selector_index.py @@ -95,7 +95,7 @@ def get_replay_filter_params(self, request, organization): @extend_schema( operation_id="List an Organization's Selectors", parameters=[ - 
GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.ENVIRONMENT, ReplaySelectorValidator, CursorQueryParam, diff --git a/src/sentry/replays/endpoints/project_replay_clicks_index.py b/src/sentry/replays/endpoints/project_replay_clicks_index.py index 2c63b3be7d14ad..9e9b22a23dc7cb 100644 --- a/src/sentry/replays/endpoints/project_replay_clicks_index.py +++ b/src/sentry/replays/endpoints/project_replay_clicks_index.py @@ -68,7 +68,7 @@ class ProjectReplayClicksIndexEndpoint(ProjectEndpoint): operation_id="List Clicked Nodes", parameters=[ CursorQueryParam, - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, GlobalParams.ENVIRONMENT, ReplayParams.REPLAY_ID, diff --git a/src/sentry/replays/endpoints/project_replay_details.py b/src/sentry/replays/endpoints/project_replay_details.py index ea72baf56a89ff..07269ecd25eee4 100644 --- a/src/sentry/replays/endpoints/project_replay_details.py +++ b/src/sentry/replays/endpoints/project_replay_details.py @@ -73,7 +73,7 @@ def get(self, request: Request, project: Project, replay_id: str) -> Response: @extend_schema( operation_id="Delete a Replay Instance", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ReplayParams.REPLAY_ID, ], diff --git a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py index 983cf03e7fd434..3d90c05c533597 100644 --- a/src/sentry/replays/endpoints/project_replay_recording_segment_details.py +++ b/src/sentry/replays/endpoints/project_replay_recording_segment_details.py @@ -34,7 +34,7 @@ class ProjectReplayRecordingSegmentDetailsEndpoint(ProjectEndpoint): @extend_schema( operation_id="Fetch Recording Segment", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ReplayParams.REPLAY_ID, ReplayParams.SEGMENT_ID, diff --git a/src/sentry/replays/endpoints/project_replay_recording_segment_index.py b/src/sentry/replays/endpoints/project_replay_recording_segment_index.py index 0359c325108eac..fee02128577774 100644 --- a/src/sentry/replays/endpoints/project_replay_recording_segment_index.py +++ b/src/sentry/replays/endpoints/project_replay_recording_segment_index.py @@ -36,7 +36,7 @@ def __init__(self, **options) -> None: operation_id="List Recording Segments", parameters=[ CursorQueryParam, - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ReplayParams.REPLAY_ID, VisibilityParams.PER_PAGE, diff --git a/src/sentry/replays/endpoints/project_replay_video_details.py b/src/sentry/replays/endpoints/project_replay_video_details.py index 03fd12d1ddaf35..3dcf4d6e66e01e 100644 --- a/src/sentry/replays/endpoints/project_replay_video_details.py +++ b/src/sentry/replays/endpoints/project_replay_video_details.py @@ -41,7 +41,7 @@ class ProjectReplayVideoDetailsEndpoint(ProjectEndpoint): @extend_schema( operation_id="Fetch Replay Video", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ReplayParams.REPLAY_ID, ReplayParams.SEGMENT_ID, diff --git a/src/sentry/replays/endpoints/project_replay_viewed_by.py b/src/sentry/replays/endpoints/project_replay_viewed_by.py index c7a6cd409d0bcc..7c5ab9b96f8482 100644 --- a/src/sentry/replays/endpoints/project_replay_viewed_by.py +++ b/src/sentry/replays/endpoints/project_replay_viewed_by.py @@ -41,7 +41,7 @@ class ProjectReplayViewedByEndpoint(ProjectEndpoint): @extend_schema( 
operation_id="Get list of user who have viewed a replay", parameters=[ - GlobalParams.ORG_SLUG, + GlobalParams.ORG_ID_OR_SLUG, GlobalParams.PROJECT_ID_OR_SLUG, ReplayParams.REPLAY_ID, ], diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 3727ffde44d30f..bfc39b0e3215c7 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -857,13 +857,15 @@ def enable_org_2fa(self, organization): def api_enable_org_2fa(self, organization, user): self.login_as(user) url = reverse( - "sentry-api-0-organization-details", kwargs={"organization_slug": organization.slug} + "sentry-api-0-organization-details", + kwargs={"organization_id_or_slug": organization.slug}, ) return self.client.put(url, data={"require2FA": True}) def api_disable_org_2fa(self, organization, user): url = reverse( - "sentry-api-0-organization-details", kwargs={"organization_slug": organization.slug} + "sentry-api-0-organization-details", + kwargs={"organization_id_or_slug": organization.slug}, ) return self.client.put(url, data={"require2FA": False}) @@ -1193,7 +1195,10 @@ def setUp(self): self.init_path = reverse( "sentry-organization-integrations-setup", - kwargs={"organization_slug": self.organization.slug, "provider_id": self.provider.key}, + kwargs={ + "organization_slug": self.organization.slug, + "provider_id": self.provider.key, + }, ) self.setup_path = reverse( diff --git a/tests/apidocs/endpoints/events/test_project_event_details.py b/tests/apidocs/endpoints/events/test_project_event_details.py index d6aaea37593e19..43480e91bc93f2 100644 --- a/tests/apidocs/endpoints/events/test_project_event_details.py +++ b/tests/apidocs/endpoints/events/test_project_event_details.py @@ -17,7 +17,7 @@ def setUp(self): self.url = reverse( self.endpoint, kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "event_id": event.event_id, }, diff --git a/tests/apidocs/endpoints/events/test_project_tagkey_values.py b/tests/apidocs/endpoints/events/test_project_tagkey_values.py index 9672aeea8e6730..54c3a946cd4627 100644 --- a/tests/apidocs/endpoints/events/test_project_tagkey_values.py +++ b/tests/apidocs/endpoints/events/test_project_tagkey_values.py @@ -14,7 +14,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-tagkey-values", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "key": key, }, diff --git a/tests/apidocs/endpoints/organizations/test_event_id_lookup.py b/tests/apidocs/endpoints/organizations/test_event_id_lookup.py index 3042b5d4bf102d..ee04b4b77b129d 100644 --- a/tests/apidocs/endpoints/organizations/test_event_id_lookup.py +++ b/tests/apidocs/endpoints/organizations/test_event_id_lookup.py @@ -9,7 +9,7 @@ def setUp(self): event = self.create_event("a", message="oh no") self.url = reverse( "sentry-api-0-event-id-lookup", - kwargs={"organization_slug": self.organization.slug, "event_id": event.event_id}, + kwargs={"organization_id_or_slug": self.organization.slug, "event_id": event.event_id}, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/organizations/test_org_details.py b/tests/apidocs/endpoints/organizations/test_org_details.py index 84d56815aaa800..92185fada732a7 100644 --- a/tests/apidocs/endpoints/organizations/test_org_details.py +++ b/tests/apidocs/endpoints/organizations/test_org_details.py @@ -10,7 +10,7 @@ def setUp(self): 
self.url = reverse( "sentry-api-0-organization-details", - kwargs={"organization_slug": organization.slug}, + kwargs={"organization_id_or_slug": organization.slug}, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/organizations/test_org_repos.py b/tests/apidocs/endpoints/organizations/test_org_repos.py index 8601854f2bf3ae..e2299c23cf4172 100644 --- a/tests/apidocs/endpoints/organizations/test_org_repos.py +++ b/tests/apidocs/endpoints/organizations/test_org_repos.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-repositories", - kwargs={"organization_slug": organization.slug}, + kwargs={"organization_id_or_slug": organization.slug}, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/organizations/test_repo_commits.py b/tests/apidocs/endpoints/organizations/test_repo_commits.py index 272d6acd699865..31b0c95bc4b920 100644 --- a/tests/apidocs/endpoints/organizations/test_repo_commits.py +++ b/tests/apidocs/endpoints/organizations/test_repo_commits.py @@ -14,7 +14,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-repository-commits", - kwargs={"organization_slug": organization.slug, "repo_id": repo.id}, + kwargs={"organization_id_or_slug": organization.slug, "repo_id": repo.id}, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/organizations/test_shortid.py b/tests/apidocs/endpoints/organizations/test_shortid.py index 08fc89c37236f9..ad8895cabc1d8a 100644 --- a/tests/apidocs/endpoints/organizations/test_shortid.py +++ b/tests/apidocs/endpoints/organizations/test_shortid.py @@ -11,7 +11,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-short-id-lookup", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "short_id": group.qualified_short_id, }, ) diff --git a/tests/apidocs/endpoints/projects/test_project_stats.py b/tests/apidocs/endpoints/projects/test_project_stats.py index 111882371a7089..4e947f6d5b80c6 100644 --- a/tests/apidocs/endpoints/projects/test_project_stats.py +++ b/tests/apidocs/endpoints/projects/test_project_stats.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-stats", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/apidocs/endpoints/projects/test_service_hook_details.py b/tests/apidocs/endpoints/projects/test_service_hook_details.py index 246edbc1e588eb..8bdaac91f26ffe 100644 --- a/tests/apidocs/endpoints/projects/test_service_hook_details.py +++ b/tests/apidocs/endpoints/projects/test_service_hook_details.py @@ -11,7 +11,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-service-hook-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "hook_id": hook.guid, }, diff --git a/tests/apidocs/endpoints/projects/test_service_hooks.py b/tests/apidocs/endpoints/projects/test_service_hooks.py index 6db62b94c3b350..f324eb7216eb83 100644 --- a/tests/apidocs/endpoints/projects/test_service_hooks.py +++ b/tests/apidocs/endpoints/projects/test_service_hooks.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-service-hooks", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git 
a/tests/apidocs/endpoints/projects/test_tag_values.py b/tests/apidocs/endpoints/projects/test_tag_values.py index ca61354ed7ae90..cd75f59b7077ec 100644 --- a/tests/apidocs/endpoints/projects/test_tag_values.py +++ b/tests/apidocs/endpoints/projects/test_tag_values.py @@ -12,7 +12,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-tagkey-values", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "key": key, }, diff --git a/tests/apidocs/endpoints/projects/test_users.py b/tests/apidocs/endpoints/projects/test_users.py index 1749567835282c..35e16c43eadb5d 100644 --- a/tests/apidocs/endpoints/projects/test_users.py +++ b/tests/apidocs/endpoints/projects/test_users.py @@ -18,7 +18,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-users", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ) diff --git a/tests/apidocs/endpoints/releases/test_deploys.py b/tests/apidocs/endpoints/releases/test_deploys.py index c8ad56c2fbb7e6..07124f370c1669 100644 --- a/tests/apidocs/endpoints/releases/test_deploys.py +++ b/tests/apidocs/endpoints/releases/test_deploys.py @@ -48,7 +48,10 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-release-deploys", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py b/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py index 0626a9303da72e..902f0cd23a9693 100644 --- a/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py +++ b/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py @@ -36,7 +36,10 @@ def setUp(self): ) self.url = reverse( "sentry-api-0-release-commitfilechange", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/releases/test_organization_release_commits.py b/tests/apidocs/endpoints/releases/test_organization_release_commits.py index 1df1568853f7d2..d458fd47addf74 100644 --- a/tests/apidocs/endpoints/releases/test_organization_release_commits.py +++ b/tests/apidocs/endpoints/releases/test_organization_release_commits.py @@ -26,7 +26,10 @@ def setUp(self): ) self.url = reverse( "sentry-api-0-organization-release-commits", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/releases/test_organization_release_details.py b/tests/apidocs/endpoints/releases/test_organization_release_details.py index 8488b1ac4d0337..a162c8f43849e1 100644 --- a/tests/apidocs/endpoints/releases/test_organization_release_details.py +++ b/tests/apidocs/endpoints/releases/test_organization_release_details.py @@ -32,7 +32,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, 
"version": release.version}, ) def test_get(self): diff --git a/tests/apidocs/endpoints/releases/test_organization_release_file_details.py b/tests/apidocs/endpoints/releases/test_organization_release_file_details.py index bdb558b1035cd0..0918b57996635c 100644 --- a/tests/apidocs/endpoints/releases/test_organization_release_file_details.py +++ b/tests/apidocs/endpoints/releases/test_organization_release_file_details.py @@ -24,7 +24,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, "file_id": releasefile.id, }, diff --git a/tests/apidocs/endpoints/releases/test_organization_release_files.py b/tests/apidocs/endpoints/releases/test_organization_release_files.py index 781725c3d62103..5f434d4fedd6aa 100644 --- a/tests/apidocs/endpoints/releases/test_organization_release_files.py +++ b/tests/apidocs/endpoints/releases/test_organization_release_files.py @@ -22,7 +22,10 @@ def setUp(self): self.url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) diff --git a/tests/apidocs/endpoints/releases/test_organization_releases.py b/tests/apidocs/endpoints/releases/test_organization_releases.py index b096f46c2becd6..ef9da5d846deeb 100644 --- a/tests/apidocs/endpoints/releases/test_organization_releases.py +++ b/tests/apidocs/endpoints/releases/test_organization_releases.py @@ -42,7 +42,7 @@ def setUp(self): release2.add_project(self.project2) self.url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} ) def test_get(self): diff --git a/tests/apidocs/endpoints/releases/test_project_release_commits.py b/tests/apidocs/endpoints/releases/test_project_release_commits.py index 21923bfb6b51d5..6db546d16b13e1 100644 --- a/tests/apidocs/endpoints/releases/test_project_release_commits.py +++ b/tests/apidocs/endpoints/releases/test_project_release_commits.py @@ -27,7 +27,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-release-commits", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, diff --git a/tests/apidocs/endpoints/releases/test_project_release_file_details.py b/tests/apidocs/endpoints/releases/test_project_release_file_details.py index fa395364cf8ea0..c9ba02d24972ec 100644 --- a/tests/apidocs/endpoints/releases/test_project_release_file_details.py +++ b/tests/apidocs/endpoints/releases/test_project_release_file_details.py @@ -24,7 +24,7 @@ def setUp(self): "sentry-api-0-project-release-file-details", kwargs={ "project_id_or_slug": project.slug, - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, "file_id": releasefile.id, }, diff --git a/tests/apidocs/endpoints/releases/test_project_release_files.py b/tests/apidocs/endpoints/releases/test_project_release_files.py index 0f7519e6a11fc1..f5cb575b958c79 100644 --- a/tests/apidocs/endpoints/releases/test_project_release_files.py +++ b/tests/apidocs/endpoints/releases/test_project_release_files.py @@ -25,7 +25,7 @@ 
def setUp(self): "sentry-api-0-project-release-files", kwargs={ "project_id_or_slug": project.slug, - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, }, ) diff --git a/tests/sentry/api/endpoints/release_thresholds/test_release_threshold.py b/tests/sentry/api/endpoints/release_thresholds/test_release_threshold.py index d8fa44ce187a88..f6c1aa3292f1ee 100644 --- a/tests/sentry/api/endpoints/release_thresholds/test_release_threshold.py +++ b/tests/sentry/api/endpoints/release_thresholds/test_release_threshold.py @@ -21,7 +21,7 @@ def setUp(self): self.url = reverse( "sentry-api-0-project-release-thresholds", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, }, ) @@ -75,7 +75,7 @@ def test_post_invalid_project(self): url_with_invalid_project = reverse( "sentry-api-0-project-release-thresholds", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": "Why did it have to be snakes?", }, ) @@ -156,7 +156,7 @@ def test_get_invalid_project(self): url_with_invalid_project = reverse( "sentry-api-0-project-release-thresholds", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": "Why did it have to be snakes?", }, ) diff --git a/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_details.py b/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_details.py index 67c320f64b57a0..bd073f9ca49dee 100644 --- a/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_details.py +++ b/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_details.py @@ -32,7 +32,7 @@ def test_invalid_threshold_id(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": 123, }, @@ -45,7 +45,7 @@ def test_invalid_project(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": "kingdom_of_the_crystal_skull", "release_threshold": self.basic_threshold.id, }, @@ -58,7 +58,7 @@ def test_valid(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -99,7 +99,7 @@ def test_invalid_threshold_id(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": 123, }, @@ -112,7 +112,7 @@ def test_invalid_project(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": "kingdom_of_the_crystal_skull", "release_threshold": self.basic_threshold.id, }, @@ -125,7 +125,7 @@ def test_valid(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": 
self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -161,7 +161,7 @@ def test_invalid_threshold_id(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": 123, }, @@ -174,7 +174,7 @@ def test_invalid_missing_data(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -191,7 +191,7 @@ def test_invalid_trigger_type(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -212,7 +212,7 @@ def test_invalid_threshold_type(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -233,7 +233,7 @@ def test_invalid_window(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -254,7 +254,7 @@ def test_invalid_project(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": "kingdom_of_the_crystal_skull", "release_threshold": self.basic_threshold.id, }, @@ -277,7 +277,7 @@ def test_valid(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, @@ -308,7 +308,7 @@ def test_valid_with_extra_data(self): url = reverse( "sentry-api-0-project-release-thresholds-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "release_threshold": self.basic_threshold.id, }, diff --git a/tests/sentry/api/endpoints/test_organization_release_commits.py b/tests/sentry/api/endpoints/test_organization_release_commits.py index b9aad7a7617c75..52016245828d5a 100644 --- a/tests/sentry/api/endpoints/test_organization_release_commits.py +++ b/tests/sentry/api/endpoints/test_organization_release_commits.py @@ -27,7 +27,10 @@ def test_simple(self): ) url = reverse( "sentry-api-0-organization-release-commits", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) diff --git a/tests/sentry/api/endpoints/test_organization_release_details.py b/tests/sentry/api/endpoints/test_organization_release_details.py index 1c58c3d677142b..fd549dfc9fedeb 100644 
--- a/tests/sentry/api/endpoints/test_organization_release_details.py +++ b/tests/sentry/api/endpoints/test_organization_release_details.py @@ -67,7 +67,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url) @@ -81,7 +81,7 @@ def test_simple(self): # no access url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release2.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release2.version}, ) response = self.client.get(url) assert response.status_code == 404 @@ -103,7 +103,7 @@ def test_multiple_projects(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url) @@ -121,7 +121,7 @@ def test_wrong_project(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url, {"project": project2.id}) @@ -145,7 +145,7 @@ def test_correct_project_contains_current_project_meta(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url, {"project": self.project1.id}) @@ -171,7 +171,7 @@ def test_incorrect_sort_option_should_return_invalid_sort_response(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url, {"project": self.project1.id, "sort": "invalid_sort"}) assert response.status_code == 400 @@ -199,7 +199,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort(self): # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_2.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_2.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -209,7 +212,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort(self): # Test for first release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_3.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_3.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -219,7 +225,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort(self): # Test for last release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_1.version}, + kwargs={ + 
"organization_id_or_slug": self.organization.slug, + "version": release_1.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -249,7 +258,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_with_same_dat # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_1.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_1.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -259,7 +271,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_with_same_dat # Test for first release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_2.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_2.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -309,7 +324,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_env_filter_ap # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_3.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_3.version, + }, ) response = self.client.get(url, {"project": self.project1.id, "environment": ["prod"]}) assert response.status_code == 200 @@ -361,7 +379,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_status_filter # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_3.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_3.version, + }, ) response = self.client.get(url, {"project": self.project1.id, "status": "archived"}) assert response.status_code == 200 @@ -395,7 +416,10 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_query_filter_ # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_2.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_2.version, + }, ) response = self.client.get(url, {"project": self.project1.id, "query": "foobar@1"}) assert response.status_code == 200 @@ -432,7 +456,10 @@ def test_get_prev_and_next_release_on_date_sort_does_not_apply_stats_period_filt # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_1.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_1.version, + }, ) response = self.client.get(url, {"project": self.project1.id, "summaryStatsPeriod": "24h"}) assert response.status_code == 200 @@ -460,7 +487,10 @@ def test_get_first_and_last_release_on_date_sort(self): # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_1.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": 
release_1.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -496,7 +526,10 @@ def test_get_first_and_last_release_on_date_sort_with_exact_same_date(self): # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_1.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_1.version, + }, ) response = self.client.get(url, {"project": self.project1.id}) assert response.status_code == 200 @@ -543,7 +576,10 @@ def test_get_first_and_last_release_on_date_sort_env_filter_applied(self): # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_3.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_3.version, + }, ) response = self.client.get(url, {"project": self.project1.id, "environment": ["prod"]}) assert response.status_code == 200 @@ -566,7 +602,10 @@ def test_get_first_and_last_release_on_non_date_sort(self): # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release_1.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": release_1.version, + }, ) response = self.client.get(url, {"project": self.project1.id, "sort": "sessions"}) assert response.status_code == 400 @@ -591,7 +630,7 @@ def test_get_first_and_last_release_when_project_has_no_releases(self): # Test for middle release of the list url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url, {"project": self.project1.id, "environment": ["test"]}) assert response.status_code == 200 @@ -614,7 +653,7 @@ def test_with_adoption_stages(self): release1.add_project(project1) url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": self.organization.slug, "version": release1.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release1.version}, ) response = self.client.get(url, format="json") @@ -662,7 +701,7 @@ def test_simple(self, mock_fetch_commits): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": base_release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": base_release.version}, ) self.client.put( url, @@ -677,7 +716,7 @@ def test_simple(self, mock_fetch_commits): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, @@ -711,7 +750,7 @@ def test_simple(self, mock_fetch_commits): # no access url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release2.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release2.version}, ) response = self.client.put(url, {"ref": "master"}) assert response.status_code == 404 @@ -750,7 +789,7 @@ def test_deprecated_head_commits(self, mock_fetch_commits): url = reverse( 
"sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": base_release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": base_release.version}, ) self.client.put( url, @@ -765,7 +804,7 @@ def test_deprecated_head_commits(self, mock_fetch_commits): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, @@ -799,7 +838,7 @@ def test_deprecated_head_commits(self, mock_fetch_commits): # no access url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release2.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release2.version}, ) response = self.client.put(url, {"ref": "master"}) assert response.status_code == 404 @@ -820,7 +859,7 @@ def test_commits(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put(url, data={"commits": [{"id": "a" * 40}, {"id": "b" * 40}]}) @@ -851,7 +890,7 @@ def test_commits_patchset_character_limit_255(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, @@ -892,7 +931,7 @@ def test_commits_patchset_character_limit_reached(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, @@ -930,7 +969,7 @@ def test_commits_lock_conflict(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put(url, data={"commits": [{"id": "a" * 40}, {"id": "b" * 40}]}) assert response.status_code == 409, (response.status_code, response.content) @@ -956,7 +995,7 @@ def test_release_archiving(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put(url, data={"status": "archived"}) @@ -984,7 +1023,7 @@ def test_activity_generation(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put(url, data={"dateReleased": datetime.now(UTC).isoformat()}) @@ -1018,7 +1057,7 @@ def test_activity_generation_long_release(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put(url, data={"dateReleased": datetime.now(UTC).isoformat()}) @@ -1061,7 +1100,7 @@ def test_org_auth_token(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": 
base_release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": base_release.version}, ) self.client.put( url, @@ -1076,7 +1115,7 @@ def test_org_auth_token(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, @@ -1123,7 +1162,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.delete(url) @@ -1153,7 +1192,7 @@ def test_existing_group(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.delete(url) @@ -1178,7 +1217,7 @@ def test_bad_repo_name(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, @@ -1209,7 +1248,7 @@ def test_bad_commit_list(self): url = reverse( "sentry-api-0-organization-release-details", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.put( url, diff --git a/tests/sentry/api/endpoints/test_organization_release_file_details.py b/tests/sentry/api/endpoints/test_organization_release_file_details.py index 4e8b8d0ca3c8c0..96635ec40aa994 100644 --- a/tests/sentry/api/endpoints/test_organization_release_file_details.py +++ b/tests/sentry/api/endpoints/test_organization_release_file_details.py @@ -29,7 +29,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, "file_id": releasefile.id, }, @@ -64,7 +64,7 @@ def test_file_download(self): url = reverse( "sentry-api-0-organization-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, "file_id": releasefile.id, }, @@ -86,7 +86,7 @@ def _get(self, file_id): url = reverse( "sentry-api-0-organization-release-file-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "version": self.release.version, "file_id": file_id, }, @@ -138,7 +138,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, "file_id": releasefile.id, }, @@ -179,7 +179,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "version": release.version, "file_id": releasefile.id, }, diff --git a/tests/sentry/api/endpoints/test_organization_release_files.py b/tests/sentry/api/endpoints/test_organization_release_files.py index 37f508d7ce7dfb..f26c99288e31d2 100644 --- 
a/tests/sentry/api/endpoints/test_organization_release_files.py +++ b/tests/sentry/api/endpoints/test_organization_release_files.py @@ -24,7 +24,10 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -56,7 +59,10 @@ def test_name_search(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -115,7 +121,10 @@ def test_checksum_search(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -172,7 +181,10 @@ def test_queries_should_be_narrowing_search(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -204,7 +216,10 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -241,7 +256,10 @@ def test_no_file(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -260,7 +278,10 @@ def test_missing_name(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -289,7 +310,10 @@ def test_invalid_name(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -316,7 +340,10 @@ def test_bad_headers(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -359,7 +386,10 @@ def test_duplicate_file(self): url = reverse( "sentry-api-0-organization-release-files", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) diff --git a/tests/sentry/api/endpoints/test_organization_release_meta.py 
b/tests/sentry/api/endpoints/test_organization_release_meta.py index c2a88091e20b85..98329dea44b408 100644 --- a/tests/sentry/api/endpoints/test_organization_release_meta.py +++ b/tests/sentry/api/endpoints/test_organization_release_meta.py @@ -70,7 +70,7 @@ def test_multiple_projects(self): url = reverse( "sentry-api-0-organization-release-meta", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.get(url) @@ -104,7 +104,7 @@ def test_artifact_count_without_weak_association(self): url = reverse( "sentry-api-0-organization-release-meta", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.get(url) @@ -142,7 +142,7 @@ def test_artifact_count_with_single_weak_association(self): url = reverse( "sentry-api-0-organization-release-meta", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.get(url) @@ -193,7 +193,7 @@ def test_artifact_count_with_multiple_weak_association(self): url = reverse( "sentry-api-0-organization-release-meta", - kwargs={"organization_slug": org.slug, "version": release.version}, + kwargs={"organization_id_or_slug": org.slug, "version": release.version}, ) response = self.client.get(url) diff --git a/tests/sentry/api/endpoints/test_organization_release_previous_commits.py b/tests/sentry/api/endpoints/test_organization_release_previous_commits.py index 80cee1409f0632..8585d4df8ee837 100644 --- a/tests/sentry/api/endpoints/test_organization_release_previous_commits.py +++ b/tests/sentry/api/endpoints/test_organization_release_previous_commits.py @@ -49,7 +49,10 @@ def setUp(self): new_release.add_project(self.project2) self.url = reverse( "sentry-api-0-organization-release-previous-with-commits", - kwargs={"organization_slug": self.organization.slug, "version": new_release.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": new_release.version, + }, ) def test_previous_release_has_commits(self): @@ -68,7 +71,10 @@ def test_no_previous_release_with_commit(self): new_release.add_project(self.project2) url = reverse( "sentry-api-0-organization-release-previous-with-commits", - kwargs={"organization_slug": self.organization.slug, "version": new_release.version}, + kwargs={ + "organization_id_or_slug": self.organization.slug, + "version": new_release.version, + }, ) response = self.client.get(url) assert response.status_code == 200, response.content @@ -80,7 +86,7 @@ def test_wrong_release_version(self): url = reverse( "sentry-api-0-organization-release-previous-with-commits", - kwargs={"organization_slug": self.organization.slug, "version": release.version}, + kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version}, ) response = self.client.get(url) diff --git a/tests/sentry/api/endpoints/test_organization_releases.py b/tests/sentry/api/endpoints/test_organization_releases.py index 971890aa0fd86b..4e51e37f7c5c47 100644 --- a/tests/sentry/api/endpoints/test_organization_releases.py +++ b/tests/sentry/api/endpoints/test_organization_releases.py @@ -373,14 +373,18 @@ def test_query_filter_suffix(self): ) release.add_project(project) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + 
"sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.get(url + "?query=1.0+(1234)", format="json") assert response.status_code == 200, response.content assert len(response.data) == 1 assert response.data[0]["version"] == release.version - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.get(url + "?query=1.0%2B1234", format="json") assert response.status_code == 200, response.content @@ -739,7 +743,7 @@ def test_archive_release(self): self.login_as(user=self.user) url = reverse( "sentry-api-0-organization-releases", - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) # test legacy status value of None (=open) @@ -810,7 +814,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-releases-stats", - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) response = self.client.get(url, format="json") @@ -869,7 +873,7 @@ def test_release_list_order_by_date_added(self): url = reverse( "sentry-api-0-organization-releases-stats", - kwargs={"organization_slug": self.organization.slug}, + kwargs={"organization_id_or_slug": self.organization.slug}, ) response = self.client.get(url, format="json") @@ -893,7 +897,9 @@ def test_with_adoption_stages(self): date_added=datetime(2013, 8, 13, 3, 8, 24, 880386, tzinfo=UTC), ) release1.add_project(project1) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.get(url, format="json") @@ -1125,7 +1131,9 @@ def test_empty_release_version(self): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={"version": "", "projects": [project.slug, project2.slug]} ) @@ -1145,7 +1153,9 @@ def test_minimal(self): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={"version": "1.2.1", "projects": [project.slug, project2.slug]}, @@ -1180,7 +1190,9 @@ def test_duplicate(self): release = Release.objects.create(version="1.2.1", organization=org) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) with self.tasks(): response = self.client.post( @@ -1272,7 +1284,9 @@ def test_activity(self): ) release.add_project(project) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post(url, data={"version": "1.2.1", "projects": [project.slug]}) assert response.status_code == 208, response.content @@ 
-1308,7 +1322,9 @@ def test_activity_with_long_release(self): ) release.add_project(project) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post(url, data={"version": "x" * 65, "projects": [project.slug]}) assert response.status_code == 208, response.content @@ -1338,7 +1354,9 @@ def test_version_whitespace(self): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post(url, data={"version": "1.2.3\n", "projects": [project.slug]}) assert response.status_code == 400, response.content @@ -1375,7 +1393,9 @@ def test_features(self): self.create_member(teams=[team], user=self.user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={"version": "1.2.1", "owner": self.user.email, "projects": [project.slug]} ) @@ -1398,7 +1418,9 @@ def test_commits(self): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={ @@ -1442,7 +1464,9 @@ def test_commits_from_provider(self, mock_fetch_commits): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) self.client.post( url, data={ @@ -1499,7 +1523,9 @@ def test_commits_from_provider_deprecated_head_commits(self, mock_fetch_commits) self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) self.client.post( url, data={ @@ -1555,7 +1581,9 @@ def test_commits_lock_conflict(self): lock = locks.get(Release.get_lock_key(org.id, release.id), duration=10, name="release") lock.acquire() - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={ @@ -1579,7 +1607,9 @@ def test_bad_project_slug(self): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={"version": "1.2.1", "projects": [project.slug, "banana"]} ) @@ -1623,7 +1653,9 @@ def test_project_permissions(self): ) release3.add_project(project1) - url = 
reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={"version": "1.2.1", "projects": [project1.slug, project2.slug]} ) @@ -1651,7 +1683,9 @@ def test_api_key(self): ) release1.add_project(project1) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) # test right org, wrong permissions level with assume_test_silo_mode(SiloMode.CONTROL): @@ -1703,7 +1737,9 @@ def test_org_auth_token(self): ) release1.add_project(project1) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) # test right org, wrong permissions level with assume_test_silo_mode(SiloMode.CONTROL): @@ -1794,7 +1830,9 @@ def test_api_token(self, mock_fetch_commits): ) release1.add_project(project1) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, @@ -1835,7 +1873,9 @@ def test_bad_repo_name(self): self.create_member(teams=[team], user=user, organization=org) self.login_as(user=user) - url = reverse("sentry-api-0-organization-releases", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": org.slug} + ) response = self.client.post( url, data={ @@ -1852,7 +1892,7 @@ class OrganizationReleaseCommitRangesTest(SetRefsTestCase): def setUp(self): super().setUp() self.url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) @patch("sentry.tasks.commits.fetch_commits") @@ -2046,7 +2086,7 @@ def assert_releases(self, response, releases): def test_environments_filter(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) response = self.client.get(url + "?environment=" + self.env1.name, format="json") self.assert_releases(response, [self.release1, self.release5]) @@ -2056,7 +2096,7 @@ def test_environments_filter(self): def test_empty_environment(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) env = self.make_environment("", self.project2) ReleaseProjectEnvironment.objects.create( @@ -2067,7 +2107,7 @@ def test_empty_environment(self): def test_all_environments(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) response = self.client.get(url, format="json") self.assert_releases( @@ -2076,14 +2116,14 @@ def test_all_environments(self): def test_invalid_environment(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} 
) response = self.client.get(url + "?environment=" + "invalid_environment", format="json") assert response.status_code == 404 def test_specify_project_ids(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) response = self.client.get(url, format="json", data={"project": self.project1.id}) self.assert_releases(response, [self.release1, self.release3, self.release5]) @@ -2098,7 +2138,7 @@ def test_specify_project_ids(self): def test_date_range(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) response = self.client.get( url, @@ -2112,7 +2152,7 @@ def test_date_range(self): def test_invalid_date_range(self): url = reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) response = self.client.get(url, format="json", data={"start": "null", "end": "null"}) assert response.status_code == 400 @@ -2122,7 +2162,7 @@ class OrganizationReleaseCreateCommitPatch(ReleaseCommitPatchTest): @cached_property def url(self): return reverse( - "sentry-api-0-organization-releases", kwargs={"organization_slug": self.org.slug} + "sentry-api-0-organization-releases", kwargs={"organization_id_or_slug": self.org.slug} ) def test_commits_with_patch_set(self): diff --git a/tests/sentry/api/endpoints/test_organization_shortid.py b/tests/sentry/api/endpoints/test_organization_shortid.py index 2b162f37513494..7d412f44744849 100644 --- a/tests/sentry/api/endpoints/test_organization_shortid.py +++ b/tests/sentry/api/endpoints/test_organization_shortid.py @@ -15,7 +15,7 @@ def test_simple(self): self.login_as(user=self.user) url = reverse( "sentry-api-0-short-id-lookup", - kwargs={"organization_slug": org.slug, "short_id": group.qualified_short_id}, + kwargs={"organization_id_or_slug": org.slug, "short_id": group.qualified_short_id}, ) response = self.client.get(url, format="json") diff --git a/tests/sentry/api/endpoints/test_project_release_file_details.py b/tests/sentry/api/endpoints/test_project_release_file_details.py index a1d07ef155e75b..78bd0924f42f98 100644 --- a/tests/sentry/api/endpoints/test_project_release_file_details.py +++ b/tests/sentry/api/endpoints/test_project_release_file_details.py @@ -41,7 +41,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, "file_id": releasefile.id, @@ -77,7 +77,7 @@ def test_file_download(self): url = reverse( "sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, "file_id": releasefile.id, @@ -121,7 +121,7 @@ def _get(self, file_id, postfix=""): url = reverse( "sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, "file_id": file_id, @@ -204,7 +204,7 @@ def test_simple(self): url = reverse( 
"sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, "file_id": releasefile.id, @@ -229,7 +229,7 @@ def test_update_archived(self): url = reverse( "sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, "file_id": id, @@ -266,7 +266,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, "file_id": releasefile.id, @@ -290,7 +290,7 @@ def test_delete_archived(self): url = lambda id: reverse( "sentry-api-0-project-release-file-details", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, "file_id": id, diff --git a/tests/sentry/api/endpoints/test_project_release_files.py b/tests/sentry/api/endpoints/test_project_release_files.py index a350c415924dbe..482469614ecf7b 100644 --- a/tests/sentry/api/endpoints/test_project_release_files.py +++ b/tests/sentry/api/endpoints/test_project_release_files.py @@ -28,7 +28,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -49,7 +49,7 @@ def test_with_archive(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -131,7 +131,7 @@ def test_sort_order(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, }, @@ -148,7 +148,7 @@ def test_archive_name_search(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, }, @@ -164,7 +164,7 @@ def test_archive_checksum_search(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, }, @@ -199,7 +199,7 @@ def test_archive_queries_should_be_narrowing_search(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, "project_id_or_slug": self.project.slug, "version": self.release.version, }, @@ -226,7 +226,7 @@ def test_archive_paging(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, 
"project_id_or_slug": self.project.slug, "version": self.release.version, }, @@ -255,7 +255,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -296,7 +296,7 @@ def test_no_file(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -319,7 +319,7 @@ def test_missing_name(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -352,7 +352,7 @@ def test_invalid_name(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -383,7 +383,7 @@ def test_bad_headers(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, @@ -430,7 +430,7 @@ def test_duplicate_file(self): url = reverse( "sentry-api-0-project-release-files", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "version": release.version, }, diff --git a/tests/sentry/api/endpoints/test_project_stats.py b/tests/sentry/api/endpoints/test_project_stats.py index 947cef1439999d..a4582ba0dc8de4 100644 --- a/tests/sentry/api/endpoints/test_project_stats.py +++ b/tests/sentry/api/endpoints/test_project_stats.py @@ -46,7 +46,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-stats", kwargs={ - "organization_slug": project1.organization.slug, + "organization_id_or_slug": project1.organization.slug, "project_id_or_slug": project1.slug, }, ) @@ -92,7 +92,7 @@ def test_get_error_message_stats(self): url = reverse( "sentry-api-0-project-stats", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) diff --git a/tests/sentry/api/endpoints/test_project_tagkey_details.py b/tests/sentry/api/endpoints/test_project_tagkey_details.py index 9f8c00b13fa870..6da793f7f26cbe 100644 --- a/tests/sentry/api/endpoints/test_project_tagkey_details.py +++ b/tests/sentry/api/endpoints/test_project_tagkey_details.py @@ -32,7 +32,7 @@ def make_event(i): url = reverse( "sentry-api-0-project-tagkey-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "foo", }, @@ -64,7 +64,7 @@ def test_simple(self, mock_eventstream): url = reverse( "sentry-api-0-project-tagkey-details", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": key, }, @@ -89,7 +89,7 @@ def test_protected(self): url = reverse( "sentry-api-0-project-tagkey-details", kwargs={ - "organization_slug": project.organization.slug, + 
"organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "environment", }, diff --git a/tests/sentry/api/endpoints/test_project_tagkey_values.py b/tests/sentry/api/endpoints/test_project_tagkey_values.py index da7ba2de5595be..ac39a691e60e44 100644 --- a/tests/sentry/api/endpoints/test_project_tagkey_values.py +++ b/tests/sentry/api/endpoints/test_project_tagkey_values.py @@ -17,7 +17,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-tagkey-values", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "foo", }, @@ -42,7 +42,7 @@ def test_query(self): url = reverse( "sentry-api-0-project-tagkey-values", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "foo", }, @@ -71,7 +71,7 @@ def test_statperiod_query(self): url = reverse( "sentry-api-0-project-tagkey-values", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "foo", }, @@ -99,7 +99,7 @@ def test_start_end_query(self): url = reverse( "sentry-api-0-project-tagkey-values", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "foo", }, diff --git a/tests/sentry/api/endpoints/test_release_deploys.py b/tests/sentry/api/endpoints/test_release_deploys.py index 50dbe1a58585a7..0478c41cdf3078 100644 --- a/tests/sentry/api/endpoints/test_release_deploys.py +++ b/tests/sentry/api/endpoints/test_release_deploys.py @@ -57,7 +57,10 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-deploys", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -102,7 +105,10 @@ def test_with_project(self): url = reverse( "sentry-api-0-organization-release-deploys", - kwargs={"organization_slug": project.organization.slug, "version": release.version}, + kwargs={ + "organization_id_or_slug": project.organization.slug, + "version": release.version, + }, ) self.login_as(user=self.user) @@ -152,7 +158,7 @@ def test_simple(self): url = reverse( "sentry-api-0-organization-release-deploys", kwargs={ - "organization_slug": self.org.slug, + "organization_id_or_slug": self.org.slug, "version": release.version, }, ) @@ -192,7 +198,7 @@ def test_with_project_slugs(self): url = reverse( "sentry-api-0-organization-release-deploys", kwargs={ - "organization_slug": self.org.slug, + "organization_id_or_slug": self.org.slug, "version": release.version, }, ) @@ -240,7 +246,7 @@ def test_with_invalid_project_slug(self): url = reverse( "sentry-api-0-organization-release-deploys", kwargs={ - "organization_slug": self.org.slug, + "organization_id_or_slug": self.org.slug, "version": release.version, }, ) @@ -268,7 +274,7 @@ def test_environment_validation_failure(self): url = reverse( "sentry-api-0-organization-release-deploys", kwargs={ - "organization_slug": self.org.slug, + "organization_id_or_slug": self.org.slug, "version": release.version, }, ) diff --git a/tests/sentry/api/endpoints/test_sudo.py b/tests/sentry/api/endpoints/test_sudo.py index 0bdbdd730747e1..d8c28d3dd66f91 100644 --- 
a/tests/sentry/api/endpoints/test_sudo.py +++ b/tests/sentry/api/endpoints/test_sudo.py @@ -9,7 +9,9 @@ class SudoTest(APITestCase): def test_sudo_required_del_org(self): org = self.create_organization() - url = reverse("sentry-api-0-organization-details", kwargs={"organization_slug": org.slug}) + url = reverse( + "sentry-api-0-organization-details", kwargs={"organization_id_or_slug": org.slug} + ) user = self.create_user(email="foo@example.com") self.create_member(organization=org, user=user, role="owner") diff --git a/tests/sentry/replays/test_organization_replay_count.py b/tests/sentry/replays/test_organization_replay_count.py index 45820302627a98..db69f9991db30a 100644 --- a/tests/sentry/replays/test_organization_replay_count.py +++ b/tests/sentry/replays/test_organization_replay_count.py @@ -34,7 +34,7 @@ def setUp(self): self.login_as(user=self.user) self.url = reverse( "sentry-api-0-organization-replay-count", - kwargs={"organization_slug": self.project.organization.slug}, + kwargs={"organization_id_or_slug": self.project.organization.slug}, ) self.features = {"organizations:session-replay": True} diff --git a/tests/sentry/replays/test_organization_replay_events_meta.py b/tests/sentry/replays/test_organization_replay_events_meta.py index 0789b689ca9878..84b82a5c1042b5 100644 --- a/tests/sentry/replays/test_organization_replay_events_meta.py +++ b/tests/sentry/replays/test_organization_replay_events_meta.py @@ -20,7 +20,7 @@ def setUp(self): self.project_2 = self.create_project() self.url = reverse( "sentry-api-0-organization-replay-events-meta", - kwargs={"organization_slug": self.project.organization.slug}, + kwargs={"organization_id_or_slug": self.project.organization.slug}, ) self.features = {"organizations:session-replay": True} diff --git a/tests/snuba/api/endpoints/test_organization_eventid.py b/tests/snuba/api/endpoints/test_organization_eventid.py index 6bc2be90a42aab..e3fae59feb5484 100644 --- a/tests/snuba/api/endpoints/test_organization_eventid.py +++ b/tests/snuba/api/endpoints/test_organization_eventid.py @@ -29,7 +29,7 @@ def setUp(self): def test_simple(self): url = reverse( "sentry-api-0-event-id-lookup", - kwargs={"organization_slug": self.org.slug, "event_id": self.event.event_id}, + kwargs={"organization_id_or_slug": self.org.slug, "event_id": self.event.event_id}, ) response = self.client.get(url, format="json") @@ -43,7 +43,7 @@ def test_simple(self): def test_missing_eventid(self): url = reverse( "sentry-api-0-event-id-lookup", - kwargs={"organization_slug": self.org.slug, "event_id": "c" * 32}, + kwargs={"organization_id_or_slug": self.org.slug, "event_id": "c" * 32}, ) response = self.client.get(url, format="json") @@ -53,7 +53,7 @@ def test_missing_eventid(self): def test_ratelimit(self): url = reverse( "sentry-api-0-event-id-lookup", - kwargs={"organization_slug": self.org.slug, "event_id": self.event.event_id}, + kwargs={"organization_id_or_slug": self.org.slug, "event_id": self.event.event_id}, ) with freeze_time("2000-01-01"): for i in range(10): @@ -66,14 +66,14 @@ def test_invalid_event_id(self): reverse( "sentry-api-0-event-id-lookup", kwargs={ - "organization_slug": self.org.slug, + "organization_id_or_slug": self.org.slug, "event_id": "not-an-event", }, ) url = reverse( "sentry-api-0-event-id-lookup", - kwargs={"organization_slug": self.org.slug, "event_id": 123456}, + kwargs={"organization_id_or_slug": self.org.slug, "event_id": 123456}, ) resp = self.client.get(url, format="json") diff --git 
a/tests/snuba/api/endpoints/test_project_event_details.py b/tests/snuba/api/endpoints/test_project_event_details.py index 1a0305e5794101..5d576eefaec5c3 100644 --- a/tests/snuba/api/endpoints/test_project_event_details.py +++ b/tests/snuba/api/endpoints/test_project_event_details.py @@ -56,7 +56,7 @@ def test_simple(self): kwargs={ "event_id": self.cur_event.event_id, "project_id_or_slug": self.project.slug, - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, }, ) response = self.client.get(url, format="json") @@ -73,7 +73,7 @@ def test_snuba_no_prev(self): kwargs={ "event_id": self.prev_event.event_id, "project_id_or_slug": self.project.slug, - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, }, ) response = self.client.get(url, format="json") @@ -90,7 +90,7 @@ def test_snuba_with_environment(self): kwargs={ "event_id": self.cur_event.event_id, "project_id_or_slug": self.project.slug, - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, }, ) response = self.client.get( @@ -109,7 +109,7 @@ def test_ignores_different_group(self): kwargs={ "event_id": self.next_event.event_id, "project_id_or_slug": self.project.slug, - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, }, ) response = self.client.get(url, format="json") @@ -180,7 +180,7 @@ def test_generic_event_with_occurrence(self): kwargs={ "event_id": self.cur_event.event_id, "project_id_or_slug": self.project.slug, - "organization_slug": self.project.organization.slug, + "organization_id_or_slug": self.project.organization.slug, }, ) response = self.client.get(url, format="json", data={"group_id": self.cur_group.id}) @@ -251,7 +251,7 @@ def test_transaction_event(self): kwargs={ "event_id": self.cur_transaction_event.event_id, "project_id_or_slug": self.cur_transaction_event.project.slug, - "organization_slug": self.cur_transaction_event.project.organization.slug, + "organization_id_or_slug": self.cur_transaction_event.project.organization.slug, }, ) response = self.client.get(url, format="json", data={"group_id": self.group.id}) @@ -269,7 +269,7 @@ def test_no_previous_event(self): kwargs={ "event_id": self.prev_transaction_event.event_id, "project_id_or_slug": self.prev_transaction_event.project.slug, - "organization_slug": self.prev_transaction_event.project.organization.slug, + "organization_id_or_slug": self.prev_transaction_event.project.organization.slug, }, ) response = self.client.get(url, format="json", data={"group_id": self.group.id}) @@ -287,7 +287,7 @@ def test_ignores_different_group(self): kwargs={ "event_id": self.next_transaction_event.event_id, "project_id_or_slug": self.next_transaction_event.project.slug, - "organization_slug": self.next_transaction_event.project.organization.slug, + "organization_id_or_slug": self.next_transaction_event.project.organization.slug, }, ) response = self.client.get(url, format="json", data={"group_id": self.group.id}) @@ -303,7 +303,7 @@ def test_no_group_id(self): kwargs={ "event_id": self.cur_transaction_event.event_id, "project_id_or_slug": self.cur_transaction_event.project.slug, - "organization_slug": self.cur_transaction_event.project.organization.slug, + "organization_id_or_slug": self.cur_transaction_event.project.organization.slug, }, ) response = self.client.get(url, format="json") @@ -334,7 +334,7 @@ def setUp(self): 
self.url = reverse( "sentry-api-0-event-json", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": self.event_id, }, @@ -355,7 +355,7 @@ def test_event_does_not_exist(self): self.url = reverse( "sentry-api-0-event-json", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": self.project.slug, "event_id": "no" * 16, }, @@ -377,7 +377,7 @@ def test_project_not_associated_with_event(self): url = reverse( "sentry-api-0-event-json", kwargs={ - "organization_slug": self.organization.slug, + "organization_id_or_slug": self.organization.slug, "project_id_or_slug": project2.slug, "event_id": self.event_id, }, diff --git a/tests/snuba/api/endpoints/test_project_events.py b/tests/snuba/api/endpoints/test_project_events.py index 3d07b705999095..d0179ac0d5b60f 100644 --- a/tests/snuba/api/endpoints/test_project_events.py +++ b/tests/snuba/api/endpoints/test_project_events.py @@ -19,7 +19,7 @@ def test_simple(self): url = reverse( "sentry-api-0-project-events", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -47,7 +47,7 @@ def test_message_search(self): url = reverse( "sentry-api-0-project-events", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -71,7 +71,7 @@ def test_filters_based_on_retention(self): url = reverse( "sentry-api-0-project-events", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -93,7 +93,7 @@ def test_limited_to_week(self): url = reverse( "sentry-api-0-project-events", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) @@ -118,7 +118,7 @@ def test_sample(self): url = reverse( "sentry-api-0-project-events", kwargs={ - "organization_slug": project.organization.slug, + "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, }, ) From deae600a3d95b65422ec6f348c4d6d6f0725fdef Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 13 May 2024 12:48:38 -0400 Subject: [PATCH 348/376] feat(sdk): Bump SDK to 8.0.0 (#70772) --- package.json | 12 ++-- yarn.lock | 178 +++++++++++++++++++++++++-------------------------- 2 files changed, 95 insertions(+), 95 deletions(-) diff --git a/package.json b/package.json index dd46b7d713b072..4f14d5bf158339 100644 --- a/package.json +++ b/package.json @@ -57,13 +57,13 @@ "@sentry-internal/rrweb-player": "2.12.0", "@sentry-internal/rrweb-snapshot": "2.12.0", "@sentry/babel-plugin-component-annotate": "^2.16.1", - "@sentry/core": "^8.0.0-rc.2", - "@sentry/node": "^8.0.0-rc.2", - "@sentry/react": "^8.0.0-rc.2", + "@sentry/core": "^8.0.0", + "@sentry/node": "^8.0.0", + "@sentry/react": "^8.0.0", "@sentry/release-parser": "^1.3.1", "@sentry/status-page-list": "^0.1.0", - "@sentry/types": "^8.0.0-rc.2", - "@sentry/utils": "^8.0.0-rc.2", + "@sentry/types": "^8.0.0", + "@sentry/utils": "^8.0.0", "@spotlightjs/spotlight": "^2.0.0-alpha.1", "@tanstack/react-query": "^4.29.7", "@tanstack/react-query-devtools": "^4.36.1", @@ -178,7 +178,7 @@ "@codecov/webpack-plugin": "^0.0.1-beta.6", "@pmmmwh/react-refresh-webpack-plugin": 
"0.5.11", "@sentry/jest-environment": "^4.0.0", - "@sentry/profiling-node": "^8.0.0-rc.2", + "@sentry/profiling-node": "^8.0.0", "@styled/typescript-styled-plugin": "^1.0.1", "@testing-library/jest-dom": "^6.4.2", "@testing-library/react": "^14.2.1", diff --git a/yarn.lock b/yarn.lock index a943cbf82d241a..b6aa815672cd90 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3004,23 +3004,23 @@ fs-extra "^11.1.1" lodash "^4.17.21" -"@sentry-internal/browser-utils@8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry-internal/browser-utils/-/browser-utils-8.0.0-rc.2.tgz#1e938eef8bb371343a87c49c5185d250fb7b0fd9" - integrity sha512-QwN4OHPoaXK6FShj7jNUXQIsm9/mqxcqFK2jAdTZZHhucyOYSgpu17Mb+LVYvncBF+4O2MXrt5oa8KwTUKohRw== +"@sentry-internal/browser-utils@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry-internal/browser-utils/-/browser-utils-8.0.0.tgz#69bd216abf78d9eef2e6fcdf99d7c281986508d7" + integrity sha512-dzmoDK7mzP7MvNt/jjs9a4OQ18H/8NyhDiKoJakVZnvk8ComGIv01vOOxDhrNmLyaJSq2KVNsiIJ+AkTmwcmyQ== dependencies: - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" -"@sentry-internal/feedback@8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-8.0.0-rc.2.tgz#123fb2db78da44833bf4688708c5082c86541820" - integrity sha512-VEnlWZ2hDQBdr5fbPbljdfFvboFHTAfU6fjth5HaAxBwf+BXMTnNuZ0qBFa68MKOy+IKt4r+a/LwbMW8faVicA== +"@sentry-internal/feedback@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-8.0.0.tgz#94795aea2d7fb23e698e50032899e9b4f4d04a68" + integrity sha512-2Jj0B5xn1y5kiOwso7EWQDlLGRt1tGcnglIYqCIpwNQM38yqn+5NMwH/Df7TkBlxBerKo4MYZZ2yHNUpkTXQ7Q== dependencies: - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" "@sentry-internal/global-search@^1.0.0": version "1.0.0" @@ -3041,25 +3041,25 @@ resolved "https://registry.yarnpkg.com/@sentry-internal/react-inspector/-/react-inspector-6.0.1-4.tgz#10758f3461cf2cf48df8c80f0514c55ca18872c5" integrity sha512-uL2RyvW8EqDEchnbo8Hu/c4IpBqM3LLxUpZPHs8o40kynerzPset6bC/m5SU124gEhy4PqjdvJ7DhTYR75NetQ== -"@sentry-internal/replay-canvas@8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.0.0-rc.2.tgz#06a3d0d6486cb93dffb9283b5ac3e3220d7dd81c" - integrity sha512-27HKSeZNR9ZI3PcOLQFvPVl25fi7YNluWl1o9VRRXV96ug7bjX6+4n3m8z17r6kZAKP4jDV8YtAnzAjuz0MhOQ== +"@sentry-internal/replay-canvas@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.0.0.tgz#eb4da3ec5c8371ed9112ad69712d960c24ee28b5" + integrity sha512-jeE5YQ42groVRvbM41iL4rxvWuKOVnZ7UXacHDgWerR2S+C7OtN3Ydzr34rfRYTVagqFPDcDswFrxrcWuZD+Kw== dependencies: - "@sentry-internal/replay" "8.0.0-rc.2" - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry-internal/replay" "8.0.0" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" -"@sentry-internal/replay@8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry-internal/replay/-/replay-8.0.0-rc.2.tgz#6e194883ff787ad3b6f6cfbd06e70191b570d82f" - integrity sha512-Xn3XYETGcQECwN/hkcSK9OPg7ch4yekVQ6C2hnsRFA2qxHc5cOXw6jGvYUyzKMw6VMKOzO+ONJAzc+kpvrB+8Q== 
+"@sentry-internal/replay@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry-internal/replay/-/replay-8.0.0.tgz#3ba2dea8df79d1e31e4ec0ce948dfbb994731b0f" + integrity sha512-lh0opJuhvKFgLK0TxeN2FDhnCc9qNdgBOpjA69hwpKl10kMxDoZy+oLxE4hx8j5RYKtM2o7mCv2rf1n0wK22Kg== dependencies: - "@sentry-internal/browser-utils" "8.0.0-rc.2" - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry-internal/browser-utils" "8.0.0" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" "@sentry-internal/rrdom@2.12.0": version "2.12.0" @@ -3107,36 +3107,36 @@ resolved "https://registry.yarnpkg.com/@sentry/babel-plugin-component-annotate/-/babel-plugin-component-annotate-2.16.1.tgz#da3bf4ec1c1dc68a97d6a7e27bd710001d6b07fb" integrity sha512-pJka66URsqQbk6hTs9H1XFpUeI0xxuqLYf9Dy5pRGNHSJMtfv91U+CaYSWt03aRRMGDXMduh62zAAY7Wf0HO+A== -"@sentry/browser@8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-8.0.0-rc.2.tgz#c8a3f93a94e5adbab9859f67c60ad8054c9c9b51" - integrity sha512-FpsW4JiU47QEP3pe6huC0n98GYfJxcaLHKV+MYKi12XS7ca6bftOJPVv0BmHvBa9xrEBKbVsyCZIHZTNXrTI4w== - dependencies: - "@sentry-internal/browser-utils" "8.0.0-rc.2" - "@sentry-internal/feedback" "8.0.0-rc.2" - "@sentry-internal/replay" "8.0.0-rc.2" - "@sentry-internal/replay-canvas" "8.0.0-rc.2" - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" - -"@sentry/core@8.0.0-rc.2", "@sentry/core@^8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/core/-/core-8.0.0-rc.2.tgz#839f447b263d69ec20264533e905a6db06835bc6" - integrity sha512-GNG0VYFS5EiJJHbJ9nRc3CPb2EU2eAtkDlWlQtkKu/jvHE7NG6ik8qk841Yw3ki7KWN05IVMD5FhtxDHjEYXkw== +"@sentry/browser@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-8.0.0.tgz#9c2f2d62f196a5b9b5a7174d0970c4f974722c98" + integrity sha512-HZt5bjujxz2XJA1iUqD51gEz/h8Ij+BYLu6D+qh6WpVtCSS1cfKoxJj8mQef7j5tIVVofxRtRr9PvAoFnehO0A== + dependencies: + "@sentry-internal/browser-utils" "8.0.0" + "@sentry-internal/feedback" "8.0.0" + "@sentry-internal/replay" "8.0.0" + "@sentry-internal/replay-canvas" "8.0.0" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" + +"@sentry/core@8.0.0", "@sentry/core@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/core/-/core-8.0.0.tgz#fd5f94c9ef72ce386ae37de852f156106ea807d5" + integrity sha512-PgOqQPdlIbiLFOo0F6IBzMbvusiEQ86+yXd76pIsuqQ2tj+iFL5gdYOckF/FKVpAwhfzIx64GKin2C+535c1qQ== dependencies: - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" "@sentry/jest-environment@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@sentry/jest-environment/-/jest-environment-4.0.0.tgz#037844bed70c8f13259ee01ab65ff8d36aef0209" integrity sha512-91jLBS8KbX2Ng0aDSP7kdE9sjiLc4qjp/jczTbmvOvuHxoaQ9hSLaEpsthnnUQ/zNeprZMkOC9xlS+zABw3Zmw== -"@sentry/node@8.0.0-rc.2", "@sentry/node@^8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/node/-/node-8.0.0-rc.2.tgz#d36bc14bd3348e5199aa1cd0bd7898bb27084c9a" - integrity sha512-CcoU2ANmEPG1I1E1a8G57bRmy+6etxNprnoe4b6L5+y7b5KVpJq2nnq1RjYiJwN3H5oG4h7sOrc8RIbqPox4Zw== +"@sentry/node@8.0.0", "@sentry/node@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/node/-/node-8.0.0.tgz#bf5bf7dd7810ede3cc7526a94990faeb545019b0" + integrity 
sha512-yOmJV0gyRA5KMw4lUAuB2LytUwcwSByjFn2KO5Xy9Oc8XpgJ91CIU/v1Udv3GsrYo2HpdQn/dyZLwqqhbyM55Q== dependencies: "@opentelemetry/api" "^1.8.0" "@opentelemetry/context-async-hooks" "^1.23.0" @@ -3160,43 +3160,43 @@ "@opentelemetry/sdk-trace-base" "^1.23.0" "@opentelemetry/semantic-conventions" "^1.23.0" "@prisma/instrumentation" "5.13.0" - "@sentry/core" "8.0.0-rc.2" - "@sentry/opentelemetry" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/core" "8.0.0" + "@sentry/opentelemetry" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" optionalDependencies: opentelemetry-instrumentation-fetch-node "1.2.0" -"@sentry/opentelemetry@8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/opentelemetry/-/opentelemetry-8.0.0-rc.2.tgz#25a1dc55ec30204380fcc6f83afbe0378feabd3a" - integrity sha512-HW+LYzI90/ptu8pTkof/G2V9TOxu07p0vDwI4KayrjrLUVgi0q7JvhXviKghJRqBbLd1G9bOj3gD5J5yHvPDzg== +"@sentry/opentelemetry@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/opentelemetry/-/opentelemetry-8.0.0.tgz#14a9a28144cd4c3da8d65a764aee20740033c03b" + integrity sha512-AvUUZpiJTq3H69Hd9k0tiOqGTA87uq0wZN+WaV4iT6sItG2MVaqYup5wSmqNKD6iVErfx7djzZ5C3LWsFQX3KQ== dependencies: - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" -"@sentry/profiling-node@^8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/profiling-node/-/profiling-node-8.0.0-rc.2.tgz#d5280a5c5497e3c1b1531c2507d7a3a87e8ae2e1" - integrity sha512-I3y9bv3SJdF5R9mM260BOgfyeTp2QZ+22W28lJXhg1hQbN8Bdf2eoWyxxgfo8fswf/uqLrlfKIjzugJ+BaNedw== +"@sentry/profiling-node@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/profiling-node/-/profiling-node-8.0.0.tgz#d580ffd300047a85ebb80e959f2abe0f42576593" + integrity sha512-mhpErfHgYxlkxmlLlDzX8qvk12t8B0jyoS9oCIPe1m9Z+hQCI4BL/fEl01kIysMXaoKNaJ6ttXYoacLb+Xu+vA== dependencies: - "@sentry/core" "8.0.0-rc.2" - "@sentry/node" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/core" "8.0.0" + "@sentry/node" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" detect-libc "^2.0.2" node-abi "^3.61.0" -"@sentry/react@^8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/react/-/react-8.0.0-rc.2.tgz#0ae48313a97467dcce437d72f3e20ff52e3e4416" - integrity sha512-6fniK1v2OxyUOy7qb0nLOtiY2xsk4VL9FFmqFvxLzSzyIe2RBSSfENtEmOg+xVvNSrnG96EOUhZMCsDdJote0g== +"@sentry/react@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/react/-/react-8.0.0.tgz#0b23c2abb93557ac623aadee90126f6c6a188e6a" + integrity sha512-TbAtOAKY2QKqpuy0uoO/ujL0J5djZjQ2K8iE3/j3+EX/3DaZ3ydUGNdJ0rDQQoJgRUsPidBM+SBco2dI38sCdQ== dependencies: - "@sentry/browser" "8.0.0-rc.2" - "@sentry/core" "8.0.0-rc.2" - "@sentry/types" "8.0.0-rc.2" - "@sentry/utils" "8.0.0-rc.2" + "@sentry/browser" "8.0.0" + "@sentry/core" "8.0.0" + "@sentry/types" "8.0.0" + "@sentry/utils" "8.0.0" hoist-non-react-statics "^3.3.2" "@sentry/release-parser@^1.3.1": @@ -3209,17 +3209,17 @@ resolved "https://registry.yarnpkg.com/@sentry/status-page-list/-/status-page-list-0.1.0.tgz#49e8683091de0531aba96fc95f19891970929701" integrity sha512-wXWu3IihxFO0l5WQkr6V138ZJKHpL8G7fw/9l0Dl6Nl1ggWcJZOaBN/o5sXasS1e0Atvy2dL9DiPsKmBq8D4MA== -"@sentry/types@8.0.0-rc.2", "@sentry/types@^8.0.0-rc.2": - version "8.0.0-rc.2" - resolved 
"https://registry.yarnpkg.com/@sentry/types/-/types-8.0.0-rc.2.tgz#e2ec9e29f1b0b0af2ce20b6ba6be45b606955707" - integrity sha512-A52WamMnmJnRFrw6S9tmp52eGSdRWlTTvbXMF5mBE/8RCwknAFuPcXetFpKtU/ixqK+oeGtXLrJOuSJhWDvnVg== +"@sentry/types@8.0.0", "@sentry/types@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/types/-/types-8.0.0.tgz#5047dcbcff6a38699e4490acd941baffafb72f45" + integrity sha512-Dtd8dtFEq1XtdAntkguYHaL4tokzm4Aq5t0HP6Vl1P+MPImokDE1UcpKglkh0Z5aym/vF6e0qW9/CM7lAI5R/A== -"@sentry/utils@8.0.0-rc.2", "@sentry/utils@^8.0.0-rc.2": - version "8.0.0-rc.2" - resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-8.0.0-rc.2.tgz#d915ed23a71d1a5b627f209380b1595eccbf81da" - integrity sha512-Ev0nhHVfMb82gtUHuqfbJNaeQZG/wzzO0+hiUFOXuGdJEFItYpv/z2TlfXEFQ8NX8nD0gWSFMlUCF8ySi0IMXA== +"@sentry/utils@8.0.0", "@sentry/utils@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-8.0.0.tgz#24b2f3f24cf521c0180e4335da63a4c6e51fa7dd" + integrity sha512-oZex/dRKfkWHoK99W7QDQtr26IZrAD9EDd2+pwLmkFclApxVDGLLKNkmcbfj4LX1zMROxKWww/GTE7eo08tEKg== dependencies: - "@sentry/types" "8.0.0-rc.2" + "@sentry/types" "8.0.0" "@sinclair/typebox@^0.27.8": version "0.27.8" From d3fd56ae673aabc4b6e1e9babd3d75680e7e6c85 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 13 May 2024 10:02:37 -0700 Subject: [PATCH 349/376] ref(delayed rules): Don't use a decorator, call add_handler instead (#70689) It doesn't seem like any of the delayed processing is being called since it's not imported by anything, so this PR changes the registry pattern a bit so that it does. --- src/sentry/buffer/redis.py | 9 ++++----- src/sentry/rules/processing/delayed_processing.py | 3 +-- src/sentry/tasks/post_process.py | 6 ++++++ 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py index e1937ce9e59922..38a093561b4db3 100644 --- a/src/sentry/buffer/redis.py +++ b/src/sentry/buffer/redis.py @@ -63,12 +63,11 @@ class BufferHookRegistry: def __init__(self, *args: Any, **kwargs: Any) -> None: self._registry: dict[BufferHookEvent, Callable[..., Any]] = {} - def add_handler(self, key: BufferHookEvent) -> Callable[..., Any]: - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - self._registry[key] = func - return func + def add_handler(self, key: BufferHookEvent, func: Callable[..., Any]) -> None: + self._registry[key] = func - return decorator + def has(self, key: BufferHookEvent) -> bool: + return self._registry.get(key) is not None def callback(self, buffer_hook_event: BufferHookEvent, data: RedisBuffer) -> bool: try: diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index e018b117497937..4cabe5086fe34d 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -5,7 +5,7 @@ from typing import Any, DefaultDict, NamedTuple from sentry import nodestore -from sentry.buffer.redis import BufferHookEvent, RedisBuffer, redis_buffer_registry +from sentry.buffer.redis import RedisBuffer from sentry.eventstore.models import Event, GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.group import Group @@ -292,7 +292,6 @@ def get_group_to_groupevent( return group_to_groupevent -@redis_buffer_registry.add_handler(BufferHookEvent.FLUSH) def process_delayed_alert_conditions(buffer: RedisBuffer) -> None: with 
metrics.timer("delayed_processing.process_all_conditions.duration"): fetch_time = datetime.now(tz=timezone.utc) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index aa05b4494ca032..5469e381a06369 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -990,6 +990,12 @@ def _get_replay_id(event): def process_rules(job: PostProcessJob) -> None: + from sentry.buffer.redis import BufferHookEvent, redis_buffer_registry + from sentry.rules.processing.delayed_processing import process_delayed_alert_conditions + + if not redis_buffer_registry.has(BufferHookEvent.FLUSH): + redis_buffer_registry.add_handler(BufferHookEvent.FLUSH, process_delayed_alert_conditions) + if job["is_reprocessed"]: return From da8fba1ff86e9df1a10d78222fdc41821d10e8f7 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 13 May 2024 13:22:24 -0400 Subject: [PATCH 350/376] fix(feedback): check latest event for attachment (#70778) we send screenshots through the feedback event, so instead of checking the issue for attachments, we should check the latest event. this should fix the issue where the `IconImage` isn't showing up when a feedback has a screenshot attached. SCR-20240513-jcau --- src/sentry/api/endpoints/group_details.py | 9 ++++++--- src/sentry/api/serializers/models/group_stream.py | 13 ++++++++----- .../endpoints/test_organization_group_index.py | 12 ++++++------ 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/src/sentry/api/endpoints/group_details.py b/src/sentry/api/endpoints/group_details.py index e1332d62dbf0f8..1efa34de8cd93d 100644 --- a/src/sentry/api/endpoints/group_details.py +++ b/src/sentry/api/endpoints/group_details.py @@ -236,7 +236,7 @@ def get(self, request: Request, group) -> Response: ) data.update({"sentryAppIssues": sentry_app_issues}) - if "hasAttachments" in expand: + if "latestEventHasAttachments" in expand: if not features.has( "organizations:event-attachments", group.project.organization, @@ -244,8 +244,11 @@ def get(self, request: Request, group) -> Response: ): return self.respond(status=404) - num_attachments = EventAttachment.objects.filter(group_id=group.id).count() - data.update({"hasAttachments": num_attachments > 0}) + latest_event = group.get_latest_event() + num_attachments = EventAttachment.objects.filter( + project_id=latest_event.project_id, event_id=latest_event.event_id + ).count() + data.update({"latestEventHasAttachments": num_attachments > 0}) data.update( { diff --git a/src/sentry/api/serializers/models/group_stream.py b/src/sentry/api/serializers/models/group_stream.py index 93c770a64f6df9..c80ba5c2ed225d 100644 --- a/src/sentry/api/serializers/models/group_stream.py +++ b/src/sentry/api/serializers/models/group_stream.py @@ -397,7 +397,7 @@ def get_attrs( ) attrs[item].update({"sentryAppIssues": sentry_app_issues}) - if self._expand("hasAttachments"): + if self._expand("latestEventHasAttachments"): if not features.has( "organizations:event-attachments", item.project.organization, @@ -405,9 +405,12 @@ def get_attrs( ): return self.respond(status=404) + latest_event = item.get_latest_event() for item in item_list: - num_attachments = EventAttachment.objects.filter(group_id=item.id).count() - attrs[item].update({"hasAttachments": num_attachments > 0}) + num_attachments = EventAttachment.objects.filter( + project_id=latest_event.project_id, event_id=latest_event.event_id + ).count() + attrs[item].update({"latestEventHasAttachments": 
num_attachments > 0}) return attrs @@ -467,8 +470,8 @@ def serialize( if self._expand("sentryAppIssues"): result["sentryAppIssues"] = attrs["sentryAppIssues"] - if self._expand("hasAttachments"): - result["hasAttachments"] = attrs["hasAttachments"] + if self._expand("latestEventHasAttachments"): + result["latestEventHasAttachments"] = attrs["latestEventHasAttachments"] return result diff --git a/tests/sentry/issues/endpoints/test_organization_group_index.py b/tests/sentry/issues/endpoints/test_organization_group_index.py index f9b6646b8d3903..16862652328f7d 100644 --- a/tests/sentry/issues/endpoints/test_organization_group_index.py +++ b/tests/sentry/issues/endpoints/test_organization_group_index.py @@ -1862,7 +1862,7 @@ def test_expand_sentry_app_issues(self) -> None: assert response.data[0]["sentryAppIssues"][1]["displayName"] == issue_2.display_name @with_feature("organizations:event-attachments") - def test_expand_has_attachments(self) -> None: + def test_expand_latest_event_has_attachments(self) -> None: event = self.store_event( data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]}, project_id=self.project.id, ) query = "status:unresolved" self.login_as(user=self.user) response = self.get_response( - sort_by="date", limit=10, query=query, expand=["hasAttachments"] + sort_by="date", limit=10, query=query, expand=["latestEventHasAttachments"] ) assert response.status_code == 200 assert len(response.data) == 1 assert int(response.data[0]["id"]) == event.group.id # No attachments - assert response.data[0]["hasAttachments"] is False + assert response.data[0]["latestEventHasAttachments"] is False # Test with no expand response = self.get_response(sort_by="date", limit=10, query=query) assert response.status_code == 200 assert len(response.data) == 1 assert int(response.data[0]["id"]) == event.group.id - assert "hasAttachments" not in response.data[0] + assert "latestEventHasAttachments" not in response.data[0] # Add 1 attachment file_attachment = File.objects.create(name="hello.png", type="image/png") EventAttachment.objects.create( project_id=event.project_id, event_id=event.event_id, type=file_attachment.type, file_id=file_attachment.id, name="hello.png", ) response = self.get_response( - sort_by="date", limit=10, query=query, expand=["hasAttachments"] + sort_by="date", limit=10, query=query, expand=["latestEventHasAttachments"] ) assert response.status_code == 200 - assert response.data[0]["hasAttachments"] is True + assert response.data[0]["latestEventHasAttachments"] is True def test_expand_owners(self) -> None: event = self.store_event( From 92d2852522398f5578990529872ec072fe751d97 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 13 May 2024 13:49:45 -0400 Subject: [PATCH 351/376] feat(crons): Remove culprit from crons incident issues (#70781) This is a bit confusing; we don't need a culprit. --- src/sentry/monitors/logic/mark_failed.py | 2 +- tests/sentry/monitors/logic/test_mark_failed.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 73b20e3e69d4e1..083f721e196cbc 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -265,7 +265,7 @@ def create_issue_platform_occurrence( ), ], evidence_data={}, - culprit="incident", + culprit="", detection_time=current_timestamp, level="error", assignee=monitor_env.monitor.owner_actor, diff --git a/tests/sentry/monitors/logic/test_mark_failed.py 
b/tests/sentry/monitors/logic/test_mark_failed.py index 2d13f9069f5ecb..65fd5f00e47d71 100644 --- a/tests/sentry/monitors/logic/test_mark_failed.py +++ b/tests/sentry/monitors/logic/test_mark_failed.py @@ -296,7 +296,7 @@ def test_mark_failed_default_params_issue_platform(self, mock_produce_occurrence ], "type": MonitorIncidentType.type_id, "level": "error", - "culprit": "incident", + "culprit": "", }, ) == dict(occurrence) From ac80048be29f718d4caf7000034be7ac19f0e8f3 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 13 May 2024 13:50:03 -0400 Subject: [PATCH 352/376] ref(crons): Fix typing in testutils (#70774) --- src/sentry/monitors/testutils.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/sentry/monitors/testutils.py b/src/sentry/monitors/testutils.py index 316ae35fc6d3c5..2957781443f4b9 100644 --- a/src/sentry/monitors/testutils.py +++ b/src/sentry/monitors/testutils.py @@ -10,14 +10,19 @@ from sentry.monitors.types import CheckinItem, CheckinPayload -def build_checkin_item(ts=None, partition=0, message_overrides=None, payload_overrides=None): +def build_checkin_item( + ts: datetime | None = None, + partition: int = 0, + message_overrides=None, + payload_overrides=None, +): if ts is None: ts = datetime.now() message: CheckIn = { "message_type": "check_in", "payload": {}, - "start_time": ts, + "start_time": ts.timestamp(), "project_id": 1, "sdk": None, "retention_days": 10, From 72cf165bad13a009e373c6e59e8cf78ad1ca80c4 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Mon, 13 May 2024 14:29:10 -0400 Subject: [PATCH 353/376] feat(dashboards): Add feature flag for span metrics (#70788) We're going to surface more span metrics in the Dashboards product. Adding a feature flag to conditionally show these changes in the metrics product. 
--- src/sentry/features/temporary.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 30271366d96c9d..395cd93c605072 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -57,6 +57,7 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:dashboards-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:dashboards-mep", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:dashboards-rh-widget", OrganizationFeature, FeatureHandlerStrategy.REMOTE) + manager.add("organizations:dashboards-span-metrics", OrganizationFeature, FeatureHandlerStrategy.OPTIONS) manager.add("organizations:ddm-dashboard-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:custom-metrics-experimental", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:ddm-sidebar-item-hidden", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From 0fc316c974f88927566ca8f333a684cf238b6fda Mon Sep 17 00:00:00 2001 From: Enoch Tang Date: Mon, 13 May 2024 14:32:00 -0400 Subject: [PATCH 354/376] chore(snuba-sdk): Bump snuba sdk to 2.0.34 (#70589) --- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 5676b0b44002fd..47270c6d094104 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -189,7 +189,7 @@ sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 sniffio==1.2.0 -snuba-sdk==2.0.33 +snuba-sdk==2.0.34 sortedcontainers==2.4.0 soupsieve==2.3.2.post1 sqlparse==0.4.4 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index bfb645df01a6da..dc97340ac8709e 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -129,7 +129,7 @@ sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 sniffio==1.3.0 -snuba-sdk==2.0.33 +snuba-sdk==2.0.34 soupsieve==2.3.2.post1 sqlparse==0.4.4 statsd==3.3.0 From 51d0f0517f258ec5152df20c322ccac2419eef02 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Mon, 13 May 2024 14:32:41 -0400 Subject: [PATCH 355/376] ref(crons): Consistent import of `.base` in endpoints (#70787) --- .../endpoints/monitor_ingest_checkin_attachment.py | 7 ++----- .../monitors/endpoints/organization_monitor_details.py | 5 +++-- .../endpoints/organization_monitor_environment_details.py | 7 +++---- src/sentry/monitors/endpoints/project_monitor_details.py | 5 +++-- .../endpoints/project_monitor_environment_details.py | 7 +++---- .../endpoints/project_monitor_processing_errors_index.py | 3 ++- .../endpoints/project_processing_errors_details.py | 3 ++- 7 files changed, 18 insertions(+), 19 deletions(-) diff --git a/src/sentry/monitors/endpoints/monitor_ingest_checkin_attachment.py b/src/sentry/monitors/endpoints/monitor_ingest_checkin_attachment.py index 19fc7fc53e61c7..ceb89788a34490 100644 --- a/src/sentry/monitors/endpoints/monitor_ingest_checkin_attachment.py +++ b/src/sentry/monitors/endpoints/monitor_ingest_checkin_attachment.py @@ -24,14 +24,11 @@ from sentry.models.organization import Organization from sentry.models.project import Project from sentry.models.projectkey import ProjectKey -from sentry.monitors.endpoints.base import ( - ProjectMonitorPermission, - get_monitor_by_org_id_or_slug, - try_checkin_lookup, -) from sentry.monitors.models import Monitor, MonitorCheckIn 
from sentry.utils.sdk import bind_organization_context +from .base import ProjectMonitorPermission, get_monitor_by_org_id_or_slug, try_checkin_lookup + MAX_ATTACHMENT_SIZE = 1024 * 100 # 100kb diff --git a/src/sentry/monitors/endpoints/organization_monitor_details.py b/src/sentry/monitors/endpoints/organization_monitor_details.py index f26db8299041eb..05f8bf95376fca 100644 --- a/src/sentry/monitors/endpoints/organization_monitor_details.py +++ b/src/sentry/monitors/endpoints/organization_monitor_details.py @@ -15,12 +15,13 @@ RESPONSE_UNAUTHORIZED, ) from sentry.apidocs.parameters import GlobalParams, MonitorParams -from sentry.monitors.endpoints.base import MonitorEndpoint -from sentry.monitors.endpoints.base_monitor_details import MonitorDetailsMixin from sentry.monitors.serializers import MonitorSerializer from sentry.monitors.validators import MonitorValidator from sentry.utils.auth import AuthenticatedHttpRequest +from .base import MonitorEndpoint +from .base_monitor_details import MonitorDetailsMixin + @region_silo_endpoint @extend_schema(tags=["Crons"]) diff --git a/src/sentry/monitors/endpoints/organization_monitor_environment_details.py b/src/sentry/monitors/endpoints/organization_monitor_environment_details.py index 9c87b9294236d2..06037bde55ba16 100644 --- a/src/sentry/monitors/endpoints/organization_monitor_environment_details.py +++ b/src/sentry/monitors/endpoints/organization_monitor_environment_details.py @@ -15,12 +15,11 @@ RESPONSE_UNAUTHORIZED, ) from sentry.apidocs.parameters import GlobalParams, MonitorParams -from sentry.monitors.endpoints.base import MonitorEndpoint -from sentry.monitors.endpoints.base_monitor_environment_details import ( - MonitorEnvironmentDetailsMixin, -) from sentry.monitors.serializers import MonitorSerializer +from .base import MonitorEndpoint +from .base_monitor_environment_details import MonitorEnvironmentDetailsMixin + @region_silo_endpoint @extend_schema(tags=["Crons"]) diff --git a/src/sentry/monitors/endpoints/project_monitor_details.py b/src/sentry/monitors/endpoints/project_monitor_details.py index d4312e827b1059..639efbfae338bc 100644 --- a/src/sentry/monitors/endpoints/project_monitor_details.py +++ b/src/sentry/monitors/endpoints/project_monitor_details.py @@ -15,12 +15,13 @@ RESPONSE_UNAUTHORIZED, ) from sentry.apidocs.parameters import GlobalParams, MonitorParams -from sentry.monitors.endpoints.base import ProjectMonitorEndpoint -from sentry.monitors.endpoints.base_monitor_details import MonitorDetailsMixin from sentry.monitors.serializers import MonitorSerializer from sentry.monitors.validators import MonitorValidator from sentry.utils.auth import AuthenticatedHttpRequest +from .base import ProjectMonitorEndpoint +from .base_monitor_details import MonitorDetailsMixin + @region_silo_endpoint @extend_schema(tags=["Crons"]) diff --git a/src/sentry/monitors/endpoints/project_monitor_environment_details.py b/src/sentry/monitors/endpoints/project_monitor_environment_details.py index 0f050eed1d9f82..1ff7e28cb83de0 100644 --- a/src/sentry/monitors/endpoints/project_monitor_environment_details.py +++ b/src/sentry/monitors/endpoints/project_monitor_environment_details.py @@ -15,12 +15,11 @@ RESPONSE_UNAUTHORIZED, ) from sentry.apidocs.parameters import GlobalParams, MonitorParams -from sentry.monitors.endpoints.base import ProjectMonitorEnvironmentEndpoint -from sentry.monitors.endpoints.base_monitor_environment_details import ( - MonitorEnvironmentDetailsMixin, -) from sentry.monitors.serializers import MonitorSerializer +from .base import 
ProjectMonitorEnvironmentEndpoint +from .base_monitor_environment_details import MonitorEnvironmentDetailsMixin + @region_silo_endpoint @extend_schema(tags=["Crons"]) diff --git a/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py b/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py index eaadb96874455a..c7b516a0b37709 100644 --- a/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py +++ b/src/sentry/monitors/endpoints/project_monitor_processing_errors_index.py @@ -9,13 +9,14 @@ from sentry.apidocs.constants import RESPONSE_FORBIDDEN, RESPONSE_NOT_FOUND, RESPONSE_UNAUTHORIZED from sentry.apidocs.parameters import GlobalParams, MonitorParams from sentry.apidocs.utils import inline_sentry_response_serializer -from sentry.monitors.endpoints.base import ProjectMonitorEndpoint from sentry.monitors.processing_errors import ( CheckinProcessErrorsManager, CheckinProcessingErrorData, ) from sentry.utils.auth import AuthenticatedHttpRequest +from .base import ProjectMonitorEndpoint + @region_silo_endpoint @extend_schema(tags=["Crons"]) diff --git a/src/sentry/monitors/endpoints/project_processing_errors_details.py b/src/sentry/monitors/endpoints/project_processing_errors_details.py index 3a85add3a80eca..67169046a9b4a4 100644 --- a/src/sentry/monitors/endpoints/project_processing_errors_details.py +++ b/src/sentry/monitors/endpoints/project_processing_errors_details.py @@ -20,9 +20,10 @@ ) from sentry.apidocs.parameters import GlobalParams, MonitorParams from sentry.models.project import Project -from sentry.monitors.endpoints.base import ProjectMonitorPermission from sentry.monitors.processing_errors import CheckinProcessErrorsManager, InvalidProjectError +from .base import ProjectMonitorPermission + @region_silo_endpoint @extend_schema(tags=["Crons"]) From 716dcf2e5ee429b177dcdb576794fb6637af33e4 Mon Sep 17 00:00:00 2001 From: Raj Joshi Date: Mon, 13 May 2024 11:38:17 -0700 Subject: [PATCH 356/376] feat(chartcuterie): Refactor Function Regression Chart to separate data (#70250) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I'm adding Chartcuterie support for Function Regression chart. The first step is to refactor it so we can generate the chart props in the backend. I updated the `getBreakpointChartOptionsFromData` function to remove backwards compatibility for `functionBreakpointChart` since I am updating it to move all the transformation to a separate function. 
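The heart of the refactor is a shared data contract: both regression chart types now feed the options builder the same `EventsStatsData` shape, i.e. `[timestamp, [{count}]]` tuples. Here is a self-contained sketch of that flattening step, with made-up numbers; it mirrors the `transformFunctionStats` helper in the diff below rather than copying it exactly, and the type and function names are illustrative:

```tsx
// Sketch of the flattening this refactor centralizes: a profiling stats
// payload ({timestamps, data: [{axis, values}]}) becomes the
// [timestamp, [{count}]] tuples that the endpoint regression chart
// already consumes. Names here are illustrative, not from this PR.
type ProfilingStatsSketch = {
  data: Array<{axis: string; values: number[]}>;
  timestamps: number[];
};

function flattenP95(
  stats: ProfilingStatsSketch
): Array<[number, Array<{count: number}>]> {
  // Pick out the p95() series; bail to an empty chart if it is missing.
  const series = stats.data.find(({axis}) => axis === 'p95()');
  if (!series) {
    return [];
  }
  // Zip each timestamp with its value, wrapped in the {count} envelope.
  return stats.timestamps.map(
    (timestamp, i): [number, Array<{count: number}>] => [
      timestamp,
      [{count: series.values[i]}],
    ]
  );
}

// flattenP95({timestamps: [1715601600, 1715601660],
//             data: [{axis: 'p95()', values: [12.3, 45.6]}]})
// -> [[1715601600, [{count: 12.3}]], [1715601660, [{count: 45.6}]]]
```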
I tested using `yarn dev-ui` (embedded before/after screenshots omitted). Preview of the Slack Image 👀 ![example3](https://github.com/getsentry/sentry/assets/33237075/de8da223-13b4-427e-a2e4-4220ef256a05) --- static/app/chartcuterie/performance.tsx | 55 +++++++- static/app/chartcuterie/types.tsx | 1 + .../breakpointChart.tsx | 2 + .../breakpointChartOptions.tsx | 44 ++++-- .../functionBreakpointChart.tsx | 19 +-- .../eventStatisticalDetector/lineChart.tsx | 24 ++-- .../profiling/hooks/useProfileEventsStats.tsx | 125 +----------------- .../hooks/useProfileTopEventsStats.tsx | 2 +- static/app/utils/profiling/hooks/utils.tsx | 122 +++++++++++++++++ 9 files changed, 229 insertions(+), 165 deletions(-) diff --git a/static/app/chartcuterie/performance.tsx b/static/app/chartcuterie/performance.tsx index 452d440eecddcf..b2c9240c528e31 100644 --- a/static/app/chartcuterie/performance.tsx +++ b/static/app/chartcuterie/performance.tsx @@ -3,7 +3,10 @@ import {transformToLineSeries} from 'sentry/components/charts/lineChart'; import getBreakpointChartOptionsFromData, { type EventBreakpointChartData, } from 'sentry/components/events/eventStatisticalDetector/breakpointChartOptions'; +import type {EventsStatsSeries} from 'sentry/types'; +import {transformStatsResponse} from 'sentry/utils/profiling/hooks/utils'; import {lightTheme as theme} from 'sentry/utils/theme'; +import type {NormalizedTrendsTransaction} from 'sentry/views/performance/trends/types'; import {slackChartDefaults, slackChartSize} from './slack'; import type {RenderDescriptor} from './types'; @@ -11,6 +14,10 @@ import {ChartType} from './types'; export const performanceCharts: RenderDescriptor<ChartType>[] = []; +export type FunctionRegressionPercentileData = { + data: EventsStatsSeries<'p95()'>; +}; + function modifyOptionsForSlack(options: Omit<LineChartProps, 'series'>) { options.legend = options.legend || {}; options.legend.icon = 'none'; @@ -23,11 +30,57 @@ function modifyOptionsForSlack(options: Omit<LineChartProps, 'series'>) { visualMap: options.options?.visualMap, }; } +type FunctionRegressionChartData = { + evidenceData: NormalizedTrendsTransaction; + rawResponse: any; +}; performanceCharts.push({ key: ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION, getOption: (data: EventBreakpointChartData) => { - const {chartOptions, series} = getBreakpointChartOptionsFromData(data, theme); + const {chartOptions, series} = getBreakpointChartOptionsFromData( + data, + ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION, + theme + ); const transformedSeries = transformToLineSeries({series}); const modifiedOptions = modifyOptionsForSlack(chartOptions); return { ...modifiedOptions, backgroundColor: theme.background, series: transformedSeries, grid: slackChartDefaults.grid, visualMap: modifiedOptions.options?.visualMap, }; }, ...slackChartSize, }); + +performanceCharts.push({ + key: ChartType.SLACK_PERFORMANCE_FUNCTION_REGRESSION, + getOption: (data: FunctionRegressionChartData) => { + const transformed = transformStatsResponse( 'profileFunctions', ['p95()'], data.rawResponse ); + + const percentileData = { + data: transformed, + }; + + const param = { + percentileData: percentileData as FunctionRegressionPercentileData, + evidenceData: data.evidenceData, + }; + + const {chartOptions, series} = getBreakpointChartOptionsFromData( param, ChartType.SLACK_PERFORMANCE_FUNCTION_REGRESSION, theme ); const transformedSeries = transformToLineSeries({series}); const modifiedOptions = modifyOptionsForSlack(chartOptions); diff --git a/static/app/chartcuterie/types.tsx b/static/app/chartcuterie/types.tsx index 
7bd1c2ee7c8024..526a15747f79b8 100644 --- a/static/app/chartcuterie/types.tsx +++ b/static/app/chartcuterie/types.tsx @@ -17,6 +17,7 @@ export enum ChartType { SLACK_METRIC_ALERT_EVENTS = 'slack:metricAlert.events', SLACK_METRIC_ALERT_SESSIONS = 'slack:metricAlert.sessions', SLACK_PERFORMANCE_ENDPOINT_REGRESSION = 'slack:performance.endpointRegression', + SLACK_PERFORMANCE_FUNCTION_REGRESSION = 'slack:performance.functionRegression', } /** diff --git a/static/app/components/events/eventStatisticalDetector/breakpointChart.tsx b/static/app/components/events/eventStatisticalDetector/breakpointChart.tsx index b4e9a83bce88c6..2c9ce2fc6633e5 100644 --- a/static/app/components/events/eventStatisticalDetector/breakpointChart.tsx +++ b/static/app/components/events/eventStatisticalDetector/breakpointChart.tsx @@ -1,3 +1,4 @@ +import {ChartType} from 'sentry/chartcuterie/types'; import TransitionChart from 'sentry/components/charts/transitionChart'; import TransparentLoadingMask from 'sentry/components/charts/transparentLoadingMask'; import type {Event, EventsStatsData} from 'sentry/types'; @@ -80,6 +81,7 @@ function EventBreakpointChart({event}: EventBreakpointChartProps) { percentileData={data?.['p95(transaction.duration)']?.data ?? []} evidenceData={normalizedOccurrenceEvent} datetime={datetime} + chartType={ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION} /> diff --git a/static/app/components/events/eventStatisticalDetector/breakpointChartOptions.tsx b/static/app/components/events/eventStatisticalDetector/breakpointChartOptions.tsx index ea5559b4c8831f..13df1d1dc777e1 100644 --- a/static/app/components/events/eventStatisticalDetector/breakpointChartOptions.tsx +++ b/static/app/components/events/eventStatisticalDetector/breakpointChartOptions.tsx @@ -1,8 +1,9 @@ import type {Theme} from '@emotion/react'; +import type {FunctionRegressionPercentileData} from 'sentry/chartcuterie/performance'; +import {ChartType} from 'sentry/chartcuterie/types'; import VisualMap from 'sentry/components/charts/components/visualMap'; import type {LineChart as EChartsLineChart} from 'sentry/components/charts/lineChart'; -import type {Series} from 'sentry/types/echarts'; import type {EventsStatsData} from 'sentry/types/organization'; import { axisLabelFormatter, @@ -20,22 +21,41 @@ import {getIntervalLine} from 'sentry/views/performance/utils/getIntervalLine'; export type EventBreakpointChartData = { evidenceData: NormalizedTrendsTransaction; - percentileData?: EventsStatsData; - percentileSeries?: Series[]; + percentileData: EventsStatsData | FunctionRegressionPercentileData; }; function getBreakpointChartOptionsFromData( - {percentileData, evidenceData, percentileSeries}: EventBreakpointChartData, + {percentileData, evidenceData}: EventBreakpointChartData, + chartType: ChartType, theme: Theme ) { - const transformedSeries = percentileData - ? transformEventStats( - percentileData, - generateTrendFunctionAsString(TrendFunctionField.P95, 'transaction.duration') - ) - : percentileSeries - ? 
percentileSeries - : []; + const trendFunctionName: Partial<{[key in ChartType]: string}> = { + [ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION]: 'transaction.duration', + [ChartType.SLACK_PERFORMANCE_FUNCTION_REGRESSION]: 'function.duration', + }; + + const defaultTransform = stats => stats; + + const transformFunctionStats = (stats: any) => { + const rawData = stats?.data?.data?.find(({axis}) => axis === 'p95()'); + const timestamps = stats?.data?.timestamps; + if (!timestamps) { + return []; + } + return timestamps.map((timestamp, i) => [timestamp, [{count: rawData.values[i]}]]); + }; + + // Mapping from BreakpointType to transformation functions + const transformFunction: Partial<{[key in ChartType]: (arg: any) => EventsStatsData}> = + { + [ChartType.SLACK_PERFORMANCE_ENDPOINT_REGRESSION]: defaultTransform, + [ChartType.SLACK_PERFORMANCE_FUNCTION_REGRESSION]: transformFunctionStats, + }; + + const transformedSeries = transformEventStats( + transformFunction[chartType]!(percentileData), + generateTrendFunctionAsString(TrendFunctionField.P95, trendFunctionName[chartType]!) + ); const intervalSeries = getIntervalLine( theme, diff --git a/static/app/components/events/eventStatisticalDetector/functionBreakpointChart.tsx b/static/app/components/events/eventStatisticalDetector/functionBreakpointChart.tsx index 7f2fc8f7d67901..4103edf7961754 100644 --- a/static/app/components/events/eventStatisticalDetector/functionBreakpointChart.tsx +++ b/static/app/components/events/eventStatisticalDetector/functionBreakpointChart.tsx @@ -1,6 +1,7 @@ -import {useEffect, useMemo} from 'react'; +import {useEffect} from 'react'; import * as Sentry from '@sentry/react'; +import {ChartType} from 'sentry/chartcuterie/types'; import Chart from 'sentry/components/events/eventStatisticalDetector/lineChart'; import {DataSection} from 'sentry/components/events/styles'; import type {Event} from 'sentry/types/event'; @@ -8,7 +9,6 @@ import {defined} from 'sentry/utils'; import {useProfileEventsStats} from 'sentry/utils/profiling/hooks/useProfileEventsStats'; import {useRelativeDateTime} from 'sentry/utils/profiling/hooks/useRelativeDateTime'; import type {NormalizedTrendsTransaction} from 'sentry/views/performance/trends/types'; -import transformEventStats from 'sentry/views/performance/trends/utils/transformEventStats'; import {RELATIVE_DAYS_WINDOW} from './consts'; @@ -75,18 +75,6 @@ function EventFunctionBreakpointChartInner({ yAxes: SERIES, }); - const p95Series = useMemo(() => { - const rawData = functionStats?.data?.data?.find(({axis}) => axis === 'p95()'); - const timestamps = functionStats?.data?.timestamps; - if (!timestamps) { - return []; - } - return transformEventStats( - timestamps.map((timestamp, i) => [timestamp, [{count: rawData.values[i]}]]), - 'p95(function.duration)' - ); - }, [functionStats]); - const normalizedOccurrenceEvent = { aggregate_range_1: evidenceData.aggregateRange1 / 1e6, aggregate_range_2: evidenceData.aggregateRange2 / 1e6, @@ -96,9 +84,10 @@ function EventFunctionBreakpointChartInner({ return ( ); diff --git a/static/app/components/events/eventStatisticalDetector/lineChart.tsx b/static/app/components/events/eventStatisticalDetector/lineChart.tsx index b274c2e32ee24e..9988f34fc99b27 100644 --- a/static/app/components/events/eventStatisticalDetector/lineChart.tsx +++ b/static/app/components/events/eventStatisticalDetector/lineChart.tsx @@ -1,37 +1,35 @@ import {useMemo} from 'react'; import {useTheme} from '@emotion/react'; +import type {FunctionRegressionPercentileData} from 
'sentry/chartcuterie/performance'; +import type {ChartType} from 'sentry/chartcuterie/types'; import ChartZoom from 'sentry/components/charts/chartZoom'; import {LineChart as EChartsLineChart} from 'sentry/components/charts/lineChart'; import getBreakpointChartOptionsFromData from 'sentry/components/events/eventStatisticalDetector/breakpointChartOptions'; -import type {EventsStatsData, PageFilters} from 'sentry/types'; -import type {Series} from 'sentry/types/echarts'; +import type {PageFilters} from 'sentry/types'; +import type {EventsStatsData} from 'sentry/types/organization'; import useRouter from 'sentry/utils/useRouter'; import type {NormalizedTrendsTransaction} from 'sentry/views/performance/trends/types'; interface ChartProps { + chartType: ChartType; datetime: PageFilters['datetime']; evidenceData: NormalizedTrendsTransaction; - // TODO @athena: Refactor functionBreakpointChart to use percentileData - percentileData?: EventsStatsData; - percentileSeries?: Series[]; + percentileData: EventsStatsData | FunctionRegressionPercentileData; + trendFunctionName?: string; } -function LineChart({ - datetime, - percentileData, - percentileSeries, - evidenceData, -}: ChartProps) { +function LineChart({datetime, percentileData, evidenceData, chartType}: ChartProps) { const theme = useTheme(); const router = useRouter(); const {series, chartOptions} = useMemo(() => { return getBreakpointChartOptionsFromData( - {percentileData, percentileSeries, evidenceData}, + {percentileData, evidenceData}, + chartType, theme ); - }, [percentileData, percentileSeries, evidenceData, theme]); + }, [percentileData, evidenceData, chartType, theme]); return ( diff --git a/static/app/utils/profiling/hooks/useProfileEventsStats.tsx b/static/app/utils/profiling/hooks/useProfileEventsStats.tsx index 9e4ebf9221a898..6841a60ea8c317 100644 --- a/static/app/utils/profiling/hooks/useProfileEventsStats.tsx +++ b/static/app/utils/profiling/hooks/useProfileEventsStats.tsx @@ -1,10 +1,8 @@ import {useMemo} from 'react'; import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse'; -import type {EventsStatsSeries, PageFilters} from 'sentry/types'; -import {defined} from 'sentry/utils'; -import {getAggregateAlias} from 'sentry/utils/discover/fields'; -import {makeFormatTo} from 'sentry/utils/profiling/units/units'; +import type {PageFilters} from 'sentry/types'; +import {transformStatsResponse} from 'sentry/utils/profiling/hooks/utils'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; @@ -73,122 +71,3 @@ export function useProfileEventsStats({ ...rest, }; } - -export function transformStatsResponse( - dataset: 'discover' | 'profiles' | 'profileFunctions', - yAxes: readonly F[], - rawData: any -): EventsStatsSeries { - // the events stats endpoint has a legacy response format so here we transform it - // into the proposed update for forward compatibility and ease of use - - if (yAxes.length === 0) { - return { - data: [], - meta: { - dataset, - end: 0, - start: 0, - }, - timestamps: [], - }; - } - - if (yAxes.length === 1) { - const {series, meta, timestamps} = transformSingleSeries(dataset, yAxes[0], rawData); - return { - data: [series], - meta, - timestamps, - }; - } - - const data: EventsStatsSeries['data'] = []; - let meta: EventsStatsSeries['meta'] = { - dataset, - end: -1, - start: -1, - }; - let timestamps: EventsStatsSeries['timestamps'] = []; - - let firstAxis = true; - 
- for (const yAxis of yAxes) { - const dataForYAxis = rawData[yAxis]; - if (!defined(dataForYAxis)) { - continue; - } - const transformed = transformSingleSeries(dataset, yAxis, dataForYAxis); - - if (firstAxis) { - meta = transformed.meta; - timestamps = transformed.timestamps; - } else if ( - meta.start !== transformed.meta.start || - meta.end !== transformed.meta.end - ) { - throw new Error('Mismatching start/end times'); - } else if ( - timestamps.length !== transformed.timestamps.length || - timestamps.some((ts, i) => ts !== transformed.timestamps[i]) - ) { - throw new Error('Mismatching timestamps'); - } - - data.push(transformed.series); - - firstAxis = false; - } - - return { - data, - meta, - timestamps, - }; -} - -export function transformSingleSeries( - dataset: 'discover' | 'profiles' | 'profileFunctions', - yAxis: F, - rawSeries: any, - label?: string -) { - const type = - rawSeries.meta.fields[yAxis] ?? rawSeries.meta.fields[getAggregateAlias(yAxis)]; - const formatter = - type === 'duration' - ? makeFormatTo( - rawSeries.meta.units[yAxis] ?? - rawSeries.meta.units[getAggregateAlias(yAxis)] ?? - 'nanoseconds', - 'milliseconds' - ) - : type === 'string' - ? value => value || '' - : value => value; - - const series: EventsStatsSeries['data'][number] = { - axis: yAxis, - values: [], - label, - }; - const meta: EventsStatsSeries['meta'] = { - dataset, - end: rawSeries.end, - start: rawSeries.start, - }; - const timestamps: EventsStatsSeries['timestamps'] = []; - - for (let i = 0; i < rawSeries.data.length; i++) { - const [timestamp, value] = rawSeries.data[i]; - // the api has this awkward structure for legacy reason - series.values.push(formatter(value[0].count as number)); - timestamps.push(timestamp); - } - - return { - series, - meta, - timestamps, - }; -} diff --git a/static/app/utils/profiling/hooks/useProfileTopEventsStats.tsx b/static/app/utils/profiling/hooks/useProfileTopEventsStats.tsx index ccfce76a67c8b1..96f8b512c07340 100644 --- a/static/app/utils/profiling/hooks/useProfileTopEventsStats.tsx +++ b/static/app/utils/profiling/hooks/useProfileTopEventsStats.tsx @@ -3,7 +3,7 @@ import {useMemo} from 'react'; import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse'; import type {EventsStatsSeries, PageFilters} from 'sentry/types'; import {defined} from 'sentry/utils'; -import {transformSingleSeries} from 'sentry/utils/profiling/hooks/useProfileEventsStats'; +import {transformSingleSeries} from 'sentry/utils/profiling/hooks/utils'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; diff --git a/static/app/utils/profiling/hooks/utils.tsx b/static/app/utils/profiling/hooks/utils.tsx index 65ab1eec465bb7..752ec270119c6a 100644 --- a/static/app/utils/profiling/hooks/utils.tsx +++ b/static/app/utils/profiling/hooks/utils.tsx @@ -1,5 +1,8 @@ import {t} from 'sentry/locale'; +import type {EventsStatsSeries} from 'sentry/types'; import {defined} from 'sentry/utils'; +import {getAggregateAlias} from 'sentry/utils/discover/fields'; +import {makeFormatTo} from 'sentry/utils/profiling/units/units'; import type {Sort} from './types'; @@ -36,3 +39,122 @@ export function formatError(error: any): string | null { return t('An unknown error occurred.'); } + +export function transformStatsResponse( + dataset: 'discover' | 'profiles' | 'profileFunctions', + yAxes: readonly F[], + rawData: any +): EventsStatsSeries { + // the events stats 
endpoint has a legacy response format so here we transform it + // into the proposed update for forward compatibility and ease of use + + if (yAxes.length === 0) { + return { + data: [], + meta: { + dataset, + end: 0, + start: 0, + }, + timestamps: [], + }; + } + + if (yAxes.length === 1) { + const {series, meta, timestamps} = transformSingleSeries(dataset, yAxes[0], rawData); + return { + data: [series], + meta, + timestamps, + }; + } + + const data: EventsStatsSeries['data'] = []; + let meta: EventsStatsSeries['meta'] = { + dataset, + end: -1, + start: -1, + }; + let timestamps: EventsStatsSeries['timestamps'] = []; + + let firstAxis = true; + + for (const yAxis of yAxes) { + const dataForYAxis = rawData[yAxis]; + if (!defined(dataForYAxis)) { + continue; + } + const transformed = transformSingleSeries(dataset, yAxis, dataForYAxis); + + if (firstAxis) { + meta = transformed.meta; + timestamps = transformed.timestamps; + } else if ( + meta.start !== transformed.meta.start || + meta.end !== transformed.meta.end + ) { + throw new Error('Mismatching start/end times'); + } else if ( + timestamps.length !== transformed.timestamps.length || + timestamps.some((ts, i) => ts !== transformed.timestamps[i]) + ) { + throw new Error('Mismatching timestamps'); + } + + data.push(transformed.series); + + firstAxis = false; + } + + return { + data, + meta, + timestamps, + }; +} + +export function transformSingleSeries( + dataset: 'discover' | 'profiles' | 'profileFunctions', + yAxis: F, + rawSeries: any, + label?: string +) { + const type = + rawSeries.meta.fields[yAxis] ?? rawSeries.meta.fields[getAggregateAlias(yAxis)]; + const formatter = + type === 'duration' + ? makeFormatTo( + rawSeries.meta.units[yAxis] ?? + rawSeries.meta.units[getAggregateAlias(yAxis)] ?? + 'nanoseconds', + 'milliseconds' + ) + : type === 'string' + ? 
value => value || '' + : value => value; + + const series: EventsStatsSeries['data'][number] = { + axis: yAxis, + values: [], + label, + }; + const meta: EventsStatsSeries['meta'] = { + dataset, + end: rawSeries.end, + start: rawSeries.start, + }; + const timestamps: EventsStatsSeries['timestamps'] = []; + + for (let i = 0; i < rawSeries.data.length; i++) { + const [timestamp, value] = rawSeries.data[i]; + // the api has this awkward structure for legacy reason + series.values.push(formatter(value[0].count as number)); + timestamps.push(timestamp); + } + + return { + series, + meta, + timestamps, + }; +} From 93405b6da3f360c6e08b199862927e24e3c4ca5d Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 13 May 2024 14:41:54 -0400 Subject: [PATCH 357/376] fix(feedback): fix types for checking if a feedback has a screenshot (#70779) follow up to https://github.com/getsentry/sentry/pull/70778 - just changing the type name --- static/app/components/feedback/list/feedbackListItem.tsx | 2 +- static/app/components/feedback/useFeedbackListQueryKey.tsx | 2 +- static/app/types/group.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/static/app/components/feedback/list/feedbackListItem.tsx b/static/app/components/feedback/list/feedbackListItem.tsx index 73099836fe69c6..8ca33e99d08548 100644 --- a/static/app/components/feedback/list/feedbackListItem.tsx +++ b/static/app/components/feedback/list/feedbackListItem.tsx @@ -56,7 +56,7 @@ const FeedbackListItem = forwardRef( const isCrashReport = feedbackItem.metadata.source === 'crash_report_embed_form'; const isUserReportWithError = feedbackItem.metadata.source === 'user_report_envelope'; - const hasAttachments = feedbackItem.hasAttachments; + const hasAttachments = feedbackItem.latestEventHasAttachments; const hasComments = feedbackItem.numComments > 0; const theme = isOpen || config.theme === 'dark' ? 
darkTheme : lightTheme; diff --git a/static/app/components/feedback/useFeedbackListQueryKey.tsx b/static/app/components/feedback/useFeedbackListQueryKey.tsx index 74aa34e1fa68b0..bd6b239f018c5c 100644 --- a/static/app/components/feedback/useFeedbackListQueryKey.tsx +++ b/static/app/components/feedback/useFeedbackListQueryKey.tsx @@ -94,7 +94,7 @@ export default function useFeedbackListQueryKey({ 'pluginIssues', // Gives us plugin issues available 'integrationIssues', // Gives us integration issues available 'sentryAppIssues', // Gives us Sentry app issues available - 'hasAttachments', // Gives us whether the feedback has screenshots + 'latestEventHasAttachments', // Gives us whether the feedback has screenshots ], shortIdLookup: 0, query: `issue.category:feedback status:${mailbox} ${fixedQueryView.query}`, diff --git a/static/app/types/group.tsx b/static/app/types/group.tsx index b155ab26f8b53c..8f8e896a354a17 100644 --- a/static/app/types/group.tsx +++ b/static/app/types/group.tsx @@ -804,10 +804,10 @@ export interface BaseGroup { title: string; type: EventOrGroupType; userReportCount: number; - hasAttachments?: boolean; inbox?: InboxDetails | null | false; integrationIssues?: ExternalIssue[]; latestEvent?: Event; + latestEventHasAttachments?: boolean; owners?: SuggestedOwner[] | null; sentryAppIssues?: PlatformExternalIssue[]; substatus?: GroupSubstatus | null; From 52648937e5efc299de8d480ae6864ff888bc10fa Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Mon, 13 May 2024 11:47:12 -0700 Subject: [PATCH 358/376] fix(assignee-badge): Fix storybook issues (#70775) Fixes JAVASCRIPT-2T1X and an issue where the title for the AssigneeBadge's storybook rendered incorrectly instead of as `AssigneeBadge` --- .../app/components/assigneeBadge.stories.tsx | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/static/app/components/assigneeBadge.stories.tsx b/static/app/components/assigneeBadge.stories.tsx index 7b78f2627cad5d..e2e990ab7b117e 100644 --- a/static/app/components/assigneeBadge.stories.tsx +++ b/static/app/components/assigneeBadge.stories.tsx @@ -1,12 +1,13 @@ import {Fragment, useState} from 'react'; +import {uuid4} from '@sentry/utils'; import {AssigneeBadge} from 'sentry/components/assigneeBadge'; import storyBook from 'sentry/stories/storyBook'; -import type {Actor} from 'sentry/types'; +import type {Actor, Team} from 'sentry/types'; import {useUser} from 'sentry/utils/useUser'; import {useUserTeams} from 'sentry/utils/useUserTeams'; -export default storyBook(AssigneeBadge, story => { +export default storyBook('AssigneeBadge', story => { story('User Assignee', () => { const user = useUser(); const [chevron1Toggle, setChevron1Toggle] = useState<'up' | 'down'>('down'); @@ -39,10 +40,29 @@ export default storyBook(AssigneeBadge, story => { const [chevron1Toggle, setChevron1Toggle] = useState<'up' | 'down'>('down'); const [chevron2Toggle, setChevron2Toggle] = useState<'up' | 'down'>('down'); + const team: Team = teams.length + ? 
teams[0] + : { + id: '1', + slug: 'team-slug', + name: 'Team Name', + access: ['team:read'], + teamRole: null, + isMember: true, + memberCount: 0, + avatar: {avatarType: 'letter_avatar', avatarUuid: uuid4()}, + flags: { + 'idp:provisioned': false, + }, + externalTeams: [], + hasAccess: false, + isPending: false, + }; + const teamActor: Actor = { type: 'team', - id: teams[0].id, - name: teams[0].name, + id: team.id, + name: team.name, }; return ( From d0993758949430abedcf99301311fa3a0ba27c97 Mon Sep 17 00:00:00 2001 From: Matej Minar Date: Mon, 13 May 2024 20:55:41 +0200 Subject: [PATCH 359/376] chore(metrics): Remove metrics-stats flag on backend (#70764) Closes https://github.com/getsentry/sentry/issues/70724 Requires https://github.com/getsentry/getsentry/pull/13934 --- src/sentry/api/endpoints/organization_stats_v2.py | 2 +- src/sentry/conf/server.py | 2 -- src/sentry/features/temporary.py | 1 - tests/snuba/api/endpoints/test_organization_stats_v2.py | 6 +++--- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/src/sentry/api/endpoints/organization_stats_v2.py b/src/sentry/api/endpoints/organization_stats_v2.py index 31ae658a810996..e43595649c3fb7 100644 --- a/src/sentry/api/endpoints/organization_stats_v2.py +++ b/src/sentry/api/endpoints/organization_stats_v2.py @@ -167,7 +167,7 @@ def get(self, request: Request, organization) -> Response: """ with self.handle_query_errors(): - if features.has("organizations:metrics-stats", organization): + if features.has("organizations:custom-metrics", organization): if ( request.GET.get("category") == "metrics" or request.GET.get("category") == "metricSecond" diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 930fd60b18657a..215d51c246806b 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1521,8 +1521,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:ddm-sidebar-item-hidden": False, # Enables import of metric dashboards "organizations:ddm-dashboard-import": False, - # Enables category "metrics" in stats_v2 endpoint - "organizations:metrics-stats": False, # Enable the default alert at project creation to be the high priority alert "organizations:default-high-priority-alerts": False, # Enables automatically deriving of code mappings diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index 395cd93c605072..605a4c4912c667 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -61,7 +61,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:ddm-dashboard-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:custom-metrics-experimental", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:ddm-sidebar-item-hidden", OrganizationFeature, FeatureHandlerStrategy.REMOTE) - manager.add("organizations:metrics-stats", OrganizationFeature, FeatureHandlerStrategy.REMOTE) manager.add("organizations:default-high-priority-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) manager.add("organizations:derive-code-mappings", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) manager.add("organizations:device-class-synthesis", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/tests/snuba/api/endpoints/test_organization_stats_v2.py b/tests/snuba/api/endpoints/test_organization_stats_v2.py index dca7f15a90c9a6..37e3ed82b6e03a 100644 --- a/tests/snuba/api/endpoints/test_organization_stats_v2.py +++ 
b/tests/snuba/api/endpoints/test_organization_stats_v2.py @@ -951,7 +951,7 @@ def setUp(self): ) @freeze_time("2021-03-14T12:27:28.303Z") - @with_feature("organizations:metrics-stats") + @with_feature("organizations:custom-metrics") def test_metrics_category(self): response = self.do_request( { @@ -974,7 +974,7 @@ def test_metrics_category(self): } @freeze_time("2021-03-14T12:27:28.303Z") - @with_feature("organizations:metrics-stats") + @with_feature("organizations:custom-metrics") def test_metrics_group_by_project(self): response = self.do_request( { @@ -1007,7 +1007,7 @@ def test_metrics_group_by_project(self): } @freeze_time("2021-03-14T12:27:28.303Z") - @with_feature("organizations:metrics-stats") + @with_feature("organizations:custom-metrics") def test_metrics_multiple_group_by(self): response = self.do_request( { From f5b1a7738d8c13de2636438824caa2a238b7970c Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 13 May 2024 15:00:36 -0400 Subject: [PATCH 360/376] ref(tsc): convert adminSettings.tsx to FC (#70792) ref https://github.com/getsentry/frontend-tsc/issues/2 converts this file into an FC and uses `useApiQuery` instead of `DeprecatedAsync` --- static/app/views/admin/adminSettings.tsx | 299 ++++++++++++----------- 1 file changed, 152 insertions(+), 147 deletions(-) diff --git a/static/app/views/admin/adminSettings.tsx b/static/app/views/admin/adminSettings.tsx index 384490e1840be7..19f3d932adc2a6 100644 --- a/static/app/views/admin/adminSettings.tsx +++ b/static/app/views/admin/adminSettings.tsx @@ -1,9 +1,11 @@ import Feature from 'sentry/components/acl/feature'; import Form from 'sentry/components/forms/form'; +import LoadingError from 'sentry/components/loadingError'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; import Panel from 'sentry/components/panels/panel'; import PanelHeader from 'sentry/components/panels/panelHeader'; import {t} from 'sentry/locale'; -import DeprecatedAsyncView from 'sentry/views/deprecatedAsyncView'; +import {useApiQuery} from 'sentry/utils/queryClient'; import {getOption, getOptionField} from './options'; @@ -67,165 +69,168 @@ type FieldDef = { value: string | undefined; }; -type State = DeprecatedAsyncView['state'] & { - data: Record<string, FieldDef>; -}; +export default function AdminSettings() { + const {data, isLoading, isError} = useApiQuery<Record<string, FieldDef>>( + ['/internal/options/'], + { + staleTime: 0, + } + ); -export default class AdminSettings extends DeprecatedAsyncView<{}, State> { - get endpoint() { - return '/internal/options/'; + if (isError) { + return <LoadingError />; } - getEndpoints(): ReturnType<DeprecatedAsyncView['getEndpoints']> { - return [['data', this.endpoint]]; + if (isLoading) { + return <LoadingIndicator />; } - renderBody() { - const {data} = this.state; - - const initialData = {}; - const fields = {}; - for (const key of optionsAvailable) { - // TODO(dcramer): we should not be mutating options - const option = data[key] ?? {field: {}, value: undefined}; + const initialData = {}; + const fields = {}; + for (const key of optionsAvailable) { + // TODO(dcramer): we should not be mutating options + const option = data[key] ?? {field: {}, value: undefined}; if (option.value === undefined || option.value === '') { const defn = getOption(key); initialData[key] = defn.defaultValue ? 
defn.defaultValue() : ''; + } else { + initialData[key] = option.value; } + fields[key] = getOptionField(key, option.field); + } + + return ( +
+    <div>
+      <h3>{t('Settings')}</h3>
+
-    return (
-      <div>
-        <h3>{t('Settings')}</h3>
+
+ + {t('General')} + {fields['system.url-prefix']} + {fields['system.admin-email']} + {fields['system.support-email']} + {fields['system.security-email']} + {fields['system.rate-limit']} + - + + {t('Security & Abuse')} + {fields['auth.allow-registration']} + {fields['auth.ip-rate-limit']} + {fields['auth.user-rate-limit']} + {fields['api.rate-limit.org-create']} + + + + {t('Beacon')} + {fields['beacon.anonymous']} + + + - General - {fields['system.url-prefix']} - {fields['system.admin-email']} - {fields['system.support-email']} - {fields['system.security-email']} - {fields['system.rate-limit']} + {t('Performance Issues - All')} + {fields['performance.issues.all.problem-detection']} - - Security & Abuse - {fields['auth.allow-registration']} - {fields['auth.ip-rate-limit']} - {fields['auth.user-rate-limit']} - {fields['api.rate-limit.org-create']} + {t('Performance Issues - Detectors')} + {fields['performance.issues.n_plus_one_db.problem-creation']} + {fields['performance.issues.n_plus_one_db_ext.problem-creation']} + {fields['performance.issues.n_plus_one_db.count_threshold']} + {fields['performance.issues.n_plus_one_db.duration_threshold']} - - Beacon - {fields['beacon.anonymous']} + {t('Performance Issues - Consecutive DB Detector')} + {fields['performance.issues.consecutive_db.problem-creation']} + {fields['performance.issues.consecutive_db.la-rollout']} + {fields['performance.issues.consecutive_db.ea-rollout']} + {fields['performance.issues.consecutive_db.ga-rollout']} - - - - Performance Issues - All - {fields['performance.issues.all.problem-detection']} - - - Performance Issues - Detectors - {fields['performance.issues.n_plus_one_db.problem-creation']} - {fields['performance.issues.n_plus_one_db_ext.problem-creation']} - {fields['performance.issues.n_plus_one_db.count_threshold']} - {fields['performance.issues.n_plus_one_db.duration_threshold']} - - - Performance Issues - Consecutive DB Detector - {fields['performance.issues.consecutive_db.problem-creation']} - {fields['performance.issues.consecutive_db.la-rollout']} - {fields['performance.issues.consecutive_db.ea-rollout']} - {fields['performance.issues.consecutive_db.ga-rollout']} - - - Performance Issues - N+1 API Calls Detector - {fields['performance.issues.n_plus_one_api_calls.problem-creation']} - {fields['performance.issues.n_plus_one_api_calls.la-rollout']} - {fields['performance.issues.n_plus_one_api_calls.ea-rollout']} - {fields['performance.issues.n_plus_one_api_calls.ga-rollout']} - - - Performance Issues - Compressed Assets Detector - {fields['performance.issues.compressed_assets.problem-creation']} - {fields['performance.issues.compressed_assets.la-rollout']} - {fields['performance.issues.compressed_assets.ea-rollout']} - {fields['performance.issues.compressed_assets.ga-rollout']} - - - Performance Issues - File IO on Main Thread - {fields['performance.issues.file_io_main_thread.problem-creation']} - - - Performance Issues - Slow DB Span Detector - {fields['performance.issues.slow_db_query.problem-creation']} - {fields['performance.issues.slow_db_query.la-rollout']} - {fields['performance.issues.slow_db_query.ea-rollout']} - {fields['performance.issues.slow_db_query.ga-rollout']} - - - - Performance Issues - Large Render Blocking Asset Detector - - {fields['performance.issues.render_blocking_assets.problem-creation']} - {fields['performance.issues.render_blocking_assets.la-rollout']} - {fields['performance.issues.render_blocking_assets.ea-rollout']} - {fields['performance.issues.render_blocking_assets.ga-rollout']} - - - 
Performance Issues - MN+1 DB Detector - {fields['performance.issues.m_n_plus_one_db.problem-creation']} - {fields['performance.issues.m_n_plus_one_db.la-rollout']} - {fields['performance.issues.m_n_plus_one_db.ea-rollout']} - {fields['performance.issues.m_n_plus_one_db.ga-rollout']} - - - - Performance Issues - Consecutive HTTP Span Detector - - {fields['performance.issues.consecutive_http.max_duration_between_spans']} - {fields['performance.issues.consecutive_http.consecutive_count_threshold']} - {fields['performance.issues.consecutive_http.span_duration_threshold']} - - - Performance Issues - Large HTTP Payload Detector - {fields['performance.issues.large_http_payload.size_threshold']} - - - - Profiling Issues - Block Main Thread Detector Ingest - - {fields['profile.issues.blocked_main_thread-ingest.la-rollout']} - {fields['profile.issues.blocked_main_thread-ingest.ea-rollout']} - {fields['profile.issues.blocked_main_thread-ingest.ga-rollout']} - - - - Profiling Issues - Block Main Thread Detector Post Process Group - - {fields['profile.issues.blocked_main_thread-ppg.la-rollout']} - {fields['profile.issues.blocked_main_thread-ppg.ea-rollout']} - {fields['profile.issues.blocked_main_thread-ppg.ga-rollout']} - - - - - View Hierarchy - - - -
- ); - } + + {t('Performance Issues - N+1 API Calls Detector')} + {fields['performance.issues.n_plus_one_api_calls.problem-creation']} + {fields['performance.issues.n_plus_one_api_calls.la-rollout']} + {fields['performance.issues.n_plus_one_api_calls.ea-rollout']} + {fields['performance.issues.n_plus_one_api_calls.ga-rollout']} + + + + {t('Performance Issues - Compressed Assets Detector')} + + {fields['performance.issues.compressed_assets.problem-creation']} + {fields['performance.issues.compressed_assets.la-rollout']} + {fields['performance.issues.compressed_assets.ea-rollout']} + {fields['performance.issues.compressed_assets.ga-rollout']} + + + {t('Performance Issues - File IO on Main Thread')} + {fields['performance.issues.file_io_main_thread.problem-creation']} + + + {t('Performance Issues - Slow DB Span Detector')} + {fields['performance.issues.slow_db_query.problem-creation']} + {fields['performance.issues.slow_db_query.la-rollout']} + {fields['performance.issues.slow_db_query.ea-rollout']} + {fields['performance.issues.slow_db_query.ga-rollout']} + + + + {t('Performance Issues - Large Render Blocking Asset Detector')} + + {fields['performance.issues.render_blocking_assets.problem-creation']} + {fields['performance.issues.render_blocking_assets.la-rollout']} + {fields['performance.issues.render_blocking_assets.ea-rollout']} + {fields['performance.issues.render_blocking_assets.ga-rollout']} + + + {t('Performance Issues - MN+1 DB Detector')} + {fields['performance.issues.m_n_plus_one_db.problem-creation']} + {fields['performance.issues.m_n_plus_one_db.la-rollout']} + {fields['performance.issues.m_n_plus_one_db.ea-rollout']} + {fields['performance.issues.m_n_plus_one_db.ga-rollout']} + + + + {t('Performance Issues - Consecutive HTTP Span Detector')} + + {fields['performance.issues.consecutive_http.max_duration_between_spans']} + {fields['performance.issues.consecutive_http.consecutive_count_threshold']} + {fields['performance.issues.consecutive_http.span_duration_threshold']} + + + + {t('Performance Issues - Large HTTP Payload Detector')} + + {fields['performance.issues.large_http_payload.size_threshold']} + + + + {t('Profiling Issues - Block Main Thread Detector Ingest')} + + {fields['profile.issues.blocked_main_thread-ingest.la-rollout']} + {fields['profile.issues.blocked_main_thread-ingest.ea-rollout']} + {fields['profile.issues.blocked_main_thread-ingest.ga-rollout']} + + + + {t('Profiling Issues - Block Main Thread Detector Post Process Group')} + + {fields['profile.issues.blocked_main_thread-ppg.la-rollout']} + {fields['profile.issues.blocked_main_thread-ppg.ea-rollout']} + {fields['profile.issues.blocked_main_thread-ppg.ga-rollout']} + + + + + {t('View Hierarchy')} + + + +
+ ); } From 80fae32d5e44c51a24011af9043f6c9a7fb24bfd Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 13 May 2024 15:17:03 -0400 Subject: [PATCH 361/376] fix(trace-explorer): Merge parallel spans (#70681) Parallel spans can create a deeply nested timeline. This tries to merge them together for a cleaner flattened visualization. We're explicitly not merging root spans as that can create strange visualizations. --- .../api/endpoints/organization_traces.py | 35 ++- .../api/endpoints/test_organization_traces.py | 220 ++++++++++++++++++ 2 files changed, 244 insertions(+), 11 deletions(-) diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index ea3518afe78d82..42e2c0b8e8c5d9 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -41,6 +41,7 @@ class TraceInterval(TypedDict): kind: Literal["project", "missing", "other"] opCategory: str | None duration: int + isRoot: bool components: NotRequired[list[tuple[int, int]]] @@ -722,7 +723,7 @@ def get_traces_breakdown_projects_query( "precise.start_ts", "precise.finish_ts", ], - orderby=["precise.start_ts", "precise.finish_ts"], + orderby=["precise.start_ts", "-precise.finish_ts"], # limit the number of segments we fetch per trace so a single # large trace does not result in the rest being blank limitby=("trace", int(MAX_SNUBA_RESULTS / len(trace_ids))), @@ -760,10 +761,11 @@ def get_traces_breakdown_categories_query( "transaction", "span.category", "sdk.name", + "parent_span", "precise.start_ts", "precise.finish_ts", ], - orderby=["precise.start_ts", "precise.finish_ts"], + orderby=["precise.start_ts", "-precise.finish_ts"], # limit the number of segments we fetch per trace so a single # large trace does not result in the rest being blank limitby=("trace", int(MAX_SNUBA_RESULTS / len(trace_ids))), @@ -1000,7 +1002,12 @@ def process_breakdowns(data, traces_range): def should_merge(interval_a, interval_b): return ( - interval_a["end"] >= interval_b["start"] + # only merge intervals that have parent spans, i.e.
those that aren't the trace root + not interval_a["isRoot"] + and not interval_b["isRoot"] + # only merge intervals that overlap + and interval_a["end"] >= interval_b["start"] + # only merge intervals that are part of the same service and interval_a["project"] == interval_b["project"] and interval_a["sdkName"] == interval_b["sdkName"] and interval_a["opCategory"] == interval_b["opCategory"] @@ -1032,14 +1039,16 @@ def breakdown_push(trace, interval): "components": [ (last_interval["components"][-1][1], interval["components"][0][0]), ], + "isRoot": False, } ) breakdown.append(interval) def stack_push(trace, interval): - last_interval = stack_peek(trace) - if last_interval and should_merge(last_interval, interval): + for last_interval in reversed(stacks[trace]): + if not should_merge(last_interval, interval): + continue # update the end of this interval and it will # be updated in the breakdown as well last_interval["end"] = max(interval["end"], last_interval["end"]) @@ -1107,7 +1116,14 @@ def stack_clear(trace, until=None): row["quantized.start_ts"] = quantized_start row["quantized.finish_ts"] = quantized_end - data.sort(key=lambda row: (row["quantized.start_ts"], -row["quantized.finish_ts"])) + data.sort( + key=lambda row: ( + row["quantized.start_ts"], + row["precise.start_ts"], + -row["quantized.finish_ts"], + -row["precise.finish_ts"], + ) + ) last_timestamp_per_trace: dict[str, int] = defaultdict(int) @@ -1131,6 +1147,7 @@ def stack_clear(trace, until=None): "end": row["quantized.finish_ts"], "duration": 0, "components": [(row["precise.start_ts"], row["precise.finish_ts"])], + "isRoot": not bool(row.get("parent_span")), } # Clear the stack of any intervals that end before the current interval @@ -1139,11 +1156,6 @@ def stack_clear(trace, until=None): stack_push(trace, cur) - # Clear the stack of any intervals that end before the current interval - # ends. Here we do not need to push them to the breakdowns because - # that time has already be attributed to the most recent interval. - stack_clear(trace, until=cur["end"]) - for trace, trace_range in traces_range.items(): # Check to see if there is still a gap before the trace ends and fill it # with an other interval. 
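The rewritten stack_push above is the heart of this change: instead of comparing a new interval only against the top of the stack, it scans every open interval for one it can merge into, so overlapping sibling ("parallel") spans from the same service collapse into a single row. A minimal standalone sketch of that merge pass, assuming the input is already ordered by start time (roughly what the updated sort key arranges) and using a simplified field set (just project, start, end, isRoot) rather than the full TraceInterval:

    def should_merge(a, b):
        # mirrors the predicate above, with the "same service" check
        # reduced to the project field for brevity
        return (
            not a["isRoot"]
            and not b["isRoot"]
            and a["end"] >= b["start"]
            and a["project"] == b["project"]
        )

    def flatten(intervals):
        merged = []
        for cur in intervals:
            for prev in reversed(merged):
                if should_merge(prev, cur):
                    # extend the existing row instead of nesting a new one
                    prev["end"] = max(prev["end"], cur["end"])
                    break
            else:
                merged.append(dict(cur))
        return merged

    spans = [
        {"project": "foo", "start": 0, "end": 50, "isRoot": False},
        {"project": "foo", "start": 30, "end": 70, "isRoot": False},  # parallel sibling
        {"project": "bar", "start": 40, "end": 60, "isRoot": False},
    ]
    # flatten(spans) ->
    #   [{"project": "foo", "start": 0, "end": 70, "isRoot": False},
    #    {"project": "bar", "start": 40, "end": 60, "isRoot": False}]

Because should_merge rejects anything flagged isRoot, a trace root is never folded into its children, which is the "explicitly not merging root spans" behavior the commit message calls out.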
@@ -1158,6 +1170,7 @@ def stack_clear(trace, until=None): "start": other_start, "end": other_end, "duration": 0, + "isRoot": False, } # Clear the remaining intervals on the stack to find the latest end time diff --git a/tests/sentry/api/endpoints/test_organization_traces.py b/tests/sentry/api/endpoints/test_organization_traces.py index 3c63bddc49eb15..0cf9755d89ade4 100644 --- a/tests/sentry/api/endpoints/test_organization_traces.py +++ b/tests/sentry/api/endpoints/test_organization_traces.py @@ -458,6 +458,7 @@ def test_matching_tag(self): "project": project_1.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[0].timestamp() * 1000), "end": int(timestamps[0].timestamp() * 1000) + 60_100, "kind": "project", @@ -467,6 +468,7 @@ def test_matching_tag(self): "project": project_2.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[1].timestamp() * 1000), "end": int(timestamps[3].timestamp() * 1000) + 30_003, "kind": "project", @@ -513,6 +515,7 @@ def test_matching_tag(self): "project": project_1.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[4].timestamp() * 1000), "end": int(timestamps[4].timestamp() * 1000) + 90_123, "kind": "project", @@ -522,6 +525,7 @@ def test_matching_tag(self): "project": project_2.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[5].timestamp() * 1000), "end": int(timestamps[6].timestamp() * 1000) + 20_006, "kind": "project", @@ -611,6 +615,7 @@ def test_matching_tag_breakdown_with_category(self): "project": project_1.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[4].timestamp() * 1000), "end": int(timestamps[4].timestamp() * 1000) + 90_123, "kind": "project", @@ -620,6 +625,7 @@ def test_matching_tag_breakdown_with_category(self): "project": project_1.slug, "opCategory": "http", "sdkName": "", + "isRoot": False, "start": int(timestamps[7].timestamp() * 1000), "end": int(timestamps[7].timestamp() * 1000) + 1_000, "kind": "project", @@ -629,6 +635,7 @@ def test_matching_tag_breakdown_with_category(self): "project": project_2.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[5].timestamp() * 1000), "end": int(timestamps[6].timestamp() * 1000) + 20_006, "kind": "project", @@ -638,6 +645,7 @@ def test_matching_tag_breakdown_with_category(self): "project": project_1.slug, "opCategory": "db", "sdkName": "", + "isRoot": False, "start": int(timestamps[8].timestamp() * 1000), "end": int(timestamps[8].timestamp() * 1000) + 3_000, "kind": "project", @@ -723,6 +731,7 @@ def test_matching_tag_metrics(self): "project": project_1.slug, "opCategory": None, "sdkName": "sentry.javascript.remix", + "isRoot": False, "start": int(timestamps[10].timestamp() * 1000), "end": int(timestamps[10].timestamp() * 1000) + 40_000, "kind": "project", @@ -732,6 +741,7 @@ def test_matching_tag_metrics(self): "project": project_1.slug, "opCategory": None, "sdkName": "sentry.javascript.node", + "isRoot": False, "start": int(timestamps[11].timestamp() * 1000), "end": int(timestamps[11].timestamp() * 1000) + 10_000, "kind": "project", @@ -808,6 +818,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, 
"precise.finish_ts": 0.1, @@ -825,6 +836,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, ], }, @@ -836,6 +848,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -844,6 +857,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.025, "precise.finish_ts": 0.075, @@ -861,6 +875,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, { "project": "bar", @@ -870,6 +885,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 75, "kind": "project", "duration": 50, + "isRoot": False, }, ], }, @@ -881,6 +897,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -889,6 +906,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.025, "precise.finish_ts": 0.075, @@ -897,6 +915,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "baz", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "baz1", "precise.start_ts": 0.05, "precise.finish_ts": 0.1, @@ -914,6 +933,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "project", "duration": 50, + "isRoot": False, }, { "project": "bar", @@ -923,6 +943,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 75, "kind": "project", "duration": 50, + "isRoot": False, }, { "project": "baz", @@ -932,6 +953,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 50, + "isRoot": False, }, ], }, @@ -943,6 +965,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.025, @@ -951,6 +974,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.05, "precise.finish_ts": 0.075, @@ -968,6 +992,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 25, "kind": "project", "duration": 25, + "isRoot": False, }, { "project": None, @@ -977,6 +1002,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "missing", "duration": 25, + "isRoot": False, }, { "project": "bar", @@ -986,6 +1012,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 75, "kind": "project", "duration": 25, + "isRoot": False, }, ], }, @@ -997,6 +1024,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1005,6 +1033,7 @@ def 
test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo2", "precise.start_ts": 0.025, "precise.finish_ts": 0.075, @@ -1022,6 +1051,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, ], }, @@ -1033,6 +1063,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.075, @@ -1041,6 +1072,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo2", "precise.start_ts": 0.025, "precise.finish_ts": 0.1, @@ -1058,6 +1090,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, ], }, @@ -1069,6 +1102,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.025, @@ -1077,6 +1111,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo2", "precise.start_ts": 0.05, "precise.finish_ts": 0.075, @@ -1094,6 +1129,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 25, "kind": "project", "duration": 25, + "isRoot": False, }, { "project": None, @@ -1103,6 +1139,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "missing", "duration": 25, + "isRoot": False, }, { "project": "foo", @@ -1112,6 +1149,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 75, "kind": "project", "duration": 25, + "isRoot": False, }, ], }, @@ -1123,6 +1161,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1131,6 +1170,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.02, "precise.finish_ts": 0.08, @@ -1139,6 +1179,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "baz", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "baz1", "precise.start_ts": 0.04, "precise.finish_ts": 0.06, @@ -1156,6 +1197,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, { "project": "bar", @@ -1165,6 +1207,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 80, "kind": "project", "duration": 60, + "isRoot": False, }, { "project": "baz", @@ -1174,6 +1217,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 60, "kind": "project", "duration": 20, + "isRoot": False, }, ], }, @@ -1185,6 +1229,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", 
"precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1193,6 +1238,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.025, "precise.finish_ts": 0.05, @@ -1200,6 +1246,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): { "trace": "a" * 32, "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "project": "baz", "transaction": "baz1", "precise.start_ts": 0.05, @@ -1218,6 +1265,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, { "project": "bar", @@ -1227,6 +1275,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "project", "duration": 25, + "isRoot": False, }, { "project": "baz", @@ -1236,6 +1285,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 75, "kind": "project", "duration": 25, + "isRoot": False, }, ], }, @@ -1247,6 +1297,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1255,6 +1306,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.02, "precise.finish_ts": 0.03, @@ -1263,6 +1315,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "baz", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "baz1", "precise.start_ts": 0.05, "precise.finish_ts": 0.075, @@ -1280,6 +1333,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "project", "duration": 50, + "isRoot": False, }, { "project": "bar", @@ -1289,6 +1343,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 30, "kind": "project", "duration": 10, + "isRoot": False, }, { "project": "baz", @@ -1298,6 +1353,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 75, "kind": "project", "duration": 25, + "isRoot": False, }, ], }, @@ -1309,6 +1365,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1317,6 +1374,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.02, "precise.finish_ts": 0.03, @@ -1325,6 +1383,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "baz", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "baz1", "precise.start_ts": 0.04, "precise.finish_ts": 0.06, @@ -1342,6 +1401,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "project", "duration": 50, + "isRoot": False, }, { "project": "bar", @@ -1351,6 +1411,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 30, "kind": "project", "duration": 10, + "isRoot": False, }, { "project": "baz", @@ -1360,6 +1421,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 60, "kind": "project", "duration": 20, + 
"isRoot": False, }, ], }, @@ -1371,6 +1433,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1379,6 +1442,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.01, "precise.finish_ts": 0.02, @@ -1387,6 +1451,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0.03, "precise.finish_ts": 0.04, @@ -1404,6 +1469,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "project", "duration": 50, + "isRoot": False, }, { "project": "bar", @@ -1413,6 +1479,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 20, "kind": "project", "duration": 10, + "isRoot": False, }, ], }, @@ -1424,6 +1491,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1441,6 +1509,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 50, "kind": "project", "duration": 50, + "isRoot": False, }, ], }, @@ -1452,6 +1521,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.05, @@ -1469,6 +1539,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 40, "kind": "project", "duration": 50, + "isRoot": False, }, { "project": None, @@ -1478,6 +1549,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "other", "duration": 50, + "isRoot": False, }, ], }, @@ -1489,6 +1561,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.012, @@ -1497,6 +1570,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0.013, "precise.finish_ts": 0.024, @@ -1505,6 +1579,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0.032, "precise.finish_ts": 0.040, @@ -1522,6 +1597,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 20, "kind": "project", "duration": 23, + "isRoot": False, }, { "project": None, @@ -1531,6 +1607,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 30, "kind": "missing", "duration": 8, + "isRoot": False, }, { "project": "foo", @@ -1540,6 +1617,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 40, "kind": "project", "duration": 8, + "isRoot": False, }, ], }, @@ -1551,6 +1629,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": 
"a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.1, @@ -1559,6 +1638,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "bar", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "bar1", "precise.start_ts": 0.002, "precise.finish_ts": 0.044, @@ -1567,6 +1647,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0.007, "precise.finish_ts": 0.1, @@ -1584,6 +1665,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 100, + "isRoot": False, }, { "project": "bar", @@ -1593,6 +1675,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 40, "kind": "project", "duration": 42, + "isRoot": False, }, ], }, @@ -1604,6 +1687,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0, "precise.finish_ts": 0.051, @@ -1612,6 +1696,7 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "trace": "a" * 32, "project": "foo", "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, "transaction": "foo1", "precise.start_ts": 0.069, "precise.finish_ts": 0.1, @@ -1629,11 +1714,146 @@ def test_matching_tag_metrics_but_no_matching_spans(self): "end": 100, "kind": "project", "duration": 82, + "isRoot": False, }, ], }, id="merges nearby spans", ), + pytest.param( + [ + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.remix", + "transaction": "foo1", + "precise.start_ts": 0, + "precise.finish_ts": 0.1, + }, + { + "trace": "a" * 32, + "project": "bar", + "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, + "transaction": "bar", + "precise.start_ts": 0.02, + "precise.finish_ts": 0.06, + }, + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.remix", + "parent_span": "a" * 16, + "transaction": "foo1", + "precise.start_ts": 0.03, + "precise.finish_ts": 0.07, + }, + { + "trace": "a" * 32, + "project": "bar", + "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, + "transaction": "bar1", + "precise.start_ts": 0.04, + "precise.finish_ts": 0.08, + }, + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.remix", + "parent_span": "a" * 16, + "transaction": "foo1", + "precise.start_ts": 0.05, + "precise.finish_ts": 0.07, + }, + ], + {"a" * 32: (0, 100, 10)}, + { + "a" + * 32: [ + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.remix", + "start": 0, + "end": 100, + "kind": "project", + "duration": 100, + "isRoot": True, + }, + { + "project": "bar", + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": 20, + "end": 80, + "kind": "project", + "duration": 60, + "isRoot": False, + }, + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.remix", + "start": 30, + "end": 70, + "kind": "project", + "duration": 40, + "isRoot": False, + }, + ], + }, + id="merges spans at different depths", + ), + pytest.param( + [ + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": "sentry.javascript.node", + "parent_span": "a" * 16, + "transaction": "foo1", + "precise.start_ts": 0.003, + "precise.finish_ts": 0.097, + }, + { + "trace": "a" * 32, + "project": "foo", + "sdk.name": 
"sentry.javascript.remix", + "parent_span": "a" * 16, + "transaction": "foo1", + "precise.start_ts": 0.002, + "precise.finish_ts": 0.098, + }, + ], + {"a" * 32: (0, 100, 10)}, + { + "a" + * 32: [ + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.remix", + "start": 0, + "end": 100, + "kind": "project", + "duration": 96, + "isRoot": False, + }, + { + "project": "foo", + "opCategory": None, + "sdkName": "sentry.javascript.node", + "start": 0, + "end": 100, + "kind": "project", + "duration": 94, + "isRoot": False, + }, + ], + }, + id="orders spans by precise timestamps", + ), ], ) def test_process_breakdowns(data, traces_range, expected): From c81dd1a88be3bcb78f11404860b60d4dd44c3f1f Mon Sep 17 00:00:00 2001 From: Lyn Nagara <1779792+lynnagara@users.noreply.github.com> Date: Mon, 13 May 2024 12:17:45 -0700 Subject: [PATCH 362/376] test: requires_kafka if snuba subscriptions are created (#70478) required for https://github.com/getsentry/snuba/pull/5868 and https://github.com/getsentry/ops/pull/10392 --- tests/sentry/incidents/test_tasks.py | 4 ++-- tests/sentry/snuba/test_query_subscription_consumer.py | 4 ++-- tests/sentry/snuba/test_subscriptions.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/sentry/incidents/test_tasks.py b/tests/sentry/incidents/test_tasks.py index dcbc81d577676e..39bcb0b2345243 100644 --- a/tests/sentry/incidents/test_tasks.py +++ b/tests/sentry/incidents/test_tasks.py @@ -37,10 +37,10 @@ from sentry.snuba.subscriptions import create_snuba_query, create_snuba_subscription from sentry.testutils.cases import TestCase from sentry.testutils.helpers.datetime import freeze_time -from sentry.testutils.skips import requires_snuba +from sentry.testutils.skips import requires_kafka, requires_snuba from sentry.utils.http import absolute_uri -pytestmark = [pytest.mark.sentry_metrics, requires_snuba] +pytestmark = [pytest.mark.sentry_metrics, requires_snuba, requires_kafka] class BaseIncidentActivityTest(TestCase): diff --git a/tests/sentry/snuba/test_query_subscription_consumer.py b/tests/sentry/snuba/test_query_subscription_consumer.py index 7051e1713ebdbb..0f20e42bcdeeb1 100644 --- a/tests/sentry/snuba/test_query_subscription_consumer.py +++ b/tests/sentry/snuba/test_query_subscription_consumer.py @@ -24,10 +24,10 @@ from sentry.snuba.query_subscriptions.run import QuerySubscriptionStrategyFactory from sentry.snuba.subscriptions import create_snuba_query, create_snuba_subscription from sentry.testutils.cases import TestCase -from sentry.testutils.skips import requires_snuba +from sentry.testutils.skips import requires_kafka, requires_snuba from sentry.utils import json -pytestmark = [requires_snuba] +pytestmark = [requires_snuba, requires_kafka] @pytest.mark.snuba_ci diff --git a/tests/sentry/snuba/test_subscriptions.py b/tests/sentry/snuba/test_subscriptions.py index ab75ee7e587e54..df1e21aefd41c5 100644 --- a/tests/sentry/snuba/test_subscriptions.py +++ b/tests/sentry/snuba/test_subscriptions.py @@ -13,9 +13,9 @@ update_snuba_subscription, ) from sentry.testutils.cases import TestCase -from sentry.testutils.skips import requires_snuba +from sentry.testutils.skips import requires_kafka, requires_snuba -pytestmark = [pytest.mark.sentry_metrics, requires_snuba] +pytestmark = [pytest.mark.sentry_metrics, requires_snuba, requires_kafka] @pytest.mark.snuba_ci From 9ff054d1a35b6e1efe2856829bbc0bbd6b48ccd8 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Mon, 13 May 2024 12:26:57 -0700 Subject: [PATCH 363/376] ref(delayed 
rules): Add instrumentation (#70693) Add instrumentation and logging to the delayed rule processor to measure how long the bulkier functions are taking and how many rules and groups we're processing. Closes https://getsentry.atlassian.net/browse/ALRT-19 and https://github.com/getsentry/team-core-product-foundations/issues/308 (a dupe) as a follow up to https://github.com/getsentry/sentry/pull/69830#pullrequestreview-2036397916 --- .../rules/processing/delayed_processing.py | 66 ++++++++++--------- src/sentry/rules/processing/processor.py | 3 +- 2 files changed, 37 insertions(+), 32 deletions(-) diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index 4cabe5086fe34d..136443124434a8 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -346,7 +346,8 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: condition_groups = get_condition_groups(alert_rules, rules_to_groups) # Step 5: Instantiate each unique condition, and evaluate the relevant # group_ids that apply for that condition - condition_group_results = get_condition_group_results(condition_groups, project) + with metrics.timer("delayed_processing.get_condition_group_results.duration"): + condition_group_results = get_condition_group_results(condition_groups, project) # Step 6: For each rule and group applying to that rule, check if the group # meets the conditions of the rule (basically doing BaseEventFrequencyCondition.passes) rule_to_slow_conditions = get_rule_to_slow_conditions(alert_rules) @@ -363,39 +364,42 @@ def apply_delayed(project_id: int, *args: Any, **kwargs: Any) -> None: now = datetime.now(tz=timezone.utc) parsed_rulegroup_to_event_data = parse_rulegroup_to_event_data(rulegroup_to_event_data) - for rule, group_ids in rules_to_fire.items(): - frequency = rule.data.get("frequency") or Rule.DEFAULT_FREQUENCY - freq_offset = now - timedelta(minutes=frequency) - group_to_groupevent = get_group_to_groupevent( - parsed_rulegroup_to_event_data, project.id, group_ids - ) - for group, groupevent in group_to_groupevent.items(): - rule_statuses = bulk_get_rule_status(alert_rules, group, project) - status = rule_statuses[rule.id] - if status.last_active and status.last_active > freq_offset: - logger.info( - "delayed_processing.last_active", - extra={"last_active": status.last_active, "freq_offset": freq_offset}, + with metrics.timer("delayed_processing.fire_rules.duration"): + for rule, group_ids in rules_to_fire.items(): + frequency = rule.data.get("frequency") or Rule.DEFAULT_FREQUENCY + freq_offset = now - timedelta(minutes=frequency) + group_to_groupevent = get_group_to_groupevent( + parsed_rulegroup_to_event_data, project.id, group_ids + ) + for group, groupevent in group_to_groupevent.items(): + rule_statuses = bulk_get_rule_status(alert_rules, group, project) + status = rule_statuses[rule.id] + if status.last_active and status.last_active > freq_offset: + logger.info( + "delayed_processing.last_active", + extra={"last_active": status.last_active, "freq_offset": freq_offset}, + ) + return + + updated = ( + GroupRuleStatus.objects.filter(id=status.id) + .exclude(last_active__gt=freq_offset) + .update(last_active=now) ) - return - updated = ( - GroupRuleStatus.objects.filter(id=status.id) - .exclude(last_active__gt=freq_offset) - .update(last_active=now) - ) + if not updated: + logger.info("delayed_processing.not_updated", extra={"status_id": status.id}) + return - if not updated: 
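# ---------------------------------------------------------------------------
# Illustrative aside (not part of the patch): a minimal sketch of the
# instrumentation pattern this commit applies, using the sentry.utils.metrics
# helpers the hunks themselves import. `process_rules` is a hypothetical
# stand-in for the timed loop body.
from sentry.utils import metrics

def process_rules() -> None:
    """Hypothetical placeholder for the rule-firing work being measured."""

with metrics.timer("delayed_processing.fire_rules.duration"):
    process_rules()  # elapsed wall-clock time is reported under this key
metrics.incr("delayed_rule.group_added")  # counter bumped once per enqueued group
# ---------------------------------------------------------------------------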
- logger.info("delayed_processing.not_updated", extra={"status_id": status.id}) - return - - notification_uuid = str(uuid.uuid4()) - groupevent = group_to_groupevent[group] - rule_fire_history = history.record(rule, group, groupevent.event_id, notification_uuid) - for callback, futures in activate_downstream_actions( - rule, groupevent, notification_uuid, rule_fire_history - ).values(): - safe_execute(callback, groupevent, futures, _with_transaction=False) + notification_uuid = str(uuid.uuid4()) + groupevent = group_to_groupevent[group] + rule_fire_history = history.record( + rule, group, groupevent.event_id, notification_uuid + ) + for callback, futures in activate_downstream_actions( + rule, groupevent, notification_uuid, rule_fire_history + ).values(): + safe_execute(callback, groupevent, futures, _with_transaction=False) # Step 8: Clean up Redis buffer data hashes_to_delete = [ diff --git a/src/sentry/rules/processing/processor.py b/src/sentry/rules/processing/processor.py index bbb96f1fd716ea..961cc03be31f64 100644 --- a/src/sentry/rules/processing/processor.py +++ b/src/sentry/rules/processing/processor.py @@ -26,7 +26,7 @@ from sentry.rules.conditions.event_frequency import EventFrequencyConditionData from sentry.rules.filters.base import EventFilter from sentry.types.rules import RuleFuture -from sentry.utils import json +from sentry.utils import json, metrics from sentry.utils.hashlib import hash_values from sentry.utils.safe import safe_execute @@ -275,6 +275,7 @@ def enqueue_rule(self, rule: Rule) -> None: field=f"{rule.id}:{self.group.id}", value=value, ) + metrics.incr("delayed_rule.group_added") def apply_rule(self, rule: Rule, status: GroupRuleStatus) -> None: """ From 682c237c5c2ff6b3311fa5ff7325880426a129bb Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Mon, 13 May 2024 16:19:01 -0400 Subject: [PATCH 364/376] chore(trace-explorer): Start passing breakdown slices to traces endpoint (#70804) Trying to refactor this endpoint. Going to be passing an integer number of slices instead of a floating point percentage of the trace for simplicity. The backend will quietly move to using this integer. --- static/app/views/performance/traces/content.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/static/app/views/performance/traces/content.tsx b/static/app/views/performance/traces/content.tsx index 7d9f7881c7b10c..946960a6341667 100644 --- a/static/app/views/performance/traces/content.tsx +++ b/static/app/views/performance/traces/content.tsx @@ -441,6 +441,7 @@ function useTraces({ suggestedQuery, sort, per_page: limit, + breakdownSlices: 40, minBreakdownPercentage: 1 / 40, maxSpansPerTrace: 5, mri, From 8f051f55a9dc5907cf2b7dc020ddb92071119613 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Mon, 13 May 2024 16:25:10 -0400 Subject: [PATCH 365/376] ref(replay): update feature name (#70801) For self-hosted users, replay details's Give Feedback button routes feedback to our `feedback` project on issues: https://github.com/getsentry/sentry/assets/56095982/30a3900a-e723-4e5e-a198-cb0cb9926ca5 This PR updates the feature name so that the title is `Replay Self-Hosted` for more clarity. 
(Does not affect the Zendesk widget for SaaS users)

---
 static/app/components/replays/header/feedbackButton.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/static/app/components/replays/header/feedbackButton.tsx b/static/app/components/replays/header/feedbackButton.tsx
index e66b58da4d02bd..916432caf76caf 100644
--- a/static/app/components/replays/header/feedbackButton.tsx
+++ b/static/app/components/replays/header/feedbackButton.tsx
@@ -11,7 +11,7 @@ const FeedbackButtonHook = HookOrDefault({
 function FeedbackButton() {
   return (
-    [FeedbackButtonHook JSX stripped during extraction]
+    [FeedbackButtonHook JSX stripped during extraction]
   );
 }

From 1c5945c5ea60d64c1310c405e3f54839951b5127 Mon Sep 17 00:00:00 2001
From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com>
Date: Mon, 13 May 2024 16:38:04 -0400
Subject: [PATCH 366/376] ref(screenload): fix display of platformSelector and feedback button (#70803)

---
 static/app/views/performance/mobile/screenload/index.tsx | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/static/app/views/performance/mobile/screenload/index.tsx b/static/app/views/performance/mobile/screenload/index.tsx
index 1e0965a12c1502..c401f811d2112f 100644
--- a/static/app/views/performance/mobile/screenload/index.tsx
+++ b/static/app/views/performance/mobile/screenload/index.tsx
@@ -61,13 +61,13 @@ export default function PageloadModule() {
         />
         {t('Screen Loads')}
-      {organization.features.includes('spans-first-ui') &&
-        project &&
-        isCrossPlatform(project) && }
+        {organization.features.includes('spans-first-ui') &&
+          project &&
+          isCrossPlatform(project) && }

From 3b02cd0220c1f0c95c0a20a219dae9ae91e565de Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Mon, 13 May 2024 16:39:22 -0400
Subject: [PATCH 367/376] feat(insights): plot avg transaction duration on cache sidebar (#70808)

Plot the average transaction duration over the time interval (dashed grey
line) on the transaction duration graph

---
 .../samplePanel/charts/transactionDurationChart.tsx | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx b/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx
index d9ab18c3cb4de8..625d425bcabb20 100644
--- a/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx
+++ b/static/app/views/performance/cache/samplePanel/charts/transactionDurationChart.tsx
@@ -1,11 +1,12 @@
 import {t} from 'sentry/locale';
-import type {EChartHighlightHandler} from 'sentry/types/echarts';
+import type {EChartHighlightHandler, Series} from 'sentry/types/echarts';
 import {decodeScalar} from 'sentry/utils/queryString';
 import {MutableSearch} from 'sentry/utils/tokenizeSearch';
 import useLocationQuery from 'sentry/utils/url/useLocationQuery';
 import {Referrer} from 'sentry/views/performance/cache/referrers';
 import {CHART_HEIGHT} from 'sentry/views/performance/cache/settings';
 import type {DataRow} from 'sentry/views/performance/cache/tables/spanSamplesTable';
+import {AverageValueMarkLine} from 'sentry/views/performance/charts/averageValueMarkLine';
 import {AVG_COLOR} from 'sentry/views/starfish/colors';
 import Chart, {ChartType} from 'sentry/views/starfish/components/chart';
 import ChartPanel from 'sentry/views/starfish/components/chartPanel';
@@ -73,6 +74,14 @@ export function TransactionDurationChart({
     onHighlight?.(highlightedDataPoints, event);
  };

+  const baselineAvgSeries: Series = {
+    seriesName: 'Average',
+    data: [],
+    markLine: AverageValueMarkLine({
+      value: averageTransactionDuration,
+    }),
+  };
+
   return (
     [Chart JSX lost during extraction, together with the mail header lines
     ("From <hash>" and author) that opened the next patch in the series]

Date: Mon, 13 May 2024 16:50:21 -0400
Subject: [PATCH 368/376] fix(feedback): move call for get_latest_event (#70807)

`get_latest_event` should be called for every `item` in `item_list`

---
 src/sentry/api/serializers/models/group_stream.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sentry/api/serializers/models/group_stream.py b/src/sentry/api/serializers/models/group_stream.py
index c80ba5c2ed225d..73db5721329557 100644
--- a/src/sentry/api/serializers/models/group_stream.py
+++ b/src/sentry/api/serializers/models/group_stream.py
@@ -405,8 +405,8 @@ def get_attrs(
         ):
             return self.respond(status=404)

-        latest_event = item.get_latest_event()
         for item in item_list:
+            latest_event = item.get_latest_event()
             num_attachments = EventAttachment.objects.filter(
                 project_id=latest_event.project_id, event_id=latest_event.event_id
             ).count()

From b8eafa515286c15760757edce8ccf18191d781ef Mon Sep 17 00:00:00 2001
From: Josh Ferge
Date: Mon, 13 May 2024 14:06:22 -0700
Subject: [PATCH 369/376] fix(feedback): correct issue data type and add substatus (#70802)

- in evidence_data, is_spam should be a boolean, not a string
- the status change message needs a substatus. didn't see these logs:
- https://cloudlogging.app.goo.gl/ZgZWLu2UqNjdtpBA7

---
 .../feedback/usecases/create_feedback.py      |  5 +-
 .../feedback/usecases/test_create_feedback.py | 87 ++++++++++++++++++-
 2 files changed, 89 insertions(+), 3 deletions(-)

diff --git a/src/sentry/feedback/usecases/create_feedback.py b/src/sentry/feedback/usecases/create_feedback.py
index 74d3b5d64cf788..698dc54267fb9e 100644
--- a/src/sentry/feedback/usecases/create_feedback.py
+++ b/src/sentry/feedback/usecases/create_feedback.py
@@ -20,6 +20,7 @@
 from sentry.models.group import GroupStatus
 from sentry.models.project import Project
 from sentry.signals import first_feedback_received, first_new_feedback_received
+from sentry.types.group import GroupSubStatus
 from sentry.utils import metrics
 from sentry.utils.outcomes import Outcome, track_outcome
 from sentry.utils.safe import get_path
@@ -88,7 +89,7 @@ def make_evidence(feedback, source: FeedbackCreationSource, is_message_spam: boo
     evidence_display.append(IssueEvidence(name="source", value=source.value, important=False))

     if is_message_spam is True:
-        evidence_data["is_spam"] = str(is_message_spam)
+        evidence_data["is_spam"] = is_message_spam
         evidence_display.append(
             IssueEvidence(name="is_spam", value=str(is_message_spam), important=False)
         )
@@ -360,6 +361,6 @@ def auto_ignore_spam_feedbacks(project, issue_fingerprint):
             fingerprint=issue_fingerprint,
             project_id=project.id,
             new_status=GroupStatus.IGNORED,  # we use ignored in the UI for the spam tab
-            new_substatus=None,
+            new_substatus=GroupSubStatus.FOREVER,
         ),
     )

diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py
index 006f7950f0733d..585fc9d50ed917 100644
--- a/tests/sentry/feedback/usecases/test_create_feedback.py
+++ b/tests/sentry/feedback/usecases/test_create_feedback.py
@@ -14,9 +14,10 @@
     fix_for_issue_platform,
     validate_issue_platform_event_schema,
 )
-from sentry.models.group import GroupStatus
+from sentry.models.group import Group, GroupStatus
 from sentry.testutils.helpers import Feature
 from sentry.testutils.pytest.fixtures import django_db_all
+from sentry.types.group import
GroupSubStatus @pytest.fixture @@ -688,3 +689,87 @@ def test_create_feedback_adds_associated_event_id( ] associated_event_id = associated_event_id_evidence[0] if associated_event_id_evidence else None assert associated_event_id == "56b08cf7852c42cbb95e4a6998c66ad6" + + +@django_db_all +def test_create_feedback_spam_detection_adds_field_calls( + default_project, + monkeypatch, +): + with Feature( + { + "organizations:user-feedback-spam-filter-actions": True, + "organizations:user-feedback-spam-filter-ingest": True, + "organizations:issue-platform": True, + "organizations:feedback-ingest": True, + "organizations:feedback-post-process-group": True, + } + ): + event = { + "project_id": default_project.id, + "request": { + "url": "https://sentry.sentry.io/feedback/?statsPeriod=14d", + "headers": { + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36" + }, + }, + "event_id": "56b08cf7852c42cbb95e4a6998c66ad6", + "timestamp": 1698255009.574, + "received": "2021-10-24T22:23:29.574000+00:00", + "environment": "prod", + "release": "frontend@daf1316f209d961443664cd6eb4231ca154db502", + "user": { + "ip_address": "72.164.175.154", + "email": "josh.ferge@sentry.io", + "id": 880461, + "isStaff": False, + "name": "Josh Ferge", + }, + "contexts": { + "feedback": { + "contact_email": "josh.ferge@sentry.io", + "name": "Josh Ferge", + "message": "This is definitely spam", + "replay_id": "3d621c61593c4ff9b43f8490a78ae18e", + "url": "https://sentry.sentry.io/feedback/?statsPeriod=14d", + }, + }, + "breadcrumbs": [], + "platform": "javascript", + } + + def dummy_response(*args, **kwargs): + return ChatCompletion( + id="test", + choices=[ + Choice( + index=0, + message=ChatCompletionMessage( + content=( + "spam" + if "This is definitely spam" in kwargs["messages"][0]["content"] + else "not spam" + ), + role="assistant", + ), + finish_reason="stop", + ) + ], + created=time.time(), + model="gpt3.5-trubo", + object="chat.completion", + ) + + mock_openai = Mock() + mock_openai().chat.completions.create = dummy_response + + monkeypatch.setattr("sentry.llm.providers.openai.OpenAI", mock_openai) + + create_feedback_issue( + event, default_project.id, FeedbackCreationSource.NEW_FEEDBACK_ENVELOPE + ) + + assert Group.objects.all().count() == 1 + group = Group.objects.first() + assert group.status == GroupStatus.IGNORED + assert group.substatus == GroupSubStatus.FOREVER From 7c08bcb3d96aa9e6ccb3396a5cadb7cd0f94eb49 Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Mon, 13 May 2024 14:29:11 -0700 Subject: [PATCH 370/376] chore(hc): Enable stronger typing for a few hybridcloud modules (#70777) `sentry.hybridcloud.*` and `sentry.services.hybrid_cloud.*` aren't quite passing but we can enable these which already are in the mean time. 
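In practice the stricter settings mostly surface as missing return annotations.
A representative before/after, using `save_record` from the `rpcmetrics` hunk
below (illustrative only; the full change touches many signatures):

    # Before: rejected once mypy disallows untyped defs for the module
    def save_record(self, record: RpcMetricRecord):
        for span in self.spans:
            span.records.append(record)

    # After: an explicit `-> None` lets mypy fully check the body and call sites
    def save_record(self, record: RpcMetricRecord) -> None:
        for span in self.spans:
            span.records.append(record)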
--- pyproject.toml | 31 +++++++++++++++++++ .../control_organization_provisioning/impl.py | 4 +-- .../region_organization_provisioning/impl.py | 2 +- src/sentry/services/hybrid_cloud/app/impl.py | 2 +- .../hybrid_cloud/notifications/impl.py | 2 +- .../hybrid_cloud/organization_mapping/impl.py | 2 +- .../hybrid_cloud/project_key/model.py | 2 +- .../services/hybrid_cloud/replica/impl.py | 2 +- src/sentry/services/hybrid_cloud/rpc.py | 2 +- .../services/hybrid_cloud/rpcmetrics.py | 2 +- .../services/hybrid_cloud/user/model.py | 2 +- .../hybrid_cloud/organization/test_service.py | 12 +++---- .../hybrid_cloud/test_hybrid_cloud.py | 6 ++-- .../services/hybrid_cloud/test_rpcmetrics.py | 4 +-- .../services/hybrid_cloud/user/test_impl.py | 16 +++++----- 15 files changed, 61 insertions(+), 30 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 694a2250a6de57..fce15e3c2c596a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -558,6 +558,10 @@ module = [ "sentry.buffer.redis", "sentry.build.*", "sentry.eventstore.reprocessing.redis", + "sentry.hybridcloud", + "sentry.hybridcloud.migrations.*", + "sentry.hybridcloud.options", + "sentry.hybridcloud.rpc_services.*", "sentry.issues", "sentry.issues.analytics", "sentry.issues.apps", @@ -602,6 +606,32 @@ module = [ "sentry.relay.config.metric_extraction", "sentry.reprocessing2", "sentry.runner.*", + "sentry.services.hybrid_cloud.access.*", + "sentry.services.hybrid_cloud.app.*", + "sentry.services.hybrid_cloud.hook.*", + "sentry.services.hybrid_cloud.identity.*", + "sentry.services.hybrid_cloud.integration.*", + "sentry.services.hybrid_cloud.issue.*", + "sentry.services.hybrid_cloud.log.*", + "sentry.services.hybrid_cloud.lost_password_hash.*", + "sentry.services.hybrid_cloud.notifications.*", + "sentry.services.hybrid_cloud.organization_actions.*", + "sentry.services.hybrid_cloud.organization_mapping.*", + "sentry.services.hybrid_cloud.organization_provisioning.*", + "sentry.services.hybrid_cloud.organizationmember_mapping.*", + "sentry.services.hybrid_cloud.orgauthtoken.*", + "sentry.services.hybrid_cloud.pagination", + "sentry.services.hybrid_cloud.project.*", + "sentry.services.hybrid_cloud.project_key.*", + "sentry.services.hybrid_cloud.region", + "sentry.services.hybrid_cloud.replica.*", + "sentry.services.hybrid_cloud.repository.*", + "sentry.services.hybrid_cloud.rpcmetrics", + "sentry.services.hybrid_cloud.sig", + "sentry.services.hybrid_cloud.tombstone.*", + "sentry.services.hybrid_cloud.user.*", + "sentry.services.hybrid_cloud.user_option.*", + "sentry.services.hybrid_cloud.util", "sentry.snuba.metrics.extraction", "sentry.tasks.commit_context", "sentry.tasks.on_demand_metrics", @@ -667,6 +697,7 @@ module = [ "tests.sentry.issues.test_status_change_consumer", "tests.sentry.issues.test_update_inbox", "tests.sentry.relay.config.test_metric_extraction", + "tests.sentry.services.hybrid_cloud.*", "tests.sentry.tasks.test_on_demand_metrics", "tools.*", ] diff --git a/src/sentry/hybridcloud/rpc_services/control_organization_provisioning/impl.py b/src/sentry/hybridcloud/rpc_services/control_organization_provisioning/impl.py index 4a083e96a0560b..37556f331275d7 100644 --- a/src/sentry/hybridcloud/rpc_services/control_organization_provisioning/impl.py +++ b/src/sentry/hybridcloud/rpc_services/control_organization_provisioning/impl.py @@ -35,7 +35,7 @@ class SlugMismatchException(Exception): def create_post_provision_outbox( provisioning_options: OrganizationProvisioningOptions, org_id: int -): +) -> RegionOutbox: return RegionOutbox( 
shard_scope=OutboxScope.ORGANIZATION_SCOPE, shard_identifier=org_id, @@ -49,7 +49,7 @@ def create_organization_provisioning_outbox( organization_id: int, region_name: str, org_provision_payload: OrganizationProvisioningOptions | None, -): +) -> ControlOutbox: payload = org_provision_payload.json() if org_provision_payload is not None else None return ControlOutbox( region_name=region_name, diff --git a/src/sentry/hybridcloud/rpc_services/region_organization_provisioning/impl.py b/src/sentry/hybridcloud/rpc_services/region_organization_provisioning/impl.py index fdf9848bd72fcc..505e1eb8d86a74 100644 --- a/src/sentry/hybridcloud/rpc_services/region_organization_provisioning/impl.py +++ b/src/sentry/hybridcloud/rpc_services/region_organization_provisioning/impl.py @@ -20,7 +20,7 @@ def create_post_provision_outbox( provisioning_options: OrganizationProvisioningOptions, org_id: int -): +) -> RegionOutbox: return RegionOutbox( shard_scope=OutboxScope.ORGANIZATION_SCOPE, shard_identifier=org_id, diff --git a/src/sentry/services/hybrid_cloud/app/impl.py b/src/sentry/services/hybrid_cloud/app/impl.py index a73e4b5a7ded5f..5d3be4e6875c43 100644 --- a/src/sentry/services/hybrid_cloud/app/impl.py +++ b/src/sentry/services/hybrid_cloud/app/impl.py @@ -271,7 +271,7 @@ def create_internal_integration_for_channel_request( organization_id: int, integration_name: str, integration_scopes: list[str], - integration_creator_id, + integration_creator_id: int, metadata: dict[str, Any] | None = None, ) -> RpcSentryAppInstallation: admin_user = User.objects.get(id=integration_creator_id) diff --git a/src/sentry/services/hybrid_cloud/notifications/impl.py b/src/sentry/services/hybrid_cloud/notifications/impl.py index 144a261a7bd7c0..4fef88e4a5694c 100644 --- a/src/sentry/services/hybrid_cloud/notifications/impl.py +++ b/src/sentry/services/hybrid_cloud/notifications/impl.py @@ -64,7 +64,7 @@ def update_notification_options( scope_type: NotificationScopeEnum, scope_identifier: int, value: NotificationSettingsOptionEnum, - ): + ) -> None: kwargs = {} if actor.is_user: kwargs["user_id"] = actor.id diff --git a/src/sentry/services/hybrid_cloud/organization_mapping/impl.py b/src/sentry/services/hybrid_cloud/organization_mapping/impl.py index 3e17daeba19bae..c6a0c38382ac45 100644 --- a/src/sentry/services/hybrid_cloud/organization_mapping/impl.py +++ b/src/sentry/services/hybrid_cloud/organization_mapping/impl.py @@ -86,7 +86,7 @@ def _check_organization_mapping_integrity( def _upsert_organization_slug_reservation_for_monolith( self, organization_id: int, mapping_update: RpcOrganizationMappingUpdate - ): + ) -> None: org_slug_reservation_qs = OrganizationSlugReservation.objects.filter( organization_id=organization_id ) diff --git a/src/sentry/services/hybrid_cloud/project_key/model.py b/src/sentry/services/hybrid_cloud/project_key/model.py index c02f1ff137b7ac..3fed82bd348698 100644 --- a/src/sentry/services/hybrid_cloud/project_key/model.py +++ b/src/sentry/services/hybrid_cloud/project_key/model.py @@ -31,5 +31,5 @@ class RpcProjectKey(RpcModel): status: int = ProjectKeyStatus.INACTIVE @property - def is_active(self): + def is_active(self) -> bool: return self.status == ProjectKeyStatus.ACTIVE diff --git a/src/sentry/services/hybrid_cloud/replica/impl.py b/src/sentry/services/hybrid_cloud/replica/impl.py index d377ba5a628e18..c99e20b9d8fb43 100644 --- a/src/sentry/services/hybrid_cloud/replica/impl.py +++ b/src/sentry/services/hybrid_cloud/replica/impl.py @@ -123,7 +123,7 @@ def handle_replication( source_model: 
type[ReplicatedControlModel] | type[ReplicatedRegionModel], destination: BaseModel, fk: str | None = None, -): +) -> None: category: OutboxCategory = source_model.category destination_model: type[BaseModel] = type(destination) fk = fk or get_foreign_key_column(destination, source_model) diff --git a/src/sentry/services/hybrid_cloud/rpc.py b/src/sentry/services/hybrid_cloud/rpc.py index 97f9ca5450ff35..2c5e06d865c587 100644 --- a/src/sentry/services/hybrid_cloud/rpc.py +++ b/src/sentry/services/hybrid_cloud/rpc.py @@ -580,7 +580,7 @@ def _fire_request(self, headers: MutableMapping[str, str], data: bytes) -> reque except requests.exceptions.Timeout as e: raise self._remote_exception(f"Timeout of {settings.RPC_TIMEOUT} exceeded") from e - def _check_disabled(self): + def _check_disabled(self) -> None: if disabled_service_methods := options.get("hybrid_cloud.rpc.disabled-service-methods"): service_method = f"{self.service_name}.{self.method_name}" if service_method in disabled_service_methods: diff --git a/src/sentry/services/hybrid_cloud/rpcmetrics.py b/src/sentry/services/hybrid_cloud/rpcmetrics.py index 0b1c83d397f169..ae5533b1bf9931 100644 --- a/src/sentry/services/hybrid_cloud/rpcmetrics.py +++ b/src/sentry/services/hybrid_cloud/rpcmetrics.py @@ -52,7 +52,7 @@ def get_local(cls) -> RpcMetricTracker: new_tracker = _LOCAL_TRACKER.tracker = cls() return new_tracker - def save_record(self, record: RpcMetricRecord): + def save_record(self, record: RpcMetricRecord) -> None: for span in self.spans: span.records.append(record) diff --git a/src/sentry/services/hybrid_cloud/user/model.py b/src/sentry/services/hybrid_cloud/user/model.py index 4ce263f1b54d50..8d53b3d7b10800 100644 --- a/src/sentry/services/hybrid_cloud/user/model.py +++ b/src/sentry/services/hybrid_cloud/user/model.py @@ -71,7 +71,7 @@ def __hash__(self) -> int: # TODO: Remove the need for this return hash((self.id, self.pk)) - def __str__(self): # API compatibility with ORM User + def __str__(self) -> str: # API compatibility with ORM User return self.get_username() def by_email(self, email: str) -> "RpcUser": diff --git a/tests/sentry/services/hybrid_cloud/organization/test_service.py b/tests/sentry/services/hybrid_cloud/organization/test_service.py index 8113f285a0e5fe..87c264f1092adb 100644 --- a/tests/sentry/services/hybrid_cloud/organization/test_service.py +++ b/tests/sentry/services/hybrid_cloud/organization/test_service.py @@ -6,7 +6,7 @@ @all_silo_test class CheckOrganizationTest(TestCase): - def test_check_active_organization_by_slug(self): + def test_check_active_organization_by_slug(self) -> None: self.organization = self.create_organization(slug="test") assert ( organization_service.check_organization_by_slug(slug="test", only_visible=True) @@ -17,7 +17,7 @@ def test_check_active_organization_by_slug(self): == self.organization.id ) - def test_check_missing_organization_by_slug(self): + def test_check_missing_organization_by_slug(self) -> None: assert ( organization_service.check_organization_by_slug(slug="test", only_visible=True) is None ) @@ -25,7 +25,7 @@ def test_check_missing_organization_by_slug(self): organization_service.check_organization_by_slug(slug="test", only_visible=False) is None ) - def test_check_pending_deletion_organization_by_slug(self): + def test_check_pending_deletion_organization_by_slug(self) -> None: self.organization = self.create_organization(slug="test") self.organization.status = OrganizationStatus.PENDING_DELETION with assume_test_silo_mode_of(Organization): @@ -38,7 +38,7 @@ def 
test_check_pending_deletion_organization_by_slug(self): == self.organization.id ) - def test_check_active_organization_by_id(self): + def test_check_active_organization_by_id(self) -> None: organization = self.create_organization(slug="test") assert ( organization_service.check_organization_by_id(id=organization.id, only_visible=True) @@ -49,11 +49,11 @@ def test_check_active_organization_by_id(self): is True ) - def test_check_missing_organization_by_id(self): + def test_check_missing_organization_by_id(self) -> None: assert organization_service.check_organization_by_id(id=1234, only_visible=True) is False assert organization_service.check_organization_by_id(id=1234, only_visible=False) is False - def test_check_pending_deletion_organization_by_id(self): + def test_check_pending_deletion_organization_by_id(self) -> None: self.organization = self.create_organization(slug="test") self.organization.status = OrganizationStatus.PENDING_DELETION with assume_test_silo_mode_of(Organization): diff --git a/tests/sentry/services/hybrid_cloud/test_hybrid_cloud.py b/tests/sentry/services/hybrid_cloud/test_hybrid_cloud.py index 939040ef9d0d09..2ad6be506d8cdd 100644 --- a/tests/sentry/services/hybrid_cloud/test_hybrid_cloud.py +++ b/tests/sentry/services/hybrid_cloud/test_hybrid_cloud.py @@ -9,13 +9,13 @@ @control_silo_test class RpcModelTest(TestCase): - def test_schema_generation(self): + def test_schema_generation(self) -> None: for api_type in self._get_rpc_model_subclasses(): # We're mostly interested in whether an error occurs schema = api_type.schema_json() assert schema - def _get_rpc_model_subclasses(self): + def _get_rpc_model_subclasses(self) -> set[type[RpcModel]]: subclasses = set() stack = deque([RpcModel]) while stack: @@ -27,7 +27,7 @@ def _get_rpc_model_subclasses(self): subclasses.remove(RpcModel) return subclasses - def test_rpc_model_equals_method(self): + def test_rpc_model_equals_method(self) -> None: orm_user = self.create_user() Authenticator.objects.create(user=orm_user, type=1) diff --git a/tests/sentry/services/hybrid_cloud/test_rpcmetrics.py b/tests/sentry/services/hybrid_cloud/test_rpcmetrics.py index 163f3463b1bca1..25e022b5a8d819 100644 --- a/tests/sentry/services/hybrid_cloud/test_rpcmetrics.py +++ b/tests/sentry/services/hybrid_cloud/test_rpcmetrics.py @@ -12,14 +12,14 @@ def setUp(self) -> None: super().setUp() assert len(RpcMetricTracker.get_local().spans) == 0 - def test_single_thread(self): + def test_single_thread(self) -> None: with RpcMetricSpan() as span: for n in range(3): with RpcMetricRecord.measure(f"service{n}", f"method{n}"): pass assert len(span.records) == 3 - def test_multithreaded(self): + def test_multithreaded(self) -> None: record_queue: Queue[RpcMetricRecord] = Queue() def make_thread(n: int) -> Thread: diff --git a/tests/sentry/services/hybrid_cloud/user/test_impl.py b/tests/sentry/services/hybrid_cloud/user/test_impl.py index c3adfc3078bc5d..b8c431dc810f2a 100644 --- a/tests/sentry/services/hybrid_cloud/user/test_impl.py +++ b/tests/sentry/services/hybrid_cloud/user/test_impl.py @@ -13,7 +13,7 @@ class DatabaseBackedUserService(TestCase): def setUp(self) -> None: super().setUp() - def test_create_new_user(self): + def test_create_new_user(self) -> None: old_user_count = User.objects.all().count() rpc_user, created = user_service.get_or_create_user_by_email(email="test@email.com") user = User.objects.get(id=rpc_user.id) @@ -22,11 +22,11 @@ def test_create_new_user(self): assert user.flags.newsletter_consent_prompt assert created - def 
test_get_no_existing(self):
+    def test_get_no_existing(self) -> None:
         rpc_user = user_service.get_user_by_email(email="test@email.com")
         assert rpc_user is None

-    def test_get_or_create_user(self):
+    def test_get_or_create_user(self) -> None:
         user1 = self.create_user(email="test@email.com", username="1")
         user2 = self.create_user(email="test@email.com", username="2")
         user, created = user_service.get_or_create_user_by_email(email="test@email.com")
@@ -34,7 +34,7 @@ def test_get_or_create_user(self):
         assert user2.id != user.id
         assert created is False

-    def test_get_active_user(self):
+    def test_get_active_user(self) -> None:
         inactive_user = self.create_user(
             email="test@email.com", username="inactive", is_active=False
         )
@@ -44,13 +44,13 @@ def test_get_active_user(self):
         assert inactive_user.id != user.id
         assert created is False

-    def test_get_user_ci(self):
+    def test_get_user_ci(self) -> None:
         user = self.create_user(email="tESt@email.com")
         fetched_user, created = user_service.get_or_create_user_by_email(email="TesT@email.com")
         assert user.id == fetched_user.id
         assert created is False

-    def test_get_user_with_ident(self):
+    def test_get_user_with_ident(self) -> None:
         user1 = self.create_user(email="test@email.com", username="1")
         user2 = self.create_user(email="test@email.com", username="2")
         org = self.create_organization(slug="test")
@@ -67,7 +67,7 @@ def test_get_user_with_ident(self):
         assert user1.id != fetched_user.id
         assert created is False

-    def test_verify_user_emails(self):
+    def test_verify_user_emails(self) -> None:
         user1 = self.create_user(email="test@email.com")
         user2 = self.create_user(email="test2@email.com")
         verified_emails = user_service.verify_user_emails(
@@ -82,7 +82,7 @@ def test_verify_user_emails(self):
         assert verified_emails[user1.id].exists
         assert not verified_emails[user2.id].exists

-    def test_verify_user_emails_only_verified(self):
+    def test_verify_user_emails_only_verified(self) -> None:
         user1 = self.create_user(email="test@email.com")
         user2 = self.create_user(email="test2@email.com")
         UserEmail.objects.filter(user=user2, email="test2@email.com").update(is_verified=False)

From 81cffe85f6c43dfcc275e522fa7f248410c8412e Mon Sep 17 00:00:00 2001
From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com>
Date: Mon, 13 May 2024 17:31:34 -0400
Subject: [PATCH 371/376] fix(new-trace): Missing instrumentation span durations not visible. (#70716)

Co-authored-by: Abdullah Khan

---
 .../views/performance/newTraceDetails/trace.tsx | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/static/app/views/performance/newTraceDetails/trace.tsx b/static/app/views/performance/newTraceDetails/trace.tsx
index 8b5db326c4e28c..1117268b3b6354 100644
--- a/static/app/views/performance/newTraceDetails/trace.tsx
+++ b/static/app/views/performance/newTraceDetails/trace.tsx
@@ -1325,16 +1325,18 @@ function MissingInstrumentationTraceBar(props: MissingInstrumentationTraceBarPro
   }, [props.manager, props.node_space, props.virtualized_index, duration]
   );

+  return (
+      [added bar markup; the JSX element tags were stripped during extraction.
+       The new structure renders the duration label inside the
+       missing-instrumentation bar]
+        {duration}
-      [removed: the previous bar markup, its JSX tags likewise stripped]
+ ); } From 4a732daa47154d4afaa0d80b95c56dc7ff136336 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Mon, 13 May 2024 17:39:12 -0400 Subject: [PATCH 372/376] feat(insights): add analytics to database sample panel (#70769) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds analytics to the span sample panel to track: - How often the panels are opened - How often spans are clicked - How often “Try different samples” is clicked - How often filters are interacted with This PR adds analytics to database only as a first step. I'll be adding analytics to the remaining modules in the next PR. --- .../analytics/performanceAnalyticsEvents.tsx | 25 +++++++++++++++++++ .../resources/resourceSummaryPage/index.tsx | 3 ++- .../database/databaseSpanSummaryPage.tsx | 3 ++- .../database/queryTransactionsTable.tsx | 13 +++++++++- .../samples/samplesContainer.tsx | 3 ++- .../samplesTable/spanSamplesTable.tsx | 11 +++++++- static/app/views/starfish/types.tsx | 5 ++++ .../spanSummaryPage/sampleList/index.tsx | 5 +++- .../sampleTable/sampleTable.spec.tsx | 7 +++++- .../sampleList/sampleTable/sampleTable.tsx | 17 +++++++++++-- .../views/spans/selectors/actionSelector.tsx | 5 ++++ .../views/spans/selectors/domainSelector.tsx | 5 ++++ .../starfish/views/spans/spanTimeCharts.tsx | 5 ++++ 13 files changed, 98 insertions(+), 9 deletions(-) diff --git a/static/app/utils/analytics/performanceAnalyticsEvents.tsx b/static/app/utils/analytics/performanceAnalyticsEvents.tsx index d23fd15758651d..cfe932f0d890ae 100644 --- a/static/app/utils/analytics/performanceAnalyticsEvents.tsx +++ b/static/app/utils/analytics/performanceAnalyticsEvents.tsx @@ -1,3 +1,4 @@ +import type {FieldValue} from 'sentry/components/forms/model'; import type {Organization, PlatformKey} from 'sentry/types'; type SampleTransactionParam = { @@ -127,6 +128,24 @@ export type PerformanceEventParameters = { 'performance_views.relative_breakdown.selection': { action: string; }; + 'performance_views.sample_spans.filter_updated': { + filter: string; + new_state: FieldValue; + organization: Organization; + source: string; + }; + 'performance_views.sample_spans.opened': { + organization: Organization; + source: string; + }; + 'performance_views.sample_spans.span_clicked': { + organization: Organization; + source: string; + }; + 'performance_views.sample_spans.try_different_samples_clicked': { + organization: Organization; + source: string; + }; 'performance_views.span_summary.change_chart': { change_to_display: string; }; @@ -253,6 +272,12 @@ export const performanceEventMap: Record = { 'performance_views.landingv3.table_pagination': 'Performance Views: Landing Page Transactions Table Page Changed', 'performance_views.overview.change_chart': 'Performance Views: Change Overview Chart', + 'performance_views.sample_spans.opened': 'Performance Views: Sample spans panel opened', + 'performance_views.sample_spans.span_clicked': 'Performance Views: Sample span clicked', + 'performance_views.sample_spans.try_different_samples_clicked': + 'Performance Views: Try Different Samples clicked', + 'performance_views.sample_spans.filter_updated': + 'Performance Views: Sample spans panel filter updated', 'performance_views.span_summary.change_chart': 'Performance Views: Span Summary displayed chart changed', 'performance_views.spans.change_op': 'Performance Views: Change span operation name', diff --git a/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx 
b/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx index 8503aca2c58bd6..6d2cd9f62990e1 100644 --- a/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx +++ b/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx @@ -26,7 +26,7 @@ import {ResourceSpanOps} from 'sentry/views/performance/browser/resources/shared import {useResourceModuleFilters} from 'sentry/views/performance/browser/resources/utils/useResourceFilters'; import {ModulePageProviders} from 'sentry/views/performance/modulePageProviders'; import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover'; -import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {ModuleName, SpanMetricsField} from 'sentry/views/starfish/types'; import {SampleList} from 'sentry/views/starfish/views/spanSummaryPage/sampleList'; const { @@ -149,6 +149,7 @@ function ResourceSummary() { diff --git a/static/app/views/performance/database/databaseSpanSummaryPage.tsx b/static/app/views/performance/database/databaseSpanSummaryPage.tsx index 2dcd5efd52f9cc..e66cdd7e9128f4 100644 --- a/static/app/views/performance/database/databaseSpanSummaryPage.tsx +++ b/static/app/views/performance/database/databaseSpanSummaryPage.tsx @@ -31,7 +31,7 @@ import {getTimeSpentExplanation} from 'sentry/views/starfish/components/tableCel import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover'; import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useDiscoverSeries'; import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; -import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; +import {ModuleName, SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; import {SampleList} from 'sentry/views/starfish/views/spanSummaryPage/sampleList'; @@ -266,6 +266,7 @@ export function DatabaseSpanSummaryPage({params}: Props) { diff --git a/static/app/views/performance/database/queryTransactionsTable.tsx b/static/app/views/performance/database/queryTransactionsTable.tsx index d4686b1e843135..3f224c3c58403a 100644 --- a/static/app/views/performance/database/queryTransactionsTable.tsx +++ b/static/app/views/performance/database/queryTransactionsTable.tsx @@ -10,6 +10,7 @@ import type {CursorHandler} from 'sentry/components/pagination'; import Pagination from 'sentry/components/pagination'; import {t} from 'sentry/locale'; import type {Organization} from 'sentry/types'; +import {trackAnalytics} from 'sentry/utils/analytics'; import type {EventsMetaType} from 'sentry/utils/discover/eventView'; import {getFieldRenderer} from 'sentry/utils/discover/fieldRenderers'; import type {Sort} from 'sentry/utils/discover/fields'; @@ -161,7 +162,17 @@ function renderBodyCell( return ( - {label} + + trackAnalytics('performance_views.sample_spans.opened', { + organization, + source: 'database', + }) + } + to={`${pathname}?${qs.stringify(query)}`} + > + {label} + ); } diff --git a/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx b/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx index f44c00fdaf3a6e..8893bc46fd04e6 100644 --- a/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx +++ 
b/static/app/views/performance/mobile/screenload/screenLoadSpans/samples/samplesContainer.tsx @@ -24,7 +24,7 @@ import { import {isCrossPlatform} from 'sentry/views/performance/mobile/screenload/screens/utils'; import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover'; import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; -import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {ModuleName, SpanMetricsField} from 'sentry/views/starfish/types'; import {formatVersionAndCenterTruncate} from 'sentry/views/starfish/utils/centerTruncate'; import {DataTitles} from 'sentry/views/starfish/views/spans/types'; import DurationChart from 'sentry/views/starfish/views/spanSummaryPage/sampleList/durationChart'; @@ -182,6 +182,7 @@ export function ScreenLoadSampleContainer({ onMouseOverSample={sample => setHighlightedSpanId(sample.span_id)} groupId={groupId} transactionName={transactionName} + moduleName={ModuleName.SCREEN} release={release} columnOrder={[ { diff --git a/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx b/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx index b99db8206adb97..88496ae9e796c8 100644 --- a/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx +++ b/static/app/views/starfish/components/samplesTable/spanSamplesTable.tsx @@ -7,6 +7,7 @@ import Link from 'sentry/components/links/link'; import {Tooltip} from 'sentry/components/tooltip'; import {IconProfiling} from 'sentry/icons/iconProfiling'; import {t} from 'sentry/locale'; +import {trackAnalytics} from 'sentry/utils/analytics'; import {generateLinkToEventInTraceView} from 'sentry/utils/discover/urls'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; @@ -19,7 +20,7 @@ import { TextAlignRight, } from 'sentry/views/starfish/components/textAlign'; import type {SpanSample} from 'sentry/views/starfish/queries/useSpanSamples'; -import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {type ModuleName, SpanMetricsField} from 'sentry/views/starfish/types'; const {HTTP_RESPONSE_CONTENT_LENGTH} = SpanMetricsField; @@ -67,6 +68,7 @@ type Props = { avg: number; data: SpanTableRow[]; isLoading: boolean; + moduleName: ModuleName; columnOrder?: SamplesTableColumnHeader[]; highlightedSpanId?: string; onMouseLeaveSample?: () => void; @@ -77,6 +79,7 @@ export function SpanSamplesTable({ isLoading, data, avg, + moduleName, highlightedSpanId, onMouseLeaveSample, onMouseOverSample, @@ -123,6 +126,12 @@ export function SpanSamplesTable({ if (column.key === 'span_id') { return ( + trackAnalytics('performance_views.sample_spans.span_clicked', { + organization, + source: moduleName, + }) + } to={generateLinkToEventInTraceView({ eventId: row['transaction.id'], timestamp: row.timestamp, diff --git a/static/app/views/starfish/types.tsx b/static/app/views/starfish/types.tsx index 810a49e9f43ff6..8b465aeb5f5d6c 100644 --- a/static/app/views/starfish/types.tsx +++ b/static/app/views/starfish/types.tsx @@ -13,6 +13,11 @@ export enum StarfishType { export enum ModuleName { HTTP = 'http', DB = 'db', + CACHE = 'cache', + VITAL = 'vital', + QUEUE = 'queue', + SCREEN = 'screen', + STARTUP = 'startup', RESOURCE = 'resource', ALL = '', OTHER = 'other', diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx index 36c6bc81c64203..12013580abbdfe 100644 --- 
a/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx @@ -18,7 +18,7 @@ import useRouter from 'sentry/utils/useRouter'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import DetailPanel from 'sentry/views/starfish/components/detailPanel'; import {DEFAULT_COLUMN_ORDER} from 'sentry/views/starfish/components/samplesTable/spanSamplesTable'; -import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {type ModuleName, SpanMetricsField} from 'sentry/views/starfish/types'; import DurationChart from 'sentry/views/starfish/views/spanSummaryPage/sampleList/durationChart'; import SampleInfo from 'sentry/views/starfish/views/spanSummaryPage/sampleList/sampleInfo'; import SampleTable from 'sentry/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable'; @@ -27,6 +27,7 @@ const {HTTP_RESPONSE_CONTENT_LENGTH} = SpanMetricsField; type Props = { groupId: string; + moduleName: ModuleName; transactionName: string; additionalFields?: string[]; onClose?: () => void; @@ -37,6 +38,7 @@ type Props = { export function SampleList({ groupId, + moduleName, transactionName, transactionMethod, spanDescription, @@ -171,6 +173,7 @@ export function SampleList({ onMouseLeaveSample={() => setHighlightedSpanId(undefined)} onMouseOverSample={sample => setHighlightedSpanId(sample.span_id)} groupId={groupId} + moduleName={moduleName} transactionName={transactionName} query={extraQuery} columnOrder={columnOrder} diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.spec.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.spec.tsx index 1350af812722aa..fd4130905bb1dd 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.spec.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.spec.tsx @@ -7,7 +7,7 @@ import { import {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; import {t} from 'sentry/locale'; import type {PageFilters} from 'sentry/types/core'; -import {SpanMetricsField} from 'sentry/views/starfish/types'; +import {ModuleName, SpanMetricsField} from 'sentry/views/starfish/types'; import SampleTable from './sampleTable'; @@ -43,6 +43,7 @@ describe('SampleTable', function () { const container = render( @@ -55,6 +56,7 @@ describe('SampleTable', function () { const container = render( @@ -68,6 +70,7 @@ describe('SampleTable', function () { const container = render( @@ -89,6 +92,7 @@ describe('SampleTable', function () { const container = render( diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx index 3a185fdaa0528c..4691b9121fdf89 100644 --- a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/sampleTable.tsx @@ -16,7 +16,7 @@ import {useSpanMetrics} from 'sentry/views/starfish/queries/useDiscover'; import type {SpanSample} from 'sentry/views/starfish/queries/useSpanSamples'; import {useSpanSamples} from 'sentry/views/starfish/queries/useSpanSamples'; import {useTransactions} from 'sentry/views/starfish/queries/useTransactions'; -import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; +import type {ModuleName, SpanMetricsQueryFilters} from 
'sentry/views/starfish/types'; import {SpanMetricsField} from 'sentry/views/starfish/types'; const {SPAN_SELF_TIME, SPAN_OP} = SpanMetricsField; @@ -27,6 +27,7 @@ const SpanSamplesTableContainer = styled('div')` type Props = { groupId: string; + moduleName: ModuleName; transactionName: string; additionalFields?: string[]; additionalFilters?: Record; @@ -41,6 +42,7 @@ type Props = { function SampleTable({ groupId, + moduleName, transactionName, highlightedSpanId, onMouseLeaveSample, @@ -152,6 +154,7 @@ function SampleTable({ hasData={spans.length > 0} > - + ); } diff --git a/static/app/views/starfish/views/spans/selectors/actionSelector.tsx b/static/app/views/starfish/views/spans/selectors/actionSelector.tsx index 24413915197299..b9518506f32a4d 100644 --- a/static/app/views/starfish/views/spans/selectors/actionSelector.tsx +++ b/static/app/views/starfish/views/spans/selectors/actionSelector.tsx @@ -102,6 +102,11 @@ const HTTP_ACTION_OPTIONS = [ const LABEL_FOR_MODULE_NAME: {[key in ModuleName]: ReactNode} = { http: t('HTTP Method'), db: t('SQL Command'), + cache: t('Action'), + vital: t('Action'), + queue: t('Action'), + screen: t('Action'), + startup: t('Action'), resource: t('Resource'), other: t('Action'), '': t('Action'), diff --git a/static/app/views/starfish/views/spans/selectors/domainSelector.tsx b/static/app/views/starfish/views/spans/selectors/domainSelector.tsx index 1cf6f4d8b03e38..4d3822de8d08bd 100644 --- a/static/app/views/starfish/views/spans/selectors/domainSelector.tsx +++ b/static/app/views/starfish/views/spans/selectors/domainSelector.tsx @@ -170,6 +170,11 @@ const LIMIT = 100; const LABEL_FOR_MODULE_NAME: {[key in ModuleName]: ReactNode} = { http: t('Host'), db: t('Table'), + cache: t('Domain'), + vital: t('Domain'), + queue: t('Domain'), + screen: t('Domain'), + startup: t('Domain'), resource: t('Resource'), other: t('Domain'), '': t('Domain'), diff --git a/static/app/views/starfish/views/spans/spanTimeCharts.tsx b/static/app/views/starfish/views/spans/spanTimeCharts.tsx index bd886e16dc70ea..957ecfb2b48a33 100644 --- a/static/app/views/starfish/views/spans/spanTimeCharts.tsx +++ b/static/app/views/starfish/views/spans/spanTimeCharts.tsx @@ -87,6 +87,11 @@ export function SpanTimeCharts({ {title: getDurationChartTitle(moduleName), Comp: DurationChart}, ], [ModuleName.DB]: [], + [ModuleName.CACHE]: [], + [ModuleName.VITAL]: [], + [ModuleName.QUEUE]: [], + [ModuleName.SCREEN]: [], + [ModuleName.STARTUP]: [], [ModuleName.RESOURCE]: features.includes( 'starfish-browser-resource-module-bundle-analysis' ) From 12e752c7552bf889b4aef9bf372a70cd24adc505 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Mon, 13 May 2024 14:59:11 -0700 Subject: [PATCH 373/376] test(ui): Use built-in test providers (#70795) --- .../eventReplay/replayClipPreview.spec.tsx | 27 +--- .../featureFeedback/feedbackModal.spec.tsx | 125 +++++++----------- static/app/views/performance/content.spec.tsx | 45 ++----- .../transactionReplays/index.spec.tsx | 34 +---- .../transactionVitals/index.spec.tsx | 63 ++++----- .../thresholdGroupRows.spec.tsx | 37 ++---- 6 files changed, 109 insertions(+), 222 deletions(-) diff --git a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx index c75197b5595a53..02d7ce494853f0 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.spec.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.spec.tsx @@ -1,5 +1,4 @@ import {duration} from 'moment'; -import 
{OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {RRWebInitFrameEventsFixture} from 'sentry-fixture/replay/rrweb'; import {ReplayRecordFixture} from 'sentry-fixture/replayRecord'; @@ -11,8 +10,6 @@ import type {DetailedOrganization} from 'sentry/types/organization'; import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader'; import ReplayReader from 'sentry/utils/replays/replayReader'; import type RequestError from 'sentry/utils/requestError/requestError'; -import {OrganizationContext} from 'sentry/views/organizationContext'; -import {RouteContext} from 'sentry/views/routeContext'; import ReplayClipPreview from './replayClipPreview'; @@ -68,7 +65,8 @@ const render = ( children: React.ReactElement, orgParams: Partial = {} ) => { - const {router, routerContext} = initializeOrg({ + const {routerContext, organization} = initializeOrg({ + organization: {slug: mockOrgSlug, ...orgParams}, router: { routes: [ {path: '/'}, @@ -82,23 +80,10 @@ const render = ( }, }); - return baseRender( - - - {children} - - , - {context: routerContext} - ); + return baseRender(children, { + context: routerContext, + organization, + }); }; const mockIsFullscreen = jest.fn(); diff --git a/static/app/components/featureFeedback/feedbackModal.spec.tsx b/static/app/components/featureFeedback/feedbackModal.spec.tsx index 61e8ea0ea490ae..4a8fc43769426a 100644 --- a/static/app/components/featureFeedback/feedbackModal.spec.tsx +++ b/static/app/components/featureFeedback/feedbackModal.spec.tsx @@ -1,7 +1,6 @@ import {Fragment} from 'react'; import * as Sentry from '@sentry/react'; -import {initializeOrg} from 'sentry-test/initializeOrg'; import { act, renderGlobalModal, @@ -14,24 +13,6 @@ import * as indicators from 'sentry/actionCreators/indicator'; import {openModal} from 'sentry/actionCreators/modal'; import {FeedbackModal} from 'sentry/components/featureFeedback/feedbackModal'; import TextField from 'sentry/components/forms/fields/textField'; -import {RouteContext} from 'sentry/views/routeContext'; - -function ComponentProviders({children}: {children: React.ReactNode}) { - const {router} = initializeOrg(); - - return ( - - {children} - - ); -} describe('FeatureFeedback', function () { describe('default', function () { @@ -47,11 +28,7 @@ describe('FeatureFeedback', function () { renderGlobalModal(); act(() => - openModal(modalProps => ( - - - - )) + openModal(modalProps => ) ); // Form fields @@ -105,13 +82,11 @@ describe('FeatureFeedback', function () { act(() => openModal(modalProps => ( - - - + )) ); @@ -130,13 +105,11 @@ describe('FeatureFeedback', function () { act(() => openModal(modalProps => ( - - Test Secondary Action Link} - /> - + Test Secondary Action Link} + /> )) ); @@ -161,53 +134,51 @@ describe('FeatureFeedback', function () { act(() => openModal(modalProps => ( - - - {({Header, Body, Footer, state, onFieldChange}) => { - if (state.step === 0) { - return ( - -
-                  [step-form JSX stripped during extraction: a header reading
-                   "First Step" and a TextField whose onChange calls
-                   onFieldChange('name', value)]