Skip to content

Commit

Permalink
Merge branch 'master' into raj/api-idorslug/rename-path-params/organization_slug
Browse files Browse the repository at this point in the history
  • Loading branch information
iamrajjoshi authored May 13, 2024
2 parents 6dbff1a + bef6fe8 commit fd524ce
Show file tree
Hide file tree
Showing 113 changed files with 2,031 additions and 1,286 deletions.
12 changes: 6 additions & 6 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,13 @@
"@sentry-internal/rrweb-player": "2.12.0",
"@sentry-internal/rrweb-snapshot": "2.12.0",
"@sentry/babel-plugin-component-annotate": "^2.16.1",
"@sentry/core": "^8.0.0-rc.2",
"@sentry/node": "^8.0.0-rc.2",
"@sentry/react": "^8.0.0-rc.2",
"@sentry/core": "^8.0.0",
"@sentry/node": "^8.0.0",
"@sentry/react": "^8.0.0",
"@sentry/release-parser": "^1.3.1",
"@sentry/status-page-list": "^0.1.0",
"@sentry/types": "^8.0.0-rc.2",
"@sentry/utils": "^8.0.0-rc.2",
"@sentry/types": "^8.0.0",
"@sentry/utils": "^8.0.0",
"@spotlightjs/spotlight": "^2.0.0-alpha.1",
"@tanstack/react-query": "^4.29.7",
"@tanstack/react-query-devtools": "^4.36.1",
Expand Down Expand Up @@ -178,7 +178,7 @@
"@codecov/webpack-plugin": "^0.0.1-beta.6",
"@pmmmwh/react-refresh-webpack-plugin": "0.5.11",
"@sentry/jest-environment": "^4.0.0",
"@sentry/profiling-node": "^8.0.0-rc.2",
"@sentry/profiling-node": "^8.0.0",
"@styled/typescript-styled-plugin": "^1.0.1",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/react": "^14.2.1",
Expand Down
31 changes: 31 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -558,6 +558,10 @@ module = [
"sentry.buffer.redis",
"sentry.build.*",
"sentry.eventstore.reprocessing.redis",
"sentry.hybridcloud",
"sentry.hybridcloud.migrations.*",
"sentry.hybridcloud.options",
"sentry.hybridcloud.rpc_services.*",
"sentry.issues",
"sentry.issues.analytics",
"sentry.issues.apps",
Expand Down Expand Up @@ -602,6 +606,32 @@ module = [
"sentry.relay.config.metric_extraction",
"sentry.reprocessing2",
"sentry.runner.*",
"sentry.services.hybrid_cloud.access.*",
"sentry.services.hybrid_cloud.app.*",
"sentry.services.hybrid_cloud.hook.*",
"sentry.services.hybrid_cloud.identity.*",
"sentry.services.hybrid_cloud.integration.*",
"sentry.services.hybrid_cloud.issue.*",
"sentry.services.hybrid_cloud.log.*",
"sentry.services.hybrid_cloud.lost_password_hash.*",
"sentry.services.hybrid_cloud.notifications.*",
"sentry.services.hybrid_cloud.organization_actions.*",
"sentry.services.hybrid_cloud.organization_mapping.*",
"sentry.services.hybrid_cloud.organization_provisioning.*",
"sentry.services.hybrid_cloud.organizationmember_mapping.*",
"sentry.services.hybrid_cloud.orgauthtoken.*",
"sentry.services.hybrid_cloud.pagination",
"sentry.services.hybrid_cloud.project.*",
"sentry.services.hybrid_cloud.project_key.*",
"sentry.services.hybrid_cloud.region",
"sentry.services.hybrid_cloud.replica.*",
"sentry.services.hybrid_cloud.repository.*",
"sentry.services.hybrid_cloud.rpcmetrics",
"sentry.services.hybrid_cloud.sig",
"sentry.services.hybrid_cloud.tombstone.*",
"sentry.services.hybrid_cloud.user.*",
"sentry.services.hybrid_cloud.user_option.*",
"sentry.services.hybrid_cloud.util",
"sentry.snuba.metrics.extraction",
"sentry.tasks.commit_context",
"sentry.tasks.on_demand_metrics",
Expand Down Expand Up @@ -667,6 +697,7 @@ module = [
"tests.sentry.issues.test_status_change_consumer",
"tests.sentry.issues.test_update_inbox",
"tests.sentry.relay.config.test_metric_extraction",
"tests.sentry.services.hybrid_cloud.*",
"tests.sentry.tasks.test_on_demand_metrics",
"tools.*",
]
Expand Down
2 changes: 1 addition & 1 deletion requirements-dev-frozen.txt
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ sentry-usage-accountant==0.0.10
simplejson==3.17.6
six==1.16.0
sniffio==1.2.0
snuba-sdk==2.0.33
snuba-sdk==2.0.34
sortedcontainers==2.4.0
soupsieve==2.3.2.post1
sqlparse==0.4.4
Expand Down
2 changes: 1 addition & 1 deletion requirements-frozen.txt
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ sentry-usage-accountant==0.0.10
simplejson==3.17.6
six==1.16.0
sniffio==1.3.0
snuba-sdk==2.0.33
snuba-sdk==2.0.34
soupsieve==2.3.2.post1
sqlparse==0.4.4
statsd==3.3.0
Expand Down
9 changes: 6 additions & 3 deletions src/sentry/api/endpoints/group_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,16 +236,19 @@ def get(self, request: Request, group) -> Response:
)
data.update({"sentryAppIssues": sentry_app_issues})

if "hasAttachments" in expand:
if "latestEventHasAttachments" in expand:
if not features.has(
"organizations:event-attachments",
group.project.organization,
actor=request.user,
):
return self.respond(status=404)

num_attachments = EventAttachment.objects.filter(group_id=group.id).count()
data.update({"hasAttachments": num_attachments > 0})
latest_event = group.get_latest_event()
num_attachments = EventAttachment.objects.filter(
project_id=latest_event.project_id, event_id=latest_event.event_id
).count()
data.update({"latestEventHasAttachments": num_attachments > 0})

data.update(
{
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/organization_stats_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ def get(self, request: Request, organization) -> Response:
"""
with self.handle_query_errors():

if features.has("organizations:metrics-stats", organization):
if features.has("organizations:custom-metrics", organization):
if (
request.GET.get("category") == "metrics"
or request.GET.get("category") == "metricSecond"
Expand Down
35 changes: 24 additions & 11 deletions src/sentry/api/endpoints/organization_traces.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ class TraceInterval(TypedDict):
kind: Literal["project", "missing", "other"]
opCategory: str | None
duration: int
isRoot: bool
components: NotRequired[list[tuple[int, int]]]


Expand Down Expand Up @@ -722,7 +723,7 @@ def get_traces_breakdown_projects_query(
"precise.start_ts",
"precise.finish_ts",
],
orderby=["precise.start_ts", "precise.finish_ts"],
orderby=["precise.start_ts", "-precise.finish_ts"],
# limit the number of segments we fetch per trace so a single
# large trace does not result in the rest being blank
limitby=("trace", int(MAX_SNUBA_RESULTS / len(trace_ids))),
Expand Down Expand Up @@ -760,10 +761,11 @@ def get_traces_breakdown_categories_query(
"transaction",
"span.category",
"sdk.name",
"parent_span",
"precise.start_ts",
"precise.finish_ts",
],
orderby=["precise.start_ts", "precise.finish_ts"],
orderby=["precise.start_ts", "-precise.finish_ts"],
# limit the number of segments we fetch per trace so a single
# large trace does not result in the rest being blank
limitby=("trace", int(MAX_SNUBA_RESULTS / len(trace_ids))),
Expand Down Expand Up @@ -1000,7 +1002,12 @@ def process_breakdowns(data, traces_range):

def should_merge(interval_a, interval_b):
return (
interval_a["end"] >= interval_b["start"]
# only merge intervals that have parent spans, i.e. those that aren't the trace root
not interval_a["isRoot"]
and not interval_b["isRoot"]
# only merge intervals that overlap
and interval_a["end"] >= interval_b["start"]
# only merge intervals that are part of the same service
and interval_a["project"] == interval_b["project"]
and interval_a["sdkName"] == interval_b["sdkName"]
and interval_a["opCategory"] == interval_b["opCategory"]
Expand Down Expand Up @@ -1032,14 +1039,16 @@ def breakdown_push(trace, interval):
"components": [
(last_interval["components"][-1][1], interval["components"][0][0]),
],
"isRoot": False,
}
)

breakdown.append(interval)

def stack_push(trace, interval):
last_interval = stack_peek(trace)
if last_interval and should_merge(last_interval, interval):
for last_interval in reversed(stacks[trace]):
if not should_merge(last_interval, interval):
continue
# update the end of this interval and it will
# be updated in the breakdown as well
last_interval["end"] = max(interval["end"], last_interval["end"])
Expand Down Expand Up @@ -1107,7 +1116,14 @@ def stack_clear(trace, until=None):
row["quantized.start_ts"] = quantized_start
row["quantized.finish_ts"] = quantized_end

data.sort(key=lambda row: (row["quantized.start_ts"], -row["quantized.finish_ts"]))
data.sort(
key=lambda row: (
row["quantized.start_ts"],
row["precise.start_ts"],
-row["quantized.finish_ts"],
-row["precise.finish_ts"],
)
)

last_timestamp_per_trace: dict[str, int] = defaultdict(int)

Expand All @@ -1131,6 +1147,7 @@ def stack_clear(trace, until=None):
"end": row["quantized.finish_ts"],
"duration": 0,
"components": [(row["precise.start_ts"], row["precise.finish_ts"])],
"isRoot": not bool(row.get("parent_span")),
}

# Clear the stack of any intervals that end before the current interval
Expand All @@ -1139,11 +1156,6 @@ def stack_clear(trace, until=None):

stack_push(trace, cur)

# Clear the stack of any intervals that end before the current interval
# ends. Here we do not need to push them to the breakdowns because
# that time has already be attributed to the most recent interval.
stack_clear(trace, until=cur["end"])

for trace, trace_range in traces_range.items():
# Check to see if there is still a gap before the trace ends and fill it
# with an other interval.
Expand All @@ -1158,6 +1170,7 @@ def stack_clear(trace, until=None):
"start": other_start,
"end": other_end,
"duration": 0,
"isRoot": False,
}

# Clear the remaining intervals on the stack to find the latest end time
Expand Down
13 changes: 8 additions & 5 deletions src/sentry/api/serializers/models/group_stream.py
Original file line number Diff line number Diff line change
Expand Up @@ -397,7 +397,7 @@ def get_attrs(
)
attrs[item].update({"sentryAppIssues": sentry_app_issues})

if self._expand("hasAttachments"):
if self._expand("latestEventHasAttachments"):
if not features.has(
"organizations:event-attachments",
item.project.organization,
Expand All @@ -406,8 +406,11 @@ def get_attrs(
return self.respond(status=404)

for item in item_list:
num_attachments = EventAttachment.objects.filter(group_id=item.id).count()
attrs[item].update({"hasAttachments": num_attachments > 0})
latest_event = item.get_latest_event()
num_attachments = EventAttachment.objects.filter(
project_id=latest_event.project_id, event_id=latest_event.event_id
).count()
attrs[item].update({"latestEventHasAttachments": num_attachments > 0})

return attrs

Expand Down Expand Up @@ -467,8 +470,8 @@ def serialize(
if self._expand("sentryAppIssues"):
result["sentryAppIssues"] = attrs["sentryAppIssues"]

if self._expand("hasAttachments"):
result["hasAttachments"] = attrs["hasAttachments"]
if self._expand("latestEventHasAttachments"):
result["latestEventHasAttachments"] = attrs["latestEventHasAttachments"]

return result

Expand Down
9 changes: 4 additions & 5 deletions src/sentry/buffer/redis.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,12 +63,11 @@ class BufferHookRegistry:
def __init__(self, *args: Any, **kwargs: Any) -> None:
self._registry: dict[BufferHookEvent, Callable[..., Any]] = {}

def add_handler(self, key: BufferHookEvent) -> Callable[..., Any]:
def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
self._registry[key] = func
return func
def add_handler(self, key: BufferHookEvent, func: Callable[..., Any]) -> None:
self._registry[key] = func

return decorator
def has(self, key: BufferHookEvent) -> bool:
return self._registry.get(key) is not None

def callback(self, buffer_hook_event: BufferHookEvent, data: RedisBuffer) -> bool:
try:
Expand Down
2 changes: 0 additions & 2 deletions src/sentry/conf/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -1521,8 +1521,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"organizations:ddm-sidebar-item-hidden": False,
# Enables import of metric dashboards
"organizations:ddm-dashboard-import": False,
# Enables category "metrics" in stats_v2 endpoint
"organizations:metrics-stats": False,
# Enable the default alert at project creation to be the high priority alert
"organizations:default-high-priority-alerts": False,
# Enables automatically deriving of code mappings
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/features/temporary.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:dashboards-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:dashboards-mep", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:dashboards-rh-widget", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:dashboards-span-metrics", OrganizationFeature, FeatureHandlerStrategy.OPTIONS)
manager.add("organizations:ddm-dashboard-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:custom-metrics-experimental", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:ddm-sidebar-item-hidden", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:metrics-stats", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:default-high-priority-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
manager.add("organizations:derive-code-mappings", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
manager.add("organizations:device-class-synthesis", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
Expand Down
5 changes: 3 additions & 2 deletions src/sentry/feedback/usecases/create_feedback.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from sentry.models.group import GroupStatus
from sentry.models.project import Project
from sentry.signals import first_feedback_received, first_new_feedback_received
from sentry.types.group import GroupSubStatus
from sentry.utils import metrics
from sentry.utils.outcomes import Outcome, track_outcome
from sentry.utils.safe import get_path
Expand Down Expand Up @@ -88,7 +89,7 @@ def make_evidence(feedback, source: FeedbackCreationSource, is_message_spam: boo
evidence_display.append(IssueEvidence(name="source", value=source.value, important=False))

if is_message_spam is True:
evidence_data["is_spam"] = str(is_message_spam)
evidence_data["is_spam"] = is_message_spam
evidence_display.append(
IssueEvidence(name="is_spam", value=str(is_message_spam), important=False)
)
Expand Down Expand Up @@ -360,6 +361,6 @@ def auto_ignore_spam_feedbacks(project, issue_fingerprint):
fingerprint=issue_fingerprint,
project_id=project.id,
new_status=GroupStatus.IGNORED, # we use ignored in the UI for the spam tab
new_substatus=None,
new_substatus=GroupSubStatus.FOREVER,
),
)
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ class SlugMismatchException(Exception):

def create_post_provision_outbox(
provisioning_options: OrganizationProvisioningOptions, org_id: int
):
) -> RegionOutbox:
return RegionOutbox(
shard_scope=OutboxScope.ORGANIZATION_SCOPE,
shard_identifier=org_id,
Expand All @@ -49,7 +49,7 @@ def create_organization_provisioning_outbox(
organization_id: int,
region_name: str,
org_provision_payload: OrganizationProvisioningOptions | None,
):
) -> ControlOutbox:
payload = org_provision_payload.json() if org_provision_payload is not None else None
return ControlOutbox(
region_name=region_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@

def create_post_provision_outbox(
provisioning_options: OrganizationProvisioningOptions, org_id: int
):
) -> RegionOutbox:
return RegionOutbox(
shard_scope=OutboxScope.ORGANIZATION_SCOPE,
shard_identifier=org_id,
Expand Down
4 changes: 2 additions & 2 deletions src/sentry/monitors/consumers/monitor_consumer.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,12 +42,12 @@
MonitorLimitsExceeded,
MonitorType,
)
from sentry.monitors.processing_errors import (
from sentry.monitors.processing_errors.errors import (
CheckinValidationError,
ProcessingError,
ProcessingErrorType,
handle_processing_errors,
)
from sentry.monitors.processing_errors.manager import handle_processing_errors
from sentry.monitors.types import CheckinItem
from sentry.monitors.utils import (
get_new_timeout_at,
Expand Down
Loading

0 comments on commit fd524ce

Please sign in to comment.