Skip to content

Commit

Permalink
test
Browse files Browse the repository at this point in the history
  • Loading branch information
Rachel Chen authored and Rachel Chen committed Jan 27, 2025
1 parent bbd15ce commit 446fafc
Show file tree
Hide file tree
Showing 2 changed files with 175 additions and 0 deletions.
73 changes: 73 additions & 0 deletions tests/web/rpc/v1/test_endpoint_get_traces.py
Original file line number Diff line number Diff line change
Expand Up @@ -521,3 +521,76 @@ def test_with_data_and_aggregated_fields(self, setup_teardown: Any) -> None:
meta=ResponseMeta(request_id=_REQUEST_ID),
)
assert MessageToDict(response) == MessageToDict(expected_response)

def test_with_data_and_aggregated_fields_ignore_case(
    self, setup_teardown: Any
) -> None:
    """A span filter with ignore_case=True (op == "DB") should still return
    one trace per fixture trace id, ordered by earliest span start, newest
    first — presumably the stored op differs in case from "DB"; verify
    against the fixtures."""
    start_ts = Timestamp(seconds=int(_BASE_TIME.timestamp()))
    end_seconds = int((_BASE_TIME + timedelta(hours=3)).timestamp())

    # Earliest span start per trace. The sentinel (2e10 seconds, far in the
    # future) is assumed larger than any fixture timestamp.
    earliest_start_by_trace: dict[str, float] = {}
    for span in _SPANS:
        trace_id = span["trace_id"]
        prior = earliest_start_by_trace.get(trace_id, 2 * 1e10)
        earliest_start_by_trace[trace_id] = min(
            prior, span["start_timestamp_precise"]
        )
    # Reverse lookup: earliest start timestamp -> owning trace id.
    trace_by_start: dict[float, str] = {
        start: trace for trace, start in earliest_start_by_trace.items()
    }

    message = GetTracesRequest(
        meta=RequestMeta(
            project_ids=[1, 2, 3],
            organization_id=1,
            cogs_category="something",
            referrer="something",
            start_timestamp=start_ts,
            end_timestamp=Timestamp(seconds=end_seconds),
            request_id=_REQUEST_ID,
        ),
        attributes=[
            TraceAttribute(
                key=TraceAttribute.Key.KEY_START_TIMESTAMP,
                type=AttributeKey.TYPE_DOUBLE,
            ),
        ],
        filters=[
            GetTracesRequest.TraceFilter(
                item_type=TraceItemType.TRACE_ITEM_TYPE_SPAN,
                filter=TraceItemFilter(
                    comparison_filter=ComparisonFilter(
                        key=AttributeKey(
                            name="sentry.op",
                            type=AttributeKey.TYPE_STRING,
                        ),
                        op=ComparisonFilter.OP_EQUALS,
                        # Upper-case value on purpose: the match should
                        # succeed only via ignore_case.
                        value=AttributeValue(val_str="DB"),
                        ignore_case=True,
                    ),
                ),
            ),
        ],
    )
    response = EndpointGetTraces().execute(message)

    # Expect one trace per distinct trace id, most recent earliest-start
    # first, each carrying its earliest start as a double attribute.
    expected_response = GetTracesResponse(
        traces=[
            GetTracesResponse.Trace(
                attributes=[
                    TraceAttribute(
                        key=TraceAttribute.Key.KEY_START_TIMESTAMP,
                        type=AttributeKey.TYPE_DOUBLE,
                        value=AttributeValue(
                            val_double=earliest_start_by_trace[
                                trace_by_start[start]
                            ],
                        ),
                    ),
                ],
            )
            for start in sorted(trace_by_start.keys(), reverse=True)
        ],
        page_token=PageToken(offset=len(_TRACE_IDS)),
        meta=ResponseMeta(request_id=_REQUEST_ID),
    )
    assert MessageToDict(response) == MessageToDict(expected_response)
Original file line number Diff line number Diff line change
Expand Up @@ -607,6 +607,108 @@ def test_with_filters(self) -> None:
),
]

def test_with_filters_ignore_case(self) -> None:
    """AND of two case-insensitive filters (OP_EQUALS "bOb", OP_IN
    ["bob", "ALICE"]) should select only the "bob" series, so every bucket
    aggregates 300 points of value 1."""
    bucket_width_secs = 300
    window_secs = 60 * 30

    # Store a test metric with a value of 1 every second for one hour,
    # tagged customer=bob ...
    store_spans_timeseries(
        BASE_TIME,
        1,
        3600,
        metrics=[DummyMetric("test_metric", get_value=lambda _: 1)],
        tags={"customer": "bob"},
    )
    # ... plus a customer=alice series whose value (999) would corrupt the
    # sums/averages if the filters matched it by mistake.
    store_spans_timeseries(
        BASE_TIME,
        1,
        3600,
        metrics=[DummyMetric("test_metric", get_value=lambda _: 999)],
        tags={"customer": "alice"},
    )

    # Mixed-case value: equals "bob" only via ignore_case.
    equals_bob = TraceItemFilter(
        comparison_filter=ComparisonFilter(
            key=AttributeKey(type=AttributeKey.TYPE_STRING, name="customer"),
            op=ComparisonFilter.OP_EQUALS,
            value=AttributeValue(val_str="bOb"),
            ignore_case=True,
        )
    )
    # IN list includes an upper-cased "ALICE"; combined (ANDed) with the
    # equals filter above, only bob survives.
    in_bob_or_alice = TraceItemFilter(
        comparison_filter=ComparisonFilter(
            key=AttributeKey(type=AttributeKey.TYPE_STRING, name="customer"),
            op=ComparisonFilter.OP_IN,
            value=AttributeValue(
                val_str_array=StrArray(values=["bob", "ALICE"])
            ),
            ignore_case=True,
        )
    )

    message = TimeSeriesRequest(
        meta=RequestMeta(
            project_ids=[1, 2, 3],
            organization_id=1,
            cogs_category="something",
            referrer="something",
            start_timestamp=Timestamp(seconds=int(BASE_TIME.timestamp())),
            end_timestamp=Timestamp(
                seconds=int(BASE_TIME.timestamp() + window_secs)
            ),
            debug=True,
            trace_item_type=TraceItemType.TRACE_ITEM_TYPE_SPAN,
        ),
        aggregations=[
            AttributeAggregation(
                aggregate=Function.FUNCTION_SUM,
                key=AttributeKey(type=AttributeKey.TYPE_FLOAT, name="test_metric"),
                label="sum",
                extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_NONE,
            ),
            AttributeAggregation(
                aggregate=Function.FUNCTION_AVG,
                key=AttributeKey(type=AttributeKey.TYPE_FLOAT, name="test_metric"),
                label="avg",
                extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_NONE,
            ),
        ],
        filter=TraceItemFilter(
            and_filter=AndFilter(filters=[equals_bob, in_bob_or_alice])
        ),
        granularity_secs=bucket_width_secs,
    )
    response = EndpointTimeSeries().execute(message)

    # One bucket per granularity step across the 30-minute query window.
    bucket_timestamps = [
        Timestamp(seconds=int(BASE_TIME.timestamp()) + offset)
        for offset in range(0, window_secs, bucket_width_secs)
    ]
    # 300 one-valued points per 300s bucket -> sum=300, avg=1.
    assert sorted(response.result_timeseries, key=lambda series: series.label) == [
        TimeSeries(
            label="avg",
            buckets=bucket_timestamps,
            data_points=[
                DataPoint(data=1, data_present=True, sample_count=300)
                for _ in bucket_timestamps
            ],
        ),
        TimeSeries(
            label="sum",
            buckets=bucket_timestamps,
            data_points=[
                DataPoint(data=300, data_present=True, sample_count=300)
                for _ in bucket_timestamps
            ],
        ),
    ]

def test_with_unaligned_granularities(self) -> None:
query_offset = 5
query_duration = 1800 + query_offset
Expand Down

0 comments on commit 446fafc

Please sign in to comment.