From 515fac2354158ef4bb2f0f2d91c83d56f2260bc6 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 10:58:25 +0200 Subject: [PATCH 01/12] Added origin to spans and transactions --- sentry_sdk/tracing.py | 18 ++++++++++++++++-- tests/test_span_origin.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 tests/test_span_origin.py diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index a6b1905a3c..bc5d047b5b 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -87,6 +87,13 @@ class SpanKwargs(TypedDict, total=False): scope: "sentry_sdk.Scope" """The scope to use for this span. If not provided, we use the current scope.""" + origin: str + """ + The origin of the span. + See https://develop.sentry.dev/sdk/performance/trace-origin/ + Default "manual". + """ + class TransactionKwargs(SpanKwargs, total=False): name: str """Identifier of the transaction. Will show up in the Sentry UI.""" @@ -206,6 +213,7 @@ class Span: "_containing_transaction", "_local_aggregator", "scope", + "origin", ) def __init__( @@ -222,6 +230,7 @@ def __init__( containing_transaction=None, # type: Optional[Transaction] start_timestamp=None, # type: Optional[Union[datetime, float]] scope=None, # type: Optional[sentry_sdk.Scope] + origin="manual", # type: str ): # type: (...) -> None self.trace_id = trace_id or uuid.uuid4().hex @@ -234,6 +243,7 @@ def __init__( self.status = status self.hub = hub self.scope = scope + self.origin = origin self._measurements = {} # type: Dict[str, MeasurementValue] self._tags = {} # type: MutableMapping[str, str] self._data = {} # type: Dict[str, Any] @@ -276,7 +286,7 @@ def _get_local_aggregator(self): def __repr__(self): # type: () -> str return ( - "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" + "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" % ( self.__class__.__name__, self.op, @@ -285,6 +295,7 @@ def __repr__(self): self.span_id, self.parent_span_id, self.sampled, + self.origin, ) ) @@ -604,6 +615,7 @@ def to_json(self): "description": self.description, "start_timestamp": self.start_timestamp, "timestamp": self.timestamp, + "origin": self.origin, } # type: Dict[str, Any] if self.status: @@ -635,6 +647,7 @@ def get_trace_context(self): "parent_span_id": self.parent_span_id, "op": self.op, "description": self.description, + "origin": self.origin, } # type: Dict[str, Any] if self.status: rv["status"] = self.status @@ -701,7 +714,7 @@ def __init__( def __repr__(self): # type: () -> str return ( - "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r)>" + "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r, origin=%r)>" % ( self.__class__.__name__, self.name, @@ -711,6 +724,7 @@ def __repr__(self): self.parent_span_id, self.sampled, self.source, + self.origin, ) ) diff --git a/tests/test_span_origin.py b/tests/test_span_origin.py new file mode 100644 index 0000000000..f880279f08 --- /dev/null +++ b/tests/test_span_origin.py @@ -0,0 +1,38 @@ +from sentry_sdk import start_transaction, start_span + + +def test_span_origin_manual(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_transaction(name="hi"): + with start_span(op="foo", description="bar"): + pass + + (event,) = events + + assert len(events) == 1 + assert event["spans"][0]["origin"] == "manual" + assert 
event["contexts"]["trace"]["origin"] == "manual" + + +def test_span_origin_custom(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_transaction(name="hi"): + with start_span(op="foo", description="bar", origin="foo.foo2.foo3"): + pass + + with start_transaction(name="ho", origin="ho.ho2.ho3"): + with start_span(op="baz", description="qux", origin="baz.baz2.baz3"): + pass + + (first_transaction, second_transaction) = events + + assert len(events) == 2 + assert first_transaction["contexts"]["trace"]["origin"] == "manual" + assert first_transaction["spans"][0]["origin"] == "foo.foo2.foo3" + + assert second_transaction["contexts"]["trace"]["origin"] == "ho.ho2.ho3" + assert second_transaction["spans"][0]["origin"] == "baz.baz2.baz3" From fb68302d1ad34af9b7ed4b042621b571ab17d3d2 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 11:11:55 +0200 Subject: [PATCH 02/12] Fixed test --- tests/test_new_scopes_compat_event.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_new_scopes_compat_event.py b/tests/test_new_scopes_compat_event.py index 034beb50b8..03828c159c 100644 --- a/tests/test_new_scopes_compat_event.py +++ b/tests/test_new_scopes_compat_event.py @@ -157,6 +157,7 @@ def create_expected_transaction_event(trx, span): "parent_span_id": None, "op": "test_transaction_op", "description": None, + "origin": "manual", }, "character": { "name": "Mighty Fighter changed by before_send_transaction", From 02991fd51eeb549e829dc98792cc2ecc5af166e2 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 11:14:01 +0200 Subject: [PATCH 03/12] Fixed test --- tests/integrations/celery/test_celery.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index d8308c5978..e521c46c4b 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -226,6 +226,7 @@ def dummy_task(x, y): "data": ApproxDict(), "description": "dummy_task", "op": "queue.submit.celery", + "origin": "manual", "parent_span_id": submission_event["contexts"]["trace"]["span_id"], "same_process_as_parent": True, "span_id": submission_event["spans"][0]["span_id"], From 4a33d7ea96b8a695efeab61350859678bf3548e4 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 11:20:48 +0200 Subject: [PATCH 04/12] Fix test --- .../test_clickhouse_driver.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index b39f722c52..3f2f7c6746 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -247,6 +247,7 @@ def test_clickhouse_client_spans( expected_spans = [ { "op": "db", + "origin": "manual", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -261,6 +262,7 @@ def test_clickhouse_client_spans( }, { "op": "db", + "origin": "manual", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -275,6 +277,7 @@ def test_clickhouse_client_spans( }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -289,6 +292,7 @@ def test_clickhouse_client_spans( }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": 
"clickhouse", @@ -303,6 +307,7 @@ def test_clickhouse_client_spans( }, { "op": "db", + "origin": "manual", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", @@ -365,6 +370,7 @@ def test_clickhouse_client_spans_with_pii( expected_spans = [ { "op": "db", + "origin": "manual", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -380,6 +386,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -395,6 +402,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -410,6 +418,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -425,6 +434,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", @@ -685,6 +695,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) expected_spans = [ { "op": "db", + "origin": "manual", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -699,6 +710,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", + "origin": "manual", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -713,6 +725,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -727,6 +740,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -741,6 +755,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", + "origin": "manual", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", @@ -803,6 +818,7 @@ def test_clickhouse_dbapi_spans_with_pii( expected_spans = [ { "op": "db", + "origin": "manual", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -818,6 +834,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -833,6 +850,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -848,6 +866,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -863,6 +882,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", + "origin": "manual", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", From dd383d1c4fdda19fc43babdb1d2f9bb2b9e43928 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 11:23:20 +0200 Subject: [PATCH 05/12] Fix test --- tests/test_new_scopes_compat_event.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/tests/test_new_scopes_compat_event.py b/tests/test_new_scopes_compat_event.py index 03828c159c..5fb04f6463 100644 --- a/tests/test_new_scopes_compat_event.py +++ b/tests/test_new_scopes_compat_event.py @@ -75,6 +75,7 @@ def create_expected_error_event(trx, span): "span_id": span.span_id, "parent_span_id": span.parent_span_id, "op": "test_span", + "origin": "manual", "description": None, }, "runtime": { @@ -156,8 +157,8 @@ def create_expected_transaction_event(trx, span): "span_id": trx.span_id, "parent_span_id": None, "op": "test_transaction_op", - "description": None, "origin": "manual", + "description": None, }, "character": { "name": "Mighty Fighter changed by before_send_transaction", From 94997b243ae949265f3534be146e9d8bfc493c78 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 11:34:59 +0200 Subject: [PATCH 06/12] Fix test --- tests/test_new_scopes_compat_event.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_new_scopes_compat_event.py b/tests/test_new_scopes_compat_event.py index 5fb04f6463..2c75e3887a 100644 --- a/tests/test_new_scopes_compat_event.py +++ b/tests/test_new_scopes_compat_event.py @@ -36,7 +36,7 @@ def create_expected_error_event(trx, span): "abs_path": mock.ANY, "function": "_faulty_function", "module": "tests.test_new_scopes_compat_event", - "lineno": 240, + "lineno": mock.ANY, "pre_context": [ " return create_expected_transaction_event", "", @@ -185,6 +185,7 @@ def create_expected_transaction_event(trx, span): "parent_span_id": span.parent_span_id, "same_process_as_parent": True, "op": "test_span", + "origin": "manual", "description": None, "start_timestamp": mock.ANY, "timestamp": mock.ANY, From c7e16661eb529060c996a0ba9bcf72f99e621ae8 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 12:15:46 +0200 Subject: [PATCH 07/12] Add origin to continue_trace --- sentry_sdk/scope.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index b695bffa3c..fdf878c63f 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -1075,8 +1075,10 @@ def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): return span - def continue_trace(self, environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction + def continue_trace( + self, environ_or_headers, op=None, name=None, source=None, origin="manual" + ): + # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. 
""" @@ -1085,6 +1087,7 @@ def continue_trace(self, environ_or_headers, op=None, name=None, source=None): transaction = Transaction.continue_from_headers( normalize_incoming_data(environ_or_headers), op=op, + origin=origin, name=name, source=source, ) From 4ab7af54cbb93dc81627b0fc6ef88553a3593c11 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 5 Jun 2024 12:18:01 +0200 Subject: [PATCH 08/12] Added origin to missing api --- sentry_sdk/api.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index ba042c0a9f..3dd6f9c737 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -378,11 +378,13 @@ def get_baggage(): return None -def continue_trace(environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction +def continue_trace( + environ_or_headers, op=None, name=None, source=None, origin="manual" +): + # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction """ Sets the propagation context from environment or headers and returns a transaction. """ return Scope.get_isolation_scope().continue_trace( - environ_or_headers, op, name, source + environ_or_headers, op, name, source, origin ) From 5c30137152439c6e8efcaad02e2dc5da2e8bd1d5 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Jun 2024 11:12:08 +0200 Subject: [PATCH 09/12] Moved file --- tests/{ => tracing}/test_span_origin.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{ => tracing}/test_span_origin.py (100%) diff --git a/tests/test_span_origin.py b/tests/tracing/test_span_origin.py similarity index 100% rename from tests/test_span_origin.py rename to tests/tracing/test_span_origin.py From 97c8aa2acdc30e5bf79efc951b356b9f007f2b09 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Jun 2024 12:29:08 +0200 Subject: [PATCH 10/12] Add `origin` to transactions/spans created by integrations (#3135) * Added origin to aiohttp integration * Added origin to anthropic integration * Added origin to arq integration * format * Added origin to asyncio integration * Added origin to asyncpg integration * Added origin to boto3 integration * Added origin to clickhouse_driver integration * Only one type of origin for boto3 because the OP is different anyhow * Added origin to cohere integration * Added origin to httpx integration * Added origin to huey integration * Added origin to huggingface_hub integration * Added origin to langchain integration * Added origin to openai integration * Added origin to pymongo integration * Added origin to rq integration * fix * Added origin to sanic integration * Added origin to starlette integration * Added origin to starlite integration * Added origin to strawberry integration * Added origin to tornado integration * Disable test for Sanic versions not supporting performance * oops * Added origin to bottle integration * Added origin to falcon integration * Added origin to flask integration * Added origin to pyramid integration * Added origin to trytond integration * Added origin to django integration * Added origin tests to wsgi integration * Added origin to Django asgi integration * Since Django 3 the channels handler is deprecated * small fix * Added origin to quart integration * Added origin to starlite integration * Added origin to starlette integration * Added origin to aws_lambda integration * Added origin to gcp integration * trytond fix * aws_lambda fix * strawberry fix * formatting * needs django >1.11 * 
fixed merge problem * Added more origin to django integration * trigger ci * Added tests for all kinds of spans emmited by django * Fixed tests * Fixed tests again --- sentry_sdk/integrations/aiohttp.py | 3 + sentry_sdk/integrations/anthropic.py | 5 +- sentry_sdk/integrations/arq.py | 6 +- sentry_sdk/integrations/asgi.py | 17 ++- sentry_sdk/integrations/asyncio.py | 5 +- sentry_sdk/integrations/asyncpg.py | 5 +- sentry_sdk/integrations/aws_lambda.py | 2 + sentry_sdk/integrations/boto3.py | 3 + sentry_sdk/integrations/bottle.py | 8 +- sentry_sdk/integrations/clickhouse_driver.py | 7 +- sentry_sdk/integrations/cohere.py | 3 + sentry_sdk/integrations/django/__init__.py | 36 ++++- sentry_sdk/integrations/django/asgi.py | 14 +- sentry_sdk/integrations/django/caching.py | 6 +- sentry_sdk/integrations/django/middleware.py | 4 +- .../integrations/django/signals_handlers.py | 1 + sentry_sdk/integrations/django/templates.py | 2 + sentry_sdk/integrations/django/views.py | 10 +- sentry_sdk/integrations/falcon.py | 4 +- sentry_sdk/integrations/flask.py | 7 +- sentry_sdk/integrations/gcp.py | 2 + sentry_sdk/integrations/httpx.py | 3 + sentry_sdk/integrations/huey.py | 8 +- sentry_sdk/integrations/huggingface_hub.py | 2 + sentry_sdk/integrations/langchain.py | 6 + sentry_sdk/integrations/openai.py | 3 + sentry_sdk/integrations/pymongo.py | 7 +- sentry_sdk/integrations/pyramid.py | 7 +- sentry_sdk/integrations/quart.py | 6 +- sentry_sdk/integrations/rq.py | 5 +- sentry_sdk/integrations/sanic.py | 2 + sentry_sdk/integrations/starlette.py | 8 +- sentry_sdk/integrations/starlite.py | 28 ++-- sentry_sdk/integrations/strawberry.py | 29 +++- sentry_sdk/integrations/tornado.py | 2 + sentry_sdk/integrations/trytond.py | 6 +- sentry_sdk/integrations/wsgi.py | 8 +- sentry_sdk/tracing_utils.py | 7 +- tests/integrations/aiohttp/test_aiohttp.py | 31 +++- .../integrations/anthropic/test_anthropic.py | 26 ++++ tests/integrations/arq/test_arq.py | 40 +++++ tests/integrations/asyncio/test_asyncio.py | 28 ++++ tests/integrations/asyncpg/test_asyncpg.py | 17 +++ tests/integrations/aws_lambda/test_aws.py | 19 +++ tests/integrations/boto3/test_s3.py | 17 +++ tests/integrations/bottle/test_bottle.py | 19 +++ .../test_clickhouse_driver.py | 59 +++++--- tests/integrations/cohere/test_cohere.py | 70 +++++++++ tests/integrations/django/myapp/urls.py | 1 + tests/integrations/django/myapp/views.py | 9 ++ tests/integrations/django/test_basic.py | 29 ++++ .../integrations/django/test_cache_module.py | 31 ++++ .../integrations/django/test_db_query_data.py | 66 ++++++++ tests/integrations/falcon/test_falcon.py | 15 ++ tests/integrations/flask/test_flask.py | 15 ++ tests/integrations/gcp/test_gcp.py | 24 +++ tests/integrations/httpx/test_httpx.py | 27 ++++ tests/integrations/huey/test_huey.py | 34 +++++ .../huggingface_hub/test_huggingface_hub.py | 29 ++++ .../integrations/langchain/test_langchain.py | 98 ++++++++++++ tests/integrations/openai/test_openai.py | 108 ++++++++++++++ tests/integrations/pymongo/test_pymongo.py | 20 +++ tests/integrations/pyramid/test_pyramid.py | 15 ++ tests/integrations/quart/test_quart.py | 17 +++ tests/integrations/rq/test_rq.py | 15 ++ tests/integrations/sanic/test_sanic.py | 16 ++ .../integrations/starlette/test_starlette.py | 23 +++ tests/integrations/starlite/test_starlite.py | 34 +++++ .../strawberry/test_strawberry.py | 141 +++++++++++++++++- tests/integrations/tornado/test_tornado.py | 14 ++ tests/integrations/trytond/test_trytond.py | 19 +++ tests/integrations/wsgi/test_wsgi.py | 39 +++++ 72 files 
changed, 1374 insertions(+), 78 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 9edaaf5cc9..7a092499b2 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -63,6 +63,7 @@ class AioHttpIntegration(Integration): identifier = "aiohttp" + origin = f"auto.http.{identifier}" def __init__(self, transaction_style="handler_name"): # type: (str) -> None @@ -120,6 +121,7 @@ async def sentry_app_handle(self, request, *args, **kwargs): # URL resolver did not find a route or died trying. name="generic AIOHTTP request", source=TRANSACTION_SOURCE_ROUTE, + origin=AioHttpIntegration.origin, ) with sentry_sdk.start_transaction( transaction, @@ -206,6 +208,7 @@ async def on_request_start(session, trace_config_ctx, params): op=OP.HTTP_CLIENT, description="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), + origin=AioHttpIntegration.origin, ) span.set_data(SPANDATA.HTTP_METHOD, method) if parsed_url is not None: diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 04583e38ea..41d8e9d7d5 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -30,6 +30,7 @@ class AnthropicIntegration(Integration): identifier = "anthropic" + origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (AnthropicIntegration, bool) -> None @@ -92,7 +93,9 @@ def _sentry_patched_create(*args, **kwargs): model = kwargs.get("model") span = sentry_sdk.start_span( - op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create" + op=OP.ANTHROPIC_MESSAGES_CREATE, + description="Anthropic messages create", + origin=AnthropicIntegration.origin, ) span.__enter__() diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 12f73aa95f..5eec9d445b 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -39,6 +39,7 @@ class ArqIntegration(Integration): identifier = "arq" + origin = f"auto.queue.{identifier}" @staticmethod def setup_once(): @@ -76,7 +77,9 @@ async def _sentry_enqueue_job(self, function, *args, **kwargs): if integration is None: return await old_enqueue_job(self, function, *args, **kwargs) - with sentry_sdk.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function): + with sentry_sdk.start_span( + op=OP.QUEUE_SUBMIT_ARQ, description=function, origin=ArqIntegration.origin + ): return await old_enqueue_job(self, function, *args, **kwargs) ArqRedis.enqueue_job = _sentry_enqueue_job @@ -101,6 +104,7 @@ async def _sentry_run_job(self, job_id, score): status="ok", op=OP.QUEUE_TASK_ARQ, source=TRANSACTION_SOURCE_TASK, + origin=ArqIntegration.origin, ) with sentry_sdk.start_transaction(transaction): diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 8aca37ea40..c0553cb474 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -82,7 +82,13 @@ def _looks_like_asgi3(app): class SentryAsgiMiddleware: - __slots__ = ("app", "__call__", "transaction_style", "mechanism_type") + __slots__ = ( + "app", + "__call__", + "transaction_style", + "mechanism_type", + "span_origin", + ) def __init__( self, @@ -90,8 +96,9 @@ def __init__( unsafe_context_data=False, transaction_style="endpoint", mechanism_type="asgi", + span_origin="manual", ): - # type: (Any, bool, str, str) -> None + # type: (Any, bool, str, str, str) -> None """ Instrument an ASGI application with Sentry. 
Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up @@ -124,6 +131,7 @@ def __init__( self.transaction_style = transaction_style self.mechanism_type = mechanism_type + self.span_origin = span_origin self.app = app if _looks_like_asgi3(app): @@ -182,6 +190,7 @@ async def _run_app(self, scope, receive, send, asgi_version): op="{}.server".format(ty), name=transaction_name, source=transaction_source, + origin=self.span_origin, ) logger.debug( "[ASGI] Created transaction (continuing trace): %s", @@ -192,6 +201,7 @@ async def _run_app(self, scope, receive, send, asgi_version): op=OP.HTTP_SERVER, name=transaction_name, source=transaction_source, + origin=self.span_origin, ) logger.debug( "[ASGI] Created transaction (new): %s", transaction @@ -205,7 +215,8 @@ async def _run_app(self, scope, receive, send, asgi_version): ) with sentry_sdk.start_transaction( - transaction, custom_sampling_context={"asgi_scope": scope} + transaction, + custom_sampling_context={"asgi_scope": scope}, ): logger.debug("[ASGI] Started transaction: %s", transaction) try: diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py index 18c092e0c0..8a62755caa 100644 --- a/sentry_sdk/integrations/asyncio.py +++ b/sentry_sdk/integrations/asyncio.py @@ -45,7 +45,9 @@ async def _coro_creating_hub_and_span(): with sentry_sdk.isolation_scope(): with sentry_sdk.start_span( - op=OP.FUNCTION, description=get_name(coro) + op=OP.FUNCTION, + description=get_name(coro), + origin=AsyncioIntegration.origin, ): try: result = await coro @@ -97,6 +99,7 @@ def _capture_exception(): class AsyncioIntegration(Integration): identifier = "asyncio" + origin = f"auto.function.{identifier}" @staticmethod def setup_once(): diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index cfcb8a0528..30cb50e5f7 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -29,6 +29,7 @@ class AsyncPGIntegration(Integration): identifier = "asyncpg" + origin = f"auto.db.{identifier}" _record_params = False def __init__(self, *, record_params: bool = False): @@ -154,7 +155,9 @@ async def _inner(*args: Any, **kwargs: Any) -> T: user = kwargs["params"].user database = kwargs["params"].database - with sentry_sdk.start_span(op=OP.DB, description="connect") as span: + with sentry_sdk.start_span( + op=OP.DB, description="connect", origin=AsyncPGIntegration.origin + ) as span: span.set_data(SPANDATA.DB_SYSTEM, "postgresql") addr = kwargs.get("addr") if addr: diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index bd1e3619de..3c909ad9af 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -139,6 +139,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): op=OP.FUNCTION_AWS, name=aws_context.function_name, source=TRANSACTION_SOURCE_COMPONENT, + origin=AwsLambdaIntegration.origin, ) with sentry_sdk.start_transaction( transaction, @@ -178,6 +179,7 @@ def _drain_queue(): class AwsLambdaIntegration(Integration): identifier = "aws_lambda" + origin = f"auto.function.{identifier}" def __init__(self, timeout_warning=False): # type: (bool) -> None diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index e1c9ae698f..0fb997767b 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -30,6 +30,7 @@ class Boto3Integration(Integration): identifier = "boto3" + origin = f"auto.http.{identifier}" 
@staticmethod def setup_once(): @@ -69,6 +70,7 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): span = sentry_sdk.start_span( op=OP.HTTP_CLIENT, description=description, + origin=Boto3Integration.origin, ) with capture_internal_exceptions(): @@ -106,6 +108,7 @@ def _sentry_after_call(context, parsed, **kwargs): streaming_span = span.start_child( op=OP.HTTP_CLIENT_STREAM, description=span.description, + origin=Boto3Integration.origin, ) orig_read = body.read diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index 472f0a352b..f6dc454478 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -40,6 +40,7 @@ class BottleIntegration(Integration): identifier = "bottle" + origin = f"auto.http.{identifier}" transaction_style = "" @@ -69,10 +70,13 @@ def setup_once(): @ensure_integration_enabled(BottleIntegration, old_app) def sentry_patched_wsgi_app(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse - return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))( - environ, start_response + middleware = SentryWsgiMiddleware( + lambda *a, **kw: old_app(self, *a, **kw), + span_origin=BottleIntegration.origin, ) + return middleware(environ, start_response) + Bottle.__call__ = sentry_patched_wsgi_app old_handle = Bottle._handle diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index 075a735030..0f63f868d5 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -41,6 +41,7 @@ def __getitem__(self, _): class ClickhouseDriverIntegration(Integration): identifier = "clickhouse_driver" + origin = f"auto.db.{identifier}" @staticmethod def setup_once() -> None: @@ -81,7 +82,11 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: query_id = args[2] if len(args) > 2 else kwargs.get("query_id") params = args[3] if len(args) > 3 else kwargs.get("params") - span = sentry_sdk.start_span(op=OP.DB, description=query) + span = sentry_sdk.start_span( + op=OP.DB, + description=query, + origin=ClickhouseDriverIntegration.origin, + ) connection._sentry_span = span # type: ignore[attr-defined] diff --git a/sentry_sdk/integrations/cohere.py b/sentry_sdk/integrations/cohere.py index 1b6f9067ee..b32d720b77 100644 --- a/sentry_sdk/integrations/cohere.py +++ b/sentry_sdk/integrations/cohere.py @@ -66,6 +66,7 @@ class CohereIntegration(Integration): identifier = "cohere" + origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (CohereIntegration, bool) -> None @@ -141,6 +142,7 @@ def new_chat(*args, **kwargs): span = sentry_sdk.start_span( op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE, description="cohere.client.Chat", + origin=CohereIntegration.origin, ) span.__enter__() try: @@ -225,6 +227,7 @@ def new_embed(*args, **kwargs): with sentry_sdk.start_span( op=consts.OP.COHERE_EMBEDDINGS_CREATE, description="Cohere Embedding Creation", + origin=CohereIntegration.origin, ) as span: integration = sentry_sdk.get_client().get_integration(CohereIntegration) if "texts" in kwargs and ( diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 6be0113241..080af8794e 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -115,6 +115,7 @@ class DjangoIntegration(Integration): """ identifier = "django" + origin = f"auto.http.{identifier}" 
transaction_style = "" middleware_spans = None @@ -171,9 +172,12 @@ def sentry_patched_wsgi_handler(self, environ, start_response): use_x_forwarded_for = settings.USE_X_FORWARDED_HOST - return SentryWsgiMiddleware(bound_old_app, use_x_forwarded_for)( - environ, start_response + middleware = SentryWsgiMiddleware( + bound_old_app, + use_x_forwarded_for, + span_origin=DjangoIntegration.origin, ) + return middleware(environ, start_response) WSGIHandler.__call__ = sentry_patched_wsgi_handler @@ -321,10 +325,14 @@ def sentry_patched_drf_initial(self, request, *args, **kwargs): def _patch_channels(): # type: () -> None try: + # Django < 3.0 from channels.http import AsgiHandler # type: ignore except ImportError: - return - + try: + # DJango 3.0+ + from django.core.handlers.asgi import ASGIHandler as AsgiHandler + except ImportError: + return if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. @@ -621,7 +629,12 @@ def install_sql_hook(): def execute(self, sql, params=None): # type: (CursorWrapper, Any, Optional[Any]) -> Any with record_sql_queries( - self.cursor, sql, params, paramstyle="format", executemany=False + cursor=self.cursor, + query=sql, + params_list=params, + paramstyle="format", + executemany=False, + span_origin=DjangoIntegration.origin, ) as span: _set_db_data(span, self) options = ( @@ -649,7 +662,12 @@ def execute(self, sql, params=None): def executemany(self, sql, param_list): # type: (CursorWrapper, Any, List[Any]) -> Any with record_sql_queries( - self.cursor, sql, param_list, paramstyle="format", executemany=True + cursor=self.cursor, + query=sql, + params_list=param_list, + paramstyle="format", + executemany=True, + span_origin=DjangoIntegration.origin, ) as span: _set_db_data(span, self) @@ -666,7 +684,11 @@ def connect(self): with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message="connect", category="query") - with sentry_sdk.start_span(op=OP.DB, description="connect") as span: + with sentry_sdk.start_span( + op=OP.DB, + description="connect", + origin=DjangoIntegration.origin, + ) as span: _set_db_data(span, self) return real_connect(self) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index e62ce681e7..6667986312 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -95,7 +95,9 @@ async def sentry_patched_asgi_handler(self, scope, receive, send): return await old_app(self, scope, receive, send) middleware = SentryAsgiMiddleware( - old_app.__get__(self, cls), unsafe_context_data=True + old_app.__get__(self, cls), + unsafe_context_data=True, + span_origin=DjangoIntegration.origin, )._run_asgi3 return await middleware(scope, receive, send) @@ -145,7 +147,9 @@ async def sentry_patched_asgi_handler(self, receive, send): return await old_app(self, receive, send) middleware = SentryAsgiMiddleware( - lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True + lambda _scope: old_app.__get__(self, cls), + unsafe_context_data=True, + span_origin=DjangoIntegration.origin, ) return await middleware(self.scope)(receive, send) @@ -160,6 +164,8 @@ async def sentry_patched_asgi_handler(self, receive, send): def wrap_async_view(callback): # type: (Any) -> Any + from sentry_sdk.integrations.django import DjangoIntegration + @functools.wraps(callback) async def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any @@ -168,7 +174,9 @@ async def sentry_wrapped_callback(request, *args, 
**kwargs): sentry_scope.profile.update_active_thread_id() with sentry_sdk.start_span( - op=OP.VIEW_RENDER, description=request.resolver_match.view_name + op=OP.VIEW_RENDER, + description=request.resolver_match.view_name, + origin=DjangoIntegration.origin, ): return await callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index 3c0e905c44..25b04f4820 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -50,7 +50,11 @@ def _instrument_call( op = OP.CACHE_PUT if is_set_operation else OP.CACHE_GET description = _get_span_description(method_name, args, kwargs) - with sentry_sdk.start_span(op=op, description=description) as span: + with sentry_sdk.start_span( + op=op, + description=description, + origin=DjangoIntegration.origin, + ) as span: value = original_method(*args, **kwargs) with capture_internal_exceptions(): diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 9d191ce076..6f75444cbf 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -83,7 +83,9 @@ def _check_middleware_span(old_method): description = "{}.{}".format(description, function_basename) middleware_span = sentry_sdk.start_span( - op=OP.MIDDLEWARE_DJANGO, description=description + op=OP.MIDDLEWARE_DJANGO, + description=description, + origin=DjangoIntegration.origin, ) middleware_span.set_tag("django.function_name", function_name) middleware_span.set_tag("django.middleware_name", middleware_name) diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index 969316d2da..0cd084f697 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -67,6 +67,7 @@ def wrapper(*args, **kwargs): with sentry_sdk.start_span( op=OP.EVENT_DJANGO, description=signal_name, + origin=DjangoIntegration.origin, ) as span: span.set_data("signal", signal_name) return receiver(*args, **kwargs) diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 0c75ad7955..fb79fdf75b 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -71,6 +71,7 @@ def rendered_content(self): with sentry_sdk.start_span( op=OP.TEMPLATE_RENDER, description=_get_template_name_description(self.template_name), + origin=DjangoIntegration.origin, ) as span: span.set_data("context", self.context_data) return real_rendered_content.fget(self) @@ -98,6 +99,7 @@ def render(request, template_name, context=None, *args, **kwargs): with sentry_sdk.start_span( op=OP.TEMPLATE_RENDER, description=_get_template_name_description(template_name), + origin=DjangoIntegration.origin, ) as span: span.set_data("context", context) return real_render(request, template_name, context, *args, **kwargs) diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 1fd53462b3..01f871a2f6 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -34,7 +34,9 @@ def patch_views(): def sentry_patched_render(self): # type: (SimpleTemplateResponse) -> Any with sentry_sdk.start_span( - op=OP.VIEW_RESPONSE_RENDER, description="serialize response" + op=OP.VIEW_RESPONSE_RENDER, + description="serialize response", + origin=DjangoIntegration.origin, ): return 
old_render(self) @@ -69,6 +71,8 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs): def _wrap_sync_view(callback): # type: (Any) -> Any + from sentry_sdk.integrations.django import DjangoIntegration + @functools.wraps(callback) def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any @@ -79,7 +83,9 @@ def sentry_wrapped_callback(request, *args, **kwargs): sentry_scope.profile.update_active_thread_id() with sentry_sdk.start_span( - op=OP.VIEW_RENDER, description=request.resolver_match.view_name + op=OP.VIEW_RENDER, + description=request.resolver_match.view_name, + origin=DjangoIntegration.origin, ): return callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index 61c11e11d5..be3fe27519 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -116,6 +116,7 @@ def process_request(self, req, resp, *args, **kwargs): class FalconIntegration(Integration): identifier = "falcon" + origin = f"auto.http.{identifier}" transaction_style = "" @@ -156,7 +157,8 @@ def sentry_patched_wsgi_app(self, env, start_response): return original_wsgi_app(self, env, start_response) sentry_wrapped = SentryWsgiMiddleware( - lambda envi, start_resp: original_wsgi_app(self, envi, start_resp) + lambda envi, start_resp: original_wsgi_app(self, envi, start_resp), + span_origin=FalconIntegration.origin, ) return sentry_wrapped(env, start_response) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 52b843c911..783576839a 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -47,6 +47,7 @@ class FlaskIntegration(Integration): identifier = "flask" + origin = f"auto.http.{identifier}" transaction_style = "" @@ -81,9 +82,11 @@ def sentry_patched_wsgi_app(self, environ, start_response): if sentry_sdk.get_client().get_integration(FlaskIntegration) is None: return old_app(self, environ, start_response) - return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))( - environ, start_response + middleware = SentryWsgiMiddleware( + lambda *a, **kw: old_app(self, *a, **kw), + span_origin=FlaskIntegration.origin, ) + return middleware(environ, start_response) Flask.__call__ = sentry_patched_wsgi_app diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 0cab8f9b26..86d3706fda 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -87,6 +87,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): op=OP.FUNCTION_GCP, name=environ.get("FUNCTION_NAME", ""), source=TRANSACTION_SOURCE_COMPONENT, + origin=GcpIntegration.origin, ) sampling_context = { "gcp_env": { @@ -123,6 +124,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): class GcpIntegration(Integration): identifier = "gcp" + origin = f"auto.function.{identifier}" def __init__(self, timeout_warning=False): # type: (bool) -> None diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index fa75d1440b..e19455118d 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -28,6 +28,7 @@ class HttpxIntegration(Integration): identifier = "httpx" + origin = f"auto.http.{identifier}" @staticmethod def setup_once(): @@ -58,6 +59,7 @@ def send(self, request, **kwargs): request.method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), + origin=HttpxIntegration.origin, ) as span: span.set_data(SPANDATA.HTTP_METHOD, request.method) 
if parsed_url is not None: @@ -113,6 +115,7 @@ async def send(self, request, **kwargs): request.method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), + origin=HttpxIntegration.origin, ) as span: span.set_data(SPANDATA.HTTP_METHOD, request.method) if parsed_url is not None: diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index 9b457c08d6..09301476e5 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -40,6 +40,7 @@ class HueyIntegration(Integration): identifier = "huey" + origin = f"auto.queue.{identifier}" @staticmethod def setup_once(): @@ -55,7 +56,11 @@ def patch_enqueue(): @ensure_integration_enabled(HueyIntegration, old_enqueue) def _sentry_enqueue(self, task): # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]] - with sentry_sdk.start_span(op=OP.QUEUE_SUBMIT_HUEY, description=task.name): + with sentry_sdk.start_span( + op=OP.QUEUE_SUBMIT_HUEY, + description=task.name, + origin=HueyIntegration.origin, + ): if not isinstance(task, PeriodicTask): # Attach trace propagation data to task kwargs. We do # not do this for periodic tasks, as these don't @@ -154,6 +159,7 @@ def _sentry_execute(self, task, timestamp=None): name=task.name, op=OP.QUEUE_TASK_HUEY, source=TRANSACTION_SOURCE_TASK, + origin=HueyIntegration.origin, ) transaction.set_status("ok") diff --git a/sentry_sdk/integrations/huggingface_hub.py b/sentry_sdk/integrations/huggingface_hub.py index 8e5f0e7339..c7ed6907dd 100644 --- a/sentry_sdk/integrations/huggingface_hub.py +++ b/sentry_sdk/integrations/huggingface_hub.py @@ -26,6 +26,7 @@ class HuggingfaceHubIntegration(Integration): identifier = "huggingface_hub" + origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (HuggingfaceHubIntegration, bool) -> None @@ -73,6 +74,7 @@ def new_text_generation(*args, **kwargs): span = sentry_sdk.start_span( op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE, description="Text Generation", + origin=HuggingfaceHubIntegration.origin, ) span.__enter__() try: diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index 9af0bda71e..305b445b2e 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -73,6 +73,7 @@ def count_tokens(s): class LangchainIntegration(Integration): identifier = "langchain" + origin = f"auto.ai.{identifier}" # The most number of spans (e.g., LLM calls) that can be processed at the same time. 
max_spans = 1024 @@ -192,6 +193,7 @@ def on_llm_start( kwargs.get("parent_run_id"), op=OP.LANGCHAIN_RUN, description=kwargs.get("name") or "Langchain LLM call", + origin=LangchainIntegration.origin, ) span = watched_span.span if should_send_default_pii() and self.include_prompts: @@ -213,6 +215,7 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): kwargs.get("parent_run_id"), op=OP.LANGCHAIN_CHAT_COMPLETIONS_CREATE, description=kwargs.get("name") or "Langchain Chat Model", + origin=LangchainIntegration.origin, ) span = watched_span.span model = all_params.get( @@ -316,6 +319,7 @@ def on_chain_start(self, serialized, inputs, *, run_id, **kwargs): else OP.LANGCHAIN_PIPELINE ), description=kwargs.get("name") or "Chain execution", + origin=LangchainIntegration.origin, ) metadata = kwargs.get("metadata") if metadata: @@ -348,6 +352,7 @@ def on_agent_action(self, action, *, run_id, **kwargs): kwargs.get("parent_run_id"), op=OP.LANGCHAIN_AGENT, description=action.tool or "AI tool usage", + origin=LangchainIntegration.origin, ) if action.tool_input and should_send_default_pii() and self.include_prompts: set_data_normalized( @@ -382,6 +387,7 @@ def on_tool_start(self, serialized, input_str, *, run_id, **kwargs): description=serialized.get("name") or kwargs.get("name") or "AI tool usage", + origin=LangchainIntegration.origin, ) if should_send_default_pii() and self.include_prompts: set_data_normalized( diff --git a/sentry_sdk/integrations/openai.py b/sentry_sdk/integrations/openai.py index e280f23e9b..b2c9500026 100644 --- a/sentry_sdk/integrations/openai.py +++ b/sentry_sdk/integrations/openai.py @@ -53,6 +53,7 @@ def count_tokens(s): class OpenAIIntegration(Integration): identifier = "openai" + origin = f"auto.ai.{identifier}" def __init__(self, include_prompts=True): # type: (OpenAIIntegration, bool) -> None @@ -143,6 +144,7 @@ def new_chat_completion(*args, **kwargs): span = sentry_sdk.start_span( op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE, description="Chat Completion", + origin=OpenAIIntegration.origin, ) span.__enter__() try: @@ -226,6 +228,7 @@ def new_embeddings_create(*args, **kwargs): with sentry_sdk.start_span( op=consts.OP.OPENAI_EMBEDDINGS_CREATE, description="OpenAI Embedding Creation", + origin=OpenAIIntegration.origin, ) as span: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if "input" in kwargs and ( diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py index 3492b9c5a6..947dbe3945 100644 --- a/sentry_sdk/integrations/pymongo.py +++ b/sentry_sdk/integrations/pymongo.py @@ -156,7 +156,11 @@ def started(self, event): command = _strip_pii(command) query = "{}".format(command) - span = sentry_sdk.start_span(op=op, description=query) + span = sentry_sdk.start_span( + op=op, + description=query, + origin=PyMongoIntegration.origin, + ) for tag, value in tags.items(): span.set_tag(tag, value) @@ -198,6 +202,7 @@ def succeeded(self, event): class PyMongoIntegration(Integration): identifier = "pymongo" + origin = f"auto.db.{identifier}" @staticmethod def setup_once(): diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index 523ee4b5ec..ab33f7583e 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -53,6 +53,7 @@ def authenticated_userid(request): class PyramidIntegration(Integration): identifier = "pyramid" + origin = f"auto.http.{identifier}" transaction_style = "" @@ -123,9 +124,11 @@ def sentry_patched_inner_wsgi_call(environ, 
start_response): _capture_exception(einfo) reraise(*einfo) - return SentryWsgiMiddleware(sentry_patched_inner_wsgi_call)( - environ, start_response + middleware = SentryWsgiMiddleware( + sentry_patched_inner_wsgi_call, + span_origin=PyramidIntegration.origin, ) + return middleware(environ, start_response) router.Router.__call__ = sentry_patched_wsgi_call diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 3fc34221d0..662074cf9b 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -57,6 +57,7 @@ class QuartIntegration(Integration): identifier = "quart" + origin = f"auto.http.{identifier}" transaction_style = "" @@ -92,7 +93,10 @@ async def sentry_patched_asgi_app(self, scope, receive, send): if sentry_sdk.get_client().get_integration(QuartIntegration) is None: return await old_app(self, scope, receive, send) - middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw)) + middleware = SentryAsgiMiddleware( + lambda *a, **kw: old_app(self, *a, **kw), + span_origin=QuartIntegration.origin, + ) middleware.__call__ = middleware._run_asgi3 return await middleware(scope, receive, send) diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 23035d3dd3..fc5c3faf76 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -37,6 +37,7 @@ class RqIntegration(Integration): identifier = "rq" + origin = f"auto.queue.{identifier}" @staticmethod def setup_once(): @@ -64,13 +65,15 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): op=OP.QUEUE_TASK_RQ, name="unknown RQ task", source=TRANSACTION_SOURCE_TASK, + origin=RqIntegration.origin, ) with capture_internal_exceptions(): transaction.name = job.func_name with sentry_sdk.start_transaction( - transaction, custom_sampling_context={"rq_job": job} + transaction, + custom_sampling_context={"rq_job": job}, ): rv = old_perform_job(self, job, *args, **kwargs) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index fac0991381..f2f9b8168e 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -58,6 +58,7 @@ class SanicIntegration(Integration): identifier = "sanic" + origin = f"auto.http.{identifier}" version = None def __init__(self, unsampled_statuses=frozenset({404})): @@ -199,6 +200,7 @@ async def _context_enter(request): # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, source=TRANSACTION_SOURCE_URL, + origin=SanicIntegration.origin, ) request.ctx._sentry_transaction = sentry_sdk.start_transaction( transaction diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index ac55f8058f..3f78dc4c43 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -69,6 +69,7 @@ class StarletteIntegration(Integration): identifier = "starlette" + origin = f"auto.http.{identifier}" transaction_style = "" @@ -123,7 +124,9 @@ async def _create_span_call(app, scope, receive, send, **kwargs): ) with sentry_sdk.start_span( - op=OP.MIDDLEWARE_STARLETTE, description=middleware_name + op=OP.MIDDLEWARE_STARLETTE, + description=middleware_name, + origin=StarletteIntegration.origin, ) as middleware_span: middleware_span.set_tag("starlette.middleware_name", middleware_name) @@ -133,6 +136,7 @@ async def _sentry_receive(*args, **kwargs): with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE_RECEIVE, description=getattr(receive, 
"__qualname__", str(receive)), + origin=StarletteIntegration.origin, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -147,6 +151,7 @@ async def _sentry_send(*args, **kwargs): with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE_SEND, description=getattr(send, "__qualname__", str(send)), + origin=StarletteIntegration.origin, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await send(*args, **kwargs) @@ -356,6 +361,7 @@ async def _sentry_patched_asgi_app(self, scope, receive, send): lambda *a, **kw: old_app(self, *a, **kw), mechanism_type=StarletteIntegration.identifier, transaction_style=integration.transaction_style, + span_origin=StarletteIntegration.origin, ) middleware.__call__ = middleware._run_asgi3 diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 9ef7329fd9..9ff5045d6c 100644 --- a/sentry_sdk/integrations/starlite.py +++ b/sentry_sdk/integrations/starlite.py @@ -44,18 +44,9 @@ _DEFAULT_TRANSACTION_NAME = "generic Starlite request" -class SentryStarliteASGIMiddleware(SentryAsgiMiddleware): - def __init__(self, app: "ASGIApp"): - super().__init__( - app=app, - unsafe_context_data=False, - transaction_style="endpoint", - mechanism_type="asgi", - ) - - class StarliteIntegration(Integration): identifier = "starlite" + origin = f"auto.http.{identifier}" @staticmethod def setup_once() -> None: @@ -64,6 +55,17 @@ def setup_once() -> None: patch_http_route_handle() +class SentryStarliteASGIMiddleware(SentryAsgiMiddleware): + def __init__(self, app: "ASGIApp", span_origin: str = StarliteIntegration.origin): + super().__init__( + app=app, + unsafe_context_data=False, + transaction_style="endpoint", + mechanism_type="asgi", + span_origin=span_origin, + ) + + def patch_app_init() -> None: """ Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the @@ -130,7 +132,9 @@ async def _create_span_call( middleware_name = self.__class__.__name__ with sentry_sdk.start_span( - op=OP.MIDDLEWARE_STARLITE, description=middleware_name + op=OP.MIDDLEWARE_STARLITE, + description=middleware_name, + origin=StarliteIntegration.origin, ) as middleware_span: middleware_span.set_tag("starlite.middleware_name", middleware_name) @@ -141,6 +145,7 @@ async def _sentry_receive( with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE_RECEIVE, description=getattr(receive, "__qualname__", str(receive)), + origin=StarliteIntegration.origin, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -154,6 +159,7 @@ async def _sentry_send(message: "Message") -> None: with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE_SEND, description=getattr(send, "__qualname__", str(send)), + origin=StarliteIntegration.origin, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await send(message) diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 024907ab7b..5c16c60ff2 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -51,6 +51,7 @@ class StrawberryIntegration(Integration): identifier = "strawberry" + origin = f"auto.graphql.{identifier}" def __init__(self, async_execution=None): # type: (Optional[bool]) -> None @@ -177,9 +178,17 @@ def on_operation(self): scope = Scope.get_isolation_scope() if scope.span: - self.graphql_span = scope.span.start_child(op=op, description=description) + 
self.graphql_span = scope.span.start_child( + op=op, + description=description, + origin=StrawberryIntegration.origin, + ) else: - self.graphql_span = sentry_sdk.start_span(op=op, description=description) + self.graphql_span = sentry_sdk.start_span( + op=op, + description=description, + origin=StrawberryIntegration.origin, + ) self.graphql_span.set_data("graphql.operation.type", operation_type) self.graphql_span.set_data("graphql.operation.name", self._operation_name) @@ -193,7 +202,9 @@ def on_operation(self): def on_validate(self): # type: () -> Generator[None, None, None] self.validation_span = self.graphql_span.start_child( - op=OP.GRAPHQL_VALIDATE, description="validation" + op=OP.GRAPHQL_VALIDATE, + description="validation", + origin=StrawberryIntegration.origin, ) yield @@ -203,7 +214,9 @@ def on_validate(self): def on_parse(self): # type: () -> Generator[None, None, None] self.parsing_span = self.graphql_span.start_child( - op=OP.GRAPHQL_PARSE, description="parsing" + op=OP.GRAPHQL_PARSE, + description="parsing", + origin=StrawberryIntegration.origin, ) yield @@ -231,7 +244,9 @@ async def resolve(self, _next, root, info, *args, **kwargs): field_path = "{}.{}".format(info.parent_type, info.field_name) with self.graphql_span.start_child( - op=OP.GRAPHQL_RESOLVE, description="resolving {}".format(field_path) + op=OP.GRAPHQL_RESOLVE, + description="resolving {}".format(field_path), + origin=StrawberryIntegration.origin, ) as span: span.set_data("graphql.field_name", info.field_name) span.set_data("graphql.parent_type", info.parent_type.name) @@ -250,7 +265,9 @@ def resolve(self, _next, root, info, *args, **kwargs): field_path = "{}.{}".format(info.parent_type, info.field_name) with self.graphql_span.start_child( - op=OP.GRAPHQL_RESOLVE, description="resolving {}".format(field_path) + op=OP.GRAPHQL_RESOLVE, + description="resolving {}".format(field_path), + origin=StrawberryIntegration.origin, ) as span: span.set_data("graphql.field_name", info.field_name) span.set_data("graphql.parent_type", info.parent_type.name) diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 6681037000..c459ee8922 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -47,6 +47,7 @@ class TornadoIntegration(Integration): identifier = "tornado" + origin = f"auto.http.{identifier}" @staticmethod def setup_once(): @@ -123,6 +124,7 @@ def _handle_request_impl(self): # setting a transaction name later. 
name="generic Tornado request", source=TRANSACTION_SOURCE_ROUTE, + origin=TornadoIntegration.origin, ) with sentry_sdk.start_transaction( diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py index da8fc84df1..2c44c593a4 100644 --- a/sentry_sdk/integrations/trytond.py +++ b/sentry_sdk/integrations/trytond.py @@ -12,13 +12,17 @@ class TrytondWSGIIntegration(Integration): identifier = "trytond_wsgi" + origin = f"auto.http.{identifier}" def __init__(self): # type: () -> None pass @staticmethod def setup_once(): # type: () -> None - app.wsgi_app = SentryWsgiMiddleware(app.wsgi_app) + app.wsgi_app = SentryWsgiMiddleware( + app.wsgi_app, + span_origin=TrytondWSGIIntegration.origin, + ) @ensure_integration_enabled(TrytondWSGIIntegration) def error_handler(e): # type: (Exception) -> None diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index de6c3b8060..f946844de5 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -63,12 +63,13 @@ def get_request_url(environ, use_x_forwarded_for=False): class SentryWsgiMiddleware: - __slots__ = ("app", "use_x_forwarded_for") + __slots__ = ("app", "use_x_forwarded_for", "span_origin") - def __init__(self, app, use_x_forwarded_for=False): - # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool) -> None + def __init__(self, app, use_x_forwarded_for=False, span_origin="manual"): + # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool, str) -> None self.app = app self.use_x_forwarded_for = use_x_forwarded_for + self.span_origin = span_origin def __call__(self, environ, start_response): # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse @@ -93,6 +94,7 @@ def __call__(self, environ, start_response): op=OP.HTTP_SERVER, name="generic WSGI request", source=TRANSACTION_SOURCE_ROUTE, + origin=self.span_origin, ) with sentry_sdk.start_transaction( diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 146ec859e2..a3a03e65c1 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -112,6 +112,7 @@ def record_sql_queries( paramstyle, # type: Optional[str] executemany, # type: bool record_cursor_repr=False, # type: bool + span_origin="manual", # type: str ): # type: (...) 
-> Generator[sentry_sdk.tracing.Span, None, None] @@ -141,7 +142,11 @@ def record_sql_queries( with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message=query, category="query", data=data) - with sentry_sdk.start_span(op=OP.DB, description=query) as span: + with sentry_sdk.start_span( + op=OP.DB, + description=query, + origin=span_origin, + ) as span: for k, v in data.items(): span.set_data(k, v) yield span diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 2123f1c303..43e3bec546 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -4,7 +4,7 @@ from unittest import mock import pytest -from aiohttp import web +from aiohttp import web, ClientSession from aiohttp.client import ServerDisconnectedError from aiohttp.web_request import Request @@ -567,3 +567,32 @@ async def handler(request): resp.request_info.headers["baggage"] == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" ) + + +@pytest.mark.asyncio +async def test_span_origin( + sentry_init, + aiohttp_client, + capture_events, +): + sentry_init( + integrations=[AioHttpIntegration()], + traces_sample_rate=1.0, + ) + + async def hello(request): + async with ClientSession() as session: + async with session.get("http://example.com"): + return web.Response(text="hello") + + app = web.Application() + app.router.add_get(r"/", hello) + + events = capture_events() + + client = await aiohttp_client(app) + await client.get("/") + + (event,) = events + assert event["contexts"]["trace"]["origin"] == "auto.http.aiohttp" + assert event["spans"][0]["origin"] == "auto.http.aiohttp" diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 4c7380533d..5fefde9b5a 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -220,3 +220,29 @@ def test_exception_message_create(sentry_init, capture_events): (event,) = events assert event["level"] == "error" + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[AnthropicIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = Anthropic(api_key="z") + client.messages._post = mock.Mock(return_value=EXAMPLE_MESSAGE) + + messages = [ + { + "role": "user", + "content": "Hello, Claude", + } + ] + + with start_transaction(name="anthropic"): + client.messages.create(max_tokens=1024, messages=messages, model="model") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.anthropic" diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py index 1f597b5fec..e121a1baa2 100644 --- a/tests/integrations/arq/test_arq.py +++ b/tests/integrations/arq/test_arq.py @@ -251,3 +251,43 @@ async def dummy_job(_ctx): await worker.run_job(job.job_id, timestamp_ms()) assert await job.result() is None + + +@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"]) +@pytest.mark.asyncio +async def test_span_origin_producer(capture_events, init_arq, source): + async def dummy_job(_): + pass + + pool, _ = init_arq(**{source: [dummy_job]}) + + events = capture_events() + + with start_transaction(): + await pool.enqueue_job("dummy_job") + + (event,) 
= events + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.queue.arq" + + +@pytest.mark.asyncio +async def test_span_origin_consumer(capture_events, init_arq): + async def job(ctx): + pass + + job.__qualname__ = job.__name__ + + pool, worker = init_arq([job]) + + job = await pool.enqueue_job("retry_job") + + events = capture_events() + + await worker.run_job(job.job_id, timestamp_ms()) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.queue.arq" + assert event["spans"][0]["origin"] == "manual" # redis db access + assert event["spans"][1]["origin"] == "manual" # redis db access diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py index 0d7addad44..a7ecd8034a 100644 --- a/tests/integrations/asyncio/test_asyncio.py +++ b/tests/integrations/asyncio/test_asyncio.py @@ -359,3 +359,31 @@ def test_sentry_task_factory_context_with_factory(mock_get_running_loop): assert "context" in task_factory_kwargs assert task_factory_kwargs["context"] == mock_context + + +@minimum_python_37 +@pytest.mark.asyncio +async def test_span_origin( + sentry_init, + capture_events, + event_loop, +): + sentry_init( + integrations=[AsyncioIntegration()], + traces_sample_rate=1.0, + ) + + events = capture_events() + + with sentry_sdk.start_transaction(name="something"): + tasks = [ + event_loop.create_task(foo()), + ] + await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) + + sentry_sdk.flush() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.function.asyncio" diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index 9140216996..a048fc7139 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -742,3 +742,20 @@ def fake_record_sql_queries(*args, **kwargs): data.get(SPANDATA.CODE_FUNCTION) == "test_query_source_if_duration_over_threshold" ) + + +@pytest.mark.asyncio +async def test_span_origin(sentry_init, capture_events): + sentry_init(integrations=[AsyncPGIntegration()], traces_sample_rate=1.0) + + events = capture_events() + + with start_transaction(name="test_transaction"): + conn: Connection = await connect(PG_CONNECTION_URI) + await conn.execute("SELECT 1") + await conn.close() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.db.asyncpg" diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index d18511397b..ffcaf877d7 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -877,3 +877,22 @@ def test_handler(event, context): (exception,) = event["exception"]["values"] assert exception["type"] == "Exception" assert exception["value"] == "Oh!" 
+ + +def test_span_origin(run_lambda_function): + envelope_items, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + + def test_handler(event, context): + pass + """ + ), + b'{"foo": "bar"}', + ) + + (event,) = envelope_items + + assert event["contexts"]["trace"]["origin"] == "auto.function.aws_lambda" diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py index 6fb0434182..97a1543b0f 100644 --- a/tests/integrations/boto3/test_s3.py +++ b/tests/integrations/boto3/test_s3.py @@ -132,3 +132,20 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): assert "aws.request.url" not in event["spans"][0]["data"] assert "http.fragment" not in event["spans"][0]["data"] assert "http.query" not in event["spans"][0]["data"] + + +def test_span_origin(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + s3 = session.resource("s3") + with sentry_sdk.start_transaction(), MockResponse( + s3.meta.client, 200, {}, read_fixture("s3_list.xml") + ): + bucket = s3.Bucket("bucket") + _ = [obj for obj in bucket.objects.all()] + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.http.boto3" diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py index 660acb3902..c44327cea6 100644 --- a/tests/integrations/bottle/test_bottle.py +++ b/tests/integrations/bottle/test_bottle.py @@ -474,3 +474,22 @@ def here(): client.get("/") assert not events + + +def test_span_origin( + sentry_init, + get_client, + capture_events, +): + sentry_init( + integrations=[bottle_sentry.BottleIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = get_client() + client.get("/message") + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.bottle" diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 3f2f7c6746..3b07a82f03 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -247,7 +247,7 @@ def test_clickhouse_client_spans( expected_spans = [ { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -262,7 +262,7 @@ def test_clickhouse_client_spans( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -277,7 +277,7 @@ def test_clickhouse_client_spans( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -292,7 +292,7 @@ def test_clickhouse_client_spans( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -307,7 +307,7 @@ def test_clickhouse_client_spans( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", @@ -370,7 +370,7 @@ def test_clickhouse_client_spans_with_pii( expected_spans = [ { "op": "db", - "origin": "manual", + "origin": 
"auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -386,7 +386,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -402,7 +402,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -418,7 +418,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -434,7 +434,7 @@ def test_clickhouse_client_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", @@ -695,7 +695,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) expected_spans = [ { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -710,7 +710,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -725,7 +725,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -740,7 +740,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -755,7 +755,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": "clickhouse", @@ -818,7 +818,7 @@ def test_clickhouse_dbapi_spans_with_pii( expected_spans = [ { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { "db.system": "clickhouse", @@ -834,7 +834,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { "db.system": "clickhouse", @@ -850,7 +850,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -866,7 +866,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { "db.system": "clickhouse", @@ -882,7 +882,7 @@ def test_clickhouse_dbapi_spans_with_pii( }, { "op": "db", - "origin": "manual", + "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { "db.system": 
"clickhouse", @@ -911,3 +911,22 @@ def test_clickhouse_dbapi_spans_with_pii( span.pop("timestamp", None) assert event["spans"] == expected_spans + + +def test_span_origin(sentry_init, capture_events, capture_envelopes) -> None: + sentry_init( + integrations=[ClickhouseDriverIntegration()], + traces_sample_rate=1.0, + ) + + events = capture_events() + + with start_transaction(name="test_clickhouse_transaction"): + conn = connect("clickhouse://localhost") + cursor = conn.cursor() + cursor.execute("SELECT 1") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.db.clickhouse_driver" diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index 52944e7bea..c0dff2214e 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -200,3 +200,73 @@ def test_embed(sentry_init, capture_events, send_default_pii, include_prompts): assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + + +def test_span_origin_chat(sentry_init, capture_events): + sentry_init( + integrations=[CohereIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = Client(api_key="z") + HTTPXClient.request = mock.Mock( + return_value=httpx.Response( + 200, + json={ + "text": "the model response", + "meta": { + "billed_units": { + "output_tokens": 10, + "input_tokens": 20, + } + }, + }, + ) + ) + + with start_transaction(name="cohere tx"): + client.chat( + model="some-model", + chat_history=[ChatMessage(role="SYSTEM", message="some context")], + message="hello", + ).text + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.cohere" + + +def test_span_origin_embed(sentry_init, capture_events): + sentry_init( + integrations=[CohereIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = Client(api_key="z") + HTTPXClient.request = mock.Mock( + return_value=httpx.Response( + 200, + json={ + "response_type": "embeddings_floats", + "id": "1", + "texts": ["hello"], + "embeddings": [[1.0, 2.0, 3.0]], + "meta": { + "billed_units": { + "input_tokens": 10, + } + }, + }, + ) + ) + + with start_transaction(name="cohere tx"): + client.embed(texts=["hello"], model="text-embedding-3-large") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.cohere" diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index 1a1fa163a3..b9e821afa8 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -43,6 +43,7 @@ def path(path, *args, **kwargs): ), path("middleware-exc", views.message, name="middleware_exc"), path("message", views.message, name="message"), + path("view-with-signal", views.view_with_signal, name="view_with_signal"), path("mylogin", views.mylogin, name="mylogin"), path("classbased", views.ClassBasedView.as_view(), name="classbased"), path("sentryclass", views.SentryClassBasedView(), name="sentryclass"), diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 971baf0785..dcd630363b 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -5,6 +5,7 @@ from django.contrib.auth import login from django.contrib.auth.models 
import User from django.core.exceptions import PermissionDenied +from django.dispatch import Signal from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError from django.shortcuts import render from django.template import Context, Template @@ -14,6 +15,7 @@ from django.views.decorators.csrf import csrf_exempt from django.views.generic import ListView + from tests.integrations.django.myapp.signals import ( myapp_custom_signal, myapp_custom_signal_silenced, @@ -113,6 +115,13 @@ def message(request): return HttpResponse("ok") +@csrf_exempt +def view_with_signal(request): + custom_signal = Signal() + custom_signal.send(sender="hello") + return HttpResponse("ok") + + @csrf_exempt def mylogin(request): user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword") diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 5e1529c762..f79c6e13d5 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -1126,3 +1126,32 @@ def dummy(a, b): assert name == "functools.partial()" else: assert name == "partial()" + + +@pytest.mark.skipif(DJANGO_VERSION <= (1, 11), reason="Requires Django > 1.11") +def test_span_origin(sentry_init, client, capture_events): + sentry_init( + integrations=[ + DjangoIntegration( + middleware_spans=True, + signals_spans=True, + cache_spans=True, + ) + ], + traces_sample_rate=1.0, + ) + events = capture_events() + + client.get(reverse("view_with_signal")) + + (transaction,) = events + + assert transaction["contexts"]["trace"]["origin"] == "auto.http.django" + + signal_span_found = False + for span in transaction["spans"]: + assert span["origin"] == "auto.http.django" + if span["op"] == "event.django": + signal_span_found = True + + assert signal_span_found diff --git a/tests/integrations/django/test_cache_module.py b/tests/integrations/django/test_cache_module.py index 646c73ae04..263f9f36f8 100644 --- a/tests/integrations/django/test_cache_module.py +++ b/tests/integrations/django/test_cache_module.py @@ -595,3 +595,34 @@ def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): assert transaction["spans"][3]["op"] == "cache.get" assert transaction["spans"][3]["description"] == f"S{id}" + + +@pytest.mark.forked +@pytest_mark_django_db_decorator() +@pytest.mark.skipif(DJANGO_VERSION <= (1, 11), reason="Requires Django > 1.11") +def test_span_origin_cache(sentry_init, client, capture_events, use_django_caching): + sentry_init( + integrations=[ + DjangoIntegration( + middleware_spans=True, + signals_spans=True, + cache_spans=True, + ) + ], + traces_sample_rate=1.0, + ) + events = capture_events() + + client.get(reverse("cached_view")) + + (transaction,) = events + + assert transaction["contexts"]["trace"]["origin"] == "auto.http.django" + + cache_span_found = False + for span in transaction["spans"]: + assert span["origin"] == "auto.http.django" + if span["op"].startswith("cache."): + cache_span_found = True + + assert cache_span_found diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py index 878babf507..087fc5ad49 100644 --- a/tests/integrations/django/test_db_query_data.py +++ b/tests/integrations/django/test_db_query_data.py @@ -14,6 +14,7 @@ from werkzeug.test import Client +from sentry_sdk import start_transaction from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.tracing_utils import 
record_sql_queries @@ -455,3 +456,68 @@ def __exit__(self, type, value, traceback): break else: raise AssertionError("No db span found") + + +@pytest.mark.forked +@pytest_mark_django_db_decorator(transaction=True) +def test_db_span_origin_execute(sentry_init, client, capture_events): + sentry_init( + integrations=[DjangoIntegration()], + traces_sample_rate=1.0, + ) + + if "postgres" not in connections: + pytest.skip("postgres tests disabled") + + # trigger Django to open a new connection by marking the existing one as None. + connections["postgres"].connection = None + + events = capture_events() + + client.get(reverse("postgres_select_orm")) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.django" + + for span in event["spans"]: + assert span["origin"] == "auto.http.django" + + +@pytest.mark.forked +@pytest_mark_django_db_decorator(transaction=True) +def test_db_span_origin_executemany(sentry_init, client, capture_events): + sentry_init( + integrations=[DjangoIntegration()], + traces_sample_rate=1.0, + ) + + events = capture_events() + + if "postgres" not in connections: + pytest.skip("postgres tests disabled") + + with start_transaction(name="test_transaction"): + from django.db import connection, transaction + + cursor = connection.cursor() + + query = """UPDATE auth_user SET username = %s where id = %s;""" + query_list = ( + ( + "test1", + 1, + ), + ( + "test2", + 2, + ), + ) + cursor.executemany(query, query_list) + + transaction.commit() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.http.django" diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py index 0a202c0081..c88a95a531 100644 --- a/tests/integrations/falcon/test_falcon.py +++ b/tests/integrations/falcon/test_falcon.py @@ -446,3 +446,18 @@ def test_falcon_custom_error_handler(sentry_init, make_app, capture_events): client.simulate_get("/custom-error") assert len(events) == 0 + + +def test_span_origin(sentry_init, capture_events, make_client): + sentry_init( + integrations=[FalconIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = make_client() + client.simulate_get("/message") + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.falcon" diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index bfd8ed9938..c35bf2acb5 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -948,3 +948,18 @@ def test_response_status_code_not_found_in_transaction_context( "response" in transaction["contexts"].keys() ), "Response context not found in transaction" assert transaction["contexts"]["response"]["status_code"] == 404 + + +def test_span_origin(sentry_init, app, capture_events): + sentry_init( + integrations=[flask_sentry.FlaskIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = app.test_client() + client.get("/message") + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.flask" diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 20ae6e56b0..22d104c817 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -537,3 +537,27 @@ def cloud_function(functionhandler, event): == error_event["contexts"]["trace"]["trace_id"] == "471a43a4192642f0b136d5159a501701" ) + + +def 
test_span_origin(run_cloud_function): + events, _ = run_cloud_function( + dedent( + """ + functionhandler = None + event = {} + def cloud_function(functionhandler, event): + return "test_string" + """ + ) + + FUNCTIONS_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) + """ + ) + ) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.function.gcp" diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index ff93dd3835..17bf7017a5 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -320,3 +320,30 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): assert "url" not in event["breadcrumbs"]["values"][0]["data"] assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"] assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"] + + +@pytest.mark.parametrize( + "httpx_client", + (httpx.Client(), httpx.AsyncClient()), +) +def test_span_origin(sentry_init, capture_events, httpx_client): + sentry_init( + integrations=[HttpxIntegration()], + traces_sample_rate=1.0, + ) + + events = capture_events() + + url = "http://example.com/" + responses.add(responses.GET, url, status=200) + + with start_transaction(name="test_transaction"): + if asyncio.iscoroutinefunction(httpx_client.get): + asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) + else: + httpx_client.get(url) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.http.httpx" diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py index f887080533..143a369348 100644 --- a/tests/integrations/huey/test_huey.py +++ b/tests/integrations/huey/test_huey.py @@ -189,3 +189,37 @@ def propagated_trace_task(): events[0]["transaction"] == "propagated_trace_task" ) # the "inner" transaction assert events[0]["contexts"]["trace"]["trace_id"] == outer_transaction.trace_id + + +def test_span_origin_producer(init_huey, capture_events): + huey = init_huey() + + @huey.task(name="different_task_name") + def dummy_task(): + pass + + events = capture_events() + + with start_transaction(): + dummy_task() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.queue.huey" + + +def test_span_origin_consumer(init_huey, capture_events): + huey = init_huey() + + events = capture_events() + + @huey.task() + def propagated_trace_task(): + pass + + execute_huey_task(huey, propagated_trace_task) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.queue.huey" diff --git a/tests/integrations/huggingface_hub/test_huggingface_hub.py b/tests/integrations/huggingface_hub/test_huggingface_hub.py index 734778d08a..f43159d80e 100644 --- a/tests/integrations/huggingface_hub/test_huggingface_hub.py +++ b/tests/integrations/huggingface_hub/test_huggingface_hub.py @@ -137,3 +137,32 @@ def test_bad_chat_completion(sentry_init, capture_events): (event,) = events assert event["level"] == "error" + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[HuggingfaceHubIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = InferenceClient("some-model") + client.post = mock.Mock( + return_value=[ + b"""data:{ + "token":{"id":1, "special": false, "text": "the 
model "} + }""", + ] + ) + with start_transaction(name="huggingface_hub tx"): + list( + client.text_generation( + prompt="hello", + stream=True, + ) + ) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.huggingface_hub" diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index 7dcf5763df..5e7ebbbf1d 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -228,3 +228,101 @@ def test_langchain_error(sentry_init, capture_events): error = events[0] assert error["level"] == "error" + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[LangchainIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + prompt = ChatPromptTemplate.from_messages( + [ + ( + "system", + "You are very powerful assistant, but don't know current events", + ), + ("user", "{input}"), + MessagesPlaceholder(variable_name="agent_scratchpad"), + ] + ) + global stream_result_mock + stream_result_mock = Mock( + side_effect=[ + [ + ChatGenerationChunk( + type="ChatGenerationChunk", + message=AIMessageChunk( + content="", + additional_kwargs={ + "tool_calls": [ + { + "index": 0, + "id": "call_BbeyNhCKa6kYLYzrD40NGm3b", + "function": { + "arguments": "", + "name": "get_word_length", + }, + "type": "function", + } + ] + }, + ), + ), + ChatGenerationChunk( + type="ChatGenerationChunk", + message=AIMessageChunk( + content="", + additional_kwargs={ + "tool_calls": [ + { + "index": 0, + "id": None, + "function": { + "arguments": '{"word": "eudca"}', + "name": None, + }, + "type": None, + } + ] + }, + ), + ), + ChatGenerationChunk( + type="ChatGenerationChunk", + message=AIMessageChunk(content="5"), + generation_info={"finish_reason": "function_call"}, + ), + ], + [ + ChatGenerationChunk( + text="The word eudca has 5 letters.", + type="ChatGenerationChunk", + message=AIMessageChunk(content="The word eudca has 5 letters."), + ), + ChatGenerationChunk( + type="ChatGenerationChunk", + generation_info={"finish_reason": "stop"}, + message=AIMessageChunk(content=""), + ), + ], + ] + ) + llm = MockOpenAI( + model_name="gpt-3.5-turbo", + temperature=0, + openai_api_key="badkey", + ) + agent = create_openai_tools_agent(llm, [get_word_length], prompt) + + agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) + + with start_transaction(): + list(agent_executor.stream({"input": "How many letters in the word eudca"})) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + for span in event["spans"]: + assert span["origin"] == "auto.ai.langchain" diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py index f14ae82333..9cd8761fd6 100644 --- a/tests/integrations/openai/test_openai.py +++ b/tests/integrations/openai/test_openai.py @@ -224,3 +224,111 @@ def test_embeddings_create( assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + + +def test_span_origin_nonstreaming_chat(sentry_init, capture_events): + sentry_init( + integrations=[OpenAIIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = OpenAI(api_key="z") + client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) + + with start_transaction(name="openai tx"): + client.chat.completions.create( + model="some-model", 
messages=[{"role": "system", "content": "hello"}] + ) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.openai" + + +def test_span_origin_streaming_chat(sentry_init, capture_events): + sentry_init( + integrations=[OpenAIIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = OpenAI(api_key="z") + returned_stream = Stream(cast_to=None, response=None, client=client) + returned_stream._iterator = [ + ChatCompletionChunk( + id="1", + choices=[ + DeltaChoice( + index=0, delta=ChoiceDelta(content="hel"), finish_reason=None + ) + ], + created=100000, + model="model-id", + object="chat.completion.chunk", + ), + ChatCompletionChunk( + id="1", + choices=[ + DeltaChoice( + index=1, delta=ChoiceDelta(content="lo "), finish_reason=None + ) + ], + created=100000, + model="model-id", + object="chat.completion.chunk", + ), + ChatCompletionChunk( + id="1", + choices=[ + DeltaChoice( + index=2, delta=ChoiceDelta(content="world"), finish_reason="stop" + ) + ], + created=100000, + model="model-id", + object="chat.completion.chunk", + ), + ] + + client.chat.completions._post = mock.Mock(return_value=returned_stream) + with start_transaction(name="openai tx"): + response_stream = client.chat.completions.create( + model="some-model", messages=[{"role": "system", "content": "hello"}] + ) + "".join(map(lambda x: x.choices[0].delta.content, response_stream)) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.openai" + + +def test_span_origin_embeddings(sentry_init, capture_events): + sentry_init( + integrations=[OpenAIIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = OpenAI(api_key="z") + + returned_embedding = CreateEmbeddingResponse( + data=[Embedding(object="embedding", index=0, embedding=[1.0, 2.0, 3.0])], + model="some-model", + object="list", + usage=EmbeddingTokenUsage( + prompt_tokens=20, + total_tokens=30, + ), + ) + + client.embeddings._post = mock.Mock(return_value=returned_embedding) + with start_transaction(name="openai tx"): + client.embeddings.create(input="hello", model="text-embedding-3-large") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.openai" diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py index c25310e361..75a05856fb 100644 --- a/tests/integrations/pymongo/test_pymongo.py +++ b/tests/integrations/pymongo/test_pymongo.py @@ -422,3 +422,23 @@ def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): ) def test_strip_pii(testcase): assert _strip_pii(testcase["command"]) == testcase["command_stripped"] + + +def test_span_origin(sentry_init, capture_events, mongo_server): + sentry_init( + integrations=[PyMongoIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + connection = MongoClient(mongo_server.uri) + + with start_transaction(): + list( + connection["test_db"]["test_collection"].find({"foobar": 1}) + ) # force query execution + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.db.pymongo" diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py index a25dbef2fc..d42d7887c4 100644 --- a/tests/integrations/pyramid/test_pyramid.py +++ 
b/tests/integrations/pyramid/test_pyramid.py @@ -421,3 +421,18 @@ def index(request): client.get("/") assert not errors + + +def test_span_origin(sentry_init, capture_events, get_client): + sentry_init( + integrations=[PyramidIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + client = get_client() + client.get("/message") + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.pyramid" diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py index 32948f6e1d..d4b4c61d97 100644 --- a/tests/integrations/quart/test_quart.py +++ b/tests/integrations/quart/test_quart.py @@ -547,3 +547,20 @@ async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, app): transactions = profile.payload.json["transactions"] assert len(transactions) == 1 assert str(data["active"]) == transactions[0]["active_thread_id"] + + +@pytest.mark.asyncio +async def test_span_origin(sentry_init, capture_events, app): + sentry_init( + integrations=[quart_sentry.QuartIntegration()], + traces_sample_rate=1.0, + ) + + events = capture_events() + + client = app.test_client() + await client.get("/message") + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.quart" diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index 094a458063..02db5eba8e 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -265,3 +265,18 @@ def test_job_with_retries(sentry_init, capture_events): worker.work(burst=True) assert len(events) == 1 + + +def test_span_origin(sentry_init, capture_events): + sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) + events = capture_events() + + queue = rq.Queue(connection=FakeStrictRedis()) + worker = rq.SimpleWorker([queue], connection=queue.connection) + + queue.enqueue(do_trick, "Maisey", trick="kangaroo") + worker.work(burst=True) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.queue.rq" diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index d714690936..574fd673bb 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -444,3 +444,19 @@ def test_transactions(test_config, sentry_init, app, capture_events): or transaction_event["transaction_info"]["source"] == test_config.expected_source ) + + +@pytest.mark.skipif( + not PERFORMANCE_SUPPORTED, reason="Performance not supported on this Sanic version" +) +def test_span_origin(sentry_init, app, capture_events): + sentry_init(integrations=[SanicIntegration()], traces_sample_rate=1.0) + events = capture_events() + + c = get_client(app) + with c as client: + client.get("/message?foo=bar") + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.sanic" diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index 503bc9e82a..411be72f6f 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -1081,6 +1081,29 @@ def test_transaction_name_in_middleware( ) +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[StarletteIntegration()], + traces_sample_rate=1.0, + ) + starlette_app = starlette_app_factory( + middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())] + ) + events = capture_events() + + client = TestClient(starlette_app, 
raise_server_exceptions=False) + try: + client.get("/message", auth=("Gabriela", "hello123")) + except Exception: + pass + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.starlette" + for span in event["spans"]: + assert span["origin"] == "auto.http.starlette" + + @pytest.mark.parametrize( "failed_request_status_codes,status_code,expected_error", [ diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py index 5f1b199be6..45075b5199 100644 --- a/tests/integrations/starlite/test_starlite.py +++ b/tests/integrations/starlite/test_starlite.py @@ -289,3 +289,37 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): assert span["op"] == expected[idx]["op"] assert span["description"].startswith(expected[idx]["description"]) assert span["tags"] == expected[idx]["tags"] + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[StarliteIntegration()], + traces_sample_rate=1.0, + ) + + logging_config = LoggingMiddlewareConfig() + session_config = MemoryBackendConfig() + rate_limit_config = RateLimitConfig(rate_limit=("hour", 5)) + + starlite_app = starlite_app_factory( + middleware=[ + session_config.middleware, + logging_config.middleware, + rate_limit_config.middleware, + ] + ) + events = capture_events() + + client = TestClient( + starlite_app, raise_server_exceptions=False, base_url="http://testserver.local" + ) + try: + client.get("/message") + except Exception: + pass + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.starlite" + for span in event["spans"]: + assert span["origin"] == "auto.http.starlite" diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index e84c5f6fa5..fc6f31710e 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -1,4 +1,5 @@ import pytest +from typing import AsyncGenerator, Optional strawberry = pytest.importorskip("strawberry") pytest.importorskip("fastapi") @@ -27,7 +28,6 @@ ) from tests.conftest import ApproxDict - parameterize_strawberry_test = pytest.mark.parametrize( "client_factory,async_execution,framework_integrations", ( @@ -59,6 +59,19 @@ def change(self, attribute: str) -> str: return attribute +@strawberry.type +class Message: + content: str + + +@strawberry.type +class Subscription: + @strawberry.subscription + async def message_added(self) -> Optional[AsyncGenerator[Message, None]]: + message = Message(content="Hello, world!") + yield message + + @pytest.fixture def async_app_client_factory(): def create_app(schema): @@ -627,3 +640,129 @@ def test_handle_none_query_gracefully( client.post("/graphql", json={}) assert len(events) == 0, "expected no events to be sent to Sentry" + + +@parameterize_strawberry_test +def test_span_origin( + request, + sentry_init, + capture_events, + client_factory, + async_execution, + framework_integrations, +): + """ + Tests for OP.GRAPHQL_MUTATION, OP.GRAPHQL_PARSE, OP.GRAPHQL_VALIDATE, OP.GRAPHQL_RESOLVE, + """ + sentry_init( + integrations=[ + StrawberryIntegration(async_execution=async_execution), + ] + + framework_integrations, + traces_sample_rate=1, + ) + events = capture_events() + + schema = strawberry.Schema(Query, mutation=Mutation) + + client_factory = request.getfixturevalue(client_factory) + client = client_factory(schema) + + query = 'mutation Change { change(attribute: "something") }' + client.post("/graphql", 
json={"query": query}) + + (event,) = events + + is_flask = "Flask" in str(framework_integrations[0]) + if is_flask: + assert event["contexts"]["trace"]["origin"] == "auto.http.flask" + else: + assert event["contexts"]["trace"]["origin"] == "auto.http.starlette" + + for span in event["spans"]: + if span["op"].startswith("graphql."): + assert span["origin"] == "auto.graphql.strawberry" + + +@parameterize_strawberry_test +def test_span_origin2( + request, + sentry_init, + capture_events, + client_factory, + async_execution, + framework_integrations, +): + """ + Tests for OP.GRAPHQL_QUERY + """ + sentry_init( + integrations=[ + StrawberryIntegration(async_execution=async_execution), + ] + + framework_integrations, + traces_sample_rate=1, + ) + events = capture_events() + + schema = strawberry.Schema(Query, mutation=Mutation) + + client_factory = request.getfixturevalue(client_factory) + client = client_factory(schema) + + query = "query GreetingQuery { hello }" + client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"}) + + (event,) = events + + is_flask = "Flask" in str(framework_integrations[0]) + if is_flask: + assert event["contexts"]["trace"]["origin"] == "auto.http.flask" + else: + assert event["contexts"]["trace"]["origin"] == "auto.http.starlette" + + for span in event["spans"]: + if span["op"].startswith("graphql."): + assert span["origin"] == "auto.graphql.strawberry" + + +@parameterize_strawberry_test +def test_span_origin3( + request, + sentry_init, + capture_events, + client_factory, + async_execution, + framework_integrations, +): + """ + Tests for OP.GRAPHQL_SUBSCRIPTION + """ + sentry_init( + integrations=[ + StrawberryIntegration(async_execution=async_execution), + ] + + framework_integrations, + traces_sample_rate=1, + ) + events = capture_events() + + schema = strawberry.Schema(Query, subscription=Subscription) + + client_factory = request.getfixturevalue(client_factory) + client = client_factory(schema) + + query = "subscription { messageAdded { content } }" + client.post("/graphql", json={"query": query}) + + (event,) = events + + is_flask = "Flask" in str(framework_integrations[0]) + if is_flask: + assert event["contexts"]["trace"]["origin"] == "auto.http.flask" + else: + assert event["contexts"]["trace"]["origin"] == "auto.http.starlette" + + for span in event["spans"]: + if span["op"].startswith("graphql."): + assert span["origin"] == "auto.graphql.strawberry" diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 181c17cd49..d379d3dae4 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -436,3 +436,17 @@ def test_error_has_existing_trace_context_performance_disabled( == error_event["contexts"]["trace"]["trace_id"] == "471a43a4192642f0b136d5159a501701" ) + + +def test_span_origin(tornado_testcase, sentry_init, capture_events): + sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0) + events = capture_events() + client = tornado_testcase(Application([(r"/hi", CrashingHandler)])) + + client.fetch( + "/hi?foo=bar", headers={"Cookie": "name=value; name2=value2; name3=value3"} + ) + + (_, event) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.tornado" diff --git a/tests/integrations/trytond/test_trytond.py b/tests/integrations/trytond/test_trytond.py index f4ae81f3fa..33a138b50a 100644 --- a/tests/integrations/trytond/test_trytond.py +++ b/tests/integrations/trytond/test_trytond.py @@ -125,3 +125,22 @@ 
def _(app, request, e): assert status == "200 OK" assert headers.get("Content-Type") == "application/json" assert data == dict(id=42, error=["UserError", ["Sentry error.", "foo", None]]) + + +def test_span_origin(sentry_init, app, capture_events, get_client): + sentry_init( + integrations=[TrytondWSGIIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + @app.route("/something") + def _(request): + return "ok" + + client = get_client() + client.get("/something") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.http.trytond_wsgi" diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 9af05e977e..d2fa6f2135 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -437,3 +437,42 @@ def test_app(environ, start_response): profiles = [item for item in envelopes[0].items if item.type == "profile"] assert len(profiles) == 1 + + +def test_span_origin_manual(sentry_init, capture_events): + def dogpark(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! Good dog!"] + + sentry_init(send_default_pii=True, traces_sample_rate=1.0) + app = SentryWsgiMiddleware(dogpark) + + events = capture_events() + + client = Client(app) + client.get("/dogs/are/great/") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + +def test_span_origin_custom(sentry_init, capture_events): + def dogpark(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! Good dog!"] + + sentry_init(send_default_pii=True, traces_sample_rate=1.0) + app = SentryWsgiMiddleware( + dogpark, + span_origin="auto.dogpark.deluxe", + ) + + events = capture_events() + + client = Client(app) + client.get("/dogs/are/great/") + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.dogpark.deluxe" From baea1b1f4a7e8028017943e129c2e66a85c982d8 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Jun 2024 12:31:05 +0200 Subject: [PATCH 11/12] Add `origin` to transactions/spans created by integrations 2 (#3163) * Added origin to stdlib integration * Added origin to socket integration * Added origin to sqlalchemy integration * Middle of adding origin to grpc integration * missing file * Fixing tests * Fixed tests * Added origin to redis integration * Added origin to celery integration * trigger ci * linting * Added origin to opentelemetry integration * Added origin to asyncpg integration --- sentry_sdk/integrations/asyncpg.py | 22 +++-- sentry_sdk/integrations/celery/__init__.py | 18 +++- sentry_sdk/integrations/grpc/aio/client.py | 9 +- sentry_sdk/integrations/grpc/aio/server.py | 2 + sentry_sdk/integrations/grpc/client.py | 9 +- sentry_sdk/integrations/grpc/consts.py | 1 + sentry_sdk/integrations/grpc/server.py | 2 + .../opentelemetry/span_processor.py | 3 + .../integrations/redis/_async_common.py | 7 +- sentry_sdk/integrations/redis/_sync_common.py | 7 +- sentry_sdk/integrations/redis/consts.py | 2 + sentry_sdk/integrations/socket.py | 6 +- sentry_sdk/integrations/sqlalchemy.py | 2 + sentry_sdk/integrations/stdlib.py | 18 +++- tests/integrations/asyncpg/test_asyncpg.py | 11 ++- tests/integrations/celery/test_celery.py | 48 +++++++++- tests/integrations/grpc/test_grpc.py | 84 +++++++++++++----- tests/integrations/grpc/test_grpc_aio.py | 87 ++++++++++++------- .../opentelemetry/test_span_processor.py | 2 + .../redis/asyncio/test_redis_asyncio.py | 27 ++++++ .../redis/cluster/test_redis_cluster.py | 26 ++++++ 
.../test_redis_cluster_asyncio.py | 27 ++++++ tests/integrations/redis/test_redis.py | 26 ++++++ tests/integrations/socket/test_socket.py | 21 +++++ .../sqlalchemy/test_sqlalchemy.py | 20 +++++ tests/integrations/stdlib/test_httplib.py | 16 ++++ tests/integrations/stdlib/test_subprocess.py | 30 +++++++ 27 files changed, 460 insertions(+), 73 deletions(-) create mode 100644 sentry_sdk/integrations/grpc/consts.py diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index 30cb50e5f7..4c1611613b 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -70,7 +70,14 @@ async def _inner(*args: Any, **kwargs: Any) -> T: return await f(*args, **kwargs) query = args[1] - with record_sql_queries(None, query, None, None, executemany=False) as span: + with record_sql_queries( + cursor=None, + query=query, + params_list=None, + paramstyle=None, + executemany=False, + span_origin=AsyncPGIntegration.origin, + ) as span: res = await f(*args, **kwargs) with capture_internal_exceptions(): @@ -99,12 +106,13 @@ def _record( param_style = "pyformat" if params_list else None with record_sql_queries( - cursor, - query, - params_list, - param_style, + cursor=cursor, + query=query, + params_list=params_list, + paramstyle=param_style, executemany=executemany, record_cursor_repr=cursor is not None, + span_origin=AsyncPGIntegration.origin, ) as span: yield span @@ -156,7 +164,9 @@ async def _inner(*args: Any, **kwargs: Any) -> T: database = kwargs["params"].database with sentry_sdk.start_span( - op=OP.DB, description="connect", origin=AsyncPGIntegration.origin + op=OP.DB, + description="connect", + origin=AsyncPGIntegration.origin, ) as span: span.set_data(SPANDATA.DB_SYSTEM, "postgresql") addr = kwargs.get("addr") diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index d0908a039e..67793ad6cf 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -58,6 +58,7 @@ class CeleryIntegration(Integration): identifier = "celery" + origin = f"auto.queue.{identifier}" def __init__( self, @@ -266,7 +267,11 @@ def apply_async(*args, **kwargs): ) span_mgr = ( - sentry_sdk.start_span(op=OP.QUEUE_SUBMIT_CELERY, description=task.name) + sentry_sdk.start_span( + op=OP.QUEUE_SUBMIT_CELERY, + description=task.name, + origin=CeleryIntegration.origin, + ) if not task_started_from_beat else NoOpMgr() ) # type: Union[Span, NoOpMgr] @@ -309,6 +314,7 @@ def _inner(*args, **kwargs): op=OP.QUEUE_TASK_CELERY, name="unknown celery task", source=TRANSACTION_SOURCE_TASK, + origin=CeleryIntegration.origin, ) transaction.name = task.name transaction.set_status("ok") @@ -362,7 +368,9 @@ def _inner(*args, **kwargs): # type: (*Any, **Any) -> Any try: with sentry_sdk.start_span( - op=OP.QUEUE_PROCESS, description=task.name + op=OP.QUEUE_PROCESS, + description=task.name, + origin=CeleryIntegration.origin, ) as span: _set_messaging_destination_name(task, span) @@ -483,7 +491,11 @@ def sentry_publish(self, *args, **kwargs): routing_key = kwargs.get("routing_key") exchange = kwargs.get("exchange") - with sentry_sdk.start_span(op=OP.QUEUE_PUBLISH, description=task_name) as span: + with sentry_sdk.start_span( + op=OP.QUEUE_PUBLISH, + description=task_name, + origin=CeleryIntegration.origin, + ) as span: if task_id is not None: span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id) diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index 
91a06eaa7f..b67481b5b5 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -11,6 +11,7 @@ import sentry_sdk from sentry_sdk.consts import OP +from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from sentry_sdk.scope import Scope @@ -46,7 +47,9 @@ async def intercept_unary_unary( method = client_call_details.method with sentry_sdk.start_span( - op=OP.GRPC_CLIENT, description="unary unary call to %s" % method.decode() + op=OP.GRPC_CLIENT, + description="unary unary call to %s" % method.decode(), + origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary unary") span.set_data("method", method) @@ -74,7 +77,9 @@ async def intercept_unary_stream( method = client_call_details.method with sentry_sdk.start_span( - op=OP.GRPC_CLIENT, description="unary stream call to %s" % method.decode() + op=OP.GRPC_CLIENT, + description="unary stream call to %s" % method.decode(), + origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary stream") span.set_data("method", method) diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index a3027dbd4f..2fdcb0b8f0 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -2,6 +2,7 @@ from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable +from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM from sentry_sdk.utils import event_from_exception @@ -47,6 +48,7 @@ async def wrapped(request, context): op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, + origin=SPAN_ORIGIN, ) with sentry_sdk.start_transaction(transaction=transaction): diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index 96f2591bde..c4e89f3737 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ b/sentry_sdk/integrations/grpc/client.py @@ -2,6 +2,7 @@ from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable +from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from sentry_sdk.scope import Scope if TYPE_CHECKING: @@ -27,7 +28,9 @@ def intercept_unary_unary(self, continuation, client_call_details, request): method = client_call_details.method with sentry_sdk.start_span( - op=OP.GRPC_CLIENT, description="unary unary call to %s" % method + op=OP.GRPC_CLIENT, + description="unary unary call to %s" % method, + origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary unary") span.set_data("method", method) @@ -46,7 +49,9 @@ def intercept_unary_stream(self, continuation, client_call_details, request): method = client_call_details.method with sentry_sdk.start_span( - op=OP.GRPC_CLIENT, description="unary stream call to %s" % method + op=OP.GRPC_CLIENT, + description="unary stream call to %s" % method, + origin=SPAN_ORIGIN, ) as span: span.set_data("type", "unary stream") span.set_data("method", method) diff --git a/sentry_sdk/integrations/grpc/consts.py b/sentry_sdk/integrations/grpc/consts.py new file mode 100644 index 0000000000..9fdb975caf --- /dev/null +++ b/sentry_sdk/integrations/grpc/consts.py @@ -0,0 +1 @@ +SPAN_ORIGIN = "auto.grpc.grpc" diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index 50a1dc4dbe..74ab550529 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -2,6 
+2,7 @@ from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable +from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM if TYPE_CHECKING: @@ -41,6 +42,7 @@ def behavior(request, context): op=OP.GRPC_SERVER, name=name, source=TRANSACTION_SOURCE_CUSTOM, + origin=SPAN_ORIGIN, ) with sentry_sdk.start_transaction(transaction=transaction): diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index a09a93d284..1b05ba9a2c 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -36,6 +36,7 @@ OPEN_TELEMETRY_CONTEXT = "otel" SPAN_MAX_TIME_OPEN_MINUTES = 10 +SPAN_ORIGIN = "auto.otel" def link_trace_context_to_error_event(event, otel_span_map): @@ -149,6 +150,7 @@ def on_start(self, otel_span, parent_context=None): otel_span.start_time / 1e9, timezone.utc ), # OTel spans have nanosecond precision instrumenter=INSTRUMENTER.OTEL, + origin=SPAN_ORIGIN, ) else: sentry_span = start_transaction( @@ -161,6 +163,7 @@ def on_start(self, otel_span, parent_context=None): otel_span.start_time / 1e9, timezone.utc ), # OTel spans have nanosecond precision instrumenter=INSTRUMENTER.OTEL, + origin=SPAN_ORIGIN, ) self.otel_span_map[trace_data["span_id"]] = sentry_span diff --git a/sentry_sdk/integrations/redis/_async_common.py b/sentry_sdk/integrations/redis/_async_common.py index 04c74cc69d..50d5ea6c82 100644 --- a/sentry_sdk/integrations/redis/_async_common.py +++ b/sentry_sdk/integrations/redis/_async_common.py @@ -1,5 +1,6 @@ from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP +from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, _set_cache_data, @@ -35,7 +36,9 @@ async def _sentry_execute(self, *args, **kwargs): return await old_execute(self, *args, **kwargs) with sentry_sdk.start_span( - op=OP.DB_REDIS, description="redis.pipeline.execute" + op=OP.DB_REDIS, + description="redis.pipeline.execute", + origin=SPAN_ORIGIN, ) as span: with capture_internal_exceptions(): set_db_data_fn(span, self) @@ -76,6 +79,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs): cache_span = sentry_sdk.start_span( op=cache_properties["op"], description=cache_properties["description"], + origin=SPAN_ORIGIN, ) cache_span.__enter__() @@ -84,6 +88,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs): db_span = sentry_sdk.start_span( op=db_properties["op"], description=db_properties["description"], + origin=SPAN_ORIGIN, ) db_span.__enter__() diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index e1578b3194..6a01f5e18b 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -1,5 +1,6 @@ from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP +from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, _set_cache_data, @@ -36,7 +37,9 @@ def sentry_patched_execute(self, *args, **kwargs): return old_execute(self, *args, **kwargs) with sentry_sdk.start_span( - op=OP.DB_REDIS, description="redis.pipeline.execute" + op=OP.DB_REDIS, + description="redis.pipeline.execute", + 
origin=SPAN_ORIGIN, ) as span: with capture_internal_exceptions(): set_db_data_fn(span, self) @@ -81,6 +84,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): cache_span = sentry_sdk.start_span( op=cache_properties["op"], description=cache_properties["description"], + origin=SPAN_ORIGIN, ) cache_span.__enter__() @@ -89,6 +93,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): db_span = sentry_sdk.start_span( op=db_properties["op"], description=db_properties["description"], + origin=SPAN_ORIGIN, ) db_span.__enter__() diff --git a/sentry_sdk/integrations/redis/consts.py b/sentry_sdk/integrations/redis/consts.py index a8d5509714..737e829735 100644 --- a/sentry_sdk/integrations/redis/consts.py +++ b/sentry_sdk/integrations/redis/consts.py @@ -1,3 +1,5 @@ +SPAN_ORIGIN = "auto.db.redis" + _SINGLE_KEY_COMMANDS = frozenset( ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"], ) diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index 1422551bf4..beec7dbf3e 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -14,6 +14,7 @@ class SocketIntegration(Integration): identifier = "socket" + origin = f"auto.socket.{identifier}" @staticmethod def setup_once(): @@ -55,6 +56,7 @@ def create_connection( with sentry_sdk.start_span( op=OP.SOCKET_CONNECTION, description=_get_span_description(address[0], address[1]), + origin=SocketIntegration.origin, ) as span: span.set_data("address", address) span.set_data("timeout", timeout) @@ -78,7 +80,9 @@ def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): return real_getaddrinfo(host, port, family, type, proto, flags) with sentry_sdk.start_span( - op=OP.SOCKET_DNS, description=_get_span_description(host, port) + op=OP.SOCKET_DNS, + description=_get_span_description(host, port), + origin=SocketIntegration.origin, ) as span: span.set_data("host", host) span.set_data("port", port) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 9c438ca3df..32eab36160 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -27,6 +27,7 @@ class SqlalchemyIntegration(Integration): identifier = "sqlalchemy" + origin = f"auto.db.{identifier}" @staticmethod def setup_once(): @@ -58,6 +59,7 @@ def _before_cursor_execute( parameters, paramstyle=context and context.dialect and context.dialect.paramstyle or None, executemany=executemany, + span_origin=SqlalchemyIntegration.origin, ) context._sentry_sql_span_manager = ctx_mgr diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 62899e9a1b..58e561d4b2 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -91,8 +91,8 @@ def putrequest(self, method, url, *args, **kwargs): op=OP.HTTP_CLIENT, description="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), + origin="auto.http.stdlib.httplib", ) - span.set_data(SPANDATA.HTTP_METHOD, method) if parsed_url is not None: span.set_data("url", parsed_url.url) @@ -197,7 +197,11 @@ def sentry_patched_popen_init(self, *a, **kw): env = None - with sentry_sdk.start_span(op=OP.SUBPROCESS, description=description) as span: + with sentry_sdk.start_span( + op=OP.SUBPROCESS, + description=description, + origin="auto.subprocess.stdlib.subprocess", + ) as span: for k, v in Scope.get_current_scope().iter_trace_propagation_headers( span=span ): @@ -222,7 +226,10 @@ def 
sentry_patched_popen_init(self, *a, **kw): @ensure_integration_enabled(StdlibIntegration, old_popen_wait) def sentry_patched_popen_wait(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> Any - with sentry_sdk.start_span(op=OP.SUBPROCESS_WAIT) as span: + with sentry_sdk.start_span( + op=OP.SUBPROCESS_WAIT, + origin="auto.subprocess.stdlib.subprocess", + ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_wait(self, *a, **kw) @@ -233,7 +240,10 @@ def sentry_patched_popen_wait(self, *a, **kw): @ensure_integration_enabled(StdlibIntegration, old_popen_communicate) def sentry_patched_popen_communicate(self, *a, **kw): # type: (subprocess.Popen[Any], *Any, **Any) -> Any - with sentry_sdk.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span: + with sentry_sdk.start_span( + op=OP.SUBPROCESS_COMMUNICATE, + origin="auto.subprocess.stdlib.subprocess", + ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_communicate(self, *a, **kw) diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index a048fc7139..94b02f4c32 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -746,16 +746,23 @@ def fake_record_sql_queries(*args, **kwargs): @pytest.mark.asyncio async def test_span_origin(sentry_init, capture_events): - sentry_init(integrations=[AsyncPGIntegration()], traces_sample_rate=1.0) + sentry_init( + integrations=[AsyncPGIntegration()], + traces_sample_rate=1.0, + ) events = capture_events() with start_transaction(name="test_transaction"): conn: Connection = await connect(PG_CONNECTION_URI) + await conn.execute("SELECT 1") + await conn.fetchrow("SELECT 2") await conn.close() (event,) = events assert event["contexts"]["trace"]["origin"] == "manual" - assert event["spans"][0]["origin"] == "auto.db.asyncpg" + + for span in event["spans"]: + assert span["origin"] == "auto.db.asyncpg" diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 20512a0b2d..1f3de09620 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -236,7 +236,7 @@ def dummy_task(x, y): "data": ApproxDict(), "description": "dummy_task", "op": "queue.submit.celery", - "origin": "manual", + "origin": "auto.queue.celery", "parent_span_id": submission_event["contexts"]["trace"]["span_id"], "same_process_as_parent": True, "span_id": submission_event["spans"][0]["span_id"], @@ -781,3 +781,49 @@ def task(): ... (span,) = event["spans"] assert "messaging.message.receive.latency" in span["data"] assert span["data"]["messaging.message.receive.latency"] > 0 + + +def tests_span_origin_consumer(init_celery, capture_events): + celery = init_celery(enable_tracing=True) + celery.conf.broker_url = "redis://example.com" # noqa: E231 + + events = capture_events() + + @celery.task() + def task(): ... + + task.apply_async() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "auto.queue.celery" + assert event["spans"][0]["origin"] == "auto.queue.celery" + + +def tests_span_origin_producer(monkeypatch, sentry_init, capture_events): + old_publish = kombu.messaging.Producer._publish + + def publish(*args, **kwargs): + pass + + monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) + + sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + celery = Celery(__name__, broker="redis://example.com") # noqa: E231 + + events = capture_events() + + @celery.task() + def task(): ... 
+ + with start_transaction(name="custom_transaction"): + task.apply_async() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + for span in event["spans"]: + assert span["origin"] == "auto.queue.celery" + + monkeypatch.setattr(kombu.messaging.Producer, "_publish", old_publish) diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py index 50cf70cf44..66b65bbbf7 100644 --- a/tests/integrations/grpc/test_grpc.py +++ b/tests/integrations/grpc/test_grpc.py @@ -1,26 +1,45 @@ import os -from typing import List, Optional -from concurrent import futures -from unittest.mock import Mock import grpc import pytest +from concurrent import futures +from typing import List, Optional +from unittest.mock import Mock + from sentry_sdk import start_span, start_transaction from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage from tests.integrations.grpc.grpc_test_service_pb2_grpc import ( - gRPCTestServiceServicer, add_gRPCTestServiceServicer_to_server, + gRPCTestServiceServicer, gRPCTestServiceStub, ) + PORT = 50051 PORT += os.getpid() % 100 # avoid port conflicts when running tests in parallel +def _set_up(interceptors: Optional[List[grpc.ServerInterceptor]] = None): + server = grpc.server( + futures.ThreadPoolExecutor(max_workers=2), + interceptors=interceptors, + ) + + add_gRPCTestServiceServicer_to_server(TestService(), server) + server.add_insecure_port("[::]:{}".format(PORT)) + server.start() + + return server + + +def _tear_down(server: grpc.Server): + server.stop(None) + + @pytest.mark.forked def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) @@ -271,45 +290,64 @@ def test_grpc_client_and_servers_interceptors_integration( @pytest.mark.forked def test_stream_stream(sentry_init): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) - _set_up() + server = _set_up() + with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) response_iterator = stub.TestStreamStream(iter((gRPCTestMessage(text="test"),))) for response in response_iterator: assert response.text == "test" + _tear_down(server=server) + +@pytest.mark.forked def test_stream_unary(sentry_init): - """Test to verify stream-stream works. + """ + Test to verify stream-stream works. Tracing not supported for it yet. 
""" sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) - _set_up() + server = _set_up() + with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: stub = gRPCTestServiceStub(channel) response = stub.TestStreamUnary(iter((gRPCTestMessage(text="test"),))) assert response.text == "test" + _tear_down(server=server) -def _set_up(interceptors: Optional[List[grpc.ServerInterceptor]] = None): - server = grpc.server( - futures.ThreadPoolExecutor(max_workers=2), - interceptors=interceptors, - ) - add_gRPCTestServiceServicer_to_server(TestService(), server) - server.add_insecure_port("[::]:{}".format(PORT)) - server.start() +@pytest.mark.forked +def test_span_origin(sentry_init, capture_events_forksafe): + sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) + events = capture_events_forksafe() - return server + server = _set_up() + with grpc.insecure_channel("localhost:{}".format(PORT)) as channel: + stub = gRPCTestServiceStub(channel) -def _tear_down(server: grpc.Server): - server.stop(None) + with start_transaction(name="custom_transaction"): + stub.TestServe(gRPCTestMessage(text="test")) + _tear_down(server=server) + + events.write_file.close() + + transaction_from_integration = events.read_event() + custom_transaction = events.read_event() + + assert ( + transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" + ) + assert ( + transaction_from_integration["spans"][0]["origin"] + == "auto.grpc.grpc.TestService" + ) # manually created in TestService, not the instrumentation -def _find_name(request): - return request.__class__ + assert custom_transaction["contexts"]["trace"]["origin"] == "manual" + assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): @@ -317,7 +355,11 @@ class TestService(gRPCTestServiceServicer): @staticmethod def TestServe(request, context): # noqa: N802 - with start_span(op="test", description="test"): + with start_span( + op="test", + description="test", + origin="auto.grpc.grpc.TestService", + ): pass return gRPCTestMessage(text=request.text) diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py index 4faebb6172..2ff91dcf16 100644 --- a/tests/integrations/grpc/test_grpc_aio.py +++ b/tests/integrations/grpc/test_grpc_aio.py @@ -6,14 +6,14 @@ import pytest_asyncio import sentry_sdk -from sentry_sdk import Hub, start_transaction +from sentry_sdk import start_span, start_transaction from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage from tests.integrations.grpc.grpc_test_service_pb2_grpc import ( - gRPCTestServiceServicer, add_gRPCTestServiceServicer_to_server, + gRPCTestServiceServicer, gRPCTestServiceStub, ) @@ -29,46 +29,46 @@ def event_loop(request): loop.close() -@pytest.mark.asyncio -async def test_noop_for_unimplemented_method(sentry_init, capture_events, event_loop): +@pytest_asyncio.fixture(scope="function") +async def grpc_server(sentry_init, event_loop): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) server = grpc.aio.server() server.add_insecure_port("[::]:{}".format(AIO_PORT)) + add_gRPCTestServiceServicer_to_server(TestService, server) await event_loop.create_task(server.start()) - events = capture_events() try: - async with grpc.aio.insecure_channel( - "localhost:{}".format(AIO_PORT) - ) as channel: - stub = 
gRPCTestServiceStub(channel) - with pytest.raises(grpc.RpcError) as exc: - await stub.TestServe(gRPCTestMessage(text="test")) - assert exc.value.details() == "Method not found!" + yield server finally: await server.stop(None) - assert not events - -@pytest_asyncio.fixture(scope="function") -async def grpc_server(sentry_init, event_loop): +@pytest.mark.asyncio +async def test_noop_for_unimplemented_method(event_loop, sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) server = grpc.aio.server() server.add_insecure_port("[::]:{}".format(AIO_PORT)) - add_gRPCTestServiceServicer_to_server(TestService, server) await event_loop.create_task(server.start()) + events = capture_events() try: - yield server + async with grpc.aio.insecure_channel( + "localhost:{}".format(AIO_PORT) + ) as channel: + stub = gRPCTestServiceStub(channel) + with pytest.raises(grpc.RpcError) as exc: + await stub.TestServe(gRPCTestMessage(text="test")) + assert exc.value.details() == "Method not found!" finally: await server.stop(None) + assert not events + @pytest.mark.asyncio -async def test_grpc_server_starts_transaction(capture_events, grpc_server): +async def test_grpc_server_starts_transaction(grpc_server, capture_events): events = capture_events() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -87,7 +87,7 @@ async def test_grpc_server_starts_transaction(capture_events, grpc_server): @pytest.mark.asyncio -async def test_grpc_server_continues_transaction(capture_events, grpc_server): +async def test_grpc_server_continues_transaction(grpc_server, capture_events): events = capture_events() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -127,7 +127,7 @@ async def test_grpc_server_continues_transaction(capture_events, grpc_server): @pytest.mark.asyncio -async def test_grpc_server_exception(capture_events, grpc_server): +async def test_grpc_server_exception(grpc_server, capture_events): events = capture_events() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -147,7 +147,7 @@ async def test_grpc_server_exception(capture_events, grpc_server): @pytest.mark.asyncio -async def test_grpc_server_abort(capture_events, grpc_server): +async def test_grpc_server_abort(grpc_server, capture_events): events = capture_events() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -162,9 +162,7 @@ async def test_grpc_server_abort(capture_events, grpc_server): @pytest.mark.asyncio -async def test_grpc_client_starts_span( - grpc_server, sentry_init, capture_events_forksafe -): +async def test_grpc_client_starts_span(grpc_server, capture_events_forksafe): events = capture_events_forksafe() async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -224,7 +222,8 @@ async def test_grpc_client_unary_stream_starts_span( @pytest.mark.asyncio async def test_stream_stream(grpc_server): - """Test to verify stream-stream works. + """ + Test to verify stream-stream works. Tracing not supported for it yet. """ async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -236,7 +235,8 @@ async def test_stream_stream(grpc_server): @pytest.mark.asyncio async def test_stream_unary(grpc_server): - """Test to verify stream-stream works. + """ + Test to verify stream-stream works. Tracing not supported for it yet. 
""" async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: @@ -245,6 +245,32 @@ async def test_stream_unary(grpc_server): assert response.text == "test" +@pytest.mark.asyncio +async def test_span_origin(grpc_server, capture_events_forksafe): + events = capture_events_forksafe() + + async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel: + stub = gRPCTestServiceStub(channel) + with start_transaction(name="custom_transaction"): + await stub.TestServe(gRPCTestMessage(text="test")) + + events.write_file.close() + + transaction_from_integration = events.read_event() + custom_transaction = events.read_event() + + assert ( + transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" + ) + assert ( + transaction_from_integration["spans"][0]["origin"] + == "auto.grpc.grpc.TestService.aio" + ) # manually created in TestService, not the instrumentation + + assert custom_transaction["contexts"]["trace"]["origin"] == "manual" + assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + + class TestService(gRPCTestServiceServicer): class TestException(Exception): __test__ = False @@ -254,8 +280,11 @@ def __init__(self): @classmethod async def TestServe(cls, request, context): # noqa: N802 - hub = Hub.current - with hub.start_span(op="test", description="test"): + with start_span( + op="test", + description="test", + origin="auto.grpc.grpc.TestService.aio", + ): pass if request.text == "exception": diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py index 418d08b739..8064e127f6 100644 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -326,6 +326,7 @@ def test_on_start_transaction(): otel_span.start_time / 1e9, timezone.utc ), instrumenter="otel", + origin="auto.otel", ) assert len(span_processor.otel_span_map.keys()) == 1 @@ -365,6 +366,7 @@ def test_on_start_child(): otel_span.start_time / 1e9, timezone.utc ), instrumenter="otel", + origin="auto.otel", ) assert len(span_processor.otel_span_map.keys()) == 2 diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py index 4f024a2824..17130b337b 100644 --- a/tests/integrations/redis/asyncio/test_redis_asyncio.py +++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py @@ -83,3 +83,30 @@ async def test_async_redis_pipeline( "redis.transaction": is_transaction, "redis.is_cluster": False, } + + +@pytest.mark.asyncio +async def test_async_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[RedisIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + connection = FakeRedis() + with start_transaction(name="custom_transaction"): + # default case + await connection.set("somekey", "somevalue") + + # pipeline + pipeline = connection.pipeline(transaction=False) + pipeline.get("somekey") + pipeline.set("anotherkey", 1) + await pipeline.execute() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + for span in event["spans"]: + assert span["origin"] == "auto.db.redis" diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py b/tests/integrations/redis/cluster/test_redis_cluster.py index a16d66588c..83d1b45cc9 100644 --- a/tests/integrations/redis/cluster/test_redis_cluster.py +++ b/tests/integrations/redis/cluster/test_redis_cluster.py @@ -144,3 +144,29 @@ def 
test_rediscluster_pipeline( "redis.transaction": False, # For Cluster, this is always False "redis.is_cluster": True, } + + +def test_rediscluster_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[RedisIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + rc = redis.RedisCluster(host="localhost", port=6379) + with start_transaction(name="custom_transaction"): + # default case + rc.set("somekey", "somevalue") + + # pipeline + pipeline = rc.pipeline(transaction=False) + pipeline.get("somekey") + pipeline.set("anotherkey", 1) + pipeline.execute() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + for span in event["spans"]: + assert span["origin"] == "auto.db.redis" diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py index a6d8962afe..993a2962ca 100644 --- a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py +++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py @@ -147,3 +147,30 @@ async def test_async_redis_pipeline( "redis.transaction": False, "redis.is_cluster": True, } + + +@pytest.mark.asyncio +async def test_async_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[RedisIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + connection = cluster.RedisCluster(host="localhost", port=6379) + with start_transaction(name="custom_transaction"): + # default case + await connection.set("somekey", "somevalue") + + # pipeline + pipeline = connection.pipeline(transaction=False) + pipeline.get("somekey") + pipeline.set("anotherkey", 1) + await pipeline.execute() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + for span in event["spans"]: + assert span["origin"] == "auto.db.redis" diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index 8203f75130..5173885f33 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -293,3 +293,29 @@ def test_db_connection_attributes_pipeline(sentry_init, capture_events): assert span["data"][SPANDATA.DB_NAME] == "1" assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost" assert span["data"][SPANDATA.SERVER_PORT] == 63791 + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[RedisIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + connection = FakeStrictRedis() + with start_transaction(name="custom_transaction"): + # default case + connection.set("somekey", "somevalue") + + # pipeline + pipeline = connection.pipeline(transaction=False) + pipeline.get("somekey") + pipeline.set("anotherkey", 1) + pipeline.execute() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + for span in event["spans"]: + assert span["origin"] == "auto.db.redis" diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py index 4f93c1f2a5..389256de33 100644 --- a/tests/integrations/socket/test_socket.py +++ b/tests/integrations/socket/test_socket.py @@ -56,3 +56,24 @@ def test_create_connection_trace(sentry_init, capture_events): "port": 443, } ) + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[SocketIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + with start_transaction(name="foo"): + 
socket.create_connection(("example.com", 443), 1, None) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + assert event["spans"][0]["op"] == "socket.connection" + assert event["spans"][0]["origin"] == "auto.socket.socket" + + assert event["spans"][1]["op"] == "socket.dns" + assert event["spans"][1]["origin"] == "auto.socket.socket" diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 99d6a5c5fc..cedb542e93 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -670,3 +670,23 @@ def __exit__(self, type, value, traceback): break else: raise AssertionError("No db span found") + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[SqlalchemyIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + engine = create_engine( + "sqlite:///:memory:", connect_args={"check_same_thread": False} + ) + with start_transaction(name="foo"): + with engine.connect() as con: + con.execute(text("SELECT 0")) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.db.sqlalchemy" diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index 3dc7c6c50f..c327331608 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -326,3 +326,19 @@ def test_option_trace_propagation_targets( else: assert "sentry-trace" not in request_headers assert "baggage" not in request_headers + + +def test_span_origin(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, debug=True) + events = capture_events() + + with start_transaction(name="foo"): + conn = HTTPSConnection("example.com") + conn.request("GET", "/foo") + conn.getresponse() + + (event,) = events + assert event["contexts"]["trace"]["origin"] == "manual" + + assert event["spans"][0]["op"] == "http.client" + assert event["spans"][0]["origin"] == "auto.http.stdlib.httplib" diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index c931db09c4..1e0d63149b 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -181,3 +181,33 @@ def test_subprocess_invalid_args(sentry_init): subprocess.Popen(1) assert "'int' object is not iterable" in str(excinfo.value) + + +def test_subprocess_span_origin(sentry_init, capture_events): + sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) + events = capture_events() + + with start_transaction(name="foo"): + args = [ + sys.executable, + "-c", + "print('hello world')", + ] + kw = {"args": args, "stdout": subprocess.PIPE} + + popen = subprocess.Popen(**kw) + popen.communicate() + popen.poll() + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + + assert event["spans"][0]["op"] == "subprocess" + assert event["spans"][0]["origin"] == "auto.subprocess.stdlib.subprocess" + + assert event["spans"][1]["op"] == "subprocess.communicate" + assert event["spans"][1]["origin"] == "auto.subprocess.stdlib.subprocess" + + assert event["spans"][2]["op"] == "subprocess.wait" + assert event["spans"][2]["origin"] == "auto.subprocess.stdlib.subprocess" From 21093959408bf22cfad577eaf78c881ebc4a86cf Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Jun 2024 16:58:17 +0200 Subject: [PATCH 12/12] Updated tests --- 
tests/integrations/arq/test_arq.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py index e121a1baa2..cd4cad67b8 100644 --- a/tests/integrations/arq/test_arq.py +++ b/tests/integrations/arq/test_arq.py @@ -289,5 +289,5 @@ async def job(ctx): (event,) = events assert event["contexts"]["trace"]["origin"] == "auto.queue.arq" - assert event["spans"][0]["origin"] == "manual" # redis db access - assert event["spans"][1]["origin"] == "manual" # redis db access + assert event["spans"][0]["origin"] == "auto.db.redis" + assert event["spans"][1]["origin"] == "auto.db.redis"
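
For reference, a minimal sketch of how the span `origin` introduced in this patch series surfaces on a captured transaction event. This is an illustration under assumptions, not part of the patch set: the DSN is a placeholder, `before_send_transaction` is used only to inspect the event locally, and `auto.example.demo` is a made-up origin string following the `auto.<category>.<identifier>` convention the integrations above adopt.

import sentry_sdk
from sentry_sdk import start_span, start_transaction

# Made-up origin string, following the ``auto.<category>.<identifier>``
# convention used by the integrations in this patch series.
DEMO_ORIGIN = "auto.example.demo"


def inspect_transaction(event, hint):
    # The origin is recorded on the trace context and on every child span.
    print(event["contexts"]["trace"]["origin"])         # -> "manual" (default)
    print([span["origin"] for span in event["spans"]])  # -> ["auto.example.demo"]
    return event


sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    before_send_transaction=inspect_transaction,
)

with start_transaction(name="demo"):  # no origin given, so it defaults to "manual"
    with start_span(op="demo.op", description="demo work", origin=DEMO_ORIGIN):
        pass

Instrumented spans created by the integrations in this series carry their integration's origin (for example "auto.db.redis" or "auto.queue.celery") instead of DEMO_ORIGIN, while manually created spans and transactions keep "manual" unless an origin is passed explicitly.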