Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(source-map-debugger): Return data about scraping attempts from source map debugger endpoint #57655

Merged
merged 3 commits into from
Oct 10, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,40 @@
ARTIFACT_INDEX_LOOKUP_LIMIT = 25


class ScrapingResultSuccess(TypedDict):
    """Scraping attempt for `url` that completed successfully."""

    url: str
    status: Literal["success"]


class ScrapingResultNotAttempted(TypedDict):
    """Scraping of `url` was never attempted (e.g. it was not needed)."""

    url: str
    status: Literal["not_attempted"]


class ScrapingResultFailure(TypedDict):
    """Scraping attempt for `url` that failed, with a categorized reason.

    `details` optionally carries a human-readable explanation of the failure.
    """

    url: str
    status: Literal["failure"]
    reason: Literal[
        "not_found",
        "disabled",
        "invalid_host",
        "permission_denied",
        "timeout",
        "download_error",
        "other",
    ]
    details: Optional[str]


class SourceMapScrapingProcessResult(TypedDict):
    """Scraping outcome for one frame: its source file and its source map.

    Either entry is None when no scraping attempt was recorded for the
    corresponding URL.
    """

    source_file: Optional[
        Union[ScrapingResultSuccess, ScrapingResultNotAttempted, ScrapingResultFailure]
    ]
    source_map: Optional[
        Union[ScrapingResultSuccess, ScrapingResultNotAttempted, ScrapingResultFailure]
    ]


class SourceMapDebugIdProcessResult(TypedDict):
debug_id: Optional[str]
uploaded_source_file_with_correct_debug_id: bool
Expand All @@ -72,6 +106,7 @@ class SourceMapReleaseProcessResult(TypedDict):
class SourceMapDebugFrame(TypedDict):
    """Per-frame debug info covering all three source map resolution paths."""

    debug_id_process: SourceMapDebugIdProcessResult
    # None when the event has no release to look artifacts up in.
    release_process: Optional[SourceMapReleaseProcessResult]
    scraping_process: SourceMapScrapingProcessResult


class SourceMapDebugException(TypedDict):
Expand All @@ -88,6 +123,7 @@ class SourceMapDebugResponse(TypedDict):
release_has_some_artifact: bool
has_uploaded_some_artifact_with_a_debug_id: bool
sdk_debug_id_support: Literal["not-supported", "unofficial-sdk", "needs-upgrade", "full"]
has_scraping_data: bool


@region_silo_endpoint
Expand Down Expand Up @@ -197,6 +233,9 @@ def get(self, request: Request, project: Project, event_id: str) -> Response:
path_data = ReleaseLookupData(abs_path, project, release, event).to_dict()
release_process_abs_path_data[abs_path] = path_data

# Get a map that maps from abs_path to scraping data
scraping_attempt_map = get_scraping_attempt_map(event_data)

# build information about individual exceptions and their stack traces
processed_exceptions = []
exception_values = get_path(event_data, "exception", "values")
Expand Down Expand Up @@ -227,6 +266,9 @@ def get(self, request: Request, project: Project, event_id: str) -> Response:
in debug_ids_with_uploaded_source_map,
},
"release_process": release_process_abs_path_data.get(abs_path),
"scraping_process": get_scraping_data_for_frame(
scraping_attempt_map, frame
),
}
)
processed_exceptions.append({"frames": processed_frames})
Expand All @@ -243,10 +285,28 @@ def get(self, request: Request, project: Project, event_id: str) -> Response:
or has_uploaded_artifact_bundle_with_release,
"has_uploaded_some_artifact_with_a_debug_id": has_uploaded_some_artifact_with_a_debug_id,
"sdk_debug_id_support": get_sdk_debug_id_support(event_data),
"has_scraping_data": event_data.get("scraping_attempts") is not None,
}
)


def get_scraping_data_for_frame(scraping_attempt_map, frame):
    """Look up the scraping results for a stack frame's source file and source map.

    Args:
        scraping_attempt_map: Mapping of url -> scraping attempt data, as built
            by `get_scraping_attempt_map`.
        frame: A stack frame dict from the event payload.

    Returns:
        A dict with "source_file" and "source_map" keys, each holding the
        matching scraping attempt data, or None when there is no match.
    """
    abs_path = frame.get("abs_path")
    if abs_path is None:
        return {"source_file": None, "source_map": None}

    source_file_data = scraping_attempt_map.get(abs_path)

    # `frame["data"]` may be present but explicitly None; `frame.get("data", {})`
    # would return that None and the `.get("sourcemap")` below would raise.
    # Using `or {}` covers both the missing-key and the None cases.
    data = frame.get("data") or {}
    source_map_url = data.get("sourcemap")

    source_map_data = None
    if source_map_url is not None:
        source_map_data = scraping_attempt_map.get(source_map_url)

    return {"source_file": source_file_data, "source_map": source_map_data}


class ReleaseLookupData:
def __init__(self, abs_path: str, project: Project, release: Release, event):
self.abs_path = abs_path
Expand Down Expand Up @@ -619,3 +679,21 @@ def get_abs_paths_in_event(event_data):
if abs_path:
abs_paths.add(abs_path)
return abs_paths


def get_scraping_attempt_map(event_data):
    """Build a mapping from URL to scraping-attempt summary for an event.

    Each summary always carries "status" and "url"; the optional "reason"
    and "details" fields are copied over only when present and non-None.
    Returns an empty dict when the event has no scraping attempts.
    """
    attempts_by_url = {}
    for attempt in event_data.get("scraping_attempts") or []:
        summary = {"status": attempt["status"], "url": attempt["url"]}

        # "reason" and "details" are optional; only include the ones that
        # actually carry a value.
        for optional_key in ("reason", "details"):
            value = attempt.get(optional_key)
            if value is not None:
                summary[optional_key] = value

        attempts_by_url[attempt["url"]] = summary
    return attempts_by_url
Original file line number Diff line number Diff line change
Expand Up @@ -31,17 +31,30 @@ def create_exception_with_frame(frame):
}


def create_event(
    exceptions=None,
    debug_meta_images=None,
    sdk=None,
    release=None,
    dist=None,
    scraping_attempts=None,
):
    """Build a minimal event payload dict for the source map debugger tests.

    Args:
        exceptions: List of exception dicts; defaults to an empty list.
        debug_meta_images: When given, wrapped as ``{"images": ...}`` under
            "debug_meta"; otherwise "debug_meta" is None.
        sdk: SDK info dict, stored verbatim.
        release: Release name, stored verbatim.
        dist: Dist name, stored verbatim.
        scraping_attempts: When given, stored under "scraping_attempts";
            otherwise the key is omitted entirely.

    Returns:
        An event payload dict suitable for ``store_event``.
    """
    exceptions = [] if exceptions is None else exceptions
    event = {
        "event_id": "a" * 32,
        "release": release,
        "dist": dist,
        "exception": {"values": exceptions},
        "debug_meta": None if debug_meta_images is None else {"images": debug_meta_images},
        "sdk": sdk,
    }

    # Only include the key when attempts were provided, so events without
    # scraping data omit "scraping_attempts" entirely — mirroring real
    # payloads and exercising the endpoint's key-presence check.
    if scraping_attempts is not None:
        event["scraping_attempts"] = scraping_attempts

    return event


@region_silo_test # TODO(hybrid-cloud): stable=True blocked on actors
class SourceMapDebugBlueThunderEditionEndpointTestCase(APITestCase):
Expand Down Expand Up @@ -1639,3 +1652,167 @@ def test_frame_release_file_wrong_dist(self):

assert release_process_result["source_file_lookup_result"] == "wrong-dist"
assert release_process_result["source_map_lookup_result"] == "unsuccessful"

def test_has_scraping_data_flag_true(self):
    """has_scraping_data is true when the event carries scraping attempts."""
    payload = create_event(
        exceptions=[],
        scraping_attempts=[
            {"url": "https://example.com/bundle0.js", "status": "success"}
        ],
    )

    with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
        stored = self.store_event(data=payload, project_id=self.project.id)
        response = self.get_success_response(
            self.organization.slug,
            self.project.slug,
            stored.event_id,
        )

    assert response.data["has_scraping_data"]

def test_has_scraping_data_flag_false(self):
    """has_scraping_data is false when the event has no scraping attempts."""
    with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
        stored = self.store_event(
            data=create_event(exceptions=[]), project_id=self.project.id
        )
        response = self.get_success_response(
            self.organization.slug,
            self.project.slug,
            stored.event_id,
        )

    assert not response.data["has_scraping_data"]

def test_scraping_result_source_file(self):
    """Each frame's scraping_process.source_file matches the attempt for its abs_path."""
    bundle_urls = [f"https://example.com/bundle{i}.js" for i in range(4)]
    # One attempt per status variant; bundle3.js deliberately has no attempt.
    attempts = [
        {"url": bundle_urls[0], "status": "success"},
        {"url": bundle_urls[1], "status": "not_attempted"},
        {
            "url": bundle_urls[2],
            "status": "failure",
            "reason": "not_found",
            "details": "Did not find source",
        },
    ]

    with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
        stored = self.store_event(
            data=create_event(
                exceptions=[
                    create_exception_with_frame({"abs_path": url}) for url in bundle_urls
                ],
                scraping_attempts=attempts,
            ),
            project_id=self.project.id,
        )
        response = self.get_success_response(
            self.organization.slug,
            self.project.slug,
            stored.event_id,
        )

    # The frame for bundle3.js (no attempt recorded) must come back as None.
    for index, expected in enumerate(attempts + [None]):
        source_file = response.data["exceptions"][index]["frames"][0]["scraping_process"][
            "source_file"
        ]
        assert source_file == expected

def test_scraping_result_source_map(self):
    """Each frame's scraping_process.source_map matches the attempt for its sourcemap url."""
    bundle_urls = [f"https://example.com/bundle{i}.js" for i in range(4)]
    # One attempt per status variant, keyed by sourcemap URL;
    # bundle3.js.map deliberately has no attempt.
    map_attempts = [
        {"url": f"{bundle_urls[0]}.map", "status": "success"},
        {"url": f"{bundle_urls[1]}.map", "status": "not_attempted"},
        {
            "url": f"{bundle_urls[2]}.map",
            "status": "failure",
            "reason": "not_found",
            "details": "Did not find source",
        },
    ]
    frames = [
        create_exception_with_frame(
            {"abs_path": url, "data": {"sourcemap": f"{url}.map"}}
        )
        for url in bundle_urls
    ]

    with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
        stored = self.store_event(
            data=create_event(exceptions=frames, scraping_attempts=map_attempts),
            project_id=self.project.id,
        )
        response = self.get_success_response(
            self.organization.slug,
            self.project.slug,
            stored.event_id,
        )

    # The frame whose sourcemap has no recorded attempt must come back as None.
    for index, expected in enumerate(map_attempts + [None]):
        source_map = response.data["exceptions"][index]["frames"][0]["scraping_process"][
            "source_map"
        ]
        assert source_map == expected