Skip to content

Commit

Permalink
NH-57036: I reworked the merge script of integration tests to be more…
Browse files Browse the repository at this point in the history
… reliable now
  • Loading branch information
gantrior committed Oct 6, 2023
1 parent fb787d6 commit cf8222d
Show file tree
Hide file tree
Showing 2 changed files with 217 additions and 14 deletions.
187 changes: 187 additions & 0 deletions tests/integration/expected_output.json
Original file line number Diff line number Diff line change
Expand Up @@ -22271,6 +22271,127 @@
"scopeMetrics": [
{
"metrics": [
{
"gauge": {
"dataPoints": [
{
"asDouble": 31.78,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.cpu.allocatable"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 32,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.cpu.capacity"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 126595563520,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.memory.allocatable"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 132738121728,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.memory.capacity"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 2,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.nodes"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 2,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.nodes.ready"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 1,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.nodes.ready.avg"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 2,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.pods"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 2,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.pods.running"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 0.1,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.spec.cpu.requests"
},
{
"gauge": {
"dataPoints": [
{
"asDouble": 6442450944,
"timeUnixNano": "0"
}
]
},
"name": "k8s.cluster.spec.memory.requests"
},
{
"gauge": {
"dataPoints": [
Expand Down Expand Up @@ -35873,6 +35994,72 @@
"scope": {}
}
]
},
{
"resource": {
"attributes": [
{
"key": "http.scheme",
"value": {
"stringValue": "http"
}
},
{
"key": "k8s.cluster.name",
"value": {
"stringValue": "cluster name"
}
},
{
"key": "net.host.name",
"value": {
"stringValue": "test-node"
}
},
{
"key": "net.host.port",
"value": {
"stringValue": ""
}
},
{
"key": "sw.k8s.agent.app.version",
"value": {
"stringValue": "0.8.3"
}
},
{
"key": "sw.k8s.agent.manifest.version",
"value": {
"stringValue": "2.8.0-alpha.3"
}
},
{
"key": "sw.k8s.cluster.uid",
"value": {
"stringValue": "cluster-uid-123456789"
}
}
]
},
"scopeMetrics": [
{
"metrics": [
{
"gauge": {
"dataPoints": [
{
"asDouble": 2,
"timeUnixNano": "0"
}
]
},
"name": "k8s.node.pods"
}
],
"scope": {}
}
]
}
]
}
44 changes: 30 additions & 14 deletions tests/integration/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,17 +158,29 @@ def process_metric_type(metric):
sort_datapoints(metric)

def merge_datapoints(existing_datapoints, new_datapoints):
existing_datapoints_dict = {datapoint_sorting_key(dp): dp for dp in existing_datapoints}
merged_datapoints = [(datapoint_sorting_key(dp), dp) for dp in existing_datapoints]

for new_datapoint in new_datapoints:
new_datapoint_hash_key = datapoint_sorting_key(new_datapoint)
found = False # flag to track if a matching datapoint was found

for key, existing_datapoint in merged_datapoints:
if key == new_datapoint_hash_key:
existing_datapoint.update(new_datapoint) # update existing_datapoint in-place
found = True # set flag to True since a matching datapoint was found
break # exit the loop since a match was found and handled

if not found: # if no matching datapoint was found, append the new datapoint
merged_datapoints.append((new_datapoint_hash_key, new_datapoint))

if new_datapoint_hash_key in existing_datapoints_dict:
existing_datapoints_dict[new_datapoint_hash_key].update(new_datapoint)
else:
existing_datapoints.append(new_datapoint)

existing_datapoints.clear()
existing_datapoints.extend(dp for _, dp in merged_datapoints)

def merge_metrics(existing_metric, new_metric):
if new_metric["name"] == "k8s.cluster.nodes.ready":
print("Skipping merge of scrape_duration_seconds metric")
return

metric_types = ["sum", "gauge", "histogram"]

for metric_type in metric_types:
Expand All @@ -178,11 +190,12 @@ def merge_metrics(existing_metric, new_metric):
merge_datapoints(existing_datapoints, new_datapoints)

def merge_scope_metrics(existing_scope, new_scope):
existing_metrics = {metric["name"]: metric for metric in existing_scope["metrics"]}

for new_metric in new_scope["metrics"]:
if new_metric["name"] in existing_metrics:
merge_metrics(existing_metrics[new_metric["name"]], new_metric)
new_metric_name = new_metric["name"]
for existing_metric in existing_scope["metrics"]:
if existing_metric["name"] == new_metric_name:
merge_metrics(existing_metric, new_metric)
break
else:
existing_scope["metrics"].append(new_metric)

Expand All @@ -198,13 +211,16 @@ def merge_resources(existing_resource, new_resource):
existing_scopes.append(new_scope)

def custom_json_merge(result, new_json):
new_resources = {resource_sorting_key(resource): resource for resource in new_json["resourceMetrics"]}
new_resources = [(resource_sorting_key(resource), resource) for resource in new_json["resourceMetrics"]]

for existing_resource in result["resourceMetrics"]:
existing_key = resource_sorting_key(existing_resource)
if existing_key in new_resources:
merge_resources(existing_resource, new_resources.pop(existing_key))
matching_new_resources = [item for item in new_resources if item[0] == existing_key]
for _, new_resource in matching_new_resources:
merge_resources(existing_resource, new_resource)
new_resources.remove((existing_key, new_resource))

result["resourceMetrics"].extend(new_resources.values())
result["resourceMetrics"].extend(resource for _, resource in new_resources)

def get_merged_json(content):
result = {"resourceMetrics": []}
Expand Down

0 comments on commit cf8222d

Please sign in to comment.