improve tracesToProfiles and tracesToLogsV2
Signed-off-by: Weifeng Wang <[email protected]>

qclaogui committed Mar 6, 2024
1 parent 623e50c commit 6ba9146
Showing 11 changed files with 307 additions and 237 deletions.
144 changes: 118 additions & 26 deletions docker-compose/common/config/agent-flow/monolithic-mode-all.river
@@ -49,7 +49,7 @@ discovery.relabel "containers" {
rule {
source_labels = ["__meta_docker_container_label_com_docker_compose_service"]
regex = "(.*)"
- target_label = "service_name"
+ target_label = "app"
}

rule {
@@ -180,9 +180,9 @@ otelcol.receiver.jaeger "containers" {
}

output {
- metrics = [otelcol.processor.batch.containers.input]
- logs = [otelcol.processor.batch.containers.input]
- traces = [otelcol.processor.batch.containers.input]
+ metrics = [otelcol.processor.transform.add_metric_datapoint_attributes.input]
+ logs = [otelcol.processor.resourcedetection.containers.input]
+ traces = [otelcol.processor.resourcedetection.containers.input]
}
}

@@ -200,23 +200,131 @@ otelcol.receiver.otlp "containers" {
}

output {
- metrics = [otelcol.processor.batch.containers.input]
- logs = [otelcol.processor.batch.containers.input]
+ metrics = [otelcol.processor.transform.add_metric_datapoint_attributes.input]
+ logs = [otelcol.processor.resourcedetection.containers.input]
traces = [
- otelcol.processor.batch.containers.input,
+ otelcol.processor.resourcedetection.containers.input,
otelcol.connector.spanlogs.autologging.input,
]
}
}
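With this commit the receivers stop feeding otelcol.processor.batch directly: metrics first pass through the datapoint transform, logs and traces enter otelcol.processor.resourcedetection, and traces additionally fan out to the spanlogs connector. In Flow, listing several consumers sends each of them its own copy of the signal. A minimal sketch of that fan-out pattern, assuming the two consumers are defined as in this file (the "example" label is hypothetical, not part of the commit):

otelcol.receiver.otlp "example" {
  grpc {
    endpoint = "0.0.0.0:4317"
  }

  output {
    // Every consumer in the list receives its own copy of each span.
    traces = [
      otelcol.processor.resourcedetection.containers.input,
      otelcol.connector.spanlogs.autologging.input,
    ]
  }
}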

otelcol.processor.resourcedetection "containers" {
detectors = ["env"]

output {
logs = [otelcol.processor.attributes.containers.input]
metrics = [otelcol.processor.attributes.containers.input]
traces = [otelcol.processor.attributes.containers.input]
}
}
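With detectors = ["env"], otelcol.processor.resourcedetection reads resource attributes from the agent's OTEL_RESOURCE_ATTRIBUTES environment variable and stamps them onto passing telemetry. A hedged sketch of chaining a second detector, assuming the system block mirrors the upstream resourcedetection processor:

otelcol.processor.resourcedetection "example" {
  // "env" parses OTEL_RESOURCE_ATTRIBUTES (e.g. "deployment.environment=dev");
  // "system" adds host metadata such as host.name.
  detectors = ["env", "system"]

  system {
    hostname_sources = ["os"]
  }

  output {
    traces = [otelcol.processor.attributes.containers.input]
  }
}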

otelcol.processor.transform "add_metric_datapoint_attributes" {
error_mode = "ignore"

metric_statements {
context = "datapoint"
statements = [
`set(attributes["deployment.environment"], resource.attributes["deployment.environment"])`,
`set(attributes["service.version"], resource.attributes["service.version"])`,
]
}

output {
metrics = [otelcol.processor.attributes.containers.input]
}
}
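The datapoint-context statements copy deployment.environment and service.version from the resource onto every datapoint, so they survive as labels when otelcol.exporter.prometheus converts the metrics. A hedged variant that guards against absent resource attributes, using an OTTL where clause as in the upstream transform processor:

otelcol.processor.transform "example" {
  error_mode = "ignore"

  metric_statements {
    context = "datapoint"
    statements = [
      // Only copy when the resource attribute is actually set,
      // to avoid creating empty datapoint attributes.
      `set(attributes["service.version"], resource.attributes["service.version"]) where resource.attributes["service.version"] != nil`,
    ]
  }

  output {
    metrics = [otelcol.processor.attributes.containers.input]
  }
}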

otelcol.processor.attributes "containers" {
// Insert a "cluster" attribute on metrics, logs, and traces when the key doesn't already exist.
action {
key = "cluster"
value = "docker-compose"
action = "insert"
}

output {
metrics = [otelcol.processor.transform.add_resource_attributes.input]
logs = [otelcol.processor.transform.add_resource_attributes.input]
traces = [otelcol.processor.transform.add_resource_attributes.input]
}
}
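The "insert" action only adds the cluster attribute when the key is absent, so values already set upstream win. A hedged sketch of the alternative that would force the value (action names follow the upstream attributes processor):

otelcol.processor.attributes "example" {
  action {
    key    = "cluster"
    value  = "docker-compose"
    // "upsert" overwrites an existing value; "insert" (used above) does not.
    action = "upsert"
  }

  output {
    traces = [otelcol.processor.transform.add_resource_attributes.input]
  }
}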

otelcol.processor.transform "add_resource_attributes" {
error_mode = "ignore"

metric_statements {
context = "resource"
statements = [
`set(attributes["cluster"], "docker-compose") where attributes["cluster"] == nil`,
]
}

log_statements {
context = "resource"
statements = [
`set(attributes["pod"], attributes["pod.name"])`,
`set(attributes["namespace"], attributes["namespace.name"])`,
`set(attributes["loki.resource.labels"], "pod, namespace, cluster, job")`,
]
}

trace_statements {
context = "resource"
statements = [
`set(attributes["cluster"], "docker-compose") where attributes["cluster"] == nil`,
]
}

output {
metrics = [otelcol.processor.filter.containers.input]
logs = [otelcol.processor.filter.containers.input]
traces = [otelcol.processor.filter.containers.input]
}
}
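The loki.resource.labels entry is a hint consumed by otelcol.exporter.loki further down the pipeline: the listed resource attributes (pod, namespace, cluster, job) are promoted to Loki labels instead of staying only in the log body. A hedged sketch of a related hint, loki.format, assumed from the exporter's documentation:

otelcol.processor.transform "example" {
  error_mode = "ignore"

  log_statements {
    context = "resource"
    statements = [
      // Ask the Loki exporter to render log bodies as logfmt.
      `set(attributes["loki.format"], "logfmt")`,
    ]
  }

  output {
    logs = [otelcol.exporter.loki.containers.input]
  }
}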

otelcol.processor.filter "containers" {
error_mode = "ignore"

output {
metrics = [otelcol.processor.batch.containers.input]
logs = [otelcol.processor.batch.containers.input]
traces = [otelcol.processor.batch.containers.input]
}
}
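As written, this filter carries no conditions, so it forwards everything; it is a placeholder where drop rules can be added. A hedged sketch that would drop health-check spans (the attribute key is an assumption, not from this commit):

otelcol.processor.filter "example" {
  error_mode = "ignore"

  traces {
    // Spans matching any listed OTTL condition are dropped.
    span = [
      `attributes["http.target"] == "/healthz"`,
    ]
  }

  output {
    traces = [otelcol.processor.batch.containers.input]
  }
}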

otelcol.processor.batch "containers" {
send_batch_size = 16384
send_batch_max_size = 0
timeout = "2s"

output {
metrics = [otelcol.processor.memory_limiter.containers.input]
logs = [otelcol.processor.memory_limiter.containers.input]
traces = [otelcol.processor.memory_limiter.containers.input]
}
}

otelcol.processor.memory_limiter "containers" {
check_interval = "1s"
limit_percentage = 50
spike_limit_percentage = 30

output {
metrics = [otelcol.exporter.prometheus.containers.input]
logs = [otelcol.exporter.loki.containers.input]
traces = [module.file.docker_compose.exports.traces_receiver]
}
}
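Two settings worth noting: send_batch_max_size = 0 leaves batches uncapped beyond the 16384 trigger size, and the memory limiter (50% limit, 30% spike allowance, checked every second) sits after the batcher, immediately before the exporters. Upstream collector guidance typically places memory_limiter right after the receivers, so readers adapting this file may want to reorder the chain.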

otelcol.exporter.prometheus "containers" {
forward_to = [module.file.docker_compose.exports.metrics_receiver]
}

otelcol.exporter.loki "containers" {
forward_to = [loki.process.containers.receiver]
}

// The OpenTelemetry spanlogs connector processes incoming trace spans and extracts data from them, ready for logging.
otelcol.connector.spanlogs "autologging" {
@@ -262,32 +370,16 @@ loki.process "autologging" {
forward_to = [loki.process.containers.receiver]
}
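The spanlogs connector's body is collapsed in this diff. A hedged sketch of what such a configuration typically looks like, with field names taken from the Agent Flow reference (the values are assumptions, not the commit's actual settings):

otelcol.connector.spanlogs "example" {
  // Emit one log line per root span, carrying selected span attributes.
  roots           = true
  span_attributes = ["http.method", "http.target"]

  overrides {
    trace_id_key = "traceId"
  }

  output {
    logs = [otelcol.exporter.loki.containers.input]
  }
}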

- otelcol.processor.memory_limiter "containers" {
- output {
- metrics = [otelcol.exporter.prometheus.containers.input]
- logs = [otelcol.exporter.loki.containers.input]
- traces = [module.file.docker_compose.exports.traces_receiver]
- }
- }
-
- otelcol.exporter.prometheus "containers" {
- forward_to = [module.file.docker_compose.exports.metrics_receiver]
- }
-
- otelcol.exporter.loki "containers" {
- forward_to = [loki.process.containers.receiver]
- }

/********************************************
* Profiles
********************************************/

pyroscope.scrape "containers" {
targets = [
{"__address__" = "mimir:8080", "service_name" = "mimir"},
{"__address__" = "loki:3100", "service_name" = "loki-all"},
{"__address__" = "tempo:3200", "service_name" = "tempo-all"},
{"__address__" = "loki:3100", "service_name" = "loki"},
{"__address__" = "grafana:6060", "service_name" = "grafana"},
{"__address__" = "tempo:3200", "service_name" = "tempo"},
{"__address__" = "mimir:8080", "service_name" = "mimir"},
]

clustering {
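Renaming the profile targets from loki-all and tempo-all to loki and tempo (and adding grafana) lines the Pyroscope service_name label up with the service.name resource attribute on spans, which is what the tracesToProfiles link in the Tempo datasource (see datasources.yaml below) appears to rely on when it looks up profiles for a span.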
101 changes: 82 additions & 19 deletions docker-compose/common/config/agent-flow/traces.river
@@ -54,7 +54,7 @@ discovery.relabel "containers" {
rule {
source_labels = ["__meta_docker_container_label_com_docker_compose_service"]
regex = "(.*)"
- target_label = "service_name"
+ target_label = "app"
}

rule {
@@ -161,9 +161,8 @@ otelcol.receiver.jaeger "containers" {
}

output {
- metrics = [otelcol.processor.batch.containers.input]
- logs = [otelcol.processor.batch.containers.input]
- traces = [otelcol.processor.batch.containers.input]
+ metrics = [otelcol.processor.transform.add_metric_datapoint_attributes.input]
+ traces = [otelcol.processor.resourcedetection.containers.input]
}
}

@@ -180,41 +179,105 @@ otelcol.receiver.otlp "containers" {
endpoint = "0.0.0.0:4318"
}

output {
metrics = [otelcol.processor.transform.add_metric_datapoint_attributes.input]
traces = [otelcol.processor.resourcedetection.containers.input]
}
}

otelcol.processor.resourcedetection "containers" {
detectors = ["env"]

output {
metrics = [otelcol.processor.attributes.containers.input]
traces = [otelcol.processor.attributes.containers.input]
}
}

otelcol.processor.transform "add_metric_datapoint_attributes" {
error_mode = "ignore"

metric_statements {
context = "datapoint"
statements = [
`set(attributes["deployment.environment"], resource.attributes["deployment.environment"])`,
`set(attributes["service.version"], resource.attributes["service.version"])`,
]
}

output {
metrics = [otelcol.processor.attributes.containers.input]
}
}

otelcol.processor.attributes "containers" {
// Insert a "cluster" attribute when the key doesn't already exist.
action {
key = "cluster"
value = "docker-compose"
action = "insert"
}

output {
metrics = [otelcol.processor.transform.add_resource_attributes.input]
traces = [otelcol.processor.transform.add_resource_attributes.input]
}
}

otelcol.processor.transform "add_resource_attributes" {
error_mode = "ignore"

metric_statements {
context = "resource"
statements = [
`set(attributes["cluster"], "docker-compose") where attributes["cluster"] == nil`,
]
}

trace_statements {
context = "resource"
statements = [
`set(attributes["cluster"], "docker-compose") where attributes["cluster"] == nil`,
]
}

output {
metrics = [otelcol.processor.filter.containers.input]
traces = [otelcol.processor.filter.containers.input]
}
}

otelcol.processor.filter "containers" {
error_mode = "ignore"

output {
metrics = [otelcol.processor.batch.containers.input]
logs = [otelcol.processor.batch.containers.input]
traces = [otelcol.processor.batch.containers.input]
}
}

otelcol.processor.batch "containers" {
send_batch_size = 16384
send_batch_max_size = 0
timeout = "2s"

output {
metrics = [otelcol.processor.memory_limiter.containers.input]
logs = [otelcol.processor.memory_limiter.containers.input]
traces = [otelcol.processor.memory_limiter.containers.input]
}
}

otelcol.processor.memory_limiter "containers" {
check_interval = "1s"
limit_percentage = 50
spike_limit_percentage = 30

output {
metrics = [otelcol.exporter.prometheus.containers.input]
logs = [otelcol.exporter.loki.containers.input]
traces = [module.file.docker_compose.exports.traces_receiver]
}
}

otelcol.exporter.prometheus "containers" {
forward_to = [module.file.docker_compose.exports.metrics_receiver]
}

otelcol.exporter.loki "containers" {
forward_to = [loki.process.containers.receiver]
}

loki.process "containers" {
stage.tenant {
value = "anonymous"
}

forward_to = [module.file.docker_compose.exports.logs_receiver]
}
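stage.tenant stamps every log entry with the tenant ID that Loki receives as X-Scope-OrgID; here all container logs land in the anonymous tenant. A hedged sketch of reading the tenant from a label instead of hard-coding it:

loki.process "example" {
  // Take the tenant from a "tenant" label on each incoming entry.
  stage.tenant {
    label = "tenant"
  }

  forward_to = [module.file.docker_compose.exports.logs_receiver]
}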
12 changes: 11 additions & 1 deletion docker-compose/common/config/tempo/datasources.yaml
@@ -1,5 +1,15 @@
apiVersion: 1

deleteDatasources:
- name: Metrics
uid: metrics
- name: Logs
uid: logs
- name: Traces
uid: traces
- name: Profiles
uid: profiles

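The new deleteDatasources block makes Grafana drop the previously provisioned datasources before re-applying the definitions below, so renamed or restructured entries, such as the updated trace correlation settings, don't leave stale copies behind on restart.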
datasources:
# Mimir for metrics
- name: Metrics
Expand Down Expand Up @@ -60,7 +70,7 @@ datasources:
datasourceUid: metrics
spanStartTimeShift: '-30m'
spanEndTimeShift: '30m'
- tags: [{ key: 'service.name', value: 'service' }, { key: 'span_name' }, { key: 'http_method' }]
+ tags: [{ key: 'service.name', value: 'service' }]
queries:
- name: '(R) Rate'
query: 'sum(rate(traces_spanmetrics_calls_total{$$__tags}[$$__rate_interval]))'
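The trimmed tags list appears to belong to the tracesToMetrics section of the Tempo datasource: each entry maps a span attribute to a label matcher in the generated metric queries, so dropping span_name and http_method keeps the $$__tags expansion from filtering on labels the span metrics may not carry.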
