fix: add header and rename to export metrics
Taylor-lagrange committed Dec 21, 2023
1 parent 70c2b45 commit 663d129
Showing 18 changed files with 144 additions and 89 deletions.
14 changes: 8 additions & 6 deletions config/datanode.example.toml
@@ -102,16 +102,18 @@ parallel_scan_channel_size = 32
# dir = "/tmp/greptimedb/logs"
# level = "info"

# datanode export the metrics generated by itself
# Datanode export the metrics generated by itself
# encoded to Prometheus remote-write format
# and send to Prometheus remote-write compatible receiver (e.g. send to `greptimedb` itself)
# This is only used for `greptimedb` to export its own metric internally. Please see `logging` option for normal export of metric.
# [system_metric]
# whether enable export system_metric, default is false
# This is only used for `greptimedb` to export its own metrics internally. It's different from prometheus scrape.
# [export_metrics]
# whether enable export metrics, default is false
# enable = false
# The url of metric export endpoint, default is `greptimedb` default frontend endpoint
# The url of metrics export endpoint, default is `frontend` default gRPC endpoint.
# endpoint = "127.0.0.1:4000"
# The database name of exported metrics stores, user needs to specify a valid database
# db = ""
# The interval of export metric
# The interval of export metrics
# write_interval = "30s"
# Http headers of Prometheus remote-write carry
# headers = {}
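For reference, a filled-in version of the renamed section might look like the sketch below. The database name and header value are illustrative assumptions rather than part of this commit, and the same block applies to the frontend, metasrv, and standalone example configs that follow.

```toml
# Hypothetical, uncommented [export_metrics] block (values are illustrative assumptions).
[export_metrics]
# Push this node's own metrics via Prometheus remote-write.
enable = true
# Any Prometheus remote-write compatible receiver; here the frontend of the same deployment.
endpoint = "127.0.0.1:4000"
# Destination database for the exported metrics; it must already exist (assumed name).
db = "greptime_metrics"
# How often metrics are pushed.
write_interval = "30s"
# Extra HTTP headers carried by each remote-write request (the "header" this commit adds), e.g. for auth.
headers = { Authorization = "Basic <credentials>" }
```

Whether `endpoint` should point at the frontend's HTTP or gRPC port is not obvious from this diff alone, so `127.0.0.1:4000` is simply copied from the commented default.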
14 changes: 8 additions & 6 deletions config/frontend.example.toml
@@ -78,16 +78,18 @@ timeout = "10s"
connect_timeout = "10s"
tcp_nodelay = true

# frontend export the metrics generated by itself
# Frontend export the metrics generated by itself
# encoded to Prometheus remote-write format
# and send to Prometheus remote-write compatible receiver (e.g. send to `greptimedb` itself)
# This is only used for `greptimedb` to export its own metric internally. Please see `logging` option for normal export of metric.
# [system_metric]
# whether enable export system_metric, default is false
# This is only used for `greptimedb` to export its own metrics internally. It's different from prometheus scrape.
# [export_metrics]
# whether enable export metrics, default is false
# enable = false
# The url of metric export endpoint, default is `greptimedb` default frontend endpoint
# The url of metrics export endpoint, default is `frontend` default gRPC endpoint.
# endpoint = "127.0.0.1:4000"
# The database name of exported metrics stores, user needs to specify a valid database
# db = ""
# The interval of export metric
# The interval of export metrics
# write_interval = "30s"
# Http headers of Prometheus remote-write carry
# headers = {}
14 changes: 8 additions & 6 deletions config/metasrv.example.toml
@@ -67,16 +67,18 @@ provider = "raft_engine"
# Expected number of replicas of each partition.
# replication_factor = 3

# metasrv export the metrics generated by itself
# Metasrv export the metrics generated by itself
# encoded to Prometheus remote-write format
# and send to Prometheus remote-write compatible receiver (e.g. send to `greptimedb` itself)
# This is only used for `greptimedb` to export its own metric internally. Please see `logging` option for normal export of metric.
# [system_metric]
# whether enable export system_metric, default is false
# This is only used for `greptimedb` to export its own metrics internally. It's different from prometheus scrape.
# [export_metrics]
# whether enable export metrics, default is false
# enable = false
# The url of metric export endpoint, default is `greptimedb` default frontend endpoint
# The url of metrics export endpoint, default is `frontend` default gRPC endpoint.
# endpoint = "127.0.0.1:4000"
# The database name of exported metrics stores, user needs to specify a valid database
# db = ""
# The interval of export metric
# The interval of export metrics
# write_interval = "30s"
# Http headers of Prometheus remote-write carry
# headers = {}
14 changes: 8 additions & 6 deletions config/standalone.example.toml
@@ -178,16 +178,18 @@ parallel_scan_channel_size = 32
# The percentage of tracing will be sampled and exported. Valid range `[0, 1]`, 1 means all traces are sampled, 0 means all traces are not sampled, the default value is 1. ratio > 1 are treated as 1. Fractions < 0 are treated as 0
# tracing_sample_ratio = 1.0

# standalone export the metrics generated by itself
# Standalone export the metrics generated by itself
# encoded to Prometheus remote-write format
# and send to Prometheus remote-write compatible receiver (e.g. send to `greptimedb` itself)
# This is only used for `greptimedb` to export its own metric internally. Please see `logging` option for normal export of metric.
# [system_metric]
# whether enable export system_metric, default is false
# This is only used for `greptimedb` to export its own metrics internally. It's different from prometheus scrape.
# [export_metrics]
# whether enable export metrics, default is false
# enable = false
# The url of metric export endpoint, default is `greptimedb` default frontend endpoint
# The url of metrics export endpoint, default is `frontend` default gRPC endpoint.
# endpoint = "127.0.0.1:4000"
# The database name of exported metrics stores, user needs to specify a valid database
# db = ""
# The interval of export metric
# The interval of export metrics
# write_interval = "30s"
# Http headers of Prometheus remote-write carry
# headers = {}
2 changes: 1 addition & 1 deletion src/cmd/src/frontend.rs
@@ -250,7 +250,7 @@ impl StartCommand {
.context(StartFrontendSnafu)?;

instance
.build_system_metric_task(&opts.system_metric)
.build_export_metrics_task(&opts.export_metrics)
.context(StartFrontendSnafu)?;

instance
12 changes: 6 additions & 6 deletions src/cmd/src/standalone.rs
@@ -44,8 +44,8 @@ use frontend::service_config::{
};
use mito2::config::MitoConfig;
use serde::{Deserialize, Serialize};
use servers::export_metrics::ExportMetricsOption;
use servers::http::HttpOptions;
use servers::system_metric::SystemMetricOption;
use servers::tls::{TlsMode, TlsOption};
use servers::Mode;
use snafu::ResultExt;
@@ -113,7 +113,7 @@ pub struct StandaloneOptions {
pub user_provider: Option<String>,
/// Options for different store engines.
pub region_engine: Vec<RegionEngineConfig>,
pub system_metric: SystemMetricOption,
pub export_metrics: ExportMetricsOption,
}

impl Default for StandaloneOptions {
@@ -133,7 +133,7 @@ impl Default for StandaloneOptions {
metadata_store: KvBackendConfig::default(),
procedure: ProcedureConfig::default(),
logging: LoggingOptions::default(),
system_metric: SystemMetricOption::default(),
export_metrics: ExportMetricsOption::default(),
user_provider: None,
region_engine: vec![
RegionEngineConfig::Mito(MitoConfig::default()),
@@ -157,8 +157,8 @@ impl StandaloneOptions {
meta_client: None,
logging: self.logging,
user_provider: self.user_provider,
// Handle the system metric task run by standalone to frontend for execution
system_metric: self.system_metric,
// Handle the export metrics task run by standalone to frontend for execution
export_metrics: self.export_metrics,
..Default::default()
}
}
@@ -405,7 +405,7 @@ impl StartCommand {
.context(StartFrontendSnafu)?;

frontend
.build_system_metric_task(&opts.frontend.system_metric)
.build_export_metrics_task(&opts.frontend.export_metrics)
.context(StartFrontendSnafu)?;

frontend
6 changes: 3 additions & 3 deletions src/datanode/src/config.rs
@@ -28,9 +28,9 @@ use meta_client::MetaClientOptions;
use mito2::config::MitoConfig;
use secrecy::SecretString;
use serde::{Deserialize, Serialize};
use servers::export_metrics::ExportMetricsOption;
use servers::heartbeat_options::HeartbeatOptions;
use servers::http::HttpOptions;
use servers::system_metric::SystemMetricOption;
use servers::Mode;

pub const DEFAULT_OBJECT_STORE_CACHE_SIZE: ReadableSize = ReadableSize::mb(256);
@@ -242,7 +242,7 @@ pub struct DatanodeOptions {
pub region_engine: Vec<RegionEngineConfig>,
pub logging: LoggingOptions,
pub enable_telemetry: bool,
pub system_metric: SystemMetricOption,
pub export_metrics: ExportMetricsOption,
}

impl Default for DatanodeOptions {
@@ -267,7 +267,7 @@ impl Default for DatanodeOptions {
logging: LoggingOptions::default(),
heartbeat: HeartbeatOptions::datanode_default(),
enable_telemetry: true,
system_metric: SystemMetricOption::default(),
export_metrics: ExportMetricsOption::default(),
}
}
}
12 changes: 6 additions & 6 deletions src/datanode/src/datanode.rs
@@ -39,11 +39,11 @@ use mito2::engine::MitoEngine;
use object_store::manager::{ObjectStoreManager, ObjectStoreManagerRef};
use object_store::util::normalize_dir;
use query::QueryEngineFactory;
use servers::export_metrics::ExportMetricsTask;
use servers::grpc::{GrpcServer, GrpcServerConfig};
use servers::http::HttpServerBuilder;
use servers::metrics_handler::MetricsHandler;
use servers::server::{start_server, ServerHandler, ServerHandlers};
use servers::system_metric::SystemMetricTask;
use servers::Mode;
use snafu::{OptionExt, ResultExt};
use store_api::logstore::LogStore;
@@ -82,7 +82,7 @@ pub struct Datanode {
greptimedb_telemetry_task: Arc<GreptimeDBTelemetryTask>,
leases_notifier: Option<Arc<Notify>>,
plugins: Plugins,
system_metric_task: Option<SystemMetricTask>,
export_metrics_task: Option<ExportMetricsTask>,
}

impl Datanode {
@@ -94,7 +94,7 @@ impl Datanode {

self.start_telemetry();

if let Some(t) = self.system_metric_task.as_ref() {
if let Some(t) = self.export_metrics_task.as_ref() {
t.start()
}

@@ -265,8 +265,8 @@ impl DatanodeBuilder {
None
};

let system_metric_task =
SystemMetricTask::try_new(&self.opts.system_metric, Some(&self.plugins))
let export_metrics_task =
ExportMetricsTask::try_new(&self.opts.export_metrics, Some(&self.plugins))
.context(StartServerSnafu)?;

Ok(Datanode {
@@ -277,7 +277,7 @@
region_event_receiver,
leases_notifier,
plugins: self.plugins.clone(),
system_metric_task,
export_metrics_task,
})
}

6 changes: 3 additions & 3 deletions src/frontend/src/frontend.rs
@@ -15,9 +15,9 @@
use common_telemetry::logging::LoggingOptions;
use meta_client::MetaClientOptions;
use serde::{Deserialize, Serialize};
use servers::export_metrics::ExportMetricsOption;
use servers::heartbeat_options::HeartbeatOptions;
use servers::http::HttpOptions;
use servers::system_metric::SystemMetricOption;
use servers::Mode;
use snafu::prelude::*;

@@ -45,7 +45,7 @@ pub struct FrontendOptions {
pub logging: LoggingOptions,
pub datanode: DatanodeOptions,
pub user_provider: Option<String>,
pub system_metric: SystemMetricOption,
pub export_metrics: ExportMetricsOption,
}

impl Default for FrontendOptions {
@@ -66,7 +66,7 @@ impl Default for FrontendOptions {
logging: LoggingOptions::default(),
datanode: DatanodeOptions::default(),
user_provider: None,
system_metric: SystemMetricOption::default(),
export_metrics: ExportMetricsOption::default(),
}
}
}
12 changes: 6 additions & 6 deletions src/frontend/src/instance.rs
@@ -55,6 +55,7 @@ use query::QueryEngineRef;
use raft_engine::{Config, ReadableSize, RecoveryMode};
use servers::error as server_error;
use servers::error::{AuthSnafu, ExecuteQuerySnafu, ParsePromQLSnafu};
use servers::export_metrics::{ExportMetricsOption, ExportMetricsTask};
use servers::interceptor::{
PromQueryInterceptor, PromQueryInterceptorRef, SqlQueryInterceptor, SqlQueryInterceptorRef,
};
@@ -66,7 +67,6 @@ use servers::query_handler::{
PromStoreProtocolHandler, ScriptHandler,
};
use servers::server::{start_server, ServerHandlers};
use servers::system_metric::{SystemMetricOption, SystemMetricTask};
use session::context::QueryContextRef;
use snafu::prelude::*;
use sql::dialect::Dialect;
@@ -118,7 +118,7 @@ pub struct Instance {
heartbeat_task: Option<HeartbeatTask>,
inserter: InserterRef,
deleter: DeleterRef,
system_metric_task: Option<SystemMetricTask>,
export_metrics_task: Option<ExportMetricsTask>,
}

impl Instance {
@@ -196,9 +196,9 @@ impl Instance {
Ok(())
}

pub fn build_system_metric_task(&mut self, opts: &SystemMetricOption) -> Result<()> {
self.system_metric_task =
SystemMetricTask::try_new(opts, Some(&self.plugins)).context(StartServerSnafu)?;
pub fn build_export_metrics_task(&mut self, opts: &ExportMetricsOption) -> Result<()> {
self.export_metrics_task =
ExportMetricsTask::try_new(opts, Some(&self.plugins)).context(StartServerSnafu)?;
Ok(())
}

@@ -231,7 +231,7 @@ impl FrontendInstance for Instance {

self.script_executor.start(self)?;

if let Some(t) = self.system_metric_task.as_ref() {
if let Some(t) = self.export_metrics_task.as_ref() {
t.start()
}

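Across the files above, the rename keeps the same build-then-start pattern: an ExportMetricsTask is optionally constructed from an ExportMetricsOption and started next to the other servers. The sketch below is a self-contained approximation of that flow using simplified stand-in types; it is not the real `servers::export_metrics` API, and the field names and error handling are assumptions.

```rust
// Minimal sketch of the build-then-start pattern used by datanode, frontend,
// standalone and metasrv in this commit. Types are simplified stand-ins.

#[derive(Default, Clone)]
struct ExportMetricsOption {
    enable: bool,
    endpoint: String,
    db: String,
    write_interval_secs: u64, // simplified; the real option uses a duration string like "30s"
}

struct ExportMetricsTask {
    opts: ExportMetricsOption,
}

impl ExportMetricsTask {
    // Returns `None` when exporting is disabled, mirroring the
    // `Option<ExportMetricsTask>` fields added to Datanode / Instance / MetaSrvInstance.
    fn try_new(opts: &ExportMetricsOption) -> Result<Option<Self>, String> {
        if !opts.enable {
            return Ok(None);
        }
        if opts.db.is_empty() {
            return Err("`db` must name a valid database when export_metrics is enabled".to_string());
        }
        Ok(Some(Self { opts: opts.clone() }))
    }

    fn start(&self) {
        // The real task spawns a background loop that encodes metrics in Prometheus
        // remote-write format and pushes them (with the configured headers) every interval.
        println!(
            "exporting metrics to {} (db `{}`) every {}s",
            self.opts.endpoint, self.opts.db, self.opts.write_interval_secs
        );
    }
}

fn main() -> Result<(), String> {
    let opts = ExportMetricsOption {
        enable: true,
        endpoint: "127.0.0.1:4000".to_string(),
        db: "greptime_metrics".to_string(),
        write_interval_secs: 30,
    };

    // Roughly what `instance.build_export_metrics_task(&opts.export_metrics)?` does ...
    let export_metrics_task = ExportMetricsTask::try_new(&opts)?;

    // ... followed by the conditional start in `Instance::start` / `Datanode::start`.
    if let Some(t) = export_metrics_task.as_ref() {
        t.start();
    }
    Ok(())
}
```

In the actual code the constructor also receives the plugins and returns the servers error type, which callers map with `.context(StartServerSnafu)` or `.context(InitExportMetricsTaskSnafu)` as the hunks above show.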
2 changes: 1 addition & 1 deletion src/frontend/src/instance/builder.rs
@@ -144,7 +144,7 @@ impl FrontendBuilder {
heartbeat_task: self.heartbeat_task,
inserter,
deleter,
system_metric_task: None,
export_metrics_task: None,
})
}
}
14 changes: 7 additions & 7 deletions src/meta-srv/src/bootstrap.rs
@@ -26,18 +26,18 @@ use common_meta::kv_backend::{KvBackendRef, ResettableKvBackendRef};
use common_telemetry::info;
use etcd_client::Client;
use servers::configurator::ConfiguratorRef;
use servers::export_metrics::ExportMetricsTask;
use servers::http::{HttpServer, HttpServerBuilder};
use servers::metrics_handler::MetricsHandler;
use servers::server::Server;
use servers::system_metric::SystemMetricTask;
use snafu::ResultExt;
use tokio::net::TcpListener;
use tokio::select;
use tokio::sync::mpsc::{self, Receiver, Sender};
use tonic::transport::server::{Router, TcpIncoming};

use crate::election::etcd::EtcdElection;
use crate::error::InitRemoteWriteMetricTaskSnafu;
use crate::error::InitExportMetricsTaskSnafu;
use crate::lock::etcd::EtcdLock;
use crate::lock::memory::MemLock;
use crate::metasrv::builder::MetaSrvBuilder;
@@ -60,7 +60,7 @@ pub struct MetaSrvInstance {

plugins: Plugins,

system_metric_task: Option<SystemMetricTask>,
export_metrics_task: Option<ExportMetricsTask>,
}

impl MetaSrvInstance {
@@ -77,22 +77,22 @@
);
// put meta_srv into plugins for later use
plugins.insert::<Arc<MetaSrv>>(Arc::new(meta_srv.clone()));
let system_metric_task = SystemMetricTask::try_new(&opts.system_metric, Some(&plugins))
.context(InitRemoteWriteMetricTaskSnafu)?;
let export_metrics_task = ExportMetricsTask::try_new(&opts.export_metrics, Some(&plugins))
.context(InitExportMetricsTaskSnafu)?;
Ok(MetaSrvInstance {
meta_srv,
http_srv,
opts,
signal_sender: None,
plugins,
system_metric_task,
export_metrics_task,
})
}

pub async fn start(&mut self) -> Result<()> {
self.meta_srv.try_start().await?;

if let Some(t) = self.system_metric_task.as_ref() {
if let Some(t) = self.export_metrics_task.as_ref() {
t.start()
}

6 changes: 3 additions & 3 deletions src/meta-srv/src/error.rs
@@ -196,8 +196,8 @@ pub enum Error {
location: Location,
source: servers::error::Error,
},
#[snafu(display("Failed to init remote write metric task"))]
InitRemoteWriteMetricTask {
#[snafu(display("Failed to init export metrics task"))]
InitExportMetricsTask {
location: Location,
source: servers::error::Error,
},
@@ -656,7 +656,7 @@ impl ErrorExt for Error {
| Error::ParseNum { .. }
| Error::UnsupportedSelectorType { .. }
| Error::InvalidArguments { .. }
| Error::InitRemoteWriteMetricTask { .. }
| Error::InitExportMetricsTask { .. }
| Error::InvalidHeartbeatRequest { .. }
| Error::TooManyPartitions { .. } => StatusCode::InvalidArguments,
Error::LeaseKeyFromUtf8 { .. }