Skip to content

Commit

Permalink
feat: make logging to stdout configurable
Browse files Browse the repository at this point in the history
  • Loading branch information
MichaelScofield committed Dec 26, 2023
1 parent 3bd2f79 commit 9c5ef80
Show file tree
Hide file tree
Showing 8 changed files with 45 additions and 18 deletions.
2 changes: 2 additions & 0 deletions config/standalone.example.toml
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,8 @@ parallel_scan_channel_size = 32
# otlp_endpoint = "localhost:4317"
# The percentage of traces that will be sampled and exported. Valid range `[0, 1]`: 1 means all traces are sampled, 0 means no traces are sampled; the default value is 1. Ratios > 1 are treated as 1; fractions < 0 are treated as 0.
# tracing_sample_ratio = 1.0
# Whether to also write logs to stdout (in addition to the log files). Defaults to true.
# append_stdout = true

# The standalone instance exports the metrics it generates itself,
# encoded in the Prometheus remote-write format
Expand Down
17 changes: 12 additions & 5 deletions src/common/telemetry/src/logging.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ pub struct LoggingOptions {
pub enable_otlp_tracing: bool,
pub otlp_endpoint: Option<String>,
pub tracing_sample_ratio: Option<f64>,
pub append_stdout: bool,
}

impl PartialEq for LoggingOptions {
Expand All @@ -52,6 +53,7 @@ impl PartialEq for LoggingOptions {
&& self.enable_otlp_tracing == other.enable_otlp_tracing
&& self.otlp_endpoint == other.otlp_endpoint
&& self.tracing_sample_ratio == other.tracing_sample_ratio
&& self.append_stdout == other.append_stdout
}
}

Expand All @@ -65,6 +67,7 @@ impl Default for LoggingOptions {
enable_otlp_tracing: false,
otlp_endpoint: None,
tracing_sample_ratio: None,
append_stdout: true,
}
}
}
Expand Down Expand Up @@ -129,10 +132,14 @@ pub fn init_global_logging(
// Enable log compatible layer to convert log record to tracing span.
LogTracer::init().expect("log tracer must be valid");

// Stdout layer.
let (stdout_writer, stdout_guard) = tracing_appender::non_blocking(std::io::stdout());
let stdout_logging_layer = Layer::new().with_writer(stdout_writer);
guards.push(stdout_guard);
let stdout_logging_layer = if opts.append_stdout {
let (stdout_writer, stdout_guard) = tracing_appender::non_blocking(std::io::stdout());
guards.push(stdout_guard);

Some(Layer::new().with_writer(stdout_writer))
} else {
None
};

// JSON log layer.
let rolling_appender = RollingFileAppender::new(Rotation::HOURLY, dir, app_name);
Expand Down Expand Up @@ -184,7 +191,7 @@ pub fn init_global_logging(
None
};

let stdout_logging_layer = stdout_logging_layer.with_filter(filter.clone());
let stdout_logging_layer = stdout_logging_layer.map(|x| x.with_filter(filter.clone()));

let file_logging_layer = file_logging_layer.with_filter(filter);

Expand Down
9 changes: 8 additions & 1 deletion src/operator/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -483,6 +483,12 @@ pub enum Error {
location: Location,
source: query::error::Error,
},

#[snafu(display("Invalid table name: {}", table_name))]
InvalidTableName {
table_name: String,
location: Location,
},
}

pub type Result<T> = std::result::Result<T, Error>;
Expand All @@ -507,7 +513,8 @@ impl ErrorExt for Error {
| Error::InvalidPartitionColumns { .. }
| Error::PrepareFileTable { .. }
| Error::InferFileTableSchema { .. }
| Error::SchemaIncompatible { .. } => StatusCode::InvalidArguments,
| Error::SchemaIncompatible { .. }
| Error::InvalidTableName { .. } => StatusCode::InvalidArguments,

Error::TableAlreadyExists { .. } => StatusCode::TableAlreadyExists,

Expand Down
6 changes: 3 additions & 3 deletions src/operator/src/insert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ use common_meta::datanode_manager::{AffectedRows, DatanodeManagerRef};
use common_meta::peer::Peer;
use common_query::Output;
use common_telemetry::tracing_context::TracingContext;
use common_telemetry::{error, info};
use common_telemetry::{debug, error, info};
use datatypes::schema::Schema;
use futures_util::future;
use meter_macros::write_meter;
Expand Down Expand Up @@ -301,7 +301,7 @@ impl Inserter {
let request_schema = req.rows.as_ref().unwrap().schema.as_slice();
let create_table_expr = &mut build_create_table_expr(&table_ref, request_schema)?;

info!(
debug!(
"Table {}.{}.{} does not exist, try create table",
table_ref.catalog, table_ref.schema, table_ref.table,
);
Expand All @@ -320,7 +320,7 @@ impl Inserter {
Ok(())
}
Err(err) => {
error!(
debug!(
"Failed to create table {}.{}.{}: {}",
table_ref.catalog, table_ref.schema, table_ref.table, err
);
Expand Down
8 changes: 4 additions & 4 deletions src/operator/src/statement/ddl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,8 @@ use table::TableRef;
use super::StatementExecutor;
use crate::error::{
self, AlterExprToRequestSnafu, CatalogSnafu, ColumnDataTypeSnafu, ColumnNotFoundSnafu,
DeserializePartitionSnafu, InvalidPartitionColumnsSnafu, ParseSqlSnafu, Result,
SchemaNotFoundSnafu, TableMetadataManagerSnafu, TableNotFoundSnafu,
DeserializePartitionSnafu, InvalidPartitionColumnsSnafu, InvalidTableNameSnafu, ParseSqlSnafu,
Result, SchemaNotFoundSnafu, TableMetadataManagerSnafu, TableNotFoundSnafu,
UnrecognizedTableOptionSnafu,
};
use crate::expr_factory;
Expand Down Expand Up @@ -131,8 +131,8 @@ impl StatementExecutor {

ensure!(
NAME_PATTERN_REG.is_match(&create_table.table_name),
error::UnexpectedSnafu {
violated: format!("Invalid table name: {}", create_table.table_name)
InvalidTableNameSnafu {
table_name: create_table.table_name.clone(),
}
);

Expand Down
8 changes: 6 additions & 2 deletions src/servers/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use catalog;
use common_error::ext::{BoxedError, ErrorExt};
use common_error::status_code::StatusCode;
use common_macro::stack_trace_debug;
use common_telemetry::logging;
use common_telemetry::{debug, error};
use datatypes::prelude::ConcreteDataType;
use query::parser::PromQuery;
use serde_json::json;
Expand Down Expand Up @@ -620,7 +620,11 @@ impl IntoResponse for Error {
| Error::InvalidQuery { .. }
| Error::TimePrecision { .. } => HttpStatusCode::BAD_REQUEST,
_ => {
logging::error!(self; "Failed to handle HTTP request");
if self.status_code().should_log_error() {
error!(self; "Failed to handle HTTP request: ");
} else {
debug!("Failed to handle HTTP request: {self}");
}

HttpStatusCode::INTERNAL_SERVER_ERROR
}
Expand Down
8 changes: 6 additions & 2 deletions src/servers/src/http.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ use common_error::status_code::StatusCode;
use common_query::Output;
use common_recordbatch::{util, RecordBatch};
use common_telemetry::logging::{debug, error, info};
use common_telemetry::tracing::Level;
use common_time::timestamp::TimeUnit;
use common_time::Timestamp;
use datatypes::data_type::DataType;
Expand All @@ -61,7 +62,7 @@ use tokio::sync::oneshot::{self, Sender};
use tokio::sync::Mutex;
use tower::timeout::TimeoutLayer;
use tower::ServiceBuilder;
use tower_http::trace::TraceLayer;
use tower_http::trace::{DefaultOnFailure, TraceLayer};

use self::authorize::AuthState;
use crate::configurator::ConfiguratorRef;
Expand Down Expand Up @@ -710,7 +711,10 @@ impl HttpServer {
.layer(
ServiceBuilder::new()
.layer(HandleErrorLayer::new(handle_error))
.layer(TraceLayer::new_for_http())
.layer(
TraceLayer::new_for_http()
.on_failure(DefaultOnFailure::new().level(Level::DEBUG)),
)
.layer(TimeoutLayer::new(self.options.timeout))
.layer(DefaultBodyLimit::max(
self.options
Expand Down
5 changes: 4 additions & 1 deletion tests-integration/tests/http.rs
Original file line number Diff line number Diff line change
Expand Up @@ -747,6 +747,7 @@ enable = true
[frontend.logging]
enable_otlp_tracing = false
append_stdout = true
[frontend.datanode.client]
timeout = "10s"
Expand Down Expand Up @@ -815,6 +816,7 @@ parallel_scan_channel_size = 32
[datanode.logging]
enable_otlp_tracing = false
append_stdout = true
[datanode.export_metrics]
enable = false
Expand All @@ -824,7 +826,8 @@ write_interval = "30s"
[datanode.export_metrics.headers]
[logging]
enable_otlp_tracing = false"#,
enable_otlp_tracing = false
append_stdout = true"#,
store_type,
);
let body_text = drop_lines_with_inconsistent_results(res_get.text().await);
Expand Down

0 comments on commit 9c5ef80

Please sign in to comment.