Add blanket implementations of Recorder for some pointer-like types #507

Closed · wants to merge 7 commits · changes from all commits
10 changes: 8 additions & 2 deletions .github/workflows/ci.yml
@@ -44,16 +44,22 @@ jobs:
name: Test ${{ matrix.rust_version }}
runs-on: ubuntu-latest
strategy:
# 1.70 is the MSRV for the project, which currently does not match the version specified in
# the rust-toolchain.toml file as metrics-observer requires 1.74 to build. See
# https://github.com/metrics-rs/metrics/pull/505#discussion_r1724092556 for more information.
matrix:
rust_version: ['stable', 'nightly']
rust_version: ['stable', 'nightly', '1.70']
include:
- rust_version: '1.70'
exclude-packages: '--exclude metrics-observer'
steps:
- uses: actions/checkout@v3
- name: Install Protobuf Compiler
run: sudo apt-get install protobuf-compiler
- name: Install Rust ${{ matrix.rust_version }}
run: rustup install ${{ matrix.rust_version }}
- name: Run Tests
run: cargo +${{ matrix.rust_version }} test --all-features --workspace
run: cargo +${{ matrix.rust_version }} test --all-features --workspace ${{ matrix.exclude-packages }}
docs:
runs-on: ubuntu-latest
env:
1 change: 1 addition & 0 deletions clippy.toml
@@ -1 +1,2 @@
too-many-lines-threshold = 150
ignore-interior-mutability = ["metrics::key::Key"]
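
The new `ignore-interior-mutability` entry tells clippy lints such as `mutable_key_type` to treat `metrics::key::Key` as if it contained no interior mutability. A standalone sketch of the situation that lint guards against, using a hypothetical `CachedKey` type rather than `Key` itself:

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use std::sync::atomic::{AtomicU64, Ordering};

// Hypothetical key type (not `metrics::key::Key`): the atomic field gives it
// interior mutability, but `Eq`/`Hash` deliberately ignore that field, so
// using it as a set key is sound. Clippy's `mutable_key_type` lint cannot see
// that on its own, which is what `ignore-interior-mutability` is for.
struct CachedKey {
    name: String,
    cached_hash: AtomicU64,
}

impl PartialEq for CachedKey {
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name
    }
}

impl Eq for CachedKey {}

impl Hash for CachedKey {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.name.hash(state);
    }
}

fn main() {
    let mut keys = HashSet::new();
    keys.insert(CachedKey { name: "requests".into(), cached_hash: AtomicU64::new(0) });

    // Mutating the atomic does not perturb `Eq`/`Hash`, so lookups stay correct.
    if let Some(key) = keys.iter().next() {
        key.cached_hash.store(42, Ordering::Relaxed);
    }
    assert!(keys.contains(&CachedKey { name: "requests".into(), cached_hash: AtomicU64::new(0) }));
}
```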
1 change: 1 addition & 0 deletions metrics-exporter-prometheus/src/common.rs
@@ -80,6 +80,7 @@ pub enum BuildError {
ZeroBucketDuration,
}

#[derive(Debug)]
pub struct Snapshot {
pub counters: HashMap<String, HashMap<Vec<String>, u64>>,
pub gauges: HashMap<String, HashMap<Vec<String>, f64>>,
25 changes: 17 additions & 8 deletions metrics-exporter-prometheus/src/distribution.rs
@@ -15,7 +15,7 @@ const DEFAULT_SUMMARY_BUCKET_COUNT: NonZeroU32 = match NonZeroU32::new(3) {
const DEFAULT_SUMMARY_BUCKET_DURATION: Duration = Duration::from_secs(20);

/// Distribution type.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub enum Distribution {
/// A Prometheus histogram.
///
@@ -33,7 +33,10 @@ pub enum Distribution {

impl Distribution {
/// Creates a histogram distribution.
#[warn(clippy::missing_panics_doc)]
///
/// # Panics
///
/// Panics if `buckets` is empty.
pub fn new_histogram(buckets: &[f64]) -> Distribution {
let hist = Histogram::new(buckets).expect("buckets should never be empty");
Distribution::Histogram(hist)
@@ -134,14 +137,14 @@ impl DistributionBuilder {
}
}

#[derive(Clone)]
#[derive(Clone, Debug)]
struct Bucket {
begin: Instant,
summary: Summary,
}

/// A `RollingSummary` manages a list of [Summary] so that old results can be expired.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct RollingSummary {
// Buckets are ordered with the latest buckets first. The buckets are kept in alignment based
// on the instant of the first added bucket and the bucket_duration. There may be gaps in the
@@ -299,8 +302,11 @@ mod tests {
let snapshot = summary.snapshot(clock.now());

assert_eq!(0, snapshot.count());
assert_eq!(f64::INFINITY, snapshot.min());
assert_eq!(f64::NEG_INFINITY, snapshot.max());
#[allow(clippy::float_cmp)]
{
assert_eq!(f64::INFINITY, snapshot.min());
assert_eq!(f64::NEG_INFINITY, snapshot.max());
}
assert_eq!(None, snapshot.quantile(0.5));
}

@@ -318,8 +324,11 @@

let snapshot = summary.snapshot(clock.now());

assert_eq!(42.0, snapshot.min());
assert_eq!(42.0, snapshot.max());
#[allow(clippy::float_cmp)]
{
assert_eq!(42.0, snapshot.min());
assert_eq!(42.0, snapshot.max());
}
// 42 +/- (42 * 0.0001)
assert!(Some(41.9958) < snapshot.quantile(0.5));
assert!(Some(42.0042) > snapshot.quantile(0.5));
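
Two of the changes above are lint-driven: the `#[warn(clippy::missing_panics_doc)]` attribute is replaced by an explicit `# Panics` section in the doc comment, and the exact float assertions in the tests are wrapped in a block-scoped `#[allow(clippy::float_cmp)]`. A minimal, self-contained sketch of both patterns (an illustrative `mean` function, not the crate's code):

```rust
/// Returns the arithmetic mean of `values`.
///
/// # Panics
///
/// Panics if `values` is empty.
pub fn mean(values: &[f64]) -> f64 {
    assert!(!values.is_empty(), "values should never be empty");
    values.iter().sum::<f64>() / values.len() as f64
}

#[cfg(test)]
mod tests {
    use super::mean;

    #[test]
    fn single_value() {
        // Exact float comparison is intentional here, so the lint is allowed
        // for this block only rather than for the whole test module.
        #[allow(clippy::float_cmp)]
        {
            assert_eq!(42.0, mean(&[42.0]));
        }
    }
}
```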
6 changes: 3 additions & 3 deletions metrics-exporter-prometheus/src/exporter/builder.rs
@@ -33,6 +33,7 @@ use super::ExporterConfig;
use super::ExporterFuture;

/// Builder for creating and installing a Prometheus recorder/exporter.
#[derive(Debug)]
pub struct PrometheusBuilder {
#[cfg_attr(not(any(feature = "http-listener", feature = "push-gateway")), allow(dead_code))]
exporter_config: ExporterConfig,
@@ -560,8 +561,7 @@ mod tests {
gauge1.set(-3.14);
let rendered = handle.render();
let expected_gauge = format!(
"{}# TYPE basic_gauge gauge\nbasic_gauge{{wutang=\"forever\"}} -3.14\n\n",
expected_counter
"{expected_counter}# TYPE basic_gauge gauge\nbasic_gauge{{wutang=\"forever\"}} -3.14\n\n",
);

assert_eq!(rendered, expected_gauge);
@@ -579,7 +579,7 @@
"basic_histogram_count 1\n",
"\n"
);
let expected_histogram = format!("{}{}", expected_gauge, histogram_data);
let expected_histogram = format!("{expected_gauge}{histogram_data}");

assert_eq!(rendered, expected_histogram);
}
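
The test changes above switch positional `format!` arguments to inline captured identifiers, which have been stable since Rust 1.58 and so fit within the 1.70 MSRV pinned in CI. A trivial sketch showing the two forms produce identical output (hypothetical strings, not the exporter's real expectations):

```rust
fn main() {
    let expected_counter = "# TYPE basic_counter counter\n";
    let histogram_data = "# TYPE basic_histogram histogram\n";

    // Positional arguments and inline captures render identically; the
    // captured form keeps the format string readable as it grows.
    let positional = format!("{}{}", expected_counter, histogram_data);
    let captured = format!("{expected_counter}{histogram_data}");
    assert_eq!(positional, captured);
}
```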
9 changes: 5 additions & 4 deletions metrics-exporter-prometheus/src/exporter/http_listener.rs
@@ -32,6 +32,7 @@ enum ListenerType {
}

/// Error type for HTTP listening.
#[derive(Debug)]
pub enum HttpListeningError {
Hyper(hyper::Error),
Io(std::io::Error),
@@ -59,11 +60,11 @@ impl HttpListeningExporter {
continue;
}
};
self.process_tcp_stream(stream).await;
self.process_tcp_stream(stream);
}
}

async fn process_tcp_stream(&self, stream: TcpStream) {
fn process_tcp_stream(&self, stream: TcpStream) {
let is_allowed = self.check_tcp_allowed(&stream);
let handle = self.handle.clone();
let service = service_fn(move |req: Request<body::Incoming>| {
@@ -107,12 +108,12 @@ impl HttpListeningExporter {
continue;
}
};
self.process_uds_stream(stream).await;
self.process_uds_stream(stream);
}
}

#[cfg(feature = "uds-listener")]
async fn process_uds_stream(&self, stream: UnixStream) {
fn process_uds_stream(&self, stream: UnixStream) {
let handle = self.handle.clone();
let service = service_fn(move |req: Request<body::Incoming>| {
let handle = handle.clone();
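
In the diff above, `process_tcp_stream` and `process_uds_stream` no longer await anything themselves, so the `async` keyword and the `.await` at each call site are dropped, which also keeps `clippy::unused_async` quiet. A minimal sketch of the same shape, assuming only the tokio crate (with its `net`, `rt`, and `macros` features); this is not the exporter's real code:

```rust
use tokio::net::{TcpListener, TcpStream};

// Illustration only: a function whose job is to hand the accepted connection
// off to a spawned task. It never awaits, so keeping it `async` would only
// trip `clippy::unused_async`.
fn process_stream(stream: TcpStream) {
    tokio::spawn(async move {
        // The connection is served on the spawned task; the caller moves on.
        drop(stream);
    });
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let listener = TcpListener::bind("127.0.0.1:0").await?;
    let addr = listener.local_addr()?;

    // Connect to ourselves so `accept` returns immediately in this demo.
    let _client = TcpStream::connect(addr).await?;
    let (stream, _peer) = listener.accept().await?;

    // No `.await` at the call site any more.
    process_stream(stream);
    Ok(())
}
```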
5 changes: 3 additions & 2 deletions metrics-exporter-prometheus/src/exporter/mod.rs
@@ -14,6 +14,7 @@ use hyper::Uri;

/// Error types possible from an exporter
#[cfg(any(feature = "http-listener", feature = "push-gateway"))]
#[derive(Debug)]
pub enum ExporterError {
#[cfg(feature = "http-listener")]
HttpListener(HttpListeningError),
@@ -24,14 +25,14 @@ pub enum ExporterError {
pub type ExporterFuture = Pin<Box<dyn Future<Output = Result<(), ExporterError>> + Send + 'static>>;

#[cfg(feature = "http-listener")]
#[derive(Clone)]
#[derive(Clone, Debug)]
enum ListenDestination {
Tcp(SocketAddr),
#[cfg(feature = "uds-listener")]
Uds(std::path::PathBuf),
}

#[derive(Clone)]
#[derive(Clone, Debug)]
enum ExporterConfig {
// Run an HTTP listener on the given `listen_address`.
#[cfg(feature = "http-listener")]
2 changes: 1 addition & 1 deletion metrics-exporter-prometheus/src/exporter/push_gateway.rs
@@ -94,7 +94,7 @@ fn basic_auth(username: &str, password: Option<&str>) -> HeaderValue {
header
}

#[cfg(all(test))]
#[cfg(test)]
mod tests {
use super::basic_auth;

78 changes: 40 additions & 38 deletions metrics-exporter-prometheus/src/formatting.rs
@@ -110,35 +110,37 @@ pub fn write_metric_line<T, T2>(
/// [data model]: https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels
pub fn sanitize_metric_name(name: &str) -> String {
// The first character must be [a-zA-Z_:], and all subsequent characters must be [a-zA-Z0-9_:].
let mut out = String::with_capacity(name.len());
let mut is_invalid: fn(char) -> bool = invalid_metric_name_start_character;
for c in name.chars() {
if is_invalid(c) {
out.push('_');
} else {
out.push(c);
}
is_invalid = invalid_metric_name_character;
}
out
name.chars()
.enumerate()
.map(|(i, c)| {
if i == 0 && valid_metric_name_start_character(c)
|| i != 0 && valid_metric_name_character(c)
{
c
} else {
'_'
}
})
.collect()
}

/// Sanitizes a label key to be valid under the Prometheus [data model].
///
/// [data model]: https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels
pub fn sanitize_label_key(key: &str) -> String {
// The first character must be [a-zA-Z_], and all subsequent characters must be [a-zA-Z0-9_].
let mut out = String::with_capacity(key.len());
let mut is_invalid: fn(char) -> bool = invalid_label_key_start_character;
for c in key.chars() {
if is_invalid(c) {
out.push('_');
} else {
out.push(c);
}
is_invalid = invalid_label_key_character;
}
out
key.chars()
.enumerate()
.map(|(i, c)| {
if i == 0 && valid_label_key_start_character(c)
|| i != 0 && valid_label_key_character(c)
{
c
} else {
'_'
}
})
.collect()
}

/// Sanitizes a label value to be valid under the Prometheus [data model].
@@ -209,35 +211,35 @@ fn sanitize_label_value_or_description(value: &str, is_desc: bool) -> String {
}

#[inline]
fn invalid_metric_name_start_character(c: char) -> bool {
fn valid_metric_name_start_character(c: char) -> bool {
// Essentially, needs to match the regex pattern of [a-zA-Z_:].
!(c.is_ascii_alphabetic() || c == '_' || c == ':')
c.is_ascii_alphabetic() || c == '_' || c == ':'
}

#[inline]
fn invalid_metric_name_character(c: char) -> bool {
fn valid_metric_name_character(c: char) -> bool {
// Essentially, needs to match the regex pattern of [a-zA-Z0-9_:].
!(c.is_ascii_alphanumeric() || c == '_' || c == ':')
c.is_ascii_alphanumeric() || c == '_' || c == ':'
}

#[inline]
fn invalid_label_key_start_character(c: char) -> bool {
fn valid_label_key_start_character(c: char) -> bool {
// Essentially, needs to match the regex pattern of [a-zA-Z_].
!(c.is_ascii_alphabetic() || c == '_')
c.is_ascii_alphabetic() || c == '_'
}

#[inline]
fn invalid_label_key_character(c: char) -> bool {
fn valid_label_key_character(c: char) -> bool {
// Essentially, needs to match the regex pattern of [a-zA-Z0-9_].
!(c.is_ascii_alphanumeric() || c == '_')
c.is_ascii_alphanumeric() || c == '_'
}

#[cfg(test)]
mod tests {
use crate::formatting::{
invalid_label_key_character, invalid_label_key_start_character,
invalid_metric_name_character, invalid_metric_name_start_character, sanitize_description,
sanitize_label_key, sanitize_label_value, sanitize_metric_name,
sanitize_description, sanitize_label_key, sanitize_label_value, sanitize_metric_name,
valid_label_key_character, valid_label_key_start_character, valid_metric_name_character,
valid_metric_name_start_character,
};
use proptest::prelude::*;

@@ -321,11 +323,11 @@ mod tests {
let as_chars = result.chars().collect::<Vec<_>>();

if let Some(c) = as_chars.first() {
assert_eq!(false, invalid_metric_name_start_character(*c),
assert!(valid_metric_name_start_character(*c),
"first character of metric name was not valid");
}

assert!(!as_chars.iter().any(|c| invalid_metric_name_character(*c)),
assert!(as_chars.iter().all(|c| valid_metric_name_character(*c)),
"invalid character in metric name");
}

@@ -335,7 +337,7 @@
let as_chars = result.chars().collect::<Vec<_>>();

if let Some(c) = as_chars.first() {
assert_eq!(false, invalid_label_key_start_character(*c),
assert!(valid_label_key_start_character(*c),
"first character of label key was not valid");
}

@@ -353,7 +355,7 @@
}
}*/

assert!(!as_chars.iter().any(|c| invalid_label_key_character(*c)),
assert!(as_chars.iter().all(|c| valid_label_key_character(*c)),
"invalid character in label key");
}

@@ -369,7 +371,7 @@
let as_chars = delayered_backslashes.chars().collect::<Vec<_>>();

// If the first character is a double quote, then we messed up.
assert!(as_chars.first().map(|c| *c != '"').unwrap_or(true),
assert!(as_chars.first().map_or(true, |c| *c != '"'),
"first character cannot be a double quote: {}", result);

// Now look for unescaped characters in the rest of the string, in a windowed fashion.
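
The sanitization rewrite above inverts the predicates (`valid_*` instead of `invalid_*`) and replaces the old loop, which swapped a mutable function pointer after the first character, with a single `chars().enumerate().map(...).collect()` pipeline. A standalone sketch of the same pattern for metric names, assuming only the Prometheus rule `[a-zA-Z_:][a-zA-Z0-9_:]*` (an illustration, not the crate's exported function):

```rust
fn valid_start(c: char) -> bool {
    c.is_ascii_alphabetic() || c == '_' || c == ':'
}

fn valid_rest(c: char) -> bool {
    c.is_ascii_alphanumeric() || c == '_' || c == ':'
}

/// Replaces every character that is invalid for its position with '_'.
fn sanitize(name: &str) -> String {
    name.chars()
        .enumerate()
        .map(|(i, c)| {
            if (i == 0 && valid_start(c)) || (i != 0 && valid_rest(c)) {
                c
            } else {
                '_'
            }
        })
        .collect()
}

fn main() {
    // '9' is allowed after the first position but not as the first character.
    assert_eq!(sanitize("9xx"), "_xx");
    assert_eq!(sanitize("http.requests-total"), "http_requests_total");
}
```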
4 changes: 3 additions & 1 deletion metrics-exporter-prometheus/src/recorder.rs
@@ -15,6 +15,7 @@ use crate::formatting::{
};
use crate::registry::GenerationalAtomicStorage;

#[derive(Debug)]
pub(crate) struct Inner {
pub registry: Registry<Key, GenerationalAtomicStorage>,
pub recency: Recency<Key>,
@@ -214,6 +215,7 @@ impl Inner {
/// Most users will not need to interact directly with the recorder, and can simply deal with the
/// builder methods on [`PrometheusBuilder`](crate::PrometheusBuilder) for building and installing
/// the recorder/exporter.
#[derive(Debug)]
pub struct PrometheusRecorder {
inner: Arc<Inner>,
}
@@ -275,7 +277,7 @@ impl Recorder for PrometheusRecorder {
/// handled directly by the HTTP listener, or push gateway background task. [`PrometheusHandle`]
/// allows rendering a snapshot of the current metrics stored by an installed [`PrometheusRecorder`]
/// as a payload conforming to the Prometheus exposition format.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct PrometheusHandle {
inner: Arc<Inner>,
}
2 changes: 2 additions & 0 deletions metrics-exporter-prometheus/src/registry.rs
@@ -7,6 +7,7 @@ use quanta::Instant;
pub type GenerationalAtomicStorage = GenerationalStorage<AtomicStorage>;

/// Atomic metric storage for the prometheus exporter.
#[derive(Debug)]
pub struct AtomicStorage;

impl<K> metrics_util::registry::Storage<K> for AtomicStorage {
@@ -28,6 +29,7 @@ impl<K> metrics_util::registry::Storage<K> for AtomicStorage {
}

/// An `AtomicBucket` newtype wrapper that tracks the time of value insertion.
#[derive(Debug)]
pub struct AtomicBucketInstant<T> {
inner: AtomicBucket<(T, Instant)>,
}