From e96c51a72eeac3c43d0fa0f278e109820ed1213c Mon Sep 17 00:00:00 2001 From: Ruihang Xia Date: Sun, 17 Sep 2023 02:56:41 -0500 Subject: [PATCH] refactor: rename common-function-macro subcrate (#2418) * rename common-function-macro to common-macro Signed-off-by: Ruihang Xia * put impl into their own file Signed-off-by: Ruihang Xia --------- Signed-off-by: Ruihang Xia --- Cargo.lock | 38 +-- Cargo.toml | 4 +- src/common/function-macro/src/lib.rs | 226 ------------------ src/common/function/Cargo.toml | 2 +- .../function/src/scalars/aggregate/argmax.rs | 2 +- .../function/src/scalars/aggregate/argmin.rs | 2 +- .../function/src/scalars/aggregate/diff.rs | 2 +- .../function/src/scalars/aggregate/mean.rs | 2 +- .../src/scalars/aggregate/percentile.rs | 2 +- .../function/src/scalars/aggregate/polyval.rs | 2 +- .../scalars/aggregate/scipy_stats_norm_cdf.rs | 2 +- .../scalars/aggregate/scipy_stats_norm_pdf.rs | 2 +- .../{function-macro => macro}/Cargo.toml | 2 +- src/common/macro/src/aggr_func.rs | 72 ++++++ src/common/macro/src/lib.rs | 89 +++++++ src/common/macro/src/print_caller.rs | 108 +++++++++ .../{function-macro => macro}/src/range_fn.rs | 0 .../tests/test_derive.rs | 2 +- src/promql/Cargo.toml | 2 +- src/promql/src/functions/aggr_over_time.rs | 2 +- src/promql/src/functions/changes.rs | 2 +- src/promql/src/functions/deriv.rs | 2 +- src/promql/src/functions/resets.rs | 2 +- src/query/Cargo.toml | 2 +- src/query/src/tests/my_sum_udaf_example.rs | 2 +- 25 files changed, 308 insertions(+), 265 deletions(-) delete mode 100644 src/common/function-macro/src/lib.rs rename src/common/{function-macro => macro}/Cargo.toml (92%) create mode 100644 src/common/macro/src/aggr_func.rs create mode 100644 src/common/macro/src/lib.rs create mode 100644 src/common/macro/src/print_caller.rs rename src/common/{function-macro => macro}/src/range_fn.rs (100%) rename src/common/{function-macro => macro}/tests/test_derive.rs (93%) diff --git a/Cargo.lock b/Cargo.lock index c069c7e3c891..bd62da4e7ac6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1717,7 +1717,7 @@ dependencies = [ "arc-swap", "chrono-tz 0.6.3", "common-error", - "common-function-macro", + "common-macro", "common-query", "common-time", "datafusion", @@ -1733,22 +1733,6 @@ dependencies = [ "statrs", ] -[[package]] -name = "common-function-macro" -version = "0.4.0-nightly" -dependencies = [ - "arc-swap", - "backtrace", - "common-query", - "common-telemetry", - "datatypes", - "proc-macro2", - "quote", - "snafu", - "static_assertions", - "syn 1.0.109", -] - [[package]] name = "common-greptimedb-telemetry" version = "0.4.0-nightly" @@ -1815,6 +1799,22 @@ dependencies = [ "table", ] +[[package]] +name = "common-macro" +version = "0.4.0-nightly" +dependencies = [ + "arc-swap", + "backtrace", + "common-query", + "common-telemetry", + "datatypes", + "proc-macro2", + "quote", + "snafu", + "static_assertions", + "syn 1.0.109", +] + [[package]] name = "common-mem-prof" version = "0.4.0-nightly" @@ -6926,7 +6926,7 @@ dependencies = [ "catalog", "common-catalog", "common-error", - "common-function-macro", + "common-macro", "common-telemetry", "datafusion", "datatypes", @@ -7197,7 +7197,7 @@ dependencies = [ "common-datasource", "common-error", "common-function", - "common-function-macro", + "common-macro", "common-meta", "common-query", "common-recordbatch", diff --git a/Cargo.toml b/Cargo.toml index 2d6832e1e036..73375ac4f37d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ members = [ "src/common/datasource", "src/common/error", "src/common/function", - 
"src/common/function-macro", + "src/common/macro", "src/common/greptimedb-telemetry", "src/common/grpc", "src/common/grpc-expr", @@ -123,7 +123,7 @@ common-config = { path = "src/common/config" } common-datasource = { path = "src/common/datasource" } common-error = { path = "src/common/error" } common-function = { path = "src/common/function" } -common-function-macro = { path = "src/common/function-macro" } +common-macro = { path = "src/common/macro" } common-greptimedb-telemetry = { path = "src/common/greptimedb-telemetry" } common-grpc = { path = "src/common/grpc" } common-grpc-expr = { path = "src/common/grpc-expr" } diff --git a/src/common/function-macro/src/lib.rs b/src/common/function-macro/src/lib.rs deleted file mode 100644 index c0cc67045472..000000000000 --- a/src/common/function-macro/src/lib.rs +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2023 Greptime Team -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -mod range_fn; - -use proc_macro::TokenStream; -use quote::{quote, quote_spanned, ToTokens}; -use range_fn::process_range_fn; -use syn::parse::Parser; -use syn::spanned::Spanned; -use syn::{ - parse_macro_input, AttributeArgs, DeriveInput, ItemFn, ItemStruct, Lit, Meta, NestedMeta, -}; - -/// Make struct implemented trait [AggrFuncTypeStore], which is necessary when writing UDAF. -/// This derive macro is expect to be used along with attribute macro [as_aggr_func_creator]. -#[proc_macro_derive(AggrFuncTypeStore)] -pub fn aggr_func_type_store_derive(input: TokenStream) -> TokenStream { - let ast = parse_macro_input!(input as DeriveInput); - impl_aggr_func_type_store(&ast) -} - -fn impl_aggr_func_type_store(ast: &DeriveInput) -> TokenStream { - let name = &ast.ident; - let gen = quote! { - use common_query::logical_plan::accumulator::AggrFuncTypeStore; - use common_query::error::{InvalidInputStateSnafu, Error as QueryError}; - use datatypes::prelude::ConcreteDataType; - - impl AggrFuncTypeStore for #name { - fn input_types(&self) -> std::result::Result, QueryError> { - let input_types = self.input_types.load(); - snafu::ensure!(input_types.is_some(), InvalidInputStateSnafu); - Ok(input_types.as_ref().unwrap().as_ref().clone()) - } - - fn set_input_types(&self, input_types: Vec) -> std::result::Result<(), QueryError> { - let old = self.input_types.swap(Some(std::sync::Arc::new(input_types.clone()))); - if let Some(old) = old { - snafu::ensure!(old.len() == input_types.len(), InvalidInputStateSnafu); - for (x, y) in old.iter().zip(input_types.iter()) { - snafu::ensure!(x == y, InvalidInputStateSnafu); - } - } - Ok(()) - } - } - }; - gen.into() -} - -/// A struct can be used as a creator for aggregate function if it has been annotated with this -/// attribute first. This attribute add a necessary field which is intended to store the input -/// data's types to the struct. -/// This attribute is expected to be used along with derive macro [AggrFuncTypeStore]. 
-#[proc_macro_attribute] -pub fn as_aggr_func_creator(_args: TokenStream, input: TokenStream) -> TokenStream { - let mut item_struct = parse_macro_input!(input as ItemStruct); - if let syn::Fields::Named(ref mut fields) = item_struct.fields { - let result = syn::Field::parse_named.parse2(quote! { - input_types: arc_swap::ArcSwapOption> - }); - match result { - Ok(field) => fields.named.push(field), - Err(e) => return e.into_compile_error().into(), - } - } else { - return quote_spanned!( - item_struct.fields.span() => compile_error!( - "This attribute macro needs to add fields to the its annotated struct, \ - so the struct must have \"{}\".") - ) - .into(); - } - quote! { - #item_struct - } - .into() -} - -/// Attribute macro to convert an arithimetic function to a range function. The annotated function -/// should accept servaral arrays as input and return a single value as output. This procedure -/// macro can works on any number of input parameters. Return type can be either primitive type -/// or wrapped in `Option`. -/// -/// # Example -/// Take `count_over_time()` in PromQL as an example: -/// ```rust, ignore -/// /// The count of all values in the specified interval. -/// #[range_fn( -/// name = "CountOverTime", -/// ret = "Float64Array", -/// display_name = "prom_count_over_time" -/// )] -/// pub fn count_over_time(_: &TimestampMillisecondArray, values: &Float64Array) -> f64 { -/// values.len() as f64 -/// } -/// ``` -/// -/// # Arguments -/// - `name`: The name of the generated [ScalarUDF] struct. -/// - `ret`: The return type of the generated UDF function. -/// - `display_name`: The display name of the generated UDF function. -#[proc_macro_attribute] -pub fn range_fn(args: TokenStream, input: TokenStream) -> TokenStream { - process_range_fn(args, input) -} - -/// Attribute macro to print the caller to the annotated function. -/// The caller is printed as its filename and the call site line number. -/// -/// This macro works like this: inject the tracking codes as the first statement to the annotated -/// function body. The tracking codes use [backtrace-rs](https://crates.io/crates/backtrace) to get -/// the callers. So you must dependent on the `backtrace-rs` crate. -/// -/// # Arguments -/// - `depth`: The max depth of call stack to print. Optional, defaults to 1. -/// -/// # Example -/// ```rust, ignore -/// -/// #[print_caller(depth = 3)] -/// fn foo() {} -/// ``` -#[proc_macro_attribute] -pub fn print_caller(args: TokenStream, input: TokenStream) -> TokenStream { - let mut depth = 1; - - let args = parse_macro_input!(args as AttributeArgs); - for meta in args.iter() { - if let NestedMeta::Meta(Meta::NameValue(name_value)) = meta { - let ident = name_value - .path - .get_ident() - .expect("Expected an ident!") - .to_string(); - if ident == "depth" { - let Lit::Int(i) = &name_value.lit else { - panic!("Expected 'depth' to be a valid int!") - }; - depth = i.base10_parse::().expect("Invalid 'depth' value"); - break; - } - } - } - - let tokens: TokenStream = quote! 
{ - { - let curr_file = file!(); - - let bt = backtrace::Backtrace::new(); - let call_stack = bt - .frames() - .iter() - .skip_while(|f| { - !f.symbols().iter().any(|s| { - s.filename() - .map(|p| p.ends_with(curr_file)) - .unwrap_or(false) - }) - }) - .skip(1) - .take(#depth); - - let call_stack = call_stack - .map(|f| { - f.symbols() - .iter() - .map(|s| { - let filename = s - .filename() - .map(|p| format!("{:?}", p)) - .unwrap_or_else(|| "unknown".to_string()); - - let lineno = s - .lineno() - .map(|l| format!("{}", l)) - .unwrap_or_else(|| "unknown".to_string()); - - format!("filename: {}, lineno: {}", filename, lineno) - }) - .collect::>() - .join(", ") - }) - .collect::>(); - - match call_stack.len() { - 0 => common_telemetry::info!("unable to find call stack"), - 1 => common_telemetry::info!("caller: {}", call_stack[0]), - _ => { - let mut s = String::new(); - s.push_str("[\n"); - for e in call_stack { - s.push_str("\t"); - s.push_str(&e); - s.push_str("\n"); - } - s.push_str("]"); - common_telemetry::info!("call stack: {}", s) - } - } - } - } - .into(); - - let stmt = match syn::parse(tokens) { - Ok(stmt) => stmt, - Err(e) => return e.into_compile_error().into(), - }; - - let mut item = parse_macro_input!(input as ItemFn); - item.block.stmts.insert(0, stmt); - - item.into_token_stream().into() -} diff --git a/src/common/function/Cargo.toml b/src/common/function/Cargo.toml index 02adc46d13ab..e5a6433b7c93 100644 --- a/src/common/function/Cargo.toml +++ b/src/common/function/Cargo.toml @@ -8,7 +8,7 @@ license.workspace = true arc-swap = "1.0" chrono-tz = "0.6" common-error = { workspace = true } -common-function-macro = { workspace = true } +common-macro = { workspace = true } common-query = { workspace = true } common-time = { workspace = true } datafusion.workspace = true diff --git a/src/common/function/src/scalars/aggregate/argmax.rs b/src/common/function/src/scalars/aggregate/argmax.rs index de02c02760eb..c5c5264f1994 100644 --- a/src/common/function/src/scalars/aggregate/argmax.rs +++ b/src/common/function/src/scalars/aggregate/argmax.rs @@ -15,7 +15,7 @@ use std::cmp::Ordering; use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{BadAccumulatorImplSnafu, CreateAccumulatorSnafu, Result}; use common_query::logical_plan::{Accumulator, AggregateFunctionCreator}; use common_query::prelude::*; diff --git a/src/common/function/src/scalars/aggregate/argmin.rs b/src/common/function/src/scalars/aggregate/argmin.rs index 30cd51305b2f..7233f43b7708 100644 --- a/src/common/function/src/scalars/aggregate/argmin.rs +++ b/src/common/function/src/scalars/aggregate/argmin.rs @@ -15,7 +15,7 @@ use std::cmp::Ordering; use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{BadAccumulatorImplSnafu, CreateAccumulatorSnafu, Result}; use common_query::logical_plan::{Accumulator, AggregateFunctionCreator}; use common_query::prelude::*; diff --git a/src/common/function/src/scalars/aggregate/diff.rs b/src/common/function/src/scalars/aggregate/diff.rs index 747ff8af2e34..9893d6199b71 100644 --- a/src/common/function/src/scalars/aggregate/diff.rs +++ b/src/common/function/src/scalars/aggregate/diff.rs @@ -15,7 +15,7 @@ use std::marker::PhantomData; use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use 
common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{ CreateAccumulatorSnafu, DowncastVectorSnafu, FromScalarValueSnafu, Result, }; diff --git a/src/common/function/src/scalars/aggregate/mean.rs b/src/common/function/src/scalars/aggregate/mean.rs index 6f42e11b5e98..3dc3e185351b 100644 --- a/src/common/function/src/scalars/aggregate/mean.rs +++ b/src/common/function/src/scalars/aggregate/mean.rs @@ -15,7 +15,7 @@ use std::marker::PhantomData; use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{ BadAccumulatorImplSnafu, CreateAccumulatorSnafu, DowncastVectorSnafu, Result, }; diff --git a/src/common/function/src/scalars/aggregate/percentile.rs b/src/common/function/src/scalars/aggregate/percentile.rs index 3bdd24b0e8c6..49b981a7ee0e 100644 --- a/src/common/function/src/scalars/aggregate/percentile.rs +++ b/src/common/function/src/scalars/aggregate/percentile.rs @@ -16,7 +16,7 @@ use std::cmp::Reverse; use std::collections::BinaryHeap; use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{ self, BadAccumulatorImplSnafu, CreateAccumulatorSnafu, DowncastVectorSnafu, FromScalarValueSnafu, InvalidInputColSnafu, Result, diff --git a/src/common/function/src/scalars/aggregate/polyval.rs b/src/common/function/src/scalars/aggregate/polyval.rs index cd37f3a118d0..b56a692c8df7 100644 --- a/src/common/function/src/scalars/aggregate/polyval.rs +++ b/src/common/function/src/scalars/aggregate/polyval.rs @@ -15,7 +15,7 @@ use std::marker::PhantomData; use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{ self, BadAccumulatorImplSnafu, CreateAccumulatorSnafu, DowncastVectorSnafu, FromScalarValueSnafu, InvalidInputColSnafu, Result, diff --git a/src/common/function/src/scalars/aggregate/scipy_stats_norm_cdf.rs b/src/common/function/src/scalars/aggregate/scipy_stats_norm_cdf.rs index fb0b19e07f1e..2ec954051341 100644 --- a/src/common/function/src/scalars/aggregate/scipy_stats_norm_cdf.rs +++ b/src/common/function/src/scalars/aggregate/scipy_stats_norm_cdf.rs @@ -14,7 +14,7 @@ use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{ self, BadAccumulatorImplSnafu, CreateAccumulatorSnafu, DowncastVectorSnafu, FromScalarValueSnafu, GenerateFunctionSnafu, InvalidInputColSnafu, Result, diff --git a/src/common/function/src/scalars/aggregate/scipy_stats_norm_pdf.rs b/src/common/function/src/scalars/aggregate/scipy_stats_norm_pdf.rs index 154465cb5b1b..d1bf432c993a 100644 --- a/src/common/function/src/scalars/aggregate/scipy_stats_norm_pdf.rs +++ b/src/common/function/src/scalars/aggregate/scipy_stats_norm_pdf.rs @@ -14,7 +14,7 @@ use std::sync::Arc; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use common_query::error::{ self, BadAccumulatorImplSnafu, CreateAccumulatorSnafu, DowncastVectorSnafu, FromScalarValueSnafu, GenerateFunctionSnafu, InvalidInputColSnafu, Result, diff --git a/src/common/function-macro/Cargo.toml b/src/common/macro/Cargo.toml similarity index 92% rename from 
src/common/function-macro/Cargo.toml rename to src/common/macro/Cargo.toml index 5601549fd7c0..c0ab6b0a5be3 100644 --- a/src/common/function-macro/Cargo.toml +++ b/src/common/macro/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "common-function-macro" +name = "common-macro" version.workspace = true edition.workspace = true license.workspace = true diff --git a/src/common/macro/src/aggr_func.rs b/src/common/macro/src/aggr_func.rs new file mode 100644 index 000000000000..4c3ccccdeeb5 --- /dev/null +++ b/src/common/macro/src/aggr_func.rs @@ -0,0 +1,72 @@ +// Copyright 2023 Greptime Team +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use proc_macro::TokenStream; +use quote::{quote, quote_spanned}; +use syn::parse::Parser; +use syn::spanned::Spanned; +use syn::{parse_macro_input, DeriveInput, ItemStruct}; + +pub(crate) fn impl_aggr_func_type_store(ast: &DeriveInput) -> TokenStream { + let name = &ast.ident; + let gen = quote! { + use common_query::logical_plan::accumulator::AggrFuncTypeStore; + use common_query::error::{InvalidInputStateSnafu, Error as QueryError}; + use datatypes::prelude::ConcreteDataType; + + impl AggrFuncTypeStore for #name { + fn input_types(&self) -> std::result::Result, QueryError> { + let input_types = self.input_types.load(); + snafu::ensure!(input_types.is_some(), InvalidInputStateSnafu); + Ok(input_types.as_ref().unwrap().as_ref().clone()) + } + + fn set_input_types(&self, input_types: Vec) -> std::result::Result<(), QueryError> { + let old = self.input_types.swap(Some(std::sync::Arc::new(input_types.clone()))); + if let Some(old) = old { + snafu::ensure!(old.len() == input_types.len(), InvalidInputStateSnafu); + for (x, y) in old.iter().zip(input_types.iter()) { + snafu::ensure!(x == y, InvalidInputStateSnafu); + } + } + Ok(()) + } + } + }; + gen.into() +} + +pub(crate) fn impl_as_aggr_func_creator(_args: TokenStream, input: TokenStream) -> TokenStream { + let mut item_struct = parse_macro_input!(input as ItemStruct); + if let syn::Fields::Named(ref mut fields) = item_struct.fields { + let result = syn::Field::parse_named.parse2(quote! { + input_types: arc_swap::ArcSwapOption> + }); + match result { + Ok(field) => fields.named.push(field), + Err(e) => return e.into_compile_error().into(), + } + } else { + return quote_spanned!( + item_struct.fields.span() => compile_error!( + "This attribute macro needs to add fields to the its annotated struct, \ + so the struct must have \"{}\".") + ) + .into(); + } + quote! { + #item_struct + } + .into() +} diff --git a/src/common/macro/src/lib.rs b/src/common/macro/src/lib.rs new file mode 100644 index 000000000000..61c3bc0edc19 --- /dev/null +++ b/src/common/macro/src/lib.rs @@ -0,0 +1,89 @@ +// Copyright 2023 Greptime Team +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+mod aggr_func;
+mod print_caller;
+mod range_fn;
+
+use aggr_func::{impl_aggr_func_type_store, impl_as_aggr_func_creator};
+use print_caller::process_print_caller;
+use proc_macro::TokenStream;
+use range_fn::process_range_fn;
+use syn::{parse_macro_input, DeriveInput};
+
+/// Make a struct implement the [AggrFuncTypeStore] trait, which is necessary when writing a UDAF.
+/// This derive macro is expected to be used along with the attribute macro [as_aggr_func_creator].
+#[proc_macro_derive(AggrFuncTypeStore)]
+pub fn aggr_func_type_store_derive(input: TokenStream) -> TokenStream {
+    let ast = parse_macro_input!(input as DeriveInput);
+    impl_aggr_func_type_store(&ast)
+}
+
+/// A struct can be used as a creator for an aggregate function once it has been annotated with
+/// this attribute. The attribute adds a field to the struct that stores the types of the input
+/// data.
+/// This attribute is expected to be used along with the derive macro [AggrFuncTypeStore].
+#[proc_macro_attribute]
+pub fn as_aggr_func_creator(args: TokenStream, input: TokenStream) -> TokenStream {
+    impl_as_aggr_func_creator(args, input)
+}
+
+/// Attribute macro to convert an arithmetic function into a range function. The annotated function
+/// should accept several arrays as input and return a single value as output. This procedural
+/// macro works with any number of input parameters. The return type can be either a primitive type
+/// or one wrapped in `Option`.
+///
+/// # Example
+/// Take `count_over_time()` in PromQL as an example:
+/// ```rust, ignore
+/// /// The count of all values in the specified interval.
+/// #[range_fn(
+///     name = "CountOverTime",
+///     ret = "Float64Array",
+///     display_name = "prom_count_over_time"
+/// )]
+/// pub fn count_over_time(_: &TimestampMillisecondArray, values: &Float64Array) -> f64 {
+///     values.len() as f64
+/// }
+/// ```
+///
+/// # Arguments
+/// - `name`: The name of the generated [ScalarUDF] struct.
+/// - `ret`: The return type of the generated UDF function.
+/// - `display_name`: The display name of the generated UDF function.
+#[proc_macro_attribute]
+pub fn range_fn(args: TokenStream, input: TokenStream) -> TokenStream {
+    process_range_fn(args, input)
+}
+
+/// Attribute macro to print the caller of the annotated function.
+/// The caller is printed as its filename and the call site line number.
+///
+/// This macro works by injecting tracking code as the first statement of the annotated function
+/// body. The tracking code uses [backtrace-rs](https://crates.io/crates/backtrace) to resolve the
+/// callers, so the annotated crate must depend on the `backtrace` crate.
+///
+/// # Arguments
+/// - `depth`: The maximum depth of the call stack to print. Optional, defaults to 1.
+///
+/// # Example
+/// ```rust, ignore
+///
+/// #[print_caller(depth = 3)]
+/// fn foo() {}
+/// ```
+#[proc_macro_attribute]
+pub fn print_caller(args: TokenStream, input: TokenStream) -> TokenStream {
+    process_print_caller(args, input)
+}
diff --git a/src/common/macro/src/print_caller.rs b/src/common/macro/src/print_caller.rs
new file mode 100644
index 000000000000..c4510ddfa94e
--- /dev/null
+++ b/src/common/macro/src/print_caller.rs
@@ -0,0 +1,108 @@
+// Copyright 2023 Greptime Team
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use proc_macro::TokenStream;
+use quote::{quote, ToTokens};
+use syn::{parse_macro_input, AttributeArgs, ItemFn, Lit, Meta, NestedMeta};
+
+pub(crate) fn process_print_caller(args: TokenStream, input: TokenStream) -> TokenStream {
+    let mut depth = 1;
+
+    let args = parse_macro_input!(args as AttributeArgs);
+    for meta in args.iter() {
+        if let NestedMeta::Meta(Meta::NameValue(name_value)) = meta {
+            let ident = name_value
+                .path
+                .get_ident()
+                .expect("Expected an ident!")
+                .to_string();
+            if ident == "depth" {
+                let Lit::Int(i) = &name_value.lit else {
+                    panic!("Expected 'depth' to be a valid int!")
+                };
+                depth = i.base10_parse::<usize>().expect("Invalid 'depth' value");
+                break;
+            }
+        }
+    }
+
+    let tokens: TokenStream = quote!
{ + { + let curr_file = file!(); + + let bt = backtrace::Backtrace::new(); + let call_stack = bt + .frames() + .iter() + .skip_while(|f| { + !f.symbols().iter().any(|s| { + s.filename() + .map(|p| p.ends_with(curr_file)) + .unwrap_or(false) + }) + }) + .skip(1) + .take(#depth); + + let call_stack = call_stack + .map(|f| { + f.symbols() + .iter() + .map(|s| { + let filename = s + .filename() + .map(|p| format!("{:?}", p)) + .unwrap_or_else(|| "unknown".to_string()); + + let lineno = s + .lineno() + .map(|l| format!("{}", l)) + .unwrap_or_else(|| "unknown".to_string()); + + format!("filename: {}, lineno: {}", filename, lineno) + }) + .collect::>() + .join(", ") + }) + .collect::>(); + + match call_stack.len() { + 0 => common_telemetry::info!("unable to find call stack"), + 1 => common_telemetry::info!("caller: {}", call_stack[0]), + _ => { + let mut s = String::new(); + s.push_str("[\n"); + for e in call_stack { + s.push_str("\t"); + s.push_str(&e); + s.push_str("\n"); + } + s.push_str("]"); + common_telemetry::info!("call stack: {}", s) + } + } + } + } + .into(); + + let stmt = match syn::parse(tokens) { + Ok(stmt) => stmt, + Err(e) => return e.into_compile_error().into(), + }; + + let mut item = parse_macro_input!(input as ItemFn); + item.block.stmts.insert(0, stmt); + + item.into_token_stream().into() +} diff --git a/src/common/function-macro/src/range_fn.rs b/src/common/macro/src/range_fn.rs similarity index 100% rename from src/common/function-macro/src/range_fn.rs rename to src/common/macro/src/range_fn.rs diff --git a/src/common/function-macro/tests/test_derive.rs b/src/common/macro/tests/test_derive.rs similarity index 93% rename from src/common/function-macro/tests/test_derive.rs rename to src/common/macro/tests/test_derive.rs index db2b469e9b36..9c648c788dce 100644 --- a/src/common/function-macro/tests/test_derive.rs +++ b/src/common/macro/tests/test_derive.rs @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use static_assertions::{assert_fields, assert_impl_all}; #[as_aggr_func_creator] diff --git a/src/promql/Cargo.toml b/src/promql/Cargo.toml index dab4edd506d6..00f55ce296c5 100644 --- a/src/promql/Cargo.toml +++ b/src/promql/Cargo.toml @@ -11,7 +11,7 @@ bytemuck = "1.12" catalog = { workspace = true } common-catalog = { workspace = true } common-error = { workspace = true } -common-function-macro = { workspace = true } +common-macro = { workspace = true } common-telemetry = { workspace = true } datafusion.workspace = true datatypes = { workspace = true } diff --git a/src/promql/src/functions/aggr_over_time.rs b/src/promql/src/functions/aggr_over_time.rs index 5c9d0578d248..05428db47c2b 100644 --- a/src/promql/src/functions/aggr_over_time.rs +++ b/src/promql/src/functions/aggr_over_time.rs @@ -14,7 +14,7 @@ use std::sync::Arc; -use common_function_macro::range_fn; +use common_macro::range_fn; use datafusion::arrow::array::{Float64Array, TimestampMillisecondArray}; use datafusion::arrow::datatypes::TimeUnit; use datafusion::common::DataFusionError; diff --git a/src/promql/src/functions/changes.rs b/src/promql/src/functions/changes.rs index 4039a95f9a04..a8b29c9cbdac 100644 --- a/src/promql/src/functions/changes.rs +++ b/src/promql/src/functions/changes.rs @@ -17,7 +17,7 @@ use std::sync::Arc; -use common_function_macro::range_fn; +use common_macro::range_fn; use datafusion::arrow::array::{Float64Array, TimestampMillisecondArray}; use datafusion::arrow::datatypes::TimeUnit; use datafusion::common::DataFusionError; diff --git a/src/promql/src/functions/deriv.rs b/src/promql/src/functions/deriv.rs index 4ba0a30438ae..84e5c2e212de 100644 --- a/src/promql/src/functions/deriv.rs +++ b/src/promql/src/functions/deriv.rs @@ -17,7 +17,7 @@ use std::sync::Arc; -use common_function_macro::range_fn; +use common_macro::range_fn; use datafusion::arrow::array::{Float64Array, TimestampMillisecondArray}; use datafusion::arrow::datatypes::TimeUnit; use datafusion::common::DataFusionError; diff --git a/src/promql/src/functions/resets.rs b/src/promql/src/functions/resets.rs index a76ee65510f5..218e1908738a 100644 --- a/src/promql/src/functions/resets.rs +++ b/src/promql/src/functions/resets.rs @@ -17,7 +17,7 @@ use std::sync::Arc; -use common_function_macro::range_fn; +use common_macro::range_fn; use datafusion::arrow::array::{Float64Array, TimestampMillisecondArray}; use datafusion::arrow::datatypes::TimeUnit; use datafusion::common::DataFusionError; diff --git a/src/query/Cargo.toml b/src/query/Cargo.toml index 33e4564167cc..19d642e077e4 100644 --- a/src/query/Cargo.toml +++ b/src/query/Cargo.toml @@ -57,7 +57,7 @@ tokio.workspace = true approx_eq = "0.1" arrow.workspace = true catalog = { workspace = true, features = ["testing"] } -common-function-macro.workspace = true +common-macro.workspace = true format_num = "0.1" num = "0.4" num-traits = "0.2" diff --git a/src/query/src/tests/my_sum_udaf_example.rs b/src/query/src/tests/my_sum_udaf_example.rs index 363816b4d638..8220dcf72dfa 100644 --- a/src/query/src/tests/my_sum_udaf_example.rs +++ b/src/query/src/tests/my_sum_udaf_example.rs @@ -17,7 +17,7 @@ use std::marker::PhantomData; use std::sync::Arc; use common_function::scalars::aggregate::AggregateFunctionMeta; -use common_function_macro::{as_aggr_func_creator, AggrFuncTypeStore}; +use common_macro::{as_aggr_func_creator, AggrFuncTypeStore}; use 
common_query::error::{CreateAccumulatorSnafu, Result as QueryResult};
 use common_query::logical_plan::{Accumulator, AggregateFunctionCreator};
 use common_query::prelude::*;
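
After this patch, downstream code only needs to change its import path from `common_function_macro` to `common_macro`; the macros themselves behave as before. Below is a minimal usage sketch (not taken from the patch) of the two aggregate-creator macros working together. The macro names, the injected `input_types` field, and the `input_types()`/`set_input_types()` methods come from the generated code shown in the hunks above; the struct name, the helper function, and the chosen input type are illustrative only.

```rust
// Minimal sketch, assuming the workspace crates `common-macro`, `common-query`,
// `datatypes`, and `arc-swap` listed in the Cargo.toml hunks above.
// `ExampleUdafCreator` and `remember_input_types` are made-up names; real creators
// (e.g. under src/common/function/src/scalars/aggregate/) follow the same pattern.
use common_macro::{as_aggr_func_creator, AggrFuncTypeStore};
use datatypes::prelude::ConcreteDataType;

// `#[as_aggr_func_creator]` injects an `input_types: ArcSwapOption<Vec<ConcreteDataType>>`
// field, and `#[derive(AggrFuncTypeStore)]` implements `input_types()` /
// `set_input_types()` on top of it, so the struct body can stay empty.
#[as_aggr_func_creator]
#[derive(Debug, Default, AggrFuncTypeStore)]
pub struct ExampleUdafCreator {}

fn remember_input_types(creator: &ExampleUdafCreator) -> common_query::error::Result<()> {
    // Store the input types once; later calls read them back via `input_types()`.
    creator.set_input_types(vec![ConcreteDataType::float64_datatype()])?;
    assert_eq!(creator.input_types()?.len(), 1);
    Ok(())
}
```

The `range_fn` and `print_caller` attributes keep the usage shown in their doc comments above (e.g. `#[print_caller(depth = 3)]`); only the crate name in the `use` path changes.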