diff --git a/processing/src/main/java/org/apache/druid/math/expr/BuiltInExprMacros.java b/processing/src/main/java/org/apache/druid/math/expr/BuiltInExprMacros.java
index c1298cd79297c..1a61408aab571 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/BuiltInExprMacros.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/BuiltInExprMacros.java
@@ -25,6 +25,7 @@
 import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 import java.util.stream.Collectors;
 
 public class BuiltInExprMacros
@@ -32,6 +33,7 @@ public class BuiltInExprMacros
   public static class ComplexDecodeBase64ExprMacro implements ExprMacroTable.ExprMacro
   {
     public static final String NAME = "complex_decode_base64";
+    public static final String ALIAS_NAME = "decode_base64_complex";
 
     @Override
     public String name()
@@ -39,6 +41,12 @@ public String name()
       return NAME;
     }
 
+    @Override
+    public Optional<String> alias()
+    {
+      return Optional.of(ALIAS_NAME);
+    }
+
     @Override
     public Expr apply(List<Expr> args)
     {
diff --git a/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java b/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java
index d3cc6461c51b2..e75213045789e 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/ExprMacroTable.java
@@ -33,7 +33,6 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Collectors;
 
 /**
  * Mechanism by which Druid expressions can define new functions for the Druid expression language. When
@@ -53,9 +52,19 @@ public class ExprMacroTable
 
   public ExprMacroTable(final List<ExprMacro> macros)
   {
-    this.macroMap = Maps.newHashMapWithExpectedSize(BUILT_IN.size() + macros.size());
-    macroMap.putAll(BUILT_IN.stream().collect(Collectors.toMap(m -> StringUtils.toLowerCase(m.name()), m -> m)));
-    macroMap.putAll(macros.stream().collect(Collectors.toMap(m -> StringUtils.toLowerCase(m.name()), m -> m)));
+    this.macroMap = Maps.newHashMapWithExpectedSize(BUILT_IN.size() + 1 + macros.size());
+    BUILT_IN.forEach(m -> {
+      macroMap.put(StringUtils.toLowerCase(m.name()), m);
+      if (m.alias().isPresent()) {
+        macroMap.put(StringUtils.toLowerCase(m.alias().get()), m);
+      }
+    });
+    for (ExprMacro macro : macros) {
+      macroMap.put(StringUtils.toLowerCase(macro.name()), macro);
+      if (macro.alias().isPresent()) {
+        macroMap.put(StringUtils.toLowerCase(macro.alias().get()), macro);
+      }
+    }
   }
 
   public static ExprMacroTable nil()
diff --git a/processing/src/main/java/org/apache/druid/math/expr/NamedFunction.java b/processing/src/main/java/org/apache/druid/math/expr/NamedFunction.java
index 574535ac68480..839a166db9fbf 100644
--- a/processing/src/main/java/org/apache/druid/math/expr/NamedFunction.java
+++ b/processing/src/main/java/org/apache/druid/math/expr/NamedFunction.java
@@ -23,6 +23,7 @@
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.Optional;
 
 /**
  * Common stuff for "named" functions of "functional" expressions, such as {@link FunctionExpr},
@@ -38,6 +39,14 @@ public interface NamedFunction
    */
  String name();
 
+  /**
+   * Alias of the function
+   */
+  default Optional<String> alias()
+  {
+    return Optional.empty();
+  }
+
   /**
    * Helper method for creating a {@link ExpressionValidationException} with the specified reason
    */
diff --git a/processing/src/test/java/org/apache/druid/math/expr/FunctionTest.java b/processing/src/test/java/org/apache/druid/math/expr/FunctionTest.java
index c16b12372b3b0..959694b406a74 100644
--- a/processing/src/test/java/org/apache/druid/math/expr/FunctionTest.java
+++ b/processing/src/test/java/org/apache/druid/math/expr/FunctionTest.java
@@ -952,6 +952,15 @@ public void testComplexDecode()
         ),
         expected
     );
+    // test with alias
+    assertExpr(
+        StringUtils.format(
+            "decode_base64_complex('%s', '%s')",
+            TypeStrategiesTest.NULLABLE_TEST_PAIR_TYPE.getComplexTypeName(),
+            StringUtils.encodeBase64String(bytes)
+        ),
+        expected
+    );
   }
 
   @Test
@@ -964,6 +973,13 @@ public void testComplexDecodeNull()
         ),
         null
     );
+    assertExpr(
+        StringUtils.format(
+            "decode_base64_complex('%s', null)",
+            TypeStrategiesTest.NULLABLE_TEST_PAIR_TYPE.getComplexTypeName()
+        ),
+        null
+    );
   }
 
   @Test
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java
index e8a2b796a26c9..f183d50aae425 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/OperatorConversions.java
@@ -54,6 +54,7 @@
 import org.apache.calcite.util.Static;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.Pair;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.aggregation.post.ExpressionPostAggregator;
@@ -565,6 +566,38 @@ public T build()
       );
     }
 
+    /**
+     * Creates a {@link SqlFunction} from this builder, along with an identical function registered under the given alias.
+     */
+    @SuppressWarnings("unchecked")
+    public Pair<T, T> buildWithAlias(String alias)
+    {
+      Preconditions.checkNotNull(alias, "Function Alias");
+      Preconditions.checkArgument(!alias.equals(name), "Function alias cannot be equal to name");
+      final IntSet nullableOperands = buildNullableOperands();
+      final SqlOperandTypeInference operandTypeInference = buildOperandTypeInference(nullableOperands);
+      final SqlOperandTypeChecker sqlOperandTypeChecker = buildOperandTypeChecker(nullableOperands);
+      Preconditions.checkNotNull(returnTypeInference, "returnTypeInference");
+      return Pair.of(
+          (T) new SqlFunction(
+              name,
+              kind,
+              returnTypeInference,
+              operandTypeInference,
+              sqlOperandTypeChecker,
+              functionCategory
+          ),
+          (T) new SqlFunction(
+              alias,
+              kind,
+              returnTypeInference,
+              operandTypeInference,
+              sqlOperandTypeChecker,
+              functionCategory
+          )
+      );
+    }
+
     protected IntSet buildNullableOperands()
     {
       // Create "nullableOperands" set including all optional arguments.
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/SqlOperatorConversion.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/SqlOperatorConversion.java
index ca269ee336fa2..4ae62b75ccf50 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/SqlOperatorConversion.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/SqlOperatorConversion.java
@@ -28,6 +28,7 @@
 import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry;
 
 import javax.annotation.Nullable;
+import java.util.Optional;
 
 public interface SqlOperatorConversion
 {
@@ -38,6 +39,16 @@ public interface SqlOperatorConversion
    */
   SqlOperator calciteOperator();
 
+  /**
+   * Returns the alias SQL operator corresponding to this function, if any. Should be a singleton.
+   *
+   * @return alias operator
+   */
+  default Optional<SqlOperator> aliasCalciteOperator()
+  {
+    return Optional.empty();
+  }
+
   /**
    * Translate a Calcite {@code RexNode} to a Druid expression.
    *
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ComplexDecodeBase64OperatorConversion.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ComplexDecodeBase64OperatorConversion.java
index 94b90ed9af8d8..c794184951db1 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ComplexDecodeBase64OperatorConversion.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ComplexDecodeBase64OperatorConversion.java
@@ -26,6 +26,7 @@
 import org.apache.calcite.sql.type.OperandTypes;
 import org.apache.calcite.sql.type.SqlReturnTypeInference;
 import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.druid.java.util.common.Pair;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.math.expr.BuiltInExprMacros;
 import org.apache.druid.segment.column.ColumnType;
@@ -37,6 +38,7 @@
 import org.apache.druid.sql.calcite.table.RowSignatures;
 
 import javax.annotation.Nullable;
+import java.util.Optional;
 
 public class ComplexDecodeBase64OperatorConversion implements SqlOperatorConversion
 {
@@ -50,7 +52,7 @@ public class ComplexDecodeBase64OperatorConversion implements SqlOperatorConversion
     );
   };
 
-  private static final SqlFunction SQL_FUNCTION = OperatorConversions
+  private static final Pair<SqlFunction, SqlFunction> SQL_FUNCTION_PAIR = OperatorConversions
       .operatorBuilder(StringUtils.toUpperCase(BuiltInExprMacros.ComplexDecodeBase64ExprMacro.NAME))
       .operandTypeChecker(
           OperandTypes.sequence(
@@ -61,13 +63,19 @@ public class ComplexDecodeBase64OperatorConversion implements SqlOperatorConversion
       )
       .returnTypeInference(ARBITRARY_COMPLEX_RETURN_TYPE_INFERENCE)
       .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
-      .build();
+      .buildWithAlias(StringUtils.toUpperCase(BuiltInExprMacros.ComplexDecodeBase64ExprMacro.ALIAS_NAME));
 
   @Override
   public SqlOperator calciteOperator()
   {
-    return SQL_FUNCTION;
+    return SQL_FUNCTION_PAIR.lhs;
+  }
+
+  @Override
+  public Optional<SqlOperator> aliasCalciteOperator()
+  {
+    return Optional.of(SQL_FUNCTION_PAIR.rhs);
   }
 
   @Nullable
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
index af6c2fc30d72b..f52369eb65805 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
@@ -445,6 +445,13 @@ public DruidOperatorTable(
           || this.operatorConversions.put(operatorKey, operatorConversion) != null) {
         throw new ISE("Cannot have two operators with key [%s]", operatorKey);
       }
+      if (operatorConversion.aliasCalciteOperator().isPresent()) {
+        final OperatorKey aliasOperatorKey = OperatorKey.of(operatorConversion.aliasCalciteOperator().get());
+        if (this.aggregators.containsKey(aliasOperatorKey)
+            || this.operatorConversions.put(aliasOperatorKey, operatorConversion) != null) {
+          throw new ISE("Cannot have two operators with alias key [%s]", aliasOperatorKey);
+        }
+      }
     }
 
     for (SqlOperatorConversion operatorConversion : STANDARD_OPERATOR_CONVERSIONS) {
@@ -456,6 +463,15 @@
       }
 
      this.operatorConversions.putIfAbsent(operatorKey, operatorConversion);
+
+      if (operatorConversion.aliasCalciteOperator().isPresent()) {
+        final OperatorKey aliasOperatorKey = OperatorKey.of(operatorConversion.aliasCalciteOperator().get());
+        // Don't complain if the alias already exists; we allow standard operator aliases to be overridden as well.
+        if (this.aggregators.containsKey(aliasOperatorKey)) {
+          continue;
+        }
+        this.operatorConversions.put(aliasOperatorKey, operatorConversion);
+      }
     }
   }
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
index 2d6dc078b9a60..8226bf99c945c 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
@@ -30,6 +30,7 @@
 import org.apache.druid.java.util.common.HumanReadableBytes;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.JodaUtils;
+import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.UOE;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.java.util.common.granularity.PeriodGranularity;
@@ -14368,36 +14369,40 @@ public void testTimeseriesQueryWithEmptyInlineDatasourceAndGranularity()
   public void testComplexDecode()
   {
     cannotVectorize();
-    testQuery(
-        "SELECT COMPLEX_DECODE_BASE64('hyperUnique',PARSE_JSON(TO_JSON_STRING(unique_dim1))) from druid.foo LIMIT 10",
-        ImmutableList.of(
-            Druids.newScanQueryBuilder()
-                  .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(querySegmentSpec(Filtration.eternity()))
-                  .columns("v0")
-                  .virtualColumns(
-                      expressionVirtualColumn(
-                          "v0",
-                          "complex_decode_base64('hyperUnique',parse_json(to_json_string(\"unique_dim1\")))",
-                          ColumnType.ofComplex("hyperUnique")
-                      )
-                  )
-                  .resultFormat(ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
-                  .legacy(false)
-                  .limit(10)
-                  .build()
-        ),
-        ImmutableList.of(
-            new Object[]{"\"AQAAAEAAAA==\""},
-            new Object[]{"\"AQAAAQAAAAHNBA==\""},
-            new Object[]{"\"AQAAAQAAAAOzAg==\""},
-            new Object[]{"\"AQAAAQAAAAFREA==\""},
-            new Object[]{"\"AQAAAQAAAACyEA==\""},
-            new Object[]{"\"AQAAAQAAAAEkAQ==\""}
-        )
-    );
+    for (String complexDecode : Arrays.asList("COMPLEX_DECODE_BASE64", "DECODE_BASE64_COMPLEX")) {
+      testQuery(
+          StringUtils.format(
+              "SELECT %s('hyperUnique',PARSE_JSON(TO_JSON_STRING(unique_dim1))) from druid.foo LIMIT 10",
+              complexDecode
+          ),
+          ImmutableList.of(
+              Druids.newScanQueryBuilder()
+                    .dataSource(CalciteTests.DATASOURCE1)
+                    .intervals(querySegmentSpec(Filtration.eternity()))
+                    .columns("v0")
+                    .virtualColumns(
+                        expressionVirtualColumn(
+                            "v0",
+                            "complex_decode_base64('hyperUnique',parse_json(to_json_string(\"unique_dim1\")))",
+                            ColumnType.ofComplex("hyperUnique")
+                        )
+                    )
+                    .resultFormat(ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
+                    .legacy(false)
+                    .limit(10)
+                    .build()
+          ),
+          ImmutableList.of(
+              new Object[]{"\"AQAAAEAAAA==\""},
+              new Object[]{"\"AQAAAQAAAAHNBA==\""},
+              new Object[]{"\"AQAAAQAAAAOzAg==\""},
+              new Object[]{"\"AQAAAQAAAAFREA==\""},
+              new Object[]{"\"AQAAAQAAAACyEA==\""},
+              new Object[]{"\"AQAAAQAAAAEkAQ==\""}
+          )
+      );
+    }
   }
-
   @Test
   public void testComplexDecodeAgg()
   {
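Usage note (illustrative only, not part of the patch): a minimal sketch of how the new alias is expected to resolve once the ExprMacroTable changes above are in place. The class name AliasResolutionSketch is hypothetical, and the sketch assumes the ExprMacroTable#get(String) lookup used by the expression parser; it registers the macro explicitly so it does not depend on the contents of BUILT_IN.

// Illustrative sketch only; not part of the patch.
import com.google.common.collect.ImmutableList;
import org.apache.druid.math.expr.BuiltInExprMacros;
import org.apache.druid.math.expr.ExprMacroTable;

public class AliasResolutionSketch
{
  public static void main(String[] args)
  {
    // Register the macro explicitly so the sketch does not rely on the BUILT_IN list.
    final ExprMacroTable macroTable = new ExprMacroTable(
        ImmutableList.of(new BuiltInExprMacros.ComplexDecodeBase64ExprMacro())
    );

    // With the constructor change in ExprMacroTable, the canonical name and the alias
    // map to the same macro instance, so both spellings behave identically.
    final ExprMacroTable.ExprMacro byName =
        macroTable.get(BuiltInExprMacros.ComplexDecodeBase64ExprMacro.NAME);
    final ExprMacroTable.ExprMacro byAlias =
        macroTable.get(BuiltInExprMacros.ComplexDecodeBase64ExprMacro.ALIAS_NAME);

    System.out.println(byName == byAlias); // expected: true
  }
}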