Window Functions : Context Parameter to Enable Transfer of RACs over wire (#17150)
sreemanamala authored Sep 28, 2024
1 parent f8a72b9 commit 6616141
Showing 6 changed files with 33 additions and 8 deletions.
docs/querying/sql-window-functions.md (4 changes: 0 additions & 4 deletions)
@@ -28,10 +28,6 @@ description: Reference for window functions
Apache Druid supports two query languages: [Druid SQL](sql.md) and [native queries](querying.md).
This document describes the SQL language.

Window functions are an [experimental](../development/experimental.md) feature.
Development and testing are still at early stage. Feel free to try window functions and provide your feedback.
Windows functions are not currently supported by multi-stage-query engine so you cannot use them in SQL-based ingestion.

:::

Window functions in Apache Druid produce values based upon the relationship of one row within a window of rows to the other rows within the same window. A window is a group of related rows within a result set. For example, rows with the same value for a specific dimension.
@@ -90,6 +90,11 @@ public class PlannerContext
*/
public static final String CTX_SQL_OUTER_LIMIT = "sqlOuterLimit";

/**
* Key to enable transfer of RACs over wire.
*/
public static final String CTX_ENABLE_RAC_TRANSFER_OVER_WIRE = "enableRACOverWire";

/**
* Context key for {@link PlannerContext#isUseBoundsAndSelectors()}.
*/
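For illustration, a caller opts in by putting the new key into a query context map. The snippet below is a hypothetical sketch, not part of this commit; it only assumes the constant added above and the ImmutableMap style used in the tests later in this diff.

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.druid.sql.calcite.planner.PlannerContext;

// Hypothetical helper, shown only as a sketch: builds a query context that opts in
// to transferring RACs over the wire. The key resolves to the string "enableRACOverWire".
class RacOverWireContextExample
{
  static Map<String, Object> racOverWireContext()
  {
    return ImmutableMap.of(PlannerContext.CTX_ENABLE_RAC_TRANSFER_OVER_WIRE, true);
  }
}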
@@ -1497,13 +1497,16 @@ private WindowOperatorQuery toWindowQuery()
// This would cause MSQ queries to plan as
// Window over an inner scan and avoid
// leaf operators
boolean pushLeafOperator = plannerContext.queryContext()
.getBoolean(PlannerContext.CTX_ENABLE_RAC_TRANSFER_OVER_WIRE, false)
&& !plannerContext.featureAvailable(EngineFeature.WINDOW_LEAF_OPERATOR);
return new WindowOperatorQuery(
dataSource,
new LegacySegmentSpec(Intervals.ETERNITY),
plannerContext.queryContextMap(),
windowing.getSignature(),
operators,
plannerContext.featureAvailable(EngineFeature.WINDOW_LEAF_OPERATOR) ? ImmutableList.of() : null
pushLeafOperator ? null : ImmutableList.of()
);
}

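Read together, the new condition is true only when the caller set enableRACOverWire and the engine does not report the WINDOW_LEAF_OPERATOR feature; in that case the WindowOperatorQuery constructor receives null for its leaf-operator argument, otherwise an empty list. A minimal, hypothetical restatement of that decision (the helper class is not part of the commit, only the boolean logic is taken from the diff above):

// Hypothetical sketch of the decision above, isolated from the planner:
//
//   enableRACOverWire | WINDOW_LEAF_OPERATOR available | pushLeafOperator | leaf-operator argument
//   ------------------+--------------------------------+------------------+-----------------------
//   false             | no                             | false            | ImmutableList.of()
//   false             | yes                            | false            | ImmutableList.of()
//   true              | no                             | true             | null
//   true              | yes                            | false            | ImmutableList.of()
class PushLeafOperatorDecision
{
  static boolean pushLeafOperator(boolean enableRacOverWire, boolean windowLeafOperatorAvailable)
  {
    return enableRacOverWire && !windowLeafOperatorAvailable;
  }
}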
@@ -16133,7 +16133,6 @@ public void testScanAndSortOnJoin()
.run();
}

@NotYetSupported(Modes.UNSUPPORTED_DATASOURCE)
@Test
public void testWindowingOverJoin()
{
@@ -37,6 +37,7 @@
import org.apache.druid.sql.calcite.CalciteWindowQueryTest.WindowQueryTestInputClass.TestType;
import org.apache.druid.sql.calcite.QueryTestRunner.QueryResults;
import org.apache.druid.sql.calcite.QueryVerification.QueryResultsVerifier;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.junit.Assert;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
@@ -296,10 +297,32 @@ public void testFailure_partitionByMVD()
);

assertEquals(
"Encountered a multi value column. Window processing does not support MVDs. "
"Encountered a multi value column [v0]. Window processing does not support MVDs. "
+ "Consider using UNNEST or MV_TO_ARRAY.",
e.getMessage()
);

final DruidException e1 = Assert.assertThrows(
DruidException.class,
() -> testBuilder()
.sql("select cityName, countryName, array_to_mv(array[1,length(cityName)]),\n"
+ "row_number() over (partition by array_to_mv(array[1,length(cityName)]) order by countryName, cityName)\n"
+ "from wikipedia\n"
+ "where countryName in ('Austria', 'Republic of Korea') and cityName is not null\n"
+ "order by 1, 2, 3")
.queryContext(ImmutableMap.of(
QueryContexts.ENABLE_DEBUG, true,
QueryContexts.CTX_SQL_STRINGIFY_ARRAYS, false,
PlannerContext.CTX_ENABLE_RAC_TRANSFER_OVER_WIRE, true
))
.run()
);

assertEquals(
"Encountered a multi value column. Window processing does not support MVDs. "
+ "Consider using UNNEST or MV_TO_ARRAY.",
e1.getMessage()
);
}

private WindowOperatorQuery getWindowOperatorQuery(List<Query<?>> queries)
@@ -89,7 +89,6 @@ enum Modes
RESULT_MISMATCH(AssertionError.class, "(assertResulEquals|AssertionError: column content mismatch)"),
LONG_CASTING(AssertionError.class, "expected: java.lang.Long"),
UNSUPPORTED_NULL_ORDERING(DruidException.class, "(A|DE)SCENDING ordering with NULLS (LAST|FIRST)"),
UNSUPPORTED_DATASOURCE(DruidException.class, "WindowOperatorQuery must run on top of a query or inline data source"),
UNION_WITH_COMPLEX_OPERAND(DruidException.class, "Only Table and Values are supported as inputs for Union"),
UNION_MORE_STRICT_ROWTYPE_CHECK(DruidException.class, "Row signature mismatch in Union inputs"),
UNNEST_NOT_SUPPORTED_CORRELATE_CONVERSION(DruidException.class, "Missing conversion( is|s are) LogicalCorrelate"),
