Skip to content

Commit

Permalink
remove druid.sql.planner.serializeComplexValues config in favor of always serializing complex values (#17549)
Browse files Browse the repository at this point in the history
  • Loading branch information
clintropolis authored Dec 11, 2024
1 parent f3d7f1a commit 3c1b488
Show file tree
Hide file tree
Showing 12 changed files with 60 additions and 101 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,6 @@
},
"sqlResultsContext": {
"timeZone": "UTC",
"serializeComplexValues": true,
"stringifyArrays": false
},
"sqlTypeNames": [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,6 @@ public class PlannerConfig
@JsonProperty
private String nativeQuerySqlPlanningMode = QueryContexts.NATIVE_QUERY_SQL_PLANNING_MODE_COUPLED; // can be COUPLED or DECOUPLED

private boolean serializeComplexValues = true;

public int getMaxNumericInFilters()
{
return maxNumericInFilters;
Expand Down Expand Up @@ -112,11 +110,6 @@ public DateTimeZone getSqlTimeZone()
return sqlTimeZone;
}

public boolean shouldSerializeComplexValues()
{
return serializeComplexValues;
}

public boolean isComputeInnerJoinCostAsFilter()
{
return computeInnerJoinCostAsFilter;
Expand Down Expand Up @@ -170,7 +163,6 @@ public boolean equals(final Object o)
useApproximateCountDistinct == that.useApproximateCountDistinct &&
useApproximateTopN == that.useApproximateTopN &&
requireTimeCondition == that.requireTimeCondition &&
serializeComplexValues == that.serializeComplexValues &&
Objects.equals(sqlTimeZone, that.sqlTimeZone) &&
useNativeQueryExplain == that.useNativeQueryExplain &&
forceExpressionVirtualColumns == that.forceExpressionVirtualColumns &&
Expand All @@ -191,7 +183,6 @@ public int hashCode()
useApproximateTopN,
requireTimeCondition,
sqlTimeZone,
serializeComplexValues,
useNativeQueryExplain,
forceExpressionVirtualColumns,
nativeQuerySqlPlanningMode
Expand All @@ -207,7 +198,6 @@ public String toString()
", useApproximateTopN=" + useApproximateTopN +
", requireTimeCondition=" + requireTimeCondition +
", sqlTimeZone=" + sqlTimeZone +
", serializeComplexValues=" + serializeComplexValues +
", useNativeQueryExplain=" + useNativeQueryExplain +
", nativeQuerySqlPlanningMode=" + nativeQuerySqlPlanningMode +
'}';
Expand Down Expand Up @@ -242,7 +232,6 @@ public static class Builder
private boolean useNativeQueryExplain;
private boolean forceExpressionVirtualColumns;
private int maxNumericInFilters;
private boolean serializeComplexValues;
private String nativeQuerySqlPlanningMode;

public Builder(PlannerConfig base)
Expand All @@ -261,7 +250,6 @@ public Builder(PlannerConfig base)
useNativeQueryExplain = base.isUseNativeQueryExplain();
forceExpressionVirtualColumns = base.isForceExpressionVirtualColumns();
maxNumericInFilters = base.getMaxNumericInFilters();
serializeComplexValues = base.shouldSerializeComplexValues();
nativeQuerySqlPlanningMode = base.getNativeQuerySqlPlanningMode();
}

Expand Down Expand Up @@ -319,12 +307,6 @@ public Builder authorizeSystemTablesDirectly(boolean option)
return this;
}

public Builder serializeComplexValues(boolean option)
{
this.serializeComplexValues = option;
return this;
}

public Builder useNativeQueryExplain(boolean option)
{
this.useNativeQueryExplain = option;
Expand Down Expand Up @@ -421,7 +403,6 @@ public PlannerConfig build()
config.useNativeQueryExplain = useNativeQueryExplain;
config.maxNumericInFilters = maxNumericInFilters;
config.forceExpressionVirtualColumns = forceExpressionVirtualColumns;
config.serializeComplexValues = serializeComplexValues;
config.nativeQuerySqlPlanningMode = nativeQuerySqlPlanningMode;
return config;
}
Expand Down
23 changes: 3 additions & 20 deletions sql/src/main/java/org/apache/druid/sql/calcite/run/SqlResults.java
Original file line number Diff line number Diff line change
Expand Up @@ -134,12 +134,7 @@ public static Object coerce(
throw cannotCoerce(value, sqlTypeName, fieldName);
}
} else if (sqlTypeName == SqlTypeName.OTHER) {
// Complex type, try to serialize if we should, else print class name
if (context.isSerializeComplexValues()) {
coercedValue = coerceUsingObjectMapper(jsonMapper, value, sqlTypeName, fieldName);
} else {
coercedValue = value.getClass().getName();
}
coercedValue = coerceUsingObjectMapper(jsonMapper, value, sqlTypeName, fieldName);
} else if (sqlTypeName == SqlTypeName.ARRAY) {
if (context.isStringifyArrays()) {
if (value instanceof String) {
Expand Down Expand Up @@ -271,26 +266,22 @@ private static DruidException cannotCoerce(final Object value, final SqlTypeName
public static class Context
{
private final DateTimeZone timeZone;
private final boolean serializeComplexValues;
private final boolean stringifyArrays;

@JsonCreator
public Context(
@JsonProperty("timeZone") final DateTimeZone timeZone,
@JsonProperty("serializeComplexValues") final boolean serializeComplexValues,
@JsonProperty("stringifyArrays") final boolean stringifyArrays
)
{
this.timeZone = timeZone;
this.serializeComplexValues = serializeComplexValues;
this.stringifyArrays = stringifyArrays;
}

public static Context fromPlannerContext(final PlannerContext plannerContext)
{
return new Context(
plannerContext.getTimeZone(),
plannerContext.getPlannerConfig().shouldSerializeComplexValues(),
plannerContext.isStringifyArrays()
);
}
Expand All @@ -301,12 +292,6 @@ public DateTimeZone getTimeZone()
return timeZone;
}

@JsonProperty
public boolean isSerializeComplexValues()
{
return serializeComplexValues;
}

@JsonProperty
public boolean isStringifyArrays()
{
Expand All @@ -323,23 +308,21 @@ public boolean equals(Object o)
return false;
}
Context context = (Context) o;
return serializeComplexValues == context.serializeComplexValues
&& stringifyArrays == context.stringifyArrays
return stringifyArrays == context.stringifyArrays
&& Objects.equals(timeZone, context.timeZone);
}

@Override
public int hashCode()
{
return Objects.hash(timeZone, serializeComplexValues, stringifyArrays);
return Objects.hash(timeZone, stringifyArrays);
}

@Override
public String toString()
{
return "Context{" +
"timeZone=" + timeZone +
", serializeComplexValues=" + serializeComplexValues +
", stringifyArrays=" + stringifyArrays +
'}';
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ public void setUp()
{
executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s"));

final PlannerConfig plannerConfig = PlannerConfig.builder().serializeComplexValues(false).build();
final PlannerConfig plannerConfig = PlannerConfig.builder().build();
final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(
conglomerate,
walker,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
import org.apache.druid.error.DruidException.Category;
import org.apache.druid.error.DruidException.Persona;
import org.apache.druid.error.DruidExceptionMatcher;
import org.apache.druid.hll.VersionOneHyperLogLogCollector;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
Expand Down Expand Up @@ -145,7 +144,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public static String NULL_STRING;
public static Float NULL_FLOAT;
public static Long NULL_LONG;
public static final String HLLC_STRING = VersionOneHyperLogLogCollector.class.getName();

@BeforeAll
public static void setupNullValues()
Expand All @@ -158,8 +156,6 @@ public static void setupNullValues()
public static final Logger log = new Logger(BaseCalciteQueryTest.class);

public static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig();
public static final PlannerConfig PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE =
PlannerConfig.builder().serializeComplexValues(false).build();

public static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION =
PlannerConfig.builder().requireTimeCondition(true).build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -631,7 +631,7 @@ public void testExplainSelectConstantExpression()
public void testSelectStarWithDimFilter()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
"SELECT * FROM druid.foo WHERE dim1 > 'd' OR dim2 = 'a'",
CalciteTests.REGULAR_USER_AUTH_RESULT,
Expand All @@ -652,9 +652,9 @@ public void testSelectStarWithDimFilter()
.build()
),
ImmutableList.of(
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0d, HLLC_STRING},
new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4.0f, 4.0d, HLLC_STRING},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5.0f, 5.0d, HLLC_STRING}
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0d, "\"AQAAAEAAAA==\""},
new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4.0f, 4.0d, "\"AQAAAQAAAAFREA==\""},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5.0f, 5.0d, "\"AQAAAQAAAACyEA==\""}
)
);
}
Expand Down Expand Up @@ -1140,7 +1140,7 @@ public void testSelectStarFromLookup()
public void testSelectStar()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
"SELECT * FROM druid.foo",
CalciteTests.REGULAR_USER_AUTH_RESULT,
Expand All @@ -1155,12 +1155,12 @@ public void testSelectStar()
.build()
),
ImmutableList.of(
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, HLLC_STRING},
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING},
new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING},
new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, HLLC_STRING},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, HLLC_STRING},
new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, HLLC_STRING}
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, "\"AQAAAEAAAA==\""},
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, "\"AQAAAQAAAAHNBA==\""},
new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, "\"AQAAAQAAAAOzAg==\""},
new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, "\"AQAAAQAAAAFREA==\""},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, "\"AQAAAQAAAACyEA==\""},
new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, "\"AQAAAQAAAAEkAQ==\""}
)
);
}
Expand Down Expand Up @@ -1356,7 +1356,7 @@ public void testExplainSelectStar()
public void testSelectStarWithLimit()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
"SELECT * FROM druid.foo LIMIT 2",
CalciteTests.REGULAR_USER_AUTH_RESULT,
Expand All @@ -1372,8 +1372,8 @@ public void testSelectStarWithLimit()
.build()
),
ImmutableList.of(
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0, HLLC_STRING},
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING}
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0, "\"AQAAAEAAAA==\""},
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, "\"AQAAAQAAAAHNBA==\""}
)
);
}
Expand All @@ -1382,7 +1382,7 @@ public void testSelectStarWithLimit()
public void testSelectStarWithLimitAndOffset()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
"SELECT * FROM druid.foo LIMIT 2 OFFSET 1",
CalciteTests.REGULAR_USER_AUTH_RESULT,
Expand All @@ -1399,8 +1399,8 @@ public void testSelectStarWithLimitAndOffset()
.build()
),
ImmutableList.of(
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING},
new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING}
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, "\"AQAAAQAAAAHNBA==\""},
new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, "\"AQAAAQAAAAOzAg==\""}
)
);
}
Expand Down Expand Up @@ -1464,7 +1464,7 @@ public void testSelectWithExpressionFilter()
public void testSelectStarWithLimitTimeDescending()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
"SELECT * FROM druid.foo ORDER BY __time DESC LIMIT 2",
CalciteTests.REGULAR_USER_AUTH_RESULT,
Expand All @@ -1481,8 +1481,8 @@ public void testSelectStarWithLimitTimeDescending()
.build()
),
ImmutableList.of(
new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6d, HLLC_STRING},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5d, HLLC_STRING}
new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6d, "\"AQAAAQAAAAEkAQ==\""},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5d, "\"AQAAAQAAAACyEA==\""}
)
);
}
Expand All @@ -1491,7 +1491,7 @@ public void testSelectStarWithLimitTimeDescending()
public void testSelectStarWithoutLimitTimeAscending()
{
testQuery(
PLANNER_CONFIG_DEFAULT_NO_COMPLEX_SERDE,
PLANNER_CONFIG_DEFAULT,
QUERY_CONTEXT_DEFAULT,
"SELECT * FROM druid.foo ORDER BY __time",
CalciteTests.REGULAR_USER_AUTH_RESULT,
Expand All @@ -1508,12 +1508,12 @@ public void testSelectStarWithoutLimitTimeAscending()
.build()
),
ImmutableList.of(
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, HLLC_STRING},
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING},
new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING},
new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, HLLC_STRING},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, HLLC_STRING},
new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, HLLC_STRING}
new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, "\"AQAAAEAAAA==\""},
new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, "\"AQAAAQAAAAHNBA==\""},
new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, "\"AQAAAQAAAAOzAg==\""},
new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, "\"AQAAAQAAAAFREA==\""},
new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, "\"AQAAAQAAAACyEA==\""},
new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, "\"AQAAAQAAAAEkAQ==\""}
)
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.error.DruidException;
Expand All @@ -42,7 +43,7 @@

public class SqlResultsTest extends InitializedNullHandlingTest
{
private static final SqlResults.Context DEFAULT_CONTEXT = new SqlResults.Context(DateTimeZone.UTC, true, false);
private static final SqlResults.Context DEFAULT_CONTEXT = new SqlResults.Context(DateTimeZone.UTC, false);

private ObjectMapper jsonMapper;

Expand Down Expand Up @@ -246,6 +247,12 @@ public void testMayNotCoerceList()
Assert.assertEquals("hello", SqlResults.coerceArrayToList("hello", false));
}

@Test
public void testContextEqualsAndHashcode()
{
EqualsVerifier.forClass(SqlResults.Context.class).usingGetClass().verify();
}

private void assertCoerce(Object expected, Object toCoerce, SqlTypeName typeName)
{
Assert.assertEquals(
Expand Down
Loading

0 comments on commit 3c1b488

Please sign in to comment.