Commit

add JSON_QUERY_ARRAY function to pluck ARRAY<COMPLEX<json>> out of COMPLEX<json>
clintropolis committed Dec 8, 2023
1 parent db3a633 commit 46469e0
Showing 10 changed files with 439 additions and 1 deletion.
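In short: the commit adds a new native expression macro, json_query_array, and a matching SQL function, JSON_QUERY_ARRAY(expr, path), which return the value matched at a JSONPath as ARRAY<COMPLEX<json>> rather than as a single COMPLEX<json> the way JSON_QUERY does, plus the typing and SQL-planner plumbing needed to carry that array type through. A minimal usage sketch of the native expression, mirroring the unit test added below (the macro table and input bindings are assumptions borrowed from NestedDataExpressionsTest; the equivalent SQL call would be JSON_QUERY_ARRAY("nester", '$.x'), with the column name taken from that test):

// Sketch only: assumes MACRO_TABLE registers JsonQueryArrayExprMacro and that
// inputBindings binds "nester" to nested JSON data, as in NestedDataExpressionsTest.
Expr expr = Parser.parse("json_query_array(nester, '$.x')", MACRO_TABLE);
ExprEval eval = expr.eval(inputBindings);
// eval.type()  -> ARRAY<COMPLEX<json>>
// eval.value() -> Object[] containing the elements matched at $.x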
@@ -83,6 +83,7 @@ public class ExpressionModule implements Module
.add(NestedDataExpressions.JsonPathsExprMacro.class)
.add(NestedDataExpressions.JsonValueExprMacro.class)
.add(NestedDataExpressions.JsonQueryExprMacro.class)
.add(NestedDataExpressions.JsonQueryArrayExprMacro.class)
.add(NestedDataExpressions.ToJsonStringExprMacro.class)
.add(NestedDataExpressions.ParseJsonExprMacro.class)
.add(NestedDataExpressions.TryParseJsonExprMacro.class)
@@ -28,6 +28,7 @@
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.ExprType;
import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.math.expr.ExpressionTypeFactory;
import org.apache.druid.math.expr.NamedFunction;
import org.apache.druid.segment.nested.NestedPathFinder;
import org.apache.druid.segment.nested.NestedPathPart;
@@ -44,6 +45,8 @@

public class NestedDataExpressions
{
private static ExpressionType JSON_ARRAY = ExpressionTypeFactory.getInstance().ofArray(ExpressionType.NESTED_DATA);

public static class JsonObjectExprMacro implements ExprMacroTable.ExprMacro
{
public static final String NAME = "json_object";
@@ -591,6 +594,120 @@ public ExpressionType getOutputType(InputBindingInspector inspector)
}
}

public static class JsonQueryArrayExprMacro implements ExprMacroTable.ExprMacro
{
public static final String NAME = "json_query_array";

@Override
public String name()
{
return NAME;
}

@Override
public Expr apply(List<Expr> args)
{
if (args.get(1).isLiteral()) {
return new JsonQueryArrayExpr(args);
} else {
return new JsonQueryArrayDynamicExpr(args);
}
}

final class JsonQueryArrayExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
{
private final List<NestedPathPart> parts;

public JsonQueryArrayExpr(List<Expr> args)
{
super(name(), args);
this.parts = getJsonPathPartsFromLiteral(JsonQueryArrayExprMacro.this, args.get(1));
}

@Override
public ExprEval eval(ObjectBinding bindings)
{
ExprEval input = args.get(0).eval(bindings);
final Object value = NestedPathFinder.find(unwrap(input), parts);
if (value instanceof List) {
return ExprEval.ofArray(
JSON_ARRAY,
ExprEval.bestEffortArray((List) value).asArray()
);
}
return ExprEval.ofArray(
JSON_ARRAY,
ExprEval.bestEffortOf(value).asArray()
);
}

@Override
public Expr visit(Shuttle shuttle)
{
List<Expr> newArgs = args.stream().map(x -> x.visit(shuttle)).collect(Collectors.toList());
if (newArgs.get(1).isLiteral()) {
return shuttle.visit(new JsonQueryArrayExpr(newArgs));
} else {
return shuttle.visit(new JsonQueryArrayDynamicExpr(newArgs));
}
}

@Nullable
@Override
public ExpressionType getOutputType(InputBindingInspector inspector)
{
// treat all output as JSON (COMPLEX<json>) typed
return ExpressionType.NESTED_DATA;
}
}

final class JsonQueryArrayDynamicExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
{
public JsonQueryArrayDynamicExpr(List<Expr> args)
{
super(name(), args);
}

@Override
public ExprEval eval(ObjectBinding bindings)
{
ExprEval input = args.get(0).eval(bindings);
ExprEval path = args.get(1).eval(bindings);
final List<NestedPathPart> parts = NestedPathFinder.parseJsonPath(path.asString());
final Object value = NestedPathFinder.find(unwrap(input), parts);
if (value instanceof List) {
return ExprEval.ofArray(
JSON_ARRAY,
ExprEval.bestEffortArray((List) value).asArray()
);
}
return ExprEval.ofArray(
JSON_ARRAY,
ExprEval.bestEffortOf(value).asArray()
);
}

@Override
public Expr visit(Shuttle shuttle)
{
List<Expr> newArgs = args.stream().map(x -> x.visit(shuttle)).collect(Collectors.toList());
if (newArgs.get(1).isLiteral()) {
return shuttle.visit(new JsonQueryArrayExpr(newArgs));
} else {
return shuttle.visit(new JsonQueryArrayDynamicExpr(newArgs));
}
}

@Nullable
@Override
public ExpressionType getOutputType(InputBindingInspector inspector)
{
// treat all output as ARRAY<COMPLEX<json>> typed
return JSON_ARRAY;
}
}
}

public static class JsonPathsExprMacro implements ExprMacroTable.ExprMacro
{
public static final String NAME = "json_paths";
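The macro pre-parses the JSONPath when the second argument is a literal (JsonQueryArrayExpr) and falls back to parsing it on every evaluation when it is not (JsonQueryArrayDynamicExpr). Both variants coerce whatever the path matches into an array: a List is converted element-by-element with ExprEval.bestEffortArray, anything else is wrapped into a single-element array with ExprEval.bestEffortOf(...).asArray(). A small sketch of that coercion step in isolation, using a made-up match value (the calls are the ones used in eval() above):

// Sketch of the array coercion in eval(); `match` stands in for the value
// returned by NestedPathFinder.find(...) and is hypothetical here.
final Object match = Arrays.asList("a", "b");
final Object[] elements = match instanceof List
    ? ExprEval.bestEffortArray((List) match).asArray()   // one element per list entry
    : ExprEval.bestEffortOf(match).asArray();             // non-list match -> single-element array
final ExprEval coerced = ExprEval.ofArray(
    ExpressionTypeFactory.getInstance().ofArray(ExpressionType.NESTED_DATA),  // ARRAY<COMPLEX<json>>
    elements
);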
@@ -64,7 +64,7 @@ public static ColumnType ofType(TypeSignature<ValueType> type)
case STRING:
return ColumnType.STRING_ARRAY;
default:
throw new ISE("Unsupported expression type[%s]", type.asTypeString());
return ColumnType.ofArray(ofType(type.getElementType()));
}
case COMPLEX:
return INTERNER.intern(new ColumnType(ValueType.COMPLEX, type.getComplexTypeName(), null));
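Previously the default branch above threw an ISE for array element types that are not handled explicitly; it now recurses on the element type, so the expression type ARRAY<COMPLEX<json>> produced by json_query_array can be converted into a column type. A minimal sketch of the type this yields (the rendered type string is an assumption about Druid's usual formatting):

// Sketch: the column type the recursive ofType(...) call now yields for
// an array of nested data.
ColumnType arrayOfJson = ColumnType.ofArray(ColumnType.NESTED_DATA);
// arrayOfJson.isArray()        -> true
// arrayOfJson.getElementType() -> ColumnType.NESTED_DATA
// arrayOfJson.asTypeString()   -> "ARRAY<COMPLEX<json>>" (assumed rendering)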
@@ -1228,6 +1228,13 @@ public ColumnCapabilities capabilities(String columnName)
public ColumnCapabilities capabilities(ColumnInspector inspector, String columnName)
{
if (processFromRaw) {
if (expectedType != null && expectedType.isArray() && ColumnType.NESTED_DATA.equals(expectedType.getElementType())) {
// arrays of objects!
return ColumnCapabilitiesImpl.createDefault()
.setType(ColumnType.ofArray(ColumnType.NESTED_DATA))
.setHasMultipleValues(false)
.setHasNulls(true);
}
// JSON_QUERY always returns a StructuredData
return ColumnCapabilitiesImpl.createDefault()
.setType(ColumnType.NESTED_DATA)
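With processFromRaw set (the JSON_QUERY / JSON_QUERY_ARRAY planning path), the reported capabilities previously always advertised COMPLEX<json>; the new branch reports ARRAY<COMPLEX<json>> whenever the expected output type is an array of nested data. A small sketch of the guard added above, with a hypothetical expected type:

// Sketch of the new guard, using a hypothetical expected type.
ColumnType expectedType = ColumnType.ofArray(ColumnType.NESTED_DATA);
boolean isNestedArray = expectedType != null
    && expectedType.isArray()
    && ColumnType.NESTED_DATA.equals(expectedType.getElementType());
// isNestedArray -> true: capabilities report ARRAY<COMPLEX<json>>, single-valued,
// with nulls allowed, instead of plain COMPLEX<json>.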
@@ -29,6 +29,7 @@
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.ExpressionProcessingException;
import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.math.expr.ExpressionTypeFactory;
import org.apache.druid.math.expr.InputBindings;
import org.apache.druid.math.expr.Parser;
import org.apache.druid.segment.nested.StructuredData;
@@ -37,6 +38,7 @@
import org.junit.Test;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class NestedDataExpressionsTest extends InitializedNullHandlingTest
@@ -49,6 +51,7 @@ public class NestedDataExpressionsTest extends InitializedNullHandlingTest
new NestedDataExpressions.JsonObjectExprMacro(),
new NestedDataExpressions.JsonValueExprMacro(),
new NestedDataExpressions.JsonQueryExprMacro(),
new NestedDataExpressions.JsonQueryArrayExprMacro(),
new NestedDataExpressions.ToJsonStringExprMacro(JSON_MAPPER),
new NestedDataExpressions.ParseJsonExprMacro(JSON_MAPPER),
new NestedDataExpressions.TryParseJsonExprMacro(JSON_MAPPER)
@@ -329,6 +332,37 @@ public void testJsonQueryExpression()
Assert.assertEquals(ExpressionType.NESTED_DATA, eval.type());
}

@Test
public void testJsonQueryArrayExpression()
{
final ExpressionType NESTED_ARRAY = ExpressionTypeFactory.getInstance().ofArray(ExpressionType.NESTED_DATA);

Expr expr = Parser.parse("json_query_array(nest, '$.x')", MACRO_TABLE);
ExprEval eval = expr.eval(inputBindings);
Assert.assertArrayEquals(new Object[]{100L}, (Object[]) eval.value());
Assert.assertEquals(NESTED_ARRAY, eval.type());

expr = Parser.parse("json_query_array(nester, '$.x')", MACRO_TABLE);
eval = expr.eval(inputBindings);
Assert.assertArrayEquals(((List) NESTER.get("x")).toArray(), (Object[]) eval.value());
Assert.assertEquals(NESTED_ARRAY, eval.type());

expr = Parser.parse("json_query_array(nester, array_offset(json_paths(nester), 0))", MACRO_TABLE);
eval = expr.eval(inputBindings);
Assert.assertArrayEquals(((List) NESTER.get("x")).toArray(), (Object[]) eval.value());
Assert.assertEquals(NESTED_ARRAY, eval.type());

expr = Parser.parse("json_query_array(nesterer, '$.y')", MACRO_TABLE);
eval = expr.eval(inputBindings);
Assert.assertArrayEquals(((List) NESTERER.get("y")).toArray(), (Object[]) eval.value());
Assert.assertEquals(NESTED_ARRAY, eval.type());

expr = Parser.parse("array_length(json_query_array(nesterer, '$.y'))", MACRO_TABLE);
eval = expr.eval(inputBindings);
Assert.assertEquals(3L, eval.value());
Assert.assertEquals(ExpressionType.LONG, eval.type());
}

@Test
public void testParseJsonTryParseJson() throws JsonProcessingException
{
@@ -48,11 +48,13 @@
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.InputBindings;
import org.apache.druid.query.expression.NestedDataExpressions;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.nested.NestedPathFinder;
import org.apache.druid.segment.nested.NestedPathPart;
import org.apache.druid.segment.virtual.NestedFieldVirtualColumn;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
@@ -78,6 +80,16 @@ public class NestedDataOperatorConversions
true
);

public static final SqlReturnTypeInference NESTED_ARRAY_RETURN_TYPE_INFERENCE = opBinding ->
opBinding.getTypeFactory().createArrayType(
RowSignatures.makeComplexType(
opBinding.getTypeFactory(),
ColumnType.NESTED_DATA,
true
),
-1
);

public static class JsonPathsOperatorConversion implements SqlOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
@@ -231,6 +243,26 @@ public DruidExpression toDruidExpression(
}
}

public static class JsonQueryArrayOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder(StringUtils.toUpperCase(NestedDataExpressions.JsonQueryArrayExprMacro.NAME))
.operandTypeChecker(
OperandTypes.family(
SqlTypeFamily.ANY,
SqlTypeFamily.CHARACTER
)
)
.returnTypeInference(NESTED_ARRAY_RETURN_TYPE_INFERENCE)
.functionCategory(SqlFunctionCategory.SYSTEM)
.build();

public JsonQueryArrayOperatorConversion()
{
super(SQL_FUNCTION, NestedDataExpressions.JsonQueryArrayExprMacro.NAME);
}
}

/**
* The {@link org.apache.calcite.sql2rel.StandardConvertletTable} converts json_value(.. RETURNING type) into
* cast(json_value_any(..), type).
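On the SQL side, JsonQueryArrayOperatorConversion is a DirectOperatorConversion, so a SQL call JSON_QUERY_ARRAY(expr, path) is translated by name into the native json_query_array expression added earlier in this commit; the operand checker accepts any expression plus a character-typed path, and the new return type inference declares the result as an array of the COMPLEX<json> SQL type. A rough sketch of what that inference builds, given a hypothetical typeFactory obtained from the operator binding:

// Sketch of NESTED_ARRAY_RETURN_TYPE_INFERENCE: an unbounded array (-1) whose
// element type is the COMPLEX<json> RelDataType. `typeFactory` is assumed to
// come from opBinding.getTypeFactory(), as in the lambda above.
RelDataType elementType = RowSignatures.makeComplexType(typeFactory, ColumnType.NESTED_DATA, true);
RelDataType jsonArrayType = typeFactory.createArrayType(elementType, -1);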
@@ -333,6 +333,7 @@ public class DruidOperatorTable implements SqlOperatorTable
.add(new NestedDataOperatorConversions.JsonKeysOperatorConversion())
.add(new NestedDataOperatorConversions.JsonPathsOperatorConversion())
.add(new NestedDataOperatorConversions.JsonQueryOperatorConversion())
.add(new NestedDataOperatorConversions.JsonQueryArrayOperatorConversion())
.add(new NestedDataOperatorConversions.JsonValueAnyOperatorConversion())
.add(new NestedDataOperatorConversions.JsonValueBigintOperatorConversion())
.add(new NestedDataOperatorConversions.JsonValueDoubleOperatorConversion())
@@ -19,6 +19,7 @@

package org.apache.druid.sql.calcite.planner;

import com.google.common.collect.ImmutableSet;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.prepare.BaseDruidSqlValidator;
import org.apache.calcite.prepare.CalciteCatalogReader;
@@ -29,10 +30,14 @@
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeMappingRule;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.sql.calcite.run.EngineFeature;

import java.util.Map;

/**
* Druid extended SQL validator. (At present, it doesn't actually
* have any extensions yet, but it will soon.)
@@ -80,6 +85,26 @@ public void validateCall(SqlCall call, SqlValidatorScope scope)
super.validateCall(call, scope);
}

@Override
public SqlTypeMappingRule getTypeMappingRule()
{
SqlTypeMappingRule base = super.getTypeMappingRule();
return new SqlTypeMappingRule()
{
@Override
public Map<SqlTypeName, ImmutableSet<SqlTypeName>> getTypeMapping()
{
return base.getTypeMapping();
}

@Override
public boolean canApplyFrom(SqlTypeName to, SqlTypeName from)
{
return SqlTypeMappingRule.super.canApplyFrom(to, from);
}
};
}

private CalciteContextException buildCalciteContextException(String message, SqlCall call)
{
SqlParserPos pos = call.getParserPosition();
@@ -25,7 +25,10 @@
import com.google.inject.Provider;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.sql.calcite.table.RowSignatures;

import java.util.Map;
import java.util.Set;
@@ -66,6 +69,10 @@ public DruidSchemaCatalog get()
for (NamedSchema schema : namedSchemas) {
rootSchema.add(schema.getSchemaName(), schema.getSchema());
}
rootSchema.add(
"JSON",
relDataTypeFactory -> new RowSignatures.ComplexSqlType(SqlTypeName.OTHER, ColumnType.NESTED_DATA, true)
);
return new DruidSchemaCatalog(rootSchema, ImmutableMap.copyOf(schemasByName));
}
}