
Commit

remove isDescending from Query interface, move to TimeseriesQuery (#16917)

* remove isDescending from Query interface, since it is only actually settable and usable by TimeseriesQuery
clintropolis authored Aug 20, 2024
1 parent fb7103c commit 518f642
Showing 13 changed files with 28 additions and 49 deletions.
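
The net effect of the change: result ordering on the generic Query/BaseQuery side is now always natural order, and the `descending` flag (with the reversed ordering it implies) lives only on `TimeseriesQuery`. Below is a minimal, self-contained sketch of that shape; `SimpleQuery`, `SimpleTimeseriesQuery`, and `DescendingFlagSketch` are illustrative stand-ins, not the actual Druid classes.

```java
import java.util.Comparator;

// Illustrative stand-ins for Query/BaseQuery and TimeseriesQuery, not the real Druid classes.
interface SimpleQuery<T extends Comparable<T>> {
  // After this commit, BaseQuery#getResultOrdering always returns natural order.
  default Comparator<T> resultOrdering() {
    return Comparator.naturalOrder();
  }
}

final class SimpleTimeseriesQuery implements SimpleQuery<Long> {
  // The descending flag now lives only on the timeseries query.
  private final boolean descending;

  SimpleTimeseriesQuery(boolean descending) {
    this.descending = descending;
  }

  boolean isDescending() {
    return descending;
  }

  // Mirrors the getResultOrdering override added to TimeseriesQuery in this commit.
  @Override
  public Comparator<Long> resultOrdering() {
    Comparator<Long> natural = Comparator.naturalOrder();
    return descending ? natural.reversed() : natural;
  }
}

public class DescendingFlagSketch {
  public static void main(String[] args) {
    SimpleQuery<Long> ascending = new SimpleTimeseriesQuery(false);
    SimpleQuery<Long> descending = new SimpleTimeseriesQuery(true);
    System.out.println(ascending.resultOrdering().compare(1L, 2L));  // negative: natural order
    System.out.println(descending.resultOrdering().compare(1L, 2L)); // positive: reversed order
  }
}
```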
@@ -146,12 +146,6 @@ public Map<String, Object> getContext()
return query.getContext();
}

- @Override
- public boolean isDescending()
- {
- return query.isDescending();
- }

@Override
public Ordering<T> getResultOrdering()
{
21 changes: 4 additions & 17 deletions processing/src/main/java/org/apache/druid/query/BaseQuery.java
@@ -57,7 +57,6 @@ public static void checkInterrupted()
public static final String SUB_QUERY_ID = "subQueryId";
public static final String SQL_QUERY_ID = "sqlQueryId";
private final DataSource dataSource;
- private final boolean descending;
private final QueryContext context;
private final QuerySegmentSpec querySegmentSpec;
private volatile Duration duration;
@@ -69,13 +68,12 @@ public BaseQuery(
Map<String, Object> context
)
{
- this(dataSource, querySegmentSpec, false, context, Granularities.ALL);
+ this(dataSource, querySegmentSpec, context, Granularities.ALL);
}

public BaseQuery(
DataSource dataSource,
QuerySegmentSpec querySegmentSpec,
- boolean descending,
Map<String, Object> context,
Granularity granularity
)
@@ -87,7 +85,6 @@ public BaseQuery(
this.dataSource = dataSource;
this.context = QueryContext.of(context);
this.querySegmentSpec = querySegmentSpec;
- this.descending = descending;
this.granularity = granularity;
}

@@ -98,14 +95,6 @@ public DataSource getDataSource()
return dataSource;
}

- @JsonProperty
- @Override
- @JsonInclude(Include.NON_DEFAULT)
- public boolean isDescending()
- {
- return descending;
- }

@JsonProperty("intervals")
public QuerySegmentSpec getQuerySegmentSpec()
{
@@ -205,8 +194,7 @@ public static Map<String, Object> computeOverriddenContext(
@SuppressWarnings("unchecked") // assumes T is Comparable; see method javadoc
public Ordering<T> getResultOrdering()
{
- Ordering retVal = Ordering.natural();
- return descending ? retVal.reverse() : retVal;
+ return (Ordering<T>) Ordering.natural();
}

@Nullable
@@ -253,8 +241,7 @@ public boolean equals(Object o)
BaseQuery<?> baseQuery = (BaseQuery<?>) o;

// Must use getDuration() instead of "duration" because duration is lazily computed.
- return descending == baseQuery.descending &&
- Objects.equals(dataSource, baseQuery.dataSource) &&
+ return Objects.equals(dataSource, baseQuery.dataSource) &&
Objects.equals(context, baseQuery.context) &&
Objects.equals(querySegmentSpec, baseQuery.querySegmentSpec) &&
Objects.equals(getDuration(), baseQuery.getDuration()) &&
@@ -265,6 +252,6 @@ public boolean equals(Object o)
public int hashCode()
{
// Must use getDuration() instead of "duration" because duration is lazily computed.
- return Objects.hash(dataSource, descending, context, querySegmentSpec, getDuration(), granularity);
+ return Objects.hash(dataSource, context, querySegmentSpec, getDuration(), granularity);
}
}
2 changes: 0 additions & 2 deletions processing/src/main/java/org/apache/druid/query/Query.java
@@ -166,8 +166,6 @@ default HumanReadableBytes getContextHumanReadableBytes(String key, HumanReadabl
return context().getHumanReadableBytes(key, defaultValue);
}

- boolean isDescending();

/**
* Comparator that represents the order in which results are generated from the
* {@link QueryRunnerFactory#createRunner(Segment)} and
@@ -202,7 +202,7 @@ private GroupByQuery(
final Map<String, Object> context
)
{
- super(dataSource, querySegmentSpec, false, context, granularity);
+ super(dataSource, querySegmentSpec, context, granularity);

this.virtualColumns = VirtualColumns.nullToEmpty(virtualColumns);
this.dimFilter = dimFilter;
@@ -68,7 +68,7 @@ public SearchQuery(
@JsonProperty("context") Map<String, Object> context
)
{
- super(dataSource, querySegmentSpec, false, context, Granularities.nullToAll(granularity));
+ super(dataSource, querySegmentSpec, context, Granularities.nullToAll(granularity));
Preconditions.checkNotNull(querySegmentSpec, "Must specify an interval");

this.dimFilter = dimFilter;
@@ -110,12 +110,6 @@ public Map<String, Object> getContext()
throw new RuntimeException(REMOVED_ERROR_MESSAGE);
}

- @Override
- public boolean isDescending()
- {
- throw new RuntimeException(REMOVED_ERROR_MESSAGE);
- }

@Override
public Ordering<Object> getResultOrdering()
{
@@ -25,6 +25,7 @@
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
+ import com.google.common.collect.Ordering;
import org.apache.commons.lang.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.query.BaseQuery;
@@ -68,6 +69,7 @@ public class TimeseriesQuery extends BaseQuery<Result<TimeseriesResultValue>>
private final DimFilter dimFilter;
private final List<AggregatorFactory> aggregatorSpecs;
private final List<PostAggregator> postAggregatorSpecs;
+ private final boolean descending;
private final int limit;

@JsonCreator
@@ -84,7 +86,7 @@ public TimeseriesQuery(
@JsonProperty("context") Map<String, Object> context
)
{
- super(dataSource, querySegmentSpec, descending, context, granularity);
+ super(dataSource, querySegmentSpec, context, granularity);

// The below should be executed after context is initialized.
final String timestampField = getTimestampResultField();
@@ -97,6 +99,7 @@ public TimeseriesQuery(
this.aggregatorSpecs,
postAggregatorSpecs == null ? ImmutableList.of() : postAggregatorSpecs
);
+ this.descending = descending;
this.limit = (limit == 0) ? Integer.MAX_VALUE : limit;
Preconditions.checkArgument(this.limit > 0, "limit must be greater than 0");
}
@@ -148,13 +151,21 @@ public List<PostAggregator> getPostAggregatorSpecs()
return postAggregatorSpecs;
}

+ @JsonProperty
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
+ public boolean isDescending()
+ {
+ return descending;
+ }

@JsonProperty("limit")
@JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = LimitJsonIncludeFilter.class)
public int getLimit()
{
return limit;
}


public boolean isGrandTotal()
{
return context().getBoolean(CTX_GRAND_TOTAL, false);
@@ -195,6 +206,13 @@ public Set<String> getRequiredColumns()
);
}

+ @Override
+ public Ordering<Result<TimeseriesResultValue>> getResultOrdering()
+ {
+ Ordering<Result<TimeseriesResultValue>> retVal = Ordering.natural();
+ return descending ? retVal.reverse() : retVal;
+ }

@Override
public TimeseriesQuery withQuerySegmentSpec(QuerySegmentSpec querySegmentSpec)
{
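
Since the `@JsonProperty`/`@JsonInclude(NON_DEFAULT)` annotations move with the getter in the hunk above, `descending` is still a serialized field on timeseries queries and is still omitted when false. A small stand-alone sketch of that Jackson behaviour, using a `FakeTimeseriesQuery` stand-in rather than the real class:

```java
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

// FakeTimeseriesQuery is only a stand-in to show the annotation behaviour;
// it is not the real org.apache.druid.query.timeseries.TimeseriesQuery.
public class DescendingJsonSketch {
  static class FakeTimeseriesQuery {
    private final boolean descending;

    FakeTimeseriesQuery(boolean descending) {
      this.descending = descending;
    }

    // Same annotations as the getter added to TimeseriesQuery in this commit:
    // NON_DEFAULT drops the field when it holds the primitive default (false).
    @JsonProperty
    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    public boolean isDescending() {
      return descending;
    }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    System.out.println(mapper.writeValueAsString(new FakeTimeseriesQuery(false))); // {}
    System.out.println(mapper.writeValueAsString(new FakeTimeseriesQuery(true)));  // {"descending":true}
  }
}
```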
@@ -228,7 +228,7 @@ public BinaryOperator<Result<TimeseriesResultValue>> createMergeFn(
@Override
public Comparator<Result<TimeseriesResultValue>> createResultComparator(Query<Result<TimeseriesResultValue>> query)
{
- return ResultGranularTimestampComparator.create(query.getGranularity(), query.isDescending());
+ return ResultGranularTimestampComparator.create(query.getGranularity(), ((TimeseriesQuery) query).isDescending());
}

private Result<TimeseriesResultValue> getNullTimeseriesResultValue(TimeseriesQuery query)
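
With `isDescending()` gone from the `Query` interface, code that holds a generic query and needs the flag has to narrow to `TimeseriesQuery` first, which is what the tool chest above now does. A sketch of that pattern with illustrative stand-in types (`QueryLike` and `TimeseriesLike` are not Druid's interfaces):

```java
import java.util.Comparator;

public class ComparatorSelectionSketch {
  // Illustrative stand-ins: QueryLike plays the role of Query, TimeseriesLike of TimeseriesQuery.
  interface QueryLike {}

  interface TimeseriesLike extends QueryLike {
    boolean isDescending();
  }

  // Only a timeseries query can ask for descending timestamps; every other
  // query type gets the ascending comparator, as in the tool chest change above.
  static Comparator<Long> timestampComparator(QueryLike query) {
    boolean descending = query instanceof TimeseriesLike
                         && ((TimeseriesLike) query).isDescending();
    Comparator<Long> ascending = Comparator.naturalOrder();
    return descending ? ascending.reversed() : ascending;
  }

  public static void main(String[] args) {
    TimeseriesLike descendingQuery = () -> true;  // single abstract method, so a lambda works
    QueryLike otherQuery = new QueryLike() {};
    System.out.println(timestampComparator(descendingQuery).compare(1L, 2L)); // positive: reversed
    System.out.println(timestampComparator(otherQuery).compare(1L, 2L));      // negative: ascending
  }
}
```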
@@ -76,7 +76,7 @@ public TopNQuery(
@JsonProperty("context") Map<String, Object> context
)
{
- super(dataSource, querySegmentSpec, false, context, granularity);
+ super(dataSource, querySegmentSpec, context, granularity);

Preconditions.checkNotNull(dimensionSpec, "dimensionSpec can't be null");
Preconditions.checkNotNull(topNMetricSpec, "must specify a metric");
@@ -124,7 +124,7 @@ public QueryRunner<Result<TopNResultValue>> mergeResults(QueryRunner<Result<TopN
{
final ResultMergeQueryRunner<Result<TopNResultValue>> delegateRunner = new ResultMergeQueryRunner<>(
runner,
- query -> ResultGranularTimestampComparator.create(query.getGranularity(), query.isDescending()),
+ query -> ResultGranularTimestampComparator.create(query.getGranularity(), false),
query -> {
TopNQuery topNQuery = (TopNQuery) query;
return new TopNBinaryFn(
@@ -464,12 +464,6 @@ public Map<String, Object> getContext()
return context;
}

- @Override
- public boolean isDescending()
- {
- return false;
- }

@Override
public Ordering<Integer> getResultOrdering()
{
@@ -68,7 +68,6 @@ public void logNativeQuery(RequestLogLine requestLogLine) throws IOException
MDC.put("hasFilters", Boolean.toString(query.hasFilters()));
MDC.put("remoteAddr", requestLogLine.getRemoteAddr());
MDC.put("duration", query.getDuration().toString());
MDC.put("descending", Boolean.toString(query.isDescending()));
if (setContextMDC) {
final Iterable<Map.Entry<String, Object>> entries = query.getContext() == null
? ImmutableList.of()
@@ -219,7 +219,6 @@ public void testLoggingMDC() throws Exception
Assert.assertEquals("false", map.get("hasFilters"));
Assert.assertEquals("fake", map.get("queryType"));
Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
Assert.assertEquals("false", map.get("descending"));
Assert.assertEquals("false", map.get("isNested"));
Assert.assertNull(map.get("foo"));
}
@@ -235,7 +234,6 @@ public void testLoggingMDCContext() throws Exception
Assert.assertEquals("false", map.get("hasFilters"));
Assert.assertEquals("fake", map.get("queryType"));
Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
Assert.assertEquals("false", map.get("descending"));
Assert.assertEquals("false", map.get("isNested"));
Assert.assertEquals("bar", map.get("foo"));
}
@@ -256,7 +254,6 @@ public void testNestedQueryLoggingMDC() throws Exception
Assert.assertEquals("false", map.get("hasFilters"));
Assert.assertEquals("fake", map.get("queryType"));
Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
Assert.assertEquals("false", map.get("descending"));
Assert.assertEquals("true", map.get("isNested"));
Assert.assertNull(map.get("foo"));
}
@@ -278,7 +275,6 @@ public void testNestedNestedQueryLoggingMDC() throws Exception
Assert.assertEquals("fake", map.get("queryType"));
Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
Assert.assertEquals("true", map.get("isNested"));
Assert.assertEquals("false", map.get("descending"));
Assert.assertNull(map.get("foo"));
}

@@ -299,7 +295,6 @@ public void testUnionQueryLoggingMDC() throws Exception
Assert.assertEquals("false", map.get("hasFilters"));
Assert.assertEquals("fake", map.get("queryType"));
Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
Assert.assertEquals("false", map.get("descending"));
Assert.assertNull(map.get("foo"));
}

