Vectorize earliest aggregator for both numeric and string types (#14408)
* Vectorizing earliest for numeric

* Vectorizing earliest string aggregator

* checkstyle fix

* Removing unnecessary exceptions

* Ignoring tests in MSQ, as earliest is not supported for numeric types there

* Fixing benchmarks

* Updating tests, as MSQ does not support earliest in some cases

* Addressing review comments by adding the following:
1. Checking capabilities first before creating selectors (see the sketch after the changed-files summary below)
2. Removing Mockito in tests for numeric first aggs
3. Removing unnecessary tests

* Addressing issues for dictionary-encoded single-value string columns, where we can use the dictionary IDs instead of the entire string

* Adding a flag for the multi-value dimension selector

* Addressing comments

* 1 more change

* Handling review comments part 1

* Handling review comments and fixing a correctness issue in latest_by when the time expression is not necessarily in sorted order

* Updating numeric first vector agg

* Revert "Updating numeric first vector agg"

This reverts commit 4291709.

* Updating code for correctness issues

* fixing an issue with latest agg

* Adding more comments and removing an unnecessary check

* Addressing null checks for the tie selector and setting vectorize to false only for quantile sketches
somu-imply authored Sep 5, 2023
1 parent 9d6ca61 commit 8088a76
Showing 20 changed files with 1,813 additions and 44 deletions.
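
The review notes above mention checking column capabilities before creating selectors, and the factory changes below wire the new vector aggregators in through `canVectorize` / `factorizeVector`. A minimal sketch of how those two entry points pair with the pre-existing buffered path is shown here; the helper class and method names are hypothetical and not part of this commit.

```java
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.VectorAggregator;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;

// Hypothetical helper, for orientation only: not code from this commit.
public class VectorDispatchSketch
{
  /** Returns a vectorized aggregator when the factory supports it, else null. */
  public static VectorAggregator tryVectorized(
      AggregatorFactory factory,
      ColumnInspector inspector,
      VectorColumnSelectorFactory vectorSelectorFactory
  )
  {
    if (factory.canVectorize(inspector)) {
      // For the EARLIEST factories in this commit this returns e.g. a
      // DoubleFirstVectorAggregator, or a "nil" aggregator for non-numeric columns.
      return factory.factorizeVector(vectorSelectorFactory);
    }
    return null;
  }

  /** Non-vectorized fallback: the existing buffered path. */
  public static BufferAggregator buffered(AggregatorFactory factory, ColumnSelectorFactory selectorFactory)
  {
    return factory.factorizeBuffered(selectorFactory);
  }
}
```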
=== changed file ===
@@ -197,7 +197,7 @@ public String getFormatString()
"SELECT TIME_SHIFT(MILLIS_TO_TIMESTAMP(long4), 'PT1H', 1), string2, SUM(long1 * double4) FROM foo GROUP BY 1,2 ORDER BY 3",
// 37: time shift + expr agg (group by), uniform distribution high cardinality
"SELECT TIME_SHIFT(MILLIS_TO_TIMESTAMP(long5), 'PT1H', 1), string2, SUM(long1 * double4) FROM foo GROUP BY 1,2 ORDER BY 3",
// 38: LATEST aggregator
// 38: LATEST aggregator long
"SELECT LATEST(long1) FROM foo",
// 39: LATEST aggregator double
"SELECT LATEST(double4) FROM foo",
@@ -207,7 +207,13 @@ public String getFormatString()
"SELECT LATEST(float3), LATEST(long1), LATEST(double4) FROM foo",
// 42,43: filter numeric nulls
"SELECT SUM(long5) FROM foo WHERE long5 IS NOT NULL",
"SELECT string2, SUM(long5) FROM foo WHERE long5 IS NOT NULL GROUP BY 1"
"SELECT string2, SUM(long5) FROM foo WHERE long5 IS NOT NULL GROUP BY 1",
// 44: EARLIEST aggregator long
"SELECT EARLIEST(long1) FROM foo",
// 45: EARLIEST aggregator double
"SELECT EARLIEST(double4) FROM foo",
// 46: EARLIEST aggregator float
"SELECT EARLIEST(float3) FROM foo"
);

@Param({"5000000"})
@@ -265,7 +271,11 @@ public String getFormatString()
"40",
"41",
"42",
"43"
"43",
"44",
"45",
"46",
"47"
})
private String query;

=== changed file ===
@@ -119,7 +119,8 @@
}
],
"context": {
"useCache": "true",
"useCache": "true",
"vectorize": "false",
"populateCache": "true",
"timeout": 360000
}
@@ -270,7 +271,8 @@
}
],
"context": {
"useCache": "true",
"useCache": "true",
"vectorize": "false",
"populateCache": "true",
"timeout": 360000
}
@@ -514,7 +516,8 @@
"metric": "unique_users",
"threshold": 3,
"context": {
"useCache": "true",
"useCache": "true",
"vectorize": "false",
"populateCache": "true",
"timeout": 360000
}
@@ -693,7 +696,8 @@
"metric": "count",
"threshold": 3,
"context": {
"useCache": "true",
"useCache": "true",
"vectorize": "false",
"populateCache": "true",
"timeout": 360000
}
@@ -878,7 +882,8 @@
"metric": "count",
"threshold": 3,
"context": {
"useCache": "true",
"useCache": "true",
"vectorize": "false",
"populateCache": "true",
"timeout": 360000
}
@@ -1243,7 +1248,8 @@
"orderBy": ["robot", "namespace"]
},
"context": {
"useCache": "true",
"useCache": "true",
"vectorize": "false",
"populateCache": "true",
"timeout": 360000
}
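
These integration-test query specs now pin "vectorize": "false" in their contexts. Per the last commit note ("only vectorize false for quantile sketches"), the likely reason is that some aggregators in these specs, such as the quantile sketches, still have no vectorized implementation, so the tests stay on the non-vectorized path. A minimal Java rendering of such a context map, purely for illustration (not code from this commit):

```java
import java.util.Map;

// Hypothetical illustration: the context keys set by the JSON specs above,
// expressed as a Java map. "vectorize": "false" keeps the query on the
// non-vectorized engine; the other keys are unchanged from the specs.
public class TestQueryContextSketch
{
  public static Map<String, Object> context()
  {
    return Map.of(
        "useCache", "true",
        "vectorize", "false",
        "populateCache", "true",
        "timeout", 360000
    );
  }

  public static void main(String[] args)
  {
    System.out.println(context());
  }
}
```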
=== changed file ===
@@ -29,14 +29,21 @@
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorUtil;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.VectorAggregator;
import org.apache.druid.query.aggregation.any.NumericNilVectorAggregator;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import org.apache.druid.segment.BaseDoubleColumnValueSelector;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.NilColumnValueSelector;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.Types;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import org.apache.druid.segment.vector.VectorValueSelector;

import javax.annotation.Nullable;
import java.nio.ByteBuffer;
@@ -96,6 +103,12 @@ public DoubleFirstAggregatorFactory(
this.storeDoubleAsFloat = ColumnHolder.storeDoubleAsFloat();
}

@Override
public boolean canVectorize(ColumnInspector columnInspector)
{
return true;
}

@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
{
@@ -124,6 +137,21 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
}
}

@Override
public VectorAggregator factorizeVector(
VectorColumnSelectorFactory columnSelectorFactory
)
{
ColumnCapabilities capabilities = columnSelectorFactory.getColumnCapabilities(fieldName);
if (Types.isNumeric(capabilities)) {
VectorValueSelector valueSelector = columnSelectorFactory.makeValueSelector(fieldName);
VectorValueSelector timeSelector = columnSelectorFactory.makeValueSelector(
timeColumn);
return new DoubleFirstVectorAggregator(timeSelector, valueSelector);
}
return NumericNilVectorAggregator.doubleNilVectorAggregator();
}

@Override
public Comparator getComparator()
{
=== new file ===
@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.query.aggregation.first;

import org.apache.druid.collections.SerializablePair;
import org.apache.druid.segment.vector.VectorValueSelector;

import javax.annotation.Nullable;
import java.nio.ByteBuffer;

public class DoubleFirstVectorAggregator extends NumericFirstVectorAggregator
{

public DoubleFirstVectorAggregator(VectorValueSelector timeSelector, VectorValueSelector valueSelector)
{
super(timeSelector, valueSelector);
}

@Override
public void initValue(ByteBuffer buf, int position)
{
buf.putDouble(position, 0);
}


@Override
void putValue(ByteBuffer buf, int position, int index)
{
double firstValue = valueSelector.getDoubleVector()[index];
buf.putDouble(position, firstValue);
}


/**
* @return The object as a pair with the position and the value stored at the position in the buffer.
*/
@Nullable
@Override
public Object get(ByteBuffer buf, int position)
{
final boolean rhsNull = isValueNull(buf, position);
return new SerializablePair<>(buf.getLong(position), rhsNull ? null : buf.getDouble(position + VALUE_OFFSET));
}
}
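
The shared base class NumericFirstVectorAggregator is not included in this excerpt, so the exact buffer layout is an assumption; what the subclass above implies is a long timestamp at `position`, the value at `position + VALUE_OFFSET`, and a null flag checked by `isValueNull`, updated with a per-row "earliest wins" comparison (consistent with the commit note about not assuming the time expression is sorted). The self-contained sketch below illustrates that idea with plain ByteBuffer operations; it is an illustration of the layout, not the actual base-class code, and it omits the null flag.

```java
import java.nio.ByteBuffer;

// Standalone sketch of the "earliest wins" buffer layout implied by
// DoubleFirstVectorAggregator.get(): a long timestamp at `position` and the
// value at `position + VALUE_OFFSET`. The null flag is omitted for brevity.
public class EarliestPairSketch
{
  static final int VALUE_OFFSET = Long.BYTES; // assumed: value sits right after the timestamp

  public static void main(String[] args)
  {
    final ByteBuffer buf = ByteBuffer.allocate(Long.BYTES + Double.BYTES);
    final int position = 0;

    // init: start with the latest possible time so any real row replaces it
    buf.putLong(position, Long.MAX_VALUE);
    buf.putDouble(position + VALUE_OFFSET, 0);

    // aggregate a few (time, value) rows; order does not matter because each
    // row's time is compared against the earliest time seen so far
    final long[] times = {1_693_872_000_000L, 1_693_868_400_000L, 1_693_875_600_000L};
    final double[] values = {10.0, 42.5, 7.0};
    for (int i = 0; i < times.length; i++) {
      if (times[i] < buf.getLong(position)) {
        buf.putLong(position, times[i]);
        buf.putDouble(position + VALUE_OFFSET, values[i]);
      }
    }

    // prints the earliest time and its value: 1693868400000 -> 42.5
    System.out.println(buf.getLong(position) + " -> " + buf.getDouble(position + VALUE_OFFSET));
  }
}
```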
=== changed file ===
@@ -29,14 +29,21 @@
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorUtil;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.VectorAggregator;
import org.apache.druid.query.aggregation.any.NumericNilVectorAggregator;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import org.apache.druid.segment.BaseFloatColumnValueSelector;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.NilColumnValueSelector;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.Types;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import org.apache.druid.segment.vector.VectorValueSelector;

import javax.annotation.Nullable;
import java.nio.ByteBuffer;
@@ -122,6 +129,24 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
}
}

@Override
public VectorAggregator factorizeVector(VectorColumnSelectorFactory columnSelectorFactory)
{
ColumnCapabilities capabilities = columnSelectorFactory.getColumnCapabilities(fieldName);
if (Types.isNumeric(capabilities)) {
VectorValueSelector valueSelector = columnSelectorFactory.makeValueSelector(fieldName);
VectorValueSelector timeSelector = columnSelectorFactory.makeValueSelector(timeColumn);
return new FloatFirstVectorAggregator(timeSelector, valueSelector);
}
return NumericNilVectorAggregator.floatNilVectorAggregator();
}

@Override
public boolean canVectorize(ColumnInspector columnInspector)
{
return true;
}

@Override
public Comparator getComparator()
{
=== new file ===
@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.query.aggregation.first;

import org.apache.druid.collections.SerializablePair;
import org.apache.druid.segment.vector.VectorValueSelector;

import javax.annotation.Nullable;
import java.nio.ByteBuffer;

public class FloatFirstVectorAggregator extends NumericFirstVectorAggregator
{

public FloatFirstVectorAggregator(VectorValueSelector timeSelector, VectorValueSelector valueSelector)
{
super(timeSelector, valueSelector);
}

@Override
public void initValue(ByteBuffer buf, int position)
{
buf.putFloat(position, 0);
}


@Override
void putValue(ByteBuffer buf, int position, int index)
{
float firstValue = valueSelector.getFloatVector()[index];
buf.putFloat(position, firstValue);
}


/**
* @return The object as a pair with the position and the value stored at the position in the buffer.
*/
@Nullable
@Override
public Object get(ByteBuffer buf, int position)
{
final boolean rhsNull = isValueNull(buf, position);
return new SerializablePair<>(buf.getLong(position), rhsNull ? null : buf.getFloat(position + VALUE_OFFSET));
}
}
=== changed file ===
@@ -29,14 +29,21 @@
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorUtil;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.VectorAggregator;
import org.apache.druid.query.aggregation.any.NumericNilVectorAggregator;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import org.apache.druid.segment.BaseLongColumnValueSelector;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.NilColumnValueSelector;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.Types;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import org.apache.druid.segment.vector.VectorValueSelector;

import javax.annotation.Nullable;
import java.nio.ByteBuffer;
@@ -121,6 +128,25 @@ public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
}
}

@Override
public VectorAggregator factorizeVector(VectorColumnSelectorFactory columnSelectorFactory)
{
ColumnCapabilities capabilities = columnSelectorFactory.getColumnCapabilities(fieldName);
if (Types.isNumeric(capabilities)) {
VectorValueSelector valueSelector = columnSelectorFactory.makeValueSelector(fieldName);
VectorValueSelector timeSelector = columnSelectorFactory.makeValueSelector(
timeColumn);
return new LongFirstVectorAggregator(timeSelector, valueSelector);
}
return NumericNilVectorAggregator.longNilVectorAggregator();
}

@Override
public boolean canVectorize(ColumnInspector columnInspector)
{
return true;
}

@Override
public Comparator getComparator()
{
(remaining changed files not shown)
