Merge branch 'apache:master' into ashibhardwaj/CVE-2024-47535
Authored by ashibhardwaj on Jan 6, 2025
Parents: 15787b6 + 09840ad · Commit: f6a3897
Showing 1,171 changed files with 43,149 additions and 8,783 deletions.
4 changes: 2 additions & 2 deletions .github/scripts/unit_tests_script.sh
@@ -21,8 +21,8 @@ unset _JAVA_OPTIONS

# Set MAVEN_OPTS for Surefire launcher.
MAVEN_OPTS='-Xmx2500m' ${MVN} test -pl ${MAVEN_PROJECTS} \
${MAVEN_SKIP} -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL} \
-DjfrProfilerArgLine="${JFR_PROFILER_ARG_LINE}"
${MAVEN_SKIP} \
-DjfrProfilerArgLine="${JFR_PROFILER_ARG_LINE}" -Pci
sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
free -m
${MVN} -pl ${MAVEN_PROJECTS} jacoco:report || { echo "coverage_failure=false" >> "$GITHUB_ENV" && false; }
8 changes: 4 additions & 4 deletions .github/workflows/cron-job-its.yml
@@ -61,7 +61,7 @@ jobs:
needs: build
with:
build_jdk: 17
runtime_jdk: 21.0.4
runtime_jdk: 17
testing_groups: -Dgroups=${{ matrix.testing_group }}
use_indexer: middleManager
group: ${{ matrix.testing_group }}
@@ -75,7 +75,7 @@ jobs:
needs: build
with:
build_jdk: 17
runtime_jdk: 21.0.4
runtime_jdk: 17
testing_groups: -Dgroups=${{ matrix.testing_group }}
use_indexer: indexer
group: ${{ matrix.testing_group }}
@@ -89,7 +89,7 @@ jobs:
needs: build
with:
build_jdk: 17
runtime_jdk: 21.0.4
runtime_jdk: 17
testing_groups: -Dgroups=${{ matrix.testing_group }}
use_indexer: middleManager
override_config_path: ./environment-configs/test-groups/prepopulated-data
@@ -104,7 +104,7 @@ jobs:
needs: build
with:
build_jdk: 17
runtime_jdk: 21.0.4
runtime_jdk: 17
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties
use_indexer: ${{ matrix.indexer }}
group: other
12 changes: 0 additions & 12 deletions .github/workflows/reusable-unit-tests.yml
@@ -21,11 +21,6 @@ on:
required: true
type: string
description: 'JDK version used to test Druid'
sql_compatibility:
required: false
type: boolean
default: true
description: 'For SQL compatibility'
module:
required: true
type: string
@@ -84,13 +79,6 @@ jobs:
run: |
export base_ref=${{ github.base_ref }}
echo "GITHUB_BASE_REF=${base_ref}" >> $GITHUB_ENV
# If sql_compatibilty is true, we want to set default_value_for_null
# which enables compatibility mode
if (${{ inputs.sql_compatibility }} == true); then
echo "DRUID_USE_DEFAULT_VALUE_FOR_NULL=false" >> $GITHUB_ENV
else
echo "DRUID_USE_DEFAULT_VALUE_FOR_NULL=true" >> $GITHUB_ENV
fi
- name: test profiling
run: |
2 changes: 1 addition & 1 deletion .github/workflows/revised-its.yml
@@ -87,7 +87,7 @@ jobs:
if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
with:
build_jdk: 17
runtime_jdk: 21.0.4
runtime_jdk: 17
use_indexer: middleManager
script: ./it.sh github S3DeepStorage
it: S3DeepStorage
2 changes: 1 addition & 1 deletion .github/workflows/static-checks.yml
@@ -71,7 +71,7 @@ jobs:
if: ${{ matrix.java == '17' }}
# errorprone requires JDK 11+
# Strict compilation requires more than 2 GB
run: ${MVN} clean -DstrictCompile compile test-compile --fail-at-end ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
run: ${MVN} clean -DstrictCompile compile test-compile --fail-at-end ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C

- name: maven install
if: ${{ matrix.java == '17' }}
10 changes: 2 additions & 8 deletions .github/workflows/unit-and-integration-tests-unified.yml
@@ -163,25 +163,19 @@ jobs:
matrix:
# Use JDK 21.0.4 to work around https://github.com/apache/druid/issues/17429
jdk: [ '11', '21.0.4' ]
name: "unit tests (jdk${{ matrix.jdk }}, sql-compat=true)"
name: "unit tests (jdk${{ matrix.jdk }})"
uses: ./.github/workflows/unit-tests.yml
needs: unit-tests
if: ${{ always() && (needs.unit-tests.result == 'success' || needs.unit-tests.outputs.continue_tests) }}
with:
jdk: ${{ matrix.jdk }}
sql_compatibility: true

unit-tests:
strategy:
fail-fast: false
matrix:
sql_compatibility: [ false, true ]
name: "unit tests (jdk17, sql-compat=${{ matrix.sql_compatibility }})"
name: "unit tests (jdk17)"
uses: ./.github/workflows/unit-tests.yml
needs: build
with:
jdk: 17
sql_compatibility: ${{ matrix.sql_compatibility }}

standard-its:
needs: unit-tests
9 changes: 0 additions & 9 deletions .github/workflows/unit-tests.yml
@@ -21,11 +21,6 @@ on:
required: true
type: string
description: 'JDK version used to test Druid'
sql_compatibility:
required: false
type: boolean
default: true
description: 'For SQL compatibility'
outputs:
continue_tests:
description: 'Flag to decide if next tests need to run incase coverage issue failures'
@@ -72,7 +67,6 @@ jobs:
uses: ./.github/workflows/reusable-unit-tests.yml
with:
jdk: ${{ inputs.jdk }}
sql_compatibility: ${{ inputs.sql_compatibility }}
module: indexing
maven_projects: 'indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service'

@@ -82,7 +76,6 @@ jobs:
uses: ./.github/workflows/reusable-unit-tests.yml
with:
jdk: ${{ inputs.jdk }}
sql_compatibility: ${{ inputs.sql_compatibility }}
module: processing
maven_projects: 'processing'

@@ -92,14 +85,12 @@ jobs:
uses: ./.github/workflows/reusable-unit-tests.yml
with:
jdk: ${{ inputs.jdk }}
sql_compatibility: ${{ inputs.sql_compatibility }}
module: server
maven_projects: 'server'

other_modules_test:
uses: ./.github/workflows/reusable-unit-tests.yml
with:
jdk: ${{ inputs.jdk }}
sql_compatibility: ${{ inputs.sql_compatibility }}
module: other
maven_projects: '!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests,!:druid-it-tools,!:druid-it-image,!:druid-it-cases'
5 changes: 0 additions & 5 deletions benchmarks/pom.xml
@@ -61,11 +61,6 @@
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-processing</artifactId>
@@ -68,15 +68,15 @@ public class FlattenJSONBenchmark
public void prepare() throws Exception
{
FlattenJSONBenchmarkUtil gen = new FlattenJSONBenchmarkUtil();
flatInputs = new ArrayList<String>();
flatInputs = new ArrayList<>();
for (int i = 0; i < NUM_EVENTS; i++) {
flatInputs.add(gen.generateFlatEvent());
}
nestedInputs = new ArrayList<String>();
nestedInputs = new ArrayList<>();
for (int i = 0; i < NUM_EVENTS; i++) {
nestedInputs.add(gen.generateNestedEvent());
}
jqInputs = new ArrayList<String>();
jqInputs = new ArrayList<>();
for (int i = 0; i < NUM_EVENTS; i++) {
jqInputs.add(gen.generateNestedEvent()); // reuse the same event as "nested"
}
@@ -67,7 +67,7 @@ public class GenericIndexedBenchmark

public static final int ITERATIONS = 10000;

static final ObjectStrategy<byte[]> BYTE_ARRAY_STRATEGY = new ObjectStrategy<byte[]>()
static final ObjectStrategy<byte[]> BYTE_ARRAY_STRATEGY = new ObjectStrategy<>()
{
@Override
public Class<byte[]> getClazz()
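
Like the ArrayList changes above, this hunk switches an anonymous inner class to the diamond operator, which the Java language has allowed since JDK 9. A minimal standalone sketch of the same idiom follows; the Comparator is an invented example, not code from this commit.

import java.util.Comparator;

public class DiamondAnonymousClassExample
{
  // Since JDK 9 the compiler can infer the type argument of an anonymous
  // inner class, so "new Comparator<String>()" can be written "new Comparator<>()".
  private static final Comparator<String> BY_LENGTH = new Comparator<>()
  {
    @Override
    public int compare(String left, String right)
    {
      return Integer.compare(left.length(), right.length());
    }
  };

  public static void main(String[] args)
  {
    // Prints a negative value because "ab" is shorter than "abc".
    System.out.println(BY_LENGTH.compare("ab", "abc"));
  }
}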
@@ -67,9 +67,9 @@ public class IncrementalIndexRowTypeBenchmark
private static AggregatorFactory[] aggs;
static final int DIMENSION_COUNT = 8;

private ArrayList<InputRow> longRows = new ArrayList<InputRow>();
private ArrayList<InputRow> floatRows = new ArrayList<InputRow>();
private ArrayList<InputRow> stringRows = new ArrayList<InputRow>();
private ArrayList<InputRow> longRows = new ArrayList<>();
private ArrayList<InputRow> floatRows = new ArrayList<>();
private ArrayList<InputRow> stringRows = new ArrayList<>();


static {
@@ -95,7 +95,7 @@ public class IncrementalIndexRowTypeBenchmark
private MapBasedInputRow getLongRow(long timestamp, int dimensionCount)
{
Random rng = ThreadLocalRandom.current();
List<String> dimensionList = new ArrayList<String>(dimensionCount);
List<String> dimensionList = new ArrayList<>(dimensionCount);
ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
for (int i = 0; i < dimensionCount; i++) {
String dimName = StringUtils.format("Dim_%d", i);
@@ -108,7 +108,7 @@ private MapBasedInputRow getLongRow(long timestamp, int dimensionCount)
private MapBasedInputRow getFloatRow(long timestamp, int dimensionCount)
{
Random rng = ThreadLocalRandom.current();
List<String> dimensionList = new ArrayList<String>(dimensionCount);
List<String> dimensionList = new ArrayList<>(dimensionCount);
ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
for (int i = 0; i < dimensionCount; i++) {
String dimName = StringUtils.format("Dim_%d", i);
Expand All @@ -121,7 +121,7 @@ private MapBasedInputRow getFloatRow(long timestamp, int dimensionCount)
private MapBasedInputRow getStringRow(long timestamp, int dimensionCount)
{
Random rng = ThreadLocalRandom.current();
List<String> dimensionList = new ArrayList<String>(dimensionCount);
List<String> dimensionList = new ArrayList<>(dimensionCount);
ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
for (int i = 0; i < dimensionCount; i++) {
String dimName = StringUtils.format("Dim_%d", i);
@@ -0,0 +1,156 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.benchmark;

import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.core.LoggingEmitter;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.Result;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.context.ResponseContext;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesResultValue;
import org.apache.druid.segment.realtime.appenderator.Appenderator;
import org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec;
import org.apache.druid.segment.realtime.appenderator.StreamAppenderatorTester;
import org.apache.druid.segment.realtime.sink.Committers;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

@State(Scope.Benchmark)
@Fork(value = 1)
@Warmup(iterations = 3)
@Measurement(iterations = 5)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public class SinkQuerySegmentWalkerBenchmark
{
static {
NullHandling.initializeForTests();
}

@Param({"10", "50", "100", "200"})
private int numFireHydrants;

private final LoggingEmitter loggingEmitter = new LoggingEmitter(new Logger(LoggingEmitter.class), LoggingEmitter.Level.INFO, new DefaultObjectMapper());
private final ServiceEmitter serviceEmitter = new ServiceEmitter("test", "test", loggingEmitter);
private File cacheDir;

private Appenderator appenderator;

@Setup(Level.Trial)
public void setup() throws Exception
{
final String userConfiguredCacheDir = System.getProperty("druid.benchmark.cacheDir", System.getenv("DRUID_BENCHMARK_CACHE_DIR"));
cacheDir = new File(userConfiguredCacheDir);
final StreamAppenderatorTester tester =
new StreamAppenderatorTester.Builder().maxRowsInMemory(1)
.basePersistDirectory(cacheDir)
.withServiceEmitter(serviceEmitter)
.build();

appenderator = tester.getAppenderator();
appenderator.startJob();

final SegmentIdWithShardSpec segmentIdWithShardSpec = new SegmentIdWithShardSpec(
StreamAppenderatorTester.DATASOURCE,
Intervals.of("2000/2001"),
"A",
new LinearShardSpec(0)
);

for (int i = 0; i < numFireHydrants; i++) {
final MapBasedInputRow inputRow = new MapBasedInputRow(
DateTimes.of("2000").getMillis(),
ImmutableList.of("dim"),
ImmutableMap.of(
"dim",
"bar_" + i,
"met",
1
)
);
appenderator.add(segmentIdWithShardSpec, inputRow, Suppliers.ofInstance(Committers.nil()));
}
}

@TearDown(Level.Trial)
public void tearDown() throws Exception
{
appenderator.close();
FileUtils.deleteDirectory(cacheDir);
}

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void emitSinkMetrics(Blackhole blackhole) throws Exception
{
{
final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
.dataSource(StreamAppenderatorTester.DATASOURCE)
.intervals(ImmutableList.of(Intervals.of("2000/2001")))
.aggregators(
Arrays.asList(
new LongSumAggregatorFactory("count", "count"),
new LongSumAggregatorFactory("met", "met")
)
)
.granularity(Granularities.DAY)
.build();

final List<Result<TimeseriesResultValue>> results =
QueryPlus.wrap(query1).run(appenderator, ResponseContext.createEmpty()).toList();
blackhole.consume(results);

serviceEmitter.flush();
}
}
}
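
The new benchmark above is a standard JMH benchmark, so it can be launched programmatically through the JMH Runner API. The launcher below is only a sketch and is not part of this commit; the parameter value and the cache-directory path are illustrative assumptions. The cache directory is passed as a forked-JVM argument because JMH runs benchmarks in a separate JVM, so a system property set on the launcher process alone would not reach setup().

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class SinkQuerySegmentWalkerBenchmarkRunner
{
  public static void main(String[] args) throws RunnerException
  {
    // Run only SinkQuerySegmentWalkerBenchmark with a single parameter value,
    // and supply the cache directory read in setup() through the
    // "druid.benchmark.cacheDir" system property of the forked JVM.
    Options options = new OptionsBuilder()
        .include("SinkQuerySegmentWalkerBenchmark")
        .param("numFireHydrants", "50")
        .jvmArgsAppend("-Ddruid.benchmark.cacheDir=/tmp/druid-benchmark-cache")
        .forks(1)
        .build();
    new Runner(options).run();
  }
}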
(The diff view is truncated; the remaining changed files in this commit are not shown.)
