From c5397d024ad124d90437470f1303f8a47c07b138 Mon Sep 17 00:00:00 2001
From: Parth Agrawal <98726675+pagrawal10@users.noreply.github.com>
Date: Mon, 18 Dec 2023 11:36:35 +0530
Subject: [PATCH] [OBSDATA-3156] Apply Confluent Patches on top of Druid 28.0.1 (#171)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Bring dockerfile up to date
* add opencensus extension
* make checkstyle happy
* bump pom version for opencensus extension
* fix issues related to shading opencensus extension
  The extension packaging included both shaded and unshaded dependencies in the classpath. Shading should not be necessary in this case. Also excludes guava dependencies, which are already provided by Druid and don't need to be added to the extensions jars.
* METRICS-516: Adding Resource labels in OpenCensus Extension
* bump extension version to match release
* confluent-extensions with custom transform specs (#9)
* fix extraction transform serde (#10)
* fix check-style build errors
* setup semaphore build
* add checkstyle
* fix edge cases for internal topics
* METRICS-1302: Added prefix support for resource labels. (#14)
* METRICS-1302: Added prefix support for resource labels.
* Addressed review comments.
* Added and moved configs to ingestion spec, optimized code.
* Addressed review comments
* Updated metric dimension and other review comments
* Flipped ternary operator
* Moved from NullHandling to StringUtils.
* Removed unnecessary HashMap.
* Removed verbosity for instance variables.
* Added getters for configs, labels for distribution metric. (#15)
* Added getters for configs, labels for distribution metric.
* Addressed review comments
* Removed extra brackets in JsonProperty.
* Default resource label prefix to blank - Backward Compatibility (#16)
* update opencensus parent pom version
* update opencensus extensions for 0.19.x
* update parent pom version for confluent-extensions
* Add the capability to speed up S3 uploads using AWS transfer manager
* fix conflicting protobuf dependencies
  Align protobuf dependencies to use the main pom one
* fix timestamp milliseconds in OpenCensusProtobufInputRowParser
  - fix millisecond resolution being dropped when converting timestamps (see the sketch below)
  - remove unnecessary conversion of ByteBuffer to ByteString
  - make test code a little more concise
* improve OpenCensusProtobufInputRowParser performance (#25)
  - remove the need to parse timestamps into their own column
  - reduce the number of times we copy maps of labels
  - pre-size hashmaps and arrays when possible
  - use loops instead of streams in critical sections
  Combined, these changes improve parsing performance by about 15%
  - added benchmark for reference
* deprecate OpenCensusInputRowParser in favor of OpenCensusProtobufInputFormat (#26)
  InputRowParsers have been deprecated in favor of InputFormat. This implements the InputFormat version of the OpenCensus Protobuf parser, and deprecates the existing InputRowParser implementation.
  - the existing InputRowParser behavior is unchanged.
  - the InputFormat behaves like the InputRowParser, except for the default resource prefix which now defaults to "resource." instead of empty.
  - both implementations internally delegate to OpenCensusProtobufReader, which is covered by the existing InputRowParser tests.
* add default query context and update timeout to 30 sec
* Setting default query lane from druid console.
* Giving more heap space for test jvm in semaphore config.
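A rough illustration of the millisecond-resolution issue mentioned in the OpenCensusProtobufInputRowParser bullet above. The toEpochMillis helper is hypothetical, not the extension's actual code; it only shows that a protobuf-style (seconds, nanos) timestamp must fold the nanos into the result instead of truncating to whole seconds:

```java
import java.util.concurrent.TimeUnit;

public final class TimestampConversionSketch
{
  // Hypothetical helper: protobuf-style timestamps carry (seconds, nanos).
  // Truncating to seconds * 1000 keeps only whole seconds; folding in the
  // nanos preserves millisecond resolution.
  static long toEpochMillis(long seconds, int nanos)
  {
    return TimeUnit.SECONDS.toMillis(seconds) + TimeUnit.NANOSECONDS.toMillis(nanos);
  }

  public static void main(String[] args)
  {
    long seconds = 1_702_857_600L;   // 2023-12-18T00:00:00Z
    int nanos = 123_000_000;         // 123 ms past the second
    System.out.println(toEpochMillis(seconds, nanos));  // prints 1702857600123
  }
}
```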
* update parent pom version for Confluent extensions
* Add Java 11 image build and remove unused MySQL images
* fix docker image build failure caused by #10506
* switch build to use Java 11 by default
* Fixed forbiddenapi error
* Added phases before checks
* Fixed
* OpenTelemetry Emitter Extension (#47)
  Add OpenTelemetry Emitter Extension
* Add dependency check (#59)
* Add dependency check
* Fix maven-dependency-plugin errors
* Add --fail-at-end flag
* Fix comment
* METRICS-3663 OpenTelemetry Metrics InputFormat (#63)
* An OpenTelemetry metrics extension
* An InputFormat that is able to ingest metrics that are in the OpenTelemetry format
* Unit tests for the InputFormat
* Benchmarking Tests for the new OpenTelemetryMetricsProtobufInputFormat
* update parent pom version for Confluent extensions
* Adding getRequiredColumns() in our custom transforms.
* Updating shade-plugin version in opentelemetry-emitter.
* Removing the unwanted maven-shade-plugin change.
* Adding JDK version to DockerFile and removing unwanted executions from main pom.xml file. (#75)
* Passing JDK_VERSION as build args to docker build. (#76)
* Make the OpenTelemetry InputFormat More Flexible to Metric, Value and Attribute Types (#67)
* Hybrid OpenCensusProtobufInputFormat in opencensus-extensions (#69)
* Support OpenTelemetry payloads in OpenCensusProtobufInputFormat
  Support reading mixed OpenTelemetry and OpenCensus topics based on Kafka version header
* workaround classloader isolation
  Workaround classloader isolation by using method handles to get access to KafkaRecordEntity related methods and check record headers
  Co-authored-by: Xavier Léauté
* Modify the OpenTelemetry ProtobufReader's Handling of Attribute Types (#77)
* Only handle INT_VALUE, BOOL_VALUE, DOUBLE_VALUE and STRING_VALUE and return null otherwise
* fix wrong class in the DruidModule service provider definition
* Fixing Opencensus extension build failures.
* fix dependency check (#79)
* fix OpenTelemetry extension module service definition (#73) (#81)
* Setting default refresh value for task view as none. (#88)
  As part of this we added a default parameter that can be passed for refresh widget to avoid every refresh widget getting affected.
* go/codeowners: Generate CODEOWNERS [ci skip] (#87)
* fixes in pom.xml files
* adapt to new input argument in ParseException
* adapt to the new constructor for DimensionsSpec
* update obs-data team as codeowners (#98)
* [OBSDATA-334] Patch opencensus/opentelemetry parse exception (#99)
* [METRICS-4487] add obs-oncall as codeowners (#101)
* DP-8085 - Migrate to Semaphore self-hosted agent (#100)
* [OBSDATA-334] Patch opentelemetry IllegalStateException for unsupported metric types (#103)
* Fixing checkstyle issues in opencensus and opentelemetry extensions.
  (#109)
* Remove SNAPSHOT from versions in confluent pom files
* Fixing CI/CD in 24.0.0 upgrade branch (#116)
* OBSDATA-440 Adding SegmentMetadataEvent and publishing them via KafkaSegmentMetadataEmitter (#117)
* Change unsupported type message from WARN to TRACE (#119)
* Use placeholder for logging invalid format (#120)
  Use placeholder for logging invalid format for better performance
* DP-9370 - use cc-service-bot to manage Semaphore project (#118)
* chore: update repo semaphore project
* DP-9632: remediate duplicate Semaphore workflows (#121)
  Only build the master branch and the `x.x.x-confluent` Druid release branches by default
* chore: update repo semaphore project
* Bump version to 24.0.1 in confluent extensions after rebasing on top of druid-24.0.1
* Bump version to 24.0.2 in confluent extensions after rebasing on top of druid-24.0.2
* OBSDATA-483: Adapt OpenCensus and OpenTelemetry extensions to the introduction of SettableByteEntity (#113)
* OBSDATA-483: Adapt opencensus extension to the introduction of SettableByteEntity
* OBSDATA-483: Adapt opentelemetry extension to the introduction of SettableByteEntity
* OBSDATA-483: Decide which reader to instantiate on read between opencensus and opentelemetry
* OBSDATA-483: Add logger config in opencensus tests
* OBSDATA-483: Fix issue with opening the byte entity
* OBSDATA-483: Instantiate the right iterator in every read request
* OBSDATA-483: Add comments
* OBSDATA-483: Address Xavier's comments
* OBSDATA-483: Remove unused member fields
* OBSDATA-483: Rename enum
* OBSDATA-483: Fix trace log to actually print the argument
* OBSDATA-483: Keep passing the underlying byte buffer and move its position explicitly
* OBSDATA-483: Fix checkstyle issues
* OBSDATA-483: Add back handling of InvalidProtocolBufferException
* OBSDATA-483: Extend the semaphore workflow execution time to 2 hours
* Revert "OBSDATA-483: Extend the semaphore workflow execution time to 2 hours"
* OBSDATA-483: Don't close iterator in sample
* chore: update repo semaphore project (#124)
  Co-authored-by: Confluent Jenkins Bot
* [Metrics-4776] OpenTelemetry Extensions - Upgrade otel-proto version (#125)
* Upgrade proto version
* Fix names and tests - Upgrade version
* Fix OpenCensus tests
* Fix test name
* Move to Java 17 (#128)
* bumping version of java to 17 for semaphore test run
* bumping java version to 17 as per https://github.com/confluentinc/druid/pull/127/files
* After speaking with Xavier, made these changes
* Trying to add required flags to run druid using java 17 (#130)
* Use apache-jar-resource-bundle:1.5 instead of 1.5-SNAPSHOT (#14054) (#131)
  Co-authored-by: Tejaswini Bandlamudi <96047043+tejaswini-imply@users.noreply.github.com>
* update parent pom version for Confluent extensions
* Fix CI/CD while upgrading to Druid 25.0.0
* Fix jest and prettify checks
* Adding SegmentMetadataEvent and publishing them via KafkaEmitter (#14281) (#139)
  (cherry picked from commit 4ff6026d30e4da53dc0e37bc2279d9e030773787)
* Downgrade busybox version to fix k8s IT (#14518) (#143)
  Co-authored-by: Rishabh Singh <6513075+findingrish@users.noreply.github.com>
* Passing TARGETARCH in build_args to Docker build (#144)
* Downgrade busybox version to fix k8s IT (#14518)
* Add TargetArch needed in distribution/Dockerfile
* Fix linting
  ---------
  Co-authored-by: Rishabh Singh <6513075+findingrish@users.noreply.github.com>
* remove docker-maven-plugin and Dockerfile customizations
  - remove our custom profile to build using dockerfile-maven-plugin, since that plugin is no longer maintained.
  - remove our custom Dockerfile patches since we can now use the BUILD_FROM_SOURCE argument to decide if we want to build the tarball outside of docker.
* Revert "Trying to add required flags to run druid using java 17 (#130)" (#147)
  This reverts our custom patch from commit 7cf2de4081bc9196471436654bf5ebe268611e80. The necessary Java 17 exports are now included as part of 25.0.0 in https://github.com/confluentinc/druid/blob/25.0.0-confluent/examples/bin/run-java#L27-L56 which is now called by the druid.sh docker startup script as well. The exports for java.base/jdk.internal.perf=ALL-UNNAMED are no longer needed since https://github.com/apache/druid/pull/12481#discussion_r859396192
* removing use of semaphore cache as the public semaphore will not have cache (#145) (#148)
* utilize workflow level caching to publish the built artifacts to the tests. otherwise turn off all caching of .m2 etc
* remove .m2/settings.xml to ensure build passes without internal artifact store
  ---------
  Co-authored-by: Jeremy Kuhnash <111304461+jkuhnashconfluent@users.noreply.github.com>
* OBSDATA-1365: add support for debian based base images (#149)
* Debian based base image upgrade
* updated suggestions
* Update Dockerfile
* minor correction
  ---------
* Revert "fix KafkaInputFormat with nested columns by delegating to underlying inputRow map instead of eagerly copying (#13406) (#13447)" (#155)
  This reverts commit 23500a4c28767c0a74b2557f8966e13720b7511b.
* Filter Out Metrics with NoRecordedValue Flag Set (#157)
  Metrics that contain the NoRecordedValue Flag are being written to Druid with a 0 value. We should properly handle them in the backend (see the sketch below).
* memcached cache: switch to AWS elasticache-java-cluster-client and add TLS support (#14827) (#159)
  This PR updates the library used for Memcached client to AWS Elasticache Client : https://github.com/awslabs/aws-elasticache-cluster-client-memcached-for-java
  This enables us to use the option of encrypting data in transit: Amazon ElastiCache for Memcached now supports encryption of data in transit. For clusters running the Memcached engine, ElastiCache supports Auto Discovery—the ability for client programs to automatically identify all of the nodes in a cache cluster, and to initiate and maintain connections to all of these nodes. Benefits of Auto Discovery - Amazon ElastiCache
  AWS has forked spymemcached 2.12.1, and has since added all the patches included in 2.12.2 and 2.12.3 as part of the 1.2.0 release. So, this can now be considered an equivalent drop-in replacement. GitHub - awslabs/aws-elasticache-cluster-client-memcached-for-java: Amazon ElastiCache Cluster Client for Java - enhanced library to connect to ElastiCache clusters.
  https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/elasticache/AmazonElastiCacheClient.html#AmazonElastiCacheClient--
  How to enable TLS with Elasticache
  On server side: https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/in-transit-encryption-mc.html#in-transit-encryption-enable-existing-mc
  On client side: GitHub - awslabs/aws-elasticache-cluster-client-memcached-for-java: Amazon ElastiCache Cluster Client for Java - enhanced library to connect to ElastiCache clusters.
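A minimal, self-contained sketch of the kind of filtering the NoRecordedValue bullet above describes. The DataPoint record and dropUnrecorded method are stand-ins rather than the extension's actual types; the 0x1 bit follows the OTLP data-point flags convention for marking a point with no recorded value:

```java
import java.util.List;
import java.util.stream.Collectors;

public final class NoRecordedValueFilterSketch
{
  // OTLP data-point flags: bit 0 marks a point with no recorded value.
  private static final int FLAG_NO_RECORDED_VALUE = 0x1;

  // Stand-in for an OTLP number data point: just flags and a value.
  record DataPoint(int flags, double value) {}

  // Keep only points that carry a real measurement; flagged placeholders
  // are dropped instead of being ingested as a 0 value.
  static List<DataPoint> dropUnrecorded(List<DataPoint> points)
  {
    return points.stream()
                 .filter(p -> (p.flags() & FLAG_NO_RECORDED_VALUE) == 0)
                 .collect(Collectors.toList());
  }

  public static void main(String[] args)
  {
    List<DataPoint> points = List.of(new DataPoint(0, 42.0), new DataPoint(1, 0.0));
    System.out.println(dropUnrecorded(points));  // only the 42.0 point survives
  }
}
```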
* PRSP-3603 Bump org.xerial.snappy:snappy-java to latest version to address CVEs (#164)
* Bump org.xerial.snappy:snappy-java from 1.1.8.4 to 1.1.10.5
* Add licenses
* [backport] Upgrade Avro to latest version (#14440) (#162)
  Upgraded Avro to 1.11.1
  (cherry picked from commit 72cf91fbc0ed9cc2abce91df878ab431678b12f3)
  Co-authored-by: Tejaswini Bandlamudi <96047043+tejaswini-imply@users.noreply.github.com>
* Revert "PRSP-3603 Bump org.xerial.snappy:snappy-java to latest version to address CVEs (#164)" (#166)
  This reverts commit 185d6559eedefee2c0a17eda7cb5750b2997710c.
* Upgrade Avro to latest version to address CVEs (#167)
* OBSDATA-1697: Do not build extensions not loaded by cc-druid (#152)
  Create new profiles to enable only the used extensions during the build. This helps address CVEs that were being flagged due to the unused extensions.
  ---------
  Co-authored-by: Keerthana Srikanth
* update parent pom version for Confluent extensions
* Add value to child POMs
* Upgrade dependencies to match upstream v28 & checkstyle fix
* KafkaEmitter changes
* Modifying RowFunction interface
* Fix test cases
* Fix test cases
* Fix test cases
* Fix test cases
* upgrade dependency as per druid 28
* Removing unnecessary change
* Change Maven repository URL
* Add Druid.xml
* Update tag name to match version
* Fix dist-used profile to use Hadoop compile version (#173)
* Changes based on PR comments
* Fix refreshButton
* Use onRefresh only once
* Fix snapshot so that the test passes
---------
Co-authored-by: Travis Thompson
Co-authored-by: Sumit Arrawatia
Co-authored-by: Xavier Léauté
Co-authored-by: Apoorv Mittal
Co-authored-by: Xavier Léauté
Co-authored-by: Huajun Qin
Co-authored-by: Huajun Qin
Co-authored-by: CodingParsley
Co-authored-by: Harini Rajendran
Co-authored-by: Ivan Vankovich
Co-authored-by: Ivan Vankovich
Co-authored-by: Marcus Greer
Co-authored-by: Harini Rajendran
Co-authored-by: Yun Fu
Co-authored-by: Xavier Léauté
Co-authored-by: lokesh-lingarajan
Co-authored-by: Luke Young <91491244+lyoung-confluent@users.noreply.github.com>
Co-authored-by: Konstantine Karantasis
Co-authored-by: Naya Chen
Co-authored-by: nlou9 <39046184+nlou9@users.noreply.github.com>
Co-authored-by: Corey Christous
Co-authored-by: Confluent Jenkins Bot
Co-authored-by: ConfluentTools <96149134+ConfluentTools@users.noreply.github.com>
Co-authored-by: Kamal Narayan <119908061+kamal-narayan@users.noreply.github.com>
Co-authored-by: David Steere
Co-authored-by: Tejaswini Bandlamudi <96047043+tejaswini-imply@users.noreply.github.com>
Co-authored-by: Ghazanfar-CFLT
Co-authored-by: Rishabh Singh <6513075+findingrish@users.noreply.github.com>
Co-authored-by: Jeremy Kuhnash <111304461+jkuhnashconfluent@users.noreply.github.com>
Co-authored-by: Hardik Bajaj <58038410+hardikbajaj@users.noreply.github.com>
Co-authored-by: Michael Li
Co-authored-by: Keerthana Srikanth
Co-authored-by: Jan Werner <105367074+janjwerner-confluent@users.noreply.github.com>
Co-authored-by: mustajibmk <120099779+mustajibmk@users.noreply.github.com>
Co-authored-by: Pankaj kumar
---
.github/CODEOWNERS | 1 + .semaphore/project.yml | 42 + .semaphore/semaphore.yml | 143 ++ codestyle/checkstyle-suppressions.xml | 4 + crypto-algos.txt | 1925 +++++++++++++++++ distribution/docker/Dockerfile | 66 +- distribution/pom.xml | 216 +- .../extensions-contrib/kafka-emitter.md | 8 +- .../confluent-extensions/pom.xml | 76 + .../druid/ConfluentExtensionsModule.java | 36 + .../ExtractTenantTopicTransform.java | 101 + .../transform/ExtractTenantTransform.java | 95 + 
.../druid/transform/TenantUtils.java | 26 + ...rg.apache.druid.initialization.DruidModule | 3 + .../druid/transform/ExtractTransformTest.java | 161 ++ extensions-contrib/kafka-emitter/pom.xml | 43 +- .../druid/emitter/kafka/KafkaEmitter.java | 83 +- .../emitter/kafka/KafkaEmitterConfig.java | 52 +- .../src/main/proto/DruidSegmentEvent.proto | 30 + .../emitter/kafka/KafkaEmitterConfigTest.java | 13 +- .../druid/emitter/kafka/KafkaEmitterTest.java | 6 +- .../opencensus-extensions/pom.xml | 149 ++ .../apache/druid/data/input/KafkaUtils.java | 107 + .../protobuf/HybridProtobufReader.java | 137 ++ .../OpenCensusProtobufExtensionsModule.java | 50 + .../OpenCensusProtobufInputFormat.java | 126 ++ .../OpenCensusProtobufInputRowParser.java | 140 ++ .../protobuf/OpenCensusProtobufReader.java | 231 ++ ...rg.apache.druid.initialization.DruidModule | 16 + .../druid/data/input/KafkaUtilsTest.java | 90 + .../protobuf/OpenCensusBenchmark.java | 118 + .../protobuf/OpenCensusInputFormatTest.java | 56 + .../OpenCensusProtobufInputRowParserTest.java | 477 ++++ .../OpenCensusProtobufReaderTest.java | 368 ++++ .../src/test/resources/log4j2.xml | 35 + .../opentelemetry-extensions/pom.xml | 103 + ...enTelemetryMetricsProtobufInputFormat.java | 132 ++ .../OpenTelemetryMetricsProtobufReader.java | 249 +++ ...OpenTelemetryProtobufExtensionsModule.java | 49 + ...rg.apache.druid.initialization.DruidModule | 17 + .../protobuf/OpenTelemetryBenchmark.java | 135 ++ .../OpenTelemetryMetricsInputFormatTest.java | 68 + ...penTelemetryMetricsProtobufReaderTest.java | 441 ++++ .../druid/storage/s3/S3StorageConfig.java | 18 +- .../druid/storage/s3/S3TransferConfig.java | 71 + .../org/apache/druid/storage/s3/S3Utils.java | 7 +- .../s3/ServerSideEncryptingAmazonS3.java | 29 +- .../data/input/s3/S3InputSourceTest.java | 4 +- .../storage/s3/ObjectSummaryIteratorTest.java | 2 +- .../storage/s3/S3DataSegmentArchiverTest.java | 3 +- .../storage/s3/S3DataSegmentMoverTest.java | 2 +- .../storage/s3/S3DataSegmentPusherTest.java | 7 +- .../s3/S3StorageConnectorProviderTest.java | 2 +- .../druid/storage/s3/S3TaskLogsTest.java | 23 +- .../s3/TestAWSCredentialsProvider.java | 4 +- .../output/RetryableS3OutputStreamTest.java | 3 +- .../s3/output/S3StorageConnectorTest.java | 4 +- licenses.yaml | 2 +- owasp-dependency-check-suppressions.xml | 7 - pom.xml | 18 +- .../emitter/service/SegmentMetadataEvent.java | 31 + .../druid/segment/transform/RowFunction.java | 7 +- service.yml | 13 + web-console/README.md | 5 +- web-console/console-config.js | 5 + web-console/src/ace-modes/hjson.js | 12 +- .../refresh-button/refresh-button.tsx | 8 +- .../__snapshots__/tasks-view.spec.tsx.snap | 1 + .../src/views/tasks-view/tasks-view.tsx | 1 + 69 files changed, 6596 insertions(+), 117 deletions(-) create mode 100644 .github/CODEOWNERS create mode 100644 .semaphore/project.yml create mode 100644 .semaphore/semaphore.yml create mode 100644 crypto-algos.txt create mode 100644 extensions-contrib/confluent-extensions/pom.xml create mode 100644 extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/ConfluentExtensionsModule.java create mode 100644 extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTopicTransform.java create mode 100644 extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTransform.java create mode 100644 extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/TenantUtils.java create mode 100644 
extensions-contrib/confluent-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule create mode 100644 extensions-contrib/confluent-extensions/src/test/java/io/confluent/druid/transform/ExtractTransformTest.java create mode 100644 extensions-contrib/kafka-emitter/src/main/proto/DruidSegmentEvent.proto create mode 100644 extensions-contrib/opencensus-extensions/pom.xml create mode 100644 extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/KafkaUtils.java create mode 100644 extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/HybridProtobufReader.java create mode 100644 extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufExtensionsModule.java create mode 100644 extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputFormat.java create mode 100644 extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParser.java create mode 100644 extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReader.java create mode 100755 extensions-contrib/opencensus-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule create mode 100644 extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/KafkaUtilsTest.java create mode 100644 extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusBenchmark.java create mode 100644 extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusInputFormatTest.java create mode 100644 extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParserTest.java create mode 100644 extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReaderTest.java create mode 100644 extensions-contrib/opencensus-extensions/src/test/resources/log4j2.xml create mode 100644 extensions-contrib/opentelemetry-extensions/pom.xml create mode 100644 extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufInputFormat.java create mode 100644 extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReader.java create mode 100644 extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryProtobufExtensionsModule.java create mode 100755 extensions-contrib/opentelemetry-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule create mode 100644 extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryBenchmark.java create mode 100644 extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsInputFormatTest.java create mode 100644 extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReaderTest.java create mode 100644 
extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3TransferConfig.java create mode 100644 service.yml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000000..858ee8331184 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @confluentinc/obs-data @confluentinc/obs-oncall diff --git a/.semaphore/project.yml b/.semaphore/project.yml new file mode 100644 index 000000000000..2f7c905344f9 --- /dev/null +++ b/.semaphore/project.yml @@ -0,0 +1,42 @@ +# This file is managed by ServiceBot plugin - Semaphore. The content in this file is created using a common +# template and configurations in service.yml. +# Modifications in this file will be overwritten by generated content in the nightly run. +# For more information, please refer to the page: +# https://confluentinc.atlassian.net/wiki/spaces/Foundations/pages/2871296194/Add+SemaphoreCI +apiVersion: v1alpha +kind: Project +metadata: + name: druid + description: "" +spec: + visibility: private + repository: + url: git@github.com:confluentinc/druid.git + run_on: + - branches + - tags + - pull_requests + pipeline_file: .semaphore/semaphore.yml + integration_type: github_app + status: + pipeline_files: + - path: .semaphore/semaphore.yml + level: pipeline + whitelist: + branches: + - master + - /^.*-confluent$/ + custom_permissions: true + debug_permissions: + - empty + - default_branch + - non_default_branch + - pull_request + - forked_pull_request + - tag + attach_permissions: + - default_branch + - non_default_branch + - pull_request + - forked_pull_request + - tag diff --git a/.semaphore/semaphore.yml b/.semaphore/semaphore.yml new file mode 100644 index 000000000000..9888b2175bdd --- /dev/null +++ b/.semaphore/semaphore.yml @@ -0,0 +1,143 @@ +version: v1.0 +name: Apache Druid +agent: + machine: + type: s1-prod-ubuntu20-04-amd64-1 +execution_time_limit: + hours: 3 +blocks: + - name: "Install" + task: + env_vars: &env_vars + - name: MVN + value: "mvn -B" + - name: MAVEN_OPTS + value: "-Dmaven.repo.local=.m2" + - name: MAVEN_SKIP + value: > + -Danimal.sniffer.skip=true + -Dcheckstyle.skip=true + -Ddruid.console.skip=true + -Denforcer.skip=true + -Dforbiddenapis.skip=true + -Dmaven.javadoc.skip=true + -Dpmd.skip=true + -Dspotbugs.skip=true + + - name: MAVEN_SKIP_TESTS + value: "-DskipTests -Djacoco.skip=true" + prologue: + commands: + - echo $SEMAPHORE_WORKFLOW_ID + - sem-version java 17 + - checkout + jobs: + - name: "Install" + commands: + # This is a change meant to validate semaphore public builds + # so thus removing configurations for Confluent's internal CodeArtifact + - rm ~/.m2/settings.xml + - > + MAVEN_OPTS="${MAVEN_OPTS} -Xmx3000m" ${MVN} clean install + -q -ff ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C + # downstream tests depend on artifacts installed by mvn install into .m2 + # also cache target to avoid the cost of recompiling tests + - tar zcf cache-post-install.tgz .m2 target + - artifact push workflow cache-post-install.tgz + + - name: "Tests" + task: + env_vars: *env_vars + prologue: + commands: + - echo $SEMAPHORE_WORKFLOW_ID + - sem-version java 17 + - checkout + - artifact pull workflow cache-post-install.tgz + - tar zxf cache-post-install.tgz + # This is a change meant to validate semaphore public builds + # so thus removing configurations for Confluent's internal CodeArtifact + - rm ~/.m2/settings.xml + jobs: + - name: "animal sniffer checks" + commands: + - ${MVN} test-compile ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} + - ${MVN} animal-sniffer:check --fail-at-end + + - name: 
"checkstyle" + commands: + - ${MVN} checkstyle:checkstyle --fail-at-end + + - name: "enforcer checks" + commands: + - ${MVN} enforcer:enforce --fail-at-end + + - name: "forbidden api checks" + commands: + - ${MVN} test-compile ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} + - ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end + + - name: "pmd checks" + commands: + - ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check + + - name: "spotbugs checks" + commands: + - ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks' + + - name: "analyze dependencies" + commands: + - > + ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true --fail-at-end || { echo " + + The dependency analysis has found a dependency that is either: + 1) Used and undeclared: These are available as a transitive dependency but should be explicitly + added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is + shown above. + 2) Unused and declared: These are not needed and removing them from the POM will speed up the build + and reduce the artifact size. The dependencies to remove are shown above. + If there are false positive dependency analysis warnings, they can be suppressed: + https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies + https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html + For more information, refer to: + https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html + " && false; } + + - name: "Confluent Extensions" + env_vars: + - name: MAVEN_PROJECTS + value: extensions-contrib/confluent-extensions + commands: &run_tests + - > + MAVEN_OPTS="${MAVEN_OPTS} -Xmx1g" ${MVN} test -pl ${MAVEN_PROJECTS} + ${MAVEN_SKIP} -Dremoteresources.skip=true + + - name: "Server" + env_vars: + - name: MAVEN_PROJECTS + value: server + commands: *run_tests + + - name: "Processing" + env_vars: + - name: MAVEN_PROJECTS + value: processing + commands: *run_tests + + - name: "Indexing Service" + env_vars: + - name: MAVEN_PROJECTS + value: indexing-service + commands: *run_tests + + - name: "Kafka Indexing Service" + env_vars: + - name: MAVEN_PROJECTS + value: extensions-core/kafka-indexing-service + commands: *run_tests + + - name: "Other Tests" + env_vars: + - name: MAVEN_PROJECTS + value: '!server,!processing,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-contrib/confluent-extensions,!integration-tests-ex/cases' + commands: *run_tests diff --git a/codestyle/checkstyle-suppressions.xml b/codestyle/checkstyle-suppressions.xml index 01868d73f34f..6d42d891382f 100644 --- a/codestyle/checkstyle-suppressions.xml +++ b/codestyle/checkstyle-suppressions.xml @@ -70,4 +70,8 @@ + + + + diff --git a/crypto-algos.txt b/crypto-algos.txt new file mode 100644 index 000000000000..071300bb46a3 --- /dev/null +++ b/crypto-algos.txt @@ -0,0 +1,1925 @@ + + +┌───────────────────┐ +│ 251 Code Findings │ +└───────────────────┘ + +  core/src/main/java/org/apache/druid/common/utils/SocketUtil.java  + infosec-fips-rules.java.unencrypted-socket + Detected use of network without encryption. + +  ▶▶┆ Autofix ▶ s/(.*)/// Appears that this is using the network without encryption, please + verify and fix. 
https://go/fips-compliance + \1 + /1 + 44┆ try (ServerSocket socket = new ServerSocket(currPort)) { + +  core/src/main/java/org/apache/druid/crypto/CryptoService.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 120┆ Cipher ecipher = Cipher.getInstance(transformation); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 144┆ Cipher dcipher = Cipher.getInstance(transformation); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 156┆ SecretKeyFactory factory = SecretKeyFactory.getInstance(secretKeyFactoryAlg); + +  core/src/main/java/org/apache/druid/guice/Binders.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 55┆ return PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 60┆ return PolyBind.optionBinder(binder, Key.get(TaskLogs.class)); + +  core/src/main/java/org/apache/druid/guice/DruidSecondaryModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 72┆ binder.bind(ObjectMapper.class).to(Key.get(ObjectMapper.class, Json.class)); + +  core/src/main/java/org/apache/druid/guice/GuiceAnnotationIntrospector.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 61┆ return Key.get(m.getGenericType()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 63┆ return Key.get(m.getGenericType(), guiceAnnotation); + +  core/src/main/java/org/apache/druid/guice/JacksonConfigProvider.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 38┆ binder.bind(Key.get(Types.newParameterizedType(Supplier.class, clazz))) + +  core/src/main/java/org/apache/druid/guice/JsonConfigProvider.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 91┆ Key.get(classToProvide), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 92┆ (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 109┆ Key.get(classToProvide), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 110┆ (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 121┆ Key.get(classToProvide, annotation), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 122┆ (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 138┆ Key.get(classToProvide, annotation), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 139┆ (Key) Key.get(Types.newParameterizedType(Supplier.class, classToProvide), annotation) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 181┆ supplierKey = Key.get(supType, bindKey.getAnnotationType()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 183┆ supplierKey = Key.get(supType, bindKey.getAnnotation()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 185┆ supplierKey = Key.get(supType); + +  core/src/main/java/org/apache/druid/guice/LifecycleModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 71┆ registerKey(binder, Key.get(clazz)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 96┆ registerKey(binder, Key.get(clazz, annotation)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 141┆ final Key> keyHolderKey = Key.get(new TypeLiteral>(){}, + Names.named("lifecycle")); + +  core/src/main/java/org/apache/druid/guice/ListProvider.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 40┆ return add(Key.get(clazz)); + +  core/src/main/java/org/apache/druid/guice/PolyBind.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 178┆ implsMap = (Map>) injector.getInstance(Key.get(mapType, + key.getAnnotation())); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 180┆ implsMap = (Map>) injector.getInstance(Key.get(mapType, + key.getAnnotationType())); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 182┆ implsMap = (Map>) injector.getInstance(Key.get(mapType)); + +  core/src/main/java/org/apache/druid/java/util/http/client/HttpClientInit.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 111┆ final KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 111┆ final KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 117┆ final SSLContext sslContext = SSLContext.getInstance("TLS"); + ⋮┆---------------------------------------- + infosec-fips-rules.java.java.lang.security.audit.weak-ssl-context.weak-ssl-context + An insecure SSL context was detected. TLS versions 1.0, 1.1, and all SSL versions are + considered weak encryption and are deprecated. Use SSLContext.getInstance("TLSv1.2") for the + best security. + +  ▶▶┆ Autofix ▶ s/(.*)/// A minimum version of TLS1.2 is required for FIPS compliance. Please + review. https://go/fips-compliance + \1 + /1 + 117┆ final SSLContext sslContext = SSLContext.getInstance("TLS"); + +  + extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterMo + dule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 62┆ .map((String name) -> injector.getInstance(Key.get(Emitter.class, Names.named(name)))) + +  + extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDruidModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 44┆ PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class)) + +  + extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterModule.java +  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 65┆ Key.get( + 66┆  Emitter.class, + 67┆  Names.named(s) + 68┆ ))) + +  + extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 65┆ return injector.getInstance(Key.get(Emitter.class, Names.named(alertEmitterName))); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 74┆ return injector.getInstance(Key.get(Emitter.class, Names.named(requestLogEmitterName))); + +  extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitter.java +  + infosec-fips-rules.java.insecure-hostname-verifier + Insecure HostnameVerifier implementation detected. This will accept any SSL certificate with + any hostname, which creates the possibility for man-in-the-middle attacks. + +  ▶▶┆ Autofix ▶ s/(.*)/// This Insecure HostnameVerifier will accept any SSL certificate without + a hostname, which is insecure. Please review. https://go/fips-compliance + \1 + /1 + 31┆ import org.apache.http.conn.ssl.NoopHostnameVerifier; + ⋮┆---------------------------------------- + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 233┆ KeyStore store = KeyStore.getInstance(influxdbEmitterConfig.getTrustStoreType()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 237┆ sslContext = SSLContext.getInstance("TLS"); + ⋮┆---------------------------------------- + infosec-fips-rules.java.java.lang.security.audit.weak-ssl-context.weak-ssl-context + An insecure SSL context was detected. TLS versions 1.0, 1.1, and all SSL versions are + considered weak encryption and are deprecated. Use SSLContext.getInstance("TLSv1.2") for the + best security. + +  ▶▶┆ Autofix ▶ s/(.*)/// A minimum version of TLS1.2 is required for FIPS compliance. Please + review. 
https://go/fips-compliance + \1 + /1 + 237┆ sslContext = SSLContext.getInstance("TLS"); + +  + extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfig.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 91┆ this.trustStoreType = trustStoreType == null ? KeyStore.getDefaultType() : trustStoreType; + +  + extensions-contrib/kubernetes-overlord-extensions/src/main/java/org/apache/druid/k8s/overlord/K8sOverlordModule.java +  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 52┆ Key.get(TaskRunnerFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 53┆ Key.get(KubernetesTaskRunnerFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 57┆ Key.get(TaskRunnerFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 69┆ PolyBind.createChoice(binder, "druid.indexer.logs.type", Key.get(TaskLogs.class), + Key.get(FileTaskLogs.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 69┆ PolyBind.createChoice(binder, "druid.indexer.logs.type", Key.get(TaskLogs.class), + Key.get(FileTaskLogs.class)); + +  + extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerMetad + ataStorageModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 62┆ .optionBinder(binder, Key.get(MetadataStorageProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 68┆ .optionBinder(binder, Key.get(MetadataStorageConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 74┆ .optionBinder(binder, Key.get(SQLMetadataConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 79┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageActionHandlerFactory.class)) + +  extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java +  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 108┆ SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(ALGORITHM); + +  + extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicSecurityDruidModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 251┆ serviceName = injector.getInstance(Key.get(String.class, Names.named("serviceName"))); + +  + extensions-core/kubernetes-extensions/src/main/java/org/apache/druid/k8s/discovery/K8sDiscoveryModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 78┆ PolyBind.optionBinder(binder, Key.get(DruidNodeDiscoveryProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 83┆ PolyBind.optionBinder(binder, Key.get(DruidNodeAnnouncer.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 88┆ PolyBind.optionBinder(binder, Key.get(DruidLeaderSelector.class, Coordinator.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 95┆ PolyBind.optionBinder(binder, Key.get(DruidLeaderSelector.class, IndexingService.class)) + +  + extensions-core/lookups-cached-global/src/main/java/org/apache/druid/server/lookup/namespace/NamespaceExtractionModule + .java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 84┆ .createChoiceWithDefault(binder, TYPE_PREFIX, Key.get(NamespaceExtractionCacheManager.class), + "onHeap") + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 88┆ .optionBinder(binder, Key.get(NamespaceExtractionCacheManager.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 94┆ .optionBinder(binder, Key.get(NamespaceExtractionCacheManager.class)) + +  extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/exec/MSQTasks.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 124┆ return injector.getInstance(Key.get(StorageConnector.class, MultiStageQuery.class)); + +  extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/guice/MSQDurableStorageModule.java +  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 86┆ binder.bind(Key.get(StorageConnector.class, MultiStageQuery.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 87┆ .toProvider(Key.get(StorageConnectorProvider.class, MultiStageQuery.class)) + +  + extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/IndexerControllerContext.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 82┆ return injector.getInstance(Key.get(DruidNode.class, Self.class)); + +  extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/IndexerWorkerContext.java +  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 101┆ injector.getInstance(Key.get(ServiceClientFactory.class, EscalatedGlobal.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 243┆ return injector.getInstance(Key.get(DruidNode.class, Self.class)); + +  extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/MSQControllerTask.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 182┆ injector.getInstance(Key.get(ServiceClientFactory.class, EscalatedGlobal.class)); + +  + extensions-core/mysql-metadata-storage/src/main/java/org/apache/druid/metadata/storage/mysql/MySQLMetadataStorageModul + e.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 72┆ .optionBinder(binder, Key.get(MetadataStorageProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 78┆ .optionBinder(binder, Key.get(MetadataStorageConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 84┆ .optionBinder(binder, Key.get(SQLMetadataConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 89┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageActionHandlerFactory.class)) + +  + extensions-core/postgresql-metadata-storage/src/main/java/org/apache/druid/metadata/storage/postgresql/PostgreSQLMetad + ataStorageModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 73┆ .optionBinder(binder, Key.get(MetadataStorageProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 79┆ .optionBinder(binder, Key.get(MetadataStorageConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 85┆ .optionBinder(binder, Key.get(SQLMetadataConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 90┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageActionHandlerFactory.class)) + +  indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 115┆ Key.get(DruidNode.class, Self.class), + +  indexing-service/src/main/java/org/apache/druid/guice/IndexingServiceTaskLogsModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 40┆ PolyBind.createChoice(binder, "druid.indexer.logs.type", Key.get(TaskLogs.class), + Key.get(FileTaskLogs.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 40┆ PolyBind.createChoice(binder, "druid.indexer.logs.type", Key.get(TaskLogs.class), + Key.get(FileTaskLogs.class)); + +  indexing-service/src/main/java/org/apache/druid/indexing/common/IndexTaskClient.java  + infosec-fips-rules.java.unencrypted-socket + Detected use of network without encryption. + +  ▶▶┆ Autofix ▶ s/(.*)/// Appears that this is using the network without encryption, please + verify and fix. https://go/fips-compliance + \1 + /1 + 208┆ new Socket(host, port).close(); + +  indexing-service/src/main/java/org/apache/druid/indexing/overlord/PortFinder.java  + infosec-fips-rules.java.unencrypted-socket + Detected use of network without encryption. + +  ▶▶┆ Autofix ▶ s/(.*)/// Appears that this is using the network without encryption, please + verify and fix. https://go/fips-compliance + \1 + /1 + 50┆ new ServerSocket(portNum).close(); + +  integration-tests/src/main/java/org/apache/druid/cli/CliCustomNodeRole.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 102┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 129┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  integration-tests-ex/tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 103┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 129┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  processing/src/main/java/org/apache/druid/jackson/JacksonModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 46┆ binder.bind(ObjectMapper.class).to(Key.get(ObjectMapper.class, Json.class)); + +  processing/src/main/java/org/apache/druid/query/aggregation/JavaScriptAggregatorFactory.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 255┆ MessageDigest md = MessageDigest.getInstance("SHA-1"); + +  server/src/main/java/org/apache/druid/client/cache/MemcachedCache.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 416┆ SSLContext sslContext = SSLContext.getInstance("TLS"); + ⋮┆---------------------------------------- + infosec-fips-rules.java.java.lang.security.audit.weak-ssl-context.weak-ssl-context + An insecure SSL context was detected. TLS versions 1.0, 1.1, and all SSL versions are + considered weak encryption and are deprecated. Use SSLContext.getInstance("TLSv1.2") for the + best security. + +  ▶▶┆ Autofix ▶ s/(.*)/// A minimum version of TLS1.2 is required for FIPS compliance. Please + review. https://go/fips-compliance + \1 + /1 + 416┆ SSLContext sslContext = SSLContext.getInstance("TLS"); + +  server/src/main/java/org/apache/druid/curator/discovery/DiscoveryModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 110┆ registerKey(binder, Key.get(new TypeLiteral(){})); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 124┆ registerKey(binder, Key.get(new TypeLiteral(){}, annotation)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 139┆ registerKey(binder, Key.get(new TypeLiteral(){}, annotation)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 170┆ .to(Key.get(CuratorServiceAnnouncer.class, Names.named(NAME))) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 173┆ binder.bind(Key.get(ServiceAnnouncer.Noop.class, Names.named(NAME))).toInstance(new + ServiceAnnouncer.Noop()); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 175┆ .to(Key.get(ServiceAnnouncer.Noop.class, Names.named(NAME))) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 180┆ PolyBind.createChoiceWithDefault(binder, INTERNAL_DISCOVERY_PROP, + Key.get(DruidNodeAnnouncer.class), CURATOR_KEY); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 185┆ Key.get(DruidNodeDiscoveryProvider.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 192┆ Key.get(DruidLeaderSelector.class, Coordinator.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 199┆ Key.get(DruidLeaderSelector.class, IndexingService.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 203┆ PolyBind.optionBinder(binder, Key.get(DruidNodeDiscoveryProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 208┆ PolyBind.optionBinder(binder, Key.get(DruidNodeAnnouncer.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 213┆ PolyBind.optionBinder(binder, Key.get(DruidLeaderSelector.class, Coordinator.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 222┆ PolyBind.optionBinder(binder, Key.get(DruidLeaderSelector.class, IndexingService.class)) + +  server/src/main/java/org/apache/druid/guice/CacheModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 51┆ binder.bind(Cache.class).toProvider(Key.get(CacheProvider.class, + Global.class)).in(ManageLifecycle.class); + +  server/src/main/java/org/apache/druid/guice/DruidInjectorBuilder.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 75┆ this.jsonMapper = baseInjector.getInstance(Key.get(ObjectMapper.class, Json.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 76┆ this.smileMapper = baseInjector.getInstance(Key.get(ObjectMapper.class, Smile.class)); + +  server/src/main/java/org/apache/druid/guice/LocalDataStorageDruidModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 61┆ Key.get(DataSegmentPusher.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 62┆ Key.get(LocalDataSegmentPusher.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 68┆ Key.get(DataSegmentKiller.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 69┆ Key.get(LocalDataSegmentKiller.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 85┆ PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class)) + +  server/src/main/java/org/apache/druid/guice/QueryRunnerFactoryModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 75┆ .toProvider(Key.get(QuerySchedulerProvider.class, Global.class)) + +  server/src/main/java/org/apache/druid/guice/QueryToolChestModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 110┆ Key.get(GenericQueryMetricsFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 111┆ Key.get(DefaultGenericQueryMetricsFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 114┆ .optionBinder(binder, Key.get(GenericQueryMetricsFactory.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 121┆ Key.get(GroupByQueryMetricsFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 122┆ Key.get(DefaultGroupByQueryMetricsFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 125┆ .optionBinder(binder, Key.get(GroupByQueryMetricsFactory.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 132┆ Key.get(TimeseriesQueryMetricsFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 133┆ Key.get(DefaultTimeseriesQueryMetricsFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 136┆ .optionBinder(binder, Key.get(TimeseriesQueryMetricsFactory.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 143┆ Key.get(TopNQueryMetricsFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. 
Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 144┆ Key.get(DefaultTopNQueryMetricsFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 147┆ .optionBinder(binder, Key.get(TopNQueryMetricsFactory.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 154┆ Key.get(SearchQueryMetricsFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 155┆ Key.get(DefaultSearchQueryMetricsFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 158┆ .optionBinder(binder, Key.get(SearchQueryMetricsFactory.class)) + +  server/src/main/java/org/apache/druid/guice/SQLMetadataStorageDruidModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 72┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataStorageConnector.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 73┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataStorageProvider.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 74┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(SQLMetadataConnector.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 76┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(SegmentsMetadataManager.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 77┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(SegmentsMetadataManagerProvider.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 78┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataRuleManager.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 79┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataRuleManagerProvider.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. 
Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 80┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataSegmentPublisher.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 81┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataSegmentPublisherProvider.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 82┆ PolyBind.createChoiceWithDefault(binder, prop, + Key.get(IndexerMetadataStorageCoordinator.class), defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 83┆ PolyBind.createChoiceWithDefault(binder, prop, + Key.get(MetadataStorageActionHandlerFactory.class), defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 84┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataStorageUpdaterJobHandler.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 85┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(AuditManager.class), defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 86┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(AuditManagerProvider.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 87┆ PolyBind.createChoiceWithDefault(binder, prop, Key.get(MetadataSupervisorManager.class), + defaultValue); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 93┆ PolyBind.optionBinder(binder, Key.get(SegmentsMetadataManager.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 98┆ PolyBind.optionBinder(binder, Key.get(SegmentsMetadataManagerProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 103┆ PolyBind.optionBinder(binder, Key.get(MetadataRuleManager.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 108┆ PolyBind.optionBinder(binder, Key.get(MetadataRuleManagerProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 113┆ PolyBind.optionBinder(binder, Key.get(MetadataSegmentPublisher.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 118┆ PolyBind.optionBinder(binder, Key.get(MetadataSegmentPublisherProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 123┆ PolyBind.optionBinder(binder, Key.get(IndexerMetadataStorageCoordinator.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 128┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageUpdaterJobHandler.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 135┆ PolyBind.optionBinder(binder, Key.get(AuditManager.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 140┆ PolyBind.optionBinder(binder, Key.get(AuditManagerProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 145┆ PolyBind.optionBinder(binder, Key.get(MetadataSupervisorManager.class)) + +  server/src/main/java/org/apache/druid/guice/http/AbstractHttpClientProvider.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 49┆ configKey = Key.get( + 50┆  new TypeLiteral>() + 51┆  { + 52┆  }, annotation + 53┆ ); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 54┆ sslContextKey = Key.get(SSLContext.class, annotation); + +  server/src/main/java/org/apache/druid/guice/security/AuthenticatorModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 41┆ Key.get(Authenticator.class) + +  server/src/main/java/org/apache/druid/guice/security/AuthorizerModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 41┆ Key.get(Authorizer.class) + +  server/src/main/java/org/apache/druid/initialization/Log4jShutterDownerModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 77┆ binder.bind(Key.get(Log4jShutterDowner.class, Names.named("ForTheEagerness"))) + +  server/src/main/java/org/apache/druid/metadata/storage/derby/DerbyMetadataStorageDruidModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 52┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageProvider.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 57┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 62┆ PolyBind.optionBinder(binder, Key.get(SQLMetadataConnector.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 67┆ PolyBind.optionBinder(binder, Key.get(MetadataStorageActionHandlerFactory.class)) + +  server/src/main/java/org/apache/druid/server/emitter/ComposingEmitterModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 65┆ return injector.getInstance(Key.get(Emitter.class, Names.named(s))); + +  server/src/main/java/org/apache/druid/server/emitter/HttpEmitterModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 69┆ context = SSLContext.getDefault(); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 122┆ effectiveSSLContext = SSLContext.getDefault(); + +  server/src/main/java/org/apache/druid/server/initialization/jetty/ChatHandlerServerModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 78┆ binder.bind(DruidNode.class).annotatedWith(RemoteChatHandler.class).to(Key.get(DruidNode.class, + Self.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 79┆ + binder.bind(ServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(ServerConfig.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 80┆ + binder.bind(TLSServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(TLSServerConfig.class)) + ; + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 109┆ injector.getExistingBinding(Key.get(SslContextFactory.Server.class)), + +  server/src/main/java/org/apache/druid/server/initialization/jetty/CliIndexerServerModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 104┆ binder.bind(DruidNode.class).annotatedWith(RemoteChatHandler.class).to(Key.get(DruidNode.class, + Self.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 105┆ + binder.bind(ServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(ServerConfig.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 106┆ + binder.bind(TLSServerConfig.class).annotatedWith(RemoteChatHandler.class).to(Key.get(TLSServerConfig.class)) + ; + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 135┆ injector.getExistingBinding(Key.get(SslContextFactory.Server.class)), + +  server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 65┆ injector.getInstance(Key.get(new TypeLiteral>() {})); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 72┆ injector.getInstance(Key.get(new TypeLiteral>() {})); + +  server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 184┆ injector.getExistingBinding(Key.get(SslContextFactory.Server.class)), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 313┆ ? KeyStore.getDefaultType() + +  server/src/main/java/org/apache/druid/server/metrics/MetricsModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 95┆ binder.bind(Key.get(MonitorScheduler.class, Names.named("ForTheEagerness"))) + +  server/src/main/java/org/apache/druid/server/security/TLSUtils.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 184┆ sslContext = SSLContext.getInstance(protocol == null ? "TLSv1.2" : protocol); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 185┆ KeyStore trustStore = KeyStore.getInstance(trustStoreType == null + 186┆  ? KeyStore.getDefaultType() + 187┆  : trustStoreType); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 186┆ ? KeyStore.getDefaultType() + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 201┆ KeyStore keyStore = KeyStore.getInstance(keyStoreType == null + 202┆  ? KeyStore.getDefaultType() + 203┆  : keyStoreType); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 202┆ ? KeyStore.getDefaultType() + +  services/src/main/java/org/apache/druid/cli/CliBroker.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 171┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + +  services/src/main/java/org/apache/druid/cli/CliCoordinator.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 322┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + +  services/src/main/java/org/apache/druid/cli/CliHistorical.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 138┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + +  services/src/main/java/org/apache/druid/cli/CliIndexer.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 186┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + +  services/src/main/java/org/apache/druid/cli/CliMiddleManager.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 145┆ Key.get(RowIngestionMetersFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 146┆ Key.get(DropwizardRowIngestionMetersFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 149┆ PolyBind.optionBinder(binder, Key.get(RowIngestionMetersFactory.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 174┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 184┆ Key.get(IntermediaryDataManager.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 185┆ Key.get(LocalIntermediaryDataManager.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 189┆ Key.get(IntermediaryDataManager.class) + +  services/src/main/java/org/apache/druid/cli/CliOverlord.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 227┆ Key.get(RowIngestionMetersFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 228┆ Key.get(DropwizardRowIngestionMetersFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 231┆ PolyBind.optionBinder(binder, Key.get(RowIngestionMetersFactory.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 276┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 286┆ Key.get(TaskStorage.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. 
Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 287┆ Key.get(HeapMemoryTaskStorage.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 290┆ PolyBind.optionBinder(binder, Key.get(TaskStorage.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 303┆ Key.get(IntermediaryDataManager.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 304┆ Key.get(LocalIntermediaryDataManager.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 308┆ Key.get(IntermediaryDataManager.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 321┆ Key.get(TaskRunnerFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 322┆ Key.get(HttpRemoteTaskRunnerFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 326┆ Key.get(TaskRunnerFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 359┆ Key.get(ProvisioningStrategy.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 360┆ Key.get(SimpleWorkerProvisioningStrategy.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 364┆ Key.get(ProvisioningStrategy.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 413┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  services/src/main/java/org/apache/druid/cli/CliPeon.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 354┆ Key.get(RowIngestionMetersFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 355┆ Key.get(DropwizardRowIngestionMetersFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. 
Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 358┆ PolyBind.optionBinder(binder, Key.get(RowIngestionMetersFactory.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 371┆ Key.get(ChatHandlerProvider.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 372┆ Key.get(ServiceAnnouncingChatHandlerProvider.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 375┆ PolyBind.optionBinder(binder, Key.get(ChatHandlerProvider.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 404┆ Key.get(TaskActionClientFactory.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 405┆ Key.get(RemoteTaskActionClientFactory.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 408┆ PolyBind.optionBinder(binder, Key.get(TaskActionClientFactory.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 470┆ Key.get(IntermediaryDataManager.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 471┆ Key.get(LocalIntermediaryDataManager.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 475┆ Key.get(IntermediaryDataManager.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 483┆ Key.get(ShuffleClient.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 484┆ Key.get(HttpShuffleClient.class) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 488┆ Key.get(ShuffleClient.class) + +  services/src/main/java/org/apache/druid/cli/CliRouter.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 128┆ LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + +  services/src/main/java/org/apache/druid/cli/CoordinatorJettyServerInitializer.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 81┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  services/src/main/java/org/apache/druid/cli/CreateTables.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 83┆ Key.get(MetadataStorageConnectorConfig.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 107┆ Key.get(MetadataStorageTablesConfig.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 112┆ Key.get(DruidNode.class, Self.class), + +  services/src/main/java/org/apache/druid/cli/DumpSegment.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 233┆ final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 280┆ final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, + Json.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 360┆ final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, + Json.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 444┆ final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, + Json.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 560┆ final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, + Json.class)); + +  services/src/main/java/org/apache/druid/cli/ExportMetadata.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 148┆ Key.get(MetadataStorageConnectorConfig.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 172┆ Key.get(MetadataStorageTablesConfig.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 177┆ Key.get(DruidNode.class, Self.class), + +  services/src/main/java/org/apache/druid/cli/MiddleManagerJettyServerInitializer.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 72┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  services/src/main/java/org/apache/druid/cli/QueryJettyServerInitializer.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 98┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  services/src/main/java/org/apache/druid/cli/ResetCluster.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 90┆ Key.get(DruidNode.class, Self.class), + +  services/src/main/java/org/apache/druid/cli/RouterJettyServerInitializer.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 118┆ final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + +  services/src/main/java/org/apache/druid/cli/ServerRunnable.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 84┆ LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class)); + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 98┆ LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class, + annotation)); + +  services/src/main/java/org/apache/druid/guice/AbstractDruidServiceModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. 
https://go/fips-compliance + \1 + /1 + 79┆ .to(Key.get(new TypeLiteral>>(){}, + role.getDruidServiceInjectName())); + +  sql/src/main/java/org/apache/druid/sql/calcite/aggregation/SqlAggregationModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 52┆ Key.get(SqlAggregator.class, ApproxCountDistinct.class), + +  sql/src/main/java/org/apache/druid/sql/guice/SqlBindings.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 57┆ PolyBind.optionBinder(binder, Key.get(SqlAggregator.class, ApproxCountDistinct.class)) + +  sql/src/main/java/org/apache/druid/sql/guice/SqlModule.java  + infosec-fips-rules.java.detect-crypto-usage + This catchall rule detects the use of any cryptographic function for review + +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 80┆ PolyBind.optionBinder(binder, Key.get(ViewManager.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 88┆ Key.get(ViewManager.class), + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 92┆ PolyBind.optionBinder(binder, Key.get(DruidSchemaManager.class)) + ⋮┆---------------------------------------- +  ▶▶┆ Autofix ▶ s/(.*)/// Detected the use of a crypographic function. Please review this for + compliance. https://go/fips-compliance + \1 + /1 + 100┆ Key.get(DruidSchemaManager.class), + diff --git a/distribution/docker/Dockerfile b/distribution/docker/Dockerfile index 2bcd28f873a6..146f4dd9574e 100644 --- a/distribution/docker/Dockerfile +++ b/distribution/docker/Dockerfile @@ -18,6 +18,7 @@ # ARG JDK_VERSION=17 +ARG BASE_IMAGE=gcr.io/distroless/java$JDK_VERSION-debian12 # The platform is explicitly specified as x64 to build the Druid distribution. # This is because it's not able to build the distribution on arm64 due to dependency problem of web-console. See: https://github.com/apache/druid/issues/13012 @@ -49,38 +50,46 @@ RUN --mount=type=cache,target=/root/.m2 VERSION=$(mvn -B -q org.apache.maven.plu && tar -zxf ./distribution/target/apache-druid-${VERSION}-bin.tar.gz -C /opt \ && mv /opt/apache-druid-${VERSION} /opt/druid -FROM alpine:3 as bash-static +FROM busybox:1.34.1-glibc as busybox + +FROM $BASE_IMAGE + +LABEL maintainer="Apache Druid Developers " + +COPY --from=busybox /bin/busybox /busybox/busybox +RUN ["/busybox/busybox", "sh", "-c", "if [ ! -x \"$(command -v bash)\" ]; then \ + /busybox/busybox --install /bin; \ + else \ + rm /busybox/busybox; \ + fi;"] +# Predefined builtin arg, see: https://docs.docker.com/engine/reference/builder/#automatic-platform-args-in-the-global-scope ARG TARGETARCH + # # Download bash-static binary to execute scripts that require bash. # Although bash-static supports multiple platforms, but there's no need for us to support all those platform, amd64 and arm64 are enough. 
# ARG BASH_URL_BASE="https://github.com/robxu9/bash-static/releases/download/5.1.016-1.2.3" -RUN if [ "$TARGETARCH" = "arm64" ]; then \ - BASH_URL="${BASH_URL_BASE}/bash-linux-aarch64" ; \ - elif [ "$TARGETARCH" = "amd64" ]; then \ - BASH_URL="${BASH_URL_BASE}/bash-linux-x86_64" ; \ +RUN if [ ! -x "$(command -v bash)" ]; then \ + if [ "$TARGETARCH" = "arm64" ]; then \ + BASH_URL="${BASH_URL_BASE}/bash-linux-aarch64" ; \ + elif [ "$TARGETARCH" = "amd64" ]; then \ + BASH_URL="${BASH_URL_BASE}/bash-linux-x86_64" ; \ + else \ + echo "Unsupported architecture ($TARGETARCH)" && exit 1; \ + fi; \ + echo "Downloading bash-static from ${BASH_URL}" \ + && wget ${BASH_URL} -O /bin/bash \ + && chmod 755 /bin/bash; \ + fi; + +RUN if [ ! -x "$(command -v useradd)" ]; then \ + addgroup -S -g 1000 druid \ + && adduser -S -u 1000 -D -H -h /opt/druid -s /bin/sh -g '' -G druid druid; \ else \ - echo "Unsupported architecture ($TARGETARCH)" && exit 1; \ - fi; \ - echo "Downloading bash-static from ${BASH_URL}" \ - && wget ${BASH_URL} -O /bin/bash - -FROM busybox:1.35.0-glibc as busybox - -FROM gcr.io/distroless/java$JDK_VERSION-debian12 -LABEL maintainer="Apache Druid Developers " - -COPY --from=busybox /bin/busybox /busybox/busybox -RUN ["/busybox/busybox", "--install", "/bin"] - - -RUN addgroup -S -g 1000 druid \ - && adduser -S -u 1000 -D -H -h /opt/druid -s /bin/sh -g '' -G druid druid - - -COPY --from=bash-static /bin/bash /bin/bash -RUN chmod 755 /bin/bash + groupadd --system --gid 1000 druid \ + && useradd --system --uid 1000 -M --home /opt/druid --shell /bin/sh -c '' --gid 1000 druid; \ + fi; COPY --chown=druid:druid --from=builder /opt /opt COPY distribution/docker/druid.sh /druid.sh @@ -93,6 +102,13 @@ RUN mkdir /opt/druid/var /opt/shared \ && chown druid:druid /opt/druid/var /opt/shared \ && chmod 775 /opt/druid/var /opt/shared +# Install iproute2 to get the ip command needed to set config of druid.host IP address +# Command needed in druid.sh Line 140; +RUN if [ ! 
-x "$(command -v ip)" ]; then \ + apt update \ + && apt install -y iproute2; \ + fi; + USER druid VOLUME /opt/druid/var WORKDIR /opt/druid diff --git a/distribution/pom.xml b/distribution/pom.xml index ecc00a9155d2..cc035e21a3e2 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -303,8 +303,8 @@ - org.codehaus.mojo - exec-maven-plugin + org.codehaus.mojo + exec-maven-plugin generate-licenses-report @@ -450,6 +450,11 @@ org.apache.druid.extensions.contrib:opentelemetry-emitter -c org.apache.druid.extensions:druid-iceberg-extensions + org.apache.druid.extensions.contrib:druid-opencensus-extensions + -c + io.confluent.druid.extensions:confluent-extensions + -c + org.apache.druid.extensions.contrib:opentelemetry-emitter @@ -567,5 +572,212 @@ + + + dist-used + + false + + tar + + + + + + org.codehaus.mojo + exec-maven-plugin + + + generate-readme + initialize + + exec + + + ${project.basedir}/bin/build-textfile-readme.sh + + ${project.basedir}/../ + ${project.parent.version} + + + + + generate-binary-license + initialize + + exec + + + ${project.basedir}/bin/generate-binary-license.py + + ${project.parent.basedir}/licenses/APACHE2 + ${project.parent.basedir}/licenses.yaml + ${project.parent.basedir}/LICENSE.BINARY + + + + + generate-binary-notice + initialize + + exec + + + ${project.basedir}/bin/generate-binary-notice.py + + ${project.parent.basedir}/NOTICE + ${project.parent.basedir}/licenses.yaml + ${project.parent.basedir}/NOTICE.BINARY + + + + + pull-deps + package + + exec + + + ${project.parent.basedir}/examples/bin/run-java + + -classpath + + -Ddruid.extensions.loadList=[] + -Ddruid.extensions.directory=${project.build.directory}/extensions + + + -Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies + + org.apache.druid.cli.Main + tools + pull-deps + --clean + --defaultVersion + ${project.parent.version} + -l + ${settings.localRepository} + -h + org.apache.hadoop:hadoop-client:${hadoop.compile.version} + -c + org.apache.druid.extensions:druid-datasketches + -c + org.apache.druid.extensions:druid-kafka-indexing-service + -c + org.apache.druid.extensions:druid-multi-stage-query + -c + org.apache.druid.extensions:druid-catalog + -c + org.apache.druid.extensions:druid-protobuf-extensions + -c + org.apache.druid.extensions:postgresql-metadata-storage + -c + org.apache.druid.extensions:druid-s3-extensions + -c + org.apache.druid.extensions:druid-aws-rds-extensions + -c + org.apache.druid.extensions:simple-client-sslcontext + -c + org.apache.druid.extensions:druid-basic-security + -c + org.apache.druid.extensions:druid-pac4j + -c + org.apache.druid.extensions:druid-kubernetes-extensions + --clean + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + distro-assembly + package + + single + + + apache-druid-${project.parent.version} + posix + + src/assembly/assembly.xml + + + + + + + org.codehaus.mojo + license-maven-plugin + + + download-licenses + + download-licenses + + + + + + + + + + bundle-contrib-exts-used + + + + org.codehaus.mojo + exec-maven-plugin + + + pull-deps-contrib-exts + package + + exec + + + ${project.parent.basedir}/examples/bin/run-java + + -classpath + + -Ddruid.extensions.loadList=[] + -Ddruid.extensions.directory=${project.build.directory}/extensions + + + -Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies + + org.apache.druid.cli.Main + tools + pull-deps + --defaultVersion + ${project.parent.version} + -l + ${settings.localRepository} + --no-default-hadoop + -c + 
org.apache.druid.extensions.contrib:kafka-emitter + -c + org.apache.druid.extensions.contrib:statsd-emitter + -c + org.apache.druid.extensions.contrib:prometheus-emitter + -c + org.apache.druid.extensions.contrib:opentelemetry-emitter + -c + org.apache.druid.extensions.contrib:druid-opencensus-extensions + -c + io.confluent.druid.extensions:confluent-extensions + -c + org.apache.druid.extensions.contrib:opentelemetry-emitter + + + + + + + + diff --git a/docs/development/extensions-contrib/kafka-emitter.md b/docs/development/extensions-contrib/kafka-emitter.md index 40b63ca73afd..eba138e004cd 100644 --- a/docs/development/extensions-contrib/kafka-emitter.md +++ b/docs/development/extensions-contrib/kafka-emitter.md @@ -23,7 +23,7 @@ title: "Kafka Emitter" --> -To use this Apache Druid extension, [include](../../configuration/extensions.md#loading-extensions) `kafka-emitter` in the extensions load list. +To use this Apache Druid extension, [include](../../development/extensions.md#loading-extensions) `kafka-emitter` in the extensions load list. ## Introduction @@ -44,6 +44,7 @@ All the configuration parameters for the Kafka emitter are under `druid.emitter. | `druid.emitter.kafka.alert.topic` | Kafka topic name for emitter's target to emit alerts. If `event.types` contains `alerts`, this field cannot empty. | no | none | | `druid.emitter.kafka.request.topic` | Kafka topic name for emitter's target to emit request logs. If `event.types` contains `requests`, this field cannot be empty. | no | none | | `druid.emitter.kafka.segmentMetadata.topic` | Kafka topic name for emitter's target to emit segment metadata. If `event.types` contains `segment_metadata`, this field cannot be empty. | no | none | +| `druid.emitter.kafka.segmentMetadata.topic.format` | Format in which segment related metadata will be emitted.
Choices: json, protobuf.<br/>
If set to `protobuf`, then segment metadata is emitted in `DruidSegmentEvent.proto` format | no | json | | `druid.emitter.kafka.producer.config` | JSON configuration to set additional properties to Kafka producer. | no | none | | `druid.emitter.kafka.clusterName` | Optional value to specify the name of your Druid cluster. It can help make groups in your monitoring environment. | no | none | @@ -55,7 +56,8 @@ druid.emitter.kafka.event.types=["metrics", alerts", "requests", "segment_metada druid.emitter.kafka.metric.topic=druid-metric druid.emitter.kafka.alert.topic=druid-alert druid.emitter.kafka.request.topic=druid-request-logs -druid.emitter.kafka.segmentMetadata.topic=druid-segment-metadata +druid.emitter.kafka.segmentMetadata.topic=druid-segment-metadata +druid.emitter.kafka.segmentMetadata.topic.format=protobuf druid.emitter.kafka.producer.config={"max.block.ms":10000} ``` - +Whenever `druid.emitter.kafka.segmentMetadata.topic.format` field is updated, it is recommended to also update `druid.emitter.kafka.segmentMetadata.topic` to avoid the same topic from getting polluted with different formats of segment metadata. diff --git a/extensions-contrib/confluent-extensions/pom.xml b/extensions-contrib/confluent-extensions/pom.xml new file mode 100644 index 000000000000..ed012b79e629 --- /dev/null +++ b/extensions-contrib/confluent-extensions/pom.xml @@ -0,0 +1,76 @@ + + + + + + 4.0.0 + + io.confluent.druid.extensions + confluent-extensions + confluent-extensions + confluent-extensions + + + druid + org.apache.druid + 28.0.0-SNAPSHOT + ../../pom.xml + + + + + org.apache.druid + druid-processing + ${project.parent.version} + provided + + + com.google.code.findbugs + jsr305 + provided + + + com.fasterxml.jackson.core + jackson-databind + provided + + + com.google.guava + guava + provided + + + com.google.inject + guice + provided + + + com.fasterxml.jackson.core + jackson-annotations + provided + + + + junit + junit + test + + + org.apache.druid + druid-processing + ${project.parent.version} + test + test-jar + + + org.apache.druid + druid-processing + 28.0.0-SNAPSHOT + compile + + + diff --git a/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/ConfluentExtensionsModule.java b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/ConfluentExtensionsModule.java new file mode 100644 index 000000000000..a2a835a10cde --- /dev/null +++ b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/ConfluentExtensionsModule.java @@ -0,0 +1,36 @@ +/* + * Copyright 2020 Confluent Inc. 
+ */ + +package io.confluent.druid; + +import com.fasterxml.jackson.databind.Module; +import com.fasterxml.jackson.databind.jsontype.NamedType; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.google.inject.Binder; +import io.confluent.druid.transform.ExtractTenantTopicTransform; +import io.confluent.druid.transform.ExtractTenantTransform; +import org.apache.druid.initialization.DruidModule; + +import java.util.Collections; +import java.util.List; + +public class ConfluentExtensionsModule implements DruidModule +{ + @Override + public List getJacksonModules() + { + return Collections.singletonList( + new SimpleModule("ConfluentTransformsModule") + .registerSubtypes( + new NamedType(ExtractTenantTransform.class, "extractTenant"), + new NamedType(ExtractTenantTopicTransform.class, "extractTenantTopic") + ) + ); + } + + @Override + public void configure(Binder binder) + { + } +} diff --git a/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTopicTransform.java b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTopicTransform.java new file mode 100644 index 000000000000..ef4b78b0f753 --- /dev/null +++ b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTopicTransform.java @@ -0,0 +1,101 @@ +/* + * Copyright 2020 Confluent Inc. + */ + +package io.confluent.druid.transform; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import org.apache.druid.data.input.Row; +import org.apache.druid.segment.transform.RowFunction; +import org.apache.druid.segment.transform.Transform; + +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +public class ExtractTenantTopicTransform implements Transform +{ + private final String fieldName; + private final String name; + + public ExtractTenantTopicTransform( + @JsonProperty("name") final String name, + @JsonProperty("fieldName") final String fieldName + ) + { + this.name = Preconditions.checkNotNull(name, "name"); + this.fieldName = Preconditions.checkNotNull(fieldName, "fieldName"); + } + + @JsonProperty + @Override + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @Override + public RowFunction getRowFunction() + { + return new RowFunction() + { + @Override + public Object eval(Row row) + { + Object existing = row.getRaw(name); + // do not overwrite existing values if present + if (existing != null) { + return existing; + } + + Object value = row.getRaw(fieldName); + return value == null ? 
null : TenantUtils.extractTenantTopic(value.toString()); + } + }; + } + + @Override + public Set getRequiredColumns() + { + Set columns = new HashSet(); + columns.add(this.name); + columns.add(this.fieldName); + return columns; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (!(o instanceof ExtractTenantTopicTransform)) { + return false; + } + ExtractTenantTopicTransform that = (ExtractTenantTopicTransform) o; + return fieldName.equals(that.fieldName) && + name.equals(that.name); + } + + @Override + public int hashCode() + { + return Objects.hash(fieldName, name); + } + + @Override + public String toString() + { + return "ExtractTenantTopicTransform{" + + "fieldName='" + fieldName + '\'' + + ", name='" + name + '\'' + + '}'; + } +} diff --git a/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTransform.java b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTransform.java new file mode 100644 index 000000000000..4b6ad09d6400 --- /dev/null +++ b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/ExtractTenantTransform.java @@ -0,0 +1,95 @@ +/* + * Copyright 2020 Confluent Inc. + */ + +package io.confluent.druid.transform; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import org.apache.druid.segment.transform.RowFunction; +import org.apache.druid.segment.transform.Transform; + +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +public class ExtractTenantTransform implements Transform +{ + private final String fieldName; + private final String name; + + public ExtractTenantTransform( + @JsonProperty("name") final String name, + @JsonProperty("fieldName") final String fieldName + ) + { + this.name = Preconditions.checkNotNull(name, "name"); + this.fieldName = Preconditions.checkNotNull(fieldName, "fieldName"); + } + + @JsonProperty + @Override + public String getName() + { + return name; + } + + @JsonProperty + public String getFieldName() + { + return fieldName; + } + + @Override + public RowFunction getRowFunction() + { + return row -> { + Object existing = row.getRaw(name); + // do not overwrite existing values if present + if (existing != null) { + return existing; + } + + Object value = row.getRaw(fieldName); + return value == null ? 
null : TenantUtils.extractTenant(value.toString()); + }; + } + + @Override + public Set getRequiredColumns() + { + Set columns = new HashSet(); + columns.add(this.name); + columns.add(this.fieldName); + return columns; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (!(o instanceof ExtractTenantTransform)) { + return false; + } + ExtractTenantTransform that = (ExtractTenantTransform) o; + return fieldName.equals(that.fieldName) && + name.equals(that.name); + } + + @Override + public int hashCode() + { + return Objects.hash(fieldName, name); + } + + @Override + public String toString() + { + return "ExtractTenantTransform{" + + "fieldName='" + fieldName + '\'' + + ", name='" + name + '\'' + + '}'; + } +} diff --git a/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/TenantUtils.java b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/TenantUtils.java new file mode 100644 index 000000000000..1a4e8c66df24 --- /dev/null +++ b/extensions-contrib/confluent-extensions/src/main/java/io/confluent/druid/transform/TenantUtils.java @@ -0,0 +1,26 @@ +/* + * Copyright 2020 Confluent Inc. + */ + +package io.confluent.druid.transform; + +import javax.annotation.Nullable; + +public class TenantUtils +{ + private static final char DELIMITER = '_'; + + @Nullable + public static String extractTenant(String prefixedTopic) + { + int i = prefixedTopic.indexOf(DELIMITER); + return i > 0 ? prefixedTopic.substring(0, i) : null; + } + + @Nullable + public static String extractTenantTopic(String prefixedTopic) + { + int i = prefixedTopic.indexOf(DELIMITER); + return i > 0 ? prefixedTopic.substring(i + 1) : null; + } +} diff --git a/extensions-contrib/confluent-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/confluent-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule new file mode 100644 index 000000000000..f14e0fe0915b --- /dev/null +++ b/extensions-contrib/confluent-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule @@ -0,0 +1,3 @@ +# Copyright 2020 Confluent Inc. + +io.confluent.druid.ConfluentExtensionsModule diff --git a/extensions-contrib/confluent-extensions/src/test/java/io/confluent/druid/transform/ExtractTransformTest.java b/extensions-contrib/confluent-extensions/src/test/java/io/confluent/druid/transform/ExtractTransformTest.java new file mode 100644 index 000000000000..2ca5390e76b9 --- /dev/null +++ b/extensions-contrib/confluent-extensions/src/test/java/io/confluent/druid/transform/ExtractTransformTest.java @@ -0,0 +1,161 @@ +/* + * Copyright 2020 Confluent Inc. 
+ */ + +package io.confluent.druid.transform; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.confluent.druid.ConfluentExtensionsModule; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.InputRowParser; +import org.apache.druid.data.input.impl.MapInputRowParser; +import org.apache.druid.data.input.impl.TimeAndDimsParseSpec; +import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.java.util.common.DateTimes; +import org.apache.druid.segment.TestHelper; +import org.apache.druid.segment.transform.TransformSpec; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Map; + +public class ExtractTransformTest +{ + + private static final MapInputRowParser PARSER = new MapInputRowParser( + new TimeAndDimsParseSpec( + new TimestampSpec("t", "auto", DateTimes.of("2020-01-01")), + new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("topic", "tenant"))) + ) + ); + + private static final Map ROW1 = ImmutableMap.builder() + .put("topic", "lkc-abc123_mytopic") + .build(); + + private static final Map ROW2 = ImmutableMap.builder() + .put("tenant", "lkc-xyz789") + .put("tenant_topic", "topic0") + .put("topic", "lkc-abc123_mytopic") + .build(); + + private static final Map ROW3 = ImmutableMap.builder() + .put("topic", "invalid-topic") + .build(); + + private static final Map ROW4 = ImmutableMap.builder() + .build(); + + + @Test + public void testExtraction() + { + final TransformSpec transformSpec = new TransformSpec( + null, + ImmutableList.of( + new ExtractTenantTransform("tenant", "topic"), + new ExtractTenantTopicTransform("tenant_topic", "topic") + ) + ); + + final InputRowParser> parser = transformSpec.decorate(PARSER); + final InputRow row = parser.parseBatch(ROW1).get(0); + + Assert.assertNotNull(row); + Assert.assertEquals(ImmutableList.of("topic", "tenant"), row.getDimensions()); + Assert.assertEquals(ImmutableList.of("lkc-abc123"), row.getDimension("tenant")); + Assert.assertEquals(ImmutableList.of("mytopic"), row.getDimension("tenant_topic")); + } + + @Test + public void testInternal() + { + Assert.assertEquals(null, TenantUtils.extractTenantTopic("__consumer_offsets")); + Assert.assertEquals(null, TenantUtils.extractTenant("__consumer_offsets")); + Assert.assertEquals(null, TenantUtils.extractTenantTopic("other.topic")); + Assert.assertEquals(null, TenantUtils.extractTenant("other.topic")); + } + + @Test + public void testPreserveExistingFields() + { + final TransformSpec transformSpec = new TransformSpec( + null, + ImmutableList.of( + new ExtractTenantTransform("tenant", "topic"), + new ExtractTenantTopicTransform("tenant_topic", "topic") + ) + ); + + final InputRowParser> parser = transformSpec.decorate(PARSER); + final InputRow row = parser.parseBatch(ROW2).get(0); + + Assert.assertNotNull(row); + Assert.assertEquals(ImmutableList.of("topic", "tenant"), row.getDimensions()); + Assert.assertEquals(ImmutableList.of("lkc-xyz789"), row.getDimension("tenant")); + Assert.assertEquals(ImmutableList.of("topic0"), row.getDimension("tenant_topic")); + } + + @Test + public void testInvalidTopics() + { + final TransformSpec transformSpec = new TransformSpec( + null, + ImmutableList.of( + new ExtractTenantTransform("tenant", "topic"), + new ExtractTenantTopicTransform("tenant_topic", "topic") + ) + ); + + final InputRowParser> parser = 
transformSpec.decorate(PARSER); + final InputRow row = parser.parseBatch(ROW3).get(0); + + Assert.assertNotNull(row); + Assert.assertEquals(ImmutableList.of("topic", "tenant"), row.getDimensions()); + Assert.assertNull(row.getRaw("tenant")); + Assert.assertNull(row.getRaw("tenant_topic")); + } + + @Test + public void testNullTopic() + { + final TransformSpec transformSpec = new TransformSpec( + null, + ImmutableList.of( + new ExtractTenantTransform("tenant", "topic"), + new ExtractTenantTopicTransform("tenant_topic", "topic") + ) + ); + + final InputRowParser> parser = transformSpec.decorate(PARSER); + final InputRow row = parser.parseBatch(ROW4).get(0); + + Assert.assertNotNull(row); + Assert.assertEquals(ImmutableList.of("topic", "tenant"), row.getDimensions()); + Assert.assertNull(row.getRaw("tenant")); + Assert.assertNull(row.getRaw("tenant_topic")); + } + + @Test + public void testSerde() throws Exception + { + final TransformSpec transformSpec = new TransformSpec( + null, + ImmutableList.of( + new ExtractTenantTopicTransform("tenant_topic", "topic"), + new ExtractTenantTransform("tenant", "topic") + ) + ); + + final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); + jsonMapper.registerModules(new ConfluentExtensionsModule().getJacksonModules()); + + Assert.assertEquals( + transformSpec, + jsonMapper.readValue(jsonMapper.writeValueAsString(transformSpec), TransformSpec.class) + ); + } +} diff --git a/extensions-contrib/kafka-emitter/pom.xml b/extensions-contrib/kafka-emitter/pom.xml index 46ca7e6c49e1..6ee40af38f95 100644 --- a/extensions-contrib/kafka-emitter/pom.xml +++ b/extensions-contrib/kafka-emitter/pom.xml @@ -91,7 +91,11 @@ slf4j-api provided - + + joda-time + joda-time + provided + junit junit @@ -116,10 +120,30 @@ test-jar test + + com.google.protobuf + protobuf-java + + + com.google.protobuf + protobuf-java-util + - + + kr.motd.maven + os-maven-plugin + 1.7.0 + + + initialize + + detect + + + + org.owasp dependency-check-maven @@ -127,6 +151,21 @@ true + + org.xolstice.maven.plugins + protobuf-maven-plugin + 0.6.1 + + + + compile + + + + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + diff --git a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java index dd8f3665f537..775cfa54a13a 100644 --- a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java +++ b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java @@ -19,12 +19,13 @@ package org.apache.druid.emitter.kafka; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.google.protobuf.Timestamp; +import com.google.protobuf.util.Timestamps; import org.apache.druid.emitter.kafka.KafkaEmitterConfig.EventType; import org.apache.druid.emitter.kafka.MemoryBoundLinkedBlockingQueue.ObjectContainer; -import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.emitter.proto.DruidSegmentEvent; import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.emitter.core.Emitter; @@ -39,6 +40,7 @@ import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerConfig; import 
org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.ByteArraySerializer; import org.apache.kafka.common.serialization.StringSerializer; import java.util.Properties; @@ -62,12 +64,12 @@ public class KafkaEmitter implements Emitter private final AtomicLong invalidLost; private final KafkaEmitterConfig config; - private final Producer producer; + private final Producer producer; private final ObjectMapper jsonMapper; - private final MemoryBoundLinkedBlockingQueue metricQueue; - private final MemoryBoundLinkedBlockingQueue alertQueue; - private final MemoryBoundLinkedBlockingQueue requestQueue; - private final MemoryBoundLinkedBlockingQueue segmentMetadataQueue; + private final MemoryBoundLinkedBlockingQueue metricQueue; + private final MemoryBoundLinkedBlockingQueue alertQueue; + private final MemoryBoundLinkedBlockingQueue requestQueue; + private final MemoryBoundLinkedBlockingQueue segmentMetadataQueue; private final ScheduledExecutorService scheduler; protected int sendInterval = DEFAULT_SEND_INTERVAL_SECONDS; @@ -103,7 +105,7 @@ private Callback setProducerCallback(AtomicLong lostCouter) } @VisibleForTesting - protected Producer setKafkaProducer() + protected Producer setKafkaProducer() { ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader(); try { @@ -112,7 +114,7 @@ protected Producer setKafkaProducer() Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getBootstrapServers()); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); props.put(ProducerConfig.RETRIES_CONFIG, DEFAULT_RETRIES); props.putAll(config.getKafkaProducerConfig()); @@ -171,9 +173,9 @@ private void sendSegmentMetadataToKafka() sendToKafka(config.getSegmentMetadataTopic(), segmentMetadataQueue, setProducerCallback(segmentMetadataLost)); } - private void sendToKafka(final String topic, MemoryBoundLinkedBlockingQueue recordQueue, Callback callback) + private void sendToKafka(final String topic, MemoryBoundLinkedBlockingQueue recordQueue, Callback callback) { - ObjectContainer objectToSend; + ObjectContainer objectToSend; try { while (true) { objectToSend = recordQueue.take(); @@ -193,15 +195,14 @@ public void emit(final Event event) EventMap map = event.toMap(); if (config.getClusterName() != null) { map = map.asBuilder() - .put("clusterName", config.getClusterName()) - .build(); + .put("clusterName", config.getClusterName()) + .build(); } - String resultJson = jsonMapper.writeValueAsString(map); - - ObjectContainer objectContainer = new ObjectContainer<>( - resultJson, - StringUtils.toUtf8(resultJson).length + byte[] resultBytes = jsonMapper.writeValueAsBytes(map); + ObjectContainer objectContainer = new ObjectContainer<>( + resultBytes, + resultBytes.length ); Set eventTypes = config.getEventTypes(); @@ -220,18 +221,62 @@ public void emit(final Event event) } else if (event instanceof SegmentMetadataEvent) { if (!eventTypes.contains(EventType.SEGMENT_METADATA) || !segmentMetadataQueue.offer(objectContainer)) { segmentMetadataLost.incrementAndGet(); + } else { + switch (config.getSegmentMetadataTopicFormat()) { + case PROTOBUF: + resultBytes = convertMetadataEventToProto((SegmentMetadataEvent) event, segmentMetadataLost); + objectContainer = new ObjectContainer<>( + resultBytes, + 
resultBytes.length + ); + break; + case JSON: + // Do Nothing. We already have the JSON object stored in objectContainer + break; + default: + throw new UnsupportedOperationException("segmentMetadata.topic.format has an invalid value " + config.getSegmentMetadataTopicFormat().toString()); + } + if (!segmentMetadataQueue.offer(objectContainer)) { + segmentMetadataLost.incrementAndGet(); + } } } else { invalidLost.incrementAndGet(); } } - catch (JsonProcessingException e) { + catch (Exception e) { invalidLost.incrementAndGet(); log.warn(e, "Exception while serializing event"); } } } + private byte[] convertMetadataEventToProto(SegmentMetadataEvent event, AtomicLong segmentMetadataLost) + { + try { + Timestamp createdTimeTs = Timestamps.fromMillis(event.getCreatedTime().getMillis()); + Timestamp startTimeTs = Timestamps.fromMillis(event.getStartTime().getMillis()); + Timestamp endTimeTs = Timestamps.fromMillis(event.getEndTime().getMillis()); + + DruidSegmentEvent.Builder druidSegmentEventBuilder = DruidSegmentEvent.newBuilder() + .setDataSource(event.getDataSource()) + .setCreatedTime(createdTimeTs) + .setStartTime(startTimeTs) + .setEndTime(endTimeTs) + .setVersion(event.getVersion()) + .setIsCompacted(event.isCompacted()); + if (config.getClusterName() != null) { + druidSegmentEventBuilder.setClusterName(config.getClusterName()); + } + DruidSegmentEvent druidSegmentEvent = druidSegmentEventBuilder.build(); + return druidSegmentEvent.toByteArray(); + } + catch (Exception e) { + log.warn(e, "Exception while serializing SegmentMetadataEvent"); + throw e; + } + } + @Override public void flush() { diff --git a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java index 019edd095ea4..5c6c9b75aa65 100644 --- a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java +++ b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java @@ -40,7 +40,13 @@ public enum EventType METRICS, ALERTS, REQUESTS, - SEGMENT_METADATA; + SEGMENT_METADATA { + @Override + public String toString() + { + return "segmentMetadata"; + } + }; @JsonValue @Override @@ -51,12 +57,37 @@ public String toString() @JsonCreator public static EventType fromString(String name) + { + for (EventType eventType : EventType.values()) { + if (eventType.toString().equalsIgnoreCase(name)) { + return eventType; + } + } + throw new IllegalArgumentException("Invalid EventType value: " + name); + } + } + + public enum SegmentMetadataTopicFormat + { + JSON, + PROTOBUF; + + @JsonValue + @Override + public String toString() + { + return StringUtils.toLowerCase(this.name()); + } + + @JsonCreator + public static SegmentMetadataTopicFormat fromString(String name) { return valueOf(StringUtils.toUpperCase(name)); } } public static final Set DEFAULT_EVENT_TYPES = ImmutableSet.of(EventType.ALERTS, EventType.METRICS); + @JsonProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG) private final String bootstrapServers; @Nullable @JsonProperty("event.types") @@ -69,6 +100,8 @@ public static EventType fromString(String name) private final String requestTopic; @Nullable @JsonProperty("segmentMetadata.topic") private final String segmentMetadataTopic; + @Nullable @JsonProperty("segmentMetadata.topic.format") + private final SegmentMetadataTopicFormat segmentMetadataTopicFormat; @JsonProperty private final String clusterName; 
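  // Illustrative sketch, not part of this patch: when segmentMetadata.topic.format is set to protobuf,
  // a downstream consumer of the segment metadata topic could decode each Kafka record value
  // (here "record" is a hypothetical ConsumerRecord<String, byte[]>) roughly as follows, assuming the
  // generated DruidSegmentEvent classes are on its classpath:
  //   DruidSegmentEvent event = DruidSegmentEvent.parseFrom(record.value());
  //   String dataSource = event.getDataSource();
  //   long createdMillis = Timestamps.toMillis(event.getCreatedTime()); // com.google.protobuf.util.Timestamps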
@JsonProperty("producer.config") @@ -82,6 +115,7 @@ public KafkaEmitterConfig( @Nullable @JsonProperty("alert.topic") String alertTopic, @Nullable @JsonProperty("request.topic") String requestTopic, @Nullable @JsonProperty("segmentMetadata.topic") String segmentMetadataTopic, + @Nullable @JsonProperty("segmentMetadata.topic.format") SegmentMetadataTopicFormat segmentMetadataTopicFormat, @JsonProperty("clusterName") String clusterName, @JsonProperty("producer.config") @Nullable Map kafkaProducerConfig ) @@ -92,6 +126,7 @@ public KafkaEmitterConfig( this.alertTopic = this.eventTypes.contains(EventType.ALERTS) ? Preconditions.checkNotNull(alertTopic, "druid.emitter.kafka.alert.topic can not be null") : null; this.requestTopic = this.eventTypes.contains(EventType.REQUESTS) ? Preconditions.checkNotNull(requestTopic, "druid.emitter.kafka.request.topic can not be null") : null; this.segmentMetadataTopic = this.eventTypes.contains(EventType.SEGMENT_METADATA) ? Preconditions.checkNotNull(segmentMetadataTopic, "druid.emitter.kafka.segmentMetadata.topic can not be null") : null; + this.segmentMetadataTopicFormat = segmentMetadataTopicFormat == null ? SegmentMetadataTopicFormat.JSON : segmentMetadataTopicFormat; this.clusterName = clusterName; this.kafkaProducerConfig = kafkaProducerConfig == null ? ImmutableMap.of() : kafkaProducerConfig; } @@ -153,6 +188,12 @@ public String getSegmentMetadataTopic() return segmentMetadataTopic; } + @JsonProperty + public SegmentMetadataTopicFormat getSegmentMetadataTopicFormat() + { + return segmentMetadataTopicFormat; + } + @JsonProperty public Map getKafkaProducerConfig() { @@ -183,6 +224,7 @@ public boolean equals(Object o) return false; } + if (getAlertTopic() != null ? !getAlertTopic().equals(that.getAlertTopic()) : that.getAlertTopic() != null) { return false; } @@ -195,6 +237,10 @@ public boolean equals(Object o) return false; } + if (getSegmentMetadataTopicFormat() != null ? !getSegmentMetadataTopicFormat().equals(that.getSegmentMetadataTopicFormat()) : that.getSegmentMetadataTopicFormat() != null) { + return false; + } + if (getClusterName() != null ? !getClusterName().equals(that.getClusterName()) : that.getClusterName() != null) { return false; } @@ -210,6 +256,7 @@ public int hashCode() result = 31 * result + (getAlertTopic() != null ? getAlertTopic().hashCode() : 0); result = 31 * result + (getRequestTopic() != null ? getRequestTopic().hashCode() : 0); result = 31 * result + (getSegmentMetadataTopic() != null ? getSegmentMetadataTopic().hashCode() : 0); + result = 31 * result + (getSegmentMetadataTopicFormat() != null ? getSegmentMetadataTopicFormat().hashCode() : 0); result = 31 * result + (getClusterName() != null ? 
getClusterName().hashCode() : 0); result = 31 * result + getKafkaProducerConfig().hashCode(); return result; @@ -220,11 +267,12 @@ public String toString() { return "KafkaEmitterConfig{" + "bootstrap.servers='" + bootstrapServers + '\'' + - ", event.types='" + eventTypes + '\'' + + ", event.types='" + eventTypes.toString() + '\'' + ", metric.topic='" + metricTopic + '\'' + ", alert.topic='" + alertTopic + '\'' + ", request.topic='" + requestTopic + '\'' + ", segmentMetadata.topic='" + segmentMetadataTopic + '\'' + + ", segmentMetadata.topic.format='" + segmentMetadataTopicFormat + '\'' + ", clusterName='" + clusterName + '\'' + ", Producer.config=" + kafkaProducerConfig + '}'; diff --git a/extensions-contrib/kafka-emitter/src/main/proto/DruidSegmentEvent.proto b/extensions-contrib/kafka-emitter/src/main/proto/DruidSegmentEvent.proto new file mode 100644 index 000000000000..810ab64f92dd --- /dev/null +++ b/extensions-contrib/kafka-emitter/src/main/proto/DruidSegmentEvent.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; +import "google/protobuf/timestamp.proto"; + +option java_multiple_files = true; +option java_package = "org.apache.druid.emitter.proto"; +option java_outer_classname = "DruidSegmentEventMessage"; + +/* Druid segment Event used by Druid to publish first level segment information. + * The message will be consumed by segment processing app. */ +message DruidSegmentEvent { + string dataSource = 1; + + // When this event was created + google.protobuf.Timestamp createdTime = 2; + + // Start time of the segment + google.protobuf.Timestamp startTime = 3; + + // End time of the segment + google.protobuf.Timestamp endTime = 4; + + // Segment version + string version = 5; + + // Cluster name + string clusterName = 6; + + // Is the segment compacted or not + bool isCompacted = 7; +} diff --git a/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java b/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java index c4d5811bcb53..3a39d461f19d 100644 --- a/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java +++ b/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java @@ -46,7 +46,7 @@ public void setUp() public void testSerDeserKafkaEmitterConfig() throws IOException { KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("hostname", null, "metricTest", - "alertTest", "requestTest", "metadataTest", + "alertTest", "requestTest", "metadataTest", null, "clusterNameTest", ImmutableMap.builder() .put("testKey", "testValue").build() ); @@ -60,7 +60,7 @@ public void testSerDeserKafkaEmitterConfig() throws IOException public void testSerDeserKafkaEmitterConfigNullRequestTopic() throws IOException { KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("hostname", null, "metricTest", - "alertTest", null, "metadataTest", + "alertTest", null, "metadataTest", null, "clusterNameTest", ImmutableMap.builder() .put("testKey", "testValue").build() ); @@ -76,7 +76,7 @@ public void testSerDeserKafkaEmitterConfigNullMetricsTopic() throws IOException Set eventTypeSet = new HashSet(); eventTypeSet.add(KafkaEmitterConfig.EventType.SEGMENT_METADATA); KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("hostname", eventTypeSet, null, - null, null, "metadataTest", + null, null, "metadataTest", null, "clusterNameTest", ImmutableMap.builder() .put("testKey", "testValue").build() ); @@ -90,7 
+90,8 @@ public void testSerDeserKafkaEmitterConfigNullMetricsTopic() throws IOException public void testSerDeNotRequiredKafkaProducerConfig() { KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("localhost:9092", null, "metricTest", - "alertTest", null, "metadataTest", + + "alertTest", null, "metadataTest", null, "clusterNameTest", null ); try { @@ -105,9 +106,9 @@ public void testSerDeNotRequiredKafkaProducerConfig() @Test public void testDeserializeEventTypesWithDifferentCase() throws JsonProcessingException { - Assert.assertEquals(KafkaEmitterConfig.EventType.SEGMENT_METADATA, mapper.readValue("\"segment_metadata\"", KafkaEmitterConfig.EventType.class)); + Assert.assertEquals(KafkaEmitterConfig.EventType.SEGMENT_METADATA, mapper.readValue("\"segmentMetadata\"", KafkaEmitterConfig.EventType.class)); Assert.assertEquals(KafkaEmitterConfig.EventType.ALERTS, mapper.readValue("\"alerts\"", KafkaEmitterConfig.EventType.class)); - Assert.assertThrows(ValueInstantiationException.class, () -> mapper.readValue("\"segmentMetadata\"", KafkaEmitterConfig.EventType.class)); + Assert.assertThrows(ValueInstantiationException.class, () -> mapper.readValue("\"segment_metadata\"", KafkaEmitterConfig.EventType.class)); } @Test diff --git a/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterTest.java b/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterTest.java index 9e6846a5d8b8..dad669b2ed35 100644 --- a/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterTest.java +++ b/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterTest.java @@ -102,16 +102,16 @@ public void testKafkaEmitter() throws InterruptedException final CountDownLatch countDownSentEvents = new CountDownLatch( requestTopic == null ? 
totalEventsExcludingRequestLogEvents : totalEvents); - final KafkaProducer producer = mock(KafkaProducer.class); + final KafkaProducer producer = mock(KafkaProducer.class); ObjectMapper mapper = new ObjectMapper(); mapper.registerModule(new JodaModule()); final KafkaEmitter kafkaEmitter = new KafkaEmitter( - new KafkaEmitterConfig("", eventsType, "metrics", "alerts", requestTopic, "metadata", "test-cluster", null), + new KafkaEmitterConfig("", eventsType, "metrics", "alerts", requestTopic, "metadata", null, "test-cluster", null), mapper ) { @Override - protected Producer setKafkaProducer() + protected Producer setKafkaProducer() { // override send interval to 1 second sendInterval = 1; diff --git a/extensions-contrib/opencensus-extensions/pom.xml b/extensions-contrib/opencensus-extensions/pom.xml new file mode 100644 index 000000000000..6e23a49a5786 --- /dev/null +++ b/extensions-contrib/opencensus-extensions/pom.xml @@ -0,0 +1,149 @@ + + + + + 4.0.0 + + org.apache.druid.extensions.contrib + druid-opencensus-extensions + druid-opencensus-extensions + druid-opencensus-extensions + + + druid + org.apache.druid + 28.0.0-SNAPSHOT + ../../pom.xml + + + + + io.opencensus + opencensus-proto + 0.2.0 + + + + com.google.guava + guava + + + + + org.apache.druid + druid-processing + ${project.parent.version} + provided + + + org.apache.druid + druid-indexing-service + ${project.parent.version} + provided + + + io.opentelemetry.proto + opentelemetry-proto + 0.19.0-alpha + + + org.apache.druid.extensions.contrib + druid-opentelemetry-extensions + ${project.parent.version} + + + com.fasterxml.jackson.core + jackson-databind + provided + + + com.google.protobuf + protobuf-java + + + com.google.guava + guava + provided + + + com.google.inject + guice + provided + + + com.google.code.findbugs + jsr305 + provided + + + com.fasterxml.jackson.core + jackson-annotations + provided + + + + junit + junit + test + + + org.apache.druid.extensions + druid-kafka-indexing-service + ${project.parent.version} + test + + + org.apache.kafka + kafka-clients + ${apache.kafka.version} + test + + + + org.openjdk.jmh + jmh-core + 1.27 + test + + + org.openjdk.jmh + jmh-generator-annprocess + 1.27 + test + + + + + + org.apache.maven.plugins + maven-resources-plugin + 3.0.2 + + + desc + + + + + + diff --git a/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/KafkaUtils.java b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/KafkaUtils.java new file mode 100644 index 000000000000..f12589e5adbe --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/KafkaUtils.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.util.Objects; + +public class KafkaUtils +{ + /** + * Creates a MethodHandle that – when invoked on a KafkaRecordEntity - returns the given header value + * for the underlying KafkaRecordEntity + * + * The method handle is roughly equivalent to the following function + * + * (KafkaRecordEntity input) -> { + * Header h = input.getRecord().headers().lastHeader(header) + * if (h != null) { + * return h.value(); + * } else { + * return null; + * } + * } + * + * Since KafkaRecordEntity only exists in the kafka-indexing-service plugin classloader, + * we need to look up the relevant classes in the classloader where the InputEntity was instantiated. + * + * The handle returned by this method should be cached for the classloader it was invoked with. + * + * If the lookup fails for whatever reason, the method handle will always return null; + * + * @param classLoader the kafka-indexing-service classloader + * @param header the header value to look up + * @return a MethodHandle + */ + public static MethodHandle lookupGetHeaderMethod(ClassLoader classLoader, String header) + { + try { + Class entityType = Class.forName("org.apache.druid.data.input.kafka.KafkaRecordEntity", true, classLoader); + Class recordType = Class.forName("org.apache.kafka.clients.consumer.ConsumerRecord", true, classLoader); + Class headersType = Class.forName("org.apache.kafka.common.header.Headers", true, classLoader); + Class headerType = Class.forName("org.apache.kafka.common.header.Header", true, classLoader); + + final MethodHandles.Lookup lookup = MethodHandles.lookup(); + MethodHandle nonNullTest = lookup.findStatic(Objects.class, "nonNull", + MethodType.methodType(boolean.class, Object.class) + ).asType(MethodType.methodType(boolean.class, headerType)); + + final MethodHandle getRecordMethod = lookup.findVirtual( + entityType, + "getRecord", + MethodType.methodType(recordType) + ); + final MethodHandle headersMethod = lookup.findVirtual(recordType, "headers", MethodType.methodType(headersType)); + final MethodHandle lastHeaderMethod = lookup.findVirtual( + headersType, + "lastHeader", + MethodType.methodType(headerType, String.class) + ); + final MethodHandle valueMethod = lookup.findVirtual(headerType, "value", MethodType.methodType(byte[].class)); + + return MethodHandles.filterReturnValue( + MethodHandles.filterReturnValue( + MethodHandles.filterReturnValue(getRecordMethod, headersMethod), + MethodHandles.insertArguments(lastHeaderMethod, 1, header) + ), + // return null byte array if header is not present + MethodHandles.guardWithTest( + nonNullTest, + valueMethod, + // match valueMethod signature by dropping the header instance argument + MethodHandles.dropArguments(MethodHandles.constant(byte[].class, null), 0, headerType) + ) + ); + } + catch (ReflectiveOperationException e) { + // if lookup fails in the classloader where the InputEntity is defined, then the source may not be + // the kafka-indexing-service classloader, or method signatures did not match. 
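      // (Illustrative note, not part of this patch) Callers treat a null header value as "no version header";
      // for example, the hybrid reader invokes the handle and falls back to the OpenCensus parser when it
      // returns null:
      //   byte[] version = (byte[]) getHeaderMethod.invoke(source.getEntity());
      //   if (version == null) { /* parse as OpenCensus */ }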
+ // In that case we return a method handle always returning null + return noopMethodHandle(); + } + } + + static MethodHandle noopMethodHandle() + { + return MethodHandles.dropArguments(MethodHandles.constant(byte[].class, null), 0, InputEntity.class); + } +} diff --git a/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/HybridProtobufReader.java b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/HybridProtobufReader.java new file mode 100644 index 000000000000..83c5c20299aa --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/HybridProtobufReader.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import org.apache.druid.data.input.InputEntityReader; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.InputRowListPlusRawValues; +import org.apache.druid.data.input.KafkaUtils; +import org.apache.druid.data.input.MapBasedInputRow; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.opentelemetry.protobuf.OpenTelemetryMetricsProtobufReader; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.parsers.CloseableIterator; + +import java.io.IOException; +import java.lang.invoke.MethodHandle; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +public class HybridProtobufReader implements InputEntityReader +{ + private static final String VERSION_HEADER_KEY = "v"; + private static final int OPENTELEMETRY_FORMAT_VERSION = 1; + + private final DimensionsSpec dimensionsSpec; + private final SettableByteEntity source; + private final String metricDimension; + private final String valueDimension; + private final String metricLabelPrefix; + private final String resourceLabelPrefix; + + private volatile MethodHandle getHeaderMethod = null; + + enum ProtobufReader + { + OPENCENSUS, + OPENTELEMETRY + } + + public HybridProtobufReader( + DimensionsSpec dimensionsSpec, + SettableByteEntity source, + String metricDimension, + String valueDimension, + String metricLabelPrefix, + String resourceLabelPrefix + ) + { + this.dimensionsSpec = dimensionsSpec; + this.source = source; + this.metricDimension = metricDimension; + this.valueDimension = valueDimension; + this.metricLabelPrefix = metricLabelPrefix; + this.resourceLabelPrefix = resourceLabelPrefix; + } + + @Override + public CloseableIterator read() throws IOException + { + return newReader(whichReader()).read(); + } + + public InputEntityReader 
newReader(ProtobufReader which) + { + switch (which) { + case OPENTELEMETRY: + return new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + source, + metricDimension, + valueDimension, + metricLabelPrefix, + resourceLabelPrefix + ); + case OPENCENSUS: + default: + return new OpenCensusProtobufReader( + dimensionsSpec, + source, + metricDimension, + metricLabelPrefix, + resourceLabelPrefix + ); + } + } + + public ProtobufReader whichReader() + { + // assume InputEntity is always defined in a single classloader (the kafka-indexing-service classloader) + // so we only have to look it up once. To be completely correct we should cache the method based on classloader + if (getHeaderMethod == null) { + getHeaderMethod = KafkaUtils.lookupGetHeaderMethod( + source.getEntity().getClass().getClassLoader(), + VERSION_HEADER_KEY + ); + } + + try { + byte[] versionHeader = (byte[]) getHeaderMethod.invoke(source.getEntity()); + if (versionHeader != null) { + int version = + ByteBuffer.wrap(versionHeader).order(ByteOrder.LITTLE_ENDIAN).getInt(); + if (version == OPENTELEMETRY_FORMAT_VERSION) { + return ProtobufReader.OPENTELEMETRY; + } + } + } + catch (Throwable t) { + // assume input is opencensus if something went wrong + } + return ProtobufReader.OPENCENSUS; + } + + @Override + public CloseableIterator sample() throws IOException + { + return read().map(row -> InputRowListPlusRawValues.of(row, ((MapBasedInputRow) row).getEvent())); + } +} diff --git a/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufExtensionsModule.java b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufExtensionsModule.java new file mode 100644 index 000000000000..66a58c0eb28e --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufExtensionsModule.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.fasterxml.jackson.databind.Module; +import com.fasterxml.jackson.databind.jsontype.NamedType; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.google.inject.Binder; +import org.apache.druid.initialization.DruidModule; + +import java.util.Collections; +import java.util.List; + +public class OpenCensusProtobufExtensionsModule implements DruidModule +{ + + @Override + public List getJacksonModules() + { + return Collections.singletonList( + new SimpleModule("OpenCensusProtobufInputRowParserModule") + .registerSubtypes( + new NamedType(OpenCensusProtobufInputRowParser.class, "opencensus-protobuf"), + new NamedType(OpenCensusProtobufInputFormat.class, "opencensus-protobuf") + ) + ); + } + + @Override + public void configure(Binder binder) + { + } +} diff --git a/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputFormat.java b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputFormat.java new file mode 100644 index 000000000000..f06d6bb9deb5 --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputFormat.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.druid.data.input.InputEntity; +import org.apache.druid.data.input.InputEntityReader; +import org.apache.druid.data.input.InputFormat; +import org.apache.druid.data.input.InputRowSchema; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.StringUtils; + +import javax.annotation.Nullable; +import java.io.File; +import java.util.Objects; + +public class OpenCensusProtobufInputFormat implements InputFormat +{ + private static final String DEFAULT_METRIC_DIMENSION = "name"; + private static final String DEFAULT_RESOURCE_PREFIX = "resource."; + private static final String DEFAULT_VALUE_DIMENSION = "value"; + + private final String metricDimension; + private final String valueDimension; + private final String metricLabelPrefix; + private final String resourceLabelPrefix; + + public OpenCensusProtobufInputFormat( + @JsonProperty("metricDimension") String metricDimension, + @JsonProperty("valueDimension") @Nullable String valueDimension, + @JsonProperty("metricLabelPrefix") String metricLabelPrefix, + @JsonProperty("resourceLabelPrefix") String resourceLabelPrefix + ) + { + this.metricDimension = metricDimension != null ? metricDimension : DEFAULT_METRIC_DIMENSION; + this.valueDimension = valueDimension != null ? valueDimension : DEFAULT_VALUE_DIMENSION; + this.metricLabelPrefix = StringUtils.nullToEmptyNonDruidDataString(metricLabelPrefix); + this.resourceLabelPrefix = resourceLabelPrefix != null ? resourceLabelPrefix : DEFAULT_RESOURCE_PREFIX; + } + + @Override + public boolean isSplittable() + { + return false; + } + + @Override + public InputEntityReader createReader(InputRowSchema inputRowSchema, InputEntity source, File temporaryDirectory) + { + // Sampler passes a KafkaRecordEntity directly, while the normal code path wraps the same entity in a + // SettableByteEntity + SettableByteEntity settableEntity; + if (source instanceof SettableByteEntity) { + settableEntity = (SettableByteEntity) source; + } else { + SettableByteEntity wrapper = new SettableByteEntity<>(); + wrapper.setEntity((ByteEntity) source); + settableEntity = wrapper; + } + return new HybridProtobufReader( + inputRowSchema.getDimensionsSpec(), + settableEntity, + metricDimension, + valueDimension, + metricLabelPrefix, + resourceLabelPrefix + ); + } + + @JsonProperty + public String getMetricDimension() + { + return metricDimension; + } + + @JsonProperty + public String getMetricLabelPrefix() + { + return metricLabelPrefix; + } + + @JsonProperty + public String getResourceLabelPrefix() + { + return resourceLabelPrefix; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (!(o instanceof OpenCensusProtobufInputFormat)) { + return false; + } + OpenCensusProtobufInputFormat that = (OpenCensusProtobufInputFormat) o; + return Objects.equals(metricDimension, that.metricDimension) + && Objects.equals(metricLabelPrefix, that.metricLabelPrefix) + && Objects.equals(resourceLabelPrefix, that.resourceLabelPrefix); + } + + @Override + public int hashCode() + { + return Objects.hash(metricDimension, metricLabelPrefix, resourceLabelPrefix); + } +} diff --git a/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParser.java 
b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParser.java new file mode 100644 index 000000000000..e39ca60764b6 --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParser.java @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Strings; +import org.apache.druid.data.input.ByteBufferInputRowParser; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.impl.ParseSpec; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.logger.Logger; + +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Objects; + +/** + * use {@link OpenCensusProtobufInputFormat} instead + */ +@Deprecated +public class OpenCensusProtobufInputRowParser implements ByteBufferInputRowParser +{ + private static final Logger LOG = new Logger(OpenCensusProtobufInputRowParser.class); + + private static final String DEFAULT_METRIC_DIMENSION = "name"; + private static final String DEFAULT_RESOURCE_PREFIX = ""; + + private final ParseSpec parseSpec; + + private final String metricDimension; + private final String metricLabelPrefix; + private final String resourceLabelPrefix; + + @JsonCreator + public OpenCensusProtobufInputRowParser( + @JsonProperty("parseSpec") ParseSpec parseSpec, + @JsonProperty("metricDimension") String metricDimension, + @JsonProperty("metricLabelPrefix") String metricPrefix, + @JsonProperty("resourceLabelPrefix") String resourcePrefix + ) + { + this.parseSpec = parseSpec; + this.metricDimension = Strings.isNullOrEmpty(metricDimension) ? DEFAULT_METRIC_DIMENSION : metricDimension; + this.metricLabelPrefix = StringUtils.nullToEmptyNonDruidDataString(metricPrefix); + this.resourceLabelPrefix = resourcePrefix != null ? 
resourcePrefix : DEFAULT_RESOURCE_PREFIX; + + LOG.info("Creating OpenCensus Protobuf parser with spec:" + parseSpec); + } + + @Override + public ParseSpec getParseSpec() + { + return parseSpec; + } + + @JsonProperty + public String getMetricDimension() + { + return metricDimension; + } + + @JsonProperty + public String getMetricLabelPrefix() + { + return metricLabelPrefix; + } + + @JsonProperty + public String getResourceLabelPrefix() + { + return resourceLabelPrefix; + } + + @Override + public OpenCensusProtobufInputRowParser withParseSpec(ParseSpec parseSpec) + { + return new OpenCensusProtobufInputRowParser( + parseSpec, + metricDimension, + metricLabelPrefix, + resourceLabelPrefix); + } + + @Override + public List parseBatch(ByteBuffer input) + { + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(input)); + return new OpenCensusProtobufReader( + parseSpec.getDimensionsSpec(), + settableByteEntity, + metricDimension, + metricLabelPrefix, + resourceLabelPrefix + ).readAsList(); + } + + @Override + public boolean equals(final Object o) + { + if (this == o) { + return true; + } + if (!(o instanceof OpenCensusProtobufInputRowParser)) { + return false; + } + final OpenCensusProtobufInputRowParser that = (OpenCensusProtobufInputRowParser) o; + return Objects.equals(parseSpec, that.parseSpec) && + Objects.equals(metricDimension, that.metricDimension) && + Objects.equals(metricLabelPrefix, that.metricLabelPrefix) && + Objects.equals(resourceLabelPrefix, that.resourceLabelPrefix); + } + + @Override + public int hashCode() + { + return Objects.hash(parseSpec, metricDimension, metricLabelPrefix, resourceLabelPrefix); + } + +} diff --git a/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReader.java b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReader.java new file mode 100644 index 000000000000..c19005e6cb8f --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/main/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReader.java @@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.google.common.base.Supplier; +import com.google.common.base.Suppliers; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.Timestamp; +import io.opencensus.proto.metrics.v1.LabelKey; +import io.opencensus.proto.metrics.v1.Metric; +import io.opencensus.proto.metrics.v1.Point; +import io.opencensus.proto.metrics.v1.TimeSeries; +import org.apache.druid.data.input.InputEntityReader; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.InputRowListPlusRawValues; +import org.apache.druid.data.input.MapBasedInputRow; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.CloseableIterators; +import org.apache.druid.java.util.common.parsers.CloseableIterator; +import org.apache.druid.java.util.common.parsers.ParseException; +import org.apache.druid.utils.CollectionUtils; + +import java.nio.ByteBuffer; +import java.time.Instant; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class OpenCensusProtobufReader implements InputEntityReader +{ + private static final String SEPARATOR = "-"; + private static final String VALUE_COLUMN = "value"; + + private final DimensionsSpec dimensionsSpec; + private final SettableByteEntity source; + private final String metricDimension; + private final String metricLabelPrefix; + private final String resourceLabelPrefix; + + public OpenCensusProtobufReader( + DimensionsSpec dimensionsSpec, + SettableByteEntity source, + String metricDimension, + String metricLabelPrefix, + String resourceLabelPrefix + ) + { + this.dimensionsSpec = dimensionsSpec; + this.source = source; + this.metricDimension = metricDimension; + this.metricLabelPrefix = metricLabelPrefix; + this.resourceLabelPrefix = resourceLabelPrefix; + } + + private interface LabelContext + { + void addRow(long millis, String metricName, Object value); + } + + @Override + public CloseableIterator read() + { + Supplier> supplier = Suppliers.memoize(() -> readAsList().iterator()); + return CloseableIterators.withEmptyBaggage(new Iterator() { + @Override + public boolean hasNext() + { + return supplier.get().hasNext(); + } + @Override + public InputRow next() + { + return supplier.get().next(); + } + }); + } + + List readAsList() + { + try { + ByteBuffer buffer = source.getEntity().getBuffer(); + List rows = parseMetric(Metric.parseFrom(buffer)); + // Explicitly move the position assuming that all the remaining bytes have been consumed because the protobuf + // parser does not update the position itself + buffer.position(buffer.limit()); + return rows; + } + catch (InvalidProtocolBufferException e) { + throw new ParseException(null, e, "Protobuf message could not be parsed"); + } + } + + private List parseMetric(final Metric metric) + { + // Process metric descriptor labels map keys. + List descriptorLabels = new ArrayList<>(metric.getMetricDescriptor().getLabelKeysCount()); + for (LabelKey s : metric.getMetricDescriptor().getLabelKeysList()) { + descriptorLabels.add(this.metricLabelPrefix + s.getKey()); + } + + // Process resource labels map. 
+ Map resourceLabelsMap = CollectionUtils.mapKeys( + metric.getResource().getLabelsMap(), + key -> this.resourceLabelPrefix + key + ); + + final List schemaDimensions = dimensionsSpec.getDimensionNames(); + + final List dimensions; + if (!schemaDimensions.isEmpty()) { + dimensions = schemaDimensions; + } else { + Set recordDimensions = new HashSet<>(descriptorLabels); + + // Add resource map key set to record dimensions. + recordDimensions.addAll(resourceLabelsMap.keySet()); + + // MetricDimension, VALUE dimensions will not be present in labelKeysList or Metric.Resource + // map as they are derived dimensions, which get populated while parsing data for timeSeries + // hence add them to recordDimensions. + recordDimensions.add(metricDimension); + recordDimensions.add(VALUE_COLUMN); + + dimensions = Lists.newArrayList( + Sets.difference(recordDimensions, dimensionsSpec.getDimensionExclusions()) + ); + } + + final int capacity = resourceLabelsMap.size() + + descriptorLabels.size() + + 2; // metric name + value columns + + List rows = new ArrayList<>(); + for (TimeSeries ts : metric.getTimeseriesList()) { + final LabelContext labelContext = (millis, metricName, value) -> { + // Add common resourceLabels. + Map event = Maps.newHashMapWithExpectedSize(capacity); + event.putAll(resourceLabelsMap); + // Add metric labels + for (int i = 0; i < metric.getMetricDescriptor().getLabelKeysCount(); i++) { + event.put(descriptorLabels.get(i), ts.getLabelValues(i).getValue()); + } + // add metric name and value + event.put(metricDimension, metricName); + event.put(VALUE_COLUMN, value); + rows.add(new MapBasedInputRow(millis, dimensions, event)); + }; + + for (Point point : ts.getPointsList()) { + addPointRows(point, metric, labelContext); + } + } + return rows; + } + + private void addPointRows(Point point, Metric metric, LabelContext labelContext) + { + Timestamp timestamp = point.getTimestamp(); + long millis = Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()).toEpochMilli(); + String metricName = metric.getMetricDescriptor().getName(); + + switch (point.getValueCase()) { + case DOUBLE_VALUE: + labelContext.addRow(millis, metricName, point.getDoubleValue()); + break; + + case INT64_VALUE: + labelContext.addRow(millis, metricName, point.getInt64Value()); + break; + + case SUMMARY_VALUE: + // count + labelContext.addRow( + millis, + metricName + SEPARATOR + "count", + point.getSummaryValue().getCount().getValue() + ); + // sum + labelContext.addRow( + millis, + metricName + SEPARATOR + "sum", + point.getSummaryValue().getSnapshot().getSum().getValue() + ); + break; + + // TODO : How to handle buckets and percentiles + case DISTRIBUTION_VALUE: + // count + labelContext.addRow(millis, metricName + SEPARATOR + "count", point.getDistributionValue().getCount()); + // sum + labelContext.addRow( + millis, + metricName + SEPARATOR + "sum", + point.getDistributionValue().getSum() + ); + break; + default: + } + } + + @Override + public CloseableIterator sample() + { + return read().map(row -> InputRowListPlusRawValues.of(row, ((MapBasedInputRow) row).getEvent())); + } +} diff --git a/extensions-contrib/opencensus-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/opencensus-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule new file mode 100755 index 000000000000..54b4400fd2cf --- /dev/null +++ 
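To make the reader's row expansion concrete, a hedged sketch that reuses only classes from this patch (imports as in the sources above; readAsList() is package-private, so this would live in the same package, and summaryMetric is a hypothetical Metric carrying a SUMMARY point):

    // Illustrative sketch, not part of the patch.
    SettableByteEntity<ByteEntity> entity = new SettableByteEntity<>();
    entity.setEntity(new ByteEntity(summaryMetric.toByteArray()));  // summaryMetric is hypothetical

    List<InputRow> rows = new OpenCensusProtobufReader(
        new DimensionsSpec(Collections.emptyList()),
        entity,
        "name",  // metricDimension
        "",      // metricLabelPrefix
        ""       // resourceLabelPrefix
    ).readAsList();

    // A single SUMMARY point expands into two rows:
    //   name = "<metric>-count", value = the summary count
    //   name = "<metric>-sum",   value = the summary snapshot sum
    // DISTRIBUTION points expand the same way; DOUBLE_VALUE and INT64_VALUE yield one row each.
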
b/extensions-contrib/opencensus-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +org.apache.druid.data.input.opencensus.protobuf.OpenCensusProtobufExtensionsModule \ No newline at end of file diff --git a/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/KafkaUtilsTest.java b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/KafkaUtilsTest.java new file mode 100644 index 000000000000..88d918ce09e9 --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/KafkaUtilsTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input; + + +import com.google.common.collect.ImmutableList; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.kafka.KafkaRecordEntity; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.common.header.Header; +import org.apache.kafka.common.header.internals.RecordHeaders; +import org.apache.kafka.common.record.TimestampType; +import org.junit.Assert; +import org.junit.Test; + +import java.lang.invoke.MethodHandle; +import java.nio.ByteBuffer; + +public class KafkaUtilsTest +{ + + private static final byte[] BYTES = ByteBuffer.allocate(Integer.BYTES).putInt(42).array(); + + @Test + public void testNoopMethodHandle() throws Throwable + { + Assert.assertNull( + KafkaUtils.noopMethodHandle().invoke(new ByteEntity(new byte[]{})) + ); + } + + @Test + public void testKafkaRecordEntity() throws Throwable + { + final MethodHandle handle = KafkaUtils.lookupGetHeaderMethod(KafkaUtilsTest.class.getClassLoader(), "version"); + KafkaRecordEntity input = new KafkaRecordEntity( + new ConsumerRecord<>( + "test", + 0, + 0, + 0, + TimestampType.CREATE_TIME, + -1L, + -1, + -1, + null, + new byte[]{}, + new RecordHeaders(ImmutableList.of(new Header() + { + @Override + public String key() + { + return "version"; + } + + @Override + public byte[] value() + { + return BYTES; + } + })) + ) + ); + Assert.assertArrayEquals(BYTES, (byte[]) handle.invoke(input)); + } + + @Test(expected = ClassCastException.class) + public void testNonKafkaEntity() throws Throwable + { + final MethodHandle handle = KafkaUtils.lookupGetHeaderMethod(KafkaUtilsTest.class.getClassLoader(), "version"); + handle.invoke(new ByteEntity(new byte[]{})); + } +} diff --git a/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusBenchmark.java b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusBenchmark.java new file mode 100644 index 000000000000..871ce0321b8f --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusBenchmark.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.google.common.collect.Lists; +import com.google.protobuf.Timestamp; +import io.opencensus.proto.metrics.v1.LabelKey; +import io.opencensus.proto.metrics.v1.LabelValue; +import io.opencensus.proto.metrics.v1.Metric; +import io.opencensus.proto.metrics.v1.MetricDescriptor; +import io.opencensus.proto.metrics.v1.Point; +import io.opencensus.proto.metrics.v1.TimeSeries; +import io.opencensus.proto.resource.v1.Resource; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.JSONParseSpec; +import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec; +import org.apache.druid.java.util.common.parsers.JSONPathFieldType; +import org.apache.druid.java.util.common.parsers.JSONPathSpec; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.infra.Blackhole; + +import java.nio.ByteBuffer; +import java.time.Instant; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +@Fork(1) +public class OpenCensusBenchmark +{ + private static final Instant INSTANT = Instant.parse("2019-07-12T09:30:01.123Z"); + private static final Timestamp TIMESTAMP = Timestamp.newBuilder() + .setSeconds(INSTANT.getEpochSecond()) + .setNanos(INSTANT.getNano()).build(); + + private static final JSONParseSpec PARSE_SPEC = new JSONParseSpec( + new TimestampSpec("timestamp", "millis", null), + new DimensionsSpec(Collections.emptyList()), + new JSONPathSpec( + true, + Lists.newArrayList( + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "name", ""), + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "value", ""), + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "foo_key", "") + ) + ), null, null + ); + + private static final OpenCensusProtobufInputRowParser PARSER = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, ""); + + private static final ByteBuffer BUFFER = ByteBuffer.wrap(createMetric().toByteArray()); + + static Metric createMetric() + { + final MetricDescriptor.Builder descriptorBuilder = MetricDescriptor.newBuilder() + .setName("io.confluent.domain/such/good/metric/wow") + .setUnit("ms") + .setType(MetricDescriptor.Type.CUMULATIVE_DOUBLE); + + + final TimeSeries.Builder tsBuilder = TimeSeries.newBuilder() + .setStartTimestamp(TIMESTAMP) + .addPoints(Point.newBuilder().setDoubleValue(42.0).build()); + for (int i = 0; i < 10; i++) { + descriptorBuilder.addLabelKeys(LabelKey.newBuilder() + .setKey("foo_key_" + i) + .build()); + tsBuilder.addLabelValues(LabelValue.newBuilder() + .setHasValue(true) + .setValue("foo_value") + .build()); + } + + final Map resourceLabels = new HashMap<>(); + for (int i = 0; i < 5; i++) { + resourceLabels.put("resoure.label_" + i, "val_" + i); + } + + return Metric.newBuilder() + .setMetricDescriptor(descriptorBuilder.build()) + .setResource( + Resource.newBuilder() + .setType("env") + .putAllLabels(resourceLabels) + .build()) + .addTimeseries(tsBuilder.build()) + .build(); + } + + @Benchmark() + public void measureSerde(Blackhole blackhole) + { + // buffer must be reset / duplicated each time to ensure each iteration reads the entire buffer from the beginning + for (InputRow row : PARSER.parseBatch(BUFFER.duplicate())) { + blackhole.consume(row); + } + } +} diff --git 
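The benchmark above relies on the JMH annotation processor; if it helps to launch it directly, a minimal runner sketch using the standard org.openjdk.jmh runner API provided by the jmh-core test dependency (the runner class name is illustrative, not part of this patch):

    import org.openjdk.jmh.runner.Runner;
    import org.openjdk.jmh.runner.RunnerException;
    import org.openjdk.jmh.runner.options.Options;
    import org.openjdk.jmh.runner.options.OptionsBuilder;

    public class OpenCensusBenchmarkRunner
    {
      public static void main(String[] args) throws RunnerException
      {
        // Run only the OpenCensusBenchmark methods in a single fork.
        Options options = new OptionsBuilder()
            .include(OpenCensusBenchmark.class.getSimpleName())
            .forks(1)
            .build();
        new Runner(options).run();
      }
    }
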
a/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusInputFormatTest.java b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusInputFormatTest.java new file mode 100644 index 000000000000..7aeba5462612 --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusInputFormatTest.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.druid.data.input.InputFormat; +import org.junit.Assert; +import org.junit.Test; + +public class OpenCensusInputFormatTest +{ + @Test + public void testSerde() throws Exception + { + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat("metric.name", null, "descriptor.", "custom."); + + final ObjectMapper jsonMapper = new ObjectMapper(); + jsonMapper.registerModules(new OpenCensusProtobufExtensionsModule().getJacksonModules()); + + final OpenCensusProtobufInputFormat actual = (OpenCensusProtobufInputFormat) jsonMapper.readValue( + jsonMapper.writeValueAsString(inputFormat), + InputFormat.class + ); + Assert.assertEquals(inputFormat, actual); + Assert.assertEquals("metric.name", actual.getMetricDimension()); + Assert.assertEquals("descriptor.", actual.getMetricLabelPrefix()); + Assert.assertEquals("custom.", actual.getResourceLabelPrefix()); + } + + @Test + public void testDefaults() + { + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat(null, null, null, null); + + Assert.assertEquals("name", inputFormat.getMetricDimension()); + Assert.assertEquals("", inputFormat.getMetricLabelPrefix()); + Assert.assertEquals("resource.", inputFormat.getResourceLabelPrefix()); + } +} diff --git a/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParserTest.java b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParserTest.java new file mode 100644 index 000000000000..a9c696cd27cd --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufInputRowParserTest.java @@ -0,0 +1,477 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.protobuf.DoubleValue; +import com.google.protobuf.Int64Value; +import com.google.protobuf.Timestamp; +import io.opencensus.proto.metrics.v1.DistributionValue; +import io.opencensus.proto.metrics.v1.LabelKey; +import io.opencensus.proto.metrics.v1.LabelValue; +import io.opencensus.proto.metrics.v1.Metric; +import io.opencensus.proto.metrics.v1.MetricDescriptor; +import io.opencensus.proto.metrics.v1.MetricDescriptor.Type; +import io.opencensus.proto.metrics.v1.Point; +import io.opencensus.proto.metrics.v1.SummaryValue; +import io.opencensus.proto.metrics.v1.TimeSeries; +import io.opencensus.proto.resource.v1.Resource; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.InputRowParser; +import org.apache.druid.data.input.impl.JSONParseSpec; +import org.apache.druid.data.input.impl.StringDimensionSchema; +import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec; +import org.apache.druid.java.util.common.parsers.JSONPathFieldType; +import org.apache.druid.java.util.common.parsers.JSONPathSpec; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.nio.ByteBuffer; +import java.time.Instant; +import java.util.Collections; +import java.util.List; + +public class OpenCensusProtobufInputRowParserTest +{ + private static final Instant INSTANT = Instant.parse("2019-07-12T09:30:01.123Z"); + private static final Timestamp TIMESTAMP = Timestamp.newBuilder() + .setSeconds(INSTANT.getEpochSecond()) + .setNanos(INSTANT.getNano()).build(); + + static final JSONParseSpec PARSE_SPEC = new JSONParseSpec( + new TimestampSpec("timestamp", "millis", null), + new DimensionsSpec(Collections.emptyList()), + new JSONPathSpec( + true, + Lists.newArrayList( + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "name", ""), + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "value", ""), + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "foo_key", "") + ) + ), null, null + ); + + static final JSONParseSpec PARSE_SPEC_WITH_DIMENSIONS = new JSONParseSpec( + new TimestampSpec("timestamp", "millis", null), + new DimensionsSpec(ImmutableList.of( + new StringDimensionSchema("foo_key"), + new StringDimensionSchema("env_key") + )), + new JSONPathSpec( + true, + Lists.newArrayList( + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "name", ""), + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "value", ""), + new JSONPathFieldSpec(JSONPathFieldType.ROOT, "foo_key", "") + ) + ), null, null + ); + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testSerde() 
throws Exception + { + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser( + OpenCensusProtobufInputRowParserTest.PARSE_SPEC, + "metric.name", + "descriptor.", + "custom." + ); + + final ObjectMapper jsonMapper = new ObjectMapper(); + jsonMapper.registerModules(new OpenCensusProtobufExtensionsModule().getJacksonModules()); + + final OpenCensusProtobufInputRowParser actual = (OpenCensusProtobufInputRowParser) jsonMapper.readValue( + jsonMapper.writeValueAsString(parser), + InputRowParser.class + ); + Assert.assertEquals(parser, actual); + Assert.assertEquals("metric.name", actual.getMetricDimension()); + Assert.assertEquals("descriptor.", actual.getMetricLabelPrefix()); + Assert.assertEquals("custom.", actual.getResourceLabelPrefix()); + } + + + @Test + public void testDefaults() + { + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser( + OpenCensusProtobufInputRowParserTest.PARSE_SPEC, + null, null, null + ); + + Assert.assertEquals("name", parser.getMetricDimension()); + Assert.assertEquals("", parser.getMetricLabelPrefix()); + Assert.assertEquals("", parser.getResourceLabelPrefix()); + } + + @Test + public void testDoubleGaugeParse() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, ""); + + Metric metric = doubleGaugeMetric(TIMESTAMP); + + InputRow row = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())).get(0); + Assert.assertEquals(INSTANT.toEpochMilli(), row.getTimestampFromEpoch()); + + assertDimensionEquals(row, "name", "metric_gauge_double"); + assertDimensionEquals(row, "foo_key", "foo_value"); + + + Assert.assertEquals(2000, row.getMetric("value").doubleValue(), 0.0); + } + + @Test + public void testIntGaugeParse() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, ""); + + Metric metric = intGaugeMetric(TIMESTAMP); + + InputRow row = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())).get(0); + Assert.assertEquals(INSTANT.toEpochMilli(), row.getTimestampFromEpoch()); + + assertDimensionEquals(row, "name", "metric_gauge_int64"); + assertDimensionEquals(row, "foo_key", "foo_value"); + + Assert.assertEquals(1000, row.getMetric("value").intValue()); + } + + @Test + public void testSummaryParse() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, ""); + + Metric metric = summaryMetric(TIMESTAMP); + + List rows = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(INSTANT.toEpochMilli(), row.getTimestampFromEpoch()); + assertDimensionEquals(row, "name", "metric_summary-count"); + assertDimensionEquals(row, "foo_key", "foo_value"); + Assert.assertEquals(40, row.getMetric("value").doubleValue(), 0.0); + + row = rows.get(1); + Assert.assertEquals(INSTANT.toEpochMilli(), row.getTimestampFromEpoch()); + assertDimensionEquals(row, "name", "metric_summary-sum"); + assertDimensionEquals(row, "foo_key", "foo_value"); + Assert.assertEquals(10, row.getMetric("value").doubleValue(), 0.0); + } + + @Test + public void testDistributionParse() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, ""); + + Metric metric = distributionMetric(TIMESTAMP); + + List rows = 
parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(INSTANT.toEpochMilli(), row.getTimestampFromEpoch()); + assertDimensionEquals(row, "name", "metric_distribution-count"); + assertDimensionEquals(row, "foo_key", "foo_value"); + Assert.assertEquals(100, row.getMetric("value").intValue()); + + row = rows.get(1); + Assert.assertEquals(INSTANT.toEpochMilli(), row.getTimestampFromEpoch()); + assertDimensionEquals(row, "name", "metric_distribution-sum"); + assertDimensionEquals(row, "foo_key", "foo_value"); + Assert.assertEquals(500, row.getMetric("value").doubleValue(), 0.0); + } + + @Test + public void testDimensionsParseWithParseSpecDimensions() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC_WITH_DIMENSIONS, null, null, ""); + + Metric metric = summaryMetric(TIMESTAMP); + + List rows = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(2, row.getDimensions().size()); + assertDimensionEquals(row, "env_key", "env_val"); + assertDimensionEquals(row, "foo_key", "foo_value"); + + row = rows.get(1); + Assert.assertEquals(2, row.getDimensions().size()); + assertDimensionEquals(row, "env_key", "env_val"); + assertDimensionEquals(row, "foo_key", "foo_value"); + + } + + @Test + public void testDimensionsParseWithoutPARSE_SPECDimensions() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, ""); + + Metric metric = summaryMetric(TIMESTAMP); + + List rows = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "name", "metric_summary-count"); + assertDimensionEquals(row, "env_key", "env_val"); + assertDimensionEquals(row, "foo_key", "foo_value"); + + row = rows.get(1); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "name", "metric_summary-sum"); + assertDimensionEquals(row, "env_key", "env_val"); + assertDimensionEquals(row, "foo_key", "foo_value"); + + } + + @Test + public void testMetricNameOverride() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, "dimension_name", null, ""); + + Metric metric = summaryMetric(Timestamp.getDefaultInstance()); + + List rows = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "dimension_name", "metric_summary-count"); + assertDimensionEquals(row, "foo_key", "foo_value"); + assertDimensionEquals(row, "env_key", "env_val"); + + row = rows.get(1); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "dimension_name", "metric_summary-sum"); + assertDimensionEquals(row, "foo_key", "foo_value"); + assertDimensionEquals(row, "env_key", "env_val"); + } + + @Test + public void testDefaultPrefix() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, null, null); + + Metric metric = summaryMetric(Timestamp.getDefaultInstance()); + + List rows = 
parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "name", "metric_summary-count"); + assertDimensionEquals(row, "foo_key", "foo_value"); + assertDimensionEquals(row, "env_key", "env_val"); + + row = rows.get(1); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "name", "metric_summary-sum"); + assertDimensionEquals(row, "foo_key", "foo_value"); + assertDimensionEquals(row, "env_key", "env_val"); + } + + @Test + public void testCustomPrefix() + { + //configure parser with desc file + OpenCensusProtobufInputRowParser parser = new OpenCensusProtobufInputRowParser(PARSE_SPEC, null, "descriptor.", "custom."); + + Metric metric = summaryMetric(Timestamp.getDefaultInstance()); + + List rows = parser.parseBatch(ByteBuffer.wrap(metric.toByteArray())); + + Assert.assertEquals(2, rows.size()); + + InputRow row = rows.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "name", "metric_summary-count"); + assertDimensionEquals(row, "descriptor.foo_key", "foo_value"); + assertDimensionEquals(row, "custom.env_key", "env_val"); + + row = rows.get(1); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "name", "metric_summary-sum"); + assertDimensionEquals(row, "descriptor.foo_key", "foo_value"); + assertDimensionEquals(row, "custom.env_key", "env_val"); + } + + private void assertDimensionEquals(InputRow row, String dimension, Object expected) + { + List values = row.getDimension(dimension); + + Assert.assertEquals(1, values.size()); + Assert.assertEquals(expected, values.get(0)); + } + + static Metric doubleGaugeMetric(Timestamp timestamp) + { + return getMetric( + "metric_gauge_double", + "metric_gauge_double_description", + Type.GAUGE_DOUBLE, + Point.newBuilder() + .setTimestamp(timestamp) + .setDoubleValue(2000) + .build(), + timestamp); + } + + static Metric intGaugeMetric(Timestamp timestamp) + { + return getMetric( + "metric_gauge_int64", + "metric_gauge_int64_description", + MetricDescriptor.Type.GAUGE_INT64, + Point.newBuilder() + .setTimestamp(timestamp) + .setInt64Value(1000) + .build(), + timestamp); + } + + static Metric summaryMetric(Timestamp timestamp) + { + + SummaryValue.Snapshot snapshot = SummaryValue.Snapshot.newBuilder() + .setSum(DoubleValue.newBuilder().setValue(10).build()) + .addPercentileValues(SummaryValue.Snapshot.ValueAtPercentile.newBuilder() + .setPercentile(50.0) + .setValue(10) + .build()) + .addPercentileValues(SummaryValue.Snapshot.ValueAtPercentile.newBuilder() + .setPercentile(75.0) + .setValue(20) + .build()) + .addPercentileValues(SummaryValue.Snapshot.ValueAtPercentile.newBuilder() + .setPercentile(95.0) + .setValue(30) + .build()) + .addPercentileValues(SummaryValue.Snapshot.ValueAtPercentile.newBuilder() + .setPercentile(98.0) + .setValue(40) + .build()) + .addPercentileValues(SummaryValue.Snapshot.ValueAtPercentile.newBuilder() + .setPercentile(99.0) + .setValue(50) + .build()) + .addPercentileValues(SummaryValue.Snapshot.ValueAtPercentile.newBuilder() + .setPercentile(99.9) + .setValue(60) + .build()) + .build(); + + + SummaryValue summaryValue = SummaryValue.newBuilder() + .setCount(Int64Value.newBuilder().setValue(40).build()) + .setSnapshot(snapshot) + .build(); + + return getMetric( + "metric_summary", + "metric_summary_description", + MetricDescriptor.Type.SUMMARY, + Point.newBuilder() + 
.setTimestamp(timestamp) + .setSummaryValue(summaryValue) + .build(), + timestamp); + } + + static Metric distributionMetric(Timestamp timestamp) + { + DistributionValue distributionValue = DistributionValue.newBuilder() + .setCount(100) + .setSum(500) + .build(); + + return getMetric( + "metric_distribution", + "metric_distribution_description", + MetricDescriptor.Type.GAUGE_DISTRIBUTION, + Point.newBuilder() + .setTimestamp(timestamp) + .setDistributionValue(distributionValue) + .build(), + timestamp); + } + + static Metric getMetric(String name, String description, MetricDescriptor.Type type, Point point, Timestamp timestamp) + { + Metric dist = Metric.newBuilder() + .setMetricDescriptor( + MetricDescriptor.newBuilder() + .setName(name) + .setDescription(description) + .setUnit("ms") + .setType(type) + .addLabelKeys( + LabelKey.newBuilder() + .setKey("foo_key") + .build()) + .build()) + .setResource( + Resource.newBuilder() + .setType("env") + .putAllLabels(Collections.singletonMap("env_key", "env_val")) + .build()) + .addTimeseries( + TimeSeries.newBuilder() + .setStartTimestamp(timestamp) + .addLabelValues( + LabelValue.newBuilder() + .setHasValue(true) + .setValue("foo_value") + .build()) + .addPoints(point) + .build()) + .build(); + + return dist; + } + +} diff --git a/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReaderTest.java b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReaderTest.java new file mode 100644 index 000000000000..b089e36e2357 --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/test/java/org/apache/druid/data/input/opencensus/protobuf/OpenCensusProtobufReaderTest.java @@ -0,0 +1,368 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opencensus.protobuf; + +import com.google.common.collect.ImmutableList; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.Metric; +import io.opentelemetry.proto.metrics.v1.MetricsData; +import org.apache.druid.data.input.ColumnsFilter; +import org.apache.druid.data.input.InputEntityReader; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.InputRowSchema; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.StringDimensionSchema; +import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.data.input.kafka.KafkaRecordEntity; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.parsers.CloseableIterator; +import org.apache.druid.java.util.common.parsers.ParseException; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.common.header.Header; +import org.apache.kafka.common.header.Headers; +import org.apache.kafka.common.header.internals.RecordHeader; +import org.apache.kafka.common.header.internals.RecordHeaders; +import org.apache.kafka.common.record.TimestampType; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +public class OpenCensusProtobufReaderTest +{ + private static final long TIMESTAMP = TimeUnit.MILLISECONDS.toNanos(Instant.parse("2019-07-12T09:30:01.123Z").toEpochMilli()); + public static final String RESOURCE_ATTRIBUTE_COUNTRY = "country"; + public static final String RESOURCE_ATTRIBUTE_VALUE_USA = "usa"; + + public static final String RESOURCE_ATTRIBUTE_ENV = "env"; + public static final String RESOURCE_ATTRIBUTE_VALUE_DEVEL = "devel"; + + public static final String INSTRUMENTATION_SCOPE_NAME = "mock-instr-lib"; + public static final String INSTRUMENTATION_SCOPE_VERSION = "1.0"; + + public static final String METRIC_ATTRIBUTE_COLOR = "color"; + public static final String METRIC_ATTRIBUTE_VALUE_RED = "red"; + + public static final String METRIC_ATTRIBUTE_FOO_KEY = "foo_key"; + public static final String METRIC_ATTRIBUTE_FOO_VAL = "foo_value"; + + private final MetricsData.Builder metricsDataBuilder = MetricsData.newBuilder(); + + private final Metric.Builder metricBuilder = metricsDataBuilder.addResourceMetricsBuilder() + .addScopeMetricsBuilder() + .addMetricsBuilder(); + + private final DimensionsSpec dimensionsSpec = new DimensionsSpec(ImmutableList.of( + new StringDimensionSchema("descriptor." + METRIC_ATTRIBUTE_COLOR), + new StringDimensionSchema("descriptor." + METRIC_ATTRIBUTE_FOO_KEY), + new StringDimensionSchema("custom." + RESOURCE_ATTRIBUTE_ENV), + new StringDimensionSchema("custom." 
+ RESOURCE_ATTRIBUTE_COUNTRY) + )); + + public static final String TOPIC = "telemetry.metrics.otel"; + public static final int PARTITION = 2; + public static final long OFFSET = 13095752723L; + public static final long TS = 1643974867555L; + public static final TimestampType TSTYPE = TimestampType.CREATE_TIME; + public static final byte[] V0_HEADER_BYTES = ByteBuffer.allocate(Integer.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putInt(1) + .array(); + private static final Header HEADERV1 = new RecordHeader("v", V0_HEADER_BYTES); + private static final Headers HEADERS = new RecordHeaders(new Header[]{HEADERV1}); + + @Before + public void setUp() + { + metricsDataBuilder + .getResourceMetricsBuilder(0) + .getResourceBuilder() + .addAttributes(KeyValue.newBuilder() + .setKey(RESOURCE_ATTRIBUTE_COUNTRY) + .setValue(AnyValue.newBuilder().setStringValue(RESOURCE_ATTRIBUTE_VALUE_USA))); + + metricsDataBuilder + .getResourceMetricsBuilder(0) + .getScopeMetricsBuilder(0) + .getScopeBuilder() + .setName(INSTRUMENTATION_SCOPE_NAME) + .setVersion(INSTRUMENTATION_SCOPE_VERSION); + + } + + @Test + public void testSumWithAttributes() throws IOException + { + metricBuilder + .setName("example_sum") + .getSumBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()); + + + MetricsData metricsData = metricsDataBuilder.build(); + ConsumerRecord consumerRecord = new ConsumerRecord<>(TOPIC, PARTITION, OFFSET, TS, TSTYPE, -1, -1, + null, metricsData.toByteArray(), HEADERS, Optional.empty()); + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat( + "metric.name", + null, + "descriptor.", + "custom." 
+ ); + + SettableByteEntity entity = new SettableByteEntity<>(); + InputEntityReader reader = inputFormat.createReader(new InputRowSchema( + new TimestampSpec("timestamp", "iso", null), + dimensionsSpec, + ColumnsFilter.all() + ), entity, null); + + entity.setEntity(new KafkaRecordEntity(consumerRecord)); + try (CloseableIterator rows = reader.read()) { + List rowList = new ArrayList<>(); + rows.forEachRemaining(rowList::add); + Assert.assertEquals(1, rowList.size()); + + InputRow row = rowList.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_sum"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + assertDimensionEquals(row, "value", "6"); + } + } + + @Test + public void testGaugeWithAttributes() throws IOException + { + metricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()); + + MetricsData metricsData = metricsDataBuilder.build(); + ConsumerRecord consumerRecord = new ConsumerRecord<>(TOPIC, PARTITION, OFFSET, TS, TSTYPE, -1, -1, + null, metricsData.toByteArray(), HEADERS, Optional.empty()); + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat("metric.name", + null, + "descriptor.", + "custom."); + SettableByteEntity entity = new SettableByteEntity<>(); + InputEntityReader reader = inputFormat.createReader(new InputRowSchema( + new TimestampSpec("timestamp", "iso", null), + dimensionsSpec, + ColumnsFilter.all() + ), entity, null); + + entity.setEntity(new KafkaRecordEntity(consumerRecord)); + try (CloseableIterator rows = reader.read()) { + Assert.assertTrue(rows.hasNext()); + InputRow row = rows.next(); + + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_gauge"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + assertDimensionEquals(row, "value", "6"); + } + } + + @Test + public void testBatchedMetricParse() throws IOException + { + metricBuilder.setName("example_sum") + .getSumBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()); + + // Create Second Metric + Metric.Builder gaugeMetricBuilder = metricsDataBuilder.addResourceMetricsBuilder() + .addScopeMetricsBuilder() + .addMetricsBuilder(); + + metricsDataBuilder.getResourceMetricsBuilder(1) + .getResourceBuilder() + .addAttributes(KeyValue.newBuilder() + .setKey(RESOURCE_ATTRIBUTE_ENV) + .setValue(AnyValue.newBuilder().setStringValue(RESOURCE_ATTRIBUTE_VALUE_DEVEL)) + .build()); + + metricsDataBuilder.getResourceMetricsBuilder(1) + .getScopeMetricsBuilder(0) + .getScopeBuilder() + .setName(INSTRUMENTATION_SCOPE_NAME) + .setVersion(INSTRUMENTATION_SCOPE_VERSION); + + gaugeMetricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(8) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_FOO_KEY) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_FOO_VAL).build()); + + MetricsData metricsData = 
metricsDataBuilder.build(); + ConsumerRecord consumerRecord = new ConsumerRecord<>(TOPIC, PARTITION, OFFSET, TS, TSTYPE, -1, -1, + null, metricsData.toByteArray(), HEADERS, Optional.empty()); + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat("metric.name", + null, + "descriptor.", + "custom."); + SettableByteEntity entity = new SettableByteEntity<>(); + InputEntityReader reader = inputFormat.createReader(new InputRowSchema( + new TimestampSpec("timestamp", "iso", null), + dimensionsSpec, + ColumnsFilter.all() + ), entity, null); + + entity.setEntity(new KafkaRecordEntity(consumerRecord)); + try (CloseableIterator rows = reader.read()) { + Assert.assertTrue(rows.hasNext()); + InputRow row = rows.next(); + + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_sum"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + assertDimensionEquals(row, "value", "6"); + + Assert.assertTrue(rows.hasNext()); + row = rows.next(); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_gauge"); + assertDimensionEquals(row, "custom.env", "devel"); + assertDimensionEquals(row, "descriptor.foo_key", "foo_value"); + assertDimensionEquals(row, "value", "8"); + } + } + + @Test + public void testDimensionSpecExclusions() throws IOException + { + metricsDataBuilder.getResourceMetricsBuilder(0) + .getResourceBuilder() + .addAttributesBuilder() + .setKey(RESOURCE_ATTRIBUTE_ENV) + .setValue(AnyValue.newBuilder().setStringValue(RESOURCE_ATTRIBUTE_VALUE_DEVEL).build()); + + metricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAllAttributes(ImmutableList.of( + KeyValue.newBuilder() + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()).build(), + KeyValue.newBuilder() + .setKey(METRIC_ATTRIBUTE_FOO_KEY) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_FOO_VAL).build()).build())); + + DimensionsSpec dimensionsSpecWithExclusions = DimensionsSpec.builder().setDimensionExclusions( + ImmutableList.of( + "descriptor." + METRIC_ATTRIBUTE_COLOR, + "custom." 
+ RESOURCE_ATTRIBUTE_COUNTRY + )).build(); + + MetricsData metricsData = metricsDataBuilder.build(); + ConsumerRecord consumerRecord = new ConsumerRecord<>(TOPIC, PARTITION, OFFSET, TS, TSTYPE, -1, -1, + null, metricsData.toByteArray(), HEADERS, Optional.empty()); + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat("metric.name", + null, + "descriptor.", + "custom."); + + SettableByteEntity entity = new SettableByteEntity<>(); + InputEntityReader reader = inputFormat.createReader(new InputRowSchema( + new TimestampSpec("timestamp", "iso", null), + dimensionsSpecWithExclusions, + ColumnsFilter.all() + ), entity, null); + + entity.setEntity(new KafkaRecordEntity(consumerRecord)); + try (CloseableIterator rows = reader.read()) { + Assert.assertTrue(rows.hasNext()); + InputRow row = rows.next(); + + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_gauge"); + assertDimensionEquals(row, "value", "6"); + assertDimensionEquals(row, "custom.env", "devel"); + assertDimensionEquals(row, "descriptor.foo_key", "foo_value"); + Assert.assertFalse(row.getDimensions().contains("custom.country")); + Assert.assertFalse(row.getDimensions().contains("descriptor.color")); + } + } + + @Test + public void testInvalidProtobuf() throws IOException + { + byte[] invalidProtobuf = new byte[] {0x00, 0x01}; + ConsumerRecord consumerRecord = new ConsumerRecord<>(TOPIC, PARTITION, OFFSET, TS, TSTYPE, -1, -1, + null, invalidProtobuf, HEADERS, Optional.empty()); + OpenCensusProtobufInputFormat inputFormat = new OpenCensusProtobufInputFormat("metric.name", + null, + "descriptor.", + "custom."); + + SettableByteEntity entity = new SettableByteEntity<>(); + InputEntityReader reader = inputFormat.createReader(new InputRowSchema( + new TimestampSpec("timestamp", "iso", null), + dimensionsSpec, + ColumnsFilter.all() + ), entity, null); + + entity.setEntity(new KafkaRecordEntity(consumerRecord)); + try (CloseableIterator rows = reader.read()) { + Assert.assertThrows(ParseException.class, () -> rows.hasNext()); + Assert.assertThrows(ParseException.class, () -> rows.next()); + } + } + + private void assertDimensionEquals(InputRow row, String dimension, Object expected) + { + List values = row.getDimension(dimension); + Assert.assertEquals(1, values.size()); + Assert.assertEquals(expected, values.get(0)); + } + +} diff --git a/extensions-contrib/opencensus-extensions/src/test/resources/log4j2.xml b/extensions-contrib/opencensus-extensions/src/test/resources/log4j2.xml new file mode 100644 index 000000000000..05a8e1d69cbe --- /dev/null +++ b/extensions-contrib/opencensus-extensions/src/test/resources/log4j2.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + diff --git a/extensions-contrib/opentelemetry-extensions/pom.xml b/extensions-contrib/opentelemetry-extensions/pom.xml new file mode 100644 index 000000000000..1a457cbc538a --- /dev/null +++ b/extensions-contrib/opentelemetry-extensions/pom.xml @@ -0,0 +1,103 @@ + + + + 4.0.0 + + org.apache.druid.extensions.contrib + druid-opentelemetry-extensions + druid-opentelemetry-extensions + druid-opentelemetry-extensions + + + druid + org.apache.druid + 28.0.0-SNAPSHOT + ../../pom.xml + + + + com.google.protobuf + protobuf-java + + + io.opentelemetry.proto + opentelemetry-proto + 0.19.0-alpha + + + com.google.guava + guava + provided + + + com.google.inject + guice + provided + + + com.google.code.findbugs + jsr305 + provided + + + com.fasterxml.jackson.core + jackson-annotations + provided + + + 
com.fasterxml.jackson.core + jackson-databind + provided + + + org.apache.druid + druid-processing + ${project.parent.version} + provided + + + org.apache.druid + druid-indexing-service + ${project.parent.version} + provided + + + + junit + junit + test + + + + org.openjdk.jmh + jmh-core + 1.27 + test + + + org.openjdk.jmh + jmh-generator-annprocess + 1.27 + test + + + diff --git a/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufInputFormat.java b/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufInputFormat.java new file mode 100644 index 000000000000..50029e8dfbd9 --- /dev/null +++ b/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufInputFormat.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.data.input.opentelemetry.protobuf; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.druid.data.input.InputEntity; +import org.apache.druid.data.input.InputEntityReader; +import org.apache.druid.data.input.InputFormat; +import org.apache.druid.data.input.InputRowSchema; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.StringUtils; + +import java.io.File; +import java.util.Objects; + +public class OpenTelemetryMetricsProtobufInputFormat implements InputFormat +{ + private static final String DEFAULT_METRIC_DIMENSION = "metric"; + private static final String DEFAULT_VALUE_DIMENSION = "value"; + private static final String DEFAULT_RESOURCE_PREFIX = "resource."; + + private final String metricDimension; + private final String valueDimension; + private final String metricAttributePrefix; + private final String resourceAttributePrefix; + + public OpenTelemetryMetricsProtobufInputFormat( + @JsonProperty("metricDimension") String metricDimension, + @JsonProperty("valueDimension") String valueDimension, + @JsonProperty("metricAttributePrefix") String metricAttributePrefix, + @JsonProperty("resourceAttributePrefix") String resourceAttributePrefix + ) + { + this.metricDimension = metricDimension != null ? metricDimension : DEFAULT_METRIC_DIMENSION; + this.valueDimension = valueDimension != null ? valueDimension : DEFAULT_VALUE_DIMENSION; + this.metricAttributePrefix = StringUtils.nullToEmptyNonDruidDataString(metricAttributePrefix); + this.resourceAttributePrefix = resourceAttributePrefix != null ? 
resourceAttributePrefix : DEFAULT_RESOURCE_PREFIX; + } + + @Override + public boolean isSplittable() + { + return false; + } + + @Override + public InputEntityReader createReader(InputRowSchema inputRowSchema, InputEntity source, File temporaryDirectory) + { + // Sampler passes a KafkaRecordEntity directly, while the normal code path wraps the same entity in a + // SettableByteEntity + SettableByteEntity settableEntity; + if (source instanceof SettableByteEntity) { + settableEntity = (SettableByteEntity) source; + } else { + SettableByteEntity wrapper = new SettableByteEntity<>(); + wrapper.setEntity((ByteEntity) source); + settableEntity = wrapper; + } + return new OpenTelemetryMetricsProtobufReader( + inputRowSchema.getDimensionsSpec(), + settableEntity, + metricDimension, + valueDimension, + metricAttributePrefix, + resourceAttributePrefix + ); + } + + @JsonProperty + public String getMetricDimension() + { + return metricDimension; + } + + @JsonProperty + public String getValueDimension() + { + return valueDimension; + } + + @JsonProperty + public String getMetricAttributePrefix() + { + return metricAttributePrefix; + } + + @JsonProperty + public String getResourceAttributePrefix() + { + return resourceAttributePrefix; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (!(o instanceof OpenTelemetryMetricsProtobufInputFormat)) { + return false; + } + OpenTelemetryMetricsProtobufInputFormat that = (OpenTelemetryMetricsProtobufInputFormat) o; + return Objects.equals(metricDimension, that.metricDimension) + && Objects.equals(valueDimension, that.valueDimension) + && Objects.equals(metricAttributePrefix, that.metricAttributePrefix) + && Objects.equals(resourceAttributePrefix, that.resourceAttributePrefix); + } + + @Override + public int hashCode() + { + return Objects.hash(metricDimension, valueDimension, metricAttributePrefix, resourceAttributePrefix); + } +} diff --git a/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReader.java b/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReader.java new file mode 100644 index 000000000000..2e0a62532b5d --- /dev/null +++ b/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReader.java @@ -0,0 +1,249 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opentelemetry.protobuf; + +import com.google.common.base.Supplier; +import com.google.common.base.Suppliers; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import com.google.protobuf.InvalidProtocolBufferException; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.metrics.v1.DataPointFlags; +import io.opentelemetry.proto.metrics.v1.Metric; +import io.opentelemetry.proto.metrics.v1.MetricsData; +import io.opentelemetry.proto.metrics.v1.NumberDataPoint; +import org.apache.druid.data.input.InputEntityReader; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.InputRowListPlusRawValues; +import org.apache.druid.data.input.MapBasedInputRow; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.CloseableIterators; +import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.java.util.common.parsers.CloseableIterator; +import org.apache.druid.java.util.common.parsers.ParseException; + +import javax.annotation.Nullable; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +public class OpenTelemetryMetricsProtobufReader implements InputEntityReader +{ + private static final Logger log = new Logger(OpenTelemetryMetricsProtobufReader.class); + + private final SettableByteEntity source; + private final String metricDimension; + private final String valueDimension; + private final String metricAttributePrefix; + private final String resourceAttributePrefix; + private final DimensionsSpec dimensionsSpec; + + public OpenTelemetryMetricsProtobufReader( + DimensionsSpec dimensionsSpec, + SettableByteEntity source, + String metricDimension, + String valueDimension, + String metricAttributePrefix, + String resourceAttributePrefix + ) + { + this.dimensionsSpec = dimensionsSpec; + this.source = source; + this.metricDimension = metricDimension; + this.valueDimension = valueDimension; + this.metricAttributePrefix = metricAttributePrefix; + this.resourceAttributePrefix = resourceAttributePrefix; + } + + @Override + public CloseableIterator read() + { + Supplier> supplier = Suppliers.memoize(() -> readAsList().iterator()); + return CloseableIterators.withEmptyBaggage(new Iterator() { + @Override + public boolean hasNext() + { + return supplier.get().hasNext(); + } + @Override + public InputRow next() + { + return supplier.get().next(); + } + }); + } + + List readAsList() + { + try { + ByteBuffer buffer = source.getEntity().getBuffer(); + List rows = parseMetricsData(MetricsData.parseFrom(buffer)); + // Explicitly move the position assuming that all the remaining bytes have been consumed because the protobuf + // parser does not update the position itself + buffer.position(buffer.limit()); + return rows; + } + catch (InvalidProtocolBufferException e) { + throw new ParseException(null, e, "Protobuf message could not be parsed"); + } + } + + private List parseMetricsData(final MetricsData metricsData) + { + return metricsData.getResourceMetricsList() + .stream() + .flatMap(resourceMetrics -> { + Map resourceAttributes = resourceMetrics.getResource() + .getAttributesList() + 
.stream() + .collect(HashMap::new, + (m, kv) -> { + Object value = parseAnyValue(kv.getValue()); + if (value != null) { + m.put(resourceAttributePrefix + kv.getKey(), value); + } + }, + HashMap::putAll); + return resourceMetrics.getScopeMetricsList() + .stream() + .flatMap(scopeMetrics -> scopeMetrics.getMetricsList() + .stream() + .flatMap(metric -> parseMetric(metric, resourceAttributes).stream())); + }) + .collect(Collectors.toList()); + } + + private List parseMetric(Metric metric, Map resourceAttributes) + { + final List inputRows; + String metricName = metric.getName(); + switch (metric.getDataCase()) { + case SUM: { + inputRows = new ArrayList<>(metric.getSum().getDataPointsCount()); + metric.getSum() + .getDataPointsList() + .forEach(dataPoint -> { + if (hasRecordedValue(dataPoint)) { + inputRows.add(parseNumberDataPoint(dataPoint, resourceAttributes, metricName)); + } + }); + break; + } + case GAUGE: { + inputRows = new ArrayList<>(metric.getGauge().getDataPointsCount()); + metric.getGauge() + .getDataPointsList() + .forEach(dataPoint -> { + if (hasRecordedValue(dataPoint)) { + inputRows.add(parseNumberDataPoint(dataPoint, resourceAttributes, metricName)); + } + }); + break; + } + // TODO Support HISTOGRAM and SUMMARY metrics + case HISTOGRAM: + case SUMMARY: + default: + log.trace("Metric type %s is not supported", metric.getDataCase()); + inputRows = Collections.emptyList(); + + } + return inputRows; + } + + private static boolean hasRecordedValue(NumberDataPoint d) + { + return (d.getFlags() & DataPointFlags.FLAG_NO_RECORDED_VALUE_VALUE) == 0; + } + + private InputRow parseNumberDataPoint(NumberDataPoint dataPoint, + Map resourceAttributes, + String metricName) + { + + int capacity = resourceAttributes.size() + + dataPoint.getAttributesCount() + + 2; // metric name + value columns + Map event = Maps.newHashMapWithExpectedSize(capacity); + event.put(metricDimension, metricName); + + if (dataPoint.hasAsInt()) { + event.put(valueDimension, dataPoint.getAsInt()); + } else { + event.put(valueDimension, dataPoint.getAsDouble()); + } + + event.putAll(resourceAttributes); + dataPoint.getAttributesList().forEach(att -> { + Object value = parseAnyValue(att.getValue()); + if (value != null) { + event.put(metricAttributePrefix + att.getKey(), value); + } + }); + + return createRow(TimeUnit.NANOSECONDS.toMillis(dataPoint.getTimeUnixNano()), event); + } + + @Nullable + private static Object parseAnyValue(AnyValue value) + { + switch (value.getValueCase()) { + case INT_VALUE: + return value.getIntValue(); + case BOOL_VALUE: + return value.getBoolValue(); + case DOUBLE_VALUE: + return value.getDoubleValue(); + case STRING_VALUE: + return value.getStringValue(); + + // TODO: Support KVLIST_VALUE, ARRAY_VALUE and BYTES_VALUE + + default: + // VALUE_NOT_SET + return null; + } + } + + InputRow createRow(long timeUnixMilli, Map event) + { + final List dimensions; + if (!dimensionsSpec.getDimensionNames().isEmpty()) { + dimensions = dimensionsSpec.getDimensionNames(); + } else { + dimensions = new ArrayList<>(Sets.difference(event.keySet(), dimensionsSpec.getDimensionExclusions())); + } + return new MapBasedInputRow(timeUnixMilli, dimensions, event); + } + + @Override + public CloseableIterator sample() + { + return read().map(row -> InputRowListPlusRawValues.of(row, ((MapBasedInputRow) row).getEvent())); + } +} diff --git a/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryProtobufExtensionsModule.java 
b/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryProtobufExtensionsModule.java
new file mode 100644
index 000000000000..4c027c31248c
--- /dev/null
+++ b/extensions-contrib/opentelemetry-extensions/src/main/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryProtobufExtensionsModule.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.data.input.opentelemetry.protobuf;
+
+import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.jsontype.NamedType;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.google.inject.Binder;
+import org.apache.druid.initialization.DruidModule;
+
+import java.util.Collections;
+import java.util.List;
+
+public class OpenTelemetryProtobufExtensionsModule implements DruidModule
+{
+
+  @Override
+  public List<? extends Module> getJacksonModules()
+  {
+    return Collections.singletonList(
+        new SimpleModule("OpenTelemetryProtobufInputFormat")
+            .registerSubtypes(
+                new NamedType(OpenTelemetryMetricsProtobufInputFormat.class, "opentelemetry-metrics-protobuf")
+            )
+    );
+  }
+
+  @Override
+  public void configure(Binder binder)
+  {
+  }
+}
diff --git a/extensions-contrib/opentelemetry-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/opentelemetry-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
new file mode 100755
index 000000000000..b2a7d04bb635
--- /dev/null
+++ b/extensions-contrib/opentelemetry-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
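+#
+# Registering the DruidModule below makes the "opentelemetry-metrics-protobuf"
+# inputFormat type (see OpenTelemetryProtobufExtensionsModule above) available
+# to ingestion specs once the extension is loaded.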
+ +org.apache.druid.data.input.opentelemetry.protobuf.OpenTelemetryProtobufExtensionsModule + diff --git a/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryBenchmark.java b/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryBenchmark.java new file mode 100644 index 000000000000..0238aeccafa5 --- /dev/null +++ b/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryBenchmark.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.data.input.opentelemetry.protobuf; + +import com.google.common.collect.ImmutableList; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.metrics.v1.Metric; +import io.opentelemetry.proto.metrics.v1.MetricsData; +import io.opentelemetry.proto.metrics.v1.NumberDataPoint; +import io.opentelemetry.proto.metrics.v1.ResourceMetrics; +import io.opentelemetry.proto.metrics.v1.ScopeMetrics; +import io.opentelemetry.proto.resource.v1.Resource; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.InputRowSchema; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.StringDimensionSchema; +import org.apache.druid.java.util.common.parsers.CloseableIterator; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.infra.Blackhole; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.util.concurrent.TimeUnit; + +@Fork(1) +@State(Scope.Benchmark) +public class OpenTelemetryBenchmark +{ + + private static ByteBuffer BUFFER; + + @Param(value = {"1", "2", "4", "8" }) + private int resourceMetricCount = 1; + + @Param(value = {"1"}) + private int instrumentationScopeCount = 1; + + @Param(value = {"1", "2", "4", "8" }) + private int metricsCount = 1; + + @Param(value = {"1", "2", "4", "8" }) + private int dataPointCount; + + private static final long TIMESTAMP = TimeUnit.MILLISECONDS.toNanos(Instant.parse("2019-07-12T09:30:01.123Z").toEpochMilli()); + + private static final InputRowSchema ROW_SCHEMA = new InputRowSchema(null, + new DimensionsSpec(ImmutableList.of( + new StringDimensionSchema("name"), + new StringDimensionSchema("value"), + new StringDimensionSchema("foo_key"))), + null); + + private static 
final OpenTelemetryMetricsProtobufInputFormat INPUT_FORMAT = + new OpenTelemetryMetricsProtobufInputFormat("name", + "value", + "", + "resource."); + + private ByteBuffer createMetricBuffer() + { + MetricsData.Builder metricsData = MetricsData.newBuilder(); + for (int i = 0; i < resourceMetricCount; i++) { + ResourceMetrics.Builder resourceMetricsBuilder = metricsData.addResourceMetricsBuilder(); + Resource.Builder resourceBuilder = resourceMetricsBuilder.getResourceBuilder(); + + for (int resourceAttributeI = 0; resourceAttributeI < 5; resourceAttributeI++) { + KeyValue.Builder resourceAttributeBuilder = resourceBuilder.addAttributesBuilder(); + resourceAttributeBuilder.setKey("resource.label_key_" + resourceAttributeI); + resourceAttributeBuilder.setValue(AnyValue.newBuilder().setStringValue("resource.label_value")); + } + + for (int j = 0; j < instrumentationScopeCount; j++) { + ScopeMetrics.Builder scopeMetricsBuilder = + resourceMetricsBuilder.addScopeMetricsBuilder(); + + for (int k = 0; k < metricsCount; k++) { + Metric.Builder metricBuilder = scopeMetricsBuilder.addMetricsBuilder(); + metricBuilder.setName("io.confluent.domain/such/good/metric/wow"); + + for (int l = 0; l < dataPointCount; l++) { + NumberDataPoint.Builder dataPointBuilder = metricBuilder.getSumBuilder().addDataPointsBuilder(); + dataPointBuilder.setAsDouble(42.0).setTimeUnixNano(TIMESTAMP); + + for (int metricAttributeI = 0; metricAttributeI < 10; metricAttributeI++) { + KeyValue.Builder attributeBuilder = dataPointBuilder.addAttributesBuilder(); + attributeBuilder.setKey("foo_key_" + metricAttributeI); + attributeBuilder.setValue(AnyValue.newBuilder().setStringValue("foo-value")); + } + } + } + } + } + return ByteBuffer.wrap(metricsData.build().toByteArray()); + } + + @Setup + public void init() + { + BUFFER = createMetricBuffer(); + } + + @Benchmark() + public void measureSerde(Blackhole blackhole) throws IOException + { + for (CloseableIterator it = INPUT_FORMAT.createReader(ROW_SCHEMA, new ByteEntity(BUFFER), null).read(); it.hasNext(); ) { + InputRow row = it.next(); + blackhole.consume(row); + } + } +} diff --git a/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsInputFormatTest.java b/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsInputFormatTest.java new file mode 100644 index 000000000000..536247ab5716 --- /dev/null +++ b/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsInputFormatTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opentelemetry.protobuf; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.druid.data.input.InputFormat; +import org.junit.Assert; +import org.junit.Test; + +public class OpenTelemetryMetricsInputFormatTest +{ + @Test + public void testSerde() throws Exception + { + OpenTelemetryMetricsProtobufInputFormat inputFormat = new OpenTelemetryMetricsProtobufInputFormat( + "metric.name", + "raw.value", + "descriptor.", + "custom." + ); + + final ObjectMapper jsonMapper = new ObjectMapper(); + jsonMapper.registerModules(new OpenTelemetryProtobufExtensionsModule().getJacksonModules()); + + final OpenTelemetryMetricsProtobufInputFormat actual = (OpenTelemetryMetricsProtobufInputFormat) jsonMapper.readValue( + jsonMapper.writeValueAsString(inputFormat), + InputFormat.class + ); + Assert.assertEquals(inputFormat, actual); + Assert.assertEquals("metric.name", actual.getMetricDimension()); + Assert.assertEquals("raw.value", actual.getValueDimension()); + Assert.assertEquals("descriptor.", actual.getMetricAttributePrefix()); + Assert.assertEquals("custom.", actual.getResourceAttributePrefix()); + } + + @Test + public void testDefaults() + { + OpenTelemetryMetricsProtobufInputFormat inputFormat = new OpenTelemetryMetricsProtobufInputFormat( + null, + null, + null, + null + ); + + Assert.assertEquals("metric", inputFormat.getMetricDimension()); + Assert.assertEquals("value", inputFormat.getValueDimension()); + Assert.assertEquals("", inputFormat.getMetricAttributePrefix()); + Assert.assertEquals("resource.", inputFormat.getResourceAttributePrefix()); + } +} diff --git a/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReaderTest.java b/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReaderTest.java new file mode 100644 index 000000000000..8044baf7dbe0 --- /dev/null +++ b/extensions-contrib/opentelemetry-extensions/src/test/java/org/apache/druid/data/input/opentelemetry/protobuf/OpenTelemetryMetricsProtobufReaderTest.java @@ -0,0 +1,441 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.data.input.opentelemetry.protobuf; + +import com.google.common.collect.ImmutableList; +import io.opentelemetry.proto.common.v1.AnyValue; +import io.opentelemetry.proto.common.v1.KeyValue; +import io.opentelemetry.proto.common.v1.KeyValueList; +import io.opentelemetry.proto.metrics.v1.DataPointFlags; +import io.opentelemetry.proto.metrics.v1.Metric; +import io.opentelemetry.proto.metrics.v1.MetricsData; +import org.apache.druid.data.input.InputRow; +import org.apache.druid.data.input.impl.ByteEntity; +import org.apache.druid.data.input.impl.DimensionsSpec; +import org.apache.druid.data.input.impl.StringDimensionSchema; +import org.apache.druid.indexing.seekablestream.SettableByteEntity; +import org.apache.druid.java.util.common.parsers.CloseableIterator; +import org.apache.druid.java.util.common.parsers.ParseException; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +public class OpenTelemetryMetricsProtobufReaderTest +{ + private static final long TIMESTAMP = TimeUnit.MILLISECONDS.toNanos(Instant.parse("2019-07-12T09:30:01.123Z").toEpochMilli()); + public static final String RESOURCE_ATTRIBUTE_COUNTRY = "country"; + public static final String RESOURCE_ATTRIBUTE_VALUE_USA = "usa"; + + public static final String RESOURCE_ATTRIBUTE_ENV = "env"; + public static final String RESOURCE_ATTRIBUTE_VALUE_DEVEL = "devel"; + + public static final String INSTRUMENTATION_SCOPE_NAME = "mock-instr-lib"; + public static final String INSTRUMENTATION_SCOPE_VERSION = "1.0"; + + public static final String METRIC_ATTRIBUTE_COLOR = "color"; + public static final String METRIC_ATTRIBUTE_VALUE_RED = "red"; + + public static final String METRIC_ATTRIBUTE_FOO_KEY = "foo_key"; + public static final String METRIC_ATTRIBUTE_FOO_VAL = "foo_value"; + + private final MetricsData.Builder metricsDataBuilder = MetricsData.newBuilder(); + + private final Metric.Builder metricBuilder = metricsDataBuilder.addResourceMetricsBuilder() + .addScopeMetricsBuilder() + .addMetricsBuilder(); + + private final DimensionsSpec dimensionsSpec = new DimensionsSpec(ImmutableList.of( + new StringDimensionSchema("descriptor." + METRIC_ATTRIBUTE_COLOR), + new StringDimensionSchema("descriptor." + METRIC_ATTRIBUTE_FOO_KEY), + new StringDimensionSchema("custom." + RESOURCE_ATTRIBUTE_ENV), + new StringDimensionSchema("custom." 
+ RESOURCE_ATTRIBUTE_COUNTRY) + )); + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Before + public void setUp() + { + metricsDataBuilder + .getResourceMetricsBuilder(0) + .getResourceBuilder() + .addAttributes(KeyValue.newBuilder() + .setKey(RESOURCE_ATTRIBUTE_COUNTRY) + .setValue(AnyValue.newBuilder().setStringValue(RESOURCE_ATTRIBUTE_VALUE_USA))); + + metricsDataBuilder + .getResourceMetricsBuilder(0) + .getScopeMetricsBuilder(0) + .getScopeBuilder() + .setName(INSTRUMENTATION_SCOPE_NAME) + .setVersion(INSTRUMENTATION_SCOPE_VERSION); + + } + + @Test + public void testSumWithAttributes() + { + metricBuilder + .setName("example_sum") + .getSumBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()); + + MetricsData metricsData = metricsDataBuilder.build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." + ).read(); + + List rowList = new ArrayList<>(); + rows.forEachRemaining(rowList::add); + Assert.assertEquals(1, rowList.size()); + + InputRow row = rowList.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_sum"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + assertDimensionEquals(row, "raw.value", "6"); + } + + @Test + public void testGaugeWithAttributes() + { + metricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()); + + MetricsData metricsData = metricsDataBuilder.build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." 
+ ).read(); + + Assert.assertTrue(rows.hasNext()); + InputRow row = rows.next(); + + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_gauge"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + assertDimensionEquals(row, "raw.value", "6"); + } + + @Test + public void testBatchedMetricParse() + { + metricBuilder.setName("example_sum") + .getSumBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()); + + // Create Second Metric + Metric.Builder gaugeMetricBuilder = metricsDataBuilder.addResourceMetricsBuilder() + .addScopeMetricsBuilder() + .addMetricsBuilder(); + + metricsDataBuilder.getResourceMetricsBuilder(1) + .getResourceBuilder() + .addAttributes(KeyValue.newBuilder() + .setKey(RESOURCE_ATTRIBUTE_ENV) + .setValue(AnyValue.newBuilder().setStringValue(RESOURCE_ATTRIBUTE_VALUE_DEVEL)) + .build()); + + metricsDataBuilder.getResourceMetricsBuilder(1) + .getScopeMetricsBuilder(0) + .getScopeBuilder() + .setName(INSTRUMENTATION_SCOPE_NAME) + .setVersion(INSTRUMENTATION_SCOPE_VERSION); + + gaugeMetricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(8) + .setTimeUnixNano(TIMESTAMP) + .addAttributesBuilder() // test sum with attributes + .setKey(METRIC_ATTRIBUTE_FOO_KEY) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_FOO_VAL).build()); + + MetricsData metricsData = metricsDataBuilder.build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." 
+ ).read(); + + Assert.assertTrue(rows.hasNext()); + InputRow row = rows.next(); + + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_sum"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + assertDimensionEquals(row, "raw.value", "6"); + + Assert.assertTrue(rows.hasNext()); + row = rows.next(); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_gauge"); + assertDimensionEquals(row, "custom.env", "devel"); + assertDimensionEquals(row, "descriptor.foo_key", "foo_value"); + assertDimensionEquals(row, "raw.value", "8"); + + } + + @Test + public void testDimensionSpecExclusions() + { + metricsDataBuilder.getResourceMetricsBuilder(0) + .getResourceBuilder() + .addAttributesBuilder() + .setKey(RESOURCE_ATTRIBUTE_ENV) + .setValue(AnyValue.newBuilder().setStringValue(RESOURCE_ATTRIBUTE_VALUE_DEVEL).build()); + + metricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAllAttributes(ImmutableList.of( + KeyValue.newBuilder() + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()).build(), + KeyValue.newBuilder() + .setKey(METRIC_ATTRIBUTE_FOO_KEY) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_FOO_VAL).build()).build())); + + MetricsData metricsData = metricsDataBuilder.build(); + + DimensionsSpec dimensionsSpecWithExclusions = DimensionsSpec.builder().setDimensionExclusions(ImmutableList.of( + "descriptor." + METRIC_ATTRIBUTE_COLOR, + "custom." + RESOURCE_ATTRIBUTE_COUNTRY + )).build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpecWithExclusions, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." 
+ ).read(); + + Assert.assertTrue(rows.hasNext()); + InputRow row = rows.next(); + + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_gauge"); + assertDimensionEquals(row, "raw.value", "6"); + assertDimensionEquals(row, "custom.env", "devel"); + assertDimensionEquals(row, "descriptor.foo_key", "foo_value"); + Assert.assertFalse(row.getDimensions().contains("custom.country")); + Assert.assertFalse(row.getDimensions().contains("descriptor.color")); + } + + @Test + public void testUnsupportedValueTypes() + { + KeyValueList kvList = KeyValueList.newBuilder() + .addValues( + KeyValue.newBuilder() + .setKey("foo") + .setValue(AnyValue.newBuilder().setStringValue("bar").build())) + .build(); + + metricsDataBuilder.getResourceMetricsBuilder(0) + .getResourceBuilder() + .addAttributesBuilder() + .setKey(RESOURCE_ATTRIBUTE_ENV) + .setValue(AnyValue.newBuilder().setKvlistValue(kvList).build()); + + metricBuilder + .setName("example_sum") + .getSumBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setTimeUnixNano(TIMESTAMP) + .addAllAttributes(ImmutableList.of( + KeyValue.newBuilder() + .setKey(METRIC_ATTRIBUTE_COLOR) + .setValue(AnyValue.newBuilder().setStringValue(METRIC_ATTRIBUTE_VALUE_RED).build()).build(), + KeyValue.newBuilder() + .setKey(METRIC_ATTRIBUTE_FOO_KEY) + .setValue(AnyValue.newBuilder().setKvlistValue(kvList).build()).build())); + + MetricsData metricsData = metricsDataBuilder.build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." + ).read(); + + List rowList = new ArrayList<>(); + rows.forEachRemaining(rowList::add); + Assert.assertEquals(1, rowList.size()); + + InputRow row = rowList.get(0); + Assert.assertEquals(4, row.getDimensions().size()); + assertDimensionEquals(row, "metric.name", "example_sum"); + assertDimensionEquals(row, "custom.country", "usa"); + assertDimensionEquals(row, "descriptor.color", "red"); + + // Unsupported resource attribute type is omitted + Assert.assertEquals(0, row.getDimension("custom.env").size()); + + // Unsupported metric attribute type is omitted + Assert.assertEquals(0, row.getDimension("descriptor.foo_key").size()); + + assertDimensionEquals(row, "raw.value", "6"); + } + + @Test + public void testInvalidProtobuf() + { + byte[] invalidProtobuf = new byte[] {0x00, 0x01}; + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(invalidProtobuf)); + try (CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." + ).read()) { + Assert.assertThrows(ParseException.class, () -> rows.hasNext()); + Assert.assertThrows(ParseException.class, () -> rows.next()); + } + catch (IOException e) { + // Comes from the implicit call to close. 
Ignore + } + } + + @Test + public void testInvalidMetricType() + { + metricBuilder + .setName("unsupported_histogram_metric") + .getExponentialHistogramBuilder() + .addDataPointsBuilder() + .setTimeUnixNano(TIMESTAMP); + + MetricsData metricsData = metricsDataBuilder.build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." + ).read(); + + List rowList = new ArrayList<>(); + rows.forEachRemaining(rowList::add); + Assert.assertEquals(0, rowList.size()); + } + + @Test + public void testNoRecordedValueMetric() + { + metricBuilder.setName("example_gauge") + .getGaugeBuilder() + .addDataPointsBuilder() + .setAsInt(6) + .setFlags(DataPointFlags.FLAG_NO_RECORDED_VALUE_VALUE) + .setTimeUnixNano(TIMESTAMP); + + MetricsData metricsData = metricsDataBuilder.build(); + + SettableByteEntity settableByteEntity = new SettableByteEntity<>(); + settableByteEntity.setEntity(new ByteEntity(metricsData.toByteArray())); + CloseableIterator rows = new OpenTelemetryMetricsProtobufReader( + dimensionsSpec, + settableByteEntity, + "metric.name", + "raw.value", + "descriptor.", + "custom." + ).read(); + + Assert.assertFalse(rows.hasNext()); + } + + private void assertDimensionEquals(InputRow row, String dimension, Object expected) + { + List values = row.getDimension(dimension); + Assert.assertEquals(1, values.size()); + Assert.assertEquals(expected, values.get(0)); + } + +} diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageConfig.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageConfig.java index cfae0eb084b7..b52d13cd518e 100644 --- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageConfig.java +++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageConfig.java @@ -36,12 +36,22 @@ public class S3StorageConfig @JsonProperty("sse") private final ServerSideEncryption serverSideEncryption; + /** + * S3 transfer config. + * + * @see S3StorageDruidModule#configure + */ + @JsonProperty("transfer") + private final S3TransferConfig s3TransferConfig; + @JsonCreator public S3StorageConfig( - @JsonProperty("sse") ServerSideEncryption serverSideEncryption + @JsonProperty("sse") ServerSideEncryption serverSideEncryption, + @JsonProperty("transfer") S3TransferConfig s3TransferConfig ) { this.serverSideEncryption = serverSideEncryption == null ? new NoopServerSideEncryption() : serverSideEncryption; + this.s3TransferConfig = s3TransferConfig == null ? new S3TransferConfig() : s3TransferConfig; } @JsonProperty("sse") @@ -49,4 +59,10 @@ public ServerSideEncryption getServerSideEncryption() { return serverSideEncryption; } + + @JsonProperty("transfer") + public S3TransferConfig getS3TransferConfig() + { + return s3TransferConfig; + } } diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3TransferConfig.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3TransferConfig.java new file mode 100644 index 000000000000..fc8bd8903fad --- /dev/null +++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3TransferConfig.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.storage.s3; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import javax.validation.constraints.Min; + +/** + */ +public class S3TransferConfig +{ + @JsonProperty + private boolean useTransferManager = false; + + @JsonProperty + @Min(1) + private long minimumUploadPartSize = 5 * 1024 * 1024L; + + @JsonProperty + @Min(1) + private long multipartUploadThreshold = 5 * 1024 * 1024L; + + public void setUseTransferManager(boolean useTransferManager) + { + this.useTransferManager = useTransferManager; + } + + public void setMinimumUploadPartSize(long minimumUploadPartSize) + { + this.minimumUploadPartSize = minimumUploadPartSize; + } + + public void setMultipartUploadThreshold(long multipartUploadThreshold) + { + this.multipartUploadThreshold = multipartUploadThreshold; + } + + public boolean isUseTransferManager() + { + return useTransferManager; + } + + public long getMinimumUploadPartSize() + { + return minimumUploadPartSize; + } + + public long getMultipartUploadThreshold() + { + return multipartUploadThreshold; + } + +} diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java index 087d6684c7f3..a256b7001506 100644 --- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java +++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3Utils.java @@ -83,6 +83,9 @@ public boolean apply(Throwable e) } else if (e instanceof SdkClientException && e.getMessage().contains("Unable to execute HTTP request")) { // This is likely due to a temporary DNS issue and can be retried. 
return true; + } else if (e instanceof InterruptedException) { + Thread.interrupted(); // Clear interrupted state and not retry + return false; } else if (e instanceof AmazonClientException) { return AWSClientUtil.isClientExceptionRecoverable((AmazonClientException) e); } else { @@ -320,7 +323,7 @@ static void uploadFileIfPossible( String bucket, String key, File file - ) + ) throws InterruptedException { final PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, file); @@ -328,7 +331,7 @@ static void uploadFileIfPossible( putObjectRequest.setAccessControlList(S3Utils.grantFullControlToBucketOwner(service, bucket)); } log.info("Pushing [%s] to bucket[%s] and key[%s].", file, bucket, key); - service.putObject(putObjectRequest); + service.upload(putObjectRequest); } @Nullable diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/ServerSideEncryptingAmazonS3.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/ServerSideEncryptingAmazonS3.java index 320a0b9a6f99..d97d8df6c8a8 100644 --- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/ServerSideEncryptingAmazonS3.java +++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/ServerSideEncryptingAmazonS3.java @@ -43,6 +43,9 @@ import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.UploadPartRequest; import com.amazonaws.services.s3.model.UploadPartResult; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.TransferManagerBuilder; +import com.amazonaws.services.s3.transfer.Upload; import org.apache.druid.java.util.common.ISE; import java.io.File; @@ -65,11 +68,21 @@ public static Builder builder() private final AmazonS3 amazonS3; private final ServerSideEncryption serverSideEncryption; + private final TransferManager transferManager; - public ServerSideEncryptingAmazonS3(AmazonS3 amazonS3, ServerSideEncryption serverSideEncryption) + public ServerSideEncryptingAmazonS3(AmazonS3 amazonS3, ServerSideEncryption serverSideEncryption, S3TransferConfig transferConfig) { this.amazonS3 = amazonS3; this.serverSideEncryption = serverSideEncryption; + if (transferConfig.isUseTransferManager()) { + this.transferManager = TransferManagerBuilder.standard() + .withS3Client(amazonS3) + .withMinimumUploadPartSize(transferConfig.getMinimumUploadPartSize()) + .withMultipartUploadThreshold(transferConfig.getMultipartUploadThreshold()) + .build(); + } else { + this.transferManager = null; + } } public boolean doesObjectExist(String bucket, String objectName) @@ -168,10 +181,20 @@ public CompleteMultipartUploadResult completeMultipartUpload(CompleteMultipartUp return amazonS3.completeMultipartUpload(request); } + public void upload(PutObjectRequest request) throws InterruptedException + { + if (transferManager == null) { + putObject(request); + } else { + Upload transfer = transferManager.upload(serverSideEncryption.decorate(request)); + transfer.waitForCompletion(); + } + } + public static class Builder { private AmazonS3ClientBuilder amazonS3ClientBuilder = AmazonS3Client.builder(); - private S3StorageConfig s3StorageConfig = new S3StorageConfig(new NoopServerSideEncryption()); + private S3StorageConfig s3StorageConfig = new S3StorageConfig(new NoopServerSideEncryption(), null); public Builder setAmazonS3ClientBuilder(AmazonS3ClientBuilder amazonS3ClientBuilder) { @@ -204,7 +227,7 @@ public ServerSideEncryptingAmazonS3 build() throw new ISE("S3StorageConfig cannot 
be null!"); } - return new ServerSideEncryptingAmazonS3(amazonS3ClientBuilder.build(), s3StorageConfig.getServerSideEncryption()); + return new ServerSideEncryptingAmazonS3(amazonS3ClientBuilder.build(), s3StorageConfig.getServerSideEncryption(), s3StorageConfig.getS3TransferConfig()); } } } diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/data/input/s3/S3InputSourceTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/data/input/s3/S3InputSourceTest.java index 6b0bb537c7e1..9daf4cb42860 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/data/input/s3/S3InputSourceTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/data/input/s3/S3InputSourceTest.java @@ -70,6 +70,7 @@ import org.apache.druid.metadata.DefaultPasswordProvider; import org.apache.druid.storage.s3.NoopServerSideEncryption; import org.apache.druid.storage.s3.S3InputDataConfig; +import org.apache.druid.storage.s3.S3TransferConfig; import org.apache.druid.storage.s3.S3Utils; import org.apache.druid.storage.s3.ServerSideEncryptingAmazonS3; import org.apache.druid.testing.InitializedNullHandlingTest; @@ -108,7 +109,8 @@ public class S3InputSourceTest extends InitializedNullHandlingTest public static final AmazonS3ClientBuilder AMAZON_S3_CLIENT_BUILDER = AmazonS3Client.builder(); public static final ServerSideEncryptingAmazonS3 SERVICE = new ServerSideEncryptingAmazonS3( S3_CLIENT, - new NoopServerSideEncryption() + new NoopServerSideEncryption(), + new S3TransferConfig() ); public static final S3InputDataConfig INPUT_DATA_CONFIG; private static final int MAX_LISTING_LENGTH = 10; diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/ObjectSummaryIteratorTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/ObjectSummaryIteratorTest.java index ea2ca4af26c1..8ee6c826718d 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/ObjectSummaryIteratorTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/ObjectSummaryIteratorTest.java @@ -195,7 +195,7 @@ private static ServerSideEncryptingAmazonS3 makeMockClient( final List objects ) { - return new ServerSideEncryptingAmazonS3(null, null) + return new ServerSideEncryptingAmazonS3(null, null, new S3TransferConfig()) { @Override public ListObjectsV2Result listObjectsV2(final ListObjectsV2Request request) diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentArchiverTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentArchiverTest.java index f5005c706e01..4acf553fae50 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentArchiverTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentArchiverTest.java @@ -76,7 +76,8 @@ public String getArchiveBaseKey() private static final Supplier S3_SERVICE = Suppliers.ofInstance( new ServerSideEncryptingAmazonS3( EasyMock.createStrictMock(AmazonS3Client.class), - new NoopServerSideEncryption() + new NoopServerSideEncryption(), + new S3TransferConfig() ) ); private static final S3DataSegmentPuller PULLER = new S3DataSegmentPuller(S3_SERVICE.get()); diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java index 
550a72cef43c..8f653e956a83 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java @@ -201,7 +201,7 @@ private static class MockAmazonS3Client extends ServerSideEncryptingAmazonS3 private MockAmazonS3Client() { - super(new AmazonS3Client(), new NoopServerSideEncryption()); + super(new AmazonS3Client(), new NoopServerSideEncryption(), new S3TransferConfig()); } public boolean didMove() diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java index 23ab725aa33d..449fad012ccc 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java @@ -24,7 +24,7 @@ import com.amazonaws.services.s3.model.Grant; import com.amazonaws.services.s3.model.Owner; import com.amazonaws.services.s3.model.Permission; -import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.PutObjectRequest; import com.google.common.io.Files; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.timeline.DataSegment; @@ -83,9 +83,8 @@ private void testPushInternal(boolean useUniquePath, String matcher) throws Exce acl.grantAllPermissions(new Grant(new CanonicalGrantee(acl.getOwner().getId()), Permission.FullControl)); EasyMock.expect(s3Client.getBucketAcl(EasyMock.eq("bucket"))).andReturn(acl).once(); - EasyMock.expect(s3Client.putObject(EasyMock.anyObject())) - .andReturn(new PutObjectResult()) - .once(); + s3Client.upload(EasyMock.anyObject(PutObjectRequest.class)); + EasyMock.expectLastCall().once(); EasyMock.replay(s3Client); diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3StorageConnectorProviderTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3StorageConnectorProviderTest.java index 9f9d632f6181..790a4f1a2643 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3StorageConnectorProviderTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3StorageConnectorProviderTest.java @@ -145,7 +145,7 @@ public void configure(Binder binder) new InjectableValues.Std() .addValue( ServerSideEncryptingAmazonS3.class, - new ServerSideEncryptingAmazonS3(null, new NoopServerSideEncryption()) + new ServerSideEncryptingAmazonS3(null, new NoopServerSideEncryption(), new S3TransferConfig()) )); diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3TaskLogsTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3TaskLogsTest.java index 011dc4888456..a434739ed21b 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3TaskLogsTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3TaskLogsTest.java @@ -29,7 +29,6 @@ import com.amazonaws.services.s3.model.Owner; import com.amazonaws.services.s3.model.Permission; import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.PutObjectResult; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectSummary; import 
com.google.common.base.Optional; @@ -123,11 +122,9 @@ public void testTaskLogsPushWithAclEnabled() throws Exception } @Test - public void test_pushTaskStatus() throws IOException + public void test_pushTaskStatus() throws IOException, InterruptedException { - EasyMock.expect(s3Client.putObject(EasyMock.anyObject(PutObjectRequest.class))) - .andReturn(new PutObjectResult()) - .once(); + s3Client.upload(EasyMock.anyObject(PutObjectRequest.class)); EasyMock.replay(s3Client); @@ -148,12 +145,10 @@ public void test_pushTaskStatus() throws IOException } @Test - public void test_pushTaskPayload() throws IOException + public void test_pushTaskPayload() throws IOException, InterruptedException { Capture putObjectRequestCapture = Capture.newInstance(CaptureType.FIRST); - EasyMock.expect(s3Client.putObject(EasyMock.capture(putObjectRequestCapture))) - .andReturn(new PutObjectResult()) - .once(); + s3Client.upload(EasyMock.capture(putObjectRequestCapture)); EasyMock.replay(s3Client); @@ -617,9 +612,8 @@ private S3TaskLogs getS3TaskLogs() private List testPushInternal(boolean disableAcl, String ownerId, String ownerDisplayName) throws Exception { - EasyMock.expect(s3Client.putObject(EasyMock.anyObject())) - .andReturn(new PutObjectResult()) - .once(); + s3Client.upload(EasyMock.anyObject(PutObjectRequest.class)); + EasyMock.expectLastCall().once(); AccessControlList aclExpected = new AccessControlList(); aclExpected.setOwner(new Owner(ownerId, ownerDisplayName)); @@ -628,9 +622,8 @@ private List testPushInternal(boolean disableAcl, String ownerId, String .andReturn(aclExpected) .once(); - EasyMock.expect(s3Client.putObject(EasyMock.anyObject(PutObjectRequest.class))) - .andReturn(new PutObjectResult()) - .once(); + s3Client.upload(EasyMock.anyObject(PutObjectRequest.class)); + EasyMock.expectLastCall().once(); EasyMock.replay(s3Client); diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java index 3685fc6fa19b..fefcb8c3c38b 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java @@ -67,7 +67,7 @@ public void testWithFixedAWSKeys() new AWSProxyConfig(), new AWSEndpointConfig(), new AWSClientConfig(), - new S3StorageConfig(new NoopServerSideEncryption()) + new S3StorageConfig(new NoopServerSideEncryption(), null) ); s3Module.getAmazonS3Client( @@ -102,7 +102,7 @@ public void testWithFileSessionCredentials() throws IOException new AWSProxyConfig(), new AWSEndpointConfig(), new AWSClientConfig(), - new S3StorageConfig(new NoopServerSideEncryption()) + new S3StorageConfig(new NoopServerSideEncryption(), null) ); s3Module.getAmazonS3Client( diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/RetryableS3OutputStreamTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/RetryableS3OutputStreamTest.java index 1f8eac3bbae0..f407c2a41d7e 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/RetryableS3OutputStreamTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/RetryableS3OutputStreamTest.java @@ -34,6 +34,7 @@ import org.apache.druid.java.util.common.IOE; import org.apache.druid.java.util.common.StringUtils; import 
org.apache.druid.storage.s3.NoopServerSideEncryption; +import org.apache.druid.storage.s3.S3TransferConfig; import org.apache.druid.storage.s3.ServerSideEncryptingAmazonS3; import org.easymock.EasyMock; import org.hamcrest.CoreMatchers; @@ -228,7 +229,7 @@ private static class TestAmazonS3 extends ServerSideEncryptingAmazonS3 private TestAmazonS3(int totalUploadFailures) { - super(EasyMock.createMock(AmazonS3.class), new NoopServerSideEncryption()); + super(EasyMock.createMock(AmazonS3.class), new NoopServerSideEncryption(), new S3TransferConfig()); this.uploadFailuresLeft = totalUploadFailures; } diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/S3StorageConnectorTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/S3StorageConnectorTest.java index 380c5cb1e508..a2f4d7e1a459 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/S3StorageConnectorTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/output/S3StorageConnectorTest.java @@ -33,6 +33,7 @@ import com.google.common.collect.Lists; import org.apache.druid.storage.StorageConnector; import org.apache.druid.storage.s3.NoopServerSideEncryption; +import org.apache.druid.storage.s3.S3TransferConfig; import org.apache.druid.storage.s3.ServerSideEncryptingAmazonS3; import org.easymock.Capture; import org.easymock.EasyMock; @@ -64,7 +65,8 @@ public class S3StorageConnectorTest private final AmazonS3Client s3Client = EasyMock.createMock(AmazonS3Client.class); private final ServerSideEncryptingAmazonS3 service = new ServerSideEncryptingAmazonS3( s3Client, - new NoopServerSideEncryption() + new NoopServerSideEncryption(), + new S3TransferConfig() ); private final ListObjectsV2Result testResult = EasyMock.createMock(ListObjectsV2Result.class); diff --git a/licenses.yaml b/licenses.yaml index 0632e36f7fda..2eb0e489d69a 100644 --- a/licenses.yaml +++ b/licenses.yaml @@ -3345,7 +3345,7 @@ name: Apache Avro license_category: binary module: extensions/druid-avro-extensions license_name: Apache License version 2.0 -version: 1.11.1 +version: 1.11.3 libraries: - org.apache.avro: avro - org.apache.avro: avro-mapred diff --git a/owasp-dependency-check-suppressions.xml b/owasp-dependency-check-suppressions.xml index bdfe6c527d2b..054d57f92eb7 100644 --- a/owasp-dependency-check-suppressions.xml +++ b/owasp-dependency-check-suppressions.xml @@ -548,13 +548,6 @@ CVE-2017-3162
- - - - CVE-2021-43045 - diff --git a/pom.xml b/pom.xml index 9d22ef43755c..57a49f31258c 100644 --- a/pom.xml +++ b/pom.xml @@ -66,7 +66,7 @@ scm:git:ssh://git@github.com/apache/druid.git scm:git:ssh://git@github.com/apache/druid.git https://github.com/apache/druid.git - 0.19.0-SNAPSHOT + 28.0.0-SNAPSHOT @@ -81,7 +81,7 @@ 2.2.4 2.13.11 1.23.0 - 1.11.1 + 1.11.3 @@ -133,6 +133,7 @@ maven.org Maven Central Repository https://repo1.maven.org/maven2/ + 0.19.0-alpha 3 @@ -227,6 +228,9 @@ extensions-contrib/opentelemetry-emitter extensions-contrib/kubernetes-overlord-extensions extensions-contrib/druid-iceberg-extensions + extensions-contrib/opencensus-extensions + extensions-contrib/confluent-extensions + extensions-contrib/opentelemetry-extensions distribution @@ -817,6 +821,11 @@ protobuf-java ${protobuf.version} + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + io.tesla.aether tesla-aether @@ -1301,6 +1310,11 @@ + + io.opentelemetry.proto + opentelemetry-proto + ${opentelemetry.proto.version} + diff --git a/processing/src/main/java/org/apache/druid/java/util/emitter/service/SegmentMetadataEvent.java b/processing/src/main/java/org/apache/druid/java/util/emitter/service/SegmentMetadataEvent.java index 7e249f72d0a6..732db1d3cecb 100644 --- a/processing/src/main/java/org/apache/druid/java/util/emitter/service/SegmentMetadataEvent.java +++ b/processing/src/main/java/org/apache/druid/java/util/emitter/service/SegmentMetadataEvent.java @@ -97,6 +97,37 @@ public String getFeed() { return "segment_metadata"; } + + public DateTime getCreatedTime() + { + return createdTime; + } + + public DateTime getStartTime() + { + return startTime; + } + + public DateTime getEndTime() + { + return endTime; + } + + public String getDataSource() + { + return dataSource; + } + + public String getVersion() + { + return version; + } + + public boolean isCompacted() + { + return isCompacted; + } + @Override @JsonValue public EventMap toMap() diff --git a/processing/src/main/java/org/apache/druid/segment/transform/RowFunction.java b/processing/src/main/java/org/apache/druid/segment/transform/RowFunction.java index 375aa5f1b10f..4aab82781aad 100644 --- a/processing/src/main/java/org/apache/druid/segment/transform/RowFunction.java +++ b/processing/src/main/java/org/apache/druid/segment/transform/RowFunction.java @@ -20,6 +20,7 @@ package org.apache.druid.segment.transform; import org.apache.druid.data.input.Row; +import org.apache.druid.data.input.Rows; import java.util.List; @@ -29,6 +30,8 @@ public interface RowFunction { Object eval(Row row); - - List evalDimension(Row row); + default List evalDimension(Row row) + { + return Rows.objectToStrings(eval(row)); + } } diff --git a/service.yml b/service.yml new file mode 100644 index 000000000000..6ac069c67d9b --- /dev/null +++ b/service.yml @@ -0,0 +1,13 @@ +name: druid +lang: unknown +lang_version: unknown +git: + enable: true +github: + enable: true +semaphore: + enable: true + pipeline_enable: false + branches: + - master + - /^.*-confluent$/ diff --git a/web-console/README.md b/web-console/README.md index 4e16369d3436..ff5012344bd4 100644 --- a/web-console/README.md +++ b/web-console/README.md @@ -46,6 +46,7 @@ The console relies on [eslint](https://eslint.org) (and various plugins), [sass- #### Configuring WebStorm - **Preferences | Languages & Frameworks | JavaScript | Code Quality Tools | ESLint** + - Select "Automatic ESLint Configuration" - Check "Run eslint --fix on save" @@ -55,6 +56,7 @@ The console relies on [eslint](https://eslint.org) (and 
   - Check "On save"
 
 #### Configuring VS Code
+
 - Install `dbaeumer.vscode-eslint` extension
 - Install `esbenp.prettier-vscode` extension
 - Open User Settings (JSON) and set the following:
@@ -67,10 +69,11 @@ The console relies on [eslint](https://eslint.org) (and various plugins), [sass-
   ```
 
 #### Auto-fixing manually
+
 It is also possible to auto-fix and format code without making IDE changes by running the following script:
 
 - `npm run autofix` — run code linters and formatter
-
+
 You could also run fixers individually:
 
 - `npm run eslint-fix` — run code linter and fix issues
diff --git a/web-console/console-config.js b/web-console/console-config.js
index 10bdddb611af..e7e34c136c00 100644
--- a/web-console/console-config.js
+++ b/web-console/console-config.js
@@ -19,4 +19,9 @@
 window.consoleConfig = {
   exampleManifestsUrl: 'https://druid.apache.org/data/example-manifests-v2.tsv',
   /* future configs may go here */
+  defaultQueryContext: {
+    priority: -1,
+    timeout: 30000,
+    lane: 'console',
+  },
 };
diff --git a/web-console/src/ace-modes/hjson.js b/web-console/src/ace-modes/hjson.js
index 17142c89e597..084017217bd6 100644
--- a/web-console/src/ace-modes/hjson.js
+++ b/web-console/src/ace-modes/hjson.js
@@ -25,15 +25,15 @@ ace.define(
   'ace/mode/hjson_highlight_rules',
   ['require', 'exports', 'module', 'ace/lib/oop', 'ace/mode/text_highlight_rules'],
-  function(acequire, exports, module) {
+  function (acequire, exports, module) {
     'use strict';
 
     var oop = acequire('../lib/oop');
     var TextHighlightRules = acequire('./text_highlight_rules').TextHighlightRules;
 
-    var HjsonHighlightRules = function() {
+    var HjsonHighlightRules = function () {
       this.$rules = {
-        start: [
+        'start': [
           {
             include: '#comments',
           },
@@ -277,19 +277,19 @@ ace.define(
     'ace/mode/text',
     'ace/mode/hjson_highlight_rules',
   ],
-  function(acequire, exports, module) {
+  function (acequire, exports, module) {
     'use strict';
 
     var oop = acequire('../lib/oop');
     var TextMode = acequire('./text').Mode;
     var HjsonHighlightRules = acequire('./hjson_highlight_rules').HjsonHighlightRules;
 
-    var Mode = function() {
+    var Mode = function () {
       this.HighlightRules = HjsonHighlightRules;
     };
     oop.inherits(Mode, TextMode);
 
-    (function() {
+    (function () {
       this.lineCommentStart = '//';
       this.blockComment = { start: '/*', end: '*/' };
       this.$id = 'ace/mode/hjson';
diff --git a/web-console/src/components/refresh-button/refresh-button.tsx b/web-console/src/components/refresh-button/refresh-button.tsx
index bbf7ed2f0714..407151d14f35 100644
--- a/web-console/src/components/refresh-button/refresh-button.tsx
+++ b/web-console/src/components/refresh-button/refresh-button.tsx
@@ -35,19 +35,23 @@ const DELAYS: DelayLabel[] = [
 export interface RefreshButtonProps {
   onRefresh(auto: boolean): void;
   localStorageKey?: LocalStorageKeys;
+  defaultDelay?: number;
 }
 
 export const RefreshButton = React.memo(function RefreshButton(props: RefreshButtonProps) {
+  const { onRefresh, localStorageKey, defaultDelay = 30000 } = props;
+
   return (
     ...
   );
 });
diff --git a/web-console/src/views/tasks-view/__snapshots__/tasks-view.spec.tsx.snap b/web-console/src/views/tasks-view/__snapshots__/tasks-view.spec.tsx.snap
index 865d7875400d..2cd45da49f16 100644
--- a/web-console/src/views/tasks-view/__snapshots__/tasks-view.spec.tsx.snap
+++ b/web-console/src/views/tasks-view/__snapshots__/tasks-view.spec.tsx.snap
@@ -43,6 +43,7 @@ exports[`TasksView matches snapshot 1`] = `
diff --git a/web-console/src/views/tasks-view/tasks-view.tsx b/web-console/src/views/tasks-view/tasks-view.tsx
index 795c9908412a..be0c3b7b9ee4 100644
--- a/web-console/src/views/tasks-view/tasks-view.tsx
+++ b/web-console/src/views/tasks-view/tasks-view.tsx
@@ -584,6 +584,7 @@ ORDER BY
           {
             if (auto && hasPopoverOpen()) return;
             this.taskQueryManager.rerunLastQuery(auto);