From 11cd7e54ccb1aeafb993425feb3553e1eb8bc4d1 Mon Sep 17 00:00:00 2001
From: Suneet Saldanha
Date: Thu, 30 Jul 2020 10:55:27 -0700
Subject: [PATCH] [0.19.0-iap] Remove all contrib extensions

Contrib extensions are not packaged with Imply Druid. This will help speed
up build time as we no longer have to compile the extensions only to throw
them away later in the build process.

https://github.com/implydata/distribution/blob/53efbb9231cbf25624a22d36c832fcde30a22689/src/build/druid#L125
---
 distribution/pom.xml | 85 --
 extensions-contrib/README.md | 25 -
 .../aliyun-oss-extensions/pom.xml | 180 ----
 .../data/input/aliyun/OssClientConfig.java | 122 ---
 .../druid/data/input/aliyun/OssEntity.java | 88 --
 .../data/input/aliyun/OssInputSource.java | 178 ----
 .../aliyun/OssInputSourceDruidModule.java | 49 -
 .../aliyun/OssFirehoseDruidModule.java | 46 -
 .../aliyun/StaticOssFirehoseFactory.java | 243 -----
 .../aliyun/OssDataSegmentArchiver.java | 101 ---
 .../aliyun/OssDataSegmentArchiverConfig.java | 41 -
 .../storage/aliyun/OssDataSegmentKiller.java | 98 --
 .../storage/aliyun/OssDataSegmentMover.java | 253 ------
 .../storage/aliyun/OssDataSegmentPuller.java | 308 -------
 .../storage/aliyun/OssDataSegmentPusher.java | 131 ---
 .../storage/aliyun/OssInputDataConfig.java | 52 --
 .../druid/storage/aliyun/OssLoadSpec.java | 72 --
 .../aliyun/OssObjectSummaryIterator.java | 156 ----
 .../storage/aliyun/OssStorageConfig.java | 50 -
 .../storage/aliyun/OssStorageDruidModule.java | 106 ---
 .../druid/storage/aliyun/OssTaskLogs.java | 201 -----
 .../storage/aliyun/OssTaskLogsConfig.java | 73 --
 .../OssTimestampVersionedDataFinder.java | 90 --
 .../apache/druid/storage/aliyun/OssUtils.java | 271 ------
 ...rg.apache.druid.initialization.DruidModule | 18 -
 .../data/input/aliyun/OssInputSourceTest.java | 660 --------------
 .../aliyun/OssDataSegmentArchiverTest.java | 195 ----
 .../aliyun/OssDataSegmentKillerTest.java | 205 -----
 .../aliyun/OssDataSegmentMoverTest.java | 266 ------
 .../aliyun/OssDataSegmentPullerTest.java | 205 -----
 .../OssDataSegmentPusherConfigTest.java | 51 --
 .../aliyun/OssDataSegmentPusherTest.java | 125 ---
 .../aliyun/OssObjectSummaryIteratorTest.java | 276 ------
 .../druid/storage/aliyun/OssTaskLogsTest.java | 336 -------
 .../druid/storage/aliyun/OssTestUtils.java | 177 ----
 .../OssTimestampVersionedDataFinderTest.java | 178 ----
 .../ambari-metrics-emitter/pom.xml | 169 ----
 .../ambari/metrics/AmbariMetricsEmitter.java | 233 -----
 .../metrics/AmbariMetricsEmitterConfig.java | 262 ------
 .../metrics/AmbariMetricsEmitterModule.java | 66 --
 .../DruidToTimelineMetricConverter.java | 44 -
 .../SendAllTimelineEventConverter.java | 129 ---
 ...istBasedDruidToTimelineEventConverter.java | 259 ------
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../main/resources/defaultWhiteListMap.json | 73 --
 .../AmbariMetricsEmitterConfigTest.java | 88 --
 .../DruidToWhiteListBasedConverterTest.java | 34 -
 ...asedDruidToTimelineEventConverterTest.java | 195 ----
 .../testWhiteListedStringArrayDimension.json | 3 -
 extensions-contrib/cassandra-storage/pom.xml | 176 ----
 .../cassandra/CassandraDataSegmentConfig.java | 48 -
 .../cassandra/CassandraDataSegmentPuller.java | 108 ---
 .../cassandra/CassandraDataSegmentPusher.java | 132 ---
 .../cassandra/CassandraDruidModule.java | 77 --
 .../storage/cassandra/CassandraLoadSpec.java | 56 --
 .../storage/cassandra/CassandraStorage.java | 72 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../cloudfiles-extensions/pom.xml | 187 ----
 .../firehose/cloudfiles/CloudFilesBlob.java | 101 ---
 .../CloudFilesFirehoseDruidModule.java | 48 -
 .../StaticCloudFilesFirehoseFactory.java | 162 ----
 .../cloudfiles/CloudFilesAccountConfig.java | 65 --
 .../cloudfiles/CloudFilesByteSource.java | 70 --
 .../CloudFilesDataSegmentPuller.java | 96 --
 .../CloudFilesDataSegmentPusher.java | 153 ----
 .../CloudFilesDataSegmentPusherConfig.java | 90 --
 .../cloudfiles/CloudFilesLoadSpec.java | 67 --
 .../storage/cloudfiles/CloudFilesObject.java | 83 --
 .../cloudfiles/CloudFilesObjectApiProxy.java | 72 --
 .../CloudFilesStorageDruidModule.java | 117 ---
 .../storage/cloudfiles/CloudFilesUtils.java | 70 --
 ...rg.apache.druid.initialization.DruidModule | 19 -
 .../StaticCloudFilesFirehoseFactoryTest.java | 105 ---
 .../cloudfiles/CloudFilesByteSourceTest.java | 87 --
 .../CloudFilesDataSegmentPusherTest.java | 92 --
 .../CloudFilesObjectApiProxyTest.java | 61 --
 extensions-contrib/distinctcount/pom.xml | 117 ---
 .../distinctcount/BitMapFactory.java | 42 -
 .../distinctcount/ConciseBitMapFactory.java | 57 --
 .../DistinctCountAggregator.java | 81 --
 .../DistinctCountAggregatorFactory.java | 246 -----
 .../DistinctCountBufferAggregator.java | 108 ---
 .../DistinctCountDruidModule.java | 49 -
 .../distinctcount/JavaBitMapFactory.java | 57 --
 .../NoopDistinctCountAggregator.java | 63 --
 .../NoopDistinctCountBufferAggregator.java | 88 --
 .../distinctcount/RoaringBitMapFactory.java | 57 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../DistinctCountGroupByQueryTest.java | 159 ----
 .../DistinctCountTimeseriesQueryTest.java | 114 ---
 .../DistinctCountTopNQueryTest.java | 155 ----
 extensions-contrib/dropwizard-emitter/pom.xml | 128 ---
 .../dropwizard/DropwizardConverter.java | 106 ---
 .../emitter/dropwizard/DropwizardEmitter.java | 200 ----
 .../dropwizard/DropwizardEmitterConfig.java | 139 ---
 .../dropwizard/DropwizardEmitterModule.java | 73 --
 .../dropwizard/DropwizardMetricSpec.java | 72 --
 .../dropwizard/DropwizardReporter.java | 45 -
 .../druid/emitter/dropwizard/GaugesCache.java | 39 -
 .../reporters/DropwizardConsoleReporter.java | 143 ---
 .../reporters/DropwizardJMXReporter.java | 86 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../resources/defaultMetricDimensions.json | 538 -----------
 .../DropwizardEmitterConfigTest.java | 86 --
 extensions-contrib/gce-extensions/pom.xml | 130 ---
 .../autoscaling/gce/GceAutoScaler.java | 526 -----------
 .../autoscaling/gce/GceEnvironmentConfig.java | 132 ---
 .../overlord/autoscaling/gce/GceModule.java | 42 -
 .../autoscaling/gce/GceServiceException.java | 32 -
 .../overlord/autoscaling/gce/GceUtils.java | 73 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../autoscaling/gce/GceAutoScalerTest.java | 853 ------------------
 .../autoscaling/gce/GceUtilsTest.java | 96 --
 extensions-contrib/graphite-emitter/pom.xml | 124 ---
 .../DruidToGraphiteEventConverter.java | 44 -
 .../emitter/graphite/GraphiteEmitter.java | 259 ------
 .../graphite/GraphiteEmitterConfig.java | 226 -----
 .../graphite/GraphiteEmitterModule.java | 80 --
 .../druid/emitter/graphite/GraphiteEvent.java | 60 --
 .../SendAllGraphiteEventConverter.java | 157 ----
 .../graphite/WhiteListBasedConverter.java | 294 ------
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../main/resources/defaultWhiteListMap.json | 48 -
 .../DruidToWhiteListBasedConverterTest.java | 41 -
 .../graphite/GraphiteEmitterConfigTest.java | 101 ---
 .../graphite/WhiteListBasedConverterTest.java | 201 -----
 .../testWhiteListedStringArrayDimension.json | 3 -
 extensions-contrib/influx-extensions/pom.xml | 127 ---
 .../data/input/influx/InfluxLineProtocol.g4 | 95 --
 .../input/influx/InfluxExtensionsModule.java | 52 --
 .../data/input/influx/InfluxParseSpec.java | 64 --
 .../druid/data/input/influx/InfluxParser.java | 179 ----
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../data/input/influx/InfluxParserTest.java | 231 -----
 extensions-contrib/influxdb-emitter/pom.xml | 95 --
 .../emitter/influxdb/InfluxdbEmitter.java | 214 -----
 .../influxdb/InfluxdbEmitterConfig.java | 196 ----
 .../influxdb/InfluxdbEmitterModule.java | 61 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../influxdb/InfluxdbEmitterConfigTest.java | 211 -----
 .../emitter/influxdb/InfluxdbEmitterTest.java | 214 -----
 extensions-contrib/kafka-emitter/pom.xml | 115 ---
 .../druid/emitter/kafka/KafkaEmitter.java | 192 ----
 .../emitter/kafka/KafkaEmitterConfig.java | 140 ---
 .../emitter/kafka/KafkaEmitterModule.java | 58 --
 .../kafka/MemoryBoundLinkedBlockingQueue.java | 85 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../emitter/kafka/KafkaEmitterConfigTest.java | 77 --
 .../materialized-view-maintenance/pom.xml | 137 ---
 .../DerivativeDataSourceMetadata.java | 138 ---
 ...aterializedViewMaintenanceDruidModule.java | 51 --
 .../MaterializedViewSupervisor.java | 515 -----------
 .../MaterializedViewSupervisorReport.java | 63 --
 .../MaterializedViewSupervisorSpec.java | 437 ---------
 .../MaterializedViewTaskConfig.java | 34 -
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../DerivativeDataSourceMetadataTest.java | 61 --
 .../MaterializedViewSupervisorSpecTest.java | 302 -------
 .../MaterializedViewSupervisorTest.java | 360 --------
 .../materialized-view-selection/pom.xml | 155 ----
 .../materializedview/DataSourceOptimizer.java | 216 -----
 .../DataSourceOptimizerMonitor.java | 66 --
 .../DataSourceOptimizerStats.java | 93 --
 .../DerivativeDataSource.java | 94 --
 .../DerivativeDataSourceManager.java | 254 ------
 .../MaterializedViewConfig.java | 35 -
 .../MaterializedViewQuery.java | 248 -----
 .../MaterializedViewQueryQueryToolChest.java | 138 ---
 .../MaterializedViewQueryRunner.java | 68 --
 .../MaterializedViewSelectionDruidModule.java | 59 --
 .../MaterializedViewUtils.java | 233 -----
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../DatasourceOptimizerTest.java | 315 -------
 ...terializedViewQueryQueryToolChestTest.java | 250 -----
 .../MaterializedViewQueryTest.java | 92 --
 .../MaterializedViewUtilsTest.java | 232 -----
 extensions-contrib/momentsketch/pom.xml | 123 ---
 .../MomentSketchComplexMetricSerde.java | 92 --
 .../MomentSketchJsonSerializer.java | 39 -
 .../momentsketch/MomentSketchModule.java | 86 --
 .../MomentSketchObjectStrategy.java | 62 --
 .../momentsketch/MomentSketchWrapper.java | 189 ----
 .../MomentSketchAggregatorFactory.java | 290 ------
 .../MomentSketchBuildAggregator.java | 85 --
 .../MomentSketchBuildBufferAggregator.java | 97 --
 .../MomentSketchMaxPostAggregator.java | 130 ---
 .../MomentSketchMergeAggregator.java | 75 --
 .../MomentSketchMergeAggregatorFactory.java | 62 --
 .../MomentSketchMergeBufferAggregator.java | 110 ---
 .../MomentSketchMinPostAggregator.java | 129 ---
 .../MomentSketchQuantilePostAggregator.java | 148 ---
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../momentsketch/MomentSketchWrapperTest.java | 52 --
 ...omentSketchAggregatorFactorySerdeTest.java | 44 -
 .../MomentSketchMaxPostAggregatorTest.java | 68 --
 .../MomentSketchMinPostAggregatorTest.java | 68 --
 ...omentSketchQuantilePostAggregatorTest.java | 83 --
 .../MomentsSketchAggregatorTest.java | 225 -----
 .../src/test/resources/doubles_build_data.tsv | 400 --------
 .../moving-average-query/README.md | 29 -
 .../moving-average-query/pom.xml | 131 ---
 .../movingaverage/AveragerFactoryWrapper.java | 184 ----
 .../movingaverage/BucketingAccumulator.java | 68 --
 .../DefaultMovingAverageQueryMetrics.java | 55 --
 ...faultMovingAverageQueryMetricsFactory.java | 47 -
 .../movingaverage/MovingAverageHelper.java | 53 --
 .../movingaverage/MovingAverageIterable.java | 312 -------
 .../movingaverage/MovingAverageQuery.java | 387 --------
 .../MovingAverageQueryMetrics.java | 42 -
 .../MovingAverageQueryMetricsFactory.java | 36 -
 .../MovingAverageQueryModule.java | 61 --
 .../MovingAverageQueryRunner.java | 239 -----
 .../MovingAverageQueryToolChest.java | 134 ---
 .../PostAveragerAggregatorCalculator.java | 63 --
 .../druid/query/movingaverage/RowBucket.java | 62 --
 .../movingaverage/RowBucketIterable.java | 155 ----
 .../movingaverage/averagers/Averager.java | 57 --
 .../averagers/AveragerFactory.java | 108 ---
 .../movingaverage/averagers/BaseAverager.java | 192 ----
 .../averagers/BaseAveragerFactory.java | 103 ---
 .../averagers/ComparableAveragerFactory.java | 51 --
 .../averagers/ConstantAverager.java | 81 --
 .../averagers/ConstantAveragerFactory.java | 101 ---
 .../averagers/DoubleMaxAverager.java | 44 -
 .../averagers/DoubleMaxAveragerFactory.java | 44 -
 .../averagers/DoubleMeanAverager.java | 48 -
 .../averagers/DoubleMeanAveragerFactory.java | 44 -
 .../averagers/DoubleMeanNoNullAverager.java | 46 -
 .../DoubleMeanNoNullAveragerFactory.java | 43 -
 .../averagers/DoubleMinAverager.java | 44 -
 .../averagers/DoubleMinAveragerFactory.java | 43 -
 .../averagers/DoubleSumAverager.java | 45 -
 .../averagers/DoubleSumAveragerFactory.java | 44 -
 .../averagers/LongMaxAverager.java | 44 -
 .../averagers/LongMaxAveragerFactory.java | 43 -
 .../averagers/LongMeanAverager.java | 48 -
 .../averagers/LongMeanAveragerFactory.java | 44 -
 .../averagers/LongMeanNoNullAverager.java | 46 -
 .../LongMeanNoNullAveragerFactory.java | 44 -
 .../averagers/LongMinAverager.java | 45 -
 .../averagers/LongMinAveragerFactory.java | 44 -
 .../averagers/LongSumAverager.java | 45 -
 .../averagers/LongSumAveragerFactory.java | 44 -
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../MovingAverageIterableTest.java | 810 -----------------
 .../movingaverage/MovingAverageQueryTest.java | 403 ---------
 .../PostAveragerAggregatorCalculatorTest.java | 106 ---
 .../movingaverage/RowBucketIterableTest.java | 630 -------------
 .../averagers/BaseAveragerFactoryTest.java | 66 --
 .../averagers/BaseAveragerTest.java | 147 ---
 .../DoubleMaxAveragerFactoryTest.java | 34 -
 .../averagers/DoubleMaxAveragerTest.java | 54 --
 .../DoubleMeanAveragerFactoryTest.java | 34 -
 .../averagers/DoubleMeanAveragerTest.java | 54 --
 .../DoubleMeanAveragerWithPeriodTest.java | 78 --
 .../DoubleMeanNoNullAveragerFactoryTest.java | 34 -
 .../DoubleMeanNoNullAveragerTest.java | 79 --
 .../DoubleMinAveragerFactoryTest.java | 34 -
 .../averagers/DoubleMinAveragerTest.java | 55 --
 .../DoubleSumAveragerFactoryTest.java | 36 -
 .../averagers/DoubleSumAveragerTest.java | 57 --
 .../averagers/LongMaxAveragerFactoryTest.java | 34 -
 .../averagers/LongMaxAveragerTest.java | 54 --
 .../LongMeanAveragerFactoryTest.java | 34 -
 .../averagers/LongMeanAveragerTest.java | 54 --
 .../LongMeanNoNullAveragerFactoryTest.java | 34 -
 .../averagers/LongMeanNoNullAveragerTest.java | 55 --
 .../averagers/LongMinAveragerFactoryTest.java | 34 -
 .../averagers/LongMinAveragerTest.java | 55 --
 .../averagers/LongSumAveragerFactoryTest.java | 36 -
 .../averagers/LongSumAveragerTest.java | 54 --
 .../query/movingaverage/test/TestConfig.java | 35 -
 .../queryTests/basicGroupByMovingAverage.yaml | 46 -
 .../basicGroupByMovingAverage2.yaml | 47 -
 .../basicTimeseriesMovingAverage.yaml | 51 --
 .../queryTests/missingGroupByValues.yaml | 60 --
 .../queryTests/sortingAveragersAsc.yaml | 60 --
 .../queryTests/sortingAveragersDesc.yaml | 61 --
 ...ortingWithNonMovingAndMovingAvgMetric.yaml | 66 --
 .../sortingWithNonMovingAvgMetric.yaml | 64 --
 extensions-contrib/opentsdb-emitter/pom.xml | 102 ---
 .../emitter/opentsdb/EventConverter.java | 126 ---
 .../emitter/opentsdb/OpentsdbEmitter.java | 100 --
 .../opentsdb/OpentsdbEmitterConfig.java | 189 ----
 .../opentsdb/OpentsdbEmitterModule.java | 58 --
 .../druid/emitter/opentsdb/OpentsdbEvent.java | 109 ---
 .../emitter/opentsdb/OpentsdbSender.java | 168 ----
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../src/main/resources/defaultMetrics.json | 211 -----
 .../emitter/opentsdb/EventConverterTest.java | 146 ---
 .../opentsdb/OpentsdbEmitterConfigTest.java | 84 --
 .../emitter/opentsdb/OpentsdbEventTest.java | 54 --
 .../emitter/opentsdb/OpentsdbSenderTest.java | 34 -
 extensions-contrib/redis-cache/pom.xml | 95 --
 .../apache/druid/client/cache/RedisCache.java | 197 ----
 .../druid/client/cache/RedisCacheConfig.java | 86 --
 .../client/cache/RedisCacheProvider.java | 32 -
 .../druid/client/cache/RedisDruidModule.java | 43 -
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../druid/client/cache/RedisCacheTest.java | 212 -----
 .../sqlserver-metadata-storage/pom.xml | 88 --
 .../storage/sqlserver/SQLServerConnector.java | 285 ------
 .../SQLServerMetadataStorageModule.java | 84 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../CustomStatementRewriterTest.java | 142 ---
 .../sqlserver/SQLServerConnectorTest.java | 67 --
 extensions-contrib/statsd-emitter/pom.xml | 111 ---
 .../emitter/statsd/DimensionConverter.java | 98 --
 .../druid/emitter/statsd/StatsDEmitter.java | 257 ------
 .../emitter/statsd/StatsDEmitterConfig.java | 200 ----
 .../emitter/statsd/StatsDEmitterModule.java | 60 --
 .../druid/emitter/statsd/StatsDMetric.java | 51 --
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../resources/defaultMetricDimensions.json | 127 ---
 .../statsd/DimensionConverterTest.java | 62 --
 .../emitter/statsd/StatsDEmitterTest.java | 245 -----
 extensions-contrib/tdigestsketch/pom.xml | 184 ----
 .../TDigestSketchAggregator.java | 105 ---
 .../TDigestSketchAggregatorFactory.java | 299 ------
 .../TDigestSketchBufferAggregator.java | 131 ---
 .../TDigestSketchComplexMetricSerde.java | 111 ---
 .../TDigestSketchJsonSerializer.java | 41 -
 .../tdigestsketch/TDigestSketchModule.java | 79 --
 .../TDigestSketchObjectStrategy.java | 63 --
 ...TDigestSketchToQuantilePostAggregator.java | 146 ---
 ...DigestSketchToQuantilesPostAggregator.java | 159 ----
 .../tdigestsketch/TDigestSketchUtils.java | 110 ---
 .../TDigestGenerateSketchSqlAggregator.java | 183 ----
 .../TDigestSketchQuantileSqlAggregator.java | 214 -----
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../tdigestsketch/GenerateTestData.java | 80 --
 .../TDigestSketchAggregatorTest.java | 264 ------
 ...estSketchToQuantilePostAggregatorTest.java | 73 --
 ...stSketchToQuantilesPostAggregatorTest.java | 90 --
 .../sql/TDigestSketchSqlAggregatorTest.java | 476 ----------
 .../src/test/resources/doubles_build_data.tsv | 400 --------
 .../test/resources/doubles_sketch_data.tsv | 20 -
 .../thrift-extensions/example/books.json | 65 --
 extensions-contrib/thrift-extensions/pom.xml | 206 -----
 .../input/thrift/ThriftDeserialization.java | 122 ---
 .../input/thrift/ThriftExtensionsModule.java | 49 -
 .../input/thrift/ThriftInputRowParser.java | 172 ----
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../thrift/ThriftInputRowParserTest.java | 161 ----
 .../src/test/thrift/book.thrift | 28 -
 extensions-contrib/time-min-max/pom.xml | 113 ---
 .../aggregation/TimestampAggregator.java | 104 ---
 .../TimestampAggregatorFactory.java | 267 ------
 .../TimestampBufferAggregator.java | 99 --
 .../TimestampMaxAggregatorFactory.java | 49 -
 .../TimestampMinAggregatorFactory.java | 49 -
 .../aggregation/TimestampMinMaxModule.java | 50 -
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../TimestampGroupByAggregationTest.java | 195 ----
 .../TimestampMinMaxAggregatorTest.java | 175 ----
 .../src/test/resources/druid.sample.tsv.zip | Bin 11899 -> 0 bytes
 extensions-contrib/virtual-columns/pom.xml | 107 ---
 .../segment/DruidVirtualColumnsModule.java | 50 -
 ...TypeMapVirtualColumnDimensionSelector.java | 143 ---
 .../druid/segment/MapVirtualColumn.java | 269 ------
 .../MapVirtualColumnDimensionSelector.java | 76 --
 .../MapVirtualColumnValueSelector.java | 69 --
 ...TypeMapVirtualColumnDimensionSelector.java | 198 ----
 ...rg.apache.druid.initialization.DruidModule | 16 -
 .../segment/MapVirtualColumnGroupByTest.java | 174 ----
 .../segment/MapVirtualColumnTestBase.java | 83 --
 .../segment/MapVirtualColumnTopNTest.java | 144 ---
 pom.xml | 24 -
 368 files changed, 43975 deletions(-)
 delete mode 100644 extensions-contrib/README.md
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/pom.xml
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssClientConfig.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssEntity.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSource.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSourceDruidModule.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/OssFirehoseDruidModule.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/StaticOssFirehoseFactory.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiver.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverConfig.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentKiller.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentMover.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPusher.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssInputDataConfig.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssLoadSpec.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssObjectSummaryIterator.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageConfig.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageDruidModule.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogs.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogsConfig.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinder.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/data/input/aliyun/OssInputSourceTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentKillerTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentMoverTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPullerTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherConfigTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssObjectSummaryIteratorTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTaskLogsTest.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTestUtils.java
 delete mode 100644 extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinderTest.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/pom.xml
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/DruidToTimelineMetricConverter.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/SendAllTimelineEventConverter.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/main/resources/defaultWhiteListMap.json
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfigTest.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/DruidToWhiteListBasedConverterTest.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java
 delete mode 100644 extensions-contrib/ambari-metrics-emitter/src/test/resources/testWhiteListedStringArrayDimension.json
 delete mode 100644 extensions-contrib/cassandra-storage/pom.xml
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentConfig.java
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPusher.java
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDruidModule.java
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraLoadSpec.java
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraStorage.java
 delete mode 100644 extensions-contrib/cassandra-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/cloudfiles-extensions/pom.xml
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesFirehoseDruidModule.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesAccountConfig.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSource.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesLoadSpec.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObject.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxy.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactoryTest.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java
 delete mode 100644 extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java
 delete mode 100644 extensions-contrib/distinctcount/pom.xml
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/BitMapFactory.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregator.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountBufferAggregator.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountDruidModule.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountAggregator.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountBufferAggregator.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java
 delete mode 100644 extensions-contrib/distinctcount/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java
 delete mode 100644 extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java
 delete mode 100644 extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/pom.xml
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardConverter.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitter.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfig.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterModule.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardMetricSpec.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardReporter.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/GaugesCache.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardConsoleReporter.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardJMXReporter.java
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/main/resources/defaultMetricDimensions.json
 delete mode 100644 extensions-contrib/dropwizard-emitter/src/test/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfigTest.java
 delete mode 100644 extensions-contrib/gce-extensions/pom.xml
 delete mode 100644 extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScaler.java
 delete mode 100644 extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceEnvironmentConfig.java
 delete mode 100644 extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceModule.java
 delete mode 100644 extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceServiceException.java
 delete mode 100644 extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtils.java
 delete mode 100644 extensions-contrib/gce-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScalerTest.java
 delete mode 100644 extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtilsTest.java
 delete mode 100644 extensions-contrib/graphite-emitter/pom.xml
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/DruidToGraphiteEventConverter.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfig.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterModule.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEvent.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/SendAllGraphiteEventConverter.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/graphite-emitter/src/main/resources/defaultWhiteListMap.json
 delete mode 100644 extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfigTest.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java
 delete mode 100644 extensions-contrib/graphite-emitter/src/test/resources/testWhiteListedStringArrayDimension.json
 delete mode 100644 extensions-contrib/influx-extensions/pom.xml
 delete mode 100644 extensions-contrib/influx-extensions/src/main/antlr4/org/apache/druid/data/input/influx/InfluxLineProtocol.g4
 delete mode 100644 extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxExtensionsModule.java
 delete mode 100644 extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParseSpec.java
 delete mode 100644 extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java
 delete mode 100644 extensions-contrib/influx-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java
 delete mode 100644 extensions-contrib/influxdb-emitter/pom.xml
 delete mode 100644 extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitter.java
 delete mode 100644 extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfig.java
 delete mode 100644 extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterModule.java
 delete mode 100644 extensions-contrib/influxdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfigTest.java
 delete mode 100644 extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterTest.java
 delete mode 100644 extensions-contrib/kafka-emitter/pom.xml
 delete mode 100644 extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java
 delete mode 100644 extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java
 delete mode 100644 extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterModule.java
 delete mode 100644 extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java
 delete mode 100644 extensions-contrib/kafka-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/pom.xml
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadata.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewMaintenanceDruidModule.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorReport.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewTaskConfig.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadataTest.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java
 delete mode 100644 extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java
 delete mode 100644 extensions-contrib/materialized-view-selection/pom.xml
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerMonitor.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerStats.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSource.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewConfig.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChest.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryRunner.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewSelectionDruidModule.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java
 delete mode 100644 extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewUtilsTest.java
 delete mode 100644 extensions-contrib/momentsketch/pom.xml
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchComplexMetricSerde.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchJsonSerializer.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchModule.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchObjectStrategy.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapper.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactory.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildBufferAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregatorFactory.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregator.java
 delete mode 100644 extensions-contrib/momentsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java
 delete mode 100644 extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactorySerdeTest.java
 delete mode 100644 extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregatorTest.java
 delete mode 100644 extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregatorTest.java
 delete mode 100644 extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregatorTest.java
 delete mode 100644 extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java
 delete mode 100644 extensions-contrib/momentsketch/src/test/resources/doubles_build_data.tsv
 delete mode 100644 extensions-contrib/moving-average-query/README.md
 delete mode 100644 extensions-contrib/moving-average-query/pom.xml
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/AveragerFactoryWrapper.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/BucketingAccumulator.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetrics.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetricsFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageHelper.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageIterable.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetrics.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetricsFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryRunner.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryToolChest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculator.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucket.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/Averager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/AveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ComparableAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAverager.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactory.java
 delete mode 100644 extensions-contrib/moving-average-query/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactoryTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerTest.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/test/TestConfig.java
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage2.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/basicTimeseriesMovingAverage.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/missingGroupByValues.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersAsc.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersDesc.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAndMovingAvgMetric.yaml
 delete mode 100644 extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAvgMetric.yaml
 delete mode 100644 extensions-contrib/opentsdb-emitter/pom.xml
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/EventConverter.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitter.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfig.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterModule.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEvent.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbSender.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/main/resources/defaultMetrics.json
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfigTest.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEventTest.java
 delete mode 100644 extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbSenderTest.java
 delete mode 100644 extensions-contrib/redis-cache/pom.xml
 delete mode 100644 extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCache.java
 delete mode 100644 extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheConfig.java
 delete mode 100644 extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheProvider.java
 delete mode 100644 extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisDruidModule.java
 delete mode 100644 extensions-contrib/redis-cache/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/redis-cache/src/test/java/org/apache/druid/client/cache/RedisCacheTest.java
 delete mode 100644 extensions-contrib/sqlserver-metadata-storage/pom.xml
 delete mode 100644 extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnector.java
 delete mode 100644 extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerMetadataStorageModule.java
 delete mode 100644 extensions-contrib/sqlserver-metadata-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java
 delete mode 100644 extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java
 delete mode 100644 extensions-contrib/statsd-emitter/pom.xml
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/DimensionConverter.java
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterConfig.java
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterModule.java
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDMetric.java
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/statsd-emitter/src/main/resources/defaultMetricDimensions.json
 delete mode 100644 extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java
 delete mode 100644 extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java
 delete mode 100644 extensions-contrib/tdigestsketch/pom.xml
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregator.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorFactory.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchBufferAggregator.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchComplexMetricSerde.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchJsonSerializer.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchModule.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchObjectStrategy.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregator.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregator.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchUtils.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestGenerateSketchSqlAggregator.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchQuantileSqlAggregator.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/GenerateTestData.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorTest.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregatorTest.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregatorTest.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/resources/doubles_build_data.tsv
 delete mode 100644 extensions-contrib/tdigestsketch/src/test/resources/doubles_sketch_data.tsv
 delete mode 100644 extensions-contrib/thrift-extensions/example/books.json
 delete mode 100644 extensions-contrib/thrift-extensions/pom.xml
 delete mode 100644 extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftDeserialization.java
 delete mode 100644 extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java
 delete mode 100644 extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftInputRowParser.java
 delete mode 100755 extensions-contrib/thrift-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java
 delete mode 100644 extensions-contrib/thrift-extensions/src/test/thrift/book.thrift
 delete mode 100644 extensions-contrib/time-min-max/pom.xml
 delete mode 100644 extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregator.java
 delete mode 100644 extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregatorFactory.java
 delete mode 100644 extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampBufferAggregator.java
 delete mode 100644 extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMaxAggregatorFactory.java
 delete mode 100644 extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinAggregatorFactory.java
 delete mode 100644 extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinMaxModule.java
 delete mode 100644 extensions-contrib/time-min-max/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java
 delete mode 100644 extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampMinMaxAggregatorTest.java
 delete mode 100644 extensions-contrib/time-min-max/src/test/resources/druid.sample.tsv.zip
 delete mode 100644 extensions-contrib/virtual-columns/pom.xml
 delete mode 100644 extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/DruidVirtualColumnsModule.java
 delete mode 100644 extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java
 delete mode 100644 extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumn.java
 delete mode 100644 extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnDimensionSelector.java
 delete mode 100644 extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnValueSelector.java
 delete mode 100644 extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/StringTypeMapVirtualColumnDimensionSelector.java
 delete mode 100644 extensions-contrib/virtual-columns/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 delete mode 100644 extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java
 delete mode 100644 extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java
 delete mode 100644 extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java

diff --git a/distribution/pom.xml b/distribution/pom.xml
index 5a130142a71e..97644b9a6a52 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -349,90 +349,5 @@
-        <profile>
-            <id>bundle-contrib-exts</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.codehaus.mojo</groupId>
-                        <artifactId>exec-maven-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>pull-deps-contrib-exts</id>
-                                <phase>package</phase>
-                                <goals>
-                                    <goal>exec</goal>
-                                </goals>
-                                <configuration>
-                                    <executable>java</executable>
-                                    <arguments>
-                                        <argument>-classpath</argument>
-                                        <classpath/>
-                                        <argument>-Ddruid.extensions.loadList=[]</argument>
-                                        <argument>-Ddruid.extensions.directory=${project.build.directory}/extensions</argument>
-                                        <argument>-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies</argument>
-                                        <argument>org.apache.druid.cli.Main</argument>
-                                        <argument>tools</argument>
-                                        <argument>pull-deps</argument>
-                                        <argument>--defaultVersion</argument>
-                                        <argument>${project.parent.version}</argument>
-                                        <argument>-l</argument>
-                                        <argument>${settings.localRepository}</argument>
-                                        <argument>--no-default-hadoop</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:ambari-metrics-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:dropwizard-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-cassandra-storage</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-cloudfiles-extensions</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-distinctcount</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:graphite-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-influx-extensions</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-influxdb-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:kafka-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:materialized-view-maintenance</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:materialized-view-selection</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-opentsdb-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-redis-cache</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:sqlserver-metadata-storage</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:statsd-emitter</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-thrift-extensions</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-time-min-max</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-virtual-columns</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-moving-average-query</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:druid-tdigestsketch</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:gce-extensions</argument>
-                                        <argument>-c</argument>
-                                        <argument>org.apache.druid.extensions.contrib:aliyun-oss-extensions</argument>
-                                    </arguments>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
diff --git a/extensions-contrib/README.md b/extensions-contrib/README.md
deleted file mode 100644
index f08269fd01ae..000000000000
--- a/extensions-contrib/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-
-# Community Extensions
-
-Please contribute all community extensions in this directory and include a doc of how your extension can be used under [docs/development/extensions-contrib/](/docs/development/extensions-contrib).
-
-Please note that community extensions are maintained by their original contributors and are not packaged with the core Druid distribution.
-If you'd like to take on maintenance for a community extension, please post on [dev@druid.apache.org](https://lists.apache.org/list.html?dev@druid.apache.org) to let us know!
diff --git a/extensions-contrib/aliyun-oss-extensions/pom.xml b/extensions-contrib/aliyun-oss-extensions/pom.xml
deleted file mode 100644
index c85a5e2f3f59..000000000000
--- a/extensions-contrib/aliyun-oss-extensions/pom.xml
+++ /dev/null
@@ -1,180 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>org.apache.druid.extensions.contrib</groupId>
-    <artifactId>aliyun-oss-extensions</artifactId>
-    <name>aliyun-oss-extensions</name>
-    <description>aliyun-oss-extensions</description>
-
-    <parent>
-        <groupId>org.apache.druid</groupId>
-        <artifactId>druid</artifactId>
-        <version>0.19.0-iap2-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.druid</groupId>
-            <artifactId>druid-core</artifactId>
-            <version>${project.parent.version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.aliyun.oss</groupId>
-            <artifactId>aliyun-sdk-oss</artifactId>
-            <version>3.3.0</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.module</groupId>
-            <artifactId>jackson-module-guice</artifactId>
-            <version>${jackson.version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>commons-io</groupId>
-            <artifactId>commons-io</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>joda-time</groupId>
-            <artifactId>joda-time</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.google.inject</groupId>
-            <artifactId>guice</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.google.inject.extensions</groupId>
-            <artifactId>guice-multibindings</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>javax.validation</groupId>
-            <artifactId>validation-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.google.code.findbugs</groupId>
-            <artifactId>jsr305</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>commons-lang</groupId>
-            <artifactId>commons-lang</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <!-- Tests -->
-        <dependency>
-            <groupId>org.apache.druid</groupId>
-            <artifactId>druid-core</artifactId>
-            <version>${project.parent.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.druid</groupId>
-            <artifactId>druid-server</artifactId>
-            <version>${project.parent.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.druid</groupId>
-            <artifactId>druid-processing</artifactId>
-            <version>${project.parent.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.easymock</groupId>
-            <artifactId>easymock</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>nl.jqno.equalsverifier</groupId>
-            <artifactId>equalsverifier</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.hamcrest</groupId>
-            <artifactId>hamcrest-core</artifactId>
-            <version>1.3</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <appendAssemblyId>false</appendAssemblyId>
-                    <descriptorRefs>
-                        <descriptorRef>jar-with-dependencies</descriptorRef>
-                    </descriptorRefs>
-                    <archive>
-                        <manifest>
-                            <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
-                        </manifest>
-                    </archive>
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>make-assembly</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>single</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssClientConfig.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssClientConfig.java
deleted file mode 100644
index 9b91dbc98421..000000000000
--- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssClientConfig.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.data.input.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSClientBuilder; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import org.apache.druid.metadata.PasswordProvider; - -import java.util.Objects; - -/** - * Contains properties for aliyun OSS input source. - * Properties can be specified by ingestionSpec which will override system default. - */ -public class OssClientConfig -{ - @JsonCreator - public OssClientConfig( - @JsonProperty("endpoint") String endpoint, - @JsonProperty("accessKey") PasswordProvider accessKey, - @JsonProperty("secretKey") PasswordProvider secretKey - ) - { - this.accessKey = Preconditions.checkNotNull( - accessKey, - "accessKey cannot be null" - ); - this.secretKey = Preconditions.checkNotNull( - secretKey, - "secretKey cannot be null" - ); - this.endpoint = endpoint; - } - - @JsonProperty - private String endpoint; - - @JsonProperty - private PasswordProvider accessKey; - - @JsonProperty - private PasswordProvider secretKey; - - public String getEndpoint() - { - return endpoint; - } - - public PasswordProvider getAccessKey() - { - return accessKey; - } - - public PasswordProvider getSecretKey() - { - return secretKey; - } - - @JsonIgnore - public boolean isCredentialsConfigured() - { - return accessKey != null && - secretKey != null; - } - - @Override - public String toString() - { - return "OssInputSourceConfig{" + - "endpoint=" + endpoint + - "accessKeyId=" + accessKey + - ", secretAccessKey=" + secretKey + - '}'; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - OssClientConfig that = (OssClientConfig) o; - return Objects.equals(accessKey, that.accessKey) && - Objects.equals(secretKey, that.secretKey) && - Objects.equals(endpoint, that.endpoint); - } - - @Override - public int hashCode() - { - return Objects.hash(accessKey, secretKey, endpoint); - } - - public OSS buildClient() - { - return new OSSClientBuilder().build(endpoint, accessKey.getPassword(), secretKey.getPassword()); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssEntity.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssEntity.java deleted file mode 100644 index 3f501f212f7b..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssEntity.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.data.input.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.GetObjectRequest; -import com.aliyun.oss.model.OSSObject; -import com.google.common.base.Predicate; -import org.apache.druid.data.input.RetryingInputEntity; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.storage.aliyun.OssStorageDruidModule; -import org.apache.druid.storage.aliyun.OssUtils; - -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; - -public class OssEntity extends RetryingInputEntity -{ - private final OSS ossClient; - private final CloudObjectLocation object; - - OssEntity(OSS ossClient, CloudObjectLocation coords) - { - this.ossClient = ossClient; - this.object = coords; - } - - @Override - public URI getUri() - { - return object.toUri(OssStorageDruidModule.SCHEME); - } - - @Override - protected InputStream readFrom(long offset) throws IOException - { - final GetObjectRequest request = new GetObjectRequest(object.getBucket(), object.getPath()); - request.setRange(offset, -1 /*from offset to end*/); - - try { - final OSSObject ossObject = ossClient.getObject(request); - if (ossObject == null) { - throw new ISE( - "Failed to get an Aliyun OSS object for bucket[%s], key[%s], and start[%d]", - object.getBucket(), - object.getPath(), - offset - ); - } - return ossObject.getObjectContent(); - } - catch (OSSException e) { - throw new IOException(e); - } - } - - @Override - protected String getPath() - { - return object.getPath(); - } - - @Override - public Predicate getRetryCondition() - { - return OssUtils.RETRYABLE; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSource.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSource.java deleted file mode 100644 index e8559e5606e8..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSource.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.data.input.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.OSSObjectSummary; -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.base.Supplier; -import com.google.common.base.Suppliers; -import org.apache.druid.data.input.InputEntity; -import org.apache.druid.data.input.InputFileAttribute; -import org.apache.druid.data.input.InputSplit; -import org.apache.druid.data.input.SplitHintSpec; -import org.apache.druid.data.input.impl.CloudObjectInputSource; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.data.input.impl.SplittableInputSource; -import org.apache.druid.storage.aliyun.OssInputDataConfig; -import org.apache.druid.storage.aliyun.OssStorageDruidModule; -import org.apache.druid.storage.aliyun.OssUtils; -import org.apache.druid.utils.Streams; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.net.URI; -import java.util.Iterator; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class OssInputSource extends CloudObjectInputSource -{ - private final Supplier clientSupplier; - @JsonProperty("properties") - private final OssClientConfig inputSourceConfig; - private final OssInputDataConfig inputDataConfig; - - /** - * Constructor for OssInputSource - * - * @param client The default client built with all default configs - * from Guice. This injected singleton client is used when {@param inputSourceConfig} - * is not provided and hence - * @param inputDataConfig Stores the configuration for options related to reading input data - * @param uris User provided uris to read input data - * @param prefixes User provided prefixes to read input data - * @param objects User provided cloud objects values to read input data - * @param inputSourceConfig User provided properties for overriding the default aliyun OSS configuration - */ - @JsonCreator - public OssInputSource( - @JacksonInject OSS client, - @JacksonInject OssInputDataConfig inputDataConfig, - @JsonProperty("uris") @Nullable List uris, - @JsonProperty("prefixes") @Nullable List prefixes, - @JsonProperty("objects") @Nullable List objects, - @JsonProperty("properties") @Nullable OssClientConfig inputSourceConfig - ) - { - super(OssStorageDruidModule.SCHEME, uris, prefixes, objects); - this.inputDataConfig = Preconditions.checkNotNull(inputDataConfig, "inputDataConfig"); - Preconditions.checkNotNull(client, "client"); - this.inputSourceConfig = inputSourceConfig; - this.clientSupplier = Suppliers.memoize( - () -> { - if (inputSourceConfig != null) { - return inputSourceConfig.buildClient(); - } else { - return client; - } - } - ); - } - - - @Nullable - @JsonProperty("properties") - public OssClientConfig getOssInputSourceConfig() - { - return inputSourceConfig; - } - - @Override - protected InputEntity createEntity(CloudObjectLocation location) - { - return new OssEntity(clientSupplier.get(), location); - } - - @Override - protected Stream>> getPrefixesSplitStream(@Nonnull SplitHintSpec splitHintSpec) - { - final Iterator> splitIterator = splitHintSpec.split( - getIterableObjectsFromPrefixes().iterator(), - object -> new InputFileAttribute(object.getSize()) - 
); - - return Streams.sequentialStreamFrom(splitIterator) - .map(objects -> objects.stream() - .map(OssUtils::summaryToCloudObjectLocation) - .collect(Collectors.toList())) - .map(InputSplit::new); - } - - @Override - public SplittableInputSource> withSplit(InputSplit> split) - { - return new OssInputSource( - clientSupplier.get(), - inputDataConfig, - null, - null, - split.get(), - getOssInputSourceConfig() - ); - } - - @Override - public int hashCode() - { - return Objects.hash(super.hashCode(), inputSourceConfig); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } - OssInputSource that = (OssInputSource) o; - return Objects.equals(inputSourceConfig, that.inputSourceConfig); - } - - @Override - public String toString() - { - return "OssInputSource{" + - "uris=" + getUris() + - ", prefixes=" + getPrefixes() + - ", objects=" + getObjects() + - ", ossInputSourceConfig=" + getOssInputSourceConfig() + - '}'; - } - - private Iterable getIterableObjectsFromPrefixes() - { - return () -> OssUtils.objectSummaryIterator( - clientSupplier.get(), - getPrefixes(), - inputDataConfig.getMaxListingLength() - ); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSourceDruidModule.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSourceDruidModule.java deleted file mode 100644 index b33a62e6c350..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/data/input/aliyun/OssInputSourceDruidModule.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.data.input.aliyun; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.storage.aliyun.OssStorageDruidModule; - -import java.util.List; - -/** - * Druid module to wire up native batch support for aliyun OSS input - */ -public class OssInputSourceDruidModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule().registerSubtypes(new NamedType(OssInputSource.class, OssStorageDruidModule.SCHEME)) - ); - } - - @Override - public void configure(Binder binder) - { - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/OssFirehoseDruidModule.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/OssFirehoseDruidModule.java deleted file mode 100644 index 864717657fe8..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/OssFirehoseDruidModule.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.firehose.aliyun; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -public class OssFirehoseDruidModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule().registerSubtypes(new NamedType(StaticOssFirehoseFactory.class, "static-aliyun-oss")) - ); - } - - @Override - public void configure(Binder binder) - { - - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/StaticOssFirehoseFactory.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/StaticOssFirehoseFactory.java deleted file mode 100644 index d71198826dc7..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/firehose/aliyun/StaticOssFirehoseFactory.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.firehose.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.GetObjectRequest; -import com.aliyun.oss.model.OSSObject; -import com.aliyun.oss.model.OSSObjectSummary; -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.base.Predicate; -import org.apache.druid.data.input.FiniteFirehoseFactory; -import org.apache.druid.data.input.InputSplit; -import org.apache.druid.data.input.impl.StringInputRowParser; -import org.apache.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.storage.aliyun.OssStorageDruidModule; -import org.apache.druid.storage.aliyun.OssUtils; -import org.apache.druid.utils.CompressionUtils; - -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -/** - * Builds firehoses that read from a predefined list of aliyun OSS objects and then dry up. - */ -public class StaticOssFirehoseFactory extends PrefetchableTextFilesFirehoseFactory -{ - private static final Logger log = new Logger(StaticOssFirehoseFactory.class); - - private final OSS client; - private final List uris; - private final List prefixes; - - @JsonCreator - public StaticOssFirehoseFactory( - @JacksonInject OSS client, - @JsonProperty("uris") List uris, - @JsonProperty("prefixes") List prefixes, - @JsonProperty("maxCacheCapacityBytes") Long maxCacheCapacityBytes, - @JsonProperty("maxFetchCapacityBytes") Long maxFetchCapacityBytes, - @JsonProperty("prefetchTriggerBytes") Long prefetchTriggerBytes, - @JsonProperty("fetchTimeout") Long fetchTimeout, - @JsonProperty("maxFetchRetry") Integer maxFetchRetry - ) - { - super(maxCacheCapacityBytes, maxFetchCapacityBytes, prefetchTriggerBytes, fetchTimeout, maxFetchRetry); - this.client = Preconditions.checkNotNull(client, "client"); - this.uris = uris == null ? new ArrayList<>() : uris; - this.prefixes = prefixes == null ? 
new ArrayList<>() : prefixes; - - if (!this.uris.isEmpty() && !this.prefixes.isEmpty()) { - throw new IAE("uris and prefixes cannot be used together"); - } - - if (this.uris.isEmpty() && this.prefixes.isEmpty()) { - throw new IAE("uris or prefixes must be specified"); - } - - for (final URI inputURI : this.uris) { - Preconditions.checkArgument(OssStorageDruidModule.SCHEME.equals(inputURI.getScheme()), - "input uri scheme == %s (%s)", - OssStorageDruidModule.SCHEME, - inputURI); - } - - for (final URI inputURI : this.prefixes) { - Preconditions.checkArgument(OssStorageDruidModule.SCHEME.equals(inputURI.getScheme()), - "input uri scheme == %s (%s)", - OssStorageDruidModule.SCHEME, - inputURI); - } - } - - @JsonProperty - public List getUris() - { - return uris; - } - - @JsonProperty("prefixes") - public List getPrefixes() - { - return prefixes; - } - - @Override - protected Collection initObjects() - { - if (!uris.isEmpty()) { - return uris; - } else { - final List objects = new ArrayList<>(); - for (final URI prefix : prefixes) { - final Iterator objectSummaryIterator = OssUtils.objectSummaryIterator( - client, - Collections.singletonList(prefix), - OssUtils.MAX_LISTING_LENGTH - ); - - objectSummaryIterator.forEachRemaining(objects::add); - } - return objects.stream().map(OssUtils::summaryToUri).collect(Collectors.toList()); - } - } - - @Override - protected InputStream openObjectStream(URI object) throws IOException - { - try { - // Get data of the given object and open an input stream - final String bucket = object.getAuthority(); - final String key = OssUtils.extractKey(object); - - final OSSObject ossObject = client.getObject(bucket, key); - if (ossObject == null) { - throw new ISE("Failed to get an Aliyun OSS object for bucket[%s] and key[%s]", bucket, key); - } - return ossObject.getObjectContent(); - } - catch (OSSException e) { - throw new IOException(e); - } - } - - @Override - protected InputStream openObjectStream(URI object, long start) throws IOException - { - final String bucket = object.getAuthority(); - final String key = OssUtils.extractKey(object); - - final GetObjectRequest request = new GetObjectRequest(bucket, key); - try { - final OSSObject ossObject = client.getObject(request); - if (ossObject == null) { - throw new ISE( - "Failed to get an Aliyun OSS object for bucket[%s], key[%s], and start[%d]", - bucket, - key, - start - ); - } - InputStream is = ossObject.getObjectContent(); - is.skip(start); - return is; - } - catch (OSSException e) { - throw new IOException(e); - } - } - - @Override - protected InputStream wrapObjectStream(URI object, InputStream stream) throws IOException - { - return CompressionUtils.decompress(stream, OssUtils.extractKey(object)); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - StaticOssFirehoseFactory that = (StaticOssFirehoseFactory) o; - - return Objects.equals(uris, that.uris) && - Objects.equals(prefixes, that.prefixes) && - getMaxCacheCapacityBytes() == that.getMaxCacheCapacityBytes() && - getMaxFetchCapacityBytes() == that.getMaxFetchCapacityBytes() && - getPrefetchTriggerBytes() == that.getPrefetchTriggerBytes() && - getFetchTimeout() == that.getFetchTimeout() && - getMaxFetchRetry() == that.getMaxFetchRetry(); - } - - @Override - public int hashCode() - { - return Objects.hash( - uris, - prefixes, - getMaxCacheCapacityBytes(), - getMaxFetchCapacityBytes(), - getPrefetchTriggerBytes(), - getFetchTimeout(), - 
getMaxFetchRetry() - ); - } - - @Override - protected Predicate getRetryCondition() - { - return OssUtils.RETRYABLE; - } - - @Override - public FiniteFirehoseFactory withSplit(InputSplit split) - { - return new StaticOssFirehoseFactory( - client, - Collections.singletonList(split.get()), - null, - getMaxCacheCapacityBytes(), - getMaxFetchCapacityBytes(), - getPrefetchTriggerBytes(), - getFetchTimeout(), - getMaxFetchRetry() - ); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiver.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiver.java deleted file mode 100644 index b836876af987..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiver.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Objects; -import com.google.common.collect.ImmutableMap; -import com.google.inject.Inject; -import org.apache.druid.guice.annotations.Json; -import org.apache.druid.segment.loading.DataSegmentArchiver; -import org.apache.druid.segment.loading.LoadSpec; -import org.apache.druid.segment.loading.SegmentLoadingException; -import org.apache.druid.timeline.DataSegment; - -public class OssDataSegmentArchiver extends OssDataSegmentMover implements DataSegmentArchiver -{ - private final OssDataSegmentArchiverConfig archiveConfig; - private final OssStorageConfig restoreConfig; - private final ObjectMapper mapper; - - @Inject - public OssDataSegmentArchiver( - @Json ObjectMapper mapper, - OSS client, - OssDataSegmentArchiverConfig archiveConfig, - OssStorageConfig restoreConfig - ) - { - super(client, restoreConfig); - this.mapper = mapper; - this.archiveConfig = archiveConfig; - this.restoreConfig = restoreConfig; - } - - @Override - public DataSegment archive(DataSegment segment) throws SegmentLoadingException - { - String targetBucket = archiveConfig.getArchiveBucket(); - String targetKey = archiveConfig.getArchiveBaseKey(); - - final DataSegment archived = move( - segment, - ImmutableMap.of( - "bucket", targetBucket, - "baseKey", targetKey - ) - ); - if (sameLoadSpec(segment, archived)) { - return null; - } - return archived; - } - - @Override - public DataSegment restore(DataSegment segment) throws SegmentLoadingException - { - String targetBucket = restoreConfig.getBucket(); - String targetKey = restoreConfig.getPrefix(); - - final DataSegment restored = move( - segment, - ImmutableMap.of( - "bucket", targetBucket, - "baseKey", targetKey - ) - ); - - if 
(sameLoadSpec(segment, restored)) { - return null; - } - return restored; - } - - boolean sameLoadSpec(DataSegment s1, DataSegment s2) - { - final OssLoadSpec s1LoadSpec = (OssLoadSpec) mapper.convertValue(s1.getLoadSpec(), LoadSpec.class); - final OssLoadSpec s2LoadSpec = (OssLoadSpec) mapper.convertValue(s2.getLoadSpec(), LoadSpec.class); - return Objects.equal(s1LoadSpec.getBucket(), s2LoadSpec.getBucket()) && Objects.equal( - s1LoadSpec.getKey(), - s2LoadSpec.getKey() - ); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverConfig.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverConfig.java deleted file mode 100644 index dd659044e9fe..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverConfig.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.fasterxml.jackson.annotation.JsonProperty; - -public class OssDataSegmentArchiverConfig -{ - @JsonProperty - private String archiveBucket = ""; - - @JsonProperty - private String archiveBaseKey = ""; - - public String getArchiveBucket() - { - return archiveBucket; - } - - public String getArchiveBaseKey() - { - return archiveBaseKey; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentKiller.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentKiller.java deleted file mode 100644 index 4149d6457c0d..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentKiller.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.apache.druid.storage.aliyun;
-
-import com.aliyun.oss.OSS;
-import com.aliyun.oss.OSSException;
-import com.google.common.base.Predicates;
-import com.google.inject.Inject;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.java.util.common.MapUtils;
-import org.apache.druid.java.util.common.logger.Logger;
-import org.apache.druid.segment.loading.DataSegmentKiller;
-import org.apache.druid.segment.loading.SegmentLoadingException;
-import org.apache.druid.timeline.DataSegment;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class OssDataSegmentKiller implements DataSegmentKiller
-{
-  private static final Logger log = new Logger(OssDataSegmentKiller.class);
-
-  private final OSS client;
-  private final OssStorageConfig segmentPusherConfig;
-  private final OssInputDataConfig inputDataConfig;
-
-  @Inject
-  public OssDataSegmentKiller(
-      OSS client,
-      OssStorageConfig segmentPusherConfig,
-      OssInputDataConfig inputDataConfig
-  )
-  {
-    this.client = client;
-    this.segmentPusherConfig = segmentPusherConfig;
-    this.inputDataConfig = inputDataConfig;
-  }
-
-  @Override
-  public void kill(DataSegment segment) throws SegmentLoadingException
-  {
-    try {
-      Map<String, Object> loadSpec = segment.getLoadSpec();
-      String bucket = MapUtils.getString(loadSpec, "bucket");
-      String path = MapUtils.getString(loadSpec, "key");
-
-      if (client.doesObjectExist(bucket, path)) {
-        log.info("Removing index file[%s://%s/%s] from aliyun OSS!", OssStorageDruidModule.SCHEME, bucket, path);
-        client.deleteObject(bucket, path);
-      }
-    }
-    catch (OSSException e) {
-      throw new SegmentLoadingException(e, "Couldn't kill segment[%s]: [%s]", segment.getId(), e);
-    }
-  }
-
-  @Override
-  public void killAll() throws IOException
-  {
-    if (segmentPusherConfig.getBucket() == null || segmentPusherConfig.getPrefix() == null) {
-      throw new ISE(
-          "Cannot delete all segments from aliyun OSS deep storage, since druid.storage.bucket and druid.storage.baseKey are not both set.");
-    }
-    log.info("Deleting all segment files from aliyun OSS location [bucket: '%s' prefix: '%s']",
-             segmentPusherConfig.getBucket(), segmentPusherConfig.getPrefix()
-    );
-    try {
-      OssUtils.deleteObjectsInPath(
-          client,
-          inputDataConfig,
-          segmentPusherConfig.getBucket(),
-          segmentPusherConfig.getPrefix(),
-          Predicates.alwaysTrue()
-      );
-    }
-    catch (Exception e) {
-      log.error("Error occurred while deleting segment files from aliyun OSS. Error: %s", e.getMessage());
-      throw new IOException(e);
-    }
-  }
-}
diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentMover.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentMover.java
deleted file mode 100644
index a93a2762f658..000000000000
--- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentMover.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.CopyObjectRequest; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import com.aliyun.oss.model.StorageClass; -import com.google.common.base.Predicate; -import com.google.common.base.Throwables; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; -import com.google.inject.Inject; -import org.apache.druid.java.util.common.IOE; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.MapUtils; -import org.apache.druid.java.util.common.RetryUtils; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.segment.loading.DataSegmentMover; -import org.apache.druid.segment.loading.DataSegmentPusher; -import org.apache.druid.segment.loading.SegmentLoadingException; -import org.apache.druid.timeline.DataSegment; - -import java.io.IOException; -import java.util.Map; - -public class OssDataSegmentMover implements DataSegmentMover -{ - private static final Logger log = new Logger(OssDataSegmentMover.class); - - private final OSS client; - private final OssStorageConfig config; - - @Inject - public OssDataSegmentMover( - OSS client, - OssStorageConfig config - ) - { - this.client = client; - this.config = config; - } - - @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) throws SegmentLoadingException - { - try { - Map loadSpec = segment.getLoadSpec(); - String bucket = MapUtils.getString(loadSpec, "bucket"); - String key = MapUtils.getString(loadSpec, "key"); - - final String targetBucket = MapUtils.getString(targetLoadSpec, "bucket"); - final String targetKey = MapUtils.getString(targetLoadSpec, "baseKey"); - - final String targetPath = OssUtils.constructSegmentPath( - targetKey, - DataSegmentPusher.getDefaultStorageDir(segment, false) - ); - - if (targetBucket.isEmpty()) { - throw new SegmentLoadingException("Target OSS bucket is not specified"); - } - if (targetPath.isEmpty()) { - throw new SegmentLoadingException("Target OSS baseKey is not specified"); - } - - safeMove(bucket, key, targetBucket, targetPath); - - return segment.withLoadSpec( - ImmutableMap.builder() - .putAll( - Maps.filterKeys( - loadSpec, - new Predicate() - { - @Override - public boolean apply(String input) - { - return !("bucket".equals(input) || "key".equals(input)); - } - } - ) - ) - .put("bucket", targetBucket) - .put("key", targetPath) - .build() - ); - } - catch (OSSException e) { - throw new SegmentLoadingException(e, "Unable to move segment[%s]: [%s]", segment.getId(), e); - } - } - - private void safeMove( - final String srcBucket, - final String srcPath, - final String targetBucket, - final String targetPath - ) throws SegmentLoadingException - { - try { - OssUtils.retry( - () -> { - final String copyMsg = StringUtils.format( - "[%s://%s/%s] to [%s://%s/%s]", - OssStorageDruidModule.SCHEME, - 
-              srcBucket,
-              srcPath,
-              OssStorageDruidModule.SCHEME,
-              targetBucket,
-              targetPath
-          );
-          try {
-            selfCheckingMove(srcBucket, targetBucket, srcPath, targetPath, copyMsg);
-            return null;
-          }
-          catch (OSSException | IOException | SegmentLoadingException e) {
-            log.info(e, "Error while trying to move " + copyMsg);
-            throw e;
-          }
-        }
-      );
-    }
-    catch (Exception e) {
-      Throwables.propagateIfInstanceOf(e, OSSException.class);
-      Throwables.propagateIfInstanceOf(e, SegmentLoadingException.class);
-      throw new RuntimeException(e);
-    }
-  }
-
-  /**
-   * Copies an object and after that checks that the object is present at the target location, via a separate API call.
-   * If it is not, an exception is thrown, and the object is not deleted at the old location. This "paranoid" check
-   * is added after it was observed that OSS may report a successful move even though the object is not found at the
-   * target location.
-   */
-  private void selfCheckingMove(
-      String srcBucket,
-      String dstBucket,
-      String srcPath,
-      String dstPath,
-      String copyMsg
-  ) throws IOException, SegmentLoadingException
-  {
-    if (srcBucket.equals(dstBucket) && srcPath.equals(dstPath)) {
-      log.info("No need to move file[%s://%s/%s] onto itself", OssStorageDruidModule.SCHEME, srcBucket, srcPath);
-      return;
-    }
-    if (client.doesObjectExist(srcBucket, srcPath)) {
-      final ObjectListing listResult = client.listObjects(
-          new ListObjectsRequest(srcBucket, srcPath, null, null, 1)
-      );
-      // Using getObjectSummaries().size() instead of getKeyCount because, in some cases,
-      // getObjectSummaries returns data even though keyCount is still zero.
-      if (listResult.getObjectSummaries().size() == 0) {
-        // should never happen
-        throw new ISE("Unable to list object [%s://%s/%s]", OssStorageDruidModule.SCHEME, srcBucket, srcPath);
-      }
-      final OSSObjectSummary objectSummary = listResult.getObjectSummaries().get(0);
-      if (objectSummary.getStorageClass() != null &&
-          objectSummary.getStorageClass().equals(StorageClass.IA.name())) {
-        throw new OSSException(
-            StringUtils.format(
-                "Cannot move file[%s://%s/%s] of storage class IA, skipping.",
-                OssStorageDruidModule.SCHEME,
-                srcBucket,
-                srcPath
-            )
-        );
-      } else {
-        log.info("Moving file %s", copyMsg);
-        final CopyObjectRequest copyRequest = new CopyObjectRequest(srcBucket, srcPath, dstBucket, dstPath);
-        client.copyObject(copyRequest);
-        if (!client.doesObjectExist(dstBucket, dstPath)) {
-          throw new IOE(
-              "After copy was reported as successful the file doesn't exist in the target location [%s]",
-              copyMsg
-          );
-        }
-        deleteWithRetriesSilent(srcBucket, srcPath);
-        log.debug("Finished moving file %s", copyMsg);
-      }
-    } else {
-      // ensure object exists in target location
-      if (client.doesObjectExist(dstBucket, dstPath)) {
-        log.info(
-            "Not moving file [%s://%s/%s], already present in target location [%s://%s/%s]",
-            OssStorageDruidModule.SCHEME,
-            srcBucket,
-            srcPath,
-            OssStorageDruidModule.SCHEME,
-            dstBucket,
-            dstPath
-        );
-      } else {
-        throw new SegmentLoadingException(
-            "Unable to move file %s, not present in either source or target location",
-            copyMsg
-        );
-      }
-    }
-  }
-
-  private void deleteWithRetriesSilent(final String bucket, final String path)
-  {
-    try {
-      deleteWithRetries(bucket, path);
-    }
-    catch (Exception e) {
-      log.error(e, "Failed to delete file [%s://%s/%s], giving up", OssStorageDruidModule.SCHEME, bucket, path);
-    }
-  }
-
-  private void deleteWithRetries(final String bucket, final String path) throws Exception
-  {
-    RetryUtils.retry(
-        () -> {
-          try {
-            client.deleteObject(bucket, path);
-            return null;
-          }
-          catch (Exception e) {
-            log.info(e, "Error while trying to delete [%s://%s/%s]", OssStorageDruidModule.SCHEME, bucket, path);
-            throw e;
-          }
-        },
-        OssUtils.RETRYABLE,
-        3
-    );
-  }
-}
diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java
deleted file mode 100644
index 82c0f2ed8c24..000000000000
--- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPuller.java
+++ /dev/null
@@ -1,308 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.storage.aliyun;
-
-import com.aliyun.oss.OSS;
-import com.aliyun.oss.OSSException;
-import com.aliyun.oss.model.OSSObject;
-import com.aliyun.oss.model.OSSObjectSummary;
-import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
-import com.google.common.io.ByteSource;
-import com.google.common.io.Files;
-import com.google.inject.Inject;
-import org.apache.druid.data.input.impl.CloudObjectLocation;
-import org.apache.druid.java.util.common.FileUtils;
-import org.apache.druid.java.util.common.IAE;
-import org.apache.druid.java.util.common.IOE;
-import org.apache.druid.java.util.common.RE;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.UOE;
-import org.apache.druid.java.util.common.io.Closer;
-import org.apache.druid.java.util.common.logger.Logger;
-import org.apache.druid.segment.loading.SegmentLoadingException;
-import org.apache.druid.segment.loading.URIDataPuller;
-import org.apache.druid.utils.CompressionUtils;
-
-import javax.tools.FileObject;
-import java.io.File;
-import java.io.FilterInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Reader;
-import java.io.Writer;
-import java.net.URI;
-
-/**
- * A data segment puller that also handles URI data pulls.
- */ -public class OssDataSegmentPuller implements URIDataPuller -{ - private static final Logger log = new Logger(OssDataSegmentPuller.class); - - static final String BUCKET = "bucket"; - protected static final String KEY = "key"; - - protected final OSS client; - - @Inject - public OssDataSegmentPuller(OSS client) - { - this.client = client; - } - - FileUtils.FileCopyResult getSegmentFiles(final CloudObjectLocation ossCoords, final File outDir) - throws SegmentLoadingException - { - - log.info("Pulling index at path[%s] to outDir[%s]", ossCoords, outDir); - - if (!isObjectInBucket(ossCoords)) { - throw new SegmentLoadingException("IndexFile[%s] does not exist.", ossCoords); - } - - try { - org.apache.commons.io.FileUtils.forceMkdir(outDir); - - final URI uri = ossCoords.toUri(OssStorageDruidModule.SCHEME); - final ByteSource byteSource = new ByteSource() - { - @Override - public InputStream openStream() throws IOException - { - try { - return buildFileObject(uri).openInputStream(); - } - catch (OSSException e) { - if (e.getCause() != null) { - if (OssUtils.RETRYABLE.apply(e)) { - throw new IOException("Recoverable exception", e); - } - } - throw new RuntimeException(e); - } - } - }; - if (CompressionUtils.isZip(ossCoords.getPath())) { - final FileUtils.FileCopyResult result = CompressionUtils.unzip( - byteSource, - outDir, - OssUtils.RETRYABLE, - false - ); - log.info("Loaded %d bytes from [%s] to [%s]", result.size(), ossCoords.toString(), outDir.getAbsolutePath()); - return result; - } - if (CompressionUtils.isGz(ossCoords.getPath())) { - final String fname = Files.getNameWithoutExtension(uri.getPath()); - final File outFile = new File(outDir, fname); - - final FileUtils.FileCopyResult result = CompressionUtils.gunzip(byteSource, outFile, OssUtils.RETRYABLE); - log.info("Loaded %d bytes from [%s] to [%s]", result.size(), ossCoords.toString(), outFile.getAbsolutePath()); - return result; - } - throw new IAE("Do not know how to load file type at [%s]", uri.toString()); - } - catch (Exception e) { - try { - FileUtils.deleteDirectory(outDir); - } - catch (IOException ioe) { - log.warn( - ioe, - "Failed to remove output directory [%s] for segment pulled from [%s]", - outDir.getAbsolutePath(), - ossCoords.toString() - ); - } - throw new SegmentLoadingException(e, e.getMessage()); - } - } - - @Override - public InputStream getInputStream(URI uri) throws IOException - { - try { - return buildFileObject(uri).openInputStream(); - } - catch (OSSException e) { - throw new IOE(e, "Could not load URI [%s]", uri); - } - } - - private FileObject buildFileObject(final URI uri) throws OSSException - { - final CloudObjectLocation coords = new CloudObjectLocation(OssUtils.checkURI(uri)); - final OSSObjectSummary objectSummary = - OssUtils.getSingleObjectSummary(client, coords.getBucket(), coords.getPath()); - final String path = uri.getPath(); - - return new FileObject() - { - OSSObject ossObject = null; - - @Override - public URI toUri() - { - return uri; - } - - @Override - public String getName() - { - final String ext = Files.getFileExtension(path); - return Files.getNameWithoutExtension(path) + (Strings.isNullOrEmpty(ext) ? "" : ("." + ext)); - } - - /** - * Returns an input stream for an OSS object. The returned input stream is not thread-safe. 
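
// Editorial sketch, not part of the patch: openInputStream() below lazily issues the
// full GET and then ties the OSS object's lifetime to the returned stream by closing
// both together. The same idiom with plain java.io, using a hypothetical Resource
// interface in place of OSSObject:

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

final class StreamLifetimeSketch
{
  interface Resource
  {
    InputStream content();

    void close() throws IOException;
  }

  static InputStream open(final Resource resource)
  {
    return new FilterInputStream(resource.content())
    {
      @Override
      public void close() throws IOException
      {
        // Close the wrapped stream first, then release the resource that backs it.
        try {
          super.close();
        }
        finally {
          resource.close();
        }
      }
    };
  }
}
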
- */ - @Override - public InputStream openInputStream() throws IOException - { - try { - if (ossObject == null) { - // lazily promote to full GET - ossObject = client.getObject(objectSummary.getBucketName(), objectSummary.getKey()); - } - - final InputStream in = ossObject.getObjectContent(); - final Closer closer = Closer.create(); - closer.register(in); - closer.register(ossObject); - - return new FilterInputStream(in) - { - @Override - public void close() throws IOException - { - closer.close(); - } - }; - } - catch (OSSException e) { - throw new IOE(e, "Could not load OSS URI [%s]", uri); - } - } - - @Override - public OutputStream openOutputStream() - { - throw new UOE("Cannot stream OSS output"); - } - - @Override - public Reader openReader(boolean ignoreEncodingErrors) - { - throw new UOE("Cannot open reader"); - } - - @Override - public CharSequence getCharContent(boolean ignoreEncodingErrors) - { - throw new UOE("Cannot open character sequence"); - } - - @Override - public Writer openWriter() - { - throw new UOE("Cannot open writer"); - } - - @Override - public long getLastModified() - { - return objectSummary.getLastModified().getTime(); - } - - @Override - public boolean delete() - { - throw new UOE("Cannot delete OSS items anonymously. jetS3t doesn't support authenticated deletes easily."); - } - }; - } - - @Override - public Predicate shouldRetryPredicate() - { - // Yay! smart retries! - return new Predicate() - { - @Override - public boolean apply(Throwable e) - { - if (e == null) { - return false; - } - if (e instanceof OSSException) { - return OssUtils.isServiceExceptionRecoverable((OSSException) e); - } - if (OssUtils.RETRYABLE.apply(e)) { - return true; - } - // Look all the way down the cause chain, just in case something wraps it deep. - return apply(e.getCause()); - } - }; - } - - /** - * Returns the "version" (aka last modified timestamp) of the URI - * - * @param uri The URI to check the last timestamp - * @return The time in ms of the last modification of the URI in String format - * @throws IOException - */ - @Override - public String getVersion(URI uri) throws IOException - { - try { - final CloudObjectLocation coords = new CloudObjectLocation(OssUtils.checkURI(uri)); - final OSSObjectSummary objectSummary = - OssUtils.getSingleObjectSummary(client, coords.getBucket(), coords.getPath()); - return StringUtils.format("%d", objectSummary.getLastModified().getTime()); - } - catch (OSSException e) { - if (OssUtils.isServiceExceptionRecoverable(e)) { - // The recoverable logic is always true for IOException, so we want to only pass IOException if it is recoverable - throw new IOE(e, "Could not fetch last modified timestamp from URI [%s]", uri); - } else { - throw new RE(e, "Error fetching last modified timestamp from URI [%s]", uri); - } - } - } - - private boolean isObjectInBucket(final CloudObjectLocation coords) throws SegmentLoadingException - { - try { - return OssUtils.retry( - () -> OssUtils.isObjectInBucketIgnoringPermission(client, coords.getBucket(), coords.getPath()) - ); - } - catch (OSSException | IOException e) { - throw new SegmentLoadingException(e, "fail! 
Key[%s]", coords); - } - catch (Exception e) { - throw new RuntimeException(e); - } - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPusher.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPusher.java deleted file mode 100644 index 3f2fd2fde62d..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssDataSegmentPusher.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.inject.Inject; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.emitter.EmittingLogger; -import org.apache.druid.segment.SegmentUtils; -import org.apache.druid.segment.loading.DataSegmentPusher; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.utils.CompressionUtils; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.util.List; -import java.util.Map; - -public class OssDataSegmentPusher implements DataSegmentPusher -{ - private static final EmittingLogger log = new EmittingLogger(OssDataSegmentPusher.class); - - private final OSS client; - private final OssStorageConfig config; - - @Inject - public OssDataSegmentPusher( - OSS client, - OssStorageConfig config - ) - { - this.client = client; - this.config = config; - } - - @Override - public String getPathForHadoop() - { - return StringUtils.format("%s/%s", config.getBucket(), config.getPrefix()); - } - - @Deprecated - @Override - public String getPathForHadoop(String dataSource) - { - return getPathForHadoop(); - } - - @Override - public List getAllowedPropertyPrefixesForHadoop() - { - return ImmutableList.of("druid.oss"); - } - - @Override - public DataSegment push(final File indexFilesDir, final DataSegment inSegment, final boolean useUniquePath) - throws IOException - { - final String path = OssUtils.constructSegmentPath(config.getPrefix(), getStorageDir(inSegment, useUniquePath)); - - log.debug("Copying segment[%s] to OSS at location[%s]", inSegment.getId(), path); - - final File zipOutFile = File.createTempFile("druid", "index.zip"); - final long indexSize = CompressionUtils.zip(indexFilesDir, zipOutFile); - - final DataSegment outSegment = inSegment.withSize(indexSize) - .withLoadSpec(makeLoadSpec(config.getBucket(), path)) - .withBinaryVersion(SegmentUtils.getVersionFromDir(indexFilesDir)); - - try { - return OssUtils.retry( - () -> { - OssUtils.uploadFileIfPossible(client, 
config.getBucket(), path, zipOutFile); - - return outSegment; - } - ); - } - catch (OSSException e) { - throw new IOException(e); - } - catch (Exception e) { - throw new RuntimeException(e); - } - finally { - log.debug("Deleting temporary cached index.zip"); - zipOutFile.delete(); - } - } - - @Override - public Map makeLoadSpec(URI finalIndexZipFilePath) - { - // remove the leading "/" - return makeLoadSpec(finalIndexZipFilePath.getHost(), finalIndexZipFilePath.getPath().substring(1)); - } - - private Map makeLoadSpec(String bucket, String key) - { - return ImmutableMap.of( - "type", - OssStorageDruidModule.SCHEME_ZIP, - "bucket", - bucket, - "key", - key - ); - } - -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssInputDataConfig.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssInputDataConfig.java deleted file mode 100644 index c2ef2dfb465b..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssInputDataConfig.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import javax.validation.constraints.Max; -import javax.validation.constraints.Min; - -/** - * Stores the configuration for options related to reading - * input data from aliyun OSS into Druid - */ -public class OssInputDataConfig -{ - /** - * The maximum number of input files matching a given prefix to retrieve - * from aliyun OSS at a time. - * valid range is [1,1000] - */ - @JsonProperty - @Min(1) - @Max(OssUtils.MAX_LISTING_LENGTH) - private int maxListingLength = OssUtils.MAX_LISTING_LENGTH; - - public void setMaxListingLength(int maxListingLength) - { - this.maxListingLength = maxListingLength; - } - - public int getMaxListingLength() - { - return maxListingLength; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssLoadSpec.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssLoadSpec.java deleted file mode 100644 index 155c26fbf3ca..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssLoadSpec.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.google.common.base.Preconditions; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.segment.loading.LoadSpec; -import org.apache.druid.segment.loading.SegmentLoadingException; - -import java.io.File; - -@JsonTypeName(OssStorageDruidModule.SCHEME_ZIP) -public class OssLoadSpec implements LoadSpec -{ - private final String bucket; - private final String key; - - private final OssDataSegmentPuller puller; - - @JsonCreator - public OssLoadSpec( - @JacksonInject OssDataSegmentPuller puller, - @JsonProperty(OssDataSegmentPuller.BUCKET) String bucket, - @JsonProperty(OssDataSegmentPuller.KEY) String key - ) - { - Preconditions.checkNotNull(bucket); - Preconditions.checkNotNull(key); - this.bucket = bucket; - this.key = key; - this.puller = puller; - } - - @Override - public LoadSpecResult loadSegment(File outDir) throws SegmentLoadingException - { - return new LoadSpecResult(puller.getSegmentFiles(new CloudObjectLocation(bucket, key), outDir).size()); - } - - @JsonProperty(OssDataSegmentPuller.BUCKET) - public String getBucket() - { - return bucket; - } - - @JsonProperty(OssDataSegmentPuller.KEY) - public String getKey() - { - return key; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssObjectSummaryIterator.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssObjectSummaryIterator.java deleted file mode 100644 index 8bba8961eeeb..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssObjectSummaryIterator.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import org.apache.druid.java.util.common.RE; - -import java.net.URI; -import java.util.Iterator; -import java.util.NoSuchElementException; - -/** - * Iterator class used by {@link OssUtils#objectSummaryIterator}. - *

- * As required by the specification of that method, this iterator is computed incrementally in batches of - * {@code maxListLength}. The first call is made at the same time the iterator is constructed. - * - */ -public class OssObjectSummaryIterator implements Iterator -{ - private final OSS client; - private final Iterator prefixesIterator; - private final int maxListingLength; - - private ListObjectsRequest request; - private ObjectListing result; - private Iterator objectSummaryIterator; - private OSSObjectSummary currentObjectSummary; - - OssObjectSummaryIterator( - final OSS client, - final Iterable prefixes, - final int maxListingLength - ) - { - this.client = client; - this.prefixesIterator = prefixes.iterator(); - this.maxListingLength = Math.min(OssUtils.MAX_LISTING_LENGTH, maxListingLength); - - prepareNextRequest(); - fetchNextBatch(); - advanceObjectSummary(); - } - - @Override - public boolean hasNext() - { - return currentObjectSummary != null; - } - - @Override - public OSSObjectSummary next() - { - if (currentObjectSummary == null) { - throw new NoSuchElementException(); - } - - final OSSObjectSummary retVal = currentObjectSummary; - advanceObjectSummary(); - return retVal; - } - - private void prepareNextRequest() - { - final URI currentUri = prefixesIterator.next(); - final String currentBucket = currentUri.getAuthority(); - final String currentPrefix = OssUtils.extractKey(currentUri); - - request = new ListObjectsRequest(currentBucket, currentPrefix, null, null, maxListingLength); - } - - private void fetchNextBatch() - { - try { - result = OssUtils.retry(() -> client.listObjects(request)); - request.setMarker(result.getNextMarker()); - objectSummaryIterator = result.getObjectSummaries().iterator(); - } - catch (OSSException e) { - throw new RE( - e, - "Failed to get object summaries from aliyun OSS bucket[%s], prefix[%s]; error: %s", - request.getBucketName(), - request.getPrefix(), - e.getMessage() - ); - } - catch (Exception e) { - throw new RE( - e, - "Failed to get object summaries from aliyun OSS bucket[%s], prefix[%s]", - request.getBucketName(), - request.getPrefix() - ); - } - } - - /** - * Advance objectSummaryIterator to the next non-placeholder, updating "currentObjectSummary". - */ - private void advanceObjectSummary() - { - while (objectSummaryIterator.hasNext() || result.isTruncated() || prefixesIterator.hasNext()) { - while (objectSummaryIterator.hasNext()) { - currentObjectSummary = objectSummaryIterator.next(); - // skips directories and empty objects - if (currentObjectSummary.getSize() > 0 && !isDirectory(currentObjectSummary)) { - return; - } - } - - // Exhausted "objectSummaryIterator" without finding a non-placeholder. - if (result.isTruncated()) { - fetchNextBatch(); - } else if (prefixesIterator.hasNext()) { - prepareNextRequest(); - fetchNextBatch(); - } - } - - // Truly nothing left to read. - currentObjectSummary = null; - } - - /** - * Checks if a given object is a directory placeholder and should be ignored. - * - * Based on {@link org.apache.druid.storage.s3.ObjectSummaryIterator} which is adapted from org.jets3t.service.model.StorageObject.isDirectoryPlaceholder(). 
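
// Editorial sketch, not part of the patch: the iterator above skips zero-length
// objects, and isDirectory() just below treats a zero-length key ending in "/" as a
// directory placeholder. The combined filter, reduced to a standalone check:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

final class PlaceholderFilterSketch
{
  static boolean isDirectoryPlaceholder(String key, long size)
  {
    return size == 0 && key.endsWith("/");
  }

  public static void main(String[] args)
  {
    Map<String, Long> listing = Map.of("prefix/", 0L, "prefix/file.csv", 42L, "prefix/empty", 0L);
    // Keep only real, non-empty objects, mirroring advanceObjectSummary() above.
    List<String> kept = listing.entrySet().stream()
        .filter(e -> e.getValue() > 0 && !isDirectoryPlaceholder(e.getKey(), e.getValue()))
        .map(Map.Entry::getKey)
        .collect(Collectors.toList());
    System.out.println(kept); // [prefix/file.csv]
  }
}
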
- * - */ - private static boolean isDirectory(final OSSObjectSummary objectSummary) - { - return objectSummary.getSize() == 0 && objectSummary.getKey().endsWith("/"); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageConfig.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageConfig.java deleted file mode 100644 index d3edcd6105c6..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageConfig.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.fasterxml.jackson.annotation.JsonProperty; - -public class OssStorageConfig -{ - @JsonProperty - private String bucket = ""; - - @JsonProperty - private String prefix = ""; - - public void setBucket(String bucket) - { - this.bucket = bucket; - } - public void setPrefix(String prefix) - { - this.prefix = prefix; - } - - public String getBucket() - { - return bucket; - } - - public String getPrefix() - { - return prefix; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageDruidModule.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageDruidModule.java deleted file mode 100644 index d682bbac8232..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssStorageDruidModule.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.fasterxml.jackson.core.Version; -import com.fasterxml.jackson.databind.Module; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.google.inject.Provides; -import com.google.inject.multibindings.MapBinder; -import org.apache.druid.data.SearchableVersionedDataFinder; -import org.apache.druid.data.input.aliyun.OssClientConfig; -import org.apache.druid.guice.Binders; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.LazySingleton; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -public class OssStorageDruidModule implements DruidModule -{ - public static final String SCHEME = "oss"; - public static final String SCHEME_ZIP = "oss_zip"; - - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new Module() - { - @Override - public String getModuleName() - { - return "DruidAliyunOss-" + System.identityHashCode(this); - } - - @Override - public Version version() - { - return Version.unknownVersion(); - } - - @Override - public void setupModule(SetupContext context) - { - context.registerSubtypes(OssLoadSpec.class); - } - } - ); - } - - @Override - public void configure(Binder binder) - { - MapBinder.newMapBinder(binder, String.class, SearchableVersionedDataFinder.class) - .addBinding(SCHEME) - .to(OssTimestampVersionedDataFinder.class) - .in(LazySingleton.class); - Binders.dataSegmentKillerBinder(binder) - .addBinding(SCHEME_ZIP) - .to(OssDataSegmentKiller.class) - .in(LazySingleton.class); - Binders.dataSegmentMoverBinder(binder) - .addBinding(SCHEME_ZIP) - .to(OssDataSegmentMover.class) - .in(LazySingleton.class); - Binders.dataSegmentArchiverBinder(binder) - .addBinding(SCHEME_ZIP) - .to(OssDataSegmentArchiver.class) - .in(LazySingleton.class); - Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(OssDataSegmentPusher.class).in(LazySingleton.class); - JsonConfigProvider.bind(binder, "druid.oss", OssClientConfig.class); - JsonConfigProvider.bind(binder, "druid.storage.oss", OssInputDataConfig.class); - JsonConfigProvider.bind(binder, "druid.storage.oss", OssStorageConfig.class); - JsonConfigProvider.bind(binder, "druid.storage.oss", OssDataSegmentArchiverConfig.class); - - Binders.taskLogsBinder(binder).addBinding(SCHEME).to(OssTaskLogs.class); - JsonConfigProvider.bind(binder, "druid.indexer.logs.oss", OssTaskLogsConfig.class); - binder.bind(OssTaskLogs.class).in(LazySingleton.class); - } - - @Provides - @LazySingleton - public OSS initializeOssClient(OssClientConfig inputSourceConfig) - { - return inputSourceConfig.buildClient(); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogs.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogs.java deleted file mode 100644 index 515d85096e03..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogs.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.GetObjectRequest; -import com.aliyun.oss.model.ObjectMetadata; -import com.google.common.base.Optional; -import com.google.common.base.Throwables; -import com.google.common.io.ByteSource; -import com.google.inject.Inject; -import org.apache.druid.common.utils.CurrentTimeMillisSupplier; -import org.apache.druid.java.util.common.IOE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.tasklogs.TaskLogs; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; -import java.util.Date; - -/** - * Provides task logs archived in aliyun OSS - */ -public class OssTaskLogs implements TaskLogs -{ - private static final Logger log = new Logger(OssTaskLogs.class); - - private final OSS client; - private final OssTaskLogsConfig config; - private final OssInputDataConfig inputDataConfig; - private final CurrentTimeMillisSupplier timeSupplier; - - @Inject - public OssTaskLogs( - OSS service, - OssTaskLogsConfig config, - OssInputDataConfig inputDataConfig, - CurrentTimeMillisSupplier timeSupplier - ) - { - this.client = service; - this.config = config; - this.inputDataConfig = inputDataConfig; - this.timeSupplier = timeSupplier; - } - - @Override - public Optional streamTaskLog(final String taskid, final long offset) throws IOException - { - final String taskKey = getTaskLogKey(taskid, "log"); - return streamTaskFile(offset, taskKey); - } - - @Override - public Optional streamTaskReports(String taskid) throws IOException - { - final String taskKey = getTaskLogKey(taskid, "report.json"); - return streamTaskFile(0, taskKey); - } - - private Optional streamTaskFile(final long offset, String taskKey) throws IOException - { - try { - final ObjectMetadata objectMetadata = client.getObjectMetadata(config.getBucket(), taskKey); - - return Optional.of( - new ByteSource() - { - @Override - public InputStream openStream() throws IOException - { - try { - final long start; - final long end = objectMetadata.getContentLength() - 1; - - if (offset > 0 && offset < objectMetadata.getContentLength()) { - start = offset; - } else if (offset < 0 && (-1 * offset) < objectMetadata.getContentLength()) { - start = objectMetadata.getContentLength() + offset; - } else { - start = 0; - } - - final GetObjectRequest request = new GetObjectRequest(config.getBucket(), taskKey); - request.setMatchingETagConstraints(Collections.singletonList(objectMetadata.getETag())); - request.setRange(start, end); - - return client.getObject(request).getObjectContent(); - } - catch (OSSException e) { - throw new IOException(e); - } - } - } - ); - } - catch (OSSException e) { - if ("NoSuchKey".equals(e.getErrorCode()) - || "NoSuchBucket".equals(e.getErrorCode())) { - return Optional.absent(); - } else { - throw new IOE(e, "Failed to stream logs from: %s", taskKey); - } - } - } - - @Override - public void pushTaskLog(final String taskid, final File 
logFile) throws IOException - { - final String taskKey = getTaskLogKey(taskid, "log"); - log.info("Pushing task log %s to: %s", logFile, taskKey); - pushTaskFile(logFile, taskKey); - } - - @Override - public void pushTaskReports(String taskid, File reportFile) throws IOException - { - final String taskKey = getTaskLogKey(taskid, "report.json"); - log.info("Pushing task reports %s to: %s", reportFile, taskKey); - pushTaskFile(reportFile, taskKey); - } - - private void pushTaskFile(final File logFile, String taskKey) throws IOException - { - try { - OssUtils.retry( - () -> { - OssUtils.uploadFileIfPossible(client, config.getBucket(), taskKey, logFile); - return null; - } - ); - } - catch (Exception e) { - Throwables.propagateIfInstanceOf(e, IOException.class); - throw new RuntimeException(e); - } - } - - String getTaskLogKey(String taskid, String filename) - { - return StringUtils.format("%s/%s/%s", config.getPrefix(), taskid, filename); - } - - @Override - public void killAll() throws IOException - { - log.info( - "Deleting all task logs from aliyun OSS location [bucket: '%s' prefix: '%s'].", - config.getBucket(), - config.getPrefix() - ); - - long now = timeSupplier.getAsLong(); - killOlderThan(now); - } - - @Override - public void killOlderThan(long timestamp) throws IOException - { - log.info( - "Deleting all task logs from aliyun OSS location [bucket: '%s' prefix: '%s'] older than %s.", - config.getBucket(), - config.getPrefix(), - new Date(timestamp) - ); - try { - OssUtils.deleteObjectsInPath( - client, - inputDataConfig, - config.getBucket(), - config.getPrefix(), - (object) -> object.getLastModified().getTime() < timestamp - ); - } - catch (Exception e) { - log.error("Error occurred while deleting task log files from aliyun OSS. Error: %s", e.getMessage()); - throw new IOException(e); - } - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogsConfig.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogsConfig.java deleted file mode 100644 index 3a3e5c37c1b8..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTaskLogsConfig.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.annotations.VisibleForTesting; - -import javax.validation.constraints.NotNull; - -public class OssTaskLogsConfig -{ - @JsonProperty - @NotNull - private String bucket = null; - - @JsonProperty - @NotNull - private String prefix = null; - - @JsonProperty - private boolean disableAcl = false; - - @VisibleForTesting - void setDisableAcl(boolean disableAcl) - { - this.disableAcl = disableAcl; - } - - public String getBucket() - { - return bucket; - } - - @VisibleForTesting - void setBucket(String bucket) - { - this.bucket = bucket; - } - - public String getPrefix() - { - return prefix; - } - - @VisibleForTesting - void setPrefix(String prefix) - { - this.prefix = prefix; - } - - public boolean getDisableAcl() - { - return disableAcl; - } - -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinder.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinder.java deleted file mode 100644 index 0b3f708477de..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinder.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.OSSObjectSummary; -import com.google.inject.Inject; -import org.apache.druid.data.SearchableVersionedDataFinder; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.java.util.common.StringUtils; - -import javax.annotation.Nullable; -import java.net.URI; -import java.util.Collections; -import java.util.Iterator; -import java.util.regex.Pattern; - -public class OssTimestampVersionedDataFinder extends OssDataSegmentPuller implements SearchableVersionedDataFinder -{ - @Inject - public OssTimestampVersionedDataFinder(OSS client) - { - super(client); - } - - /** - * Gets the key with the most recently modified timestamp. - * `pattern` is evaluated against the entire key AFTER the path given in `uri`. - * The substring `pattern` is matched against will have a leading `/` removed. - * For example `oss://some_bucket/some_prefix/some_key` with a URI of `oss://some_bucket/some_prefix` will match against `some_key`. 
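
// Editorial sketch, not part of the patch: getLatestVersion() below matches the
// pattern against the portion of each key after the prefix path, with at most one
// leading "/" removed. The key derivation in isolation, with the behavior of
// StringUtils.maybeRemoveLeadingSlash inlined:

import java.util.regex.Pattern;

final class KeyMatchSketch
{
  static String relativeKey(String prefixPath, String objectPath)
  {
    String rest = objectPath.substring(prefixPath.length());
    // Drop at most one leading "/" before matching.
    return rest.startsWith("/") ? rest.substring(1) : rest;
  }

  public static void main(String[] args)
  {
    Pattern pattern = Pattern.compile(".*\\.csv");
    // oss://some_bucket/some_prefix/some_key is matched as "some_key".
    System.out.println(pattern.matcher(relativeKey("some_prefix", "some_prefix/latest.csv")).matches());  // true
    System.out.println(pattern.matcher(relativeKey("some_prefix", "some_prefix/old.json")).matches());    // false
  }
}
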
- * `oss://some_bucket/some_prefixsome_key` with a URI of `oss://some_bucket/some_prefix` will match against `some_key` - * `oss://some_bucket/some_prefix//some_key` with a URI of `oss://some_bucket/some_prefix` will match against `/some_key` - * - * @param uri The URI of in the form of `oss://some_bucket/some_key` - * @param pattern The pattern matcher to determine if a *key* is of interest, or `null` to match everything. - * @return A URI to the most recently modified object which matched the pattern. - */ - @Override - public URI getLatestVersion(final URI uri, final @Nullable Pattern pattern) - { - try { - final CloudObjectLocation coords = new CloudObjectLocation(OssUtils.checkURI(uri)); - long mostRecent = Long.MIN_VALUE; - URI latest = null; - final Iterator objectSummaryIterator = OssUtils.objectSummaryIterator( - client, - Collections.singletonList(uri), - OssUtils.MAX_LISTING_LENGTH - ); - while (objectSummaryIterator.hasNext()) { - final OSSObjectSummary objectSummary = objectSummaryIterator.next(); - final CloudObjectLocation objectLocation = OssUtils.summaryToCloudObjectLocation(objectSummary); - // remove coords path prefix from object path - String keyString = StringUtils.maybeRemoveLeadingSlash( - objectLocation.getPath().substring(coords.getPath().length()) - ); - if (pattern != null && !pattern.matcher(keyString).matches()) { - continue; - } - final long latestModified = objectSummary.getLastModified().getTime(); - if (latestModified >= mostRecent) { - mostRecent = latestModified; - latest = objectLocation.toUri(OssStorageDruidModule.SCHEME); - } - } - return latest; - } - catch (Exception e) { - throw new RuntimeException(e); - } - } - -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java b/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java deleted file mode 100644 index 1a707c785570..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/java/org/apache/druid/storage/aliyun/OssUtils.java +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.DeleteObjectsRequest; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import com.aliyun.oss.model.PutObjectRequest; -import com.google.common.base.Joiner; -import com.google.common.base.Predicate; -import com.google.common.collect.ImmutableList; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.RetryUtils; -import org.apache.druid.java.util.common.RetryUtils.Task; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -public class OssUtils -{ - private static final String SCHEME = OssStorageDruidModule.SCHEME; - private static final Joiner JOINER = Joiner.on("/").skipNulls(); - private static final Logger log = new Logger(OssUtils.class); - public static final int MAX_LISTING_LENGTH = 1000; //limited by Aliyun OSS SDK - - - static boolean isServiceExceptionRecoverable(OSSException ex) - { - final boolean isIOException = ex.getCause() instanceof IOException; - final boolean isTimeout = "RequestTimeout".equals(ex.getErrorCode()); - final boolean badStatusCode = false; //ex. == 400 || ex.getStatusCode() == 403 || ex.getStatusCode() == 404; - return !badStatusCode && (isIOException || isTimeout); - } - - public static final Predicate RETRYABLE = new Predicate() - { - @Override - public boolean apply(Throwable e) - { - if (e == null) { - return false; - } else if (e instanceof IOException) { - return true; - } else if (e instanceof OSSException) { - return isServiceExceptionRecoverable((OSSException) e); - } else { - return apply(e.getCause()); - } - } - }; - - /** - * Retries aliyun OSS operations that fail due to io-related exceptions. Service-level exceptions (access denied, file not - * found, etc) are not retried. - */ - static T retry(Task f) throws Exception - { - return RetryUtils.retry(f, RETRYABLE, RetryUtils.DEFAULT_MAX_TRIES); - } - - static boolean isObjectInBucketIgnoringPermission( - OSS client, - String bucketName, - String objectKey - ) - { - try { - return client.doesObjectExist(bucketName, objectKey); - } - catch (OSSException e) { - if (e.getErrorCode().equals("NoSuchKey")) { - // Object is inaccessible to current user, but does exist. - return true; - } - // Something else has gone wrong - throw e; - } - } - - /** - * Create an iterator over a set of aliyun OSS objects specified by a set of prefixes. - *

- * For each provided prefix URI, the iterator will walk through all objects that are in the same bucket as the - * provided URI and whose keys start with that URI's path, except for directory placeholders (which will be - * ignored). The iterator is computed incrementally by calling {@link OSS#listObjects} for - * each prefix in batches of {@param maxListingLength}. The first call is made at the same time the iterator is - * constructed. - */ - public static Iterator objectSummaryIterator( - final OSS client, - final Iterable prefixes, - final int maxListingLength - ) - { - return new OssObjectSummaryIterator(client, prefixes, maxListingLength); - } - - /** - * Create an {@link URI} from the given {@link OSSObjectSummary}. The result URI is composed as below. - * - *

-   * {@code oss://{BUCKET_NAME}/{OBJECT_KEY}}
-   * </pre>
- */ - public static URI summaryToUri(OSSObjectSummary object) - { - return summaryToCloudObjectLocation(object).toUri(SCHEME); - } - - public static CloudObjectLocation summaryToCloudObjectLocation(OSSObjectSummary object) - { - return new CloudObjectLocation(object.getBucketName(), object.getKey()); - } - - static String constructSegmentPath(String baseKey, String storageDir) - { - return JOINER.join( - baseKey.isEmpty() ? null : baseKey, - storageDir - ) + "/index.zip"; - } - - public static String extractKey(URI uri) - { - return StringUtils.maybeRemoveLeadingSlash(uri.getPath()); - } - - public static URI checkURI(URI uri) - { - if (uri.getScheme().equalsIgnoreCase(OssStorageDruidModule.SCHEME_ZIP)) { - uri = URI.create(SCHEME + uri.toString().substring(OssStorageDruidModule.SCHEME_ZIP.length())); - } - return CloudObjectLocation.validateUriScheme(SCHEME, uri); - } - - /** - * Gets a single {@link OSSObjectSummary} from aliyun OSS. Since this method might return a wrong object if there are multiple - * objects that match the given key, this method should be used only when it's guaranteed that the given key is unique - * in the given bucket. - * - * @param client aliyun OSS client - * @param bucket aliyun OSS bucket - * @param key unique key for the object to be retrieved - */ - public static OSSObjectSummary getSingleObjectSummary(OSS client, String bucket, String key) - { - final ListObjectsRequest request = new ListObjectsRequest(); - request.setBucketName(bucket); - request.setPrefix(key); - request.setMaxKeys(1); - final ObjectListing result = client.listObjects(request); - - // Using getObjectSummaries().size() instead of getKeyCount as, in some cases - // it is observed that even though the getObjectSummaries returns some data - // keyCount is still zero. - if (result.getObjectSummaries().size() == 0) { - throw new ISE("Cannot find object for bucket[%s] and key[%s]", bucket, key); - } - final OSSObjectSummary objectSummary = result.getObjectSummaries().get(0); - if (!objectSummary.getBucketName().equals(bucket) || !objectSummary.getKey().equals(key)) { - throw new ISE("Wrong object[%s] for bucket[%s] and key[%s]", objectSummary, bucket, key); - } - - return objectSummary; - } - - /** - * Delete the files from aliyun OSS in a specified bucket, matching a specified prefix and filter - * - * @param client aliyun OSS client - * @param config specifies the configuration to use when finding matching files in aliyun OSS to delete - * @param bucket aliyun OSS bucket - * @param prefix the file prefix - * @param filter function which returns true if the prefix file found should be deleted and false otherwise. 
- * @throws Exception - */ - public static void deleteObjectsInPath( - OSS client, - OssInputDataConfig config, - String bucket, - String prefix, - Predicate filter - ) - throws Exception - { - final List keysToDelete = new ArrayList<>(config.getMaxListingLength()); - final OssObjectSummaryIterator iterator = new OssObjectSummaryIterator( - client, - ImmutableList.of(new CloudObjectLocation(bucket, prefix).toUri("http")), - config.getMaxListingLength() - ); - - while (iterator.hasNext()) { - final OSSObjectSummary nextObject = iterator.next(); - if (filter.apply(nextObject)) { - keysToDelete.add(nextObject.getKey()); - if (keysToDelete.size() == config.getMaxListingLength()) { - deleteBucketKeys(client, bucket, keysToDelete); - log.info("Deleted %d files", keysToDelete.size()); - keysToDelete.clear(); - } - } - } - - if (keysToDelete.size() > 0) { - deleteBucketKeys(client, bucket, keysToDelete); - log.info("Deleted %d files", keysToDelete.size()); - } - } - - private static void deleteBucketKeys( - OSS client, - String bucket, - List keysToDelete - ) - throws Exception - { - DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest(bucket).withKeys(keysToDelete); - OssUtils.retry(() -> { - client.deleteObjects(deleteRequest); - return null; - }); - } - - /** - * Uploads a file to aliyun OSS if possible. First trying to set ACL to give the bucket owner full control of the file before uploading. - * - * @param client aliyun OSS client - * @param key The key under which to store the new object. - * @param file The path of the file to upload to aliyun OSS. - */ - static void uploadFileIfPossible( - OSS client, - String bucket, - String key, - File file - ) - { - final PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, file); - - log.info("Pushing [%s] to bucket[%s] and key[%s].", file, bucket, key); - client.putObject(putObjectRequest); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/aliyun-oss-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 3d434e7c9021..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -org.apache.druid.storage.aliyun.OssStorageDruidModule -org.apache.druid.firehose.aliyun.OssFirehoseDruidModule -org.apache.druid.data.input.aliyun.OssInputSourceDruidModule \ No newline at end of file diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/data/input/aliyun/OssInputSourceTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/data/input/aliyun/OssInputSourceTest.java deleted file mode 100644 index 2bd9d5816acc..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/data/input/aliyun/OssInputSourceTest.java +++ /dev/null @@ -1,660 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.data.input.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSClient; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.GetObjectRequest; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObject; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.fasterxml.jackson.module.guice.ObjectMapperModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.Provides; -import org.apache.druid.data.input.InputRow; -import org.apache.druid.data.input.InputRowSchema; -import org.apache.druid.data.input.InputSourceReader; -import org.apache.druid.data.input.InputSplit; -import org.apache.druid.data.input.MaxSizeSplitHintSpec; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.data.input.impl.CsvInputFormat; -import org.apache.druid.data.input.impl.DimensionsSpec; -import org.apache.druid.data.input.impl.JsonInputFormat; -import org.apache.druid.data.input.impl.TimestampSpec; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.parsers.CloseableIterator; -import org.apache.druid.java.util.common.parsers.JSONPathSpec; -import org.apache.druid.metadata.DefaultPasswordProvider; -import org.apache.druid.storage.aliyun.OssInputDataConfig; -import org.apache.druid.storage.aliyun.OssUtils; -import org.apache.druid.testing.InitializedNullHandlingTest; -import 
org.apache.druid.utils.CompressionUtils; -import org.easymock.EasyMock; -import org.easymock.IArgumentMatcher; -import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.internal.matchers.ThrowableMessageMatcher; -import org.junit.rules.ExpectedException; -import org.junit.rules.TemporaryFolder; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.net.URI; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class OssInputSourceTest extends InitializedNullHandlingTest -{ - private static final ObjectMapper MAPPER = createObjectMapper(); - private static final OSS OSSCLIENT = EasyMock.createMock(OSSClient.class); - private static final OssInputDataConfig INPUT_DATA_CONFIG; - private static final int MAX_LISTING_LENGTH = 10; - - private static final List EXPECTED_URIS = Arrays.asList( - URI.create("oss://foo/bar/file.csv"), - URI.create("oss://bar/foo/file2.csv") - ); - - private static final List EXPECTED_COMPRESSED_URIS = Arrays.asList( - URI.create("oss://foo/bar/file.csv.gz"), - URI.create("oss://bar/foo/file2.csv.gz") - ); - - private static final List> EXPECTED_COORDS = - EXPECTED_URIS.stream() - .map(uri -> Collections.singletonList(new CloudObjectLocation(uri))) - .collect(Collectors.toList()); - - private static final List PREFIXES = Arrays.asList( - URI.create("oss://foo/bar"), - URI.create("oss://bar/foo") - ); - - private static final OssClientConfig CLOUD_CONFIG_PROPERTIES = new OssClientConfig( - "test.oss-cn.aliyun.com", - new DefaultPasswordProvider("myKey"), - new DefaultPasswordProvider("mySecret")); - - private static final List EXPECTED_LOCATION = - ImmutableList.of(new CloudObjectLocation("foo", "bar/file.csv")); - - private static final DateTime NOW = DateTimes.nowUtc(); - private static final byte[] CONTENT = - StringUtils.toUtf8(StringUtils.format("%d,hello,world", NOW.getMillis())); - - static { - INPUT_DATA_CONFIG = new OssInputDataConfig(); - INPUT_DATA_CONFIG.setMaxListingLength(MAX_LISTING_LENGTH); - } - - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testSerdeWithUris() throws Exception - { - final OssInputSource withUris = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - EXPECTED_URIS, - null, - null, - null - ); - final OssInputSource serdeWithUris = MAPPER.readValue(MAPPER.writeValueAsString(withUris), OssInputSource.class); - Assert.assertEquals(withUris, serdeWithUris); - } - - @Test - public void testSerdeWithPrefixes() throws Exception - { - final OssInputSource withPrefixes = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - PREFIXES, - null, - null - ); - final OssInputSource serdeWithPrefixes = - MAPPER.readValue(MAPPER.writeValueAsString(withPrefixes), OssInputSource.class); - Assert.assertEquals(withPrefixes, serdeWithPrefixes); - } - - @Test - public void testSerdeWithObjects() throws Exception - { - final OssInputSource withPrefixes = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - null, - EXPECTED_LOCATION, - null - ); - final OssInputSource serdeWithPrefixes = - MAPPER.readValue(MAPPER.writeValueAsString(withPrefixes), OssInputSource.class); - Assert.assertEquals(withPrefixes, serdeWithPrefixes); - } - - 
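
// Editorial sketch, not part of the patch: the testSerdeWith* cases above all rely on
// the same Jackson round-trip idiom. Reduced to its core with a hypothetical Config
// bean standing in for OssInputSource:

import com.fasterxml.jackson.databind.ObjectMapper;

final class SerdeRoundTripSketch
{
  public static class Config
  {
    public String bucket;
    public int maxListingLength;
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    Config original = new Config();
    original.bucket = "foo";
    original.maxListingLength = 10;

    // Serialize, deserialize, and compare, as the tests assert round-trip equality.
    Config roundTripped = mapper.readValue(mapper.writeValueAsString(original), Config.class);
    System.out.println(original.bucket.equals(roundTripped.bucket)
                       && original.maxListingLength == roundTripped.maxListingLength);
  }
}
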
@Test - public void testInputSourceUseDefaultPasswordWhenCloudConfigPropertiesWithoutCrediential() - { - OssClientConfig mockConfigPropertiesWithoutKeyAndSecret = EasyMock.createMock(OssClientConfig.class); - EasyMock.reset(mockConfigPropertiesWithoutKeyAndSecret); - EasyMock.expect(mockConfigPropertiesWithoutKeyAndSecret.isCredentialsConfigured()) - .andStubReturn(false); - EasyMock.expect(mockConfigPropertiesWithoutKeyAndSecret.buildClient()) - .andReturn(OSSCLIENT); - EasyMock.replay(mockConfigPropertiesWithoutKeyAndSecret); - final OssInputSource withPrefixes = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - null, - EXPECTED_LOCATION, - mockConfigPropertiesWithoutKeyAndSecret - ); - Assert.assertNotNull(withPrefixes); - - withPrefixes.createEntity(new CloudObjectLocation("bucket", "path")); - EasyMock.verify(mockConfigPropertiesWithoutKeyAndSecret); - } - - @Test - public void testSerdeOssClientLazyInitializedWithCrediential() throws Exception - { - OssClientConfig clientConfig = EasyMock.createMock(OssClientConfig.class); - EasyMock.replay(clientConfig); - final OssInputSource withPrefixes = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - null, - EXPECTED_LOCATION, - CLOUD_CONFIG_PROPERTIES - ); - final OssInputSource serdeWithPrefixes = - MAPPER.readValue(MAPPER.writeValueAsString(withPrefixes), OssInputSource.class); - Assert.assertEquals(withPrefixes, serdeWithPrefixes); - EasyMock.verify(clientConfig); - } - - @Test - public void testSerdeOssClientLazyInitializedWithoutCrediential() throws Exception - { - OssClientConfig clientConfig = EasyMock.createMock(OssClientConfig.class); - EasyMock.replay(clientConfig); - final OssInputSource withPrefixes = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - null, - EXPECTED_LOCATION, - null - ); - final OssInputSource serdeWithPrefixes = - MAPPER.readValue(MAPPER.writeValueAsString(withPrefixes), OssInputSource.class); - Assert.assertEquals(withPrefixes, serdeWithPrefixes); - EasyMock.verify(clientConfig); - } - - @Test - public void testSerdeWithExtraEmptyLists() throws Exception - { - final OssInputSource withPrefixes = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - ImmutableList.of(), - ImmutableList.of(), - EXPECTED_LOCATION, - null - ); - final OssInputSource serdeWithPrefixes = - MAPPER.readValue(MAPPER.writeValueAsString(withPrefixes), OssInputSource.class); - Assert.assertEquals(withPrefixes, serdeWithPrefixes); - } - - @Test - public void testSerdeWithInvalidArgs() - { - expectedException.expect(IllegalArgumentException.class); - // constructor will explode - new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - EXPECTED_URIS, - PREFIXES, - EXPECTED_LOCATION, - null - ); - } - - @Test - public void testSerdeWithOtherInvalidArgs() - { - expectedException.expect(IllegalArgumentException.class); - // constructor will explode - new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - EXPECTED_URIS, - PREFIXES, - ImmutableList.of(), - null - ); - } - - @Test - public void testSerdeWithOtherOtherInvalidArgs() - { - expectedException.expect(IllegalArgumentException.class); - // constructor will explode - new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - ImmutableList.of(), - PREFIXES, - EXPECTED_LOCATION, - null - ); - } - - @Test - public void testWithUrisSplit() - { - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - EXPECTED_URIS, - null, - null, - null - ); - - Stream>> splits = inputSource.createSplits( - new 
JsonInputFormat(JSONPathSpec.DEFAULT, null, null), - null - ); - - Assert.assertEquals(EXPECTED_COORDS, splits.map(InputSplit::get).collect(Collectors.toList())); - } - - @Test - public void testWithPrefixesSplit() - { - EasyMock.reset(OSSCLIENT); - expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT); - expectListObjects(PREFIXES.get(1), ImmutableList.of(EXPECTED_URIS.get(1)), CONTENT); - EasyMock.replay(OSSCLIENT); - - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - PREFIXES, - null, - null - ); - - Stream>> splits = inputSource.createSplits( - new JsonInputFormat(JSONPathSpec.DEFAULT, null, null), - new MaxSizeSplitHintSpec(1L) // set maxSplitSize to 1 so that each inputSplit has only one object - ); - - Assert.assertEquals(EXPECTED_COORDS, splits.map(InputSplit::get).collect(Collectors.toList())); - EasyMock.verify(OSSCLIENT); - } - - @Test - public void testCreateSplitsWithSplitHintSpecRespectingHint() - { - EasyMock.reset(OSSCLIENT); - expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT); - expectListObjects(PREFIXES.get(1), ImmutableList.of(EXPECTED_URIS.get(1)), CONTENT); - EasyMock.replay(OSSCLIENT); - - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - PREFIXES, - null, - null - ); - - Stream>> splits = inputSource.createSplits( - new JsonInputFormat(JSONPathSpec.DEFAULT, null, null), - new MaxSizeSplitHintSpec(CONTENT.length * 3L) - ); - - Assert.assertEquals( - ImmutableList.of(EXPECTED_URIS.stream().map(CloudObjectLocation::new).collect(Collectors.toList())), - splits.map(InputSplit::get).collect(Collectors.toList()) - ); - EasyMock.verify(OSSCLIENT); - } - - @Test - public void testCreateSplitsWithEmptyObjectsIteratingOnlyNonEmptyObjects() - { - EasyMock.reset(OSSCLIENT); - expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT); - expectListObjects(PREFIXES.get(1), ImmutableList.of(EXPECTED_URIS.get(1)), new byte[0]); - EasyMock.replay(OSSCLIENT); - - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - PREFIXES, - null, - null - ); - - Stream>> splits = inputSource.createSplits( - new JsonInputFormat(JSONPathSpec.DEFAULT, null, null), - null - ); - Assert.assertEquals( - ImmutableList.of(ImmutableList.of(new CloudObjectLocation(EXPECTED_URIS.get(0)))), - splits.map(InputSplit::get).collect(Collectors.toList()) - ); - EasyMock.verify(OSSCLIENT); - } - - @Test - public void testAccessDeniedWhileListingPrefix() - { - EasyMock.reset(OSSCLIENT); - expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT); - expectListObjectsAndThrowAccessDenied(EXPECTED_URIS.get(1)); - EasyMock.replay(OSSCLIENT); - - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - ImmutableList.of(PREFIXES.get(0), EXPECTED_URIS.get(1)), - null, - null - ); - - expectedException.expectMessage("Failed to get object summaries from aliyun OSS bucket[bar], prefix[foo/file2.csv]"); - expectedException.expectCause( - ThrowableMessageMatcher.hasMessage(CoreMatchers.containsString("can't list that bucket")) - ); - - inputSource.createSplits( - new JsonInputFormat(JSONPathSpec.DEFAULT, null, null), - null - ).collect(Collectors.toList()); - } - - @Test - public void testReader() throws IOException - { - EasyMock.reset(OSSCLIENT); - expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT); - 
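An aside on the two split tests above: MaxSizeSplitHintSpec(1L) forces each object into its own split, while a budget of CONTENT.length * 3 lets both objects share one. The underlying idea is greedy packing by cumulative size. The sketch below is a standalone illustration of that grouping idea only, not Druid's actual MaxSizeSplitHintSpec implementation.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SplitHintSketch
    {
      // Greedy size-based grouping: start a new split once adding the next
      // object would exceed the byte budget. An oversized first object still
      // gets its own split, so every object is assigned somewhere.
      static List<List<Long>> group(List<Long> sizes, long maxBytes)
      {
        List<List<Long>> splits = new ArrayList<>();
        List<Long> current = new ArrayList<>();
        long used = 0;
        for (long size : sizes) {
          if (!current.isEmpty() && used + size > maxBytes) {
            splits.add(current);
            current = new ArrayList<>();
            used = 0;
          }
          current.add(size);
          used += size;
        }
        if (!current.isEmpty()) {
          splits.add(current);
        }
        return splits;
      }

      public static void main(String[] args)
      {
        System.out.println(group(Arrays.asList(16L, 16L), 1L));  // [[16], [16]]
        System.out.println(group(Arrays.asList(16L, 16L), 48L)); // [[16, 16]]
      }
    }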
expectListObjects(EXPECTED_URIS.get(1), ImmutableList.of(EXPECTED_URIS.get(1)), CONTENT); - expectGetObject(EXPECTED_URIS.get(0)); - expectGetObject(EXPECTED_URIS.get(1)); - EasyMock.replay(OSSCLIENT); - - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - ImmutableList.of(PREFIXES.get(0), EXPECTED_URIS.get(1)), - null, - null - ); - - InputRowSchema someSchema = new InputRowSchema( - new TimestampSpec("time", "auto", null), - new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2"))), - ImmutableList.of("count") - ); - - InputSourceReader reader = inputSource.reader( - someSchema, - new CsvInputFormat(ImmutableList.of("time", "dim1", "dim2"), "|", false, null, 0), - temporaryFolder.newFolder() - ); - - CloseableIterator iterator = reader.read(); - - while (iterator.hasNext()) { - InputRow nextRow = iterator.next(); - Assert.assertEquals(NOW, nextRow.getTimestamp()); - Assert.assertEquals("hello", nextRow.getDimension("dim1").get(0)); - Assert.assertEquals("world", nextRow.getDimension("dim2").get(0)); - } - - EasyMock.verify(OSSCLIENT); - } - - @Test - public void testCompressedReader() throws IOException - { - EasyMock.reset(OSSCLIENT); - expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_COMPRESSED_URIS.get(0)), CONTENT); - expectListObjects(EXPECTED_COMPRESSED_URIS.get(1), ImmutableList.of(EXPECTED_COMPRESSED_URIS.get(1)), CONTENT); - expectGetObjectCompressed(EXPECTED_COMPRESSED_URIS.get(0)); - expectGetObjectCompressed(EXPECTED_COMPRESSED_URIS.get(1)); - EasyMock.replay(OSSCLIENT); - - OssInputSource inputSource = new OssInputSource( - OSSCLIENT, - INPUT_DATA_CONFIG, - null, - ImmutableList.of(PREFIXES.get(0), EXPECTED_COMPRESSED_URIS.get(1)), - null, - null - ); - - InputRowSchema someSchema = new InputRowSchema( - new TimestampSpec("time", "auto", null), - new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2"))), - ImmutableList.of("count") - ); - - InputSourceReader reader = inputSource.reader( - someSchema, - new CsvInputFormat(ImmutableList.of("time", "dim1", "dim2"), "|", false, null, 0), - temporaryFolder.newFolder() - ); - - CloseableIterator iterator = reader.read(); - - while (iterator.hasNext()) { - InputRow nextRow = iterator.next(); - Assert.assertEquals(NOW, nextRow.getTimestamp()); - Assert.assertEquals("hello", nextRow.getDimension("dim1").get(0)); - Assert.assertEquals("world", nextRow.getDimension("dim2").get(0)); - } - - EasyMock.verify(OSSCLIENT); - } - - private static void expectListObjects(URI prefix, List uris, byte[] content) - { - final ObjectListing result = new ObjectListing(); - result.setBucketName(prefix.getAuthority()); - result.setMaxKeys(uris.size()); - for (URI uri : uris) { - final String bucket = uri.getAuthority(); - final String key = OssUtils.extractKey(uri); - final OSSObjectSummary objectSummary = new OSSObjectSummary(); - objectSummary.setBucketName(bucket); - objectSummary.setKey(key); - objectSummary.setSize(content.length); - result.getObjectSummaries().add(objectSummary); - } - - EasyMock.expect( - OSSCLIENT.listObjects(matchListObjectsRequest(prefix)) - ).andReturn(result).once(); - } - - private static void expectListObjectsAndThrowAccessDenied(final URI prefix) - { - OSSException boom = new OSSException("oh dang, you can't list that bucket friend"); - boom.setRawResponseError("403"); - EasyMock.expect( - OSSCLIENT.listObjects(matchListObjectsRequest(prefix)) - ).andThrow(boom).once(); - } - - private static void 
expectGetObject(URI uri) - { - final String bucket = uri.getAuthority(); - final String key = OssUtils.extractKey(uri); - - OSSObject someObject = new OSSObject(); - someObject.setBucketName(bucket); - someObject.setKey(key); - someObject.setObjectContent(new ByteArrayInputStream(CONTENT)); - EasyMock.expect(OSSCLIENT.getObject(EasyMock.anyObject(GetObjectRequest.class))).andReturn(someObject).once(); - } - - private static void expectGetObjectCompressed(URI uri) throws IOException - { - final String bucket = uri.getAuthority(); - final String key = OssUtils.extractKey(uri); - - OSSObject someObject = new OSSObject(); - someObject.setBucketName(bucket); - someObject.setKey(key); - ByteArrayOutputStream gzipped = new ByteArrayOutputStream(); - CompressionUtils.gzip(new ByteArrayInputStream(CONTENT), gzipped); - someObject.setObjectContent(new ByteArrayInputStream(gzipped.toByteArray())); - EasyMock.expect(OSSCLIENT.getObject(EasyMock.anyObject(GetObjectRequest.class))).andReturn(someObject).once(); - } - - private static ListObjectsRequest matchListObjectsRequest(final URI prefixUri) - { - // Use an IArgumentMatcher to verify that the request has the correct bucket and prefix. - EasyMock.reportMatcher( - new IArgumentMatcher() - { - @Override - public boolean matches(Object argument) - { - if (!(argument instanceof ListObjectsRequest)) { - return false; - } - - final ListObjectsRequest request = (ListObjectsRequest) argument; - return prefixUri.getAuthority().equals(request.getBucketName()) - && OssUtils.extractKey(prefixUri).equals(request.getPrefix()); - } - - @Override - public void appendTo(StringBuffer buffer) - { - buffer.append(""); - } - } - ); - - return null; - } - - public static ObjectMapper createObjectMapper() - { - DruidModule baseModule = new TestOssModule(); - final Injector injector = Guice.createInjector( - new ObjectMapperModule(), - baseModule - ); - final ObjectMapper baseMapper = injector.getInstance(ObjectMapper.class); - - baseModule.getJacksonModules().forEach(baseMapper::registerModule); - return baseMapper; - } - - public static class TestOssModule implements DruidModule - { - @Override - public List<? extends Module> getJacksonModules() - { - // Deserializer is needed for OSS even though it is injected. - // See https://github.com/FasterXML/jackson-databind/issues/962. - return ImmutableList.of( - new SimpleModule() - .addDeserializer(OSS.class, new ItemDeserializer<>()) - ); - } - - @Override - public void configure(Binder binder) - { - } - - @Provides - public OSS getOssClient() - { - return OSSCLIENT; - } - } - - public static class ItemDeserializer<T> extends StdDeserializer<T> - { - ItemDeserializer() - { - this(null); - } - - ItemDeserializer(Class<?> vc) - { - super(vc); - } - - @Override - public T deserialize(JsonParser jp, DeserializationContext ctxt) - { - throw new UnsupportedOperationException(); - } - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverTest.java deleted file mode 100644 index 02a88ec885c9..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentArchiverTest.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSClient; -import com.fasterxml.jackson.databind.BeanProperty; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.timeline.DataSegment; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; - -import java.util.Map; - -public class OssDataSegmentArchiverTest -{ - private static final ObjectMapper MAPPER = new DefaultObjectMapper() - .setInjectableValues( - new InjectableValues() - { - @Override - public Object findInjectableValue( - Object valueId, - DeserializationContext ctxt, - BeanProperty forProperty, - Object beanInstance - ) - { - return PULLER; - } - } - ) - .registerModule(new SimpleModule("aliyun-oss-archive-test-module").registerSubtypes(OssLoadSpec.class)); - private static final OssDataSegmentArchiverConfig ARCHIVER_CONFIG = new OssDataSegmentArchiverConfig() - { - @Override - public String getArchiveBucket() - { - return "archive_bucket"; - } - - @Override - public String getArchiveBaseKey() - { - return "archive_base_key"; - } - }; - private static final OssStorageConfig PUSHER_CONFIG = new OssStorageConfig(); - private static final OSS OSS_CLIENT = EasyMock.createStrictMock(OSSClient.class); - private static final OssDataSegmentPuller PULLER = new OssDataSegmentPuller(OSS_CLIENT); - private static final DataSegment SOURCE_SEGMENT = DataSegment - .builder() - .binaryVersion(1) - .dataSource("dataSource") - .dimensions(ImmutableList.of()) - .interval(Intervals.of("2015/2016")) - .version("version") - .loadSpec(ImmutableMap.of( - "type", - OssStorageDruidModule.SCHEME_ZIP, - OssDataSegmentPuller.BUCKET, - "source_bucket", - OssDataSegmentPuller.KEY, - "source_key" - )) - .size(0) - .build(); - - @BeforeClass - public static void setUpStatic() - { - PUSHER_CONFIG.setPrefix("push_base"); - PUSHER_CONFIG.setBucket("push_bucket"); - } - - @Test - public void testSimpleArchive() throws Exception - { - final DataSegment archivedSegment = SOURCE_SEGMENT - .withLoadSpec(ImmutableMap.of( - "type", - OssStorageDruidModule.SCHEME_ZIP, - OssDataSegmentPuller.BUCKET, - ARCHIVER_CONFIG.getArchiveBucket(), - OssDataSegmentPuller.KEY, - ARCHIVER_CONFIG.getArchiveBaseKey() + "archived" - )); - final OssDataSegmentArchiver archiver = new OssDataSegmentArchiver( - MAPPER, - OSS_CLIENT, - ARCHIVER_CONFIG, - PUSHER_CONFIG - ) - { - @Override - 
public DataSegment move(DataSegment segment, Map targetLoadSpec) - { - return archivedSegment; - } - }; - Assert.assertEquals(archivedSegment, archiver.archive(SOURCE_SEGMENT)); - } - - @Test - public void testSimpleArchiveDoesntMove() throws Exception - { - final OssDataSegmentArchiver archiver = new OssDataSegmentArchiver( - MAPPER, - OSS_CLIENT, - ARCHIVER_CONFIG, - PUSHER_CONFIG - ) - { - @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) - { - return SOURCE_SEGMENT; - } - }; - Assert.assertNull(archiver.archive(SOURCE_SEGMENT)); - } - - @Test - public void testSimpleRestore() throws Exception - { - final DataSegment archivedSegment = SOURCE_SEGMENT - .withLoadSpec(ImmutableMap.of( - "type", - OssStorageDruidModule.SCHEME_ZIP, - OssDataSegmentPuller.BUCKET, - ARCHIVER_CONFIG.getArchiveBucket(), - OssDataSegmentPuller.KEY, - ARCHIVER_CONFIG.getArchiveBaseKey() + "archived" - )); - final OssDataSegmentArchiver archiver = new OssDataSegmentArchiver( - MAPPER, - OSS_CLIENT, - ARCHIVER_CONFIG, - PUSHER_CONFIG - ) - { - @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) - { - return archivedSegment; - } - }; - Assert.assertEquals(archivedSegment, archiver.restore(SOURCE_SEGMENT)); - } - - @Test - public void testSimpleRestoreDoesntMove() throws Exception - { - final OssDataSegmentArchiver archiver = new OssDataSegmentArchiver( - MAPPER, - OSS_CLIENT, - ARCHIVER_CONFIG, - PUSHER_CONFIG - ) - { - @Override - public DataSegment move(DataSegment segment, Map targetLoadSpec) - { - return SOURCE_SEGMENT; - } - }; - Assert.assertNull(archiver.restore(SOURCE_SEGMENT)); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentKillerTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentKillerTest.java deleted file mode 100644 index 638348379a27..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentKillerTest.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.ClientException; -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.DeleteObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.easymock.EasyMock; -import org.easymock.EasyMockRunner; -import org.easymock.EasyMockSupport; -import org.easymock.Mock; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.io.IOException; -import java.net.URI; -import java.util.Collections; - -@RunWith(EasyMockRunner.class) -public class OssDataSegmentKillerTest extends EasyMockSupport -{ - private static final String KEY_1 = "key1"; - private static final String KEY_2 = "key2"; - private static final String TEST_BUCKET = "test_bucket"; - private static final String TEST_PREFIX = "test_prefix"; - private static final URI PREFIX_URI = URI.create(StringUtils.format(OssStorageDruidModule.SCHEME + "://%s/%s", TEST_BUCKET, TEST_PREFIX)); - private static final long TIME_0 = 0L; - private static final long TIME_1 = 1L; - private static final int MAX_KEYS = 1; - private static final Exception RECOVERABLE_EXCEPTION = new ClientException(new IOException("mocked by test case")); - private static final Exception NON_RECOVERABLE_EXCEPTION = new ClientException(new NullPointerException("mocked by test case")); - - @Mock - private OSS client; - @Mock - private OssStorageConfig segmentPusherConfig; - @Mock - private OssInputDataConfig inputDataConfig; - - private OssDataSegmentKiller segmentKiller; - - @Test - public void test_killAll_accountConfigWithNullBucketAndBaseKey_throwsISEException() throws IOException - { - EasyMock.expect(segmentPusherConfig.getBucket()).andReturn(null); - EasyMock.expectLastCall().atLeastOnce(); - EasyMock.expect(segmentPusherConfig.getPrefix()).andReturn(null); - EasyMock.expectLastCall().anyTimes(); - - boolean thrownISEException = false; - - try { - - EasyMock.replay(client, segmentPusherConfig, inputDataConfig); - - segmentKiller = new OssDataSegmentKiller(client, segmentPusherConfig, inputDataConfig); - segmentKiller.killAll(); - } - catch (ISE e) { - thrownISEException = true; - } - Assert.assertTrue(thrownISEException); - EasyMock.verify(client, segmentPusherConfig, inputDataConfig); - } - - @Test - public void test_killAll_noException_deletesAllSegments() throws IOException - { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - OSSObjectSummary objectSummary2 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_2, TIME_1); - - OssTestUtils.expectListObjects( - client, - PREFIX_URI, - ImmutableList.of(objectSummary1, objectSummary2) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - DeleteObjectsRequest deleteRequest2 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest2.setKeys(Collections.singletonList(KEY_2)); - - OssTestUtils.mockClientDeleteObjects( - client, - ImmutableList.of(deleteRequest1, deleteRequest2), - ImmutableMap.of() - ); - - EasyMock.expect(segmentPusherConfig.getBucket()).andReturn(TEST_BUCKET); - EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(segmentPusherConfig.getPrefix()).andReturn(TEST_PREFIX); - EasyMock.expectLastCall().anyTimes(); - - 
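Everything in this killer test leans on EasyMock's record/replay/verify lifecycle: expectations are recorded on the mocks, replay() arms them, and verify() fails the test if a recorded expectation went unmet. The count modifiers matter: anyTimes() tolerates zero or more calls, whereas a bare expect must be met exactly once. A self-contained sketch of the lifecycle follows; the Config interface is a stand-in, not a Druid type.

    import org.easymock.EasyMock;

    public class EasyMockLifecycleSketch
    {
      // Stand-in for config objects like OssStorageConfig above.
      interface Config
      {
        String getBucket();
      }

      public static void main(String[] args)
      {
        Config config = EasyMock.createMock(Config.class);

        // Record: anyTimes() allows zero or more calls; without a modifier
        // the expectation must be satisfied exactly once for verify() to pass.
        EasyMock.expect(config.getBucket()).andReturn("test_bucket").anyTimes();

        // Replay: the mock now serves the recorded answers.
        EasyMock.replay(config);
        System.out.println(config.getBucket()); // test_bucket

        // Verify: throws AssertionError if a bounded expectation went unmet.
        EasyMock.verify(config);
      }
    }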
EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS); - EasyMock.expectLastCall().anyTimes(); - - EasyMock.replay(client, segmentPusherConfig, inputDataConfig); - - segmentKiller = new OssDataSegmentKiller(client, segmentPusherConfig, inputDataConfig); - segmentKiller.killAll(); - EasyMock.verify(client, segmentPusherConfig, inputDataConfig); - } - - @Test - public void test_killAll_recoverableExceptionWhenListingObjects_deletesAllSegments() throws IOException - { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - - OssTestUtils.expectListObjects( - client, - PREFIX_URI, - ImmutableList.of(objectSummary1) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - - OssTestUtils.mockClientDeleteObjects( - client, - ImmutableList.of(deleteRequest1), - ImmutableMap.of(deleteRequest1, RECOVERABLE_EXCEPTION) - ); - - EasyMock.expect(segmentPusherConfig.getBucket()).andReturn(TEST_BUCKET); - EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(segmentPusherConfig.getPrefix()).andReturn(TEST_PREFIX); - EasyMock.expectLastCall().anyTimes(); - - EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS); - EasyMock.expectLastCall().anyTimes(); - - EasyMock.replay(client, segmentPusherConfig, inputDataConfig); - - segmentKiller = new OssDataSegmentKiller(client, segmentPusherConfig, inputDataConfig); - segmentKiller.killAll(); - EasyMock.verify(client, segmentPusherConfig, inputDataConfig); - } - - @Test - public void test_killAll_nonrecoverableExceptionWhenListingObjects_deletesAllSegments() - { - boolean ioExceptionThrown = false; - try { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - - OssTestUtils.expectListObjects( - client, - PREFIX_URI, - ImmutableList.of(objectSummary1) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.withKeys(ImmutableList.of(KEY_1)); - - OssTestUtils.mockClientDeleteObjects( - client, - ImmutableList.of(), - ImmutableMap.of(deleteRequest1, NON_RECOVERABLE_EXCEPTION) - ); - - - EasyMock.expect(segmentPusherConfig.getBucket()).andReturn(TEST_BUCKET); - EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(segmentPusherConfig.getPrefix()).andReturn(TEST_PREFIX); - EasyMock.expectLastCall().anyTimes(); - - EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_KEYS); - EasyMock.expectLastCall().anyTimes(); - - EasyMock.replay(client, segmentPusherConfig, inputDataConfig); - - segmentKiller = new OssDataSegmentKiller(client, segmentPusherConfig, inputDataConfig); - segmentKiller.killAll(); - } - catch (IOException e) { - ioExceptionThrown = true; - } - - Assert.assertTrue(ioExceptionThrown); - EasyMock.verify(client, segmentPusherConfig, inputDataConfig); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentMoverTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentMoverTest.java deleted file mode 100644 index 66c6f25006f1..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentMoverTest.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSSClient; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.CopyObjectRequest; -import com.aliyun.oss.model.CopyObjectResult; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import com.aliyun.oss.model.PutObjectResult; -import com.aliyun.oss.model.StorageClass; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.java.util.common.MapUtils; -import org.apache.druid.segment.loading.SegmentLoadingException; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.NoneShardSpec; -import org.junit.Assert; -import org.junit.Test; - -import java.io.File; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -public class OssDataSegmentMoverTest -{ - private static final DataSegment SOURCE_SEGMENT = new DataSegment( - "test", - Intervals.of("2013-01-01/2013-01-02"), - "1", - ImmutableMap.of( - "key", - "baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", - "bucket", - "main" - ), - ImmutableList.of("dim1", "dim1"), - ImmutableList.of("metric1", "metric2"), - NoneShardSpec.instance(), - 0, - 1 - ); - - @Test - public void testMove() throws Exception - { - MockClient mockClient = new MockClient(); - OssDataSegmentMover mover = new OssDataSegmentMover(mockClient, new OssStorageConfig()); - - mockClient.putObject( - "main", - "baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip" - ); - - DataSegment movedSegment = mover.move( - SOURCE_SEGMENT, - ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") - ); - - Map targetLoadSpec = movedSegment.getLoadSpec(); - Assert.assertEquals( - "targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", - MapUtils.getString(targetLoadSpec, "key") - ); - Assert.assertEquals("archive", MapUtils.getString(targetLoadSpec, "bucket")); - Assert.assertTrue(mockClient.didMove()); - } - - @Test - public void testMoveNoop() throws Exception - { - MockClient mockOssClient = new MockClient(); - OssDataSegmentMover mover = new OssDataSegmentMover(mockOssClient, new OssStorageConfig()); - - mockOssClient.putObject( - "archive", - "targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip" - ); - - DataSegment movedSegment = mover.move( - SOURCE_SEGMENT, - ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") - ); - - Map targetLoadSpec = movedSegment.getLoadSpec(); - - Assert.assertEquals( - "targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", - 
MapUtils.getString(targetLoadSpec, "key") - ); - Assert.assertEquals("archive", MapUtils.getString(targetLoadSpec, "bucket")); - Assert.assertFalse(mockOssClient.didMove()); - } - - @Test(expected = SegmentLoadingException.class) - public void testMoveException() throws Exception - { - MockClient mockClient = new MockClient(); - OssDataSegmentMover mover = new OssDataSegmentMover(mockClient, new OssStorageConfig()); - - mover.move( - SOURCE_SEGMENT, - ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") - ); - } - - @Test - public void testIgnoresGoneButAlreadyMoved() throws Exception - { - MockClient mockOssClient = new MockClient(); - OssDataSegmentMover mover = new OssDataSegmentMover(mockOssClient, new OssStorageConfig()); - mover.move(new DataSegment( - "test", - Intervals.of("2013-01-01/2013-01-02"), - "1", - ImmutableMap.of( - "key", - "baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", - "bucket", - "DOES NOT EXIST" - ), - ImmutableList.of("dim1", "dim1"), - ImmutableList.of("metric1", "metric2"), - NoneShardSpec.instance(), - 0, - 1 - ), ImmutableMap.of("bucket", "DOES NOT EXIST", "baseKey", "baseKey")); - } - - @Test(expected = SegmentLoadingException.class) - public void testFailsToMoveMissing() throws Exception - { - MockClient client = new MockClient(); - OssDataSegmentMover mover = new OssDataSegmentMover(client, new OssStorageConfig()); - mover.move(new DataSegment( - "test", - Intervals.of("2013-01-01/2013-01-02"), - "1", - ImmutableMap.of( - "key", - "baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip", - "bucket", - "DOES NOT EXIST" - ), - ImmutableList.of("dim1", "dim1"), - ImmutableList.of("metric1", "metric2"), - NoneShardSpec.instance(), - 0, - 1 - ), ImmutableMap.of("bucket", "DOES NOT EXIST", "baseKey", "baseKey2")); - } - - private static class MockClient extends OSSClient - { - Map> storage = new HashMap<>(); - boolean copied = false; - boolean deletedOld = false; - - private MockClient() - { - super("endpoint", "accessKeyId", "keySecret"); - } - - public boolean didMove() - { - return copied && deletedOld; - } - - @Override - public boolean doesObjectExist(String bucketName, String objectKey) - { - Set objects = storage.get(bucketName); - return (objects != null && objects.contains(objectKey)); - } - - @Override - public ObjectListing listObjects(ListObjectsRequest listObjectsV2Request) - { - final String bucketName = listObjectsV2Request.getBucketName(); - final String objectKey = listObjectsV2Request.getPrefix(); - if (doesObjectExist(bucketName, objectKey)) { - final OSSObjectSummary objectSummary = new OSSObjectSummary(); - objectSummary.setBucketName(bucketName); - objectSummary.setKey(objectKey); - objectSummary.setStorageClass(StorageClass.Standard.name()); - - final ObjectListing result = new ObjectListing(); - result.setBucketName(bucketName); - result.setPrefix(objectKey); - //result.setKeyCount(1); - result.getObjectSummaries().add(objectSummary); - result.setTruncated(true); - return result; - } else { - return new ObjectListing(); - } - } - - @Override - public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest) - { - final String sourceBucketName = copyObjectRequest.getSourceBucketName(); - final String sourceObjectKey = copyObjectRequest.getSourceKey(); - final String destinationBucketName = copyObjectRequest.getDestinationBucketName(); - final String destinationObjectKey = copyObjectRequest.getDestinationKey(); - copied = true; - if (doesObjectExist(sourceBucketName, 
sourceObjectKey)) { - storage.computeIfAbsent(destinationBucketName, k -> new HashSet<>()) - .add(destinationObjectKey); - return new CopyObjectResult(); - } else { - final OSSException exception = new OSSException( - "OssDataSegmentMoverTest", - "NoSuchKey", - null, - null, - null, - null, - null - ); - throw exception; - } - } - - @Override - public void deleteObject(String bucket, String objectKey) - { - deletedOld = true; - storage.get(bucket).remove(objectKey); - } - - public PutObjectResult putObject(String bucketName, String key) - { - return putObject(bucketName, key, (File) null); - } - - @Override - public PutObjectResult putObject(String bucketName, String key, File file) - { - storage.computeIfAbsent(bucketName, bName -> new HashSet<>()).add(key); - return new PutObjectResult(); - } - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPullerTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPullerTest.java deleted file mode 100644 index 46584cac5e18..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPullerTest.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSException; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObject; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import org.apache.druid.data.input.impl.CloudObjectLocation; -import org.apache.druid.java.util.common.FileUtils; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.segment.loading.SegmentLoadingException; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.net.URI; -import java.nio.charset.StandardCharsets; -import java.util.Date; -import java.util.zip.GZIPOutputStream; - -/** - * - */ -public class OssDataSegmentPullerTest -{ - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - - @Test - public void testSimpleGetVersion() throws IOException - { - String bucket = "bucket"; - String keyPrefix = "prefix/dir/0"; - OSS ossClient = EasyMock.createStrictMock(OSS.class); - - final OSSObjectSummary objectSummary = new OSSObjectSummary(); - objectSummary.setBucketName(bucket); - objectSummary.setKey(keyPrefix + "/renames-0.gz"); - objectSummary.setLastModified(new Date(0)); - - final ObjectListing result = new ObjectListing(); - result.getObjectSummaries().add(objectSummary); - - EasyMock.expect(ossClient.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(result) - .once(); - OssDataSegmentPuller puller = new OssDataSegmentPuller(ossClient); - - EasyMock.replay(ossClient); - - String version = puller.getVersion(URI.create(StringUtils.format(OssStorageDruidModule.SCHEME + "://%s/%s", bucket, objectSummary.getKey()))); - - EasyMock.verify(ossClient); - - Assert.assertEquals(StringUtils.format("%d", new Date(0).getTime()), version); - } - - @Test - public void testGZUncompress() throws IOException, SegmentLoadingException - { - final String bucket = "bucket"; - final String keyPrefix = "prefix/dir/0"; - final OSS ossClient = EasyMock.createStrictMock(OSS.class); - final byte[] value = bucket.getBytes(StandardCharsets.UTF_8); - - final File tmpFile = temporaryFolder.newFile("gzTest.gz"); - - try (OutputStream outputStream = new GZIPOutputStream(new FileOutputStream(tmpFile))) { - outputStream.write(value); - } - - final OSSObject object0 = new OSSObject(); - object0.setBucketName(bucket); - object0.setKey(keyPrefix + "/renames-0.gz"); - object0.getObjectMetadata().setLastModified(new Date(0)); - object0.setObjectContent(new FileInputStream(tmpFile)); - - final OSSObjectSummary objectSummary = new OSSObjectSummary(); - objectSummary.setBucketName(bucket); - objectSummary.setKey(keyPrefix + "/renames-0.gz"); - objectSummary.setLastModified(new Date(0)); - - final ObjectListing listObjectsResult = new ObjectListing(); - listObjectsResult.getObjectSummaries().add(objectSummary); - - final File tmpDir = temporaryFolder.newFolder("gzTestDir"); - - EasyMock.expect(ossClient.doesObjectExist(EasyMock.eq(object0.getBucketName()), EasyMock.eq(object0.getKey()))) - .andReturn(true) - .once(); - EasyMock.expect(ossClient.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(listObjectsResult) - .once(); - EasyMock.expect(ossClient.getObject(EasyMock.eq(object0.getBucketName()), 
EasyMock.eq(object0.getKey()))) - .andReturn(object0) - .once(); - OssDataSegmentPuller puller = new OssDataSegmentPuller(ossClient); - - EasyMock.replay(ossClient); - FileUtils.FileCopyResult result = puller.getSegmentFiles( - new CloudObjectLocation( - bucket, - object0.getKey() - ), tmpDir - ); - EasyMock.verify(ossClient); - - Assert.assertEquals(value.length, result.size()); - File expected = new File(tmpDir, "renames-0"); - Assert.assertTrue(expected.exists()); - Assert.assertEquals(value.length, expected.length()); - } - - @Test - public void testGZUncompressRetries() throws IOException, SegmentLoadingException - { - final String bucket = "bucket"; - final String keyPrefix = "prefix/dir/0"; - final OSS ossClient = EasyMock.createStrictMock(OSS.class); - final byte[] value = bucket.getBytes(StandardCharsets.UTF_8); - - final File tmpFile = temporaryFolder.newFile("gzTest.gz"); - - try (OutputStream outputStream = new GZIPOutputStream(new FileOutputStream(tmpFile))) { - outputStream.write(value); - } - - OSSObject object0 = new OSSObject(); - - object0.setBucketName(bucket); - object0.setKey(keyPrefix + "/renames-0.gz"); - object0.getObjectMetadata().setLastModified(new Date(0)); - object0.setObjectContent(new FileInputStream(tmpFile)); - - final OSSObjectSummary objectSummary = new OSSObjectSummary(); - objectSummary.setBucketName(bucket); - objectSummary.setKey(keyPrefix + "/renames-0.gz"); - objectSummary.setLastModified(new Date(0)); - - final ObjectListing listObjectsResult = new ObjectListing(); - listObjectsResult.getObjectSummaries().add(objectSummary); - - File tmpDir = temporaryFolder.newFolder("gzTestDir"); - - OSSException exception = new OSSException("OssDataSegmentPullerTest", "NoSuchKey", null, null, null, null, null); - EasyMock.expect(ossClient.doesObjectExist(EasyMock.eq(object0.getBucketName()), EasyMock.eq(object0.getKey()))) - .andReturn(true) - .once(); - EasyMock.expect(ossClient.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(listObjectsResult) - .once(); - EasyMock.expect(ossClient.getObject(EasyMock.eq(bucket), EasyMock.eq(object0.getKey()))) - .andThrow(exception) - .once(); - EasyMock.expect(ossClient.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(listObjectsResult) - .once(); - EasyMock.expect(ossClient.getObject(EasyMock.eq(bucket), EasyMock.eq(object0.getKey()))) - .andReturn(object0) - .once(); - OssDataSegmentPuller puller = new OssDataSegmentPuller(ossClient); - - EasyMock.replay(ossClient); - FileUtils.FileCopyResult result = puller.getSegmentFiles( - new CloudObjectLocation( - bucket, - object0.getKey() - ), tmpDir - ); - EasyMock.verify(ossClient); - - Assert.assertEquals(value.length, result.size()); - File expected = new File(tmpDir, "renames-0"); - Assert.assertTrue(expected.exists()); - Assert.assertEquals(value.length, expected.length()); - } - -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherConfigTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherConfigTest.java deleted file mode 100644 index d558a08068cd..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherConfigTest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Test; - -import java.io.IOException; - -public class OssDataSegmentPusherConfigTest -{ - private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); - - @Test - public void testSerialization() throws IOException - { - String jsonConfig = "{\"bucket\":\"bucket1\",\"prefix\":\"dataSource1\"}"; - - OssStorageConfig config = JSON_MAPPER.readValue(jsonConfig, OssStorageConfig.class); - Assert.assertEquals(jsonConfig, JSON_MAPPER.writeValueAsString(config)); - } - - @Test - public void testSerializationWithDefaults() throws IOException - { - String jsonConfig = "{\"bucket\":\"bucket1\",\"prefix\":\"dataSource1\"}"; - String expectedJsonConfig = "{\"bucket\":\"bucket1\",\"prefix\":\"dataSource1\"}"; - - OssStorageConfig config = JSON_MAPPER.readValue(jsonConfig, OssStorageConfig.class); - Assert.assertEquals(expectedJsonConfig, JSON_MAPPER.writeValueAsString(config)); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherTest.java deleted file mode 100644 index b3d91c7af548..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssDataSegmentPusherTest.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.PutObjectResult; -import com.google.common.io.Files; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.NoneShardSpec; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.regex.Pattern; - -/** - * - */ -public class OssDataSegmentPusherTest -{ - private static class ValueContainer - { - private T value; - - public T getValue() - { - return value; - } - - public void setValue(T value) - { - this.value = value; - } - } - - @Rule - public final TemporaryFolder tempFolder = new TemporaryFolder(); - - @Test - public void testPush() throws Exception - { - testPushInternal(false, "key/foo/2015-01-01T00:00:00\\.000Z_2016-01-01T00:00:00\\.000Z/0/0/index\\.zip"); - } - - @Test - public void testPushUseUniquePath() throws Exception - { - testPushInternal( - true, - "key/foo/2015-01-01T00:00:00\\.000Z_2016-01-01T00:00:00\\.000Z/0/0/[A-Za-z0-9-]{36}/index\\.zip" - ); - } - - private void testPushInternal(boolean useUniquePath, String matcher) throws Exception - { - OSS client = EasyMock.createStrictMock(OSS.class); - - EasyMock.expect(client.putObject(EasyMock.anyObject())) - .andReturn(new PutObjectResult()) - .once(); - - EasyMock.replay(client); - - OssStorageConfig config = new OssStorageConfig(); - config.setBucket("bucket"); - config.setPrefix("key"); - - OssDataSegmentPusher pusher = new OssDataSegmentPusher(client, config); - - // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); - - final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1}; - Files.write(data, tmp); - final long size = data.length; - - DataSegment segmentToPush = new DataSegment( - "foo", - Intervals.of("2015/2016"), - "0", - new HashMap<>(), - new ArrayList<>(), - new ArrayList<>(), - NoneShardSpec.instance(), - 0, - size - ); - - DataSegment segment = pusher.push(tempFolder.getRoot(), segmentToPush, useUniquePath); - - Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); - Assert.assertEquals(1, (int) segment.getBinaryVersion()); - Assert.assertEquals("bucket", segment.getLoadSpec().get("bucket")); - Assert.assertTrue( - segment.getLoadSpec().get("key").toString(), - Pattern.compile(matcher).matcher(segment.getLoadSpec().get("key").toString()).matches() - ); - Assert.assertEquals("oss_zip", segment.getLoadSpec().get("type")); - - EasyMock.verify(client); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssObjectSummaryIteratorTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssObjectSummaryIteratorTest.java deleted file mode 100644 index d124b6bf6f9c..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssObjectSummaryIteratorTest.java +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.OSSClient; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import org.junit.Assert; -import org.junit.Test; - -import java.net.URI; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -public class OssObjectSummaryIteratorTest -{ - private static final ImmutableList TEST_OBJECTS = - ImmutableList.of( - makeObjectSummary("b", "foo", 10L), - makeObjectSummary("b", "foo/", 0L), // directory - makeObjectSummary("b", "foo/bar1", 10L), - makeObjectSummary("b", "foo/bar2", 10L), - makeObjectSummary("b", "foo/bar3", 10L), - makeObjectSummary("b", "foo/bar4", 10L), - makeObjectSummary("b", "foo/bar5", 0L), // empty object - makeObjectSummary("b", "foo/baz", 10L), - makeObjectSummary("bucketnotmine", "a/different/bucket", 10L), - makeObjectSummary("b", "foo/bar/", 0L) // another directory at the end of list - ); - - @Test - public void testSingleObject() - { - test( - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/baz"), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/baz"), - 5 - ); - } - - @Test - public void testMultiObjectOneKeyAtATime() - { - test( - ImmutableList.of( - OssStorageDruidModule.SCHEME + "://b/foo/bar1", - OssStorageDruidModule.SCHEME + "://b/foo/bar2", - OssStorageDruidModule.SCHEME + "://b/foo/bar3", - OssStorageDruidModule.SCHEME + "://b/foo/bar4", - OssStorageDruidModule.SCHEME + "://b/foo/baz" - ), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/"), - 1 - ); - } - - @Test - public void testMultiObjectTwoKeysAtATime() - { - test( - ImmutableList.of( - OssStorageDruidModule.SCHEME + "://b/foo/bar1", - OssStorageDruidModule.SCHEME + "://b/foo/bar2", - OssStorageDruidModule.SCHEME + "://b/foo/bar3", - OssStorageDruidModule.SCHEME + "://b/foo/bar4", - OssStorageDruidModule.SCHEME + "://b/foo/baz" - ), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/"), - 2 - ); - } - - @Test - public void testMultiObjectTenKeysAtATime() - { - test( - ImmutableList.of( - OssStorageDruidModule.SCHEME + "://b/foo/bar1", - OssStorageDruidModule.SCHEME + "://b/foo/bar2", - OssStorageDruidModule.SCHEME + "://b/foo/bar3", - OssStorageDruidModule.SCHEME + "://b/foo/bar4", - OssStorageDruidModule.SCHEME + "://b/foo/baz" - ), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/"), - 10 - ); - } - - @Test - public void testPrefixInMiddleOfKey() - { - test( - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/bar1", OssStorageDruidModule.SCHEME + "://b/foo/bar2", OssStorageDruidModule.SCHEME + "://b/foo/bar3", OssStorageDruidModule.SCHEME + "://b/foo/bar4"), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/bar"), - 10 - ); - } - - @Test 
- public void testNoPath() - { - test( - ImmutableList.of( - OssStorageDruidModule.SCHEME + "://b/foo", - OssStorageDruidModule.SCHEME + "://b/foo/bar1", - OssStorageDruidModule.SCHEME + "://b/foo/bar2", - OssStorageDruidModule.SCHEME + "://b/foo/bar3", - OssStorageDruidModule.SCHEME + "://b/foo/bar4", - OssStorageDruidModule.SCHEME + "://b/foo/baz" - ), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b"), - 10 - ); - } - - @Test - public void testSlashPath() - { - test( - ImmutableList.of( - OssStorageDruidModule.SCHEME + "://b/foo", - OssStorageDruidModule.SCHEME + "://b/foo/bar1", - OssStorageDruidModule.SCHEME + "://b/foo/bar2", - OssStorageDruidModule.SCHEME + "://b/foo/bar3", - OssStorageDruidModule.SCHEME + "://b/foo/bar4", - OssStorageDruidModule.SCHEME + "://b/foo/baz" - ), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/"), - 10 - ); - } - - @Test - public void testDifferentBucket() - { - test( - ImmutableList.of(), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://bx/foo/"), - 10 - ); - } - - @Test - public void testWithMultiplePrefixesReturningAllNonEmptyObjectsStartingWithOneOfPrefixes() - { - test( - ImmutableList.of( - OssStorageDruidModule.SCHEME + "://b/foo/bar1", - OssStorageDruidModule.SCHEME + "://b/foo/bar2", - OssStorageDruidModule.SCHEME + "://b/foo/bar3", - OssStorageDruidModule.SCHEME + "://b/foo/bar4", - OssStorageDruidModule.SCHEME + "://b/foo/baz" - ), - ImmutableList.of(OssStorageDruidModule.SCHEME + "://b/foo/bar", OssStorageDruidModule.SCHEME + "://b/foo/baz"), - 10 - ); - } - - private static void test( - final List expectedUris, - final List prefixes, - final int maxListingLength - ) - { - final List expectedObjects = new ArrayList<>(); - - // O(N^2) but who cares -- the list is short. - for (final String uri : expectedUris) { - final List matches = TEST_OBJECTS.stream() - .filter( - summary -> - OssUtils.summaryToUri(summary).toString().equals(uri) - ) - .collect(Collectors.toList()); - - expectedObjects.add(Iterables.getOnlyElement(matches)); - } - - final List actualObjects = ImmutableList.copyOf( - OssUtils.objectSummaryIterator( - makeMockClient(TEST_OBJECTS), - prefixes.stream().map(URI::create).collect(Collectors.toList()), - maxListingLength - ) - ); - - Assert.assertEquals( - prefixes.toString(), - expectedObjects.stream().map(OssUtils::summaryToUri).collect(Collectors.toList()), - actualObjects.stream().map(OssUtils::summaryToUri).collect(Collectors.toList()) - ); - } - - /** - * Makes a mock OSS client that handles enough of "listObjects" to test the functionality of the - * {@link OssObjectSummaryIterator} class. - */ - private static OSS makeMockClient( - final List objects - ) - { - return new OSSClient("endpoint", "accessKey", "keySecret") - { - @Override - public ObjectListing listObjects(final ListObjectsRequest request) - { - // Continuation token is an index in the "objects" list.q - final String continuationToken = request.getMarker(); - final int startIndex = continuationToken == null ? 0 : Integer.parseInt(continuationToken); - - // Find matching objects. - final List summaries = new ArrayList<>(); - int nextIndex = -1; - - for (int i = startIndex; i < objects.size(); i++) { - final OSSObjectSummary summary = objects.get(i); - - if (summary.getBucketName().equals(request.getBucketName()) - && summary.getKey().startsWith(request.getPrefix())) { - - if (summaries.size() == request.getMaxKeys()) { - // We reached our max key limit; set nextIndex (which will lead to a result with truncated = true). 
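The truncation handling at this point in the mock mirrors real marker-based paging: once a page fills up, the listing reports truncated = true and the next marker tells the caller where to resume. The consuming side, which OssUtils.objectSummaryIterator has to implement, reduces to a fetch loop. A sketch using the same Aliyun SDK calls this file already exercises (listObjects, markers, truncation flags); the real iterator adds error handling around the SDK calls.

    import com.aliyun.oss.OSS;
    import com.aliyun.oss.model.ListObjectsRequest;
    import com.aliyun.oss.model.OSSObjectSummary;
    import com.aliyun.oss.model.ObjectListing;

    import java.util.ArrayList;
    import java.util.List;

    public class PagingSketch
    {
      // Drains a marker-paginated listing page by page. The mock in this test
      // stores an index in the marker; a real OSS endpoint returns the last
      // key of the page, but the loop shape is the same either way.
      static List<OSSObjectSummary> listAll(OSS client, String bucket, String prefix, int pageSize)
      {
        List<OSSObjectSummary> all = new ArrayList<>();
        ListObjectsRequest request = new ListObjectsRequest(bucket, prefix, null, null, pageSize);
        ObjectListing page;
        do {
          page = client.listObjects(request);
          all.addAll(page.getObjectSummaries());
          request.setMarker(page.getNextMarker()); // null once the listing completes
        } while (page.isTruncated());
        return all;
      }
    }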
- nextIndex = i; - break; - } - - // Generate a summary. - summaries.add(summary); - } - } - - // Generate the result. - final ObjectListing retVal = new ObjectListing(); - retVal.getObjectSummaries().addAll(summaries); - - if (nextIndex >= 0) { - retVal.setTruncated(true); - retVal.setNextMarker(String.valueOf(nextIndex)); - } - - return retVal; - } - }; - } - - private static OSSObjectSummary makeObjectSummary(final String bucket, final String key, final long size) - { - final OSSObjectSummary summary = new OSSObjectSummary(); - summary.setBucketName(bucket); - summary.setKey(key); - summary.setSize(size); - return summary; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTaskLogsTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTaskLogsTest.java deleted file mode 100644 index 1264a0fe9d7a..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTaskLogsTest.java +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.ClientException; -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.AccessControlList; -import com.aliyun.oss.model.DeleteObjectsRequest; -import com.aliyun.oss.model.Grant; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.Owner; -import com.aliyun.oss.model.PutObjectRequest; -import com.aliyun.oss.model.PutObjectResult; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.common.utils.CurrentTimeMillisSupplier; -import org.apache.druid.java.util.common.StringUtils; -import org.easymock.EasyMock; -import org.easymock.EasyMockRunner; -import org.easymock.EasyMockSupport; -import org.easymock.Mock; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -@RunWith(EasyMockRunner.class) -public class OssTaskLogsTest extends EasyMockSupport -{ - - private static final String KEY_1 = "key1"; - private static final String KEY_2 = "key2"; - private static final String TEST_BUCKET = "test_bucket"; - private static final String TEST_PREFIX = "test_prefix"; - private static final URI PREFIX_URI = URI.create(StringUtils.format("oss://%s/%s", TEST_BUCKET, TEST_PREFIX)); - private static final long TIME_0 = 0L; - private static final long TIME_1 = 1L; - private static final long TIME_NOW = 2L; - private static final long TIME_FUTURE = 3L; - private static final int MAX_KEYS = 1; - private static final Exception RECOVERABLE_EXCEPTION = new ClientException(new IOException()); - private static final Exception NON_RECOVERABLE_EXCEPTION = new ClientException(new NullPointerException()); - - @Mock - private CurrentTimeMillisSupplier timeSupplier; - @Mock - private OSS ossClient; - - @Rule - public final TemporaryFolder tempFolder = new TemporaryFolder(); - - @Test - public void testTaskLogsPushWithAclDisabled() throws Exception - { - String ownerId = "test_owner"; - String ownerDisplayName = "test_owner"; - - List grantList = testPushInternal(true, ownerId, ownerDisplayName); - - Assert.assertNotNull("Grant list should not be null", grantList); - Assert.assertEquals("Grant list should be empty as ACL is disabled", 0, grantList.size()); - } - - @Test - public void test_killAll_noException_deletesAllTaskLogs() throws IOException - { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - OSSObjectSummary objectSummary2 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_2, TIME_1); - - EasyMock.expect(timeSupplier.getAsLong()).andReturn(TIME_NOW); - - OssTestUtils.expectListObjects( - ossClient, - PREFIX_URI, - ImmutableList.of(objectSummary1, objectSummary2) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - DeleteObjectsRequest deleteRequest2 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest2.setKeys(Collections.singletonList(KEY_2)); - - OssTestUtils.mockClientDeleteObjects( - ossClient, - ImmutableList.of(deleteRequest1, deleteRequest2), - ImmutableMap.of() - ); - - EasyMock.replay(ossClient, timeSupplier); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setBucket(TEST_BUCKET); - config.setPrefix(TEST_PREFIX); - 
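These task-log kill tests pin down retention semantics rather than OSS plumbing: killAll deletes every log under the configured prefix, and killOlderThan(TIME_NOW), exercised further below, must delete the entry stamped TIME_0 while leaving TIME_FUTURE untouched. Stripped of the SDK, that reduces to a timestamp filter over object summaries. A hypothetical sketch follows; the Entry type is invented, and since the fixtures only fix TIME_0 < TIME_NOW < TIME_FUTURE, they cannot distinguish strict from inclusive comparison.

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class KillOlderThanSketch
    {
      // Invented stand-in for OSSObjectSummary: a key plus lastModified
      // millis, which is all the cutoff decision needs.
      static class Entry
      {
        final String key;
        final long lastModifiedMillis;

        Entry(String key, long lastModifiedMillis)
        {
          this.key = key;
          this.lastModifiedMillis = lastModifiedMillis;
        }
      }

      // Keep the keys whose objects were modified at or before the cutoff.
      static List<String> keysToDelete(List<Entry> entries, long cutoffMillis)
      {
        return entries.stream()
                      .filter(e -> e.lastModifiedMillis <= cutoffMillis)
                      .map(e -> e.key)
                      .collect(Collectors.toList());
      }

      public static void main(String[] args)
      {
        List<Entry> entries = Arrays.asList(new Entry("key1", 0L), new Entry("key2", 3L));
        System.out.println(keysToDelete(entries, 2L)); // [key1]
      }
    }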
OssInputDataConfig inputDataConfig = new OssInputDataConfig(); - inputDataConfig.setMaxListingLength(MAX_KEYS); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - taskLogs.killAll(); - - EasyMock.verify(ossClient, timeSupplier); - } - - @Test - public void test_killAll_recoverableExceptionWhenDeletingObjects_deletesAllTaskLogs() throws IOException - { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - - EasyMock.expect(timeSupplier.getAsLong()).andReturn(TIME_NOW); - - OssTestUtils.expectListObjects( - ossClient, - PREFIX_URI, - ImmutableList.of(objectSummary1) - ); - - DeleteObjectsRequest expectedRequest = new DeleteObjectsRequest(TEST_BUCKET); - expectedRequest.setKeys(Collections.singletonList(KEY_1)); - OssTestUtils.mockClientDeleteObjects( - ossClient, - ImmutableList.of(expectedRequest), - ImmutableMap.of(expectedRequest, RECOVERABLE_EXCEPTION) - ); - - EasyMock.replay(ossClient, timeSupplier); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setBucket(TEST_BUCKET); - config.setPrefix(TEST_PREFIX); - OssInputDataConfig inputDataConfig = new OssInputDataConfig(); - inputDataConfig.setMaxListingLength(MAX_KEYS); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - taskLogs.killAll(); - - EasyMock.verify(ossClient, timeSupplier); - } - - @Test - public void test_killAll_nonrecoverableExceptionWhenListingObjects_doesntDeleteAnyTaskLogs() - { - boolean ioExceptionThrown = false; - try { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - EasyMock.expect(timeSupplier.getAsLong()).andReturn(TIME_NOW); - OssTestUtils.expectListObjects( - ossClient, - PREFIX_URI, - ImmutableList.of(objectSummary1) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - OssTestUtils.mockClientDeleteObjects( - ossClient, - ImmutableList.of(), - ImmutableMap.of(deleteRequest1, NON_RECOVERABLE_EXCEPTION) - ); - - EasyMock.replay(ossClient, timeSupplier); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setBucket(TEST_BUCKET); - config.setPrefix(TEST_PREFIX); - OssInputDataConfig inputDataConfig = new OssInputDataConfig(); - inputDataConfig.setMaxListingLength(MAX_KEYS); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - taskLogs.killAll(); - } - catch (IOException e) { - ioExceptionThrown = true; - } - - Assert.assertTrue(ioExceptionThrown); - - EasyMock.verify(ossClient, timeSupplier); - } - - @Test - public void test_killOlderThan_noException_deletesOnlyTaskLogsOlderThan() throws IOException - { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - OSSObjectSummary objectSummary2 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_2, TIME_FUTURE); - - OssTestUtils.expectListObjects( - ossClient, - PREFIX_URI, - ImmutableList.of(objectSummary1, objectSummary2) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - - OssTestUtils.mockClientDeleteObjects(ossClient, ImmutableList.of(deleteRequest1), ImmutableMap.of()); - - EasyMock.replay(ossClient, timeSupplier); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setBucket(TEST_BUCKET); - config.setPrefix(TEST_PREFIX); - OssInputDataConfig inputDataConfig = new 
OssInputDataConfig(); - inputDataConfig.setMaxListingLength(MAX_KEYS); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - taskLogs.killOlderThan(TIME_NOW); - - EasyMock.verify(ossClient, timeSupplier); - } - - @Test - public void test_killOlderThan_recoverableExceptionWhenListingObjects_deletesAllTaskLogs() throws IOException - { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - - OssTestUtils.expectListObjects( - ossClient, - PREFIX_URI, - ImmutableList.of(objectSummary1) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - - OssTestUtils.mockClientDeleteObjects( - ossClient, - ImmutableList.of(deleteRequest1), - ImmutableMap.of(deleteRequest1, RECOVERABLE_EXCEPTION) - ); - - EasyMock.replay(ossClient, timeSupplier); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setBucket(TEST_BUCKET); - config.setPrefix(TEST_PREFIX); - OssInputDataConfig inputDataConfig = new OssInputDataConfig(); - inputDataConfig.setMaxListingLength(MAX_KEYS); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - taskLogs.killOlderThan(TIME_NOW); - - EasyMock.verify(ossClient, timeSupplier); - } - - @Test - public void test_killOlderThan_nonrecoverableExceptionWhenListingObjects_doesntDeleteAnyTaskLogs() - { - boolean ioExceptionThrown = false; - try { - OSSObjectSummary objectSummary1 = OssTestUtils.newOSSObjectSummary(TEST_BUCKET, KEY_1, TIME_0); - OssTestUtils.expectListObjects( - ossClient, - PREFIX_URI, - ImmutableList.of(objectSummary1) - ); - - DeleteObjectsRequest deleteRequest1 = new DeleteObjectsRequest(TEST_BUCKET); - deleteRequest1.setKeys(Collections.singletonList(KEY_1)); - OssTestUtils.mockClientDeleteObjects( - ossClient, - ImmutableList.of(), - ImmutableMap.of(deleteRequest1, NON_RECOVERABLE_EXCEPTION) - ); - - EasyMock.replay(ossClient, timeSupplier); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setBucket(TEST_BUCKET); - config.setPrefix(TEST_PREFIX); - OssInputDataConfig inputDataConfig = new OssInputDataConfig(); - inputDataConfig.setMaxListingLength(MAX_KEYS); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - taskLogs.killOlderThan(TIME_NOW); - } - catch (IOException e) { - ioExceptionThrown = true; - } - - Assert.assertTrue(ioExceptionThrown); - - EasyMock.verify(ossClient, timeSupplier); - } - - private List testPushInternal(boolean disableAcl, String ownerId, String ownerDisplayName) throws Exception - { - EasyMock.expect(ossClient.putObject(EasyMock.anyObject())) - .andReturn(new PutObjectResult()) - .once(); - - AccessControlList aclExpected = new AccessControlList(); - aclExpected.setOwner(new Owner(ownerId, ownerDisplayName)); - - EasyMock.expect(ossClient.getBucketAcl(TEST_BUCKET)) - .andReturn(aclExpected) - .once(); - - EasyMock.expect(ossClient.putObject(EasyMock.anyObject(PutObjectRequest.class))) - .andReturn(new PutObjectResult()) - .once(); - - EasyMock.replay(ossClient); - - OssTaskLogsConfig config = new OssTaskLogsConfig(); - config.setDisableAcl(disableAcl); - config.setBucket(TEST_BUCKET); - CurrentTimeMillisSupplier timeSupplier = new CurrentTimeMillisSupplier(); - OssInputDataConfig inputDataConfig = new OssInputDataConfig(); - OssTaskLogs taskLogs = new OssTaskLogs(ossClient, config, inputDataConfig, timeSupplier); - - String taskId = 
"index_test-datasource_2019-06-18T13:30:28.887Z"; - File logFile = tempFolder.newFile("test_log_file"); - - taskLogs.pushTaskLog(taskId, logFile); - - return new ArrayList<>(aclExpected.getGrants()); - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTestUtils.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTestUtils.java deleted file mode 100644 index 35ef96663e07..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTestUtils.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.DeleteObjectsRequest; -import com.aliyun.oss.model.DeleteObjectsResult; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.StringUtils; -import org.easymock.EasyMock; -import org.easymock.EasyMockSupport; -import org.easymock.IArgumentMatcher; -import org.easymock.IExpectationSetters; -import org.joda.time.DateTime; - -import java.net.URI; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class OssTestUtils extends EasyMockSupport -{ - private static final DateTime NOW = DateTimes.nowUtc(); - private static final byte[] CONTENT = - StringUtils.toUtf8(StringUtils.format("%d,hello,world", NOW.getMillis())); - - public static DeleteObjectsRequest deleteObjectsRequestArgumentMatcher(DeleteObjectsRequest deleteObjectsRequest) - { - EasyMock.reportMatcher(new IArgumentMatcher() - { - @Override - public boolean matches(Object argument) - { - - boolean matches = argument instanceof DeleteObjectsRequest - && deleteObjectsRequest.getBucketName() - .equals(((DeleteObjectsRequest) argument).getBucketName()) - && deleteObjectsRequest.getKeys().size() == ((DeleteObjectsRequest) argument).getKeys() - .size(); - if (matches) { - List expectedKeysAndVersions = deleteObjectsRequest.getKeys(); - List actualKeysAndVersions = ((DeleteObjectsRequest) argument).getKeys(); - matches = expectedKeysAndVersions.equals(actualKeysAndVersions); - } - return matches; - } - - @Override - public void appendTo(StringBuffer buffer) - { - String str = "DeleteObjectsRequest(\"bucketName:\" \"" - + deleteObjectsRequest.getBucketName() - + "\", \"keys:\"" - + deleteObjectsRequest.getKeys() - + "\")"; - buffer.append(str); - } - }); - return null; - } - - public static void expectListObjects( - OSS client, - URI prefix, - List objectSummaries - ) - { - final 
ObjectListing result = new ObjectListing(); - result.setBucketName(prefix.getAuthority()); - //result.setsetKeyCount(objectSummaries.size()); - for (OSSObjectSummary objectSummary : objectSummaries) { - result.getObjectSummaries().add(objectSummary); - } - - EasyMock.expect( - client.listObjects(matchListObjectsRequest(prefix)) - ).andReturn(result).once(); - } - - public static void mockClientDeleteObjects( - OSS client, - List deleteRequestsExpected, - Map requestToException - ) - { - Map> requestToResultExpectationSetter = new HashMap<>(); - - for (Map.Entry requestsAndErrors : requestToException.entrySet()) { - DeleteObjectsRequest request = requestsAndErrors.getKey(); - Exception exception = requestsAndErrors.getValue(); - IExpectationSetters resultExpectationSetter = requestToResultExpectationSetter.get(request); - if (resultExpectationSetter == null) { - client.deleteObjects( - OssTestUtils.deleteObjectsRequestArgumentMatcher(request)); - resultExpectationSetter = EasyMock.expectLastCall().andThrow(exception); - requestToResultExpectationSetter.put(request, resultExpectationSetter); - } else { - resultExpectationSetter.andThrow(exception); - } - } - - for (DeleteObjectsRequest request : deleteRequestsExpected) { - IExpectationSetters resultExpectationSetter = requestToResultExpectationSetter.get(request); - if (resultExpectationSetter == null) { - client.deleteObjects(OssTestUtils.deleteObjectsRequestArgumentMatcher(request)); - resultExpectationSetter = EasyMock.expectLastCall(); - requestToResultExpectationSetter.put(request, resultExpectationSetter); - } - resultExpectationSetter.andReturn(new DeleteObjectsResult()); - } - } - - public static ListObjectsRequest matchListObjectsRequest(final URI prefixUri) - { - // Use an IArgumentMatcher to verify that the request has the correct bucket and prefix. - EasyMock.reportMatcher( - new IArgumentMatcher() - { - @Override - public boolean matches(Object argument) - { - if (!(argument instanceof ListObjectsRequest)) { - return false; - } - - final ListObjectsRequest request = (ListObjectsRequest) argument; - return prefixUri.getAuthority().equals(request.getBucketName()) - && OssUtils.extractKey(prefixUri).equals(request.getPrefix()); - } - - @Override - public void appendTo(StringBuffer buffer) - { - buffer.append(""); - } - } - ); - - return null; - } - - public static OSSObjectSummary newOSSObjectSummary( - String bucket, - String key, - long lastModifiedTimestamp - ) - { - OSSObjectSummary objectSummary = new OSSObjectSummary(); - objectSummary.setBucketName(bucket); - objectSummary.setKey(key); - objectSummary.setLastModified(new Date(lastModifiedTimestamp)); - objectSummary.setETag("etag"); - objectSummary.setSize(CONTENT.length); - return objectSummary; - } -} diff --git a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinderTest.java b/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinderTest.java deleted file mode 100644 index 8443d2f4abc6..000000000000 --- a/extensions-contrib/aliyun-oss-extensions/src/test/java/org/apache/druid/storage/aliyun/OssTimestampVersionedDataFinderTest.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
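OssTestUtils wires custom matchers through EasyMock.reportMatcher, so the null returned by matchListObjectsRequest and deleteObjectsRequestArgumentMatcher is only a placeholder for the argument position; the registered IArgumentMatcher does the actual comparison. A short usage sketch (hypothetical test body, assuming OssUtils.extractKey maps oss://bucket/prefix to the key prefix "prefix"):

    OSS client = EasyMock.createMock(OSS.class);
    // The matcher, not the null return value, decides whether this
    // expectation applies to a given listObjects() call.
    EasyMock.expect(client.listObjects(OssTestUtils.matchListObjectsRequest(URI.create("oss://bucket/prefix"))))
            .andReturn(new ObjectListing());
    EasyMock.replay(client);

    ListObjectsRequest req = new ListObjectsRequest("bucket");
    req.setPrefix("prefix");
    client.listObjects(req); // matches: bucket and prefix line up with the URI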
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.aliyun; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.ListObjectsRequest; -import com.aliyun.oss.model.OSSObjectSummary; -import com.aliyun.oss.model.ObjectListing; -import org.apache.druid.java.util.common.StringUtils; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Test; - -import java.net.URI; -import java.util.Date; -import java.util.regex.Pattern; - -public class OssTimestampVersionedDataFinderTest -{ - - @Test - public void testSimpleLatestVersion() - { - String bucket = "bucket"; - String keyPrefix = "prefix/dir/0"; - OSS client = EasyMock.createStrictMock(OSS.class); - - OSSObjectSummary object0 = new OSSObjectSummary(), object1 = new OSSObjectSummary(); - - object0.setBucketName(bucket); - object0.setKey(keyPrefix + "/renames-0.gz"); - object0.setLastModified(new Date(0)); - object0.setSize(10); - - object1.setBucketName(bucket); - object1.setKey(keyPrefix + "/renames-1.gz"); - object1.setLastModified(new Date(1)); - object1.setSize(10); - - final ObjectListing result = new ObjectListing(); - result.getObjectSummaries().add(object0); - result.getObjectSummaries().add(object1); - result.setTruncated(false); - - EasyMock.expect(client.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(result) - .once(); - OssTimestampVersionedDataFinder finder = new OssTimestampVersionedDataFinder(client); - - Pattern pattern = Pattern.compile("renames-[0-9]*\\.gz"); - - EasyMock.replay(client); - - - URI latest = finder.getLatestVersion(URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, keyPrefix)), pattern); - - EasyMock.verify(client); - - URI expected = URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, object1.getKey())); - - Assert.assertEquals(expected, latest); - } - - @Test - public void testMissing() - { - String bucket = "bucket"; - String keyPrefix = "prefix/dir/0"; - OSS oss = EasyMock.createStrictMock(OSS.class); - - final ObjectListing result = new ObjectListing(); - result.setTruncated(false); - - EasyMock.expect(oss.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(result) - .once(); - OssTimestampVersionedDataFinder finder = new OssTimestampVersionedDataFinder(oss); - - Pattern pattern = Pattern.compile("renames-[0-9]*\\.gz"); - - EasyMock.replay(oss); - - - URI latest = finder.getLatestVersion(URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, keyPrefix)), pattern); - - EasyMock.verify(oss); - - Assert.assertEquals(null, latest); - } - - @Test - public void testFindSelf() - { - String bucket = "bucket"; - String keyPrefix = "prefix/dir/0"; - OSS ossClient = EasyMock.createStrictMock(OSS.class); - - OSSObjectSummary object0 = new OSSObjectSummary(); - - object0.setBucketName(bucket); - object0.setKey(keyPrefix + "/renames-0.gz"); - object0.setLastModified(new Date(0)); - 
object0.setSize(10); - - final ObjectListing result = new ObjectListing(); - result.getObjectSummaries().add(object0); - result.setTruncated(false); - - EasyMock.expect(ossClient.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(result) - .once(); - OssTimestampVersionedDataFinder finder = new OssTimestampVersionedDataFinder(ossClient); - - Pattern pattern = Pattern.compile("renames-[0-9]*\\.gz"); - - EasyMock.replay(ossClient); - - - URI latest = finder.getLatestVersion(URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, keyPrefix)), pattern); - - EasyMock.verify(ossClient); - - URI expected = URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, object0.getKey())); - - Assert.assertEquals(expected, latest); - } - - @Test - public void testFindExact() - { - String bucket = "bucket"; - String keyPrefix = "prefix/dir/0"; - OSS ossClient = EasyMock.createStrictMock(OSS.class); - - OSSObjectSummary object0 = new OSSObjectSummary(); - - object0.setBucketName(bucket); - object0.setKey(keyPrefix + "/renames-0.gz"); - object0.setLastModified(new Date(0)); - object0.setSize(10); - - final ObjectListing result = new ObjectListing(); - result.getObjectSummaries().add(object0); - result.setTruncated(false); - - EasyMock.expect(ossClient.listObjects(EasyMock.anyObject(ListObjectsRequest.class))) - .andReturn(result) - .once(); - OssTimestampVersionedDataFinder finder = new OssTimestampVersionedDataFinder(ossClient); - - EasyMock.replay(ossClient); - - URI latest = finder.getLatestVersion(URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, object0.getKey())), null); - - EasyMock.verify(ossClient); - - URI expected = URI.create(StringUtils.format("%s://%s/%s", OssStorageDruidModule.SCHEME, bucket, object0.getKey())); - - Assert.assertEquals(expected, latest); - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/pom.xml b/extensions-contrib/ambari-metrics-emitter/pom.xml deleted file mode 100644 index 316b59f2d145..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/pom.xml +++ /dev/null @@ -1,169 +0,0 @@ - - - - - 4.0.0 - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - org.apache.druid.extensions.contrib - ambari-metrics-emitter - ambari-metrics-emitter - Druid emitter extension to support ambari metrics server - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - test-jar - test - - - org.apache.ambari - ambari-metrics-common - 2.4.1.0.22 - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - - - - jdk.tools - jdk.tools - - - - - - org.apache.hadoop - hadoop-annotations - ${hadoop.compile.version} - - - commons-io - commons-io - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - joda-time - joda-time - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.google.guava - guava - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - test - - - junit - junit - test - - - org.easymock - easymock - test - - - pl.pragmatists - JUnitParams - test - - - org.codehaus.jackson - jackson-core-asl - ${codehaus.jackson.version} - test - - - org.codehaus.jackson - jackson-mapper-asl - ${codehaus.jackson.version} - test - - - - - - - 
org.apache.maven.plugins - maven-dependency-plugin - - - - org.apache.hadoop:hadoop-annotations - - - - - - - - - hortonworks - hortonworks - https://repo.hortonworks.com/content/repositories/releases - - - diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java deleted file mode 100644 index d1ff8e362473..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.AlertEvent; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; - -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.regex.Pattern; - - -public class AmbariMetricsEmitter extends AbstractTimelineMetricsSink implements Emitter -{ - private static final Logger log = new Logger(AmbariMetricsEmitter.class); - - private final DruidToTimelineMetricConverter timelineMetricConverter; - private final List emitterList; - private final AtomicBoolean started = new AtomicBoolean(false); - private final LinkedBlockingQueue eventsQueue; - private final AmbariMetricsEmitterConfig config; - private final String collectorURI; - private static final long DEFAULT_FLUSH_TIMEOUT_MILLIS = 60000; // default flush wait 1 min - private static final Pattern DOT_OR_WHITESPACE_PATTERN = Pattern.compile("[\\s]+|[.]+"); - private final ScheduledExecutorService exec = Executors.newScheduledThreadPool( - 2, // Thread pool of two in order to schedule flush runnable - new 
ThreadFactoryBuilder().setDaemon(true).setNameFormat("AmbariMetricsEmitter-%s").build() - ); - private final AtomicLong countLostEvents = new AtomicLong(0); - - public AmbariMetricsEmitter( - AmbariMetricsEmitterConfig config, - List emitterList - ) - { - this.config = config; - this.emitterList = emitterList; - this.timelineMetricConverter = config.getDruidToTimelineEventConverter(); - this.eventsQueue = new LinkedBlockingQueue<>(config.getMaxQueueSize()); - this.collectorURI = StringUtils.format( - "%s://%s:%s%s", - config.getProtocol(), - config.getHostname(), - config.getPort(), - WS_V1_TIMELINE_METRICS - ); - } - - @Override - public void start() - { - synchronized (started) { - log.info("Starting Ambari Metrics Emitter."); - if (!started.get()) { - if ("https".equals(config.getProtocol())) { - loadTruststore(config.getTrustStorePath(), config.getTrustStoreType(), config.getTrustStorePassword()); - } - exec.scheduleAtFixedRate( - new ConsumerRunnable(), - config.getFlushPeriod(), - config.getFlushPeriod(), - TimeUnit.MILLISECONDS - ); - started.set(true); - } - } - } - - - @Override - public void emit(Event event) - { - if (!started.get()) { - throw new ISE("WTF emit was called while service is not started yet"); - } - if (event instanceof ServiceMetricEvent) { - final TimelineMetric timelineEvent = timelineMetricConverter.druidEventToTimelineMetric((ServiceMetricEvent) event); - if (timelineEvent == null) { - return; - } - try { - final boolean isSuccessful = eventsQueue.offer( - timelineEvent, - config.getEmitWaitTime(), - TimeUnit.MILLISECONDS - ); - if (!isSuccessful) { - if (countLostEvents.getAndIncrement() % 1000 == 0) { - log.error( - "Lost total of [%s] events because of emitter queue is full. Please increase the capacity or/and the consumer frequency", - countLostEvents.get() - ); - } - } - } - catch (InterruptedException e) { - log.error(e, "got interrupted with message [%s]", e.getMessage()); - Thread.currentThread().interrupt(); - } - } else if (event instanceof AlertEvent) { - for (Emitter emitter : emitterList) { - emitter.emit(event); - } - } else { - throw new ISE("unknown event type [%s]", event.getClass()); - } - } - - @Override - protected String getCollectorUri() - { - return collectorURI; - } - - @Override - protected int getTimeoutSeconds() - { - return (int) (DEFAULT_FLUSH_TIMEOUT_MILLIS / 1000); - } - - private class ConsumerRunnable implements Runnable - { - @Override - public void run() - { - try { - int batchSize = config.getBatchSize(); - TimelineMetrics metrics = new TimelineMetrics(); - while (eventsQueue.size() > 0 && !exec.isShutdown()) { - try { - final TimelineMetric metricEvent = eventsQueue.poll( - config.getWaitForEventTime(), - TimeUnit.MILLISECONDS - ); - if (metricEvent != null) { - metrics.addOrMergeTimelineMetric(metricEvent); - if (metrics.getMetrics().size() == batchSize) { - emitMetrics(metrics); - log.debug( - "sent [%d] events", - metrics.getMetrics().size() - ); - metrics = new TimelineMetrics(); - } - } - } - catch (InterruptedException e) { - log.error(e, e.getMessage()); - Thread.currentThread().interrupt(); - } - } - if (metrics.getMetrics().size() > 0) { - emitMetrics(metrics); - log.debug( - "sent [%d] events", - metrics.getMetrics().size() - ); - } - } - catch (Exception e) { - log.error(e, e.getMessage()); - } - - } - } - - @Override - public void flush() - { - synchronized (started) { - if (started.get()) { - Future future = exec.schedule(new ConsumerRunnable(), 0, TimeUnit.MILLISECONDS); - try { - 
future.get(DEFAULT_FLUSH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS); - } - catch (InterruptedException | ExecutionException | TimeoutException e) { - if (e instanceof InterruptedException) { - throw new RuntimeException("interrupted flushing elements from queue", e); - } - } - } - } - } - - @Override - public void close() - { - synchronized (started) { - flush(); - exec.shutdown(); - started.set(false); - } - } - - protected static String sanitize(String namespace) - { - return DOT_OR_WHITESPACE_PATTERN.matcher(namespace).replaceAll("_"); - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java deleted file mode 100644 index 69c6999b2c9f..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfig.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; - -public class AmbariMetricsEmitterConfig -{ - private static final int DEFAULT_BATCH_SIZE = 100; - private static final long DEFAULT_FLUSH_PERIOD_MILLIS = TimeUnit.MINUTES.toMillis(1); // flush every one minute - private static final long DEFAULT_GET_TIMEOUT_MILLIS = TimeUnit.SECONDS.toMillis(1); // default wait for get operations on the queue 1 sec - private static final String DEFAULT_PROTOCOL = "http"; - - @JsonProperty - private final String hostname; - - @JsonProperty - private final int port; - - @JsonProperty - private final String protocol; - - @JsonProperty - private final String trustStorePath; - - @JsonProperty - private final String trustStoreType; - - @JsonProperty - private final String trustStorePassword; - - @JsonProperty - private final int batchSize; - - @JsonProperty - private final long flushPeriod; - - @JsonProperty - private final int maxQueueSize; - - @JsonProperty("eventConverter") - private final DruidToTimelineMetricConverter druidToTimelineEventConverter; - - @JsonProperty - private final List alertEmitters; - - @JsonProperty - private final long emitWaitTime; - - //waiting up to the specified wait time if necessary for an event to become available. 
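The emit() path above hands events to a bounded LinkedBlockingQueue with a timed offer and counts overflow instead of blocking the emitting thread. A stand-alone sketch of that handoff pattern; the class, names, and logging sink are illustrative, not the emitter's own fields:

    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicLong;

    class BoundedHandoffSketch
    {
      private final LinkedBlockingQueue<Object> queue = new LinkedBlockingQueue<>(1000);
      private final AtomicLong lost = new AtomicLong(0);

      boolean offer(Object event, long waitMillis) throws InterruptedException
      {
        if (queue.offer(event, waitMillis, TimeUnit.MILLISECONDS)) {
          return true;
        }
        // Rate-limit the noise: log only every 1000th dropped event,
        // mirroring the countLostEvents check in emit().
        if (lost.getAndIncrement() % 1000 == 0) {
          System.err.println("lost " + lost.get() + " events so far");
        }
        return false;
      }
    }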
- @JsonProperty - private final long waitForEventTime; - - @JsonCreator - public AmbariMetricsEmitterConfig( - @JsonProperty("hostname") String hostname, - @JsonProperty("port") Integer port, - @JsonProperty("protocol") String protocol, - @JsonProperty("trustStorePath") String trustStorePath, - @JsonProperty("trustStoreType") String trustStoreType, - @JsonProperty("trustStorePassword") String trustStorePassword, - @JsonProperty("batchSize") Integer batchSize, - @JsonProperty("flushPeriod") Long flushPeriod, - @JsonProperty("maxQueueSize") Integer maxQueueSize, - @JsonProperty("eventConverter") DruidToTimelineMetricConverter druidToTimelineEventConverter, - @JsonProperty("alertEmitters") List alertEmitters, - @JsonProperty("emitWaitTime") Long emitWaitTime, - @JsonProperty("waitForEventTime") Long waitForEventTime - ) - { - this.hostname = Preconditions.checkNotNull(hostname, "hostname can not be null"); - this.port = Preconditions.checkNotNull(port, "port can not be null"); - this.protocol = protocol == null ? DEFAULT_PROTOCOL : protocol; - this.trustStorePath = trustStorePath; - this.trustStoreType = trustStoreType; - this.trustStorePassword = trustStorePassword; - this.batchSize = (batchSize == null) ? DEFAULT_BATCH_SIZE : batchSize; - this.flushPeriod = flushPeriod == null ? DEFAULT_FLUSH_PERIOD_MILLIS : flushPeriod; - this.maxQueueSize = maxQueueSize == null ? Integer.MAX_VALUE : maxQueueSize; - this.druidToTimelineEventConverter = Preconditions.checkNotNull( - druidToTimelineEventConverter, - "Event converter can not be null" - ); - this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; - this.emitWaitTime = emitWaitTime == null ? 0 : emitWaitTime; - this.waitForEventTime = waitForEventTime == null ? DEFAULT_GET_TIMEOUT_MILLIS : waitForEventTime; - } - - @JsonProperty - public String getHostname() - { - return hostname; - } - - @JsonProperty - public int getPort() - { - return port; - } - - @JsonProperty - public String getProtocol() - { - return protocol; - } - - @JsonProperty - public String getTrustStorePath() - { - return trustStorePath; - } - - @JsonProperty - public String getTrustStoreType() - { - return trustStoreType; - } - - @JsonProperty - public String getTrustStorePassword() - { - return trustStorePassword; - } - - @JsonProperty - public int getBatchSize() - { - return batchSize; - } - - @JsonProperty - public int getMaxQueueSize() - { - return maxQueueSize; - } - - @JsonProperty - public long getFlushPeriod() - { - return flushPeriod; - } - - @JsonProperty - public DruidToTimelineMetricConverter getDruidToTimelineEventConverter() - { - return druidToTimelineEventConverter; - } - - @JsonProperty - public List getAlertEmitters() - { - return alertEmitters; - } - - @JsonProperty - public long getEmitWaitTime() - { - return emitWaitTime; - } - - @JsonProperty - public long getWaitForEventTime() - { - return waitForEventTime; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - AmbariMetricsEmitterConfig that = (AmbariMetricsEmitterConfig) o; - - if (port != that.port) { - return false; - } - if (batchSize != that.batchSize) { - return false; - } - if (flushPeriod != that.flushPeriod) { - return false; - } - if (maxQueueSize != that.maxQueueSize) { - return false; - } - if (emitWaitTime != that.emitWaitTime) { - return false; - } - if (waitForEventTime != that.waitForEventTime) { - return false; - } - if 
(!hostname.equals(that.hostname)) { - return false; - } - if (!protocol.equals(that.protocol)) { - return false; - } - if (trustStorePath != null ? !trustStorePath.equals(that.trustStorePath) : that.trustStorePath != null) { - return false; - } - if (trustStoreType != null ? !trustStoreType.equals(that.trustStoreType) : that.trustStoreType != null) { - return false; - } - if (trustStorePassword != null - ? !trustStorePassword.equals(that.trustStorePassword) - : that.trustStorePassword != null) { - return false; - } - if (!druidToTimelineEventConverter.equals(that.druidToTimelineEventConverter)) { - return false; - } - return alertEmitters.equals(that.alertEmitters); - - } - - @Override - public int hashCode() - { - int result = hostname.hashCode(); - result = 31 * result + port; - result = 31 * result + protocol.hashCode(); - result = 31 * result + (trustStorePath != null ? trustStorePath.hashCode() : 0); - result = 31 * result + (trustStoreType != null ? trustStoreType.hashCode() : 0); - result = 31 * result + (trustStorePassword != null ? trustStorePassword.hashCode() : 0); - result = 31 * result + batchSize; - result = 31 * result + (int) (flushPeriod ^ (flushPeriod >>> 32)); - result = 31 * result + maxQueueSize; - result = 31 * result + druidToTimelineEventConverter.hashCode(); - result = 31 * result + alertEmitters.hashCode(); - result = 31 * result + (int) (emitWaitTime ^ (emitWaitTime >>> 32)); - result = 31 * result + (int) (waitForEventTime ^ (waitForEventTime >>> 32)); - return result; - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java deleted file mode 100644 index bdb3bf17195a..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
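Since most AmbariMetricsEmitterConfig properties are optional, only hostname, port, and the event converter must be supplied; the constructor shown earlier backfills everything else from the class constants. An illustrative construction (host and port are placeholders) annotated with the defaults it picks up:

    AmbariMetricsEmitterConfig config = new AmbariMetricsEmitterConfig(
        "ams-collector.example.com",  // hostname (required)
        6188,                         // port (required; value here is a placeholder)
        null,                         // protocol         -> "http"
        null, null, null,             // truststore path/type/password, only read for https
        null,                         // batchSize        -> 100
        null,                         // flushPeriod      -> 60_000 ms
        null,                         // maxQueueSize     -> Integer.MAX_VALUE
        new SendAllTimelineEventConverter(null, null), // appName -> "druid"
        null,                         // alertEmitters    -> empty list
        null,                         // emitWaitTime     -> 0 ms
        null                          // waitForEventTime -> 1_000 ms
    );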
- */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.fasterxml.jackson.databind.Module; -import com.google.inject.Binder; -import com.google.inject.Injector; -import com.google.inject.Key; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import com.google.inject.name.Names; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -public class AmbariMetricsEmitterModule implements DruidModule -{ - private static final String EMITTER_TYPE = "ambari-metrics"; - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." + EMITTER_TYPE, AmbariMetricsEmitterConfig.class); - } - - @Provides - @ManageLifecycle - @Named(EMITTER_TYPE) - public Emitter getEmitter(AmbariMetricsEmitterConfig emitterConfig, final Injector injector) - { - List emitters = emitterConfig - .getAlertEmitters() - .stream() - .map((String name) -> injector.getInstance(Key.get(Emitter.class, Names.named(name)))) - .collect(Collectors.toList()); - return new AmbariMetricsEmitter(emitterConfig, emitters); - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/DruidToTimelineMetricConverter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/DruidToTimelineMetricConverter.java deleted file mode 100644 index 10183e51370a..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/DruidToTimelineMetricConverter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; - - -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WhiteListBasedDruidToTimelineEventConverter.class) -@JsonSubTypes(value = { - @JsonSubTypes.Type(name = "all", value = SendAllTimelineEventConverter.class), - @JsonSubTypes.Type(name = "whiteList", value = WhiteListBasedDruidToTimelineEventConverter.class) -}) -public interface DruidToTimelineMetricConverter -{ - /** - * This function acts as a filter. 
It returns null if the event is not suppose to be emitted to Ambari Server - * Also This function will define the mapping between the druid event dimension's values and Ambari Metric Name - * - * @param serviceMetricEvent Druid event ot type {@link ServiceMetricEvent} - * - * @return {@link TimelineMetric} or null - */ - TimelineMetric druidEventToTimelineMetric(ServiceMetricEvent serviceMetricEvent); -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/SendAllTimelineEventConverter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/SendAllTimelineEventConverter.java deleted file mode 100644 index 6467418468b0..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/SendAllTimelineEventConverter.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.google.common.base.Joiner; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSortedSet; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; - -/** - * Emits all the events instance of {@link ServiceMetricEvent}. - *
- * <p>
- * All the dimensions will be retained and ordered lexicographically by dimension name.
- * <p>
- * The metric name of the timeline event is:
- * <namespacePrefix>.<service name>.<dimension values, ordered by dimension name>.<metric name>
- * <p>
- * Note that this name will be sanitized by replacing all the `.` or `space` to `_` {@link AmbariMetricsEmitter#sanitize(String)} - */ - -@JsonTypeName("all") -public class SendAllTimelineEventConverter implements DruidToTimelineMetricConverter -{ - public static final String DEFAULT_APP_NAME = "druid"; - - @JsonProperty - private final String namespacePrefix; - - @JsonProperty - private final String appName; - - @JsonProperty - public String getNamespacePrefix() - { - return namespacePrefix; - } - - @JsonCreator - public SendAllTimelineEventConverter( - @JsonProperty("namespacePrefix") String namespacePrefix, - @JsonProperty("appName") String appName - ) - { - this.namespacePrefix = namespacePrefix; - this.appName = appName == null ? DEFAULT_APP_NAME : appName; - } - - @Override - public TimelineMetric druidEventToTimelineMetric(ServiceMetricEvent serviceMetricEvent) - { - ImmutableList.Builder metricNameBuilder = new ImmutableList.Builder<>(); - if (!Strings.isNullOrEmpty(namespacePrefix)) { - metricNameBuilder.add(namespacePrefix); - } - metricNameBuilder.add(AmbariMetricsEmitter.sanitize(serviceMetricEvent.getService())); - ImmutableSortedSet dimNames = ImmutableSortedSet.copyOf(serviceMetricEvent.getUserDims().keySet()); - for (String dimName : dimNames) { - metricNameBuilder.add( - AmbariMetricsEmitter.sanitize( - String.valueOf( - serviceMetricEvent.getUserDims().get(dimName) - ) - ) - ); - } - metricNameBuilder.add(AmbariMetricsEmitter.sanitize(serviceMetricEvent.getMetric())); - - TimelineMetric metric = new TimelineMetric(); - metric.setMetricName(Joiner.on(".").join(metricNameBuilder.build())); - metric.setAppId(appName); - metric.setHostName(serviceMetricEvent.getHost()); - metric.setType(serviceMetricEvent.getFeed()); - metric.setInstanceId(serviceMetricEvent.getService()); - long ts = serviceMetricEvent.getCreatedTime().getMillis(); - metric.setStartTime(ts); - metric.setTimestamp(ts); - metric.getMetricValues().put(ts, serviceMetricEvent.getValue().doubleValue()); - return metric; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - SendAllTimelineEventConverter that = (SendAllTimelineEventConverter) o; - - if (namespacePrefix != null ? !namespacePrefix.equals(that.namespacePrefix) : that.namespacePrefix != null) { - return false; - } - return appName.equals(that.appName); - - } - - @Override - public int hashCode() - { - int result = namespacePrefix != null ? namespacePrefix.hashCode() : 0; - result = 31 * result + appName.hashCode(); - return result; - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java deleted file mode 100644 index aa0def755c1c..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverter.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
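A worked example may help; the values below are hypothetical:

    // namespacePrefix = "druid", service = "historical", metric = "query/time",
    // user dims = {dataSource=wiki, type=groupBy}. Dimension names are sorted,
    // their values appended in that order, and every component is sanitized,
    // so the emitted timeline metric name is
    //     druid.historical.wiki.groupBy.query/time
    // sanitize() only rewrites '.' runs and whitespace to '_', which is why
    // the '/' in "query/time" survives while "host name" becomes "host_name".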
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSortedMap; -import com.google.common.io.Files; -import com.google.common.io.Resources; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.SortedMap; - -@JsonTypeName("whiteList") -public class WhiteListBasedDruidToTimelineEventConverter implements DruidToTimelineMetricConverter -{ - private static final Logger LOGGER = new Logger(WhiteListBasedDruidToTimelineEventConverter.class); - /** - * @code whiteListDimsMapper is a white list of metric->dimensions mappings. - * Key is the druid metric name or the metric's prefix. - * Value is a list of metric's dimensions names. - * The order of the dimension name is important, it will be used to build the ambari metrics name. - * For instance if we have dimensions dataSource and queryType for metric query/time - * the final metric name will be -> prefix.dataSource.queryType.metricName - * Note that this name will be sanitized by replacing all the `.` or `space` to `_` {@link AmbariMetricsEmitter#sanitize(String)} - */ - private final ImmutableSortedMap> whiteListDimsMapper; - - @JsonProperty - private final String namespacePrefix; - - @JsonProperty - private final String appName; - - @JsonProperty - private final String mapPath; - - private final ObjectMapper mapper; - - @JsonCreator - public WhiteListBasedDruidToTimelineEventConverter( - @JsonProperty("namespacePrefix") String namespacePrefix, - @JsonProperty("appName") String appName, - @JsonProperty("mapPath") String mapPath, - @JacksonInject ObjectMapper mapper - ) - { - this.mapper = Preconditions.checkNotNull(mapper); - this.mapPath = mapPath; - this.whiteListDimsMapper = readMap(this.mapPath); - this.namespacePrefix = namespacePrefix; - this.appName = appName == null ? 
SendAllTimelineEventConverter.DEFAULT_APP_NAME : appName; - - } - - @JsonProperty - public String getNamespacePrefix() - { - return namespacePrefix; - } - - /** - * @param event Event subject to filtering - * - * @return true if and only if the event prefix key is in the {@code whiteListDimsMapper} - */ - private boolean isInWhiteList(ServiceMetricEvent event) - { - return getPrefixKey(event.getMetric(), whiteListDimsMapper) != null; - } - - /** - * @param key the metric name to lookup - * @param whiteList - * - * @return null if the key does not match with any of the prefixes keys in @code metricsWhiteList, - * or the prefix in @code whiteListDimsMapper - */ - private String getPrefixKey(String key, SortedMap whiteList) - { - String prefixKey = null; - if (whiteList.containsKey(key)) { - return key; - } - SortedMap headMap = whiteList.headMap(key); - if (!headMap.isEmpty() && key.startsWith(headMap.lastKey())) { - prefixKey = headMap.lastKey(); - } - return prefixKey; - } - - /** - * Returns a {@link List} of the white-listed dimension's values to send. - * The list is order is the same as the order of dimensions {@code whiteListDimsMapper} - * - * @param event the event for which will filter dimensions - * - * @return {@link List} of the filtered dimension values to send or null if the event is not in the white list - */ - private List getOrderedDimValues(ServiceMetricEvent event) - { - String prefixKey = getPrefixKey(event.getMetric(), whiteListDimsMapper); - if (prefixKey == null) { - return null; - } - ImmutableList.Builder outputList = new ImmutableList.Builder<>(); - List dimensions = whiteListDimsMapper.get(prefixKey); - if (dimensions == null) { - return Collections.emptyList(); - } - - for (String dimKey : dimensions) { - Object rawValue = event.getUserDims().get(dimKey); - String value = null; - - if (rawValue instanceof String) { - value = (String) rawValue; - } else if (rawValue instanceof Collection) { - Collection values = (Collection) rawValue; - if (!values.isEmpty()) { - value = (String) values.iterator().next(); - } - } - - if (value != null) { - outputList.add(AmbariMetricsEmitter.sanitize(value)); - } - } - return outputList.build(); - } - - /** - * @param serviceMetricEvent druid metric event to convert - * - * @return null if the event is not white listed, otherwise return {@link TimelineMetric} - *
- * <p>
- * The metric name of the ambari timeline metric event is:
- * <namespacePrefix>.<service name>.<white-listed dimension values>.<metric name>
- * <p>
- * The order of the dimensions is the order returned by {@code getOrderedDimValues()}.
- * Note that this name will be sanitized by replacing all the `.` or space by `_` {@link AmbariMetricsEmitter#sanitize(String)}
- */ - - @Override - public TimelineMetric druidEventToTimelineMetric(ServiceMetricEvent serviceMetricEvent) - { - if (!this.isInWhiteList(serviceMetricEvent)) { - return null; - } - final ImmutableList.Builder metricNameBuilder = new ImmutableList.Builder<>(); - if (!Strings.isNullOrEmpty(namespacePrefix)) { - metricNameBuilder.add(namespacePrefix); - } - metricNameBuilder.add(AmbariMetricsEmitter.sanitize(serviceMetricEvent.getService())); - metricNameBuilder.addAll(this.getOrderedDimValues(serviceMetricEvent)); - metricNameBuilder.add(AmbariMetricsEmitter.sanitize(serviceMetricEvent.getMetric())); - - TimelineMetric metric = new TimelineMetric(); - metric.setMetricName(Joiner.on(".").join(metricNameBuilder.build())); - metric.setAppId(appName); - metric.setHostName(serviceMetricEvent.getHost()); - metric.setType(serviceMetricEvent.getFeed()); - metric.setInstanceId(serviceMetricEvent.getService()); - long ts = serviceMetricEvent.getCreatedTime().getMillis(); - metric.setStartTime(ts); - metric.setTimestamp(ts); - metric.getMetricValues().put(ts, serviceMetricEvent.getValue().doubleValue()); - return metric; - } - - private ImmutableSortedMap> readMap(final String mapPath) - { - String fileContent; - String actualPath = mapPath; - try { - if (Strings.isNullOrEmpty(mapPath)) { - URL defaultWhiteListMapUrl = this.getClass().getClassLoader().getResource("defaultWhiteListMap.json"); - actualPath = defaultWhiteListMapUrl.getFile(); - LOGGER.info("using default whiteList map located at [%s]", actualPath); - fileContent = Resources.toString(defaultWhiteListMapUrl, StandardCharsets.UTF_8); - } else { - fileContent = Files.asCharSource(new File(mapPath), StandardCharsets.UTF_8).read(); - } - return mapper.readerFor(new TypeReference>>() - { - }).readValue(fileContent); - } - catch (IOException e) { - throw new ISE(e, "Got an exception while parsing file [%s]", actualPath); - } - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - WhiteListBasedDruidToTimelineEventConverter that = (WhiteListBasedDruidToTimelineEventConverter) o; - - if (namespacePrefix != null ? !namespacePrefix.equals(that.namespacePrefix) : that.namespacePrefix != null) { - return false; - } - if (!appName.equals(that.appName)) { - return false; - } - return mapPath != null ? mapPath.equals(that.mapPath) : that.mapPath == null; - - } - - @Override - public int hashCode() - { - int result = namespacePrefix != null ? namespacePrefix.hashCode() : 0; - result = 31 * result + appName.hashCode(); - result = 31 * result + (mapPath != null ? mapPath.hashCode() : 0); - return result; - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/ambari-metrics-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 78118ba31b84..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
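The headMap trick in getPrefixKey() above relies on a property of sorted maps: the greatest key strictly below the lookup key is the only candidate that can be a prefix of it. A stand-alone, runnable sketch with made-up whitelist entries:

    import java.util.SortedMap;
    import java.util.TreeMap;

    class PrefixLookupSketch
    {
      static String prefixKey(String key, SortedMap<String, ?> whiteList)
      {
        if (whiteList.containsKey(key)) {
          return key;
        }
        // headMap(key) holds every entry strictly less than key; its last
        // key is the closest predecessor and thus the only prefix candidate.
        SortedMap<String, ?> headMap = whiteList.headMap(key);
        return (!headMap.isEmpty() && key.startsWith(headMap.lastKey())) ? headMap.lastKey() : null;
      }

      public static void main(String[] args)
      {
        TreeMap<String, Object> map = new TreeMap<>();
        map.put("ingest/events", null);
        map.put("query/time", null);
        System.out.println(prefixKey("ingest/events/thrownAway", map)); // ingest/events
        System.out.println(prefixKey("segment/size", map));             // null
      }
    }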
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.emitter.ambari.metrics.AmbariMetricsEmitterModule diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/resources/defaultWhiteListMap.json b/extensions-contrib/ambari-metrics-emitter/src/main/resources/defaultWhiteListMap.json deleted file mode 100644 index ea31beed2a0f..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/main/resources/defaultWhiteListMap.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "ingest/events": [ - "dataSource" - ], - "ingest/handoff/failed": [ - "dataSource" - ], - "ingest/persists": [ - "dataSource" - ], - "ingest/rows/output": [ - "dataSource" - ], - "ingest/merge": [ - "dataSource" - ], - "jvm/gc": [], - "jvm/mem": [ - "memKind" - ], - "query/cpu/time": [ - "dataSource", - "type" - ], - "query/node/time": [ - "dataSource", - "type" - ], - "query/node/ttfb": [ - "dataSource", - "type" - ], - "query/partial/time": [ - "dataSource", - "type" - ], - "query/segment/time": [ - "dataSource", - "type" - ], - "query/segmentAndCache/time": [ - "dataSource", - "type" - ], - "query/time": [ - "dataSource", - "type" - ], - "query/wait/time": [ - "dataSource", - "type" - ], - "segment/count": [ - "dataSource" - ], - "segment/dropQueue/count": [], - "segment/loadQueue/count": [], - "segment/loadQueue/failed": [], - "segment/loadQueue/size": [], - "segment/scan/pending": [], - "segment/size": [ - "dataSource" - ], - "segment/usedPercent": [ - "dataSource" - ], - "segment/added/bytes" : [ - "dataSource" - ], - "segment/nuked/bytes" : [ - "dataSource" - ] -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfigTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfigTest.java deleted file mode 100644 index f2ea37a40fc7..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/AmbariMetricsEmitterConfigTest.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
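Read together with the converter, each entry in the default map above pairs a metric (or metric prefix) with the dimensions whose values are spliced into the timeline metric name, in the listed order. Two illustrative readings, with hypothetical dimension values:

    // "query/time": ["dataSource", "type"]
    //     a query/time event with dataSource=wiki and type=timeseries becomes
    //     <namespacePrefix>.<service>.wiki.timeseries.query/time
    // "jvm/gc": []
    //     an empty list keeps the metric but drops all dimension values:
    //     <namespacePrefix>.<service>.jvm/gc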
- */ - -package org.apache.druid.emitter.ambari.metrics; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; -import java.util.Collections; - -public class AmbariMetricsEmitterConfigTest -{ - private final ObjectMapper mapper = new DefaultObjectMapper(); - - @Before - public void setUp() - { - mapper.setInjectableValues(new InjectableValues.Std().addValue( - ObjectMapper.class, - new DefaultObjectMapper() - )); - } - - @Test - public void testSerDeAmbariMetricsEmitterConfig() throws IOException - { - AmbariMetricsEmitterConfig config = new AmbariMetricsEmitterConfig( - "hostname", - 8080, - "http", - "truststore.path", - "truststore.type", - "truststore.password", - 1000, - 1000L, - 100, - new SendAllTimelineEventConverter("prefix", "druid"), - Collections.emptyList(), - 500L, - 400L - ); - AmbariMetricsEmitterConfig serde = mapper.readerFor(AmbariMetricsEmitterConfig.class).readValue( - mapper.writeValueAsBytes(config) - ); - Assert.assertEquals(config, serde); - } - - @Test - public void testSerDeDruidToTimelineEventConverter() throws IOException - { - SendAllTimelineEventConverter sendAllConverter = new SendAllTimelineEventConverter("prefix", "druid"); - DruidToTimelineMetricConverter serde = mapper.readerFor(DruidToTimelineMetricConverter.class) - .readValue(mapper.writeValueAsBytes(sendAllConverter)); - Assert.assertEquals(sendAllConverter, serde); - - WhiteListBasedDruidToTimelineEventConverter whiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter( - "prefix", - "druid", - "", - new DefaultObjectMapper() - ); - serde = mapper.readerFor(DruidToTimelineMetricConverter.class) - .readValue(mapper.writeValueAsBytes( - whiteListBasedDruidToTimelineEventConverter)); - Assert.assertEquals(whiteListBasedDruidToTimelineEventConverter, serde); - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/DruidToWhiteListBasedConverterTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/DruidToWhiteListBasedConverterTest.java deleted file mode 100644 index d4fe42290f19..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/DruidToWhiteListBasedConverterTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.ambari.metrics; - -import org.junit.Assert; -import org.junit.Test; - -public class DruidToWhiteListBasedConverterTest -{ - - @Test - public void testSanitize() - { - String test = "host name.yahoo.com:8080"; - Assert.assertEquals("host_name_yahoo_com:8080", AmbariMetricsEmitter.sanitize(test)); - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java deleted file mode 100644 index d5ec9213501e..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.ambari.metrics; - -import junitparams.JUnitParamsRunner; -import junitparams.Parameters; -import org.apache.commons.io.IOUtils; -import org.apache.druid.annotations.UsedByJUnitParamsRunner; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; -import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.HashMap; - - -@RunWith(JUnitParamsRunner.class) -public class WhiteListBasedDruidToTimelineEventConverterTest -{ - private final String prefix = "druid"; - private final WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = - new WhiteListBasedDruidToTimelineEventConverter(prefix, "druid", null, new DefaultObjectMapper()); - private ServiceMetricEvent event; - private final DateTime createdTime = DateTimes.nowUtc(); - private final String hostname = "testHost:8080"; - private final String serviceName = "historical"; - private final String defaultNamespace = prefix + "." 
+ serviceName; - - @Before - public void setUp() - { - event = EasyMock.createMock(ServiceMetricEvent.class); - EasyMock.expect(event.getHost()).andReturn(hostname).anyTimes(); - EasyMock.expect(event.getService()).andReturn(serviceName).anyTimes(); - EasyMock.expect(event.getCreatedTime()).andReturn(createdTime).anyTimes(); - EasyMock.expect(event.getUserDims()).andReturn(new HashMap<>()).anyTimes(); - EasyMock.expect(event.getValue()).andReturn(10).anyTimes(); - EasyMock.expect(event.getFeed()).andReturn("metrics").anyTimes(); - } - - @Test - @Parameters( - { - "query/time, true", - "query/node/ttfb, true", - "query/segmentAndCache/time, true", - "query/time/balaba, true", - "query/tim, false", - "segment/added/bytes, true", - "segment/count, true", - "segment/size, true", - "segment/cost/raw, false", - "coordinator/TIER_1 /cost/raw, false", - "segment/Kost/raw, false", - ", false", - "word, false", - "coordinator, false", - "server/, false", - "ingest/persists/time, true", - "jvm/mem/init, true", - "jvm/gc/count, true" - } - ) - public void testDefaultIsInWhiteList(String key, boolean expectedValue) - { - EasyMock.expect(event.getMetric()).andReturn(key).anyTimes(); - EasyMock.replay(event); - boolean isIn = defaultWhiteListBasedDruidToTimelineEventConverter.druidEventToTimelineMetric(event) != null; - Assert.assertEquals(expectedValue, isIn); - } - - @Test - @Parameters - public void testGetName(ServiceMetricEvent serviceMetricEvent, String expectedPath) - { - TimelineMetric metric = defaultWhiteListBasedDruidToTimelineEventConverter.druidEventToTimelineMetric(serviceMetricEvent); - String path = null; - if (metric != null) { - path = metric.getMetricName(); - } - Assert.assertEquals(expectedPath, path); - } - - @Test - public void testWhiteListedStringArrayDimension() throws IOException - { - File mapFile = File.createTempFile("testing-" + System.nanoTime(), ".json"); - mapFile.deleteOnExit(); - - try (OutputStream outputStream = new FileOutputStream(mapFile)) { - IOUtils.copyLarge( - getClass().getResourceAsStream("/testWhiteListedStringArrayDimension.json"), - outputStream - ); - } - - WhiteListBasedDruidToTimelineEventConverter converter = new WhiteListBasedDruidToTimelineEventConverter( - prefix, - "druid", - mapFile.getAbsolutePath(), - new DefaultObjectMapper() - ); - - ServiceMetricEvent event = new ServiceMetricEvent.Builder() - .setDimension("gcName", new String[] {"g1"}) - .build(createdTime, "jvm/gc/cpu", 10) - .build(serviceName, hostname); - - TimelineMetric metric = converter.druidEventToTimelineMetric(event); - - Assert.assertNotNull(metric); - Assert.assertEquals(defaultNamespace + ".g1.jvm/gc/cpu", metric.getMetricName()); - } - - @UsedByJUnitParamsRunner - private Object[] parametersForTestGetName() - { - return new Object[]{ - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("id", "dummy_id") - .setDimension("status", "some_status") - .setDimension("numDimensions", "1") - .setDimension("segment", "dummy_segment") - .build(createdTime, "query/segment/time/balabla/more", 10) - .build(serviceName, hostname), - defaultNamespace + ".query/segment/time/balabla/more" - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("dataSource", "some_data_source") - .setDimension("tier", "_default_tier") - .build(createdTime, "segment/max", 10) - .build(serviceName, hostname), - null - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("interval", "2013/2015") - 
.setDimension("some_random_dim1", "random_dim_value1") - .setDimension("some_random_dim2", "random_dim_value2") - .setDimension("hasFilters", "no") - .setDimension("duration", "P1D") - .setDimension("remoteAddress", "194.0.90.2") - .setDimension("id", "ID") - .setDimension("context", "{context}") - .build(createdTime, "query/time", 10) - .build(serviceName, hostname), - defaultNamespace + ".data-source.groupBy.query/time" - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("some_random_dim1", "random_dim_value1") - .build(createdTime, "ingest/persists/count", 10) - .build(serviceName, hostname), - defaultNamespace + ".data-source.ingest/persists/count" - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("bufferpoolName", "BufferPool") - .setDimension("type", "groupBy") - .setDimension("some_random_dim1", "random_dim_value1") - .build(createdTime, "jvm/bufferpool/capacity", 10) - .build(serviceName, hostname), - null - } - }; - } -} diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/resources/testWhiteListedStringArrayDimension.json b/extensions-contrib/ambari-metrics-emitter/src/test/resources/testWhiteListedStringArrayDimension.json deleted file mode 100644 index 757742e1eed3..000000000000 --- a/extensions-contrib/ambari-metrics-emitter/src/test/resources/testWhiteListedStringArrayDimension.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "jvm/gc": ["gcName"] -} diff --git a/extensions-contrib/cassandra-storage/pom.xml b/extensions-contrib/cassandra-storage/pom.xml deleted file mode 100644 index de610aaf4e29..000000000000 --- a/extensions-contrib/cassandra-storage/pom.xml +++ /dev/null @@ -1,176 +0,0 @@ - - - - - 4.0.0 - - org.apache.druid.extensions.contrib - druid-cassandra-storage - druid-cassandra-storage - druid-cassandra-storage - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - - com.netflix.astyanax - astyanax - 1.0.1 - - - jline - jline - - - joda-time - joda-time - - - org.mortbay.jetty - jetty-util - - - com.ning - compress-lzf - - - commons-lang - commons-lang - - - commons-cli - commons-cli - - - log4j - log4j - - - commons-codec - commons-codec - - - commons-logging - commons-logging - - - org.apache.httpcomponents - httpclient - - - org.apache.httpcomponents - httpcore - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.apache.zookeeper - zookeeper - - - org.slf4j - slf4j-api - - - org.slf4j - slf4j-log4j12 - - - com.github.stephenc.high-scale-lib - high-scale-lib - - - - org.mortbay.jetty - jetty - - - - - commons-io - commons-io - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - javax.validation - validation-api - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.google.inject.extensions - guice-multibindings - provided - - - - - junit - junit - test - - - - diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentConfig.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentConfig.java deleted file mode 100644 index 6302e3dc222f..000000000000 --- 
a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentConfig.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cassandra; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import javax.validation.constraints.NotNull; - -/** - * Cassandra Config - */ -public class CassandraDataSegmentConfig -{ - @JsonProperty - @NotNull - public String host = null; - - @JsonProperty - @NotNull - public String keyspace = null; - - public String getKeyspace() - { - return keyspace; - } - - public String getHost() - { - return host; - } -} diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java deleted file mode 100644 index 076a3306a1a2..000000000000 --- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.cassandra; - -import com.google.common.base.Predicates; -import com.google.inject.Inject; -import com.netflix.astyanax.recipes.storage.ChunkedStorage; -import org.apache.druid.java.util.common.FileUtils; -import org.apache.druid.java.util.common.RetryUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.segment.loading.SegmentLoadingException; -import org.apache.druid.utils.CompressionUtils; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; - -/** - * Cassandra Segment Puller - */ -public class CassandraDataSegmentPuller extends CassandraStorage -{ - private static final Logger log = new Logger(CassandraDataSegmentPuller.class); - private static final int CONCURRENCY = 10; - private static final int BATCH_SIZE = 10; - - @Inject - public CassandraDataSegmentPuller(CassandraDataSegmentConfig config) - { - super(config); - } - - FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException - { - log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir); - try { - org.apache.commons.io.FileUtils.forceMkdir(outDir); - } - catch (IOException e) { - throw new SegmentLoadingException(e, ""); - } - - long startTime = System.currentTimeMillis(); - final File tmpFile = new File(outDir, "index.zip"); - log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath()); - - try { - RetryUtils.retry( - () -> { - try (OutputStream os = new FileOutputStream(tmpFile)) { - ChunkedStorage - .newReader(indexStorage, key, os) - .withBatchSize(BATCH_SIZE) - .withConcurrencyLevel(CONCURRENCY) - .call(); - } - return new FileUtils.FileCopyResult(tmpFile); - }, - Predicates.alwaysTrue(), - 10 - ); - } - catch (Exception e) { - throw new SegmentLoadingException(e, "Unable to copy key [%s] to file [%s]", key, tmpFile.getAbsolutePath()); - } - try { - final FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir); - log.info( - "Pull of file[%s] completed in %,d millis (%s bytes)", key, System.currentTimeMillis() - startTime, - result.size() - ); - return result; - } - catch (Exception e) { - try { - FileUtils.deleteDirectory(outDir); - } - catch (IOException e1) { - log.error(e1, "Error clearing segment directory [%s]", outDir.getAbsolutePath()); - e.addSuppressed(e1); - } - throw new SegmentLoadingException(e, e.getMessage()); - } - finally { - if (!tmpFile.delete()) { - log.warn("Could not delete cache file at [%s]", tmpFile.getAbsolutePath()); - } - } - } -} diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPusher.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPusher.java deleted file mode 100644 index c61443f924a9..000000000000 --- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPusher.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cassandra; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableMap; -import com.google.inject.Inject; -import com.netflix.astyanax.MutationBatch; -import com.netflix.astyanax.connectionpool.exceptions.NotFoundException; -import com.netflix.astyanax.recipes.storage.ChunkedStorage; -import com.netflix.astyanax.recipes.storage.ChunkedStorageProvider; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.segment.SegmentUtils; -import org.apache.druid.segment.loading.DataSegmentPusher; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.utils.CompressionUtils; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.nio.file.Files; -import java.util.Map; - -/** - * Cassandra Segment Pusher - */ -public class CassandraDataSegmentPusher extends CassandraStorage implements DataSegmentPusher -{ - private static final Logger log = new Logger(CassandraDataSegmentPusher.class); - private static final int CONCURRENCY = 10; - private static final Joiner JOINER = Joiner.on("/").skipNulls(); - private final ObjectMapper jsonMapper; - - @Inject - public CassandraDataSegmentPusher( - CassandraDataSegmentConfig config, - ObjectMapper jsonMapper - ) - { - super(config); - this.jsonMapper = jsonMapper; - } - - @Override - public String getPathForHadoop() - { - throw new UnsupportedOperationException("Cassandra storage does not support indexing via Hadoop"); - } - - @Deprecated - @Override - public String getPathForHadoop(String dataSource) - { - return getPathForHadoop(); - } - - @Override - public DataSegment push(final File indexFilesDir, DataSegment segment, final boolean useUniquePath) throws IOException - { - log.info("Writing [%s] to C*", indexFilesDir); - String key = JOINER.join( - config.getKeyspace().isEmpty() ? 
null : config.getKeyspace(), - this.getStorageDir(segment, useUniquePath) - ); - - // Create index - final File compressedIndexFile = File.createTempFile("druid", "index.zip"); - long indexSize = CompressionUtils.zip(indexFilesDir, compressedIndexFile); - log.info("Wrote compressed file [%s] to [%s]", compressedIndexFile.getAbsolutePath(), key); - - int version = SegmentUtils.getVersionFromDir(indexFilesDir); - - try (final InputStream fileStream = Files.newInputStream(compressedIndexFile.toPath())) { - long start = System.currentTimeMillis(); - ChunkedStorage.newWriter(indexStorage, key, fileStream) - .withConcurrencyLevel(CONCURRENCY).call(); - byte[] json = jsonMapper.writeValueAsBytes(segment); - MutationBatch mutation = this.keyspace.prepareMutationBatch(); - mutation.withRow(descriptorStorage, key) - .putColumn("lastmodified", System.currentTimeMillis(), null) - .putColumn("descriptor", json, null); - mutation.execute(); - log.info("Wrote index to C* in [%s] ms", System.currentTimeMillis() - start); - } - catch (Exception e) { - throw new IOException(e); - } - - segment = segment.withSize(indexSize) - .withLoadSpec(ImmutableMap.of("type", "c*", "key", key)) - .withBinaryVersion(version); - - log.info("Deleting zipped index File[%s]", compressedIndexFile); - compressedIndexFile.delete(); - return segment; - } - - @Override - public Map makeLoadSpec(URI uri) - { - throw new UnsupportedOperationException("not supported"); - } - - private boolean doesObjectExist(ChunkedStorageProvider provider, String objectName) throws Exception - { - try { - return ChunkedStorage.newInfoReader(provider, objectName).call().isValidForRead(); - } - catch (NotFoundException e) { - return false; - } - } -} diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDruidModule.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDruidModule.java deleted file mode 100644 index 26a8434afc38..000000000000 --- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDruidModule.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.cassandra; - -import com.fasterxml.jackson.core.Version; -import com.fasterxml.jackson.databind.Module; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.google.inject.Key; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.LazySingleton; -import org.apache.druid.guice.PolyBind; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.segment.loading.DataSegmentPusher; - -import java.util.List; - -/** - */ -public class CassandraDruidModule implements DruidModule -{ - public static final String SCHEME = "c*"; - - @Override - public void configure(Binder binder) - { - PolyBind.optionBinder(binder, Key.get(DataSegmentPusher.class)) - .addBinding(SCHEME) - .to(CassandraDataSegmentPusher.class) - .in(LazySingleton.class); - JsonConfigProvider.bind(binder, "druid.storage", CassandraDataSegmentConfig.class); - } - - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new Module() - { - @Override - public String getModuleName() - { - return "DruidCassandraStorage-" + System.identityHashCode(this); - } - - @Override - public Version version() - { - return Version.unknownVersion(); - } - - @Override - public void setupModule(SetupContext context) - { - context.registerSubtypes(CassandraLoadSpec.class); - } - } - ); - } -} diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraLoadSpec.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraLoadSpec.java deleted file mode 100644 index d17e5ab1f135..000000000000 --- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraLoadSpec.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.cassandra; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import org.apache.druid.segment.loading.LoadSpec; -import org.apache.druid.segment.loading.SegmentLoadingException; - -import java.io.File; - -/** - * - */ -@JsonTypeName(CassandraDruidModule.SCHEME) -public class CassandraLoadSpec implements LoadSpec -{ - @JsonProperty - private final String key; - private final CassandraDataSegmentPuller puller; - - @JsonCreator - public CassandraLoadSpec( - @JacksonInject CassandraDataSegmentPuller puller, - @JsonProperty("key") String key - ) - { - this.puller = puller; - this.key = key; - } - - @Override - public LoadSpecResult loadSegment(File outDir) throws SegmentLoadingException - { - return new LoadSpecResult(puller.getSegmentFiles(key, outDir).size()); - } -} diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraStorage.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraStorage.java deleted file mode 100644 index 9588eb946fb5..000000000000 --- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraStorage.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cassandra; - -import com.netflix.astyanax.AstyanaxContext; -import com.netflix.astyanax.Keyspace; -import com.netflix.astyanax.connectionpool.NodeDiscoveryType; -import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl; -import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor; -import com.netflix.astyanax.impl.AstyanaxConfigurationImpl; -import com.netflix.astyanax.model.ColumnFamily; -import com.netflix.astyanax.recipes.storage.CassandraChunkedStorageProvider; -import com.netflix.astyanax.recipes.storage.ChunkedStorageProvider; -import com.netflix.astyanax.serializers.StringSerializer; -import com.netflix.astyanax.thrift.ThriftFamilyFactory; - -/** - * Superclass for accessing Cassandra Storage. 
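- * Connection settings (host and keyspace) are supplied by {@link CassandraDataSegmentConfig}.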
- * - * This is the schema used to support the index and descriptor storage: - * - * CREATE TABLE index_storage ( key text, chunk text, value blob, PRIMARY KEY (key, chunk)) WITH COMPACT STORAGE; - * CREATE TABLE descriptor_storage ( key varchar, lastModified timestamp, descriptor varchar, PRIMARY KEY (key) ) WITH COMPACT STORAGE; - */ -public class CassandraStorage -{ - private static final String CLUSTER_NAME = "druid_cassandra_cluster"; - private static final String INDEX_TABLE_NAME = "index_storage"; - private static final String DESCRIPTOR_TABLE_NAME = "descriptor_storage"; - - private AstyanaxContext<Keyspace> astyanaxContext; - final Keyspace keyspace; - final ChunkedStorageProvider indexStorage; - final ColumnFamily<String, String> descriptorStorage; - final CassandraDataSegmentConfig config; - - public CassandraStorage(CassandraDataSegmentConfig config) - { - this.astyanaxContext = new AstyanaxContext.Builder() - .forCluster(CLUSTER_NAME) - .forKeyspace(config.getKeyspace()) - .withAstyanaxConfiguration(new AstyanaxConfigurationImpl().setDiscoveryType(NodeDiscoveryType.NONE)) - .withConnectionPoolConfiguration( - new ConnectionPoolConfigurationImpl("MyConnectionPool").setMaxConnsPerHost(10) - .setSeeds(config.getHost())).withConnectionPoolMonitor(new CountingConnectionPoolMonitor()) - .buildKeyspace(ThriftFamilyFactory.getInstance()); - this.astyanaxContext.start(); - this.keyspace = this.astyanaxContext.getEntity(); - this.config = config; - indexStorage = new CassandraChunkedStorageProvider(keyspace, INDEX_TABLE_NAME); - - descriptorStorage = new ColumnFamily<String, String>(DESCRIPTOR_TABLE_NAME, - StringSerializer.get(), StringSerializer.get()); - } -} diff --git a/extensions-contrib/cassandra-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/cassandra-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index c0190a51f7a3..000000000000 --- a/extensions-contrib/cassandra-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
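-# Each non-comment line below names a DruidModule implementation, which Druid discovers at load time via java.util.ServiceLoader.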
- -org.apache.druid.storage.cassandra.CassandraDruidModule diff --git a/extensions-contrib/cloudfiles-extensions/pom.xml b/extensions-contrib/cloudfiles-extensions/pom.xml deleted file mode 100644 index c3cbf94e0191..000000000000 --- a/extensions-contrib/cloudfiles-extensions/pom.xml +++ /dev/null @@ -1,187 +0,0 @@ - - - - - 4.0.0 - - org.apache.druid.extensions.contrib - druid-cloudfiles-extensions - druid-cloudfiles-extensions - druid-cloudfiles-extensions - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - - UTF-8 - 1.9.1 - - 3.0 - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - com.google.inject - guice - ${guice.version} - - - aopalliance - aopalliance - - - - - - com.google.inject.extensions - guice-multibindings - ${guice.version} - - - - commons-io - commons-io - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.google.guava - guava - - - javax.validation - validation-api - provided - - - - org.apache.jclouds - jclouds-core - ${jclouds.version} - - - org.apache.jclouds.api - openstack-swift - ${jclouds.version} - - - org.apache.jclouds.driver - jclouds-slf4j - ${jclouds.version} - - - org.slf4j - slf4j-api - - - javax.ws.rs - jsr311-api - - - - - org.apache.jclouds.api - openstack-keystone - ${jclouds.version} - - - org.apache.jclouds.api - rackspace-cloudfiles - ${jclouds.version} - - - - org.apache.jclouds.provider - rackspace-cloudfiles-us - ${jclouds.version} - - - - org.apache.jclouds.provider - rackspace-cloudfiles-uk - ${jclouds.version} - - - com.fasterxml.jackson.module - jackson-module-guice - ${jackson.version} - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-core - - - - - - - junit - junit - test - - - org.easymock - easymock - test - - - org.apache.druid - druid-server - ${project.parent.version} - test - - - org.apache.druid - druid-processing - ${project.parent.version} - test - - - - diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java deleted file mode 100644 index edc0bbb65c06..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.firehose.cloudfiles; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import javax.validation.constraints.NotNull; -import java.util.Objects; - -public class CloudFilesBlob -{ - @JsonProperty - @NotNull - private String container; - - @JsonProperty - @NotNull - private String path; - - @JsonProperty - @NotNull - private String region; - - @JsonCreator - public CloudFilesBlob( - @JsonProperty("container") String container, - @JsonProperty("path") String path, - @JsonProperty("region") String region - ) - { - this.container = container; - this.path = path; - this.region = region; - } - - public String getContainer() - { - return container; - } - - public String getPath() - { - return path; - } - - public String getRegion() - { - return region; - } - - @Override - public String toString() - { - return "CloudFilesBlob{" - + "container=" + container - + ",path=" + path - + ",region=" + region - + "}"; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - final CloudFilesBlob that = (CloudFilesBlob) o; - return Objects.equals(container, that.container) && - Objects.equals(path, that.path) && - Objects.equals(region, that.region); - } - - @Override - public int hashCode() - { - return Objects.hash(container, path, region); - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesFirehoseDruidModule.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesFirehoseDruidModule.java deleted file mode 100644 index 3531e7584e45..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesFirehoseDruidModule.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.firehose.cloudfiles; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -public class CloudFilesFirehoseDruidModule implements DruidModule -{ - - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule().registerSubtypes( - new NamedType(StaticCloudFilesFirehoseFactory.class, "staticcloudfiles"))); - } - - @Override - public void configure(Binder arg0) - { - - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java deleted file mode 100644 index f0de9f7e98de..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactory.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.firehose.cloudfiles; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Predicate; -import org.apache.druid.data.input.FiniteFirehoseFactory; -import org.apache.druid.data.input.InputSplit; -import org.apache.druid.data.input.impl.StringInputRowParser; -import org.apache.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.storage.cloudfiles.CloudFilesByteSource; -import org.apache.druid.storage.cloudfiles.CloudFilesObjectApiProxy; -import org.apache.druid.storage.cloudfiles.CloudFilesUtils; -import org.apache.druid.utils.CompressionUtils; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class StaticCloudFilesFirehoseFactory extends PrefetchableTextFilesFirehoseFactory -{ - private static final Logger log = new Logger(StaticCloudFilesFirehoseFactory.class); - - private final CloudFilesApi cloudFilesApi; - private final List blobs; - - @JsonCreator - public StaticCloudFilesFirehoseFactory( - @JacksonInject CloudFilesApi cloudFilesApi, - @JsonProperty("blobs") List blobs, - @JsonProperty("maxCacheCapacityBytes") Long maxCacheCapacityBytes, - @JsonProperty("maxFetchCapacityBytes") Long maxFetchCapacityBytes, - @JsonProperty("prefetchTriggerBytes") Long prefetchTriggerBytes, - @JsonProperty("fetchTimeout") Long fetchTimeout, - @JsonProperty("maxFetchRetry") Integer maxFetchRetry - ) - { - super(maxCacheCapacityBytes, maxFetchCapacityBytes, prefetchTriggerBytes, fetchTimeout, maxFetchRetry); - this.cloudFilesApi = cloudFilesApi; - this.blobs = blobs; - } - - @JsonProperty - public List getBlobs() - { - return blobs; - } - - @Override - protected Collection initObjects() - { - return blobs; - } - - @Override - protected InputStream openObjectStream(CloudFilesBlob object) throws IOException - { - return openObjectStream(object, 0); - } - - @Override - protected InputStream openObjectStream(CloudFilesBlob object, long start) throws IOException - { - return createCloudFilesByteSource(object).openStream(start); - } - - private CloudFilesByteSource createCloudFilesByteSource(CloudFilesBlob object) - { - final String region = object.getRegion(); - final String container = object.getContainer(); - final String path = object.getPath(); - - log.info("Retrieving file from region[%s], container[%s] and path [%s]", - region, container, path - ); - CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy(cloudFilesApi, region, container); - return new CloudFilesByteSource(objectApi, path); - } - - @Override - protected InputStream wrapObjectStream(CloudFilesBlob object, InputStream stream) throws IOException - { - return CompressionUtils.decompress(stream, object.getPath()); - } - - @Override - public boolean equals(Object o) - { - if (o == this) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - final StaticCloudFilesFirehoseFactory that = (StaticCloudFilesFirehoseFactory) o; - return Objects.equals(blobs, that.blobs) && - getMaxCacheCapacityBytes() == that.getMaxCacheCapacityBytes() && - getMaxFetchCapacityBytes() == that.getMaxFetchCapacityBytes() && - getPrefetchTriggerBytes() == 
that.getPrefetchTriggerBytes() && - getFetchTimeout() == that.getFetchTimeout() && - getMaxFetchRetry() == that.getMaxFetchRetry(); - } - - @Override - public int hashCode() - { - return Objects.hash( - blobs, - getMaxCacheCapacityBytes(), - getMaxFetchCapacityBytes(), - getPrefetchTriggerBytes(), - getFetchTimeout(), - getMaxFetchRetry() - ); - } - - @Override - protected Predicate getRetryCondition() - { - return CloudFilesUtils.CLOUDFILESRETRY; - } - - @Override - public FiniteFirehoseFactory withSplit(InputSplit split) - { - return new StaticCloudFilesFirehoseFactory( - cloudFilesApi, - Collections.singletonList(split.get()), - getMaxCacheCapacityBytes(), - getMaxFetchCapacityBytes(), - getPrefetchTriggerBytes(), - getFetchTimeout(), - getMaxFetchRetry() - ); - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesAccountConfig.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesAccountConfig.java deleted file mode 100644 index 0317d6124996..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesAccountConfig.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import javax.validation.constraints.NotNull; - -public class CloudFilesAccountConfig -{ - - @JsonProperty - @NotNull - private String provider; - - @JsonProperty - @NotNull - private String userName; - - @JsonProperty - @NotNull - private String apiKey; - - @JsonProperty - @NotNull - private boolean useServiceNet = true; - - public String getProvider() - { - return provider; - } - - public String getUserName() - { - return userName; - } - - public String getApiKey() - { - return apiKey; - } - - public boolean getUseServiceNet() - { - return useServiceNet; - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSource.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSource.java deleted file mode 100644 index eaa602a5f46f..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSource.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.google.common.io.ByteSource; -import org.jclouds.io.Payload; - -import java.io.IOException; -import java.io.InputStream; - -public class CloudFilesByteSource extends ByteSource -{ - - private final CloudFilesObjectApiProxy objectApi; - private final String path; - private Payload payload; - - public CloudFilesByteSource(CloudFilesObjectApiProxy objectApi, String path) - { - this.objectApi = objectApi; - this.path = path; - this.payload = null; - } - - public void closeStream() throws IOException - { - if (payload != null) { - payload.close(); - payload = null; - } - } - - @Override - public InputStream openStream() throws IOException - { - return openStream(0); - } - - public InputStream openStream(long start) throws IOException - { - payload = (payload == null) ? objectApi.get(path, start).getPayload() : payload; - - try { - return payload.openStream(); - } - catch (IOException e) { - if (CloudFilesUtils.CLOUDFILESRETRY.apply(e)) { - throw new IOException("Recoverable exception", e); - } - throw new RuntimeException(e); - } - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java deleted file mode 100644 index 55399fa78aac..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPuller.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.cloudfiles; - -import com.google.inject.Inject; -import org.apache.druid.java.util.common.FileUtils; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.segment.loading.SegmentLoadingException; -import org.apache.druid.utils.CompressionUtils; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; - -import java.io.File; -import java.io.IOException; - -public class CloudFilesDataSegmentPuller -{ - - private static final Logger log = new Logger(CloudFilesDataSegmentPuller.class); - private final CloudFilesApi cloudFilesApi; - - @Inject - public CloudFilesDataSegmentPuller(final CloudFilesApi cloudFilesApi) - { - this.cloudFilesApi = cloudFilesApi; - } - - FileUtils.FileCopyResult getSegmentFiles(String region, String container, String path, File outDir) - throws SegmentLoadingException - { - CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy(cloudFilesApi, region, container); - final CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path); - - try { - final FileUtils.FileCopyResult result = CompressionUtils.unzip( - byteSource, - outDir, - CloudFilesUtils.CLOUDFILESRETRY, - false - ); - log.info("Loaded %d bytes from [%s] to [%s]", result.size(), path, outDir.getAbsolutePath()); - return result; - } - catch (Exception e) { - try { - FileUtils.deleteDirectory(outDir); - } - catch (IOException ioe) { - log.warn( - ioe, - "Failed to remove output directory [%s] for segment pulled from [%s]", - outDir.getAbsolutePath(), - path - ); - } - throw new SegmentLoadingException(e, e.getMessage()); - } - finally { - try { - byteSource.closeStream(); - } - catch (IOException ioe) { - log.warn(ioe, "Failed to close payload for segment pulled from [%s]", path); - } - } - } - - private void prepareOutDir(final File outDir) throws ISE - { - if (!outDir.exists()) { - outDir.mkdirs(); - } - - if (!outDir.isDirectory()) { - throw new ISE("outDir[%s] must be a directory.", outDir); - } - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java deleted file mode 100644 index 42fe23f94318..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.druid.storage.cloudfiles; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import com.google.inject.Inject; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.segment.SegmentUtils; -import org.apache.druid.segment.loading.DataSegmentPusher; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.utils.CompressionUtils; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; - -import java.io.File; -import java.net.URI; -import java.nio.file.Files; -import java.util.Map; - -public class CloudFilesDataSegmentPusher implements DataSegmentPusher -{ - - private static final Logger log = new Logger(CloudFilesDataSegmentPusher.class); - private final CloudFilesObjectApiProxy objectApi; - private final CloudFilesDataSegmentPusherConfig config; - private final ObjectMapper jsonMapper; - - @Inject - public CloudFilesDataSegmentPusher( - final CloudFilesApi cloudFilesApi, - final CloudFilesDataSegmentPusherConfig config, final ObjectMapper jsonMapper - ) - { - this.config = config; - String region = this.config.getRegion(); - String container = this.config.getContainer(); - this.objectApi = new CloudFilesObjectApiProxy(cloudFilesApi, region, container); - this.jsonMapper = jsonMapper; - } - - @Override - public String getPathForHadoop() - { - return null; - } - - @Deprecated - @Override - public String getPathForHadoop(final String dataSource) - { - return getPathForHadoop(); - } - - @Override - public DataSegment push(final File indexFilesDir, final DataSegment inSegment, final boolean useUniquePath) - { - final String segmentPath = CloudFilesUtils.buildCloudFilesPath( - this.config.getBasePath(), - getStorageDir(inSegment, useUniquePath) - ); - - File descriptorFile = null; - File zipOutFile = null; - - try { - final File descFile = descriptorFile = File.createTempFile("descriptor", ".json"); - final File outFile = zipOutFile = File.createTempFile("druid", "index.zip"); - - final long indexSize = CompressionUtils.zip(indexFilesDir, zipOutFile); - - log.info("Copying segment[%s] to CloudFiles at location[%s]", inSegment.getId(), segmentPath); - return CloudFilesUtils.retryCloudFilesOperation( - () -> { - CloudFilesObject segmentData = new CloudFilesObject( - segmentPath, - outFile, - objectApi.getRegion(), - objectApi.getContainer() - ); - - log.info("Pushing %s.", segmentData.getPath()); - objectApi.put(segmentData); - - // Avoid using Guava in DataSegmentPushers because they might be used with very diverse Guava versions in - // runtime, and because Guava deletes methods over time, that causes incompatibilities. 
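- // java.nio.file.Files comes from the JDK, so the descriptor write below adds no Guava dependency.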
- Files.write(descFile.toPath(), jsonMapper.writeValueAsBytes(inSegment)); - CloudFilesObject descriptorData = new CloudFilesObject( - segmentPath, - descFile, - objectApi.getRegion(), - objectApi.getContainer() - ); - log.info("Pushing %s.", descriptorData.getPath()); - objectApi.put(descriptorData); - - final DataSegment outSegment = inSegment - .withSize(indexSize) - .withLoadSpec(makeLoadSpec(new URI(segmentData.getPath()))) - .withBinaryVersion(SegmentUtils.getVersionFromDir(indexFilesDir)); - - return outSegment; - }, - this.config.getOperationMaxRetries() - ); - } - catch (Exception e) { - throw new RuntimeException(e); - } - finally { - if (zipOutFile != null) { - log.info("Deleting zipped index File[%s]", zipOutFile); - zipOutFile.delete(); - } - - if (descriptorFile != null) { - log.info("Deleting descriptor file[%s]", descriptorFile); - descriptorFile.delete(); - } - } - } - - @Override - public Map<String, Object> makeLoadSpec(URI uri) - { - return ImmutableMap.of( - "type", - CloudFilesStorageDruidModule.SCHEME, - "region", - objectApi.getRegion(), - "container", - objectApi.getContainer(), - "path", - uri.toString() - ); - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java deleted file mode 100644 index 68619737ab93..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -import javax.validation.constraints.NotNull; - -/** - */ -public class CloudFilesDataSegmentPusherConfig -{ - @JsonProperty - @NotNull - private String region; - - @JsonProperty - @NotNull - private String container; - - @JsonProperty - @NotNull - private String basePath; - - @JsonProperty - private int operationMaxRetries = 10; - - public void setRegion(String region) - { - this.region = region; - } - - public void setContainer(String container) - { - this.container = container; - } - - public void setBasePath(String basePath) - { - this.basePath = basePath; - } - - @SuppressWarnings("unused") // Used by Jackson deserialization?
- public void setOperationMaxRetries(int operationMaxRetries) - { - this.operationMaxRetries = operationMaxRetries; - } - - public String getRegion() - { - Preconditions.checkNotNull(region); - return region; - } - - public String getContainer() - { - Preconditions.checkNotNull(container); - return container; - } - - public String getBasePath() - { - Preconditions.checkNotNull(basePath); - return basePath; - } - - public int getOperationMaxRetries() - { - return operationMaxRetries; - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesLoadSpec.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesLoadSpec.java deleted file mode 100644 index d06261dd9e68..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesLoadSpec.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.google.common.base.Preconditions; -import org.apache.druid.segment.loading.LoadSpec; -import org.apache.druid.segment.loading.SegmentLoadingException; - -import java.io.File; - -@JsonTypeName(CloudFilesStorageDruidModule.SCHEME) -public class CloudFilesLoadSpec implements LoadSpec -{ - - @JsonProperty - private final String region; - - @JsonProperty - private final String container; - - @JsonProperty - private final String path; - - private final CloudFilesDataSegmentPuller puller; - - @JsonCreator - public CloudFilesLoadSpec( - @JsonProperty("region") String region, @JsonProperty("container") String container, - @JsonProperty("path") String path, @JacksonInject CloudFilesDataSegmentPuller puller - ) - { - Preconditions.checkNotNull(region); - Preconditions.checkNotNull(container); - Preconditions.checkNotNull(path); - this.container = container; - this.region = region; - this.path = path; - this.puller = puller; - } - - @Override - public LoadSpecResult loadSegment(File file) throws SegmentLoadingException - { - return new LoadSpecResult(puller.getSegmentFiles(region, container, path, file).size()); - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObject.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObject.java deleted file mode 100644 index 5f44856051c8..000000000000 --- 
a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObject.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.google.common.io.ByteSource; -import com.google.common.io.Files; -import org.jclouds.io.Payload; -import org.jclouds.io.Payloads; - -import java.io.File; - -public class CloudFilesObject -{ - - private Payload payload; - private String path; - private final String region; - private final String container; - - public CloudFilesObject(final String basePath, final File file, final String region, final String container) - { - this(region, container); - ByteSource byteSource = Files.asByteSource(file); - this.payload = Payloads.newByteSourcePayload(byteSource); - this.path = CloudFilesUtils.buildCloudFilesPath(basePath, file.getName()); - } - - public CloudFilesObject(final Payload payload, final String region, final String container, final String path) - { - this(region, container, path); - this.payload = payload; - } - - private CloudFilesObject(final String region, final String container, final String path) - { - this(region, container); - this.path = path; - } - - private CloudFilesObject(final String region, final String container) - { - this.region = region; - this.container = container; - } - - public String getRegion() - { - return region; - } - - public String getContainer() - { - return container; - } - - public String getPath() - { - return path; - } - - public Payload getPayload() - { - return payload; - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxy.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxy.java deleted file mode 100644 index 07d8363fada9..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxy.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import org.jclouds.http.options.GetOptions; -import org.jclouds.io.Payload; -import org.jclouds.openstack.swift.v1.domain.SwiftObject; -import org.jclouds.openstack.swift.v1.features.ObjectApi; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; - -public class CloudFilesObjectApiProxy -{ - private final ObjectApi objectApi; - private final String region; - private final String container; - - public CloudFilesObjectApiProxy(final CloudFilesApi cloudFilesApi, final String region, final String container) - { - this.region = region; - this.container = container; - this.objectApi = cloudFilesApi.getObjectApi(region, container); - } - - public String getRegion() - { - return region; - } - - public String getContainer() - { - return container; - } - - public String put(final CloudFilesObject cloudFilesObject) - { - return objectApi.put(cloudFilesObject.getPath(), cloudFilesObject.getPayload()); - } - - public CloudFilesObject get(String path, long start) - { - final SwiftObject swiftObject; - if (start == 0) { - swiftObject = objectApi.get(path); - } else { - swiftObject = objectApi.get(path, new GetOptions().startAt(start)); - } - Payload payload = swiftObject.getPayload(); - return new CloudFilesObject(payload, this.region, this.container, path); - } - - public boolean exists(String path) - { - return objectApi.getWithoutBody(path) != null; - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java deleted file mode 100644 index c54342fd32de..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.storage.cloudfiles; - -import com.fasterxml.jackson.core.Version; -import com.fasterxml.jackson.databind.Module; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.inject.Binder; -import com.google.inject.Provides; -import org.apache.druid.guice.Binders; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.LazySingleton; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.common.logger.Logger; -import org.jclouds.ContextBuilder; -import org.jclouds.logging.slf4j.config.SLF4JLoggingModule; -import org.jclouds.openstack.v2_0.config.InternalUrlModule; -import org.jclouds.osgi.ProviderRegistry; -import org.jclouds.rackspace.cloudfiles.uk.CloudFilesUKProviderMetadata; -import org.jclouds.rackspace.cloudfiles.us.CloudFilesUSProviderMetadata; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; - -import java.util.List; - -/** - */ -public class CloudFilesStorageDruidModule implements DruidModule -{ - - private static final Logger log = new Logger(CloudFilesStorageDruidModule.class); - public static final String SCHEME = "cloudfiles"; - - @Override - public List<? extends Module> getJacksonModules() - { - log.info("Getting jackson modules..."); - - return ImmutableList.of( - new Module() - { - @Override - public String getModuleName() - { - return "CloudFiles-" + System.identityHashCode(this); - } - - @Override - public Version version() - { - return Version.unknownVersion(); - } - - @Override - public void setupModule(SetupContext context) - { - context.registerSubtypes(CloudFilesLoadSpec.class); - } - } - ); - } - - @Override - public void configure(Binder binder) - { - log.info("Configuring CloudFilesStorageDruidModule..."); - JsonConfigProvider.bind(binder, "druid.storage", CloudFilesDataSegmentPusherConfig.class); - JsonConfigProvider.bind(binder, "druid.cloudfiles", CloudFilesAccountConfig.class); - - Binders.dataSegmentPusherBinder(binder).addBinding(SCHEME).to(CloudFilesDataSegmentPusher.class) - .in(LazySingleton.class); - - log.info("Configured CloudFilesStorageDruidModule."); - } - - @Provides - @LazySingleton - public CloudFilesApi getCloudFilesApi(final CloudFilesAccountConfig config) - { - log.info("Building Cloud Files Api..."); - - Iterable<com.google.inject.Module> modules; - if (config.getUseServiceNet()) { - log.info("Configuring Cloud Files Api to use the internal service network..."); - modules = ImmutableSet.of(new SLF4JLoggingModule(), new InternalUrlModule()); - } else { - log.info("Configuring Cloud Files Api to use the public network..."); - modules = ImmutableSet.of(new SLF4JLoggingModule()); - } - - ProviderRegistry.registerProvider(CloudFilesUSProviderMetadata.builder().build()); - ProviderRegistry.registerProvider(CloudFilesUKProviderMetadata.builder().build()); - ContextBuilder cb = ContextBuilder.newBuilder(config.getProvider()) - .credentials(config.getUserName(), config.getApiKey()).modules(modules); - CloudFilesApi cfa = cb.buildApi(CloudFilesApi.class); - log.info("Cloud Files Api built."); - return cfa; - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java deleted file mode 100644 index bc883b28fd84..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesUtils.java +++ /dev/null @@
-1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.google.common.base.Predicate; -import org.apache.druid.java.util.common.RetryUtils; -import org.apache.druid.java.util.common.RetryUtils.Task; - -import java.io.IOException; - -/** - * - */ -public class CloudFilesUtils -{ - - public static final Predicate<Throwable> CLOUDFILESRETRY = new Predicate<Throwable>() - { - @Override - public boolean apply(Throwable e) - { - if (e == null) { - return false; - } else if (e instanceof IOException) { - return true; - } else { - return apply(e.getCause()); - } - } - }; - - /** - * Retries CloudFiles operations that fail due to io-related exceptions. - */ - public static <T> T retryCloudFilesOperation(Task<T> f, final int maxTries) throws Exception - { - return RetryUtils.retry(f, CLOUDFILESRETRY, maxTries); - } - - public static String buildCloudFilesPath(String basePath, final String fileName) - { - String path = fileName; - if (!basePath.isEmpty()) { - int lastSlashIndex = basePath.lastIndexOf('/'); - if (lastSlashIndex != -1) { - basePath = basePath.substring(0, lastSlashIndex); - } - path = basePath + "/" + fileName; - } - return path; - } - -} diff --git a/extensions-contrib/cloudfiles-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/cloudfiles-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 81a1411f89b6..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License.
- -org.apache.druid.storage.cloudfiles.CloudFilesStorageDruidModule -org.apache.druid.firehose.cloudfiles.CloudFilesFirehoseDruidModule diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactoryTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactoryTest.java deleted file mode 100644 index 48f3ca694fa3..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/firehose/cloudfiles/StaticCloudFilesFirehoseFactoryTest.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.firehose.cloudfiles; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.fasterxml.jackson.module.guice.ObjectMapperModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.Provides; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.easymock.EasyMock; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; -import org.junit.Assert; -import org.junit.Test; - -import java.io.IOException; -import java.util.List; - -public class StaticCloudFilesFirehoseFactoryTest -{ - private static final CloudFilesApi API = EasyMock.niceMock(CloudFilesApi.class); - - @Test - public void testSerde() throws IOException - { - final ObjectMapper mapper = createObjectMapper(new TestModule()); - - final List<CloudFilesBlob> blobs = ImmutableList.of( - new CloudFilesBlob("container", "foo", "bar"), - new CloudFilesBlob("container", "foo", "bar2") - ); - - final StaticCloudFilesFirehoseFactory factory = new StaticCloudFilesFirehoseFactory( - API, - blobs, - 2048L, - 1024L, - 512L, - 100L, - 5 - ); - - final StaticCloudFilesFirehoseFactory outputFact = mapper.readValue( - mapper.writeValueAsString(factory), - StaticCloudFilesFirehoseFactory.class - ); - - Assert.assertEquals(factory, outputFact); - } - - private static ObjectMapper createObjectMapper(DruidModule baseModule) - { - final ObjectMapper baseMapper = new DefaultObjectMapper(); - baseModule.getJacksonModules().forEach(baseMapper::registerModule); - - final Injector injector = Guice.createInjector( - new ObjectMapperModule(), - baseModule - ); - return injector.getInstance(ObjectMapper.class); - } - - private static class TestModule implements DruidModule - { - @Override - public List<? extends Module> getJacksonModules() - { - return ImmutableList.of(new SimpleModule()); - } - - @Override - public void
configure(Binder binder) - { - - } - - @Provides - public CloudFilesApi getRestS3Service() - { - return API; - } - } -} diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java deleted file mode 100644 index 6a07063c722e..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import org.easymock.EasyMock; -import org.easymock.EasyMockSupport; -import org.jclouds.io.Payload; -import org.junit.Assert; -import org.junit.Test; - -import java.io.IOException; -import java.io.InputStream; - -public class CloudFilesByteSourceTest extends EasyMockSupport -{ - @Test - public void openStreamTest() throws IOException - { - final String path = "path"; - - CloudFilesObjectApiProxy objectApi = createMock(CloudFilesObjectApiProxy.class); - CloudFilesObject cloudFilesObject = createMock(CloudFilesObject.class); - Payload payload = createMock(Payload.class); - InputStream stream = createMock(InputStream.class); - - EasyMock.expect(objectApi.get(path, 0)).andReturn(cloudFilesObject); - EasyMock.expect(cloudFilesObject.getPayload()).andReturn(payload); - EasyMock.expect(payload.openStream()).andReturn(stream); - payload.close(); - - replayAll(); - - CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path); - Assert.assertEquals(stream, byteSource.openStream()); - byteSource.closeStream(); - - verifyAll(); - } - - @Test() - public void openStreamWithRecoverableErrorTest() throws IOException - { - final String path = "path"; - - CloudFilesObjectApiProxy objectApi = createMock(CloudFilesObjectApiProxy.class); - CloudFilesObject cloudFilesObject = createMock(CloudFilesObject.class); - Payload payload = createMock(Payload.class); - InputStream stream = createMock(InputStream.class); - - EasyMock.expect(objectApi.get(path, 0)).andReturn(cloudFilesObject); - EasyMock.expect(cloudFilesObject.getPayload()).andReturn(payload); - EasyMock.expect(payload.openStream()).andThrow(new IOException()).andReturn(stream); - payload.close(); - - replayAll(); - - CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path); - try { - byteSource.openStream(); - } - catch (Exception e) { - Assert.assertEquals("Recoverable exception", e.getMessage()); - } - - Assert.assertEquals(stream, byteSource.openStream()); - byteSource.closeStream(); - - verifyAll(); - } -} diff --git 
a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java deleted file mode 100644 index 8a2e3f697eb3..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import com.google.common.io.Files; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.NoneShardSpec; -import org.easymock.EasyMock; -import org.jclouds.openstack.swift.v1.features.ObjectApi; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; - -/** - */ -public class CloudFilesDataSegmentPusherTest -{ - @Rule - public final TemporaryFolder tempFolder = new TemporaryFolder(); - - @Test - public void testPush() throws Exception - { - ObjectApi objectApi = EasyMock.createStrictMock(ObjectApi.class); - EasyMock.expect(objectApi.put(EasyMock.anyString(), EasyMock.anyObject())).andReturn(null).atLeastOnce(); - EasyMock.replay(objectApi); - - CloudFilesApi api = EasyMock.createStrictMock(CloudFilesApi.class); - EasyMock.expect(api.getObjectApi(EasyMock.anyString(), EasyMock.anyString())) - .andReturn(objectApi) - .atLeastOnce(); - EasyMock.replay(api); - - - CloudFilesDataSegmentPusherConfig config = new CloudFilesDataSegmentPusherConfig(); - config.setRegion("region"); - config.setContainer("container"); - config.setBasePath("basePath"); - - CloudFilesDataSegmentPusher pusher = new CloudFilesDataSegmentPusher(api, config, new DefaultObjectMapper()); - - // Create a mock segment on disk - File tmp = tempFolder.newFile("version.bin"); - - final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1}; - Files.write(data, tmp); - final long size = data.length; - - DataSegment segmentToPush = new DataSegment( - "foo", - Intervals.of("2015/2016"), - "0", - new HashMap<>(), - new ArrayList<>(), - new ArrayList<>(), - NoneShardSpec.instance(), - 0, - size - ); - - DataSegment segment = pusher.push(tempFolder.getRoot(), segmentToPush, false); - - Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); - - EasyMock.verify(api); - } -} diff --git 
a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java deleted file mode 100644 index eb3b61c21494..000000000000 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.storage.cloudfiles; - -import org.easymock.EasyMock; -import org.easymock.EasyMockSupport; -import org.jclouds.io.Payload; -import org.jclouds.openstack.swift.v1.domain.SwiftObject; -import org.jclouds.openstack.swift.v1.features.ObjectApi; -import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; -import org.junit.Assert; -import org.junit.Test; - -public class CloudFilesObjectApiProxyTest extends EasyMockSupport -{ - @Test - public void getTest() - { - final String path = "path"; - final String region = "region"; - final String container = "container"; - - CloudFilesApi cloudFilesApi = createMock(CloudFilesApi.class); - ObjectApi objectApi = createMock(ObjectApi.class); - SwiftObject swiftObject = createMock(SwiftObject.class); - Payload payload = createMock(Payload.class); - - EasyMock.expect(cloudFilesApi.getObjectApi(region, container)).andReturn(objectApi); - EasyMock.expect(objectApi.get(path)).andReturn(swiftObject); - EasyMock.expect(swiftObject.getPayload()).andReturn(payload); - - replayAll(); - - CloudFilesObjectApiProxy cfoApiProxy = new CloudFilesObjectApiProxy(cloudFilesApi, region, container); - CloudFilesObject cloudFilesObject = cfoApiProxy.get(path, 0); - - Assert.assertEquals(cloudFilesObject.getPayload(), payload); - Assert.assertEquals(cloudFilesObject.getRegion(), region); - Assert.assertEquals(cloudFilesObject.getContainer(), container); - Assert.assertEquals(cloudFilesObject.getPath(), path); - - verifyAll(); - } -} diff --git a/extensions-contrib/distinctcount/pom.xml b/extensions-contrib/distinctcount/pom.xml deleted file mode 100644 index 5933e1ce51b0..000000000000 --- a/extensions-contrib/distinctcount/pom.xml +++ /dev/null @@ -1,117 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!-- - ~ Licensed to the Apache Software Foundation (ASF) under one - ~ or more contributor license agreements. See the NOTICE file - ~ distributed with this work for additional information - ~ regarding copyright ownership. The ASF licenses this file - ~ to you under the Apache License, Version 2.0 (the - ~ "License"); you may not use this file except in compliance - ~ with the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, - ~ software distributed under the License is distributed on an - ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - ~ KIND, either express or implied. See the License for the - ~ specific language governing permissions and limitations - ~ under the License. - --> - -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <modelVersion>4.0.0</modelVersion> - - <groupId>org.apache.druid.extensions.contrib</groupId> - <artifactId>druid-distinctcount</artifactId> - <name>druid-distinctcount</name> - <description>druid-distinctcount</description> - - <parent> - <groupId>org.apache.druid</groupId> - <artifactId>druid</artifactId> - <version>0.19.0-iap2-SNAPSHOT</version> - <relativePath>../../pom.xml</relativePath> - </parent> - - <dependencies> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-core</artifactId> - <version>${project.parent.version}</version> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-processing</artifactId> - <version>${project.parent.version}</version> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.google.code.findbugs</groupId> - <artifactId>jsr305</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-annotations</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>joda-time</groupId> - <artifactId>joda-time</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.google.inject</groupId> - <artifactId>guice</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-databind</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>it.unimi.dsi</groupId> - <artifactId>fastutil</artifactId> - <scope>provided</scope> - </dependency> - - <!-- Tests --> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-core</artifactId> - <version>${project.parent.version}</version> - <scope>test</scope> - <type>test-jar</type> - </dependency> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-processing</artifactId> - <version>${project.parent.version}</version> - <scope>test</scope> - <type>test-jar</type> - </dependency> - <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.easymock</groupId> - <artifactId>easymock</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.hamcrest</groupId> - <artifactId>hamcrest-core</artifactId> - <scope>test</scope> - </dependency> - </dependencies> - -</project> diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/BitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/BitMapFactory.java deleted file mode 100644 index 5bf148d063f4..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/BitMapFactory.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.distinctcount; - -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import org.apache.druid.collections.bitmap.MutableBitmap; - -/** - */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = RoaringBitMapFactory.class) -@JsonSubTypes(value = { - @JsonSubTypes.Type(name = "java", value = JavaBitMapFactory.class), - @JsonSubTypes.Type(name = "concise", value = ConciseBitMapFactory.class), - @JsonSubTypes.Type(name = "roaring", value = RoaringBitMapFactory.class) -}) -public interface BitMapFactory -{ - /** - * Create a new empty bitmap - * - * @return the new bitmap - */ - MutableBitmap makeEmptyMutableBitmap(); -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java deleted file mode 100644 index aa0833ce9f21..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.distinctcount; - -import org.apache.druid.collections.bitmap.BitmapFactory; -import org.apache.druid.collections.bitmap.ConciseBitmapFactory; -import org.apache.druid.collections.bitmap.MutableBitmap; - -public class ConciseBitMapFactory implements BitMapFactory -{ - private static final BitmapFactory BITMAP_FACTORY = new ConciseBitmapFactory(); - - public ConciseBitMapFactory() - { - } - - @Override - public MutableBitmap makeEmptyMutableBitmap() - { - return BITMAP_FACTORY.makeEmptyMutableBitmap(); - } - - @Override - public String toString() - { - return "ConciseBitMapFactory"; - } - - @Override - public boolean equals(Object o) - { - return this == o || o instanceof ConciseBitMapFactory; - } - - @Override - public int hashCode() - { - return 0; - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregator.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregator.java deleted file mode 100644 index d7099e368914..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregator.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import org.apache.druid.collections.bitmap.MutableBitmap; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.segment.DimensionSelector; -import org.apache.druid.segment.data.IndexedInts; - -public class DistinctCountAggregator implements Aggregator -{ - - private final DimensionSelector selector; - private final MutableBitmap mutableBitmap; - - public DistinctCountAggregator( - DimensionSelector selector, - MutableBitmap mutableBitmap - ) - { - this.selector = selector; - this.mutableBitmap = mutableBitmap; - } - - @Override - public void aggregate() - { - IndexedInts row = selector.getRow(); - for (int i = 0, rowSize = row.size(); i < rowSize; i++) { - int index = row.get(i); - mutableBitmap.add(index); - } - } - - @Override - public Object get() - { - return mutableBitmap.size(); - } - - @Override - public float getFloat() - { - return (float) mutableBitmap.size(); - } - - @Override - public void close() - { - mutableBitmap.clear(); - } - - @Override - public long getLong() - { - return (long) mutableBitmap.size(); - } - - @Override - public double getDouble() - { - return (double) mutableBitmap.size(); - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java deleted file mode 100644 index b74aba1e24bd..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Longs; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.query.aggregation.AggregateCombiner; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.AggregatorUtil; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.aggregation.LongSumAggregateCombiner; -import org.apache.druid.query.aggregation.LongSumAggregatorFactory; -import org.apache.druid.query.dimension.DefaultDimensionSpec; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.apache.druid.segment.DimensionSelector; - -import javax.annotation.Nullable; -import java.nio.ByteBuffer; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; - -public class DistinctCountAggregatorFactory extends AggregatorFactory -{ - private static final BitMapFactory DEFAULT_BITMAP_FACTORY = new RoaringBitMapFactory(); - - private final String name; - private final String fieldName; - private final BitMapFactory bitMapFactory; - - @JsonCreator - public DistinctCountAggregatorFactory( - @JsonProperty("name") String name, - @JsonProperty("fieldName") String fieldName, - @JsonProperty("bitmapFactory") BitMapFactory bitMapFactory - ) - { - Preconditions.checkNotNull(name); - Preconditions.checkNotNull(fieldName); - this.name = name; - this.fieldName = fieldName; - this.bitMapFactory = bitMapFactory == null ? DEFAULT_BITMAP_FACTORY : bitMapFactory; - } - - @Override - public Aggregator factorize(ColumnSelectorFactory columnFactory) - { - DimensionSelector selector = makeDimensionSelector(columnFactory); - if (selector == null) { - return new NoopDistinctCountAggregator(); - } else { - return new DistinctCountAggregator( - selector, - bitMapFactory.makeEmptyMutableBitmap() - ); - } - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory columnFactory) - { - DimensionSelector selector = makeDimensionSelector(columnFactory); - if (selector == null) { - return NoopDistinctCountBufferAggregator.instance(); - } else { - return new DistinctCountBufferAggregator(makeDimensionSelector(columnFactory)); - } - } - - private DimensionSelector makeDimensionSelector(final ColumnSelectorFactory columnFactory) - { - return columnFactory.makeDimensionSelector(new DefaultDimensionSpec(fieldName, fieldName)); - } - - @Override - public Comparator getComparator() - { - return new Comparator() - { - @Override - public int compare(Object o, Object o1) - { - return Longs.compare(((Number) o).longValue(), ((Number) o1).longValue()); - } - }; - } - - @Override - public Object combine(Object lhs, Object rhs) - { - if (lhs == null && rhs == null) { - return 0L; - } - if (rhs == null) { - return ((Number) lhs).longValue(); - } - if (lhs == null) { - return ((Number) rhs).longValue(); - } - return ((Number) lhs).longValue() + ((Number) rhs).longValue(); - } - - @Override - public AggregateCombiner makeAggregateCombiner() - { - // This is likely wrong as well as combine(), see https://github.com/apache/druid/pull/2602#issuecomment-321224202 - return new LongSumAggregateCombiner(); - } - - @Override - public AggregatorFactory getCombiningFactory() - { - return new 
LongSumAggregatorFactory(name, name); - } - - @Override - public List<AggregatorFactory> getRequiredColumns() - { - return Collections.singletonList( - new DistinctCountAggregatorFactory(fieldName, fieldName, bitMapFactory) - ); - } - - @Override - public Object deserialize(Object object) - { - return object; - } - - @Nullable - @Override - public Object finalizeComputation(@Nullable Object object) - { - return object; - } - - @JsonProperty - public String getFieldName() - { - return fieldName; - } - - @JsonProperty("bitmapFactory") - public BitMapFactory getBitMapFactory() - { - return bitMapFactory; - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @Override - public List<String> requiredFields() - { - return Collections.singletonList(fieldName); - } - - @Override - public byte[] getCacheKey() - { - byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - byte[] bitMapFactoryCacheKey = StringUtils.toUtf8(bitMapFactory.toString()); - return ByteBuffer.allocate(2 + fieldNameBytes.length + bitMapFactoryCacheKey.length) - .put(AggregatorUtil.DISTINCT_COUNT_CACHE_KEY) - .put(fieldNameBytes) - .put(AggregatorUtil.STRING_SEPARATOR) - .put(bitMapFactoryCacheKey) - .array(); - } - - @Override - public String getTypeName() - { - return "distinctCount"; - } - - @Override - public int getMaxIntermediateSize() - { - return Long.BYTES; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - DistinctCountAggregatorFactory that = (DistinctCountAggregatorFactory) o; - - if (!fieldName.equals(that.fieldName)) { - return false; - } - if (!name.equals(that.name)) { - return false; - } - - return true; - } - - @Override - public int hashCode() - { - int result = name.hashCode(); - result = 31 * result + fieldName.hashCode(); - return result; - } - - @Override - public String toString() - { - return "DistinctCountAggregatorFactory{" + - "name='" + name + '\'' + - ", fieldName='" + fieldName + '\'' + - '}'; - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountBufferAggregator.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountBufferAggregator.java deleted file mode 100644 index 0b1ebf556d60..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountBufferAggregator.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import it.unimi.dsi.fastutil.ints.Int2ObjectMap; -import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; -import org.apache.druid.collections.bitmap.MutableBitmap; -import org.apache.druid.collections.bitmap.WrappedRoaringBitmap; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import org.apache.druid.segment.DimensionSelector; -import org.apache.druid.segment.data.IndexedInts; - -import java.nio.ByteBuffer; - -public class DistinctCountBufferAggregator implements BufferAggregator -{ - private final DimensionSelector selector; - private final Int2ObjectMap<MutableBitmap> mutableBitmapCollection = new Int2ObjectOpenHashMap<>(); - - public DistinctCountBufferAggregator( - DimensionSelector selector - ) - { - this.selector = selector; - } - - @Override - public void init(ByteBuffer buf, int position) - { - buf.putLong(position, 0L); - } - - @Override - public void aggregate(ByteBuffer buf, int position) - { - MutableBitmap mutableBitmap = getMutableBitmap(position); - IndexedInts row = selector.getRow(); - for (int i = 0, rowSize = row.size(); i < rowSize; i++) { - int index = row.get(i); - mutableBitmap.add(index); - } - buf.putLong(position, mutableBitmap.size()); - } - - private MutableBitmap getMutableBitmap(int position) - { - MutableBitmap mutableBitmap = mutableBitmapCollection.get(position); - if (mutableBitmap == null) { - mutableBitmap = new WrappedRoaringBitmap(); - mutableBitmapCollection.put(position, mutableBitmap); - } - return mutableBitmap; - } - - @Override - public Object get(ByteBuffer buf, int position) - { - return buf.getLong(position); - } - - @Override - public float getFloat(ByteBuffer buf, int position) - { - return (float) buf.getLong(position); - } - - @Override - public long getLong(ByteBuffer buf, int position) - { - return buf.getLong(position); - } - - @Override - public double getDouble(ByteBuffer buf, int position) - { - return (double) buf.getLong(position); - } - - @Override - public void close() - { - mutableBitmapCollection.clear(); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("selector", selector); - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountDruidModule.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountDruidModule.java deleted file mode 100644 index 4e2cb12a239c..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountDruidModule.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.distinctcount; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -public class DistinctCountDruidModule implements DruidModule -{ - public static final String DISTINCT_COUNT = "distinctCount"; - - @Override - public List<? extends Module> getJacksonModules() - { - return ImmutableList.of( - new SimpleModule("DistinctCountModule").registerSubtypes( - new NamedType(DistinctCountAggregatorFactory.class, DISTINCT_COUNT) - ) - ); - } - - @Override - public void configure(Binder binder) - { - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java deleted file mode 100644 index 2b4992c81805..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import org.apache.druid.collections.bitmap.BitSetBitmapFactory; -import org.apache.druid.collections.bitmap.BitmapFactory; -import org.apache.druid.collections.bitmap.MutableBitmap; - -public class JavaBitMapFactory implements BitMapFactory -{ - private static final BitmapFactory BITMAP_FACTORY = new BitSetBitmapFactory(); - - public JavaBitMapFactory() - { - } - - @Override - public MutableBitmap makeEmptyMutableBitmap() - { - return BITMAP_FACTORY.makeEmptyMutableBitmap(); - } - - @Override - public String toString() - { - return "JavaBitMapFactory"; - } - - @Override - public boolean equals(Object o) - { - return this == o || o instanceof JavaBitMapFactory; - } - - @Override - public int hashCode() - { - return 0; - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountAggregator.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountAggregator.java deleted file mode 100644 index d6b2a1be06b8..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountAggregator.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.distinctcount; - -import org.apache.druid.query.aggregation.Aggregator; - -public class NoopDistinctCountAggregator implements Aggregator -{ - public NoopDistinctCountAggregator() - { - } - - @Override - public void aggregate() - { - } - - @Override - public Object get() - { - return 0L; - } - - @Override - public float getFloat() - { - return 0.0f; - } - - @Override - public long getLong() - { - return 0L; - } - - @Override - public double getDouble() - { - return 0.0; - } - - @Override - public void close() - { - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountBufferAggregator.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountBufferAggregator.java deleted file mode 100644 index 1ac10183a99d..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/NoopDistinctCountBufferAggregator.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.distinctcount; - -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector; - -import java.nio.ByteBuffer; - -/** - * The difference from {@link org.apache.druid.query.aggregation.NoopBufferAggregator} is that - * NoopDistinctCountBufferAggregator returns 0 instead of null from {@link #get(ByteBuffer, int)}. - */ -public final class NoopDistinctCountBufferAggregator implements BufferAggregator -{ - private static final NoopDistinctCountBufferAggregator INSTANCE = new NoopDistinctCountBufferAggregator(); - - static NoopDistinctCountBufferAggregator instance() - { - return INSTANCE; - } - - private NoopDistinctCountBufferAggregator() - { - } - - @Override - public void init(ByteBuffer buf, int position) - { - } - - @Override - public void aggregate(ByteBuffer buf, int position) - { - } - - @Override - public Object get(ByteBuffer buf, int position) - { - return 0L; - } - - @Override - public float getFloat(ByteBuffer buf, int position) - { - return (float) 0; - } - - @Override - public long getLong(ByteBuffer buf, int position) - { - return (long) 0; - } - - @Override - public double getDouble(ByteBuffer buf, int position) - { - return 0; - } - - @Override - public void close() - { - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - // nothing to inspect - } -} diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java deleted file mode 100644 index 48b9db19531a..000000000000 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import org.apache.druid.collections.bitmap.BitmapFactory; -import org.apache.druid.collections.bitmap.MutableBitmap; -import org.apache.druid.collections.bitmap.RoaringBitmapFactory; - -public class RoaringBitMapFactory implements BitMapFactory -{ - private static final BitmapFactory BITMAP_FACTORY = new RoaringBitmapFactory(); - - public RoaringBitMapFactory() - { - } - - @Override - public MutableBitmap makeEmptyMutableBitmap() - { - return BITMAP_FACTORY.makeEmptyMutableBitmap(); - } - - @Override - public String toString() - { - return "RoaringBitMapFactory"; - } - - @Override - public boolean equals(Object o) - { - return this == o || o instanceof RoaringBitMapFactory; - } - - @Override - public int hashCode() - { - return 0; - } -} diff --git a/extensions-contrib/distinctcount/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/distinctcount/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index e83dbd1f2b29..000000000000 --- a/extensions-contrib/distinctcount/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.query.aggregation.distinctcount.DistinctCountDruidModule diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java deleted file mode 100644 index 6c7db8eb3638..000000000000 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import org.apache.druid.data.input.MapBasedInputRow; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.Pair; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.java.util.common.io.Closer; -import org.apache.druid.query.QueryRunnerTestHelper; -import org.apache.druid.query.aggregation.CountAggregatorFactory; -import org.apache.druid.query.dimension.DefaultDimensionSpec; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.groupby.GroupByQueryConfig; -import org.apache.druid.query.groupby.GroupByQueryRunnerFactory; -import org.apache.druid.query.groupby.GroupByQueryRunnerTest; -import org.apache.druid.query.groupby.GroupByQueryRunnerTestHelper; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.query.groupby.orderby.DefaultLimitSpec; -import org.apache.druid.query.groupby.orderby.OrderByColumnSpec; -import org.apache.druid.segment.IncrementalIndexSegment; -import org.apache.druid.segment.Segment; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.segment.incremental.IncrementalIndex; -import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -public class DistinctCountGroupByQueryTest extends InitializedNullHandlingTest -{ - private GroupByQueryRunnerFactory factory; - private Closer resourceCloser; - - @Before - public void setup() - { - final GroupByQueryConfig config = new GroupByQueryConfig(); - config.setMaxIntermediateRows(10000); - final Pair factoryCloserPair = GroupByQueryRunnerTest.makeQueryRunnerFactory( - config - ); - factory = factoryCloserPair.lhs; - resourceCloser = factoryCloserPair.rhs; - } - - @After - public void teardown() throws IOException - { - resourceCloser.close(); - } - - @Test - public void testGroupByWithDistinctCountAgg() throws Exception - { - IncrementalIndex index = new IncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.SECOND) - .withMetrics(new CountAggregatorFactory("cnt")) - .build() - ) - .setConcurrentEventAdd(true) - .setMaxRowCount(1000) - .buildOnheap(); - - String visitor_id = "visitor_id"; - String client_type = "client_type"; - long timestamp = DateTimes.of("2010-01-01").getMillis(); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "0", client_type, "iphone") - ) - ); - index.add( - new MapBasedInputRow( - timestamp + 1, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "1", client_type, "iphone") - ) - ); - index.add( - new MapBasedInputRow( - timestamp + 2, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "2", client_type, "android") - ) - ); - - GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) - .setGranularity(QueryRunnerTestHelper.ALL_GRAN) - .setDimensions(new DefaultDimensionSpec( - client_type, - client_type - )) - .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .setLimitSpec( - new DefaultLimitSpec( - 
Collections.singletonList(new OrderByColumnSpec(client_type, OrderByColumnSpec.Direction.DESCENDING)), - 10 - ) - ) - .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new DistinctCountAggregatorFactory("UV", visitor_id, null)) - .build(); - final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null); - - Iterable results = GroupByQueryRunnerTestHelper.runQuery( - factory, - factory.createRunner(incrementalIndexSegment), - query - ); - - List expectedResults = Arrays.asList( - GroupByQueryRunnerTestHelper.createExpectedRow( - query, - "1970-01-01T00:00:00.000Z", - client_type, "iphone", - "UV", 2L, - "rows", 2L - ), - GroupByQueryRunnerTestHelper.createExpectedRow( - query, - "1970-01-01T00:00:00.000Z", - client_type, "android", - "UV", 1L, - "rows", 1L - ) - ); - TestHelper.assertExpectedObjects(expectedResults, results, "distinct-count"); - } -} diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java deleted file mode 100644 index 2cc0526480bf..000000000000 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.distinctcount; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import org.apache.druid.data.input.MapBasedInputRow; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.query.Druids; -import org.apache.druid.query.QueryRunnerTestHelper; -import org.apache.druid.query.Result; -import org.apache.druid.query.aggregation.CountAggregatorFactory; -import org.apache.druid.query.timeseries.TimeseriesQuery; -import org.apache.druid.query.timeseries.TimeseriesQueryEngine; -import org.apache.druid.query.timeseries.TimeseriesResultValue; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.segment.incremental.IncrementalIndex; -import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.joda.time.DateTime; -import org.junit.Test; - -import java.util.Collections; -import java.util.List; - -public class DistinctCountTimeseriesQueryTest extends InitializedNullHandlingTest -{ - - @Test - public void testTopNWithDistinctCountAgg() throws Exception - { - TimeseriesQueryEngine engine = new TimeseriesQueryEngine(); - - IncrementalIndex index = new IncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.SECOND) - .withMetrics(new CountAggregatorFactory("cnt")) - .build() - ) - .setMaxRowCount(1000) - .buildOnheap(); - - String visitor_id = "visitor_id"; - String client_type = "client_type"; - DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z"); - long timestamp = time.getMillis(); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "0", client_type, "iphone") - ) - ); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "1", client_type, "iphone") - ) - ); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "2", client_type, "android") - ) - ); - - TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.DATA_SOURCE) - .granularity(QueryRunnerTestHelper.ALL_GRAN) - .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .aggregators( - Lists.newArrayList( - QueryRunnerTestHelper.ROWS_COUNT, - new DistinctCountAggregatorFactory("UV", visitor_id, null) - ) - ) - .build(); - - final Iterable> results = - engine.process(query, new IncrementalIndexStorageAdapter(index)).toList(); - - List> expectedResults = Collections.singletonList( - new Result<>( - time, - new TimeseriesResultValue( - ImmutableMap.of("UV", 3, "rows", 3L) - ) - ) - ); - TestHelper.assertExpectedResults(expectedResults, results); - } -} diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java deleted file mode 100644 index 7b14fba07f11..000000000000 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.distinctcount; - -import com.google.common.base.Supplier; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import org.apache.druid.collections.CloseableStupidPool; -import org.apache.druid.data.input.MapBasedInputRow; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.query.QueryRunnerTestHelper; -import org.apache.druid.query.Result; -import org.apache.druid.query.aggregation.CountAggregatorFactory; -import org.apache.druid.query.topn.TopNQuery; -import org.apache.druid.query.topn.TopNQueryBuilder; -import org.apache.druid.query.topn.TopNQueryEngine; -import org.apache.druid.query.topn.TopNResultValue; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.segment.incremental.IncrementalIndex; -import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.joda.time.DateTime; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class DistinctCountTopNQueryTest extends InitializedNullHandlingTest -{ - private CloseableStupidPool pool; - - @Before - public void setup() - { - pool = new CloseableStupidPool<>( - "TopNQueryEngine-bufferPool", - new Supplier() - { - @Override - public ByteBuffer get() - { - return ByteBuffer.allocate(1024 * 1024); - } - } - ); - } - - @After - public void teardown() - { - pool.close(); - } - - @Test - public void testTopNWithDistinctCountAgg() throws Exception - { - TopNQueryEngine engine = new TopNQueryEngine(pool); - - IncrementalIndex index = new IncrementalIndex.Builder() - .setIndexSchema( - new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularities.SECOND) - .withMetrics(new CountAggregatorFactory("cnt")) - .build() - ) - .setMaxRowCount(1000) - .buildOnheap(); - - String visitor_id = "visitor_id"; - String client_type = "client_type"; - DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z"); - long timestamp = time.getMillis(); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "0", client_type, "iphone") - ) - ); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - ImmutableMap.of(visitor_id, "1", client_type, "iphone") - ) - ); - index.add( - new MapBasedInputRow( - timestamp, - Lists.newArrayList(visitor_id, client_type), - 
ImmutableMap.of(visitor_id, "2", client_type, "android") - ) - ); - - TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE) - .granularity(QueryRunnerTestHelper.ALL_GRAN) - .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .dimension(client_type) - .metric("UV") - .threshold(10) - .aggregators( - QueryRunnerTestHelper.ROWS_COUNT, - new DistinctCountAggregatorFactory("UV", visitor_id, null) - ) - .build(); - - final Iterable> results = - engine.query(query, new IncrementalIndexStorageAdapter(index), null).toList(); - - List> expectedResults = Collections.singletonList( - new Result<>( - time, - new TopNResultValue( - Arrays.>asList( - ImmutableMap.of( - client_type, "iphone", - "UV", 2L, - "rows", 2L - ), - ImmutableMap.of( - client_type, "android", - "UV", 1L, - "rows", 1L - ) - ) - ) - ) - ); - TestHelper.assertExpectedResults(expectedResults, results); - } -} diff --git a/extensions-contrib/dropwizard-emitter/pom.xml b/extensions-contrib/dropwizard-emitter/pom.xml deleted file mode 100644 index 5ccfe295d9de..000000000000 --- a/extensions-contrib/dropwizard-emitter/pom.xml +++ /dev/null @@ -1,128 +0,0 @@ - - - - - 4.0.0 - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - org.apache.druid.extensions.contrib - dropwizard-emitter - dropwizard-emitter - Druid emitter extension to convert druid metric to Dropwizard metrics - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - org.apache.druid - druid-server - test-jar - ${project.parent.version} - test - - - io.dropwizard.metrics - metrics-core - ${dropwizard.metrics.version} - - - io.dropwizard.metrics - metrics-jmx - ${dropwizard.metrics.version} - - - junit - junit - test - - - org.easymock - easymock - test - - - pl.pragmatists - JUnitParams - test - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - org.apache.curator - curator-client - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.github.ben-manes.caffeine - caffeine - provided - - - com.google.guava - guava - provided - - - com.google.code.findbugs - jsr305 - provided - - - diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardConverter.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardConverter.java deleted file mode 100644 index 7c1194726b31..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardConverter.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.dropwizard; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Strings; -import org.apache.curator.shaded.com.google.common.io.Closeables; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; - -import javax.annotation.Nullable; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Map; - -/** - * - */ -public class DropwizardConverter -{ - private static final Logger log = new Logger(DropwizardConverter.class); - private final Map metricMap; - - public DropwizardConverter(ObjectMapper mapper, String dimensionMapPath) - { - metricMap = readMap(mapper, dimensionMapPath); - } - - /** - * Filters user dimensions for given metric and adds them to filteredDimensions. - * Returns null if there is no mapping present for the given metric. - */ - @Nullable - public DropwizardMetricSpec addFilteredUserDims( - String service, - String metric, - Map userDims, - Map filteredDimensions - ) - { - - // Find the metric in the map. If we cant find it try to look it up prefixed by the service name. - // This is because some metrics are reported differently, but with the same name, from different services. 
- DropwizardMetricSpec metricSpec = null; - DropwizardMetricSpec dropwizardMetricSpec = metricMap.get(metric); - if (dropwizardMetricSpec != null) { - metricSpec = dropwizardMetricSpec; - } else if (metricMap.containsKey(service + "-" + metric)) { - metricSpec = metricMap.get(service + "-" + metric); - } - if (metricSpec != null) { - for (String dim : metricSpec.getDimensions()) { - if (userDims.containsKey(dim)) { - filteredDimensions.put(dim, userDims.get(dim).toString()); - } - } - return metricSpec; - } else { - // No mapping found for given metric, return null - return null; - } - } - - private Map readMap(ObjectMapper mapper, String dimensionMapPath) - { - InputStream is = null; - try { - if (Strings.isNullOrEmpty(dimensionMapPath)) { - log.info("Using default metric dimension and types"); - is = this.getClass().getClassLoader().getResourceAsStream("defaultMetricDimensions.json"); - } else { - log.info("Using metric dimensions at types at [%s]", dimensionMapPath); - is = new FileInputStream(new File(dimensionMapPath)); - } - return mapper.readerFor(new TypeReference>() - { - }).readValue(is); - } - catch (IOException e) { - throw new ISE(e, "Failed to parse metric dimensions and types"); - } - finally { - Closeables.closeQuietly(is); - } - } -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitter.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitter.java deleted file mode 100644 index 5baa1b5da245..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitter.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.dropwizard; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Metric; -import com.codahale.metrics.MetricRegistry; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.benmanes.caffeine.cache.Cache; -import com.github.benmanes.caffeine.cache.Caffeine; -import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableList; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.AlertEvent; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.RejectedExecutionException; -import java.util.concurrent.atomic.AtomicBoolean; - -public class DropwizardEmitter implements Emitter -{ - private static final Logger log = new Logger(DropwizardEmitter.class); - private final MetricRegistry metricsRegistry; - private final AtomicBoolean started = new AtomicBoolean(false); - private final DropwizardConverter converter; - private final List alertEmitters; - private final List reporters; - private final DropwizardEmitterConfig config; - - public DropwizardEmitter( - DropwizardEmitterConfig config, - ObjectMapper mapper, - List alertEmitters - ) - { - this.alertEmitters = alertEmitters; - this.config = config; - this.reporters = config.getReporters(); - this.converter = new DropwizardConverter(mapper, config.getDimensionMapPath()); - final Cache metricsRegistryCache = Caffeine.newBuilder() - .recordStats() - .maximumSize(config.getMaxMetricsRegistrySize()) - .build(); - metricsRegistry = new MetricRegistry() - { - @Override - protected ConcurrentMap buildMap() - { - return metricsRegistryCache.asMap(); - } - }; - } - - - @Override - public void start() - { - final boolean alreadyStarted = started.getAndSet(true); - if (!alreadyStarted) { - for (DropwizardReporter reporter : reporters) { - reporter.start(metricsRegistry); - } - } - } - - @Override - public void emit(Event event) - { - synchronized (started) { - if (!started.get()) { - throw new RejectedExecutionException("Dropwizard emitter Service not started."); - } - } - if (event instanceof ServiceMetricEvent) { - ServiceMetricEvent metricEvent = (ServiceMetricEvent) event; - String host = metricEvent.getHost(); - String service = metricEvent.getService(); - String metric = metricEvent.getMetric(); - Map userDims = metricEvent.getUserDims(); - Number value = metricEvent.getValue(); - ImmutableList.Builder nameBuilder = new ImmutableList.Builder<>(); - LinkedHashMap dims = new LinkedHashMap<>(); - final DropwizardMetricSpec metricSpec = converter.addFilteredUserDims(service, metric, userDims, dims); - - if (metricSpec != null) { - if (config.getPrefix() != null) { - nameBuilder.add(config.getPrefix()); - } - nameBuilder.add(StringUtils.format("metric=%s", metric)); - nameBuilder.add(StringUtils.format("service=%s", service)); - if (config.getIncludeHost()) { - nameBuilder.add(StringUtils.format("hostname=%s", host)); - } - dims.forEach((key, value1) -> nameBuilder.add(StringUtils.format("%s=%s", key, value1))); - - String fullName = StringUtils.replaceChar(Joiner.on(",").join(nameBuilder.build()), '/', "."); - updateMetric(fullName, 
value, metricSpec); - } else { - log.debug("Service=[%s], Metric=[%s] has no mapping", service, metric); - } - } else if (event instanceof AlertEvent) { - for (Emitter emitter : alertEmitters) { - emitter.emit(event); - } - } else { - throw new ISE("unknown event type [%s]", event.getClass()); - } - } - - private void updateMetric(String name, Number value, DropwizardMetricSpec metricSpec) - { - switch (metricSpec.getType()) { - case meter: - metricsRegistry.meter(name).mark(value.longValue()); - break; - case timer: - metricsRegistry.timer(name) - .update(value.longValue(), metricSpec.getTimeUnit()); - break; - case counter: - metricsRegistry.counter(name).inc(value.longValue()); - break; - case histogram: - metricsRegistry.histogram(name).update(value.longValue()); - break; - case gauge: - SettableGauge gauge = (SettableGauge) metricsRegistry.gauge(name, () -> new SettableGauge(value)); - gauge.setValue(value); - break; - default: - throw new ISE("Unknown Metric Type [%s]", metricSpec.getType()); - } - } - - @Override - public void flush() - { - for (DropwizardReporter reporter : reporters) { - reporter.flush(); - } - } - - @Override - public void close() - { - final boolean wasStarted = started.getAndSet(false); - if (wasStarted) { - for (DropwizardReporter reporter : reporters) { - reporter.close(); - } - } - } - - private static class SettableGauge implements Gauge - { - private Number value; - - public SettableGauge(Number value) - { - this.value = value; - } - - public void setValue(Number value) - { - this.value = value; - } - - @Override - public Number getValue() - { - return value; - } - } - -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfig.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfig.java deleted file mode 100644 index ce2904308187..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfig.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.dropwizard; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -import java.util.Collections; -import java.util.List; -import java.util.Objects; - - -public class DropwizardEmitterConfig -{ - // default to 100 Mb - private static int DEFAULT_METRICS_REGISTRY_SIZE = 100_000_000; - @JsonProperty - private final List reporters; - @JsonProperty - private final String prefix; - @JsonProperty - private final Boolean includeHost; - @JsonProperty - private final String dimensionMapPath; - @JsonProperty - private final List alertEmitters; - @JsonProperty - private final int maxMetricsRegistrySize; - - @JsonCreator - public DropwizardEmitterConfig( - @JsonProperty("reporters") List reporters, - @JsonProperty("prefix") String prefix, - @JsonProperty("includeHost") Boolean includeHost, - @JsonProperty("dimensionMapPath") String dimensionMapPath, - @JsonProperty("alertEmitters") List alertEmitters, - @JsonProperty("maxMetricsRegistrySize") Integer maxMetricsRegistrySize - ) - { - Preconditions.checkArgument(reporters != null && !reporters.isEmpty()); - this.reporters = reporters; - this.prefix = prefix; - this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; - this.includeHost = includeHost != null ? includeHost : true; - this.dimensionMapPath = dimensionMapPath; - this.maxMetricsRegistrySize = maxMetricsRegistrySize == null ? DEFAULT_METRICS_REGISTRY_SIZE : maxMetricsRegistrySize; - } - - @JsonProperty - public List getReporters() - { - return reporters; - } - - @JsonProperty - public String getPrefix() - { - return prefix; - } - - @JsonProperty - public Boolean getIncludeHost() - { - return includeHost; - } - - @JsonProperty - public String getDimensionMapPath() - { - return dimensionMapPath; - } - - @JsonProperty - public List getAlertEmitters() - { - return alertEmitters; - } - - @JsonProperty - public int getMaxMetricsRegistrySize() - { - return maxMetricsRegistrySize; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - DropwizardEmitterConfig that = (DropwizardEmitterConfig) o; - return maxMetricsRegistrySize == that.maxMetricsRegistrySize && - Objects.equals(reporters, that.reporters) && - Objects.equals(prefix, that.prefix) && - Objects.equals(includeHost, that.includeHost) && - Objects.equals(dimensionMapPath, that.dimensionMapPath) && - Objects.equals(alertEmitters, that.alertEmitters); - } - - @Override - public int hashCode() - { - return Objects.hash(reporters, prefix, includeHost, dimensionMapPath, alertEmitters, maxMetricsRegistrySize); - } - - @Override - public String toString() - { - return "DropwizardEmitterConfig{" + - "reporters=" + reporters + - ", prefix='" + prefix + '\'' + - ", includeHost=" + includeHost + - ", dimensionMapPath='" + dimensionMapPath + '\'' + - ", alertEmitters=" + alertEmitters + - ", maxMetricsRegistrySize=" + maxMetricsRegistrySize + - '}'; - } -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterModule.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterModule.java deleted file mode 100644 index 524dad165e38..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterModule.java 
+++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.dropwizard; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.Binder; -import com.google.inject.Injector; -import com.google.inject.Key; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import com.google.inject.name.Names; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -public class DropwizardEmitterModule implements DruidModule -{ - private static final String EMITTER_TYPE = "dropwizard"; - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." + EMITTER_TYPE, DropwizardEmitterConfig.class); - } - - @Provides - @Named(EMITTER_TYPE) - public Emitter getEmitter( - DropwizardEmitterConfig dropwizardEmitterConfig, - ObjectMapper mapper, - final Injector injector - ) - { - List alertEmitters = dropwizardEmitterConfig.getAlertEmitters() - .stream() - .map(s -> injector.getInstance( - Key.get( - Emitter.class, - Names.named(s) - ))) - .collect(Collectors.toList()); - - return new DropwizardEmitter(dropwizardEmitterConfig, mapper, alertEmitters); - } -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardMetricSpec.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardMetricSpec.java deleted file mode 100644 index 98a652928106..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardMetricSpec.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.dropwizard; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.util.List; -import java.util.concurrent.TimeUnit; - -public class DropwizardMetricSpec -{ - @JsonProperty("dimensions") - private final List dimensions; - @JsonProperty("type") - private final Type type; - @JsonProperty("timeUnit") - private final TimeUnit timeUnit; - - @JsonCreator - DropwizardMetricSpec( - @JsonProperty("dimensions") List dimensions, - @JsonProperty("type") Type type, - @JsonProperty("timeUnit") TimeUnit timeUnit - ) - { - this.dimensions = dimensions; - this.type = type; - this.timeUnit = timeUnit; - } - - @JsonProperty - public Type getType() - { - return type; - } - - @JsonProperty - public List getDimensions() - { - return dimensions; - } - - @JsonProperty - public TimeUnit getTimeUnit() - { - return timeUnit; - } - - public enum Type - { - histogram, timer, meter, counter, gauge - } - -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardReporter.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardReporter.java deleted file mode 100644 index 9bb32481460e..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/DropwizardReporter.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.dropwizard; - -import com.codahale.metrics.MetricRegistry; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeInfo; -import org.apache.druid.emitter.dropwizard.reporters.DropwizardConsoleReporter; -import org.apache.druid.emitter.dropwizard.reporters.DropwizardJMXReporter; - -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") -@JsonSubTypes(value = { - @JsonSubTypes.Type(name = "console", value = DropwizardConsoleReporter.class), - @JsonSubTypes.Type(name = "jmx", value = DropwizardJMXReporter.class), -}) -public interface DropwizardReporter -{ - - void start(MetricRegistry metricRegistry); - - /** - * Used for reporters that choose to buffer events to trigger flushing of buffered events. - * It should be a non-blocking operation. 
- */ - void flush(); - - void close(); -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/GaugesCache.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/GaugesCache.java deleted file mode 100644 index 0f465f8f7103..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/GaugesCache.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.dropwizard; - -import java.util.LinkedHashMap; -import java.util.Map; - -class GaugesCache extends LinkedHashMap -{ - private int capacity; - - public GaugesCache(int capacity) - { - this.capacity = capacity; - } - - @Override - protected boolean removeEldestEntry(Map.Entry eldest) - { - return this.size() > this.capacity; - } -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardConsoleReporter.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardConsoleReporter.java deleted file mode 100644 index 9b0a072faf0d..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardConsoleReporter.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.dropwizard.reporters; - -import com.codahale.metrics.ConsoleReporter; -import com.codahale.metrics.MetricRegistry; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.druid.emitter.dropwizard.DropwizardReporter; - -import java.util.concurrent.TimeUnit; - -public class DropwizardConsoleReporter implements DropwizardReporter -{ - private long emitIntervalInSecs; - private TimeUnit rates = TimeUnit.SECONDS; - private TimeUnit durations = TimeUnit.MILLISECONDS; - private ConsoleReporter consoleReporter; - - @JsonProperty - public long getEmitIntervalInSecs() - { - return emitIntervalInSecs; - } - - @JsonProperty - public void setEmitIntervalInSecs(long emitIntervalInSecs) - { - this.emitIntervalInSecs = emitIntervalInSecs; - } - - @JsonProperty - public TimeUnit getRates() - { - return rates; - } - - @JsonProperty - public void setRates(String rates) - { - this.rates = TimeUnit.valueOf(rates); - } - - @JsonProperty - public TimeUnit getDurations() - { - return durations; - } - - @JsonProperty - public void setDurations(String durations) - { - this.durations = TimeUnit.valueOf(durations); - } - - @Override - public void start(MetricRegistry metricRegistry) - { - consoleReporter = ConsoleReporter.forRegistry(metricRegistry) - .convertDurationsTo(durations) - .convertRatesTo(rates) - .build(); - consoleReporter.start(emitIntervalInSecs, TimeUnit.SECONDS); - - } - - @Override - public void flush() - { - // no-op - } - - @Override - public void close() - { - consoleReporter.stop(); - } - - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - DropwizardConsoleReporter that = (DropwizardConsoleReporter) o; - - if (emitIntervalInSecs != that.emitIntervalInSecs) { - return false; - } - if (consoleReporter != null ? !consoleReporter.equals(that.consoleReporter) : that.consoleReporter != null) { - return false; - } - if (durations != that.durations) { - return false; - } - if (rates != that.rates) { - return false; - } - - return true; - } - - @Override - public int hashCode() - { - int result = (int) (emitIntervalInSecs ^ (emitIntervalInSecs >>> 32)); - result = 31 * result + (rates != null ? rates.hashCode() : 0); - result = 31 * result + (durations != null ? durations.hashCode() : 0); - result = 31 * result + (consoleReporter != null ? consoleReporter.hashCode() : 0); - return result; - } - - @Override - public String toString() - { - return "DropwizardConsoleReporter{" + - "emitIntervalInSecs=" + emitIntervalInSecs + - ", rates=" + rates + - ", durations=" + durations + - '}'; - } -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardJMXReporter.java b/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardJMXReporter.java deleted file mode 100644 index 654ff67a5d1a..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/java/org/apache/druid/emitter/dropwizard/reporters/DropwizardJMXReporter.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.dropwizard.reporters; - -import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.jmx.JmxReporter; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.druid.emitter.dropwizard.DropwizardReporter; - -import java.util.Objects; - -public class DropwizardJMXReporter implements DropwizardReporter -{ - private String domain = "org.apache.druid"; - private JmxReporter reporter; - - @JsonProperty - public String getDomain() - { - return domain; - } - - @Override - public void start(MetricRegistry metricRegistry) - { - reporter = JmxReporter.forRegistry(metricRegistry) - .inDomain(domain).build(); - reporter.start(); - } - - @Override - public void flush() - { - // no-op - } - - @Override - public void close() - { - reporter.close(); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - DropwizardJMXReporter that = (DropwizardJMXReporter) o; - return Objects.equals(domain, that.domain); - } - - @Override - public int hashCode() - { - return Objects.hash(domain); - } - - @Override - public String toString() - { - return "DropwizardJMXReporter{" + - "domain='" + domain + '\'' + - '}'; - } -} diff --git a/extensions-contrib/dropwizard-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/dropwizard-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index c6d84260ee30..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -org.apache.druid.emitter.dropwizard.DropwizardEmitterModule diff --git a/extensions-contrib/dropwizard-emitter/src/main/resources/defaultMetricDimensions.json b/extensions-contrib/dropwizard-emitter/src/main/resources/defaultMetricDimensions.json deleted file mode 100644 index 950d2638b000..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/main/resources/defaultMetricDimensions.json +++ /dev/null @@ -1,538 +0,0 @@ -{ - "query/time": { - "dimensions": [ - "dataSource", - "type" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "query/node/time": { - "dimensions": [ - "server" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "query/node/ttfb": { - "dimensions": [ - "server" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "query/node/backpressure": { - "dimensions": [ - "server" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "query/segment/time": { - "dimensions": [], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "query/wait/time": { - "dimensions": [], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "segment/scan/pending": { - "dimensions": [], - "type": "gauge" - }, - "query/segmentAndCache/time": { - "dimensions": [], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "query/cpu/time": { - "dimensions": [ - "dataSource", - "type" - ], - "type": "timer", - "timeUnit": "NANOSECONDS" - }, - "query/cache/delta/numEntries": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/sizeBytes": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/hits": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/misses": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/evictions": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/hitRate": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/averageBytes": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/timeouts": { - "dimensions": [], - "type": "counter" - }, - "query/cache/delta/errors": { - "dimensions": [], - "type": "counter" - }, - "query/cache/total/numEntries": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/sizeBytes": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/hits": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/misses": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/evictions": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/hitRate": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/averageBytes": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/timeouts": { - "dimensions": [], - "type": "gauge" - }, - "query/cache/total/errors": { - "dimensions": [], - "type": "gauge" - }, - "ingest/events/thrownAway": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/events/unparseable": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/events/duplicate": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/events/processed": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/rows/output": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/persist/counter": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/persist/time": { - "dimensions": [ - "dataSource" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "ingest/persist/cpu": { - "dimensions": [ - "dataSource" - ], - "type": "timer", - 
"timeUnit": "NANOSECONDS" - }, - "ingest/persist/backPressure": { - "dimensions": [ - "dataSource" - ], - "type": "gauge" - }, - "ingest/persist/failed": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/handoff/failed": { - "dimensions": [ - "dataSource" - ], - "type": "counter" - }, - "ingest/merge/time": { - "dimensions": [ - "dataSource" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "ingest/merge/cpu": { - "dimensions": [ - "dataSource" - ], - "type": "timer", - "timeUnit": "NANOSECONDS" - }, - "task/run/time": { - "dimensions": [ - "dataSource", - "taskType" - ], - "type": "timer", - "timeUnit": "MILLISECONDS" - }, - "segment/added/bytes": { - "dimensions": [ - "dataSource", - "taskType" - ], - "type": "counter" - }, - "segment/moved/bytes": { - "dimensions": [ - "dataSource", - "taskType" - ], - "type": "counter" - }, - "segment/nuked/bytes": { - "dimensions": [ - "dataSource", - "taskType" - ], - "type": "counter" - }, - "segment/assigned/counter": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/moved/counter": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/dropped/counter": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/deleted/counter": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/unneeded/counter": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/cost/raw": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/cost/normalization": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/cost/normalized": { - "dimensions": [ - "tier" - ], - "type": "counter" - }, - "segment/loadQueue/size": { - "dimensions": [ - "server" - ], - "type": "gauge" - }, - "segment/loadQueue/failed": { - "dimensions": [ - "server" - ], - "type": "gauge" - }, - "segment/loadQueue/counter": { - "dimensions": [ - "server" - ], - "type": "gauge" - }, - "segment/dropQueue/counter": { - "dimensions": [ - "server" - ], - "type": "gauge" - }, - "segment/size": { - "dimensions": [ - "dataSource" - ], - "type": "gauge" - }, - "segment/overShadowed/counter": { - "dimensions": [], - "type": "gauge" - }, - "segment/max": { - "dimensions": [], - "type": "gauge" - }, - "segment/used": { - "dimensions": [ - "dataSource", - "tier", - "priority" - ], - "type": "gauge" - }, - "segment/usedPercent": { - "dimensions": [ - "dataSource", - "tier", - "priority" - ], - "type": "gauge" - }, - "jvm/pool/committed": { - "dimensions": [ - "poolKind", - "poolName" - ], - "type": "gauge" - }, - "jvm/pool/init": { - "dimensions": [ - "poolKind", - "poolName" - ], - "type": "gauge" - }, - "jvm/pool/max": { - "dimensions": [ - "poolKind", - "poolName" - ], - "type": "gauge" - }, - "jvm/pool/used": { - "dimensions": [ - "poolKind", - "poolName" - ], - "type": "gauge" - }, - "jvm/bufferpool/counter": { - "dimensions": [ - "bufferpoolName" - ], - "type": "gauge" - }, - "jvm/bufferpool/used": { - "dimensions": [ - "bufferpoolName" - ], - "type": "gauge" - }, - "jvm/bufferpool/capacity": { - "dimensions": [ - "bufferpoolName" - ], - "type": "gauge" - }, - "jvm/mem/init": { - "dimensions": [ - "memKind" - ], - "type": "gauge" - }, - "jvm/mem/max": { - "dimensions": [ - "memKind" - ], - "type": "gauge" - }, - "jvm/mem/used": { - "dimensions": [ - "memKind" - ], - "type": "gauge" - }, - "jvm/mem/committed": { - "dimensions": [ - "memKind" - ], - "type": "gauge" - }, - "jvm/gc/counter": { - "dimensions": [ - "gcName", - "gcGen" - ], - "type": "counter" - }, - 
"jvm/gc/cpu": { - "dimensions": [ - "gcName", - "gcGen" - ], - "type": "timer", - "timeUnit": "NANOSECONDS" - }, - "ingest/events/buffered": { - "dimensions": [ - "serviceName", - "bufferCapacity" - ], - "type": "gauge" - }, - "sys/swap/free": { - "dimensions": [], - "type": "gauge" - }, - "sys/swap/max": { - "dimensions": [], - "type": "gauge" - }, - "sys/swap/pageIn": { - "dimensions": [], - "type": "gauge" - }, - "sys/swap/pageOut": { - "dimensions": [], - "type": "gauge" - }, - "sys/disk/write/counter": { - "dimensions": [ - "fsDevName" - ], - "type": "counter" - }, - "sys/disk/read/counter": { - "dimensions": [ - "fsDevName" - ], - "type": "counter" - }, - "sys/disk/write/size": { - "dimensions": [ - "fsDevName" - ], - "type": "counter" - }, - "sys/disk/read/size": { - "dimensions": [ - "fsDevName" - ], - "type": "counter" - }, - "sys/net/write/size": { - "dimensions": [], - "type": "counter" - }, - "sys/net/read/size": { - "dimensions": [], - "type": "counter" - }, - "sys/fs/used": { - "dimensions": [ - "fsDevName", - "fsDirName", - "fsTypeName", - "fsSysTypeName", - "fsOptions" - ], - "type": "gauge" - }, - "sys/fs/max": { - "dimensions": [ - "fsDevName", - "fsDirName", - "fsTypeName", - "fsSysTypeName", - "fsOptions" - ], - "type": "gauge" - }, - "sys/mem/used": { - "dimensions": [], - "type": "gauge" - }, - "sys/mem/max": { - "dimensions": [], - "type": "gauge" - }, - "sys/storage/used": { - "dimensions": [ - "fsDirName" - ], - "type": "gauge" - }, - "sys/cpu": { - "dimensions": [ - "cpuName", - "cpuTime" - ], - "type": "gauge" - }, - "coordinator-segment/counter": { - "dimensions": [ - "dataSource" - ], - "type": "gauge" - }, - "historical-segment/counter": { - "dimensions": [ - "dataSource", - "tier", - "priority" - ], - "type": "gauge" - } -} \ No newline at end of file diff --git a/extensions-contrib/dropwizard-emitter/src/test/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfigTest.java b/extensions-contrib/dropwizard-emitter/src/test/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfigTest.java deleted file mode 100644 index 1ace634f9ed3..000000000000 --- a/extensions-contrib/dropwizard-emitter/src/test/java/org/apache/druid/emitter/dropwizard/DropwizardEmitterConfigTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.dropwizard; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; -import org.apache.druid.emitter.dropwizard.reporters.DropwizardConsoleReporter; -import org.apache.druid.emitter.dropwizard.reporters.DropwizardJMXReporter; -import org.apache.druid.guice.JsonConfigTesterBase; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -public class DropwizardEmitterConfigTest extends JsonConfigTesterBase -{ - private ObjectMapper mapper = new DefaultObjectMapper(); - - @Before - public void setUp() - { - testProperties.put(getPropertyKey("reporters"), "[{\"type\":\"jmx\", \"domain\" : \"mydomain\"}]"); - propertyValues.put(getPropertyKey("reporters"), "[DropwizardJMXReporter{domain='mydomain'}]"); - propertyValues.put(getPropertyKey("includeHost"), "true"); - mapper.setInjectableValues(new InjectableValues.Std().addValue( - ObjectMapper.class, - new DefaultObjectMapper() - )); - } - - @Test - public void testSerDeserDropwizardEmitterConfig() throws IOException - { - DropwizardEmitterConfig dropwizardEmitterConfig = new DropwizardEmitterConfig( - Lists.newArrayList(new DropwizardConsoleReporter(), new DropwizardJMXReporter()), - "my-prefix", - false, - "my/config/path", - null, - 400 - ); - String dropwizardEmitterConfigString = mapper.writeValueAsString(dropwizardEmitterConfig); - DropwizardEmitterConfig dropwizardEmitterConfigExpected = mapper.readerFor(DropwizardEmitterConfig.class).readValue( - dropwizardEmitterConfigString - ); - Assert.assertEquals(dropwizardEmitterConfigExpected, dropwizardEmitterConfig); - } - - @Test - public void testSerde() - { - propertyValues.put(getPropertyKey("reporters"), "[{\"type\":\"jmx\"}]"); - propertyValues.put(getPropertyKey("prefix"), "test-prefix"); - propertyValues.put(getPropertyKey("includeHost"), "true"); - testProperties.putAll(propertyValues); - configProvider.inject(testProperties, configurator); - DropwizardEmitterConfig config = configProvider.get().get(); - Assert.assertTrue("IncludeHost", config.getIncludeHost()); - Assert.assertEquals("test-prefix", config.getPrefix()); - Assert.assertEquals(1, config.getReporters().size()); - Assert.assertTrue("jmx reporter", config.getReporters().get(0) instanceof DropwizardJMXReporter); - } - -} - - diff --git a/extensions-contrib/gce-extensions/pom.xml b/extensions-contrib/gce-extensions/pom.xml deleted file mode 100644 index 7fea7a2a3cad..000000000000 --- a/extensions-contrib/gce-extensions/pom.xml +++ /dev/null @@ -1,130 +0,0 @@ - - - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - 4.0.0 - - org.apache.druid.extensions.contrib - gce-extensions - gce-extensions - Extension to support the autoscaling in GCE - - - UTF-8 - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-indexing-service - ${project.parent.version} - provided - - - org.apache.druid - druid-aws-common - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - com.google.code.findbugs - jsr305 - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.google.apis - 
google-api-services-compute - v1-rev214-1.25.0 - compile - - - com.google.http-client - google-http-client - provided - - - com.google.http-client - google-http-client-jackson2 - provided - - - com.google.api-client - google-api-client - provided - - - org.apache.curator - curator-client - provided - - - - junit - junit - test - - - org.easymock - easymock - test - - - nl.jqno.equalsverifier - equalsverifier - test - - - diff --git a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScaler.java b/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScaler.java deleted file mode 100644 index 3c8f52915017..000000000000 --- a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScaler.java +++ /dev/null @@ -1,526 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.indexing.overlord.autoscaling.gce; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpTransport; -import com.google.api.client.json.JsonFactory; -import com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.services.compute.Compute; -import com.google.api.services.compute.ComputeScopes; -import com.google.api.services.compute.model.Instance; -import com.google.api.services.compute.model.InstanceGroupManagersDeleteInstancesRequest; -import com.google.api.services.compute.model.InstanceGroupManagersListManagedInstancesResponse; -import com.google.api.services.compute.model.InstanceList; -import com.google.api.services.compute.model.ManagedInstance; -import com.google.api.services.compute.model.NetworkInterface; -import com.google.api.services.compute.model.Operation; -import com.google.common.base.Preconditions; -import com.google.common.net.InetAddresses; -import org.apache.druid.indexing.overlord.autoscaling.AutoScaler; -import org.apache.druid.indexing.overlord.autoscaling.AutoScalingData; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.emitter.EmittingLogger; - -import javax.annotation.Nullable; -import java.io.IOException; -import java.security.GeneralSecurityException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - -/** - * This module permits the autoscaling of the workers in GCE - * - * General notes: - * - The IPs are IPs as in Internet Protocol, and they look like 1.2.3.4 - * - The IDs are 
the names of the instances of instances created, they look like prefix-abcd, - * where the prefix is chosen by you and abcd is a suffix assigned by GCE - */ -@JsonTypeName("gce") -public class GceAutoScaler implements AutoScaler -{ - private static final EmittingLogger log = new EmittingLogger(GceAutoScaler.class); - - private final GceEnvironmentConfig envConfig; - private final int minNumWorkers; - private final int maxNumWorkers; - - private Compute cachedComputeService = null; - - private static final long POLL_INTERVAL_MS = 5 * 1000; // 5 sec - private static final int RUNNING_INSTANCES_MAX_RETRIES = 10; - private static final int OPERATION_END_MAX_RETRIES = 10; - - @JsonCreator - public GceAutoScaler( - @JsonProperty("minNumWorkers") int minNumWorkers, - @JsonProperty("maxNumWorkers") int maxNumWorkers, - @JsonProperty("envConfig") GceEnvironmentConfig envConfig - ) - { - Preconditions.checkArgument(minNumWorkers > 0, - "minNumWorkers must be greater than 0"); - this.minNumWorkers = minNumWorkers; - Preconditions.checkArgument(maxNumWorkers > 0, - "maxNumWorkers must be greater than 0"); - Preconditions.checkArgument(maxNumWorkers > minNumWorkers, - "maxNumWorkers must be greater than minNumWorkers"); - this.maxNumWorkers = maxNumWorkers; - this.envConfig = envConfig; - } - - @Override - @JsonProperty - public int getMinNumWorkers() - { - return minNumWorkers; - } - - @Override - @JsonProperty - public int getMaxNumWorkers() - { - return maxNumWorkers; - } - - @Override - @JsonProperty - public GceEnvironmentConfig getEnvConfig() - { - return envConfig; - } - - @Nullable - Compute createComputeServiceImpl() - throws IOException, GeneralSecurityException, GceServiceException - { - HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport(); - JsonFactory jsonFactory = JacksonFactory.getDefaultInstance(); - GoogleCredential credential = GoogleCredential.getApplicationDefault( - httpTransport, - jsonFactory - ); - if (credential.createScopedRequired()) { - List scopes = new ArrayList<>(); - scopes.add(ComputeScopes.CLOUD_PLATFORM); - scopes.add(ComputeScopes.COMPUTE); - credential = credential.createScoped(scopes); - } - - if (credential.getClientAuthentication() != null) { - throw new GceServiceException("Not using a service account"); - } - - return new Compute.Builder(httpTransport, jsonFactory, credential) - .setApplicationName("DruidAutoscaler") - .build(); - } - - private synchronized Compute createComputeService() - throws IOException, GeneralSecurityException, InterruptedException, GceServiceException - { - final int maxRetries = 5; - - int retries = 0; - // This retry loop is here to catch the cases in which the underlying call to - // Compute.Builder(...).build() returns null, case that has been experienced - // sporadically at start time - while (cachedComputeService == null && retries < maxRetries) { - if (retries > 0) { - Thread.sleep(POLL_INTERVAL_MS); - } - - log.info("Creating new ComputeService [%d/%d]", retries + 1, maxRetries); - - try { - cachedComputeService = createComputeServiceImpl(); - retries++; - } - catch (Throwable e) { - log.error(e, "Got Exception in creating the ComputeService"); - throw e; - } - } - return cachedComputeService; - } - - // Used to wait for an operation to finish - @Nullable - private Operation.Error waitForOperationEnd( - Compute compute, - Operation operation) throws Exception - { - String status = operation.getStatus(); - String opId = operation.getName(); - for (int i = 0; i < OPERATION_END_MAX_RETRIES; i++) { - if 
(operation == null || "DONE".equals(status)) { - return operation == null ? null : operation.getError(); - } - log.info("Waiting for operation %s to end", opId); - Thread.sleep(POLL_INTERVAL_MS); - Compute.ZoneOperations.Get get = compute.zoneOperations().get( - envConfig.getProjectId(), - envConfig.getZoneName(), - opId - ); - operation = get.execute(); - if (operation != null) { - status = operation.getStatus(); - } - } - throw new InterruptedException( - StringUtils.format("Timed out waiting for operation %s to complete", opId) - ); - } - - /** - * When called resizes envConfig.getManagedInstanceGroupName() increasing it by creating - * envConfig.getNumInstances() new workers (unless the maximum is reached). Return the - * IDs of the workers created - */ - @Override - public AutoScalingData provision() - { - final String project = envConfig.getProjectId(); - final String zone = envConfig.getZoneName(); - final int numInstances = envConfig.getNumInstances(); - final String managedInstanceGroupName = envConfig.getManagedInstanceGroupName(); - - try { - List before = getRunningInstances(); - log.debug("Existing instances [%s]", String.join(",", before)); - - int toSize = Math.min(before.size() + numInstances, getMaxNumWorkers()); - if (before.size() >= toSize) { - // nothing to scale - return new AutoScalingData(new ArrayList<>()); - } - log.info("Asked to provision instances, will resize to %d", toSize); - - Compute computeService = createComputeService(); - Compute.InstanceGroupManagers.Resize request = - computeService.instanceGroupManagers().resize(project, zone, - managedInstanceGroupName, toSize); - - Operation response = request.execute(); - Operation.Error err = waitForOperationEnd(computeService, response); - if (err == null || err.isEmpty()) { - List after = null; - // as the waitForOperationEnd only waits for the operation to be scheduled - // this loop waits until the requested machines actually go up (or up to a - // certain amount of retries in checking) - for (int i = 0; i < RUNNING_INSTANCES_MAX_RETRIES; i++) { - after = getRunningInstances(); - if (after.size() == toSize) { - break; - } - log.info("Machines not up yet, waiting"); - Thread.sleep(POLL_INTERVAL_MS); - } - after.removeAll(before); // these should be the new ones - log.info("Added instances [%s]", String.join(",", after)); - return new AutoScalingData(after); - } else { - log.error("Unable to provision instances: %s", err.toPrettyString()); - } - } - catch (Exception e) { - log.error(e, "Unable to provision any gce instances."); - } - - return new AutoScalingData(new ArrayList<>()); - } - - /** - * Terminates the instances in the list of IPs provided by the caller - */ - @Override - public AutoScalingData terminate(List ips) - { - log.info("Asked to terminate: [%s]", String.join(",", ips)); - - if (ips.isEmpty()) { - return new AutoScalingData(new ArrayList<>()); - } - - List nodeIds = ipToIdLookup(ips); // if they are not IPs, they will be unchanged - try { - return terminateWithIds(nodeIds != null ? 
nodeIds : new ArrayList<>()); - } - catch (Exception e) { - log.error(e, "Unable to terminate any instances."); - } - - return new AutoScalingData(new ArrayList<>()); - } - - private List namesToInstances(List names) - { - List instances = new ArrayList<>(); - for (String name : names) { - instances.add( - // convert the name into a URL's path to be used in calls to the API - StringUtils.format("zones/%s/instances/%s", envConfig.getZoneName(), name) - ); - } - return instances; - } - - /** - * Terminates the instances in the list of IDs provided by the caller - */ - @Override - public AutoScalingData terminateWithIds(List ids) - { - log.info("Asked to terminate IDs: [%s]", String.join(",", ids)); - - if (ids.isEmpty()) { - return new AutoScalingData(new ArrayList<>()); - } - - try { - final String project = envConfig.getProjectId(); - final String zone = envConfig.getZoneName(); - final String managedInstanceGroupName = envConfig.getManagedInstanceGroupName(); - - List before = getRunningInstances(); - - InstanceGroupManagersDeleteInstancesRequest requestBody = - new InstanceGroupManagersDeleteInstancesRequest(); - requestBody.setInstances(namesToInstances(ids)); - - Compute computeService = createComputeService(); - Compute.InstanceGroupManagers.DeleteInstances request = - computeService - .instanceGroupManagers() - .deleteInstances(project, zone, managedInstanceGroupName, requestBody); - - Operation response = request.execute(); - Operation.Error err = waitForOperationEnd(computeService, response); - if (err == null || err.isEmpty()) { - List after = null; - // as the waitForOperationEnd only waits for the operation to be scheduled - // this loop waits until the requested machines actually go down (or up to a - // certain amount of retries in checking) - for (int i = 0; i < RUNNING_INSTANCES_MAX_RETRIES; i++) { - after = getRunningInstances(); - if (after.size() == (before.size() - ids.size())) { - break; - } - log.info("Machines not down yet, waiting"); - Thread.sleep(POLL_INTERVAL_MS); - } - before.removeAll(after); // keep only the ones no more present - return new AutoScalingData(before); - } else { - log.error("Unable to terminate instances: %s", err.toPrettyString()); - } - } - catch (Exception e) { - log.error(e, "Unable to terminate any instances."); - } - - return new AutoScalingData(new ArrayList<>()); - } - - // Returns the list of the IDs of the machines running in the MIG - private List getRunningInstances() - { - final long maxResults = 500L; // 500 is sadly the max, see below - - ArrayList ids = new ArrayList<>(); - try { - final String project = envConfig.getProjectId(); - final String zone = envConfig.getZoneName(); - final String managedInstanceGroupName = envConfig.getManagedInstanceGroupName(); - - Compute computeService = createComputeService(); - Compute.InstanceGroupManagers.ListManagedInstances request = - computeService - .instanceGroupManagers() - .listManagedInstances(project, zone, managedInstanceGroupName); - // Notice that while the doc says otherwise, there is not nextPageToken to page - // through results and so everything needs to be in the same page - request.setMaxResults(maxResults); - InstanceGroupManagersListManagedInstancesResponse response = request.execute(); - for (ManagedInstance mi : response.getManagedInstances()) { - ids.add(GceUtils.extractNameFromInstance(mi.getInstance())); - } - log.debug("Found running instances [%s]", String.join(",", ids)); - } - catch (Exception e) { - log.error(e, "Unable to get instances."); - } - return ids; - } - 
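Both provision() and terminateWithIds() above share the same convergence idiom: the GCE resize/delete call only schedules the change, so after waitForOperationEnd() they re-poll getRunningInstances() up to RUNNING_INSTANCES_MAX_RETRIES times, sleeping POLL_INTERVAL_MS between checks, until the group reaches the target size. A generic sketch of that loop; the names here are illustrative, not Druid API:

```java
import java.util.function.IntSupplier;

public final class PollUntil
{
  private PollUntil() {}

  /**
   * Re-checks a size supplier until it reports the expected value or the retry
   * budget runs out. Returns true if the expected size was observed.
   */
  public static boolean sizeReaches(IntSupplier currentSize, int expected, int maxRetries, long pollMs)
      throws InterruptedException
  {
    for (int i = 0; i < maxRetries; i++) {
      if (currentSize.getAsInt() == expected) {
        return true;
      }
      Thread.sleep(pollMs); // give the cloud side time to converge
    }
    return currentSize.getAsInt() == expected;
  }
}
```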
- /** - * Converts the IPs to IDs - */ - @Override - public List ipToIdLookup(List ips) - { - log.info("Asked IPs -> IDs for: [%s]", String.join(",", ips)); - - if (ips.isEmpty()) { - return new ArrayList<>(); - } - - // If the first one is not an IP, just assume all the other ones are not as well and just - // return them as they are. This check is here because Druid does not check if IPs are - // actually IPs and can send IDs to this function instead - if (!InetAddresses.isInetAddress(ips.get(0))) { - log.debug("Not IPs, doing nothing"); - return ips; - } - - final String project = envConfig.getProjectId(); - final String zone = envConfig.getZoneName(); - try { - Compute computeService = createComputeService(); - Compute.Instances.List request = computeService.instances().list(project, zone); - // Cannot filter by IP atm, see below - // request.setFilter(GceUtils.buildFilter(ips, "networkInterfaces[0].networkIP")); - - List instanceIds = new ArrayList<>(); - InstanceList response; - do { - response = request.execute(); - if (response.getItems() == null) { - continue; - } - for (Instance instance : response.getItems()) { - // This stupid look up is needed because atm it is not possible to filter - // by IP, see https://issuetracker.google.com/issues/73455339 - for (NetworkInterface ni : instance.getNetworkInterfaces()) { - if (ips.contains(ni.getNetworkIP())) { - instanceIds.add(instance.getName()); - } - } - } - request.setPageToken(response.getNextPageToken()); - } while (response.getNextPageToken() != null); - - log.debug("Converted to [%s]", String.join(",", instanceIds)); - return instanceIds; - } - catch (Exception e) { - log.error(e, "Unable to convert IPs to IDs."); - } - - return new ArrayList<>(); - } - - /** - * Converts the IDs to IPs - this is actually never called from the outside but it is called once - * from inside the class if terminate is used instead of terminateWithIds - */ - @Override - public List idToIpLookup(List nodeIds) - { - log.info("Asked IDs -> IPs for: [%s]", String.join(",", nodeIds)); - - if (nodeIds.isEmpty()) { - return new ArrayList<>(); - } - - final String project = envConfig.getProjectId(); - final String zone = envConfig.getZoneName(); - - try { - Compute computeService = createComputeService(); - Compute.Instances.List request = computeService.instances().list(project, zone); - request.setFilter(GceUtils.buildFilter(nodeIds, "name")); - - List instanceIps = new ArrayList<>(); - InstanceList response; - do { - response = request.execute(); - if (response.getItems() == null) { - continue; - } - for (Instance instance : response.getItems()) { - // Assuming that every server has at least one network interface... - String ip = instance.getNetworkInterfaces().get(0).getNetworkIP(); - // ...even though some IPs are reported as null on the spot but later they are ok, - // so we skip the ones that are null. 
fear not, they are picked up later this just - // prevents to have a machine called 'null' around which makes the caller wait for - // it for maxScalingDuration time before doing anything else - if (ip != null && !"null".equals(ip)) { - instanceIps.add(ip); - } else { - // log and skip it - log.warn("Call returned null IP for %s, skipping", instance.getName()); - } - } - request.setPageToken(response.getNextPageToken()); - } while (response.getNextPageToken() != null); - - return instanceIps; - } - catch (Exception e) { - log.error(e, "Unable to convert IDs to IPs."); - } - - return new ArrayList<>(); - } - - @Override - public String toString() - { - return "gceAutoScaler={" + - "envConfig=" + envConfig + - ", maxNumWorkers=" + maxNumWorkers + - ", minNumWorkers=" + minNumWorkers + - '}'; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - GceAutoScaler that = (GceAutoScaler) o; - - return Objects.equals(envConfig, that.envConfig) && - minNumWorkers == that.minNumWorkers && - maxNumWorkers == that.maxNumWorkers; - } - - @Override - public int hashCode() - { - int result = 0; - result = 31 * result + Objects.hashCode(envConfig); - result = 31 * result + minNumWorkers; - result = 31 * result + maxNumWorkers; - return result; - } -} diff --git a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceEnvironmentConfig.java b/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceEnvironmentConfig.java deleted file mode 100644 index 2bc9df3e78d4..000000000000 --- a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceEnvironmentConfig.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.apache.druid.indexing.overlord.autoscaling.gce;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-
-import java.util.Objects;
-
-/**
- */
-public class GceEnvironmentConfig
-{
- /**
- * numInstances: the number of workers to try to spawn at each call to provision
- * projectId: the id of the project where to operate
- * zoneName: the name of the zone where to operate
- * managedInstanceGroupName: the name of the managed instance group to resize
- * minWorkers: the minimum number of workers in the pool (*)
- * maxWorkers: the maximum number of workers in the pool (*)
- *
- * (*) both used by the caller of the AutoScaler to know if it makes sense to call
- * provision / terminate or if there is no hope that something would be done
- */
- private final int numInstances;
- private final String projectId;
- private final String zoneName;
- private final String managedInstanceGroupName;
-
- @JsonCreator
- public GceEnvironmentConfig(
- @JsonProperty("numInstances") int numInstances,
- @JsonProperty("projectId") String projectId,
- @JsonProperty("zoneName") String zoneName,
- @JsonProperty("managedInstanceGroupName") String managedInstanceGroupName
- )
- {
- Preconditions.checkArgument(numInstances > 0,
- "numInstances must be greater than 0");
- this.numInstances = numInstances;
- this.projectId = Preconditions.checkNotNull(projectId,
- "projectId must not be null");
- this.zoneName = Preconditions.checkNotNull(zoneName,
- "zoneName must not be null");
- this.managedInstanceGroupName = Preconditions.checkNotNull(
- managedInstanceGroupName,
- "managedInstanceGroupName must not be null"
- );
- }
-
- @JsonProperty
- public int getNumInstances()
- {
- return numInstances;
- }
-
-
- @JsonProperty
- String getZoneName()
- {
- return zoneName;
- }
-
- @JsonProperty
- String getProjectId()
- {
- return projectId;
- }
-
- @JsonProperty
- String getManagedInstanceGroupName()
- {
- return managedInstanceGroupName;
- }
-
- @Override
- public String toString()
- {
- return "GceEnvironmentConfig={" +
- "projectId=" + projectId +
- ", zoneName=" + zoneName +
- ", numInstances=" + numInstances +
- ", managedInstanceGroupName=" + managedInstanceGroupName +
- '}';
- }
-
- @Override
- public boolean equals(Object o)
- {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
-
- GceEnvironmentConfig that = (GceEnvironmentConfig) o;
- return (numInstances == that.numInstances &&
- projectId.equals(that.projectId) &&
- zoneName.equals(that.zoneName) &&
- managedInstanceGroupName.equals(that.managedInstanceGroupName));
- }
-
- @Override
- public int hashCode()
- {
- int result = 0;
- result = 31 * result + Objects.hashCode(projectId);
- result = 31 * result + Objects.hashCode(zoneName);
- result = 31 * result + Objects.hashCode(managedInstanceGroupName);
- result = 31 * result + numInstances;
- return result;
- }
-} diff --git a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceModule.java b/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceModule.java deleted file mode 100644 index ce1d1c1b20fd..000000000000 --- a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceModule.java +++ /dev/null @@ -1,42 +0,0 @@ -/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor
license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.indexing.overlord.autoscaling.gce; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.Collections; -import java.util.List; - -public class GceModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return Collections.singletonList(new SimpleModule("DruidGCEModule").registerSubtypes(GceAutoScaler.class)); - } - - @Override - public void configure(Binder binder) - { - } -} diff --git a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceServiceException.java b/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceServiceException.java deleted file mode 100644 index e618d46ee2f4..000000000000 --- a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceServiceException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.indexing.overlord.autoscaling.gce; - - -/** - * Provides a specialized Exception type for the GCE module - */ -public class GceServiceException extends Exception -{ - public GceServiceException(String message) - { - super(message); - } -} diff --git a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtils.java b/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtils.java deleted file mode 100644 index 6e9109f78213..000000000000 --- a/extensions-contrib/gce-extensions/src/main/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtils.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.indexing.overlord.autoscaling.gce;
-
-import org.apache.druid.java.util.common.StringUtils;
-
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Simple collection of utilities extracted to ease testing and simplify the GceAutoScaler class
- */
-public class GceUtils
-{
-
- /**
- * Converts https://www.googleapis.com/compute/v1/projects/X/zones/Y/instances/name-of-the-thing
- * into just `name-of-the-thing` as it is needed by the other pieces of the API
- */
- public static String extractNameFromInstance(String instance)
- {
- String name = instance;
- if (instance != null && !instance.isEmpty()) {
- int lastSlash = instance.lastIndexOf('/');
- if (lastSlash > -1) {
- name = instance.substring(lastSlash + 1);
- } else {
- name = instance; // assume it is not a URI-like string
- }
- }
- return name;
- }
-
- /**
- * Converts a list of terms into an 'OR' filter expression over a specific 'key'
- */
- public static String buildFilter(List<String> list, String key)
- {
- if (list == null || list.isEmpty() || key == null || key.isEmpty()) {
- throw new IllegalArgumentException("Arguments cannot be empty or null");
- }
- Iterator<String> it = list.iterator();
-
- StringBuilder sb = new StringBuilder();
- sb.append(StringUtils.format("(%s = \"%s\")", key, it.next()));
- while (it.hasNext()) {
- sb.append(" OR ").append(StringUtils.format("(%s = \"%s\")", key, it.next()));
- }
- return sb.toString();
- }
-
- // utility class, not meant to be instantiated
- private GceUtils()
- {
- }
-} diff --git a/extensions-contrib/gce-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/gce-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index c0fecfd530bd..000000000000 --- a/extensions-contrib/gce-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
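The two GceUtils string helpers above are what the autoscaler uses to translate between GCE resource URIs, instance names, and Compute API filter expressions. Their behavior is easiest to see from a usage sketch, assuming the GceUtils class from the deleted sources is on the classpath:

```java
import java.util.Arrays;

public class GceUtilsDemo
{
  public static void main(String[] args)
  {
    // Strip a full resource URI down to the bare instance name.
    String name = GceUtils.extractNameFromInstance(
        "https://www.googleapis.com/compute/v1/projects/x/zones/y/instances/druid-mig-abcd"
    );
    System.out.println(name); // druid-mig-abcd

    // Build an OR filter over the "name" field for the Compute list API.
    String filter = GceUtils.buildFilter(Arrays.asList("foo", "bar"), "name");
    System.out.println(filter); // (name = "foo") OR (name = "bar")
  }
}
```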
- -org.apache.druid.indexing.overlord.autoscaling.gce.GceModule diff --git a/extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScalerTest.java b/extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScalerTest.java deleted file mode 100644 index 4c7e78597f1b..000000000000 --- a/extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceAutoScalerTest.java +++ /dev/null @@ -1,853 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.indexing.overlord.autoscaling.gce; - -import com.fasterxml.jackson.databind.BeanProperty; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.api.services.compute.Compute; -import com.google.api.services.compute.model.Instance; -import com.google.api.services.compute.model.InstanceGroupManagersDeleteInstancesRequest; -import com.google.api.services.compute.model.InstanceGroupManagersListManagedInstancesResponse; -import com.google.api.services.compute.model.InstanceList; -import com.google.api.services.compute.model.ManagedInstance; -import com.google.api.services.compute.model.NetworkInterface; -import com.google.api.services.compute.model.Operation; -import nl.jqno.equalsverifier.EqualsVerifier; -import org.apache.druid.indexing.overlord.autoscaling.AutoScaler; -import org.apache.druid.indexing.overlord.autoscaling.AutoScalingData; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.StringUtils; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; -import java.security.GeneralSecurityException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -/** - */ -public class GceAutoScalerTest -{ - private Compute mockCompute = null; - // id -> ip & ip -> id - private Compute.Instances mockInstances = null; - private Compute.Instances.List mockIpToIdRequest = null; - private Compute.Instances.List mockIdToIpRequest = null; - // running instances - private Compute.InstanceGroupManagers mockInstanceGroupManagers = null; - private Compute.InstanceGroupManagers.ListManagedInstances mockInstancesRequest = null; - // terminate - private Compute.InstanceGroupManagers.DeleteInstances mockDeleteRequest = null; - //provision - private Compute.InstanceGroupManagers.Resize mockResizeRequest = null; - - @Before - public void setUp() - { - // for every 
test let's create all (only a subset needed for each test tho) - - mockCompute = EasyMock.createMock(Compute.class); - - mockInstances = EasyMock.createMock(Compute.Instances.class); - mockIpToIdRequest = EasyMock.createMock(Compute.Instances.List.class); - mockIdToIpRequest = EasyMock.createMock(Compute.Instances.List.class); - - mockInstanceGroupManagers = EasyMock.createMock(Compute.InstanceGroupManagers.class); - mockInstancesRequest = EasyMock.createMock( - Compute.InstanceGroupManagers.ListManagedInstances.class - ); - - mockDeleteRequest = EasyMock.createMock(Compute.InstanceGroupManagers.DeleteInstances.class); - - mockResizeRequest = EasyMock.createMock(Compute.InstanceGroupManagers.Resize.class); - } - - @After - public void tearDown() - { - // not calling verify here as we use different bits and pieces in each test - } - - private static void verifyAutoScaler(final GceAutoScaler autoScaler) - { - Assert.assertEquals(1, autoScaler.getEnvConfig().getNumInstances()); - Assert.assertEquals(4, autoScaler.getMaxNumWorkers()); - Assert.assertEquals(2, autoScaler.getMinNumWorkers()); - Assert.assertEquals("winkie-country", autoScaler.getEnvConfig().getZoneName()); - Assert.assertEquals("super-project", autoScaler.getEnvConfig().getProjectId()); - Assert.assertEquals("druid-mig", autoScaler.getEnvConfig().getManagedInstanceGroupName()); - } - - @Test - public void testConfig() - { - final String json = "{\n" - + " \"envConfig\" : {\n" - + " \"numInstances\" : 1,\n" - + " \"projectId\" : \"super-project\",\n" - + " \"zoneName\" : \"winkie-country\",\n" - + " \"managedInstanceGroupName\" : \"druid-mig\"\n" - + " },\n" - + " \"maxNumWorkers\" : 4,\n" - + " \"minNumWorkers\" : 2,\n" - + " \"type\" : \"gce\"\n" - + "}"; - - final ObjectMapper objectMapper = new DefaultObjectMapper() - .registerModules((Iterable) new GceModule().getJacksonModules()); - objectMapper.setInjectableValues( - new InjectableValues() - { - @Override - public Object findInjectableValue( - Object o, - DeserializationContext deserializationContext, - BeanProperty beanProperty, - Object o1 - ) - { - return null; - } - } - ); - - try { - final GceAutoScaler autoScaler = - (GceAutoScaler) objectMapper.readValue(json, AutoScaler.class); - verifyAutoScaler(autoScaler); - - final GceAutoScaler roundTripAutoScaler = (GceAutoScaler) objectMapper.readValue( - objectMapper.writeValueAsBytes(autoScaler), - AutoScaler.class - ); - verifyAutoScaler(roundTripAutoScaler); - - Assert.assertEquals("Round trip equals", autoScaler, roundTripAutoScaler); - } - catch (Exception e) { - Assert.fail(StringUtils.format("Got exception in test %s", e.getMessage())); - } - } - - @Test - public void testConfigEquals() - { - EqualsVerifier.forClass(GceEnvironmentConfig.class).withNonnullFields( - "projectId", "zoneName", "managedInstanceGroupName", "numInstances" - ).usingGetClass().verify(); - } - - private Instance makeInstance(String name, String ip) - { - Instance instance = new Instance(); - instance.setName(name); - NetworkInterface net = new NetworkInterface(); - net.setNetworkIP(ip); - instance.setNetworkInterfaces(Collections.singletonList(net)); - return instance; - } - - @Test - public void testIpToId() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - 
"createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // empty IPs - List ips1 = Collections.emptyList(); - List ids1 = autoScaler.ipToIdLookup(ips1); - Assert.assertEquals(0, ids1.size()); - - // actually not IPs - List ips2 = Collections.singletonList("foo-bar-baz"); - List ids2 = autoScaler.ipToIdLookup(ips2); - Assert.assertEquals(ips2, ids2); - - // actually IPs - Instance i1 = makeInstance("foo", "1.2.3.5"); // not the one we look for - Instance i2 = makeInstance("bar", "1.2.3.4"); // the one we do look for - InstanceList mockResponse = new InstanceList(); - mockResponse.setNextPageToken(null); - mockResponse.setItems(Arrays.asList(i1, i2)); - - EasyMock.expect(mockIpToIdRequest.execute()).andReturn(mockResponse); - EasyMock.expect(mockIpToIdRequest.setPageToken(EasyMock.anyString())).andReturn( - mockIpToIdRequest // the method needs to return something, what is actually irrelevant here - ); - EasyMock.replay(mockIpToIdRequest); - - EasyMock.expect(mockInstances.list("proj-x", "us-central-1")).andReturn(mockIpToIdRequest); - EasyMock.replay(mockInstances); - - EasyMock.expect(mockCompute.instances()).andReturn(mockInstances); - EasyMock.replay(mockCompute); - - List ips3 = Collections.singletonList("1.2.3.4"); - List ids3 = autoScaler.ipToIdLookup(ips3); - Assert.assertEquals(1, ids3.size()); - Assert.assertEquals("bar", ids3.get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstances); - EasyMock.verify(mockIpToIdRequest); - } - - @Test - public void testIdToIp() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // empty IPs - List ids1 = Collections.emptyList(); - List ips1 = autoScaler.idToIpLookup(ids1); - Assert.assertEquals(0, ips1.size()); - - // actually IDs - Instance i1 = makeInstance("foo", "null"); // invalid ip, not returned - Instance i2 = makeInstance("bar", "1.2.3.4"); // valid ip, returned - InstanceList mockResponse = new InstanceList(); - mockResponse.setNextPageToken(null); - mockResponse.setItems(Arrays.asList(i1, i2)); - - EasyMock.expect(mockIdToIpRequest.setFilter("(name = \"foo\") OR (name = \"bar\")")).andReturn( - mockIdToIpRequest // the method needs to return something but it is actually irrelevant - ); - EasyMock.expect(mockIdToIpRequest.execute()).andReturn(mockResponse); - EasyMock.expect(mockIdToIpRequest.setPageToken(EasyMock.anyString())).andReturn( - mockIdToIpRequest // the method needs to return something but it is actually irrelevant - ); - EasyMock.replay(mockIdToIpRequest); - - EasyMock.expect(mockInstances.list("proj-x", "us-central-1")).andReturn(mockIdToIpRequest); - EasyMock.replay(mockInstances); - - EasyMock.expect(mockCompute.instances()).andReturn(mockInstances); - EasyMock.replay(mockCompute); - - List ids3 = Arrays.asList("foo", "bar"); - List ips3 = autoScaler.idToIpLookup(ids3); - Assert.assertEquals(1, ips3.size()); - 
Assert.assertEquals("1.2.3.4", ips3.get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstances); - EasyMock.verify(mockIdToIpRequest); - } - - private InstanceGroupManagersListManagedInstancesResponse createRunningInstances( - List instances - ) - { - InstanceGroupManagersListManagedInstancesResponse mockResponse = - new InstanceGroupManagersListManagedInstancesResponse(); - mockResponse.setManagedInstances(new ArrayList<>()); - for (String x : instances) { - ManagedInstance mi = new ManagedInstance(); - mi.setInstance(x); - mockResponse.getManagedInstances().add(mi); - } - return mockResponse; - } - - @Test - public void testTerminateWithIds() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // set up getRunningInstances results - InstanceGroupManagersListManagedInstancesResponse beforeRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz" - )); - InstanceGroupManagersListManagedInstancesResponse afterRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar" - )); - - EasyMock.expect(mockInstancesRequest.execute()).andReturn(beforeRunningInstance); // 1st call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(afterRunningInstance); // 2nd call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.replay(mockInstancesRequest); - - - EasyMock.expect(mockInstanceGroupManagers.listManagedInstances( - "proj-x", - "us-central-1", - "druid-mig" - )).andReturn(mockInstancesRequest).times(2); - - // set up the delete operation - Operation mockResponse = new Operation(); - mockResponse.setStatus("DONE"); - mockResponse.setError(new Operation.Error()); - - EasyMock.expect(mockDeleteRequest.execute()).andReturn(mockResponse); - EasyMock.replay(mockDeleteRequest); - - InstanceGroupManagersDeleteInstancesRequest requestBody = - new InstanceGroupManagersDeleteInstancesRequest(); - requestBody.setInstances(Collections.singletonList("zones/us-central-1/instances/baz")); - - EasyMock.expect(mockInstanceGroupManagers.deleteInstances( - "proj-x", - "us-central-1", - "druid-mig", - requestBody - )).andReturn(mockDeleteRequest); - - EasyMock.replay(mockInstanceGroupManagers); - - // called twice in getRunningInstances... - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - // ...and once in terminateWithIds - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - - // and that's all folks! 
- EasyMock.replay(mockCompute); - - AutoScalingData autoScalingData = - autoScaler.terminateWithIds(Collections.singletonList("baz")); - Assert.assertEquals(1, autoScalingData.getNodeIds().size()); - Assert.assertEquals("baz", autoScalingData.getNodeIds().get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstanceGroupManagers); - EasyMock.verify(mockDeleteRequest); - EasyMock.verify(mockInstancesRequest); - } - - @Test - public void testTerminate() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // testing the ip --> id part - Instance i0 = makeInstance("baz", "1.2.3.6"); - InstanceList mockInstanceListResponse = new InstanceList(); - mockInstanceListResponse.setNextPageToken(null); - mockInstanceListResponse.setItems(Collections.singletonList(i0)); - - EasyMock.expect(mockIpToIdRequest.execute()).andReturn(mockInstanceListResponse); - EasyMock.expect(mockIpToIdRequest.setPageToken(EasyMock.anyString())).andReturn( - mockIpToIdRequest // the method needs to return something, what is actually irrelevant here - ); - EasyMock.replay(mockIpToIdRequest); - - EasyMock.expect(mockInstances.list("proj-x", "us-central-1")).andReturn(mockIpToIdRequest); - - EasyMock.expect(mockCompute.instances()).andReturn(mockInstances); - EasyMock.replay(mockInstances); - - // testing the delete part - InstanceGroupManagersListManagedInstancesResponse beforeRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz" - )); - InstanceGroupManagersListManagedInstancesResponse afterRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar" - )); - - EasyMock.expect(mockInstancesRequest.execute()).andReturn(beforeRunningInstance); // 1st call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(afterRunningInstance); // 2nd call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.replay(mockInstancesRequest); - - EasyMock.expect(mockInstanceGroupManagers.listManagedInstances( - "proj-x", - "us-central-1", - "druid-mig" - )).andReturn(mockInstancesRequest).times(2); - - // set up the delete operation - Operation mockResponse = new Operation(); - mockResponse.setStatus("DONE"); - mockResponse.setError(new Operation.Error()); - - EasyMock.expect(mockDeleteRequest.execute()).andReturn(mockResponse); - EasyMock.replay(mockDeleteRequest); - - InstanceGroupManagersDeleteInstancesRequest requestBody = - new InstanceGroupManagersDeleteInstancesRequest(); - requestBody.setInstances(Collections.singletonList("zones/us-central-1/instances/baz")); - - EasyMock.expect(mockInstanceGroupManagers.deleteInstances( - "proj-x", - "us-central-1", - "druid-mig", - requestBody - )).andReturn(mockDeleteRequest); - - EasyMock.replay(mockInstanceGroupManagers); - - // called twice in getRunningInstances... 
- EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - // ...and once in terminateWithIds - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - - // and that's all folks! - EasyMock.replay(mockCompute); - - AutoScalingData autoScalingData = - autoScaler.terminate(Collections.singletonList("1.2.3.6")); - Assert.assertEquals(1, autoScalingData.getNodeIds().size()); - Assert.assertEquals("baz", autoScalingData.getNodeIds().get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockIpToIdRequest); - EasyMock.verify(mockInstanceGroupManagers); - EasyMock.verify(mockDeleteRequest); - EasyMock.verify(mockInstancesRequest); - } - - @Test - public void testTerminateWithIdsWithMissingRemoval() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // set up getRunningInstances results - InstanceGroupManagersListManagedInstancesResponse beforeRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz" - )); - InstanceGroupManagersListManagedInstancesResponse after1RunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz" - )); // not changing anything, will trigger the loop around getRunningInstances - InstanceGroupManagersListManagedInstancesResponse after2RunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar" - )); // now the machine got dropped! 
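The staged listings above (unchanged on the first re-list, shrunk on the second) exercise the retry loop around getRunningInstances that the inline comments call out: terminateWithIds keeps re-listing the managed instances until the deleted node actually disappears. A minimal, self-contained sketch of that poll-until-changed pattern, with all names invented for illustration (this is not the actual GceAutoScaler code):

import java.util.List;
import java.util.concurrent.Callable;

// Illustrative sketch: re-invoke a lister until the listing reaches the
// expected size, mirroring what the three canned mock responses exercise.
final class PollUntilShrunk
{
  static List<String> poll(Callable<List<String>> lister, int expectedSize, long sleepMillis) throws Exception
  {
    List<String> current = lister.call();     // first re-list after the delete request
    while (current.size() != expectedSize) {  // unchanged listing -> poll again
      Thread.sleep(sleepMillis);              // back off between list calls
      current = lister.call();                // eventually the node drops out
    }
    return current;
  }
}

This is why the test primes mockInstancesRequest.execute() three times: one listing before the delete, one unchanged listing that forces another iteration, and one that finally unblocks the loop.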
- - EasyMock.expect(mockInstancesRequest.execute()).andReturn(beforeRunningInstance); // 1st call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(after1RunningInstance); // 2nd call, the next is needed - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(after2RunningInstance); // 3rd call, this unblocks - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.replay(mockInstancesRequest); - - - EasyMock.expect(mockInstanceGroupManagers.listManagedInstances( - "proj-x", - "us-central-1", - "druid-mig" - )).andReturn(mockInstancesRequest).times(3); - - // set up the delete operation - Operation mockResponse = new Operation(); - mockResponse.setStatus("DONE"); - mockResponse.setError(new Operation.Error()); - - EasyMock.expect(mockDeleteRequest.execute()).andReturn(mockResponse); - EasyMock.replay(mockDeleteRequest); - - InstanceGroupManagersDeleteInstancesRequest requestBody = - new InstanceGroupManagersDeleteInstancesRequest(); - requestBody.setInstances(Collections.singletonList("zones/us-central-1/instances/baz")); - - EasyMock.expect(mockInstanceGroupManagers.deleteInstances( - "proj-x", - "us-central-1", - "druid-mig", - requestBody - )).andReturn(mockDeleteRequest); - - EasyMock.replay(mockInstanceGroupManagers); - - // called three times in getRunningInstances... - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - // ...and once in terminateWithIds - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - - // and that's all folks! 
- EasyMock.replay(mockCompute); - - AutoScalingData autoScalingData = - autoScaler.terminateWithIds(Collections.singletonList("baz")); - Assert.assertEquals(1, autoScalingData.getNodeIds().size()); - Assert.assertEquals("baz", autoScalingData.getNodeIds().get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstanceGroupManagers); - EasyMock.verify(mockDeleteRequest); - EasyMock.verify(mockInstancesRequest); - } - - @Test - public void testProvision() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // set up getRunningInstances results - InstanceGroupManagersListManagedInstancesResponse beforeRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar" - )); - InstanceGroupManagersListManagedInstancesResponse afterRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz" - )); - - EasyMock.expect(mockInstancesRequest.execute()).andReturn(beforeRunningInstance); // 1st call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(afterRunningInstance); // 2nd call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.replay(mockInstancesRequest); - - EasyMock.expect(mockInstanceGroupManagers.listManagedInstances( - "proj-x", - "us-central-1", - "druid-mig" - )).andReturn(mockInstancesRequest).times(2); - - // set up the resize operation - Operation mockResponse = new Operation(); - mockResponse.setStatus("DONE"); - mockResponse.setError(new Operation.Error()); - - EasyMock.expect(mockResizeRequest.execute()).andReturn(mockResponse); - EasyMock.replay(mockResizeRequest); - - EasyMock.expect(mockInstanceGroupManagers.resize( - "proj-x", - "us-central-1", - "druid-mig", - 3 - )).andReturn(mockResizeRequest); - - EasyMock.replay(mockInstanceGroupManagers); - - // called twice in getRunningInstances... - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - // ...and once in provision - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - - // and that's all folks! 
- EasyMock.replay(mockCompute); - - AutoScalingData autoScalingData = autoScaler.provision(); - Assert.assertEquals(1, autoScalingData.getNodeIds().size()); - Assert.assertEquals("baz", autoScalingData.getNodeIds().get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstanceGroupManagers); - EasyMock.verify(mockResizeRequest); - EasyMock.verify(mockInstancesRequest); - } - - @Test - public void testProvisionSkipped() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // set up getRunningInstances results - InstanceGroupManagersListManagedInstancesResponse beforeRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz", - "http://xyz/zab" // already max instances, will not scale - )); - - EasyMock.expect(mockInstancesRequest.execute()).andReturn(beforeRunningInstance); - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.replay(mockInstancesRequest); - - EasyMock.expect(mockInstanceGroupManagers.listManagedInstances( - "proj-x", - "us-central-1", - "druid-mig" - )).andReturn(mockInstancesRequest); - - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.replay(mockInstanceGroupManagers); - - // and that's all folks! - EasyMock.replay(mockCompute); - - AutoScalingData autoScalingData = autoScaler.provision(); - Assert.assertEquals(0, autoScalingData.getNodeIds().size()); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstancesRequest); - EasyMock.verify(mockInstanceGroupManagers); - } - - @Test - public void testProvisionWithMissingNewInstances() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(mockCompute); - EasyMock.replay(autoScaler); - - // set up getRunningInstances results - InstanceGroupManagersListManagedInstancesResponse beforeRunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar" - )); - InstanceGroupManagersListManagedInstancesResponse after1RunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar" - )); // not changing anything, will trigger the loop around getRunningInstances - InstanceGroupManagersListManagedInstancesResponse after2RunningInstance = - createRunningInstances(Arrays.asList( - "http://xyz/foo", - "http://xyz/bar", - "http://xyz/baz" - )); // now the new machine is here! 
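Every test in this class builds the autoscaler as an EasyMock partial mock: only createComputeServiceImpl is stubbed, while the provision/terminate logic under test runs unmodified against the mocked Compute client. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch against a hypothetical class (RemoteService and all of its members are invented for illustration):

import org.easymock.EasyMock;

// Hypothetical collaborator whose expensive factory method gets stubbed.
class RemoteService
{
  protected Object createClient()
  {
    throw new UnsupportedOperationException("talks to the network");
  }

  int doWork()
  {
    return createClient() == null ? 0 : 1; // real logic, exercised by the test
  }
}

class PartialMockExample
{
  public static void main(String[] args)
  {
    // Partial mock: only createClient() is replaced; doWork() stays real.
    RemoteService service = EasyMock.createMockBuilder(RemoteService.class)
                                    .addMockedMethod("createClient")
                                    .createMock();
    EasyMock.expect(service.createClient()).andReturn(null); // one stubbed return per expected call
    EasyMock.replay(service);

    System.out.println(service.doWork()); // prints 0 without touching the network
    EasyMock.verify(service);
  }
}

Because EasyMock records one return value per expected invocation, the tests above can have createComputeServiceImpl() yield null on the first call and the mocked Compute on the second.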
- - EasyMock.expect(mockInstancesRequest.execute()).andReturn(beforeRunningInstance); // 1st call - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(after1RunningInstance); // 2nd call, the next is needed - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.expect(mockInstancesRequest.execute()).andReturn(after2RunningInstance); // 3rd call, this unblocks - EasyMock.expect(mockInstancesRequest.setMaxResults(500L)).andReturn(mockInstancesRequest); - EasyMock.replay(mockInstancesRequest); - - EasyMock.expect(mockInstanceGroupManagers.listManagedInstances( - "proj-x", - "us-central-1", - "druid-mig" - )).andReturn(mockInstancesRequest).times(3); - - // set up the resize operation - Operation mockResponse = new Operation(); - mockResponse.setStatus("DONE"); - mockResponse.setError(new Operation.Error()); - - EasyMock.expect(mockResizeRequest.execute()).andReturn(mockResponse); - EasyMock.replay(mockResizeRequest); - - EasyMock.expect(mockInstanceGroupManagers.resize( - "proj-x", - "us-central-1", - "druid-mig", - 3 - )).andReturn(mockResizeRequest); - - EasyMock.replay(mockInstanceGroupManagers); - - // called three times in getRunningInstances... - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - // ...and once in provision - EasyMock.expect(mockCompute.instanceGroupManagers()).andReturn(mockInstanceGroupManagers); - - // and that's all folks! - EasyMock.replay(mockCompute); - - AutoScalingData autoScalingData = autoScaler.provision(); - Assert.assertEquals(1, autoScalingData.getNodeIds().size()); - Assert.assertEquals("baz", autoScalingData.getNodeIds().get(0)); - - EasyMock.verify(mockCompute); - EasyMock.verify(mockInstanceGroupManagers); - EasyMock.verify(mockResizeRequest); - EasyMock.verify(mockInstancesRequest); - } - - @Test - public void testEquals() - { - EqualsVerifier.forClass(GceAutoScaler.class).withNonnullFields( - "envConfig", "maxNumWorkers", "minNumWorkers" - ).withIgnoredFields("cachedComputeService").usingGetClass().verify(); - } - - @Test - public void testFailedComputeCreation() - throws IOException, GeneralSecurityException, GceServiceException - { - GceAutoScaler autoScaler = EasyMock.createMockBuilder(GceAutoScaler.class).withConstructor( - int.class, - int.class, - GceEnvironmentConfig.class - ).withArgs( - 2, - 4, - new GceEnvironmentConfig(1, "proj-x", "us-central-1", "druid-mig") - ).addMockedMethod( - "createComputeServiceImpl" - ).createMock(); - - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.expect(autoScaler.createComputeServiceImpl()).andReturn(null); - EasyMock.replay(autoScaler); - - List ips = Collections.singletonList("1.2.3.4"); - List ids = autoScaler.ipToIdLookup(ips); - Assert.assertEquals(0, ids.size()); // Exception caught in execution results in empty result - } - -} diff --git a/extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtilsTest.java 
b/extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtilsTest.java
deleted file mode 100644
index 7c88355e1a39..000000000000
--- a/extensions-contrib/gce-extensions/src/test/java/org/apache/druid/indexing/overlord/autoscaling/gce/GceUtilsTest.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.indexing.overlord.autoscaling.gce;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- */
-public class GceUtilsTest
-{
-  @Test
-  public void testExtractNameFromInstance()
-  {
-    String instance0 =
-        "https://www.googleapis.com/compute/v1/projects/X/zones/Y/instances/name-of-the-thing";
-    Assert.assertEquals("name-of-the-thing", GceUtils.extractNameFromInstance(instance0));
-
-    String instance1 = "https://www.googleapis.com/compute/v1/projects/X/zones/Y/instances/";
-    Assert.assertEquals("", GceUtils.extractNameFromInstance(instance1));
-
-    String instance2 = "name-of-the-thing";
-    Assert.assertEquals("name-of-the-thing", GceUtils.extractNameFromInstance(instance2));
-
-    String instance3 = null;
-    Assert.assertEquals(null, GceUtils.extractNameFromInstance(instance3));
-
-    String instance4 = "";
-    Assert.assertEquals("", GceUtils.extractNameFromInstance(instance4));
-  }
-
-  @Test
-  public void testBuildFilter()
-  {
-    List<String> list0 = null;
-    try {
-      String x = GceUtils.buildFilter(list0, "name");
-      Assert.fail("Exception should have been thrown!");
-    }
-    catch (IllegalArgumentException e) {
-      // ok to be here!
-    }
-
-    List<String> list1 = new ArrayList<>();
-    try {
-      String x = GceUtils.buildFilter(list1, "name");
-      Assert.fail("Exception should have been thrown!");
-    }
-    catch (IllegalArgumentException e) {
-      // ok to be here!
-    }
-
-    List<String> list2 = new ArrayList<>();
-    list2.add("foo");
-    try {
-      String x = GceUtils.buildFilter(list2, null);
-      Assert.fail("Exception should have been thrown!");
-    }
-    catch (IllegalArgumentException e) {
-      // ok to be here!
- }
-
-    List<String> list3 = new ArrayList<>();
-    list3.add("foo");
-    Assert.assertEquals("(name = \"foo\")", GceUtils.buildFilter(list3, "name"));
-
-    List<String> list4 = new ArrayList<>();
-    list4.add("foo");
-    list4.add("bar");
-    Assert.assertEquals(
-        "(name = \"foo\") OR (name = \"bar\")",
-        GceUtils.buildFilter(list4, "name")
-    );
-  }
-
-}
diff --git a/extensions-contrib/graphite-emitter/pom.xml b/extensions-contrib/graphite-emitter/pom.xml
deleted file mode 100644
index 9f415ff01f3e..000000000000
--- a/extensions-contrib/graphite-emitter/pom.xml
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements. See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership. The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License. You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied. See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.druid</groupId>
-    <artifactId>druid</artifactId>
-    <version>0.19.0-iap2-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <groupId>org.apache.druid.extensions.contrib</groupId>
-  <artifactId>graphite-emitter</artifactId>
-  <name>graphite-emitter</name>
-  <description>Druid emitter extension to support graphite</description>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-server</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>io.dropwizard.metrics</groupId>
-      <artifactId>metrics-graphite</artifactId>
-      <version>3.1.2</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>joda-time</groupId>
-      <artifactId>joda-time</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-core</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>javax.validation</groupId>
-      <artifactId>validation-api</artifactId>
-      <scope>provided</scope>
-    </dependency>
-
-    <!-- Tests -->
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>pl.pragmatists</groupId>
-      <artifactId>JUnitParams</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/DruidToGraphiteEventConverter.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/DruidToGraphiteEventConverter.java
deleted file mode 100644
index 35505ccc1537..000000000000
--- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/DruidToGraphiteEventConverter.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.emitter.graphite;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
-
-
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = WhiteListBasedConverter.class)
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "all", value = SendAllGraphiteEventConverter.class),
-    @JsonSubTypes.Type(name = "whiteList", value = WhiteListBasedConverter.class)
-})
-
-public interface DruidToGraphiteEventConverter
-{
-  /**
-   * This function acts as a filter: it returns null if the event is not supposed to be emitted to Graphite.
-   * It also defines the mapping between the Druid event dimensions' values and the Graphite metric path.
-   *
-   * @param serviceMetricEvent Druid event of type {@link ServiceMetricEvent}
-   *
-   * @return {@link GraphiteEvent} or null
-   */
-  GraphiteEvent druidEventToGraphite(ServiceMetricEvent serviceMetricEvent);
-}
diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java
deleted file mode 100644
index 13ffb484b2d5..000000000000
--- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitter.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.apache.druid.emitter.graphite; - -import com.codahale.metrics.graphite.Graphite; -import com.codahale.metrics.graphite.GraphiteSender; -import com.codahale.metrics.graphite.PickledGraphite; -import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.AlertEvent; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.druid.server.log.RequestLogEvent; - -import java.io.IOException; -import java.net.SocketException; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.regex.Pattern; - - -public class GraphiteEmitter implements Emitter -{ - private static Logger log = new Logger(GraphiteEmitter.class); - - private final DruidToGraphiteEventConverter graphiteEventConverter; - private final GraphiteEmitterConfig graphiteEmitterConfig; - private final List alertEmitters; - private final List requestLogEmitters; - private final AtomicBoolean started = new AtomicBoolean(false); - private final LinkedBlockingQueue eventsQueue; - private static final long FLUSH_TIMEOUT_MILLIS = TimeUnit.MINUTES.toMillis(1); // default flush wait 1 min - private static final Pattern DOT_OR_WHITESPACE_PATTERN = Pattern.compile("[\\s]+|[.]+"); - private final ScheduledExecutorService exec = Executors.newScheduledThreadPool(2, new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("GraphiteEmitter-%s") - .build()); // Thread pool of two in order to schedule flush runnable - private AtomicLong countLostEvents = new AtomicLong(0); - - public GraphiteEmitter( - GraphiteEmitterConfig graphiteEmitterConfig, - List alertEmitters, - List requestLogEmitters - ) - { - this.alertEmitters = alertEmitters; - this.requestLogEmitters = requestLogEmitters; - this.graphiteEmitterConfig = graphiteEmitterConfig; - this.graphiteEventConverter = graphiteEmitterConfig.getDruidToGraphiteEventConverter(); - this.eventsQueue = new LinkedBlockingQueue(graphiteEmitterConfig.getMaxQueueSize()); - } - - @Override - public void start() - { - log.info("Starting Graphite Emitter."); - synchronized (started) { - if (!started.get()) { - exec.scheduleAtFixedRate( - new ConsumerRunnable(), - graphiteEmitterConfig.getFlushPeriod(), - graphiteEmitterConfig.getFlushPeriod(), - TimeUnit.MILLISECONDS - ); - started.set(true); - } - } - } - - - @Override - public void emit(Event event) - { - if (!started.get()) { - throw new ISE("WTF emit was called while service is not started yet"); - } - if (event instanceof ServiceMetricEvent) { - final GraphiteEvent graphiteEvent = graphiteEventConverter.druidEventToGraphite((ServiceMetricEvent) event); - if (graphiteEvent == null) { - return; - } - try { - final boolean isSuccessful = eventsQueue.offer( - graphiteEvent, - graphiteEmitterConfig.getEmitWaitTime(), - TimeUnit.MILLISECONDS - ); - if (!isSuccessful) { - if (countLostEvents.getAndIncrement() % 1000 == 0) { - log.error( - "Lost total of 
[%s] events because of emitter queue is full. Please increase the capacity or/and the consumer frequency", - countLostEvents.get() - ); - } - } - } - catch (InterruptedException e) { - log.error(e, "got interrupted with message [%s]", e.getMessage()); - Thread.currentThread().interrupt(); - } - } else if (event instanceof RequestLogEvent) { - for (Emitter emitter : requestLogEmitters) { - emitter.emit(event); - } - } else if (!alertEmitters.isEmpty() && event instanceof AlertEvent) { - for (Emitter emitter : alertEmitters) { - emitter.emit(event); - } - } else if (event instanceof AlertEvent) { - AlertEvent alertEvent = (AlertEvent) event; - log.error( - "The following alert is dropped, description is [%s], severity is [%s]", - alertEvent.getDescription(), alertEvent.getSeverity() - ); - } else { - log.error("unknown event type [%s]", event.getClass()); - } - } - - private class ConsumerRunnable implements Runnable - { - private final GraphiteSender graphite; - - public ConsumerRunnable() - { - if (graphiteEmitterConfig.getProtocol().equals(GraphiteEmitterConfig.PLAINTEXT_PROTOCOL)) { - graphite = new Graphite( - graphiteEmitterConfig.getHostname(), - graphiteEmitterConfig.getPort() - ); - } else { - graphite = new PickledGraphite( - graphiteEmitterConfig.getHostname(), - graphiteEmitterConfig.getPort(), - graphiteEmitterConfig.getBatchSize() - ); - } - log.info("Using %s protocol.", graphiteEmitterConfig.getProtocol()); - } - - @Override - public void run() - { - try { - if (!graphite.isConnected()) { - log.info("trying to connect to graphite server"); - graphite.connect(); - } - while (eventsQueue.size() > 0 && !exec.isShutdown()) { - try { - final GraphiteEvent graphiteEvent = eventsQueue.poll( - graphiteEmitterConfig.getWaitForEventTime(), - TimeUnit.MILLISECONDS - ); - if (graphiteEvent != null) { - log.debug( - "sent [%s] with value [%s] and time [%s]", - graphiteEvent.getEventPath(), - graphiteEvent.getValue(), - graphiteEvent.getTimestamp() - ); - graphite.send( - graphiteEvent.getEventPath(), - graphiteEvent.getValue(), - graphiteEvent.getTimestamp() - ); - } - } - catch (InterruptedException | IOException e) { - log.error(e, e.getMessage()); - if (e instanceof InterruptedException) { - Thread.currentThread().interrupt(); - break; - } else if (e instanceof SocketException) { - // This is antagonistic to general Closeable contract in Java, - // it is needed to allow re-connection in case of the socket is closed due long period of inactivity - graphite.close(); - log.warn("Trying to re-connect to graphite server"); - graphite.connect(); - } - } - } - } - catch (Exception e) { - log.error(e, e.getMessage()); - if (e instanceof InterruptedException) { - Thread.currentThread().interrupt(); - } - } - } - } - - @Override - public void flush() - { - if (started.get()) { - Future future = exec.schedule(new ConsumerRunnable(), 0, TimeUnit.MILLISECONDS); - try { - future.get(FLUSH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS); - } - catch (InterruptedException | ExecutionException | TimeoutException e) { - if (e instanceof InterruptedException) { - throw new RuntimeException("interrupted flushing elements from queue", e); - } - log.error(e, e.getMessage()); - } - } - - } - - @Override - public void close() - { - flush(); - started.set(false); - exec.shutdown(); - } - - protected static String sanitize(String namespace) - { - return sanitize(namespace, false); - } - - protected static String sanitize(String namespace, Boolean replaceSlashToDot) - { - String sanitizedNamespace = 
DOT_OR_WHITESPACE_PATTERN.matcher(namespace).replaceAll("_"); - if (replaceSlashToDot) { - sanitizedNamespace = sanitizedNamespace.replace('/', '.'); - } - return sanitizedNamespace; - } -} diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfig.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfig.java deleted file mode 100644 index 3a16322db187..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfig.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.graphite; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; - -public class GraphiteEmitterConfig -{ - public static final String PLAINTEXT_PROTOCOL = "plaintext"; - public static final String PICKLE_PROTOCOL = "pickle"; - private static final int DEFAULT_BATCH_SIZE = 100; - private static final long DEFAULT_FLUSH_PERIOD_MILLIS = TimeUnit.MINUTES.toMillis(1); // flush every one minute - private static final long DEFAULT_GET_TIMEOUT_MILLIS = TimeUnit.SECONDS.toMillis(1); // default wait for get operations on the queue 1 sec - - @JsonProperty - private final String hostname; - @JsonProperty - private final int port; - @JsonProperty - private final int batchSize; - @JsonProperty - private final String protocol; - @JsonProperty - private final Long flushPeriod; - @JsonProperty - private final Integer maxQueueSize; - @JsonProperty("eventConverter") - private final DruidToGraphiteEventConverter druidToGraphiteEventConverter; - @JsonProperty - private final List alertEmitters; - @JsonProperty - private final List requestLogEmitters; - - @JsonProperty - private final Long emitWaitTime; - //waiting up to the specified wait time if necessary for an event to become available. 
- @JsonProperty - private final Long waitForEventTime; - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (!(o instanceof GraphiteEmitterConfig)) { - return false; - } - - GraphiteEmitterConfig that = (GraphiteEmitterConfig) o; - - if (getPort() != that.getPort()) { - return false; - } - if (getBatchSize() != that.getBatchSize()) { - return false; - } - if (!getProtocol().equals(that.getProtocol())) { - return false; - } - if (!getHostname().equals(that.getHostname())) { - return false; - } - if (!getFlushPeriod().equals(that.getFlushPeriod())) { - return false; - } - if (!getMaxQueueSize().equals(that.getMaxQueueSize())) { - return false; - } - if (!getDruidToGraphiteEventConverter().equals(that.getDruidToGraphiteEventConverter())) { - return false; - } - if (getAlertEmitters() != null - ? !getAlertEmitters().equals(that.getAlertEmitters()) - : that.getAlertEmitters() != null) { - return false; - } - if (getRequestLogEmitters() != null - ? !getRequestLogEmitters().equals(that.getRequestLogEmitters()) - : that.getRequestLogEmitters() != null) { - return false; - } - if (!getEmitWaitTime().equals(that.getEmitWaitTime())) { - return false; - } - return getWaitForEventTime().equals(that.getWaitForEventTime()); - - } - - @Override - public int hashCode() - { - int result = getHostname().hashCode(); - result = 31 * result + getPort(); - result = 31 * result + getBatchSize(); - result = 31 * result + getProtocol().hashCode(); - result = 31 * result + getFlushPeriod().hashCode(); - result = 31 * result + getMaxQueueSize().hashCode(); - result = 31 * result + getDruidToGraphiteEventConverter().hashCode(); - result = 31 * result + (getAlertEmitters() != null ? getAlertEmitters().hashCode() : 0); - result = 31 * result + (getRequestLogEmitters() != null ? getRequestLogEmitters().hashCode() : 0); - result = 31 * result + getEmitWaitTime().hashCode(); - result = 31 * result + getWaitForEventTime().hashCode(); - return result; - } - - @JsonCreator - public GraphiteEmitterConfig( - @JsonProperty("hostname") String hostname, - @JsonProperty("port") Integer port, - @JsonProperty("batchSize") Integer batchSize, - @JsonProperty("protocol") String protocol, - @JsonProperty("flushPeriod") Long flushPeriod, - @JsonProperty("maxQueueSize") Integer maxQueueSize, - @JsonProperty("eventConverter") DruidToGraphiteEventConverter druidToGraphiteEventConverter, - @JsonProperty("alertEmitters") List alertEmitters, - @JsonProperty("requestLogEmitters") List requestLogEmitters, - @JsonProperty("emitWaitTime") Long emitWaitTime, - @JsonProperty("waitForEventTime") Long waitForEventTime - ) - { - this.waitForEventTime = waitForEventTime == null ? DEFAULT_GET_TIMEOUT_MILLIS : waitForEventTime; - this.emitWaitTime = emitWaitTime == null ? 0 : emitWaitTime; - this.alertEmitters = alertEmitters == null ? Collections.emptyList() : alertEmitters; - this.requestLogEmitters = requestLogEmitters == null ? Collections.emptyList() : requestLogEmitters; - this.druidToGraphiteEventConverter = Preconditions.checkNotNull( - druidToGraphiteEventConverter, - "Event converter can not ne null dude" - ); - this.flushPeriod = flushPeriod == null ? DEFAULT_FLUSH_PERIOD_MILLIS : flushPeriod; - this.maxQueueSize = maxQueueSize == null ? Integer.MAX_VALUE : maxQueueSize; - this.hostname = Preconditions.checkNotNull(hostname, "hostname can not be null"); - this.port = Preconditions.checkNotNull(port, "port can not be null"); - this.batchSize = (batchSize == null) ? 
DEFAULT_BATCH_SIZE : batchSize; - this.protocol = (protocol == null) ? PICKLE_PROTOCOL : protocol; - } - - @JsonProperty - public String getHostname() - { - return hostname; - } - - @JsonProperty - public int getPort() - { - return port; - } - - @JsonProperty - public int getBatchSize() - { - return batchSize; - } - - @JsonProperty - public String getProtocol() - { - return protocol; - } - - @JsonProperty - public Integer getMaxQueueSize() - { - return maxQueueSize; - } - - @JsonProperty - public Long getFlushPeriod() - { - return flushPeriod; - } - - @JsonProperty - public DruidToGraphiteEventConverter getDruidToGraphiteEventConverter() - { - return druidToGraphiteEventConverter; - } - - @JsonProperty - public List getAlertEmitters() - { - return alertEmitters; - } - - @JsonProperty - public List getRequestLogEmitters() - { - return requestLogEmitters; - } - - @JsonProperty - public Long getEmitWaitTime() - { - return emitWaitTime; - } - - @JsonProperty - public Long getWaitForEventTime() - { - return waitForEventTime; - } -} diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterModule.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterModule.java deleted file mode 100644 index 3f9904529f12..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEmitterModule.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.graphite; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.inject.Binder; -import com.google.inject.Injector; -import com.google.inject.Key; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import com.google.inject.name.Names; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; - -public class GraphiteEmitterModule implements DruidModule -{ - private static final String EMITTER_TYPE = "graphite"; - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." 
+ EMITTER_TYPE, GraphiteEmitterConfig.class); - } - - @Provides - @ManageLifecycle - @Named(EMITTER_TYPE) - public Emitter getEmitter(GraphiteEmitterConfig graphiteEmitterConfig, ObjectMapper mapper, final Injector injector) - { - List emitters = ImmutableList.copyOf( - Lists.transform( - graphiteEmitterConfig.getAlertEmitters(), - alertEmitterName -> { - return injector.getInstance(Key.get(Emitter.class, Names.named(alertEmitterName))); - } - ) - ); - - List requestLogEmitters = ImmutableList.copyOf( - Lists.transform( - graphiteEmitterConfig.getRequestLogEmitters(), - requestLogEmitterName -> { - return injector.getInstance(Key.get(Emitter.class, Names.named(requestLogEmitterName))); - } - ) - ); - return new GraphiteEmitter(graphiteEmitterConfig, emitters, requestLogEmitters); - } -} diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEvent.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEvent.java deleted file mode 100644 index 5712bb11fbde..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/GraphiteEvent.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.graphite; - -import com.google.common.base.Preconditions; - -import javax.validation.constraints.NotNull; - -public class GraphiteEvent -{ - private final String eventPath; - private final String value; - private final long timestamp; - - /** - * A graphite event must be in the following format: - * ex: PRODUCTION.host.graphite-tutorial.responseTime.p95 0.10 1400509112 - * @param eventPath This is the namespace path of the metric - * @param value value of the metric - * @param timestamp unix time in second - */ - GraphiteEvent(@NotNull String eventPath, @NotNull String value, @NotNull Long timestamp) - { - this.eventPath = Preconditions.checkNotNull(eventPath, "path can not be null"); - this.value = Preconditions.checkNotNull(value, "value can not be null"); - this.timestamp = Preconditions.checkNotNull(timestamp, "timestamp can not be null"); - } - - public String getEventPath() - { - return eventPath; - } - - public String getValue() - { - return value; - } - - public long getTimestamp() - { - return timestamp; - } -} diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/SendAllGraphiteEventConverter.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/SendAllGraphiteEventConverter.java deleted file mode 100644 index 23d79ded7c5f..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/SendAllGraphiteEventConverter.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.graphite; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSortedSet; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; - -import java.util.concurrent.TimeUnit; - -/** - * Emits all the events instance of {@link ServiceMetricEvent}. - *

- * All the dimensions will be retained and lexicographically ordered using dimension names.
- *

- * The metric path of the graphite event is:
- * <namespacePrefix>.[<serviceName>].[<hostname>].<dimension values ordered by dimension name>.<metric>
- *

- * Note that this path will be sanitized by replacing all the `.` or `space` to `_` {@link GraphiteEmitter#sanitize(String)} - */ - -@JsonTypeName("all") -public class SendAllGraphiteEventConverter implements DruidToGraphiteEventConverter -{ - @JsonProperty - private final boolean ignoreHostname; - - @JsonProperty - private final boolean ignoreServiceName; - - @JsonProperty - private final String namespacePrefix; - - @JsonProperty - private final boolean replaceSlashWithDot; - - @JsonProperty - public String getNamespacePrefix() - { - return namespacePrefix; - } - - @JsonProperty - public boolean isIgnoreServiceName() - { - return ignoreServiceName; - } - - @JsonProperty - public boolean isIgnoreHostname() - { - return ignoreHostname; - } - - @JsonProperty - public boolean replaceSlashWithDot() - { - return replaceSlashWithDot; - } - - @JsonCreator - public SendAllGraphiteEventConverter( - @JsonProperty("namespacePrefix") String namespacePrefix, - @JsonProperty("ignoreHostname") Boolean ignoreHostname, - @JsonProperty("ignoreServiceName") Boolean ignoreServiceName, - @JsonProperty("replaceSlashWithDot") Boolean replaceSlashWithDot - ) - { - this.ignoreHostname = ignoreHostname == null ? false : ignoreHostname; - this.ignoreServiceName = ignoreServiceName == null ? false : ignoreServiceName; - this.replaceSlashWithDot = replaceSlashWithDot == null ? false : replaceSlashWithDot; - this.namespacePrefix = Preconditions.checkNotNull(namespacePrefix, "namespace prefix can not be null"); - } - - @Override - public GraphiteEvent druidEventToGraphite(ServiceMetricEvent serviceMetricEvent) - { - ImmutableList.Builder metricPathBuilder = new ImmutableList.Builder(); - metricPathBuilder.add(this.getNamespacePrefix()); - if (!this.isIgnoreServiceName()) { - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getService())); - } - if (!this.isIgnoreHostname()) { - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getHost())); - } - - ImmutableSortedSet dimNames = ImmutableSortedSet.copyOf(serviceMetricEvent.getUserDims().keySet()); - for (String dimName : dimNames) { - metricPathBuilder.add(GraphiteEmitter.sanitize(String.valueOf(serviceMetricEvent.getUserDims() - .get(dimName)))); - } - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric(), this.replaceSlashWithDot())); - - return new GraphiteEvent( - Joiner.on(".").join(metricPathBuilder.build()), - serviceMetricEvent.getValue().toString(), - TimeUnit.MILLISECONDS.toSeconds(serviceMetricEvent.getCreatedTime().getMillis()) - ); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (!(o instanceof SendAllGraphiteEventConverter)) { - return false; - } - - SendAllGraphiteEventConverter that = (SendAllGraphiteEventConverter) o; - - if (isIgnoreHostname() != that.isIgnoreHostname()) { - return false; - } - if (isIgnoreServiceName() != that.isIgnoreServiceName()) { - return false; - } - if (replaceSlashWithDot() != that.replaceSlashWithDot()) { - return false; - } - return getNamespacePrefix().equals(that.getNamespacePrefix()); - - } - - @Override - public int hashCode() - { - int result = (isIgnoreHostname() ? 1 : 0); - result = 31 * result + (isIgnoreServiceName() ? 1 : 0); - result = 31 * result + (replaceSlashWithDot() ? 
1 : 0); - result = 31 * result + getNamespacePrefix().hashCode(); - return result; - } -} diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java deleted file mode 100644 index 8027e237e91e..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.graphite; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.ImmutableSortedMap; -import com.google.common.io.Files; -import com.google.common.io.Resources; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.SortedMap; -import java.util.concurrent.TimeUnit; - -@JsonTypeName("whiteList") -public class WhiteListBasedConverter implements DruidToGraphiteEventConverter -{ - private static final Logger LOGGER = new Logger(WhiteListBasedConverter.class); - /** - * @code whiteListDimsMapper is a white list of metric->dimensions mappings. - * Key is the metric name or the metric's prefix. - * Value is a list of metric's dimensions names. - * The order of the dimension name is important, it will be used to build the graphite metric path. 
- * For instance we have dimension type is nested under dimension dataSource -> prefix.dataSource.queryType.metricName - */ - private final ImmutableSortedMap> whiteListDimsMapper; - - @JsonProperty - private final boolean ignoreHostname; - - @JsonProperty - private final boolean ignoreServiceName; - - @JsonProperty - private final String namespacePrefix; - - @JsonProperty - private final boolean replaceSlashWithDot; - - @JsonProperty - private final String mapPath; - - private final ObjectMapper mapper; - - @JsonCreator - public WhiteListBasedConverter( - @JsonProperty("namespacePrefix") String namespacePrefix, - @JsonProperty("ignoreHostname") Boolean ignoreHostname, - @JsonProperty("ignoreServiceName") Boolean ignoreServiceName, - @JsonProperty("replaceSlashWithDot") Boolean replaceSlashWithDot, - @JsonProperty("mapPath") String mapPath, - @JacksonInject ObjectMapper mapper - ) - { - this.mapper = Preconditions.checkNotNull(mapper); - this.mapPath = mapPath; - this.whiteListDimsMapper = readMap(this.mapPath); - this.ignoreHostname = ignoreHostname == null ? false : ignoreHostname; - this.ignoreServiceName = ignoreServiceName == null ? false : ignoreServiceName; - this.replaceSlashWithDot = replaceSlashWithDot == null ? false : replaceSlashWithDot; - this.namespacePrefix = Preconditions.checkNotNull(namespacePrefix, "namespace prefix can not be null"); - } - - @JsonProperty - public boolean isIgnoreHostname() - { - return ignoreHostname; - } - - @JsonProperty - public boolean isIgnoreServiceName() - { - return ignoreServiceName; - } - - @JsonProperty - public String getNamespacePrefix() - { - return namespacePrefix; - } - - @JsonProperty - public boolean replaceSlashWithDot() - { - return replaceSlashWithDot; - } - - /** - * @param event Event subject to filtering - * - * @return true if and only if the event prefix key is in the {@code whiteListDimsMapper} - */ - private boolean isInWhiteList(ServiceMetricEvent event) - { - return getPrefixKey(event.getMetric(), whiteListDimsMapper) != null; - } - - /** - * @param key the metric name to lookup - * @param whiteList - * - * @return null if the key does not match with any of the prefixes keys in @code metricsWhiteList, - * or the prefix in @code whiteListDimsMapper - */ - private String getPrefixKey(String key, SortedMap whiteList) - { - String prefixKey = null; - if (whiteList.containsKey(key)) { - return key; - } - SortedMap headMap = whiteList.headMap(key); - if (!headMap.isEmpty() && key.startsWith(headMap.lastKey())) { - prefixKey = headMap.lastKey(); - } - return prefixKey; - } - - /** - * Returns a {@link List} of the white-listed dimension's values to send. 
- * The list is order is the same as the order of dimensions {@code whiteListDimsMapper} - * - * @param event the event for which will filter dimensions - * - * @return {@link List} of the filtered dimension values to send or null if the event is not in the white list - */ - private List getOrderedDimValues(ServiceMetricEvent event) - { - String prefixKey = getPrefixKey(event.getMetric(), whiteListDimsMapper); - if (prefixKey == null) { - return null; - } - ImmutableList.Builder outputList = new ImmutableList.Builder<>(); - Set dimensions = whiteListDimsMapper.get(prefixKey); - if (dimensions == null) { - return Collections.emptyList(); - } - for (String dimKey : dimensions) { - Object rawValue = event.getUserDims().get(dimKey); - String value = null; - - if (rawValue instanceof String) { - value = (String) rawValue; - } else if (rawValue instanceof Collection) { - Collection values = (Collection) rawValue; - if (!values.isEmpty()) { - value = (String) values.iterator().next(); - } - } - - if (value != null) { - outputList.add(GraphiteEmitter.sanitize(value)); - } - } - return outputList.build(); - } - - /** - * @param serviceMetricEvent druid metric event to convert - * - * @return null if the event is not white listed, otherwise return {@link GraphiteEvent} - *

- * The metric path of the graphite event is:
- * <namespacePrefix>.[<serviceName>].[<hostname>].<white-listed dimension values>.<metric>
- *

- * The order of the dimensions is the order returned by {@code getOrderedDimValues()}.
- * Note that this path will be sanitized by replacing all the `.` or space characters with `_` {@link GraphiteEmitter#sanitize(String)}
- *

- */ - - @Override - public GraphiteEvent druidEventToGraphite(ServiceMetricEvent serviceMetricEvent) - { - if (!this.isInWhiteList(serviceMetricEvent)) { - return null; - } - final ImmutableList.Builder metricPathBuilder = new ImmutableList.Builder<>(); - metricPathBuilder.add(this.getNamespacePrefix()); - if (!this.isIgnoreServiceName()) { - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getService())); - } - if (!this.isIgnoreHostname()) { - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getHost())); - } - List dimValues = getOrderedDimValues(serviceMetricEvent); - if (dimValues != null) { - metricPathBuilder.addAll(dimValues); - } - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric(), this.replaceSlashWithDot())); - - return new GraphiteEvent( - Joiner.on(".").join(metricPathBuilder.build()), - String.valueOf(serviceMetricEvent.getValue()), - TimeUnit.MILLISECONDS.toSeconds(serviceMetricEvent.getCreatedTime().getMillis()) - ); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (!(o instanceof WhiteListBasedConverter)) { - return false; - } - - WhiteListBasedConverter that = (WhiteListBasedConverter) o; - - if (isIgnoreHostname() != that.isIgnoreHostname()) { - return false; - } - if (isIgnoreServiceName() != that.isIgnoreServiceName()) { - return false; - } - if (replaceSlashWithDot() != that.replaceSlashWithDot()) { - return false; - } - if (!getNamespacePrefix().equals(that.getNamespacePrefix())) { - return false; - } - return mapPath != null ? mapPath.equals(that.mapPath) : that.mapPath == null; - - } - - @Override - public int hashCode() - { - int result = (isIgnoreHostname() ? 1 : 0); - result = 31 * result + (isIgnoreServiceName() ? 1 : 0); - result = 31 * result + (replaceSlashWithDot() ? 1 : 0); - result = 31 * result + getNamespacePrefix().hashCode(); - result = 31 * result + (mapPath != null ? mapPath.hashCode() : 0); - return result; - } - - private ImmutableSortedMap> readMap(final String mapPath) - { - String fileContent; - String actualPath = mapPath; - try { - if (Strings.isNullOrEmpty(mapPath)) { - URL resource = this.getClass().getClassLoader().getResource("defaultWhiteListMap.json"); - actualPath = resource.getFile(); - LOGGER.info("using default whiteList map located at [%s]", actualPath); - fileContent = Resources.toString(resource, Charset.defaultCharset()); - } else { - fileContent = Files.asCharSource(new File(mapPath), StandardCharsets.UTF_8).read(); - } - return mapper.readerFor(new TypeReference>>() - { - }).readValue(fileContent); - } - catch (IOException e) { - throw new ISE(e, "Got an exception while parsing file [%s]", actualPath); - } - } -} diff --git a/extensions-contrib/graphite-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/graphite-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 4dbe90b207da..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.emitter.graphite.GraphiteEmitterModule diff --git a/extensions-contrib/graphite-emitter/src/main/resources/defaultWhiteListMap.json b/extensions-contrib/graphite-emitter/src/main/resources/defaultWhiteListMap.json deleted file mode 100644 index 87cbd8951653..000000000000 --- a/extensions-contrib/graphite-emitter/src/main/resources/defaultWhiteListMap.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "ingest/events": [], - "ingest/handoff/failed": [], - "ingest/persists": [], - "ingest/rows/output": [], - "jvm/gc": [], - "jvm/mem": [], - "query/cpu/time": [ - "dataSource", - "type" - ], - "query/node/time": [ - "dataSource", - "type" - ], - "query/node/ttfb": [ - "dataSource", - "type" - ], - "query/partial/time": [ - "dataSource", - "type" - ], - "query/segment/time": [ - "dataSource", - "type" - ], - "query/segmentAndCache/time": [ - "dataSource", - "type" - ], - "query/time": [ - "dataSource", - "type" - ], - "query/wait/time": [ - "dataSource", - "type" - ], - "segment/count": [], - "segment/dropQueue/count": [], - "segment/loadQueue/count": [], - "segment/loadQueue/failed": [], - "segment/loadQueue/size": [], - "segment/scan/pending": [], - "segment/size": [], - "segment/usedPercent": [] -} diff --git a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java deleted file mode 100644 index b634aa4928ec..000000000000 --- a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
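Note for reviewers: the whitelist map above pairs each metric prefix with the dimensions that are spliced into the Graphite path; a minimal, self-contained sketch of that assembly (class name and values are illustrative, not part of the deleted code):

class GraphitePathSketch
{
  public static void main(String[] args)
  {
    // For "query/time" the map lists dataSource and type, so those two dimension
    // values land between the sanitized host and the metric name.
    String prefix = "druid";
    String service = "historical";
    String sanitizedHost = "host_name_yahoo_com:8080";
    String dataSource = "wikipedia";
    String type = "groupBy";
    String metric = "query/time";
    String path = String.join(".", prefix, service, sanitizedHost, dataSource, type, metric);
    System.out.println(path); // druid.historical.host_name_yahoo_com:8080.wikipedia.groupBy.query/time
  }
}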
- */ - -package org.apache.druid.emitter.graphite; - -import org.junit.Assert; -import org.junit.Test; - -public class DruidToWhiteListBasedConverterTest -{ - - @Test - public void testSanitize() - { - String test = "host name.yahoo.com:8080"; - Assert.assertEquals("host_name_yahoo_com:8080", GraphiteEmitter.sanitize(test)); - } - - @Test - public void testSanitizeAndReplaceSlashWithDot() - { - String test = "query/cache/delta/hitRate"; - Assert.assertEquals("query.cache.delta.hitRate", GraphiteEmitter.sanitize(test, true)); - } -} diff --git a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfigTest.java b/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfigTest.java deleted file mode 100644 index 7bde9f3c1627..000000000000 --- a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/GraphiteEmitterConfigTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
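Note for reviewers: the two sanitize tests above pin the behavior down; a standalone re-implementation sketch (the regex is inferred from those expectations, not copied from GraphiteEmitter):

import java.util.regex.Pattern;

class SanitizeSketch
{
  private static final Pattern DOT_OR_WHITESPACE = Pattern.compile("[\\s]+|[.]+");

  static String sanitize(String namespace, boolean replaceSlashWithDot)
  {
    // Whitespace runs and dots become underscores; slashes optionally become dots.
    String sanitized = DOT_OR_WHITESPACE.matcher(namespace).replaceAll("_");
    return replaceSlashWithDot ? sanitized.replace('/', '.') : sanitized;
  }

  public static void main(String[] args)
  {
    System.out.println(sanitize("host name.yahoo.com:8080", false)); // host_name_yahoo_com:8080
    System.out.println(sanitize("query/cache/delta/hitRate", true)); // query.cache.delta.hitRate
  }
}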
- */ - -package org.apache.druid.emitter.graphite; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; -import java.util.Collections; - -public class GraphiteEmitterConfigTest -{ - private ObjectMapper mapper = new DefaultObjectMapper(); - - @Before - public void setUp() - { - mapper.setInjectableValues(new InjectableValues.Std().addValue( - ObjectMapper.class, - new DefaultObjectMapper() - )); - } - - @Test - public void testSerDeserGraphiteEmitterConfig() throws IOException - { - GraphiteEmitterConfig graphiteEmitterConfig = new GraphiteEmitterConfig( - "hostname", - 8080, - 1000, - GraphiteEmitterConfig.PICKLE_PROTOCOL, - 1000L, - 100, - new SendAllGraphiteEventConverter("prefix", true, true, false), - Collections.emptyList(), - Collections.emptyList(), - null, - null - ); - String graphiteEmitterConfigString = mapper.writeValueAsString(graphiteEmitterConfig); - GraphiteEmitterConfig graphiteEmitterConfigExpected = mapper.readerFor(GraphiteEmitterConfig.class).readValue( - graphiteEmitterConfigString - ); - Assert.assertEquals(graphiteEmitterConfigExpected, graphiteEmitterConfig); - } - - @Test - public void testSerDeserDruidToGraphiteEventConverter() throws IOException - { - SendAllGraphiteEventConverter sendAllGraphiteEventConverter = new SendAllGraphiteEventConverter( - "prefix", - true, - true, - false - ); - String noopGraphiteEventConverterString = mapper.writeValueAsString(sendAllGraphiteEventConverter); - DruidToGraphiteEventConverter druidToGraphiteEventConverter = mapper.readerFor(DruidToGraphiteEventConverter.class) - .readValue(noopGraphiteEventConverterString); - Assert.assertEquals(druidToGraphiteEventConverter, sendAllGraphiteEventConverter); - - WhiteListBasedConverter whiteListBasedConverter = new WhiteListBasedConverter( - "prefix", - true, - true, - false, - "", - new DefaultObjectMapper() - ); - String whiteListBasedConverterString = mapper.writeValueAsString(whiteListBasedConverter); - druidToGraphiteEventConverter = mapper.readerFor(DruidToGraphiteEventConverter.class) - .readValue(whiteListBasedConverterString); - Assert.assertEquals(druidToGraphiteEventConverter, whiteListBasedConverter); - } - - @Test - public void testJacksonModules() - { - Assert.assertTrue(new GraphiteEmitterModule().getJacksonModules().isEmpty()); - } -} diff --git a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java deleted file mode 100644 index 9098acc5420c..000000000000 --- a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
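Note for reviewers: GraphiteEmitterConfigTest above is the standard Jackson round-trip pattern: serialize, read back, compare with equals(), which is why the config classes in this patch override equals()/hashCode(). A self-contained illustration with a hypothetical stand-in class:

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Objects;

class SerdeRoundTripSketch
{
  static class Cfg
  {
    @JsonProperty private final String hostname;
    @JsonProperty private final int port;

    @JsonCreator
    Cfg(@JsonProperty("hostname") String hostname, @JsonProperty("port") int port)
    {
      this.hostname = hostname;
      this.port = port;
    }

    @Override
    public boolean equals(Object o)
    {
      return o instanceof Cfg && ((Cfg) o).hostname.equals(hostname) && ((Cfg) o).port == port;
    }

    @Override
    public int hashCode()
    {
      return Objects.hash(hostname, port);
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    Cfg cfg = new Cfg("localhost", 2003);
    Cfg back = mapper.readValue(mapper.writeValueAsString(cfg), Cfg.class);
    System.out.println(back.equals(cfg)); // true: the property the test asserts
  }
}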
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.graphite; - -import junitparams.JUnitParamsRunner; -import junitparams.Parameters; -import org.apache.commons.io.IOUtils; -import org.apache.druid.annotations.UsedByJUnitParamsRunner; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.HashMap; - - -@RunWith(JUnitParamsRunner.class) -public class WhiteListBasedConverterTest -{ - private final String prefix = "druid"; - private final WhiteListBasedConverter defaultWhiteListBasedConverter = new WhiteListBasedConverter( - prefix, - false, - false, - false, - null, - new DefaultObjectMapper() - ); - private ServiceMetricEvent event; - private DateTime createdTime = DateTimes.nowUtc(); - private String hostname = "testHost.yahoo.com:8080"; - private String serviceName = "historical"; - private String defaultNamespace = prefix + "." + serviceName + "." + GraphiteEmitter.sanitize(hostname); - - @Before - public void setUp() - { - event = EasyMock.createMock(ServiceMetricEvent.class); - EasyMock.expect(event.getHost()).andReturn(hostname).anyTimes(); - EasyMock.expect(event.getService()).andReturn(serviceName).anyTimes(); - EasyMock.expect(event.getCreatedTime()).andReturn(createdTime).anyTimes(); - EasyMock.expect(event.getUserDims()).andReturn(new HashMap<>()).anyTimes(); - EasyMock.expect(event.getValue()).andReturn(10).anyTimes(); - } - - @Test - @Parameters( - { - "query/time, true", - "query/node/ttfb, true", - "query/segmentAndCache/time, true", - "query/time/balaba, true", - "query/tim, false", - "segment/added/bytes, false", - "segment/count, true", - "segment/size, true", - "segment/cost/raw, false", - "coordinator/TIER_1 /cost/raw, false", - "segment/Kost/raw, false", - ", false", - "word, false", - "coordinator, false", - "server/, false", - "ingest/persists/time, true", - "jvm/mem/init, true", - "jvm/gc/count, true" - } - ) - public void testDefaultIsInWhiteList(String key, boolean expectedValue) - { - EasyMock.expect(event.getMetric()).andReturn(key).anyTimes(); - EasyMock.replay(event); - boolean isIn = defaultWhiteListBasedConverter.druidEventToGraphite(event) != null; - Assert.assertEquals(expectedValue, isIn); - } - - @Test - @Parameters - public void testGetPath(ServiceMetricEvent serviceMetricEvent, String expectedPath) - { - GraphiteEvent graphiteEvent = defaultWhiteListBasedConverter.druidEventToGraphite(serviceMetricEvent); - String path = null; - if (graphiteEvent != null) { - path = graphiteEvent.getEventPath(); - } - Assert.assertEquals(expectedPath, path); - } - - @Test - public void testWhiteListedStringArrayDimension() throws IOException - { - File mapFile = File.createTempFile("testing-" + System.nanoTime(), ".json"); - 
mapFile.deleteOnExit(); - - try (OutputStream outputStream = new FileOutputStream(mapFile)) { - IOUtils.copyLarge( - getClass().getResourceAsStream("/testWhiteListedStringArrayDimension.json"), - outputStream - ); - } - - WhiteListBasedConverter converter = new WhiteListBasedConverter( - prefix, - false, - false, - false, - mapFile.getAbsolutePath(), - new DefaultObjectMapper() - ); - - ServiceMetricEvent event = new ServiceMetricEvent.Builder() - .setDimension("gcName", new String[]{"g1"}) - .build(createdTime, "jvm/gc/cpu", 10) - .build(serviceName, hostname); - - GraphiteEvent graphiteEvent = converter.druidEventToGraphite(event); - - Assert.assertNotNull(graphiteEvent); - Assert.assertEquals(defaultNamespace + ".g1.jvm/gc/cpu", graphiteEvent.getEventPath()); - } - - @UsedByJUnitParamsRunner - private Object[] parametersForTestGetPath() - { - return new Object[]{ - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("id", "dummy_id") - .setDimension("status", "some_status") - .setDimension("numDimensions", "1") - .setDimension("segment", "dummy_segment") - .build(createdTime, "query/segment/time/balabla/more", 10) - .build(serviceName, hostname), - defaultNamespace + ".query/segment/time/balabla/more" - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("dataSource", "some_data_source") - .setDimension("tier", "_default_tier") - .build(createdTime, "segment/max", 10) - .build(serviceName, hostname), - null - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("interval", "2013/2015") - .setDimension("some_random_dim1", "random_dim_value1") - .setDimension("some_random_dim2", "random_dim_value2") - .setDimension("hasFilters", "no") - .setDimension("duration", "P1D") - .setDimension("remoteAddress", "194.0.90.2") - .setDimension("id", "ID") - .setDimension("context", "{context}") - .build(createdTime, "query/time", 10) - .build(serviceName, hostname), - defaultNamespace + ".data-source.groupBy.query/time" - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("some_random_dim1", "random_dim_value1") - .build(createdTime, "ingest/persists/count", 10) - .build(serviceName, hostname), - defaultNamespace + ".ingest/persists/count" - }, - new Object[]{ - new ServiceMetricEvent.Builder().setDimension("bufferpoolName", "BufferPool") - .setDimension("type", "groupBy") - .setDimension("some_random_dim1", "random_dim_value1") - .build(createdTime, "jvm/bufferpool/capacity", 10) - .build(serviceName, hostname), - null - } - }; - } -} diff --git a/extensions-contrib/graphite-emitter/src/test/resources/testWhiteListedStringArrayDimension.json b/extensions-contrib/graphite-emitter/src/test/resources/testWhiteListedStringArrayDimension.json deleted file mode 100644 index 757742e1eed3..000000000000 --- a/extensions-contrib/graphite-emitter/src/test/resources/testWhiteListedStringArrayDimension.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "jvm/gc": ["gcName"] -} diff --git a/extensions-contrib/influx-extensions/pom.xml b/extensions-contrib/influx-extensions/pom.xml deleted file mode 100644 index 9e4d3e9c64ec..000000000000 --- a/extensions-contrib/influx-extensions/pom.xml +++ /dev/null @@ -1,127 +0,0 @@ - - - - - 4.0.0 - - org.apache.druid.extensions.contrib - druid-influx-extensions - druid-influx-extensions - druid-influx-extensions - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - 
../../pom.xml - - - - - - - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.antlr - antlr4-runtime - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.google.code.findbugs - jsr305 - provided - - - - - junit - junit - test - - - org.hamcrest - hamcrest-all - test - - - org.hamcrest - hamcrest-core - test - - - pl.pragmatists - JUnitParams - test - - - - - - - org.antlr - antlr4-maven-plugin - - - - antlr4 - - - - - - - - - - strict - - - - - - - diff --git a/extensions-contrib/influx-extensions/src/main/antlr4/org/apache/druid/data/input/influx/InfluxLineProtocol.g4 b/extensions-contrib/influx-extensions/src/main/antlr4/org/apache/druid/data/input/influx/InfluxLineProtocol.g4 deleted file mode 100644 index 40bdb095c7dc..000000000000 --- a/extensions-contrib/influx-extensions/src/main/antlr4/org/apache/druid/data/input/influx/InfluxLineProtocol.g4 +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** Based on v1.4 from their docs - at https://docs.influxdata.com/influxdb/v1.4/write_protocols/line_protocol_tutorial/ - **/ - -grammar InfluxLineProtocol; - -lines - : line ('\n' line)* '\n'? EOF -; - -line - : identifier (',' tag_set)? ' ' field_set (' ' timestamp)? -; - -timestamp - : NUMBER -; - -field_set - : field_pair (',' field_pair)* -; - -tag_set - : tag_pair (',' tag_pair)* -; - -tag_pair - : identifier '=' identifier -; - -field_pair - : identifier '=' field_value -; - -identifier - : IDENTIFIER_STRING | NUMBER | BOOLEAN -; - -field_value - : QUOTED_STRING | NUMBER | BOOLEAN -; - -eol - : NEWLINE | EOF -; - -NEWLINE - : '\n' -; - -NUMBER - : '-'? INT ('.' [0-9] +) ? 'i'? 
-; - -BOOLEAN - : 'TRUE' | 'true' | 'True' | 't' | 'T' | 'FALSE' | 'False' | 'false' | 'F' | 'f' -; - -QUOTED_STRING - : '"' (StringFieldEscapeSequence | ~(["\\]) )* '"' -; - -IDENTIFIER_STRING - : (IdentifierEscapeSequence | ~([,= \n\\]) )+ -; - -fragment IdentifierEscapeSequence - : '\\' [,= \\] -; - -fragment StringFieldEscapeSequence - : '\\' ["\\] -; - -fragment INT - : '0' | [1-9] [0-9]* -; diff --git a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxExtensionsModule.java b/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxExtensionsModule.java deleted file mode 100644 index da0eba6a4b60..000000000000 --- a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxExtensionsModule.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.data.input.influx; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.Collections; -import java.util.List; - -public class InfluxExtensionsModule implements DruidModule -{ - public InfluxExtensionsModule() - { - } - - @Override - public List getJacksonModules() - { - return Collections.singletonList( - new SimpleModule("InfluxInputRowParserModule") - .registerSubtypes( - new NamedType(InfluxParseSpec.class, "influx") - ) - ); - } - - @Override - public void configure(Binder binder) - { - } -} diff --git a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParseSpec.java b/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParseSpec.java deleted file mode 100644 index 025c3ad08e54..000000000000 --- a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParseSpec.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
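Note for reviewers: one line the grammar above accepts, decomposed by rule (the sample mirrors the parser tests later in this patch):

  cpu,host=foo.bar.baz,region=us-east-1 pct_idle=99.3,m1_load=2i 1465839830100400200

  identifier:  cpu
  tag_set:     host=foo.bar.baz,region=us-east-1 (two tag_pairs)
  field_set:   pct_idle=99.3 (float NUMBER), m1_load=2i (integer, trailing 'i')
  timestamp:   1465839830100400200 (nanoseconds since the epoch)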
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.data.input.influx; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Sets; -import org.apache.druid.data.input.impl.DimensionsSpec; -import org.apache.druid.data.input.impl.ParseSpec; -import org.apache.druid.data.input.impl.TimestampSpec; -import org.apache.druid.java.util.common.parsers.Parser; - -import java.util.List; - -public class InfluxParseSpec extends ParseSpec -{ - private List<String> measurementWhitelist; - - @JsonCreator - public InfluxParseSpec( - @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec, - @JsonProperty("whitelistMeasurements") List<String> measurementWhitelist - ) - { - super( - new TimestampSpec(InfluxParser.TIMESTAMP_KEY, "millis", null), - dimensionsSpec != null ? dimensionsSpec : new DimensionsSpec(null, null, null) - ); - this.measurementWhitelist = measurementWhitelist; - } - - @Override - public Parser<String, Object> makeParser() - { - if (measurementWhitelist != null && measurementWhitelist.size() > 0) { - return new InfluxParser(Sets.newHashSet(measurementWhitelist)); - } else { - return new InfluxParser(null); - } - } - - @Override - public ParseSpec withDimensionsSpec(DimensionsSpec spec) - { - return new InfluxParseSpec(spec, measurementWhitelist); - } -} diff --git a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java b/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java deleted file mode 100644 index 42fca8b7b5d4..000000000000 --- a/extensions-contrib/influx-extensions/src/main/java/org/apache/druid/data/input/influx/InfluxParser.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
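Note for reviewers: assuming the influx-extensions jar were still on the classpath, wiring a measurement whitelist through the parse spec deleted above would look roughly like this (sample line and expected output follow from the parser's visible behavior):

import org.apache.druid.data.input.influx.InfluxParseSpec;
import org.apache.druid.java.util.common.parsers.Parser;

import java.util.Collections;

class InfluxParseSpecSketch
{
  public static void main(String[] args)
  {
    // A null dimensionsSpec falls back to an empty DimensionsSpec; the whitelist
    // narrows parsing to the "cpu" measurement.
    InfluxParseSpec spec = new InfluxParseSpec(null, Collections.singletonList("cpu"));
    Parser<String, Object> parser = spec.makeParser();
    System.out.println(parser.parseToMap("cpu,host=web01 pct_idle=99.3 1465839830100400200"));
    // {measurement=cpu, host=web01, pct_idle=99.3, __ts=1465839830100}
  }
}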
- */ - -package org.apache.druid.data.input.influx; - -import com.google.common.collect.ImmutableList; -import org.antlr.v4.runtime.ANTLRInputStream; -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.TokenStream; -import org.apache.druid.java.util.common.parsers.ParseException; -import org.apache.druid.java.util.common.parsers.Parser; - -import javax.annotation.Nullable; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.regex.Pattern; - -public class InfluxParser implements Parser<String, Object> -{ - public static final String TIMESTAMP_KEY = "__ts"; - private static final String MEASUREMENT_KEY = "measurement"; - - private static final Pattern BACKSLASH_PATTERN = Pattern.compile("\\\\\""); - private static final Pattern IDENTIFIER_PATTERN = Pattern.compile("\\\\([,= ])"); - - private final Set<String> measurementWhitelist; - - public InfluxParser(Set<String> measurementWhitelist) - { - this.measurementWhitelist = measurementWhitelist; - } - - @Override - public void startFileFromBeginning() - { - } - - @Nullable - @Override - public Map<String, Object> parseToMap(String input) - { - CharStream charStream = new ANTLRInputStream(input); - InfluxLineProtocolLexer lexer = new InfluxLineProtocolLexer(charStream); - TokenStream tokenStream = new CommonTokenStream(lexer); - InfluxLineProtocolParser parser = new InfluxLineProtocolParser(tokenStream); - - List<InfluxLineProtocolParser.LineContext> lines = parser.lines().line(); - if (parser.getNumberOfSyntaxErrors() != 0) { - throw new ParseException("Unable to parse line."); - } - if (lines.size() != 1) { - throw new ParseException("Multiple lines present; unable to parse more than one per record."); - } - - Map<String, Object> out = new LinkedHashMap<>(); - - InfluxLineProtocolParser.LineContext line = lines.get(0); - String measurement = parseIdentifier(line.identifier()); - - if (!checkWhitelist(measurement)) { - throw new ParseException("Metric not whitelisted."); - } - - out.put(MEASUREMENT_KEY, measurement); - if (line.tag_set() != null) { - line.tag_set().tag_pair().forEach(t -> parseTag(t, out)); - } - - line.field_set().field_pair().forEach(t -> parseField(t, out)); - - if (line.timestamp() != null) { - String timestamp = line.timestamp().getText(); - parseTimestamp(timestamp, out); - } - return out; - } - - private void parseTag(InfluxLineProtocolParser.Tag_pairContext tag, Map<String, Object> out) - { - String key = parseIdentifier(tag.identifier(0)); - String value = parseIdentifier(tag.identifier(1)); - out.put(key, value); - } - - private void parseField(InfluxLineProtocolParser.Field_pairContext field, Map<String, Object> out) - { - String key = parseIdentifier(field.identifier()); - InfluxLineProtocolParser.Field_valueContext valueContext = field.field_value(); - Object value; - if (valueContext.NUMBER() != null) { - value = parseNumber(valueContext.NUMBER().getText()); - } else if (valueContext.BOOLEAN() != null) { - value = parseBool(valueContext.BOOLEAN().getText()); - } else { - value = parseQuotedString(valueContext.QUOTED_STRING().getText()); - } - out.put(key, value); - } - - private Object parseQuotedString(String text) - { - return BACKSLASH_PATTERN.matcher(text.substring(1, text.length() - 1)).replaceAll("\""); - } - - private Object parseNumber(String raw) - { - if (raw.endsWith("i")) { - return Long.valueOf(raw.substring(0, raw.length() - 1)); - } - - return new Double(raw); - } - - private Object parseBool(String raw) - { - char first = raw.charAt(0); - if (first == 't' || first == 'T') { - return "true"; - }
else { - return "false"; - } - } - - private String parseIdentifier(InfluxLineProtocolParser.IdentifierContext ctx) - { - if (ctx.BOOLEAN() != null || ctx.NUMBER() != null) { - return ctx.getText(); - } - - return IDENTIFIER_PATTERN.matcher(ctx.IDENTIFIER_STRING().getText()).replaceAll("$1"); - } - - private boolean checkWhitelist(String m) - { - return (measurementWhitelist == null) || measurementWhitelist.contains(m); - } - - private void parseTimestamp(String timestamp, Map<String, Object> dest) - { - // Influx timestamps come in nanoseconds; treat anything less than 1 ms as 0 - if (timestamp.length() < 7) { - dest.put(TIMESTAMP_KEY, 0L); - } else { - timestamp = timestamp.substring(0, timestamp.length() - 6); - final long timestampMillis = Long.valueOf(timestamp); - dest.put(TIMESTAMP_KEY, timestampMillis); - } - } - - @Override - public List<String> getFieldNames() - { - return ImmutableList.of(); - } - - @Override - public void setFieldNames(Iterable<String> fieldNames) - { - } -} diff --git a/extensions-contrib/influx-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/influx-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index b9b1a0e55a1e..000000000000 --- a/extensions-contrib/influx-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.data.input.influx.InfluxExtensionsModule diff --git a/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java b/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java deleted file mode 100644 index 49307f91e0a3..000000000000 --- a/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
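Note for reviewers: the nanosecond-to-millisecond conversion in parseTimestamp() above is string truncation rather than division; a quick standalone check of both branches (toMillis is my name, not the parser's):

class InfluxTimestampSketch
{
  static long toMillis(String nanos)
  {
    // Fewer than seven digits is less than one millisecond: clamp to 0;
    // otherwise drop the last six digits (ns to ms).
    return nanos.length() < 7 ? 0L : Long.parseLong(nanos.substring(0, nanos.length() - 6));
  }

  public static void main(String[] args)
  {
    System.out.println(toMillis("1465839830100400200")); // 1465839830100
    System.out.println(toMillis("123"));                 // 0
    System.out.println(toMillis("-123456789"));          // -123, matching the test below
  }
}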
- */ - -package org.apache.druid.data.input.influx; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import junitparams.JUnitParamsRunner; -import junitparams.Parameters; -import org.apache.druid.java.util.common.Pair; -import org.apache.druid.java.util.common.parsers.ParseException; -import org.apache.druid.java.util.common.parsers.Parser; -import org.hamcrest.MatcherAssert; -import org.hamcrest.Matchers; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.util.HashMap; -import java.util.Map; - -@RunWith(JUnitParamsRunner.class) -public class InfluxParserTest -{ - @SuppressWarnings("unused") - private String name; - @SuppressWarnings("unused") - private String input; - @SuppressWarnings("unused") - private Map expected; - - private static Object[] testCase(String name, String input, Parsed expected) - { - return Lists.newArrayList(name, input, expected).toArray(); - } - - public Object[] testData() - { - return Lists.newArrayList( - testCase( - "real sample", - "cpu,host=foo.bar.baz,region=us-east-1,application=echo pct_idle=99.3,pct_user=88.8,m1_load=2i 1465839830100400200", - Parsed.row("cpu", 1465839830100L) - .with("host", "foo.bar.baz") - .with("region", "us-east-1") - .with("application", "echo") - .with("pct_idle", 99.3) - .with("pct_user", 88.8) - .with("m1_load", 2L) - ), - testCase( - "negative timestamp", - "foo,region=us-east-1,host=127.0.0.1 m=1.0,n=3.0,o=500i -123456789", - Parsed.row("foo", -123L) - .with("region", "us-east-1") - .with("host", "127.0.0.1") - .with("m", 1.0) - .with("n", 3.0) - .with("o", 500L) - ), - testCase( - "truncated timestamp", - "foo,region=us-east-1,host=127.0.0.1 m=1.0,n=3.0,o=500i 123", - Parsed.row("foo", 0L) - .with("region", "us-east-1") - .with("host", "127.0.0.1") - .with("m", 1.0) - .with("n", 3.0) - .with("o", 500L) - ), - testCase( - "special characters", - "!@#$%^&*()_-\\=+,+++\\ +++=--\\ --- __**__=\"ü\" 123456789", - Parsed.row("!@#$%^&*()_-=+", 123L) - .with("+++ +++", "-- ---") - .with("__**__", "127.0.0.1") - .with("__**__", "ü") - ), - testCase( - "unicode characters", - "\uD83D\uDE00,\uD83D\uDE05=\uD83D\uDE06 \uD83D\uDE0B=100i,b=\"\uD83D\uDE42\" 123456789", - Parsed.row("\uD83D\uDE00", 123L) - .with("\uD83D\uDE05", "\uD83D\uDE06") - .with("\uD83D\uDE0B", 100L) - .with("b", "\uD83D\uDE42") - ), - testCase( - "quoted string measurement value", - "foo,region=us-east-1,host=127.0.0.1 m=1.0,n=3.0,o=\"something \\\"cool\\\" \" 123456789", - Parsed.row("foo", 123L) - .with("region", "us-east-1") - .with("host", "127.0.0.1") - .with("m", 1.0) - .with("n", 3.0) - .with("o", "something \"cool\" ") - ), - testCase( - "no tags", - "foo m=1.0,n=3.0 123456789", - Parsed.row("foo", 123L) - .with("m", 1.0) - .with("n", 3.0) - ), - testCase( - "Escaped characters in identifiers", - "f\\,oo\\ \\=,bar=baz m=1.0,n=3.0 123456789", - Parsed.row("f,oo =", 123L) - .with("bar", "baz") - .with("m", 1.0) - .with("n", 3.0) - ), - testCase( - "Escaped characters in identifiers", - "foo\\ \\=,bar=baz m=1.0,n=3.0 123456789", - Parsed.row("foo =", 123L) - .with("bar", "baz") - .with("m", 1.0) - .with("n", 3.0) - ) - ).toArray(); - } - - @Test - @Parameters(method = "testData") - public void testParse(String name, String input, Parsed expected) - { - Parser parser = new InfluxParser(null); - Map parsed = parser.parseToMap(input); - MatcherAssert.assertThat( - "correct measurement name", - parsed.get("measurement"), - Matchers.equalTo(expected.measurement) - ); - 
MatcherAssert.assertThat( - "correct timestamp", - parsed.get(InfluxParser.TIMESTAMP_KEY), - Matchers.equalTo(expected.timestamp) - ); - expected.kv.forEach((k, v) -> MatcherAssert.assertThat("correct field " + k, parsed.get(k), Matchers.equalTo(v))); - parsed.remove("measurement"); - parsed.remove(InfluxParser.TIMESTAMP_KEY); - MatcherAssert.assertThat("No extra keys in parsed data", parsed.keySet(), Matchers.equalTo(expected.kv.keySet())); - } - - @Test - public void testParseWhitelistPass() - { - Parser parser = new InfluxParser(Sets.newHashSet("cpu")); - String input = "cpu,host=foo.bar.baz,region=us-east,application=echo pct_idle=99.3,pct_user=88.8,m1_load=2 1465839830100400200"; - Map parsed = parser.parseToMap(input); - MatcherAssert.assertThat(parsed.get("measurement"), Matchers.equalTo("cpu")); - } - - @Test - public void testParseWhitelistFail() - { - Parser parser = new InfluxParser(Sets.newHashSet("mem")); - String input = "cpu,host=foo.bar.baz,region=us-east,application=echo pct_idle=99.3,pct_user=88.8,m1_load=2 1465839830100400200"; - try { - parser.parseToMap(input); - } - catch (ParseException t) { - MatcherAssert.assertThat(t, Matchers.isA(ParseException.class)); - return; - } - - Assert.fail("Exception not thrown"); - } - - public Object[] failureTestData() - { - return Lists.newArrayList( - Pair.of("Empty line", ""), - Pair.of("Invalid measurement", "invalid measurement"), - Pair.of("Invalid timestamp", "foo i=123 123x") - ).toArray(); - } - - @Test - @Parameters(method = "failureTestData") - public void testParseFailures(Pair testCase) - { - Parser parser = new InfluxParser(null); - try { - parser.parseToMap(testCase.rhs); - } - catch (ParseException t) { - MatcherAssert.assertThat(t, Matchers.isA(ParseException.class)); - return; - } - - Assert.fail(testCase.rhs + ": exception not thrown"); - } - - private static class Parsed - { - private String measurement; - private Long timestamp; - private final Map kv = new HashMap<>(); - - static Parsed row(String measurement, Long timestamp) - { - Parsed e = new Parsed(); - e.measurement = measurement; - e.timestamp = timestamp; - return e; - } - - Parsed with(String k, Object v) - { - kv.put(k, v); - return this; - } - } -} diff --git a/extensions-contrib/influxdb-emitter/pom.xml b/extensions-contrib/influxdb-emitter/pom.xml deleted file mode 100644 index 5ba8394d975e..000000000000 --- a/extensions-contrib/influxdb-emitter/pom.xml +++ /dev/null @@ -1,95 +0,0 @@ - - - - - - org.apache.druid.extensions.contrib - druid-influxdb-emitter - druid-influxdb-emitter - influxdb-emitter - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - 4.0.0 - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - - - - org.apache.httpcomponents - httpclient - - - org.apache.httpcomponents - httpcore - - - com.fasterxml.jackson.core - jackson-annotations - - - joda-time - joda-time - - - com.google.guava - guava - - - com.google.inject - guice - - - com.fasterxml.jackson.core - jackson-databind - - - - junit - junit - test - - - org.easymock - easymock - test - - - pl.pragmatists - JUnitParams - test - - - diff --git a/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitter.java b/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitter.java deleted file mode 100644 index ee22917f8e3f..000000000000 --- 
a/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitter.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.influxdb; - -import com.google.common.collect.ImmutableSet; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.concurrent.ScheduledExecutors; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.apache.http.client.HttpClient; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.HttpClientBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.regex.Pattern; - - -public class InfluxdbEmitter implements Emitter -{ - - private static final Logger log = new Logger(InfluxdbEmitter.class); - private final HttpClient influxdbClient; - private final InfluxdbEmitterConfig influxdbEmitterConfig; - private final AtomicBoolean started = new AtomicBoolean(false); - private final ScheduledExecutorService exec = ScheduledExecutors.fixed(1, "InfluxdbEmitter-%s"); - private final ImmutableSet dimensionWhiteList; - private final LinkedBlockingQueue eventsQueue; - private static final Pattern DOT_OR_WHITESPACE = Pattern.compile("[\\s]+|[.]+"); - - public InfluxdbEmitter(InfluxdbEmitterConfig influxdbEmitterConfig) - { - this.influxdbEmitterConfig = influxdbEmitterConfig; - this.influxdbClient = HttpClientBuilder.create().build(); - this.eventsQueue = new LinkedBlockingQueue<>(influxdbEmitterConfig.getMaxQueueSize()); - this.dimensionWhiteList = influxdbEmitterConfig.getDimensionWhitelist(); - log.info("constructed influxdb emitter"); - } - - @Override - public void start() - { - synchronized (started) { - if (!started.get()) { - exec.scheduleAtFixedRate( - () -> transformAndSendToInfluxdb(eventsQueue), - influxdbEmitterConfig.getFlushDelay(), - influxdbEmitterConfig.getFlushPeriod(), - TimeUnit.MILLISECONDS - ); - started.set(true); - } - } - } - - @Override - public void emit(Event event) - { - if (event instanceof ServiceMetricEvent) { - ServiceMetricEvent metricEvent = (ServiceMetricEvent) event; - try { - eventsQueue.put(metricEvent); - } - catch (InterruptedException exception) { - log.error(exception, "Failed to add metricEvent to events queue."); - 
Thread.currentThread().interrupt(); - } - } - } - - public void postToInflux(String payload) - { - HttpPost post = new HttpPost( - "http://" + influxdbEmitterConfig.getHostname() - + ":" + influxdbEmitterConfig.getPort() - + "/write?db=" + influxdbEmitterConfig.getDatabaseName() - + "&u=" + influxdbEmitterConfig.getInfluxdbUserName() - + "&p=" + influxdbEmitterConfig.getInfluxdbPassword() - ); - - post.setEntity(new StringEntity(payload, ContentType.DEFAULT_TEXT)); - post.setHeader("Content-Type", "application/x-www-form-urlencoded"); - - try { - influxdbClient.execute(post); - } - catch (IOException ex) { - log.info(ex, "Failed to post events to InfluxDB."); - } - finally { - post.releaseConnection(); - } - } - - public String transformForInfluxSystems(ServiceMetricEvent event) - { - // split Druid metric on slashes and join middle parts (if any) with "_" - String[] parts = getValue("metric", event).split("/"); - String metric = String.join( - "_", - Arrays.asList( - Arrays.copyOfRange( - parts, - 1, - parts.length - 1 - ) - ) - ); - - // measurement - StringBuilder payload = new StringBuilder("druid_"); - payload.append(parts[0]); - - // tags - StringBuilder tag = new StringBuilder(",service="); - tag.append(getValue("service", event)); - String metricTag = parts.length == 2 ? "" : ",metric=druid_" + metric; - tag.append(metricTag); - tag.append(StringUtils.format(",hostname=%s", getValue("host", event).split(":")[0])); - ImmutableSet dimNames = ImmutableSet.copyOf(event.getUserDims().keySet()); - for (String dimName : dimNames) { - if (this.dimensionWhiteList.contains(dimName)) { - tag.append(StringUtils.format(",%1$s=%2$s", dimName, sanitize(String.valueOf(event.getUserDims().get(dimName))))); - } - } - payload.append(tag); - - // fields - payload.append(StringUtils.format(" druid_%1$s=%2$s", parts[parts.length - 1], getValue("value", event))); - - // timestamp - payload.append(StringUtils.format(" %d\n", event.getCreatedTime().getMillis() * 1000000)); - - return payload.toString(); - } - - private static String sanitize(String namespace) - { - return DOT_OR_WHITESPACE.matcher(namespace).replaceAll("_"); - } - - public String getValue(String key, ServiceMetricEvent event) - { - switch (key) { - case "service": - return event.getService(); - case "eventType": - return event.getClass().getSimpleName(); - case "metric": - return event.getMetric(); - case "feed": - return event.getFeed(); - case "host": - return event.getHost(); - case "value": - return event.getValue().toString(); - default: - return key; - } - } - - @Override - public void flush() - { - if (started.get()) { - transformAndSendToInfluxdb(eventsQueue); - } - } - - @Override - public void close() - { - flush(); - log.info("Closing [%s]", this.getClass().getName()); - started.set(false); - exec.shutdownNow(); - } - - public void transformAndSendToInfluxdb(LinkedBlockingQueue eventsQueue) - { - StringBuilder payload = new StringBuilder(); - int initialQueueSize = eventsQueue.size(); - for (int i = 0; i < initialQueueSize; i++) { - payload.append(transformForInfluxSystems(eventsQueue.poll())); - } - postToInflux(payload.toString()); - } - -} diff --git a/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfig.java b/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfig.java deleted file mode 100644 index d96b07083e2e..000000000000 --- 
a/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfig.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.influxdb; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableSet; -import org.apache.druid.java.util.common.logger.Logger; - -import java.util.Arrays; -import java.util.List; -import java.util.Set; - -public class InfluxdbEmitterConfig -{ - - private static final int DEFAULT_PORT = 8086; - private static final int DEFAULT_QUEUE_SIZE = Integer.MAX_VALUE; - private static final int DEFAULT_FLUSH_PERIOD = 60000; // milliseconds - private static final List<String> DEFAULT_DIMENSION_WHITELIST = Arrays.asList("dataSource", "type", "numMetrics", "numDimensions", "threshold", "dimension", "taskType", "taskStatus", "tier"); - - @JsonProperty - private final String hostname; - @JsonProperty - private final Integer port; - @JsonProperty - private final String databaseName; - @JsonProperty - private final Integer maxQueueSize; - @JsonProperty - private final Integer flushPeriod; - @JsonProperty - private final Integer flushDelay; - @JsonProperty - private final String influxdbUserName; - @JsonProperty - private final String influxdbPassword; - @JsonProperty - private final ImmutableSet<String> dimensionWhitelist; - - private static Logger log = new Logger(InfluxdbEmitterConfig.class); - - @JsonCreator - public InfluxdbEmitterConfig( - @JsonProperty("hostname") String hostname, - @JsonProperty("port") Integer port, - @JsonProperty("databaseName") String databaseName, - @JsonProperty("maxQueueSize") Integer maxQueueSize, - @JsonProperty("flushPeriod") Integer flushPeriod, - @JsonProperty("flushDelay") Integer flushDelay, - @JsonProperty("influxdbUserName") String influxdbUserName, - @JsonProperty("influxdbPassword") String influxdbPassword, - @JsonProperty("dimensionWhitelist") Set<String> dimensionWhitelist - ) - { - this.hostname = Preconditions.checkNotNull(hostname, "hostname can not be null"); - this.port = port == null ? DEFAULT_PORT : port; - this.databaseName = Preconditions.checkNotNull(databaseName, "databaseName can not be null"); - this.maxQueueSize = maxQueueSize == null ? DEFAULT_QUEUE_SIZE : maxQueueSize; - this.flushPeriod = flushPeriod == null ? DEFAULT_FLUSH_PERIOD : flushPeriod; - this.flushDelay = flushDelay == null ?
DEFAULT_FLUSH_PERIOD : flushDelay; - this.influxdbUserName = Preconditions.checkNotNull(influxdbUserName, "influxdbUserName can not be null"); - this.influxdbPassword = Preconditions.checkNotNull(influxdbPassword, "influxdbPassword can not be null"); - this.dimensionWhitelist = dimensionWhitelist == null ? ImmutableSet.copyOf(DEFAULT_DIMENSION_WHITELIST) : ImmutableSet.copyOf(dimensionWhitelist); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (!(o instanceof InfluxdbEmitterConfig)) { - return false; - } - - InfluxdbEmitterConfig that = (InfluxdbEmitterConfig) o; - - if (getPort() != that.getPort()) { - return false; - } - if (!getHostname().equals(that.getHostname())) { - return false; - } - if (!getDatabaseName().equals(that.getDatabaseName())) { - return false; - } - if (getFlushPeriod() != that.getFlushPeriod()) { - return false; - } - if (getMaxQueueSize() != that.getMaxQueueSize()) { - return false; - } - if (getFlushDelay() != that.getFlushDelay()) { - return false; - } - if (!getInfluxdbUserName().equals(that.getInfluxdbUserName())) { - return false; - } - if (!getInfluxdbPassword().equals(that.getInfluxdbPassword())) { - return false; - } - if (!getDimensionWhitelist().equals(that.getDimensionWhitelist())) { - return false; - } - return true; - - } - - @Override - public int hashCode() - { - int result = getHostname().hashCode(); - result = 31 * result + getPort(); - result = 31 * result + getDatabaseName().hashCode(); - result = 31 * result + getFlushPeriod(); - result = 31 * result + getMaxQueueSize(); - result = 31 * result + getFlushDelay(); - result = 31 * result + getInfluxdbUserName().hashCode(); - result = 31 * result + getInfluxdbPassword().hashCode(); - result = 31 * result + getDimensionWhitelist().hashCode(); - return result; - } - - @JsonProperty - public String getHostname() - { - return hostname; - } - - @JsonProperty - public int getPort() - { - return port; - } - - @JsonProperty - public String getDatabaseName() - { - return databaseName; - } - - @JsonProperty - public int getFlushPeriod() - { - return flushPeriod; - } - - @JsonProperty - public int getMaxQueueSize() - { - return maxQueueSize; - } - - @JsonProperty - public int getFlushDelay() - { - return flushDelay; - } - - @JsonProperty - public String getInfluxdbUserName() - { - return influxdbUserName; - } - - @JsonProperty - public String getInfluxdbPassword() - { - return influxdbPassword; - } - - @JsonProperty - public ImmutableSet getDimensionWhitelist() - { - return dimensionWhitelist; - } -} diff --git a/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterModule.java b/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterModule.java deleted file mode 100644 index b286a972c182..000000000000 --- a/extensions-contrib/influxdb-emitter/src/main/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterModule.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
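Note for reviewers: the payload shape transformForInfluxSystems() above produces, worked through on the metric that InfluxdbEmitterTest later in this patch asserts. The metric name is split on '/': the first segment becomes the measurement, middle segments become a metric tag, the last segment becomes the field, and the timestamp is created-time millis times 1,000,000 (nanoseconds). For metric "metric/te/st/value", value 1234, service druid/historical on localhost with dataSource test_datasource:

  druid_metric,service=druid/historical,metric=druid_te_st,hostname=localhost,dataSource=test_datasource druid_value=1234 1509357600000000000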
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.influxdb; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.Binder; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; - -public class InfluxdbEmitterModule implements DruidModule -{ - - private static final String EMITTER_TYPE = "influxdb"; - private static final Logger log = new Logger(InfluxdbEmitterModule.class); - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." + EMITTER_TYPE, InfluxdbEmitterConfig.class); - } - - @Provides - @ManageLifecycle - @Named(EMITTER_TYPE) - public Emitter getEmitter(InfluxdbEmitterConfig influxdbEmitterConfig, ObjectMapper mapper) - { - return new InfluxdbEmitter(influxdbEmitterConfig); - } -} diff --git a/extensions-contrib/influxdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/influxdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index fafe8ee67cf8..000000000000 --- a/extensions-contrib/influxdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.emitter.influxdb.InfluxdbEmitterModule diff --git a/extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfigTest.java b/extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfigTest.java deleted file mode 100644 index 09e6e55c2f38..000000000000 --- a/extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterConfigTest.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
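Note for reviewers: the constructor above supplies fallbacks that the tests below exercise; a hypothetical minimal construction showing them (argument values are illustrative, and the class is assumed to still be on the classpath):

import org.apache.druid.emitter.influxdb.InfluxdbEmitterConfig;

class InfluxdbConfigDefaultsSketch
{
  public static void main(String[] args)
  {
    // Nulls fall back to the defaults defined above: port 8086, maxQueueSize
    // Integer.MAX_VALUE, flushPeriod and flushDelay 60000 ms, and the nine-entry
    // default dimension whitelist.
    InfluxdbEmitterConfig config = new InfluxdbEmitterConfig(
        "localhost", null, "druid_metrics", null, null, null, "user", "secret", null
    );
    System.out.println(config.getPort());               // 8086
    System.out.println(config.getDimensionWhitelist()); // [dataSource, type, ..., tier]
  }
}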
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.influxdb; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableSet; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.Arrays; - -public class InfluxdbEmitterConfigTest -{ - private ObjectMapper mapper = new DefaultObjectMapper(); - private InfluxdbEmitterConfig influxdbEmitterConfig; - - @Before - public void setUp() - { - mapper.setInjectableValues(new InjectableValues.Std().addValue( - ObjectMapper.class, - new DefaultObjectMapper() - )); - - influxdbEmitterConfig = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - } - - @Test - public void testInfluxdbEmitterConfigObjectsAreDifferent() - { - InfluxdbEmitterConfig influxdbEmitterConfigComparison = new InfluxdbEmitterConfig( - "localhost", - 8080, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - Assert.assertNotEquals(influxdbEmitterConfig, influxdbEmitterConfigComparison); - } - - @Test(expected = NullPointerException.class) - public void testConfigWithNullHostname() - { - InfluxdbEmitterConfig influxdbEmitterConfigWithNullHostname = new InfluxdbEmitterConfig( - null, - 8080, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - } - - @Test - public void testConfigWithNullPort() - { - InfluxdbEmitterConfig influxdbEmitterConfigWithNullPort = new InfluxdbEmitterConfig( - "localhost", - null, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - int expectedPort = 8086; - Assert.assertEquals(expectedPort, influxdbEmitterConfig.getPort()); - } - - @Test - public void testEqualsMethod() - { - InfluxdbEmitterConfig influxdbEmitterConfigComparison = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - Assert.assertTrue(influxdbEmitterConfig.equals(influxdbEmitterConfigComparison)); - } - - @Test - public void testEqualsMethodWithNotEqualConfigs() - { - InfluxdbEmitterConfig influxdbEmitterConfigComparison = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 10000, - "adam", - "password", - null - ); - Assert.assertFalse(influxdbEmitterConfig.equals(influxdbEmitterConfigComparison)); - } - - @Test(expected = NullPointerException.class) - public void testConfigWithNullInfluxdbUserName() - { - InfluxdbEmitterConfig influxdbEmitterConfigWithNullHostname = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - null, - "password", - null - ); - } - - @Test(expected = NullPointerException.class) - public void 
testConfigWithNullInfluxdbPassword() - { - InfluxdbEmitterConfig influxdbEmitterConfigWithNullHostname = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - null, - null - ); - } - - @Test - public void testConfigWithNullDimensionWhitelist() - { - InfluxdbEmitterConfig influxdbEmitterConfig = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - ImmutableSet expected = ImmutableSet.copyOf(Arrays.asList("dataSource", "type", "numMetrics", "numDimensions", "threshold", "dimension", "taskType", "taskStatus", "tier")); - Assert.assertEquals(expected, influxdbEmitterConfig.getDimensionWhitelist()); - } - - @Test - public void testConfigWithDimensionWhitelist() - { - InfluxdbEmitterConfig influxdbEmitterConfig = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - ImmutableSet.of("dataSource", "taskType") - ); - ImmutableSet expected = ImmutableSet.copyOf(Arrays.asList("dataSource", "taskType")); - Assert.assertEquals(expected, influxdbEmitterConfig.getDimensionWhitelist()); - } - -} diff --git a/extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterTest.java b/extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterTest.java deleted file mode 100644 index 318f38f65c33..000000000000 --- a/extensions-contrib/influxdb-emitter/src/test/java/org/apache/druid/emitter/influxdb/InfluxdbEmitterTest.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.influxdb; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import org.apache.druid.java.util.emitter.service.ServiceEventBuilder; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class InfluxdbEmitterTest -{ - - private ServiceMetricEvent event; - - @Before - public void setUp() - { - DateTime date = new DateTime(2017, - 10, - 30, - 10, - 00, - DateTimeZone.UTC); // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds - String metric = "metric/te/st/value"; - Number value = 1234; - ImmutableMap serviceDims = ImmutableMap.of( - "service", - "druid/historical", - "host", - "localhost", - "version", - "0.10.0" - ); - ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder(); - builder.setDimension("nonWhiteListedDim", "test"); - builder.setDimension("dataSource", "test_datasource"); - ServiceEventBuilder eventBuilder = builder.build(date, metric, value); - event = (ServiceMetricEvent) eventBuilder.build(serviceDims); - } - - @Test - public void testTransformForInfluxWithLongMetric() - { - InfluxdbEmitterConfig config = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - InfluxdbEmitter influxdbEmitter = new InfluxdbEmitter(config); - String expected = - "druid_metric,service=druid/historical,metric=druid_te_st,hostname=localhost,dataSource=test_datasource druid_value=1234 1509357600000000000" - + "\n"; - String actual = influxdbEmitter.transformForInfluxSystems(event); - Assert.assertEquals(expected, actual); - } - - @Test - public void testTransformForInfluxWithShortMetric() - { - DateTime date = new DateTime(2017, - 10, - 30, - 10, - 00, - DateTimeZone.UTC); // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds - String metric = "metric/time"; - Number value = 1234; - ImmutableMap serviceDims = ImmutableMap.of( - "service", - "druid/historical", - "host", - "localhost", - "version", - "0.10.0" - ); - ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder(); - ServiceEventBuilder eventBuilder = builder.build(date, metric, value); - ServiceMetricEvent event = (ServiceMetricEvent) eventBuilder.build(serviceDims); - InfluxdbEmitterConfig config = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - InfluxdbEmitter influxdbEmitter = new InfluxdbEmitter(config); - String expected = "druid_metric,service=druid/historical,hostname=localhost druid_time=1234 1509357600000000000" - + "\n"; - String actual = influxdbEmitter.transformForInfluxSystems(event); - Assert.assertEquals(expected, actual); - } - - @Test - public void testMetricIsInDimensionWhitelist() - { - DateTime date = new DateTime(2017, - 10, - 30, - 10, - 00, - DateTimeZone.UTC); // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds - String metric = "metric/time"; - Number value = 1234; - ImmutableMap serviceDims = ImmutableMap.of( - "service", - "druid/historical", - "host", - "localhost", - "version", - "0.10.0" - ); - ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder(); - ServiceEventBuilder eventBuilder = builder.build(date, metric, value); - builder.setDimension("dataSource", "wikipedia"); - builder.setDimension("taskType", "index"); - ServiceMetricEvent 
event = (ServiceMetricEvent) eventBuilder.build(serviceDims); - InfluxdbEmitterConfig config = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - ImmutableSet.of("dataSource") - ); - InfluxdbEmitter influxdbEmitter = new InfluxdbEmitter(config); - String expected = "druid_metric,service=druid/historical,hostname=localhost,dataSource=wikipedia druid_time=1234 1509357600000000000" - + "\n"; - String actual = influxdbEmitter.transformForInfluxSystems(event); - Assert.assertEquals(expected, actual); - } - - @Test - public void testMetricIsInDefaultDimensionWhitelist() - { - DateTime date = new DateTime(2017, - 10, - 30, - 10, - 00, - DateTimeZone.UTC); // 10:00am on 30/10/2017 = 1509357600000000000 in epoch nanoseconds - String metric = "metric/time"; - Number value = 1234; - ImmutableMap serviceDims = ImmutableMap.of( - "service", - "druid/historical", - "host", - "localhost", - "version", - "0.10.0" - ); - ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder(); - ServiceEventBuilder eventBuilder = builder.build(date, metric, value); - builder.setDimension("dataSource", "wikipedia"); - builder.setDimension("taskType", "index"); - ServiceMetricEvent event = (ServiceMetricEvent) eventBuilder.build(serviceDims); - InfluxdbEmitterConfig config = new InfluxdbEmitterConfig( - "localhost", - 8086, - "dbname", - 10000, - 15000, - 30000, - "adam", - "password", - null - ); - InfluxdbEmitter influxdbEmitter = new InfluxdbEmitter(config); - String expected = "druid_metric,service=druid/historical,hostname=localhost,dataSource=wikipedia,taskType=index druid_time=1234 1509357600000000000" - + "\n"; - String actual = influxdbEmitter.transformForInfluxSystems(event); - Assert.assertEquals(expected, actual); - } - - @Test - public void testJacksonModules() - { - Assert.assertTrue(new InfluxdbEmitterModule().getJacksonModules().isEmpty()); - } -} diff --git a/extensions-contrib/kafka-emitter/pom.xml b/extensions-contrib/kafka-emitter/pom.xml deleted file mode 100644 index 4f8faf9afab7..000000000000 --- a/extensions-contrib/kafka-emitter/pom.xml +++ /dev/null @@ -1,115 +0,0 @@ - - - - - 4.0.0 - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - org.apache.druid.extensions.contrib - kafka-emitter - kafka-emitter - Druid emitter extension to support kafka - - - - org.apache.kafka - kafka-clients - ${apache.kafka.version} - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - com.google.code.findbugs - jsr305 - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - - junit - junit - test - - - org.easymock - easymock - test - - - pl.pragmatists - JUnitParams - test - - - org.apache.druid - druid-server - ${project.parent.version} - test-jar - test - - - org.apache.druid - druid-processing - ${project.parent.version} - test-jar - test - - - org.apache.druid - druid-processing - ${project.parent.version} - test - - - diff --git a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java deleted file mode 100644 index ceb21c3975c9..000000000000 --- 
a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitter.java +++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.emitter.kafka;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableMap;
-import org.apache.druid.emitter.kafka.MemoryBoundLinkedBlockingQueue.ObjectContainer;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
-import org.apache.druid.java.util.common.logger.Logger;
-import org.apache.druid.java.util.emitter.core.Emitter;
-import org.apache.druid.java.util.emitter.core.Event;
-import org.apache.druid.java.util.emitter.service.AlertEvent;
-import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
-import org.apache.kafka.clients.producer.Callback;
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
-
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicLong;
-
-public class KafkaEmitter implements Emitter
-{
-  private static final Logger log = new Logger(KafkaEmitter.class);
-
-  private static final int DEFAULT_RETRIES = 3;
-  private final AtomicLong metricLost;
-  private final AtomicLong alertLost;
-  private final AtomicLong invalidLost;
-
-  private final KafkaEmitterConfig config;
-  private final Producer<String, String> producer;
-  private final ObjectMapper jsonMapper;
-  private final MemoryBoundLinkedBlockingQueue<String> metricQueue;
-  private final MemoryBoundLinkedBlockingQueue<String> alertQueue;
-  private final ScheduledExecutorService scheduler;
-
-  public KafkaEmitter(KafkaEmitterConfig config, ObjectMapper jsonMapper)
-  {
-    this.config = config;
-    this.jsonMapper = jsonMapper;
-    this.producer = setKafkaProducer();
-    // same as the Kafka producer's buffer.memory
-    long queueMemoryBound = Long.parseLong(this.config.getKafkaProducerConfig()
-                                                      .getOrDefault(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432"));
-    this.metricQueue = new MemoryBoundLinkedBlockingQueue<>(queueMemoryBound);
-    this.alertQueue = new MemoryBoundLinkedBlockingQueue<>(queueMemoryBound);
-    this.scheduler = Executors.newScheduledThreadPool(3);
-    this.metricLost = new AtomicLong(0L);
-    this.alertLost = new AtomicLong(0L);
-    this.invalidLost = new AtomicLong(0L);
-  }
-
-  private Callback setProducerCallback(AtomicLong lostCounter)
-  {
-    return (recordMetadata, e) -> {
-      if (e != null) {
-        log.debug("Event send failed [%s]", e.getMessage());
-        lostCounter.incrementAndGet();
-      }
-    };
-  }
-
-  private Producer<String, String> setKafkaProducer()
-  {
-    ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader();
-    try {
-      Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-
-      Properties props = new Properties();
-      props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getBootstrapServers());
-      props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
-      props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
-      props.put(ProducerConfig.RETRIES_CONFIG, DEFAULT_RETRIES);
-      props.putAll(config.getKafkaProducerConfig());
-
-      return new KafkaProducer<>(props);
-    }
-    finally {
-      Thread.currentThread().setContextClassLoader(currCtxCl);
-    }
-  }
-
-  @Override
-  public void start()
-  {
-    scheduler.schedule(this::sendMetricToKafka, 10, TimeUnit.SECONDS);
-    scheduler.schedule(this::sendAlertToKafka, 10, TimeUnit.SECONDS);
-    scheduler.scheduleWithFixedDelay(() -> {
-      log.info("Message lost counter: metricLost=[%d], alertLost=[%d], invalidLost=[%d]",
-               metricLost.get(), alertLost.get(), invalidLost.get());
-    }, 5, 5, TimeUnit.MINUTES);
-    log.info("Starting Kafka Emitter.");
-  }
-
-  private void sendMetricToKafka()
-  {
-    sendToKafka(config.getMetricTopic(), metricQueue, setProducerCallback(metricLost));
-  }
-
-  private void sendAlertToKafka()
-  {
-    sendToKafka(config.getAlertTopic(), alertQueue, setProducerCallback(alertLost));
-  }
-
-  private void sendToKafka(final String topic, MemoryBoundLinkedBlockingQueue<String> recordQueue, Callback callback)
-  {
-    ObjectContainer<String> objectToSend;
-    try {
-      while (true) {
-        objectToSend = recordQueue.take();
-        producer.send(new ProducerRecord<>(topic, objectToSend.getData()), callback);
-      }
-    }
-    catch (Throwable e) {
-      log.warn(e, "Exception while taking a record from the queue or sending it to Kafka; events will no longer be emitted.");
-    }
-  }
-
-  @Override
-  public void emit(final Event event)
-  {
-    if (event != null) {
-      ImmutableMap.Builder<String, Object> resultBuilder = ImmutableMap.<String, Object>builder().putAll(event.toMap());
-      if (config.getClusterName() != null) {
-        resultBuilder.put("clusterName", config.getClusterName());
-      }
-      Map<String, Object> result = resultBuilder.build();
-
-      try {
-        String resultJson = jsonMapper.writeValueAsString(result);
-        ObjectContainer<String> objectContainer = new ObjectContainer<>(
-            resultJson,
-            StringUtils.toUtf8(resultJson).length
-        );
-        if (event instanceof ServiceMetricEvent) {
-          if (!metricQueue.offer(objectContainer)) {
-            metricLost.incrementAndGet();
-          }
-        } else if (event instanceof AlertEvent) {
-          if (!alertQueue.offer(objectContainer)) {
-            alertLost.incrementAndGet();
-          }
-        } else {
-          invalidLost.incrementAndGet();
-        }
-      }
-      catch (JsonProcessingException e) {
-        invalidLost.incrementAndGet();
-      }
-    }
-  }
-
-  @Override
-  public void flush()
-  {
-    producer.flush();
-  }
-
-  @Override
-  @LifecycleStop
-  public void close()
-  {
-    scheduler.shutdownNow();
-    producer.close();
-  }
-}
diff --git a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java
deleted file mode 100644
index fe71b21c4cd9..000000000000
--- a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterConfig.java
+++ /dev/null
@@ -1,140 +0,0
@@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.kafka; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import org.apache.kafka.clients.producer.ProducerConfig; - -import javax.annotation.Nullable; -import java.util.Map; - -public class KafkaEmitterConfig -{ - - @JsonProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG) - private final String bootstrapServers; - @JsonProperty("metric.topic") - private final String metricTopic; - @JsonProperty("alert.topic") - private final String alertTopic; - @JsonProperty - private final String clusterName; - @JsonProperty("producer.config") - private Map kafkaProducerConfig; - - @JsonCreator - public KafkaEmitterConfig( - @JsonProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG) String bootstrapServers, - @JsonProperty("metric.topic") String metricTopic, - @JsonProperty("alert.topic") String alertTopic, - @JsonProperty("clusterName") String clusterName, - @JsonProperty("producer.config") @Nullable Map kafkaProducerConfig - ) - { - this.bootstrapServers = Preconditions.checkNotNull(bootstrapServers, "bootstrap.servers can not be null"); - this.metricTopic = Preconditions.checkNotNull(metricTopic, "metric.topic can not be null"); - this.alertTopic = Preconditions.checkNotNull(alertTopic, "alert.topic can not be null"); - this.clusterName = clusterName; - this.kafkaProducerConfig = kafkaProducerConfig == null ? ImmutableMap.of() : kafkaProducerConfig; - } - - @JsonProperty - public String getBootstrapServers() - { - return bootstrapServers; - } - - @JsonProperty - public String getMetricTopic() - { - return metricTopic; - } - - @JsonProperty - public String getAlertTopic() - { - return alertTopic; - } - - @JsonProperty - public String getClusterName() - { - return clusterName; - } - - @JsonProperty - public Map getKafkaProducerConfig() - { - return kafkaProducerConfig; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - KafkaEmitterConfig that = (KafkaEmitterConfig) o; - - if (!getBootstrapServers().equals(that.getBootstrapServers())) { - return false; - } - if (!getMetricTopic().equals(that.getMetricTopic())) { - return false; - } - if (!getAlertTopic().equals(that.getAlertTopic())) { - return false; - } - if (getClusterName() != null ? 
!getClusterName().equals(that.getClusterName()) : that.getClusterName() != null) { - return false; - } - return getKafkaProducerConfig().equals(that.getKafkaProducerConfig()); - } - - @Override - public int hashCode() - { - int result = getBootstrapServers().hashCode(); - result = 31 * result + getMetricTopic().hashCode(); - result = 31 * result + getAlertTopic().hashCode(); - result = 31 * result + (getClusterName() != null ? getClusterName().hashCode() : 0); - result = 31 * result + getKafkaProducerConfig().hashCode(); - return result; - } - - @Override - public String toString() - { - return "KafkaEmitterConfig{" + - "bootstrap.servers='" + bootstrapServers + '\'' + - ", metric.topic='" + metricTopic + '\'' + - ", alert.topic='" + alertTopic + '\'' + - ", clusterName='" + clusterName + '\'' + - ", Producer.config=" + kafkaProducerConfig + - '}'; - } -} diff --git a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterModule.java b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterModule.java deleted file mode 100644 index f83932ed78e9..000000000000 --- a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/KafkaEmitterModule.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.kafka; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.Binder; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; - -public class KafkaEmitterModule implements DruidModule -{ - private static final String EMITTER_TYPE = "kafka"; - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." 
+ EMITTER_TYPE, KafkaEmitterConfig.class);
-  }
-
-  @Provides
-  @ManageLifecycle
-  @Named(EMITTER_TYPE)
-  public Emitter getEmitter(KafkaEmitterConfig kafkaEmitterConfig, ObjectMapper mapper)
-  {
-    return new KafkaEmitter(kafkaEmitterConfig, mapper);
-  }
-}
diff --git a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java b/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java
deleted file mode 100644
index fb6cae8ee954..000000000000
--- a/extensions-contrib/kafka-emitter/src/main/java/org/apache/druid/emitter/kafka/MemoryBoundLinkedBlockingQueue.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.emitter.kafka;
-
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.atomic.AtomicLong;
-
-/**
- * Similar to LinkedBlockingQueue but can be bounded by the total byte size of the items present in the queue
- * rather than the number of items.
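- *
- * A minimal usage sketch (illustrative only; the bound and payload values here are hypothetical):
- *
- *   MemoryBoundLinkedBlockingQueue<String> queue = new MemoryBoundLinkedBlockingQueue<>(32);
- *   // offer() rejects the item if the queued byte total would exceed the 32-byte bound
- *   boolean accepted = queue.offer(new ObjectContainer<>("some-event", 10));
- *   String next = queue.take().getData(); // blocks until an item is available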
- */
-public class MemoryBoundLinkedBlockingQueue<T>
-{
-  private final long memoryBound;
-  private final AtomicLong currentMemory;
-  private final LinkedBlockingQueue<ObjectContainer<T>> queue;
-
-  public MemoryBoundLinkedBlockingQueue(long memoryBound)
-  {
-    this.memoryBound = memoryBound;
-    this.currentMemory = new AtomicLong(0L);
-    this.queue = new LinkedBlockingQueue<>();
-  }
-
-  // returns true/false depending on whether item was added or not
-  public boolean offer(ObjectContainer<T> item)
-  {
-    final long itemLength = item.getSize();
-
-    if (currentMemory.addAndGet(itemLength) <= memoryBound) {
-      if (queue.offer(item)) {
-        return true;
-      }
-    }
-    currentMemory.addAndGet(-itemLength);
-    return false;
-  }
-
-  // blocks until at least one item is available to take
-  public ObjectContainer<T> take() throws InterruptedException
-  {
-    final ObjectContainer<T> ret = queue.take();
-    currentMemory.addAndGet(-ret.getSize());
-    return ret;
-  }
-
-  public static class ObjectContainer<T>
-  {
-    private T data;
-    private long size;
-
-    ObjectContainer(T data, long size)
-    {
-      this.data = data;
-      this.size = size;
-    }
-
-    public T getData()
-    {
-      return data;
-    }
-
-    public long getSize()
-    {
-      return size;
-    }
-  }
-}
diff --git a/extensions-contrib/kafka-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/kafka-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
deleted file mode 100644
index a30123e184b0..000000000000
--- a/extensions-contrib/kafka-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-org.apache.druid.emitter.kafka.KafkaEmitterModule
diff --git a/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java b/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java
deleted file mode 100644
index 89e75fc28089..000000000000
--- a/extensions-contrib/kafka-emitter/src/test/java/org/apache/druid/emitter/kafka/KafkaEmitterConfigTest.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.kafka; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -public class KafkaEmitterConfigTest -{ - private ObjectMapper mapper = new DefaultObjectMapper(); - - @Before - public void setUp() - { - mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper())); - } - - @Test - public void testSerDeserKafkaEmitterConfig() throws IOException - { - KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("hostname", "metricTest", - "alertTest", "clusterNameTest", - ImmutableMap.builder() - .put("testKey", "testValue").build() - ); - String kafkaEmitterConfigString = mapper.writeValueAsString(kafkaEmitterConfig); - KafkaEmitterConfig kafkaEmitterConfigExpected = mapper.readerFor(KafkaEmitterConfig.class) - .readValue(kafkaEmitterConfigString); - Assert.assertEquals(kafkaEmitterConfigExpected, kafkaEmitterConfig); - } - - @Test - public void testSerDeNotRequiredKafkaProducerConfig() - { - KafkaEmitterConfig kafkaEmitterConfig = new KafkaEmitterConfig("localhost:9092", "metricTest", - "alertTest", "clusterNameTest", - null - ); - try { - @SuppressWarnings("unused") - KafkaEmitter emitter = new KafkaEmitter(kafkaEmitterConfig, mapper); - } - catch (NullPointerException e) { - Assert.fail(); - } - } - - @Test - public void testJacksonModules() - { - Assert.assertTrue(new KafkaEmitterModule().getJacksonModules().isEmpty()); - } -} diff --git a/extensions-contrib/materialized-view-maintenance/pom.xml b/extensions-contrib/materialized-view-maintenance/pom.xml deleted file mode 100644 index 1430766f1662..000000000000 --- a/extensions-contrib/materialized-view-maintenance/pom.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - - - druid - org.apache.druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - 4.0.0 - - org.apache.druid.extensions.contrib - materialized-view-maintenance - materialized-view-maintenance - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - org.apache.druid - druid-indexing-hadoop - ${project.parent.version} - provided - - - org.apache.druid - druid-indexing-service - ${project.parent.version} - provided - - - com.google.code.findbugs - jsr305 - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - joda-time - joda-time - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.google.guava - guava - provided - - - commons-codec - commons-codec - provided - - - - org.apache.druid - druid-server - ${project.parent.version} - test - test-jar - - - org.apache.druid - druid-processing - ${project.parent.version} - test - test-jar - - - 
org.easymock - easymock - test - - - junit - junit - test - - - org.hamcrest - hamcrest-all - test - - - org.hamcrest - hamcrest-core - test - - - - diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadata.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadata.java deleted file mode 100644 index 0c38b2fd3bdf..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadata.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.indexing.materializedview; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.Sets; -import org.apache.druid.indexing.overlord.DataSourceMetadata; - -import java.util.Objects; -import java.util.Set; - -public class DerivativeDataSourceMetadata implements DataSourceMetadata -{ - private final String baseDataSource; - private final Set dimensions; - private final Set metrics; - - @JsonCreator - public DerivativeDataSourceMetadata( - @JsonProperty("baseDataSource") String baseDataSource, - @JsonProperty("dimensions") Set dimensions, - @JsonProperty("metrics") Set metrics - ) - { - Preconditions.checkArgument(!Strings.isNullOrEmpty(baseDataSource), "baseDataSource cannot be null or empty. Please provide a baseDataSource."); - this.baseDataSource = baseDataSource; - - this.dimensions = Preconditions.checkNotNull(dimensions, "dimensions cannot be null. This is not a valid DerivativeDataSourceMetadata."); - this.metrics = Preconditions.checkNotNull(metrics, "metrics cannot be null. 
This is not a valid DerivativeDataSourceMetadata."); - } - - @JsonProperty("baseDataSource") - public String getBaseDataSource() - { - return baseDataSource; - } - - @JsonProperty("dimensions") - public Set getDimensions() - { - return dimensions; - } - - @JsonProperty("metrics") - public Set getMetrics() - { - return metrics; - } - - @Override - public boolean isValidStart() - { - return false; - } - - @Override - public DataSourceMetadata asStartMetadata() - { - return this; - } - - @Override - public boolean matches(DataSourceMetadata other) - { - return equals(other); - } - - @Override - public DataSourceMetadata plus(DataSourceMetadata other) - { - throw new UnsupportedOperationException("Derivative dataSource metadata is not allowed to plus"); - } - - @Override - public DataSourceMetadata minus(DataSourceMetadata other) - { - throw new UnsupportedOperationException("Derivative dataSource metadata is not allowed to minus"); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - DerivativeDataSourceMetadata that = (DerivativeDataSourceMetadata) o; - - return baseDataSource.equals(that.getBaseDataSource()) && - dimensions.equals(that.getDimensions()) && - metrics.equals(that.getMetrics()); - } - - @Override - public int hashCode() - { - return Objects.hash(baseDataSource, dimensions, metrics); - } - - public Set getColumns() - { - Set fields = Sets.newHashSet(dimensions); - fields.addAll(metrics); - return fields; - } - - @Override - public String toString() - { - return "DerivedDataSourceMetadata{" + - "baseDataSource=" + baseDataSource + - ", dimensions=" + dimensions + - ", metrics=" + metrics + - '}'; - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewMaintenanceDruidModule.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewMaintenanceDruidModule.java deleted file mode 100644 index 255cf7613c34..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewMaintenanceDruidModule.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -public class MaterializedViewMaintenanceDruidModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule(getClass().getSimpleName()) - .registerSubtypes( - new NamedType(MaterializedViewSupervisorSpec.class, "derivativeDataSource"), - new NamedType(DerivativeDataSourceMetadata.class, "derivativeDataSource") - ) - ); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.materialized.view.task", MaterializedViewTaskConfig.class); - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java deleted file mode 100644 index dd3db5008a97..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java +++ /dev/null @@ -1,515 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Optional; -import com.google.common.base.Preconditions; -import com.google.common.collect.MapDifference; -import com.google.common.collect.Maps; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.ListeningScheduledExecutorService; -import com.google.common.util.concurrent.MoreExecutors; -import org.apache.druid.indexer.TaskStatus; -import org.apache.druid.indexing.common.task.HadoopIndexTask; -import org.apache.druid.indexing.overlord.DataSourceMetadata; -import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator; -import org.apache.druid.indexing.overlord.Segments; -import org.apache.druid.indexing.overlord.TaskMaster; -import org.apache.druid.indexing.overlord.TaskStorage; -import org.apache.druid.indexing.overlord.supervisor.Supervisor; -import org.apache.druid.indexing.overlord.supervisor.SupervisorReport; -import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManager; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.java.util.common.JodaUtils; -import org.apache.druid.java.util.common.Pair; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.concurrent.Execs; -import org.apache.druid.java.util.common.guava.Comparators; -import org.apache.druid.java.util.emitter.EmittingLogger; -import org.apache.druid.metadata.EntryExistsException; -import org.apache.druid.metadata.MetadataSupervisorManager; -import org.apache.druid.metadata.SqlSegmentsMetadataManager; -import org.apache.druid.timeline.DataSegment; -import org.joda.time.Duration; -import org.joda.time.Interval; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.concurrent.TimeUnit; - -public class MaterializedViewSupervisor implements Supervisor -{ - private static final EmittingLogger log = new EmittingLogger(MaterializedViewSupervisor.class); - private static final int DEFAULT_MAX_TASK_COUNT = 1; - // there is a lag between derivatives and base dataSource, to prevent repeatedly building for some delay data. 
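-  // For example (illustrative numbers, assuming the one-day default below): with minDataLagMs = 86400000,
-  // a base segment for 2020-01-02 is only considered for rebuilding once the newest base interval starts at
-  // 2020-01-03 or later, so late-arriving data for the most recent day does not trigger repeated rebuilds.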
- private static final long DEFAULT_MIN_DATA_LAG_MS = TimeUnit.DAYS.toMillis(1); - - private final MetadataSupervisorManager metadataSupervisorManager; - private final IndexerMetadataStorageCoordinator metadataStorageCoordinator; - private final SqlSegmentsMetadataManager sqlSegmentsMetadataManager; - private final MaterializedViewSupervisorSpec spec; - private final TaskMaster taskMaster; - private final TaskStorage taskStorage; - private final MaterializedViewTaskConfig config; - private final SupervisorStateManager stateManager; - private final String dataSource; - private final String supervisorId; - private final int maxTaskCount; - private final long minDataLagMs; - private final Map runningTasks = new HashMap<>(); - private final Map runningVersion = new HashMap<>(); - // taskLock is used to synchronize runningTask and runningVersion - private final Object taskLock = new Object(); - // stateLock is used to synchronize materializedViewSupervisor's status - private final Object stateLock = new Object(); - private boolean started = false; - private ListenableFuture future = null; - private ListeningScheduledExecutorService exec = null; - // In the missing intervals, baseDataSource has data but derivedDataSource does not, which means - // data in these intervals of derivedDataSource needs to be rebuilt. - private Set missInterval = new HashSet<>(); - - public MaterializedViewSupervisor( - TaskMaster taskMaster, - TaskStorage taskStorage, - MetadataSupervisorManager metadataSupervisorManager, - SqlSegmentsMetadataManager sqlSegmentsMetadataManager, - IndexerMetadataStorageCoordinator metadataStorageCoordinator, - MaterializedViewTaskConfig config, - MaterializedViewSupervisorSpec spec - ) - { - this.taskMaster = taskMaster; - this.taskStorage = taskStorage; - this.metadataStorageCoordinator = metadataStorageCoordinator; - this.sqlSegmentsMetadataManager = sqlSegmentsMetadataManager; - this.metadataSupervisorManager = metadataSupervisorManager; - this.config = config; - this.spec = spec; - this.stateManager = new SupervisorStateManager(spec.getSupervisorStateManagerConfig(), spec.isSuspended()); - this.dataSource = spec.getDataSourceName(); - this.supervisorId = StringUtils.format("MaterializedViewSupervisor-%s", dataSource); - this.maxTaskCount = spec.getContext().containsKey("maxTaskCount") - ? Integer.parseInt(String.valueOf(spec.getContext().get("maxTaskCount"))) - : DEFAULT_MAX_TASK_COUNT; - this.minDataLagMs = spec.getContext().containsKey("minDataLagMs") - ? 
Long.parseLong(String.valueOf(spec.getContext().get("minDataLagMs"))) - : DEFAULT_MIN_DATA_LAG_MS; - } - - @Override - public void start() - { - synchronized (stateLock) { - Preconditions.checkState(!started, "already started"); - - DataSourceMetadata metadata = metadataStorageCoordinator.retrieveDataSourceMetadata(dataSource); - if (null == metadata) { - metadataStorageCoordinator.insertDataSourceMetadata( - dataSource, - new DerivativeDataSourceMetadata(spec.getBaseDataSource(), spec.getDimensions(), spec.getMetrics()) - ); - } - exec = MoreExecutors.listeningDecorator(Execs.scheduledSingleThreaded(supervisorId)); - final Duration delay = config.getTaskCheckDuration().toStandardDuration(); - future = exec.scheduleWithFixedDelay( - MaterializedViewSupervisor.this::run, - 0, - delay.getMillis(), - TimeUnit.MILLISECONDS - ); - started = true; - } - } - - @VisibleForTesting - public void run() - { - try { - if (spec.isSuspended()) { - log.info( - "Materialized view supervisor[%s:%s] is suspended", - spec.getId(), - spec.getDataSourceName() - ); - return; - } - - DataSourceMetadata metadata = metadataStorageCoordinator.retrieveDataSourceMetadata(dataSource); - if (metadata instanceof DerivativeDataSourceMetadata - && spec.getBaseDataSource().equals(((DerivativeDataSourceMetadata) metadata).getBaseDataSource()) - && spec.getDimensions().equals(((DerivativeDataSourceMetadata) metadata).getDimensions()) - && spec.getMetrics().equals(((DerivativeDataSourceMetadata) metadata).getMetrics())) { - checkSegmentsAndSubmitTasks(); - } else { - log.error( - "Failed to start %s. Metadata in database(%s) is different from new dataSource metadata(%s)", - supervisorId, - metadata, - spec - ); - } - } - catch (Exception e) { - stateManager.recordThrowableEvent(e); - log.makeAlert(e, StringUtils.format("uncaught exception in %s.", supervisorId)).emit(); - } - finally { - stateManager.markRunFinished(); - } - } - - @Override - public void stop(boolean stopGracefully) - { - synchronized (stateLock) { - Preconditions.checkState(started, "not started"); - - stateManager.maybeSetState(SupervisorStateManager.BasicState.STOPPING); - - // stop all schedulers and threads - if (stopGracefully) { - synchronized (taskLock) { - future.cancel(false); - future = null; - exec.shutdownNow(); - exec = null; - clearTasks(); - if (!(metadataSupervisorManager.getLatest().get(supervisorId) instanceof MaterializedViewSupervisorSpec)) { - clearSegments(); - } - } - } else { - future.cancel(true); - future = null; - exec.shutdownNow(); - exec = null; - synchronized (taskLock) { - clearTasks(); - if (!(metadataSupervisorManager.getLatest().get(supervisorId) instanceof MaterializedViewSupervisorSpec)) { - clearSegments(); - } - } - } - started = false; - } - - } - - @Override - public SupervisorReport getStatus() - { - return new MaterializedViewSupervisorReport( - dataSource, - DateTimes.nowUtc(), - spec.isSuspended(), - spec.getBaseDataSource(), - spec.getDimensions(), - spec.getMetrics(), - JodaUtils.condenseIntervals(missInterval), - stateManager.isHealthy(), - stateManager.getSupervisorState().getBasicState(), - stateManager.getExceptionEvents() - ); - } - - @Override - public SupervisorStateManager.State getState() - { - return stateManager.getSupervisorState(); - } - - @Override - public Boolean isHealthy() - { - return stateManager.isHealthy(); - } - - @Override - public void reset(DataSourceMetadata dataSourceMetadata) - { - if (dataSourceMetadata == null) { - // if oldMetadata is different from spec, tasks and segments will be 
removed when reset.
-      DataSourceMetadata oldMetadata = metadataStorageCoordinator.retrieveDataSourceMetadata(dataSource);
-      if (oldMetadata instanceof DerivativeDataSourceMetadata) {
-        if (!((DerivativeDataSourceMetadata) oldMetadata).getBaseDataSource().equals(spec.getBaseDataSource()) ||
-            !((DerivativeDataSourceMetadata) oldMetadata).getDimensions().equals(spec.getDimensions()) ||
-            !((DerivativeDataSourceMetadata) oldMetadata).getMetrics().equals(spec.getMetrics())) {
-          synchronized (taskLock) {
-            clearTasks();
-            clearSegments();
-          }
-        }
-      }
-      commitDataSourceMetadata(
-          new DerivativeDataSourceMetadata(spec.getBaseDataSource(), spec.getDimensions(), spec.getMetrics())
-      );
-    } else {
-      throw new IAE("DerivedDataSourceMetadata is not allowed to reset to a new DerivedDataSourceMetadata");
-    }
-  }
-
-  @Override
-  public void checkpoint(int taskGroupId, DataSourceMetadata checkpointMetadata)
-  {
-    // do nothing
-  }
-
-  /**
-   * Find the intervals in which the derived dataSource should rebuild its segments,
-   * choose the latest of them, and create and submit a new HadoopIndexTask for each.
-   */
-  @VisibleForTesting
-  void checkSegmentsAndSubmitTasks()
-  {
-    synchronized (taskLock) {
-      List<Interval> intervalsToRemove = new ArrayList<>();
-      for (Map.Entry<Interval, HadoopIndexTask> entry : runningTasks.entrySet()) {
-        Optional<TaskStatus> taskStatus = taskStorage.getStatus(entry.getValue().getId());
-        if (!taskStatus.isPresent() || !taskStatus.get().isRunnable()) {
-          intervalsToRemove.add(entry.getKey());
-        }
-      }
-      for (Interval interval : intervalsToRemove) {
-        runningTasks.remove(interval);
-        runningVersion.remove(interval);
-      }
-
-      if (runningTasks.size() == maxTaskCount) {
-        // if the number of running tasks reaches the max task count, the supervisor won't submit new tasks.
-        return;
-      }
-      Pair<SortedMap<Interval, String>, Map<Interval, List<DataSegment>>> toBuildIntervalAndBaseSegments =
-          checkSegments();
-      SortedMap<Interval, String> sortedToBuildVersion = toBuildIntervalAndBaseSegments.lhs;
-      Map<Interval, List<DataSegment>> baseSegments = toBuildIntervalAndBaseSegments.rhs;
-      missInterval = sortedToBuildVersion.keySet();
-      submitTasks(sortedToBuildVersion, baseSegments);
-    }
-  }
-
-  @VisibleForTesting
-  Pair<Map<Interval, HadoopIndexTask>, Map<Interval, String>> getRunningTasks()
-  {
-    return new Pair<>(runningTasks, runningVersion);
-  }
-
-  /**
-   * Find information about the intervals in which the derived dataSource's data should be rebuilt.
-   * The information includes the version and the DataSegment list for each interval.
-   * The intervals include those in which:
-   * 1) the baseDataSource has data but the derivedDataSource does not;
-   * 2) the version of the derived segments isn't the max(created_date) of all base segments.
-   *
-   * Also drop the segments of the intervals in which the derivedDataSource has data but the baseDataSource does not.
-   *
-   * @return the left part of the Pair: interval -> version, and the right part: interval -> DataSegment list.
-   *         The version and DataSegment list can be used to create a HadoopIndexTask.
-   *         Derived dataSource data in all these intervals needs to be rebuilt.
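-   *
-   * For example (hypothetical intervals): if the base dataSource has segments for 2020-01-01/2020-01-02 and
-   * 2020-01-02/2020-01-03 while the derived dataSource only covers 2020-01-01/2020-01-02, the second interval
-   * is returned to be rebuilt; conversely, derived segments for intervals the base dataSource no longer has
-   * are marked unused.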
-   */
-  @VisibleForTesting
-  Pair<SortedMap<Interval, String>, Map<Interval, List<DataSegment>>> checkSegments()
-  {
-    // Pair<interval -> version, interval -> list<DataSegment>>
-    Collection<DataSegment> derivativeSegmentsCollection =
-        metadataStorageCoordinator.retrieveAllUsedSegments(dataSource, Segments.ONLY_VISIBLE);
-    Pair<Map<Interval, String>, Map<Interval, List<DataSegment>>> derivativeSegmentsSnapshot =
-        getVersionAndBaseSegments(derivativeSegmentsCollection);
-    // Pair<interval -> max(created_date), interval -> list<DataSegment>>
-    Pair<Map<Interval, String>, Map<Interval, List<DataSegment>>> baseSegmentsSnapshot =
-        getMaxCreateDateAndBaseSegments(
-            metadataStorageCoordinator.retrieveUsedSegmentsAndCreatedDates(spec.getBaseDataSource())
-        );
-    // baseSegments are used to create the HadoopIndexTask
-    Map<Interval, List<DataSegment>> baseSegments = baseSegmentsSnapshot.rhs;
-    Map<Interval, List<DataSegment>> derivativeSegments = derivativeSegmentsSnapshot.rhs;
-    // use the max created_date of the base segments as the version of the derivative segments
-    Map<Interval, String> maxCreatedDate = baseSegmentsSnapshot.lhs;
-    Map<Interval, String> derivativeVersion = derivativeSegmentsSnapshot.lhs;
-    SortedMap<Interval, String> sortedToBuildInterval =
-        new TreeMap<>(Comparators.intervalsByStartThenEnd().reversed());
-    // find the intervals to drop and to build
-    MapDifference<Interval, String> difference = Maps.difference(maxCreatedDate, derivativeVersion);
-    Map<Interval, String> toBuildInterval = new HashMap<>(difference.entriesOnlyOnLeft());
-    Map<Interval, String> toDropInterval = new HashMap<>(difference.entriesOnlyOnRight());
-    // update the version of derived segments if it isn't the max(created_date) of all base segments;
-    // guard against a user-supplied segments list that does not match the segments list obtained from the db
-    Map<Interval, MapDifference.ValueDifference<String>> checkIfNewestVersion =
-        new HashMap<>(difference.entriesDiffering());
-    for (Map.Entry<Interval, MapDifference.ValueDifference<String>> entry : checkIfNewestVersion.entrySet()) {
-      final String versionOfBase = maxCreatedDate.get(entry.getKey());
-      final String versionOfDerivative = derivativeVersion.get(entry.getKey());
-      final int baseCount = baseSegments.get(entry.getKey()).size();
-      if (versionOfBase.compareTo(versionOfDerivative) > 0) {
-        int usedCount = metadataStorageCoordinator
-            .retrieveUsedSegmentsForInterval(spec.getBaseDataSource(), entry.getKey(), Segments.ONLY_VISIBLE).size();
-        if (baseCount == usedCount) {
-          toBuildInterval.put(entry.getKey(), versionOfBase);
-        }
-      }
-    }
-    // if an interval is in the running tasks and the versions are the same, remove it from toBuildInterval;
-    // if an interval is in the running tasks but the versions are different, stop the task.
-    runningVersion.forEach((interval, version) -> {
-      if (toBuildInterval.containsKey(interval)) {
-        if (toBuildInterval.get(interval).equals(version)) {
-          toBuildInterval.remove(interval);
-        } else {
-          if (taskMaster.getTaskQueue().isPresent()) {
-            taskMaster.getTaskQueue().get().shutdown(runningTasks.get(interval).getId(), "version mismatch");
-            runningTasks.remove(interval);
-          }
-        }
-      }
-    });
-    // drop derivative segments whose interval equals an interval in toDropInterval
-    for (Interval interval : toDropInterval.keySet()) {
-      for (DataSegment segment : derivativeSegments.get(interval)) {
-        sqlSegmentsMetadataManager.markSegmentAsUnused(segment.getId().toString());
-      }
-    }
-    // data for the latest interval will be built first.
-    sortedToBuildInterval.putAll(toBuildInterval);
-    return new Pair<>(sortedToBuildInterval, baseSegments);
-  }
-
-  private void submitTasks(
-      SortedMap<Interval, String> sortedToBuildVersion,
-      Map<Interval, List<DataSegment>> baseSegments
-  )
-  {
-    for (Map.Entry<Interval, String> entry : sortedToBuildVersion.entrySet()) {
-      if (runningTasks.size() < maxTaskCount) {
-        HadoopIndexTask task = spec.createTask(entry.getKey(), entry.getValue(), baseSegments.get(entry.getKey()));
-        try {
-          if (taskMaster.getTaskQueue().isPresent()) {
-            taskMaster.getTaskQueue().get().add(task);
-            runningVersion.put(entry.getKey(), entry.getValue());
-            runningTasks.put(entry.getKey(), task);
-          }
-        }
-        catch (EntryExistsException e) {
-          log.error("task %s already exists", task);
-        }
-        catch (Exception e) {
-          throw new RuntimeException(e);
-        }
-      }
-    }
-  }
-
-  private Pair<Map<Interval, String>, Map<Interval, List<DataSegment>>> getVersionAndBaseSegments(
-      Collection<DataSegment> snapshot
-  )
-  {
-    Map<Interval, String> versions = new HashMap<>();
-    Map<Interval, List<DataSegment>> segments = new HashMap<>();
-    for (DataSegment segment : snapshot) {
-      Interval interval = segment.getInterval();
-      versions.put(interval, segment.getVersion());
-      segments.computeIfAbsent(interval, i -> new ArrayList<>()).add(segment);
-    }
-    return new Pair<>(versions, segments);
-  }
-
-  private Pair<Map<Interval, String>, Map<Interval, List<DataSegment>>> getMaxCreateDateAndBaseSegments(
-      Collection<Pair<DataSegment, String>> snapshot
-  )
-  {
-    Interval maxAllowedToBuildInterval = snapshot.parallelStream()
-                                                 .map(pair -> pair.lhs)
-                                                 .map(DataSegment::getInterval)
-                                                 .max(Comparators.intervalsByStartThenEnd())
-                                                 .get();
-    Map<Interval, String> maxCreatedDate = new HashMap<>();
-    Map<Interval, List<DataSegment>> segments = new HashMap<>();
-    for (Pair<DataSegment, String> entry : snapshot) {
-      DataSegment segment = entry.lhs;
-      String createDate = entry.rhs;
-      Interval interval = segment.getInterval();
-      if (!hasEnoughLag(interval, maxAllowedToBuildInterval)) {
-        continue;
-      }
-      maxCreatedDate.merge(interval, createDate, (date1, date2) -> {
-        return DateTimes.max(DateTimes.of(date1), DateTimes.of(date2)).toString();
-      });
-      segments.computeIfAbsent(interval, i -> new ArrayList<>()).add(segment);
-    }
-    return new Pair<>(maxCreatedDate, segments);
-  }
-
-  /**
-   * Check whether the start millis of the target interval lags more than minDataLagMs behind maxInterval's.
-   * minDataLag is required to prevent repeatedly rebuilding data because of delayed data.
- * - * @param target - * @param maxInterval - * @return true if the start millis of target interval is more than minDataLagMs lagging behind maxInterval's - */ - private boolean hasEnoughLag(Interval target, Interval maxInterval) - { - return minDataLagMs <= (maxInterval.getStartMillis() - target.getStartMillis()); - } - - private void clearTasks() - { - for (HadoopIndexTask task : runningTasks.values()) { - if (taskMaster.getTaskQueue().isPresent()) { - taskMaster.getTaskQueue().get().shutdown(task.getId(), "killing all tasks"); - } - } - runningTasks.clear(); - runningVersion.clear(); - } - - private void clearSegments() - { - log.info("Clear all metadata of dataSource %s", dataSource); - metadataStorageCoordinator.deletePendingSegments(dataSource); - sqlSegmentsMetadataManager.markAsUnusedAllSegmentsInDataSource(dataSource); - metadataStorageCoordinator.deleteDataSourceMetadata(dataSource); - } - - private void commitDataSourceMetadata(DataSourceMetadata dataSourceMetadata) - { - if (!metadataStorageCoordinator.insertDataSourceMetadata(dataSource, dataSourceMetadata)) { - try { - metadataStorageCoordinator.resetDataSourceMetadata( - dataSource, - dataSourceMetadata - ); - } - catch (IOException e) { - throw new RuntimeException(e); - } - } - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorReport.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorReport.java deleted file mode 100644 index 13e51dafd142..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorReport.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import org.apache.druid.indexing.overlord.supervisor.SupervisorReport; -import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManager; -import org.joda.time.DateTime; -import org.joda.time.Interval; - -import java.util.List; -import java.util.Set; - -public class MaterializedViewSupervisorReport extends SupervisorReport -{ - public MaterializedViewSupervisorReport( - String dataSource, - DateTime generationTime, - boolean suspended, - String baseDataSource, - Set dimensions, - Set metrics, - List missTimeline, - boolean healthy, - SupervisorStateManager.State state, - List recentErrors - ) - { - super( - dataSource, - generationTime, - ImmutableMap.builder() - .put("dataSource", dataSource) - .put("baseDataSource", baseDataSource) - .put("suspended", suspended) - .put("dimensions", dimensions) - .put("metrics", metrics) - .put("missTimeline", Sets.newHashSet(missTimeline)) - .put("healthy", healthy) - .put("state", state) - .put("recentErrors", recentErrors) - .build() - ); - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java deleted file mode 100644 index dd385130c761..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java +++ /dev/null @@ -1,437 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.fasterxml.jackson.annotation.JacksonInject; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.druid.data.input.impl.DimensionSchema; -import org.apache.druid.data.input.impl.DimensionsSpec; -import org.apache.druid.indexer.HadoopIOConfig; -import org.apache.druid.indexer.HadoopIngestionSpec; -import org.apache.druid.indexer.HadoopTuningConfig; -import org.apache.druid.indexer.hadoop.DatasourceIngestionSpec; -import org.apache.druid.indexing.common.task.HadoopIndexTask; -import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator; -import org.apache.druid.indexing.overlord.TaskMaster; -import org.apache.druid.indexing.overlord.TaskStorage; -import org.apache.druid.indexing.overlord.supervisor.Supervisor; -import org.apache.druid.indexing.overlord.supervisor.SupervisorSpec; -import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManagerConfig; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.metadata.MetadataSupervisorManager; -import org.apache.druid.metadata.SqlSegmentsMetadataManager; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.segment.indexing.DataSchema; -import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec; -import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; -import org.apache.druid.segment.transform.TransformSpec; -import org.apache.druid.server.security.AuthorizerMapper; -import org.apache.druid.timeline.DataSegment; -import org.joda.time.Interval; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class MaterializedViewSupervisorSpec implements SupervisorSpec -{ - private static final String TASK_PREFIX = "index_materialized_view"; - private static final String SUPERVISOR_TYPE = "materialized_view"; - private final String baseDataSource; - private final DimensionsSpec dimensionsSpec; - private final AggregatorFactory[] aggregators; - private final HadoopTuningConfig tuningConfig; - private final String dataSourceName; - private final String hadoopCoordinates; - private final List hadoopDependencyCoordinates; - private final String classpathPrefix; - private final Map context; - private final Set metrics; - private final Set dimensions; - private final ObjectMapper objectMapper; - private final MetadataSupervisorManager metadataSupervisorManager; - private final IndexerMetadataStorageCoordinator metadataStorageCoordinator; - private final SqlSegmentsMetadataManager sqlSegmentsMetadataManager; - private final TaskMaster taskMaster; - private final TaskStorage taskStorage; - private final MaterializedViewTaskConfig config; - private final AuthorizerMapper authorizerMapper; - private final ChatHandlerProvider chatHandlerProvider; - private final SupervisorStateManagerConfig supervisorStateManagerConfig; - private final boolean suspended; - - public MaterializedViewSupervisorSpec( - @JsonProperty("baseDataSource") String baseDataSource, - @JsonProperty("dimensionsSpec") DimensionsSpec dimensionsSpec, - 
@JsonProperty("metricsSpec") AggregatorFactory[] aggregators, - @JsonProperty("tuningConfig") HadoopTuningConfig tuningConfig, - @JsonProperty("dataSource") String dataSourceName, - @JsonProperty("hadoopCoordinates") String hadoopCoordinates, - @JsonProperty("hadoopDependencyCoordinates") List hadoopDependencyCoordinates, - @JsonProperty("classpathPrefix") String classpathPrefix, - @JsonProperty("context") Map context, - @JsonProperty("suspended") Boolean suspended, - @JacksonInject ObjectMapper objectMapper, - @JacksonInject TaskMaster taskMaster, - @JacksonInject TaskStorage taskStorage, - @JacksonInject MetadataSupervisorManager metadataSupervisorManager, - @JacksonInject SqlSegmentsMetadataManager sqlSegmentsMetadataManager, - @JacksonInject IndexerMetadataStorageCoordinator metadataStorageCoordinator, - @JacksonInject MaterializedViewTaskConfig config, - @JacksonInject AuthorizerMapper authorizerMapper, - @JacksonInject ChatHandlerProvider chatHandlerProvider, - @JacksonInject SupervisorStateManagerConfig supervisorStateManagerConfig - ) - { - Preconditions.checkArgument( - !Strings.isNullOrEmpty(baseDataSource), - "baseDataSource cannot be null or empty. Please provide a baseDataSource." - ); - this.baseDataSource = baseDataSource; - - this.dimensionsSpec = Preconditions.checkNotNull( - dimensionsSpec, - "dimensionsSpec cannot be null. Please provide a dimensionsSpec" - ); - this.aggregators = Preconditions.checkNotNull( - aggregators, - "metricsSpec cannot be null. Please provide a metricsSpec" - ); - this.tuningConfig = Preconditions.checkNotNull( - tuningConfig, - "tuningConfig cannot be null. Please provide tuningConfig" - ); - - this.dataSourceName = dataSourceName == null - ? StringUtils.format( - "%s-%s", - baseDataSource, - DigestUtils.sha1Hex(dimensionsSpec.toString()).substring(0, 8) - ) - : dataSourceName; - this.hadoopCoordinates = hadoopCoordinates; - this.hadoopDependencyCoordinates = hadoopDependencyCoordinates; - this.classpathPrefix = classpathPrefix; - this.context = context == null ? new HashMap<>() : context; - this.objectMapper = objectMapper; - this.taskMaster = taskMaster; - this.taskStorage = taskStorage; - this.metadataSupervisorManager = metadataSupervisorManager; - this.sqlSegmentsMetadataManager = sqlSegmentsMetadataManager; - this.metadataStorageCoordinator = metadataStorageCoordinator; - this.authorizerMapper = authorizerMapper; - this.chatHandlerProvider = chatHandlerProvider; - this.config = config; - this.supervisorStateManagerConfig = supervisorStateManagerConfig; - this.suspended = suspended != null ? 
suspended : false; - - this.metrics = new HashSet<>(); - for (AggregatorFactory aggregatorFactory : aggregators) { - metrics.add(aggregatorFactory.getName()); - } - this.dimensions = new HashSet<>(); - for (DimensionSchema schema : dimensionsSpec.getDimensions()) { - dimensions.add(schema.getName()); - } - } - - public HadoopIndexTask createTask(Interval interval, String version, List segments) - { - String taskId = StringUtils.format("%s_%s_%s", TASK_PREFIX, dataSourceName, DateTimes.nowUtc()); - - // generate parser - Map parseSpec = new HashMap<>(); - parseSpec.put("format", "timeAndDims"); - parseSpec.put("dimensionsSpec", dimensionsSpec); - Map parser = new HashMap<>(); - parser.put("type", "map"); - parser.put("parseSpec", parseSpec); - - //generate HadoopTuningConfig - HadoopTuningConfig tuningConfigForTask = new HadoopTuningConfig( - tuningConfig.getWorkingPath(), - version, - tuningConfig.getPartitionsSpec(), - tuningConfig.getShardSpecs(), - tuningConfig.getIndexSpec(), - tuningConfig.getIndexSpecForIntermediatePersists(), - tuningConfig.getRowFlushBoundary(), - tuningConfig.getMaxBytesInMemory(), - tuningConfig.isLeaveIntermediate(), - tuningConfig.isCleanupOnFailure(), - tuningConfig.isOverwriteFiles(), - tuningConfig.isIgnoreInvalidRows(), - tuningConfig.getJobProperties(), - tuningConfig.isCombineText(), - tuningConfig.getUseCombiner(), - tuningConfig.getRowFlushBoundary(), - tuningConfig.getBuildV9Directly(), - tuningConfig.getNumBackgroundPersistThreads(), - tuningConfig.isForceExtendableShardSpecs(), - true, - tuningConfig.getUserAllowedHadoopPrefix(), - tuningConfig.isLogParseExceptions(), - tuningConfig.getMaxParseExceptions(), - tuningConfig.isUseYarnRMJobStatusFallback() - ); - - // generate granularity - ArbitraryGranularitySpec granularitySpec = new ArbitraryGranularitySpec( - Granularities.NONE, - ImmutableList.of(interval) - ); - - // generate DataSchema - DataSchema dataSchema = new DataSchema( - dataSourceName, - parser, - aggregators, - granularitySpec, - TransformSpec.NONE, - objectMapper - ); - - // generate DatasourceIngestionSpec - DatasourceIngestionSpec datasourceIngestionSpec = new DatasourceIngestionSpec( - baseDataSource, - null, - ImmutableList.of(interval), - segments, - null, - null, - null, - false, - null - ); - - // generate HadoopIOConfig - Map inputSpec = new HashMap<>(); - inputSpec.put("type", "dataSource"); - inputSpec.put("ingestionSpec", datasourceIngestionSpec); - HadoopIOConfig hadoopIOConfig = new HadoopIOConfig(inputSpec, null, null); - - // generate HadoopIngestionSpec - HadoopIngestionSpec spec = new HadoopIngestionSpec(dataSchema, hadoopIOConfig, tuningConfigForTask); - - // generate HadoopIndexTask - HadoopIndexTask task = new HadoopIndexTask( - taskId, - spec, - hadoopCoordinates, - hadoopDependencyCoordinates, - classpathPrefix, - objectMapper, - context, - authorizerMapper, - chatHandlerProvider - ); - - return task; - } - - public Set getDimensions() - { - return dimensions; - } - - public Set getMetrics() - { - return metrics; - } - - @JsonProperty("baseDataSource") - public String getBaseDataSource() - { - return baseDataSource; - } - - @JsonProperty("dimensionsSpec") - public DimensionsSpec getDimensionsSpec() - { - return dimensionsSpec; - } - - @JsonProperty("metricsSpec") - public AggregatorFactory[] getMetricsSpec() - { - return aggregators; - } - - @JsonProperty("tuningConfig") - public HadoopTuningConfig getTuningConfig() - { - return tuningConfig; - } - - @JsonProperty("dataSource") - public String getDataSourceName() 
-  {
-    return dataSourceName;
-  }
-
-  @JsonProperty("hadoopCoordinates")
-  public String getHadoopCoordinates()
-  {
-    return hadoopCoordinates;
-  }
-
-  @JsonProperty("hadoopDependencyCoordinates")
-  public List<String> getHadoopDependencyCoordinates()
-  {
-    return hadoopDependencyCoordinates;
-  }
-
-  @JsonProperty("classpathPrefix")
-  public String getClasspathPrefix()
-  {
-    return classpathPrefix;
-  }
-
-  @JsonProperty("context")
-  public Map<String, Object> getContext()
-  {
-    return context;
-  }
-
-  @Override
-  @JsonProperty("suspended")
-  public boolean isSuspended()
-  {
-    return suspended;
-  }
-
-  @Override
-  @JsonProperty("type")
-  public String getType()
-  {
-    return SUPERVISOR_TYPE;
-  }
-
-  @Override
-  @JsonProperty("source")
-  public String getSource()
-  {
-    return getBaseDataSource();
-  }
-
-  @Override
-  public String getId()
-  {
-    return StringUtils.format("MaterializedViewSupervisor-%s", dataSourceName);
-  }
-
-  @Override
-  public Supervisor createSupervisor()
-  {
-    return new MaterializedViewSupervisor(
-        taskMaster,
-        taskStorage,
-        metadataSupervisorManager,
-        sqlSegmentsMetadataManager,
-        metadataStorageCoordinator,
-        config,
-        this
-    );
-  }
-
-  @Override
-  public List<String> getDataSources()
-  {
-    return ImmutableList.of(dataSourceName);
-  }
-
-  @Override
-  public SupervisorSpec createSuspendedSpec()
-  {
-    return new MaterializedViewSupervisorSpec(
-        baseDataSource,
-        dimensionsSpec,
-        aggregators,
-        tuningConfig,
-        dataSourceName,
-        hadoopCoordinates,
-        hadoopDependencyCoordinates,
-        classpathPrefix,
-        context,
-        true,
-        objectMapper,
-        taskMaster,
-        taskStorage,
-        metadataSupervisorManager,
-        sqlSegmentsMetadataManager,
-        metadataStorageCoordinator,
-        config,
-        authorizerMapper,
-        chatHandlerProvider,
-        supervisorStateManagerConfig
-    );
-  }
-
-  @Override
-  public SupervisorSpec createRunningSpec()
-  {
-    return new MaterializedViewSupervisorSpec(
-        baseDataSource,
-        dimensionsSpec,
-        aggregators,
-        tuningConfig,
-        dataSourceName,
-        hadoopCoordinates,
-        hadoopDependencyCoordinates,
-        classpathPrefix,
-        context,
-        false,
-        objectMapper,
-        taskMaster,
-        taskStorage,
-        metadataSupervisorManager,
-        sqlSegmentsMetadataManager,
-        metadataStorageCoordinator,
-        config,
-        authorizerMapper,
-        chatHandlerProvider,
-        supervisorStateManagerConfig
-    );
-  }
-
-  public SupervisorStateManagerConfig getSupervisorStateManagerConfig()
-  {
-    return supervisorStateManagerConfig;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "MaterializedViewSupervisorSpec{" +
-           "baseDataSource=" + baseDataSource +
-           ", dimensions=" + dimensions +
-           ", metrics=" + metrics +
-           '}';
-  }
-}
diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewTaskConfig.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewTaskConfig.java
deleted file mode 100644
index 1a63420523de..000000000000
--- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewTaskConfig.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.indexing.materializedview; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.joda.time.Period; - -public class MaterializedViewTaskConfig -{ - @JsonProperty - private Period taskCheckDuration = new Period("PT1M"); - - public Period getTaskCheckDuration() - { - return taskCheckDuration; - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/materialized-view-maintenance/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 935023d68b40..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.indexing.materializedview.MaterializedViewMaintenanceDruidModule diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadataTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadataTest.java deleted file mode 100644 index 64a88f98fc3a..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/DerivativeDataSourceMetadataTest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.google.common.collect.Sets; -import org.hamcrest.CoreMatchers; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.util.Set; - - -public class DerivativeDataSourceMetadataTest -{ - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testEmptyBaseDataSource() - { - expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class)); - expectedException.expectMessage( - "baseDataSource cannot be null or empty. Please provide a baseDataSource." - ); - String baseDataSource = ""; - Set dims = Sets.newHashSet("dim1", "dim2", "dim3"); - Set metrics = Sets.newHashSet("cost"); - DerivativeDataSourceMetadata metadata = new DerivativeDataSourceMetadata(baseDataSource, dims, metrics); - } - - @Test - public void testNullBaseDataSource() - { - expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class)); - expectedException.expectMessage( - "baseDataSource cannot be null or empty. Please provide a baseDataSource." - ); - String baseDataSource = null; - Set dims = Sets.newHashSet("dim1", "dim2", "dim3"); - Set metrics = Sets.newHashSet("cost"); - DerivativeDataSourceMetadata metadata = new DerivativeDataSourceMetadata(baseDataSource, dims, metrics); - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java deleted file mode 100644 index c82b8b8fae6a..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java +++ /dev/null @@ -1,302 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.google.common.collect.Lists; -import org.apache.druid.data.input.impl.DimensionsSpec; -import org.apache.druid.data.input.impl.StringDimensionSchema; -import org.apache.druid.indexer.HadoopTuningConfig; -import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator; -import org.apache.druid.indexing.overlord.TaskMaster; -import org.apache.druid.indexing.overlord.TaskStorage; -import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManagerConfig; -import org.apache.druid.math.expr.ExprMacroTable; -import org.apache.druid.metadata.MetadataSupervisorManager; -import org.apache.druid.metadata.SqlSegmentsMetadataManager; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.CountAggregatorFactory; -import org.apache.druid.query.aggregation.LongSumAggregatorFactory; -import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; -import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider; -import org.apache.druid.server.security.AuthorizerMapper; -import org.easymock.EasyMock; -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.io.IOException; - -public class MaterializedViewSupervisorSpecTest -{ - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - private final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); - - @Before - public void setup() - { - objectMapper.registerSubtypes(new NamedType(MaterializedViewSupervisorSpec.class, "derivativeDataSource")); - objectMapper.setInjectableValues( - new InjectableValues.Std() - .addValue(TaskMaster.class, null) - .addValue(TaskStorage.class, null) - .addValue(ExprMacroTable.class.getName(), LookupEnabledTestExprMacroTable.INSTANCE) - .addValue(ObjectMapper.class, objectMapper) - .addValue(MetadataSupervisorManager.class, null) - .addValue(SqlSegmentsMetadataManager.class, null) - .addValue(IndexerMetadataStorageCoordinator.class, null) - .addValue(MaterializedViewTaskConfig.class, new MaterializedViewTaskConfig()) - .addValue(AuthorizerMapper.class, EasyMock.createMock(AuthorizerMapper.class)) - .addValue(ChatHandlerProvider.class, new NoopChatHandlerProvider()) - .addValue(SupervisorStateManagerConfig.class, new SupervisorStateManagerConfig()) - ); - } - - @Test - public void testSupervisorSerialization() throws IOException - { - String supervisorStr = "{\n" + - " \"type\" : \"derivativeDataSource\",\n" + - " \"baseDataSource\": \"wikiticker\",\n" + - " \"dimensionsSpec\":{\n" + - " \"dimensions\" : [\n" + - " \"isUnpatrolled\",\n" + - " \"metroCode\",\n" + - " \"namespace\",\n" + - " \"page\",\n" + - " \"regionIsoCode\",\n" + - " \"regionName\",\n" + - " \"user\"\n" + - " ]\n" + - " },\n" + - " \"metricsSpec\" : [\n" + - " {\n" + - " \"name\" : \"count\",\n" + - " \"type\" : \"count\"\n" + - " },\n" + - " {\n" + - " \"name\" : \"added\",\n" + - " \"type\" : \"longSum\",\n" + - " \"fieldName\" : \"added\"\n" + - " }\n" + - " ],\n" + - " \"tuningConfig\": {\n" + - " \"type\" : \"hadoop\"\n" + - " }\n" + - "}"; - 
MaterializedViewSupervisorSpec expected = new MaterializedViewSupervisorSpec(
-        "wikiticker",
-        new DimensionsSpec(
-            Lists.newArrayList(
-                new StringDimensionSchema("isUnpatrolled"),
-                new StringDimensionSchema("metroCode"),
-                new StringDimensionSchema("namespace"),
-                new StringDimensionSchema("page"),
-                new StringDimensionSchema("regionIsoCode"),
-                new StringDimensionSchema("regionName"),
-                new StringDimensionSchema("user")
-            ),
-            null,
-            null
-        ),
-        new AggregatorFactory[]{
-            new CountAggregatorFactory("count"),
-            new LongSumAggregatorFactory("added", "added")
-        },
-        HadoopTuningConfig.makeDefaultTuningConfig(),
-        null,
-        null,
-        null,
-        null,
-        null,
-        false,
-        objectMapper,
-        null,
-        null,
-        null,
-        null,
-        null,
-        new MaterializedViewTaskConfig(),
-        EasyMock.createMock(AuthorizerMapper.class),
-        new NoopChatHandlerProvider(),
-        new SupervisorStateManagerConfig()
-    );
-    MaterializedViewSupervisorSpec spec = objectMapper.readValue(supervisorStr, MaterializedViewSupervisorSpec.class);
-    Assert.assertEquals(expected.getBaseDataSource(), spec.getBaseDataSource());
-    Assert.assertEquals(expected.getId(), spec.getId());
-    Assert.assertEquals(expected.getDataSourceName(), spec.getDataSourceName());
-    Assert.assertEquals(expected.getDimensions(), spec.getDimensions());
-    Assert.assertEquals(expected.getMetrics(), spec.getMetrics());
-  }
-
-  @Test
-  public void testSuspendResume() throws IOException
-  {
-    String supervisorStr = "{\n" +
-        "  \"type\" : \"derivativeDataSource\",\n" +
-        "  \"baseDataSource\": \"wikiticker\",\n" +
-        "  \"dimensionsSpec\":{\n" +
-        "    \"dimensions\" : [\n" +
-        "      \"isUnpatrolled\",\n" +
-        "      \"metroCode\",\n" +
-        "      \"namespace\",\n" +
-        "      \"page\",\n" +
-        "      \"regionIsoCode\",\n" +
-        "      \"regionName\",\n" +
-        "      \"user\"\n" +
-        "    ]\n" +
-        "  },\n" +
-        "  \"metricsSpec\" : [\n" +
-        "    {\n" +
-        "      \"name\" : \"count\",\n" +
-        "      \"type\" : \"count\"\n" +
-        "    },\n" +
-        "    {\n" +
-        "      \"name\" : \"added\",\n" +
-        "      \"type\" : \"longSum\",\n" +
-        "      \"fieldName\" : \"added\"\n" +
-        "    }\n" +
-        "  ],\n" +
-        "  \"tuningConfig\": {\n" +
-        "    \"type\" : \"hadoop\"\n" +
-        "  }\n" +
-        "}";
-
-    MaterializedViewSupervisorSpec spec = objectMapper.readValue(supervisorStr, MaterializedViewSupervisorSpec.class);
-    Assert.assertFalse(spec.isSuspended());
-
-    String suspendedSerialized = objectMapper.writeValueAsString(spec.createSuspendedSpec());
-    MaterializedViewSupervisorSpec suspendedSpec = objectMapper.readValue(
-        suspendedSerialized,
-        MaterializedViewSupervisorSpec.class
-    );
-    Assert.assertTrue(suspendedSpec.isSuspended());
-
-    String runningSerialized = objectMapper.writeValueAsString(spec.createRunningSpec());
-    MaterializedViewSupervisorSpec runningSpec = objectMapper.readValue(
-        runningSerialized,
-        MaterializedViewSupervisorSpec.class
-    );
-    Assert.assertFalse(runningSpec.isSuspended());
-  }
-
-  @Test
-  public void testEmptyBaseDataSource()
-  {
-    expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
-    expectedException.expectMessage(
-        "baseDataSource cannot be null or empty. Please provide a baseDataSource."
- ); - //noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception) - new MaterializedViewSupervisorSpec( - "", - new DimensionsSpec( - Lists.newArrayList( - new StringDimensionSchema("isUnpatrolled"), - new StringDimensionSchema("metroCode"), - new StringDimensionSchema("namespace"), - new StringDimensionSchema("page"), - new StringDimensionSchema("regionIsoCode"), - new StringDimensionSchema("regionName"), - new StringDimensionSchema("user") - ), - null, - null - ), - new AggregatorFactory[]{ - new CountAggregatorFactory("count"), - new LongSumAggregatorFactory("added", "added") - }, - HadoopTuningConfig.makeDefaultTuningConfig(), - null, - null, - null, - null, - null, - false, - objectMapper, - null, - null, - null, - null, - null, - new MaterializedViewTaskConfig(), - EasyMock.createMock(AuthorizerMapper.class), - new NoopChatHandlerProvider(), - new SupervisorStateManagerConfig() - ); - } - - @Test - public void testNullBaseDataSource() - { - expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class)); - expectedException.expectMessage( - "baseDataSource cannot be null or empty. Please provide a baseDataSource." - ); - //noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception) - new MaterializedViewSupervisorSpec( - null, - new DimensionsSpec( - Lists.newArrayList( - new StringDimensionSchema("isUnpatrolled"), - new StringDimensionSchema("metroCode"), - new StringDimensionSchema("namespace"), - new StringDimensionSchema("page"), - new StringDimensionSchema("regionIsoCode"), - new StringDimensionSchema("regionName"), - new StringDimensionSchema("user") - ), - null, - null - ), - new AggregatorFactory[]{ - new CountAggregatorFactory("count"), - new LongSumAggregatorFactory("added", "added") - }, - HadoopTuningConfig.makeDefaultTuningConfig(), - null, - null, - null, - null, - null, - false, - objectMapper, - null, - null, - null, - null, - null, - new MaterializedViewTaskConfig(), - EasyMock.createMock(AuthorizerMapper.class), - new NoopChatHandlerProvider(), - new SupervisorStateManagerConfig() - ); - } -} diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java deleted file mode 100644 index 766f5109269e..000000000000 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.indexing.materializedview; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.google.common.base.Optional; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import org.apache.druid.data.input.impl.DimensionsSpec; -import org.apache.druid.data.input.impl.StringDimensionSchema; -import org.apache.druid.indexer.HadoopIOConfig; -import org.apache.druid.indexer.HadoopIngestionSpec; -import org.apache.druid.indexer.HadoopTuningConfig; -import org.apache.druid.indexer.TaskStatus; -import org.apache.druid.indexing.common.task.HadoopIndexTask; -import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator; -import org.apache.druid.indexing.overlord.TaskMaster; -import org.apache.druid.indexing.overlord.TaskQueue; -import org.apache.druid.indexing.overlord.TaskStorage; -import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManagerConfig; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.java.util.common.Pair; -import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator; -import org.apache.druid.metadata.MetadataSupervisorManager; -import org.apache.druid.metadata.SqlSegmentsMetadataManager; -import org.apache.druid.metadata.TestDerbyConnector; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.LongSumAggregatorFactory; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.segment.indexing.DataSchema; -import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; -import org.apache.druid.segment.transform.TransformSpec; -import org.apache.druid.server.security.AuthorizerMapper; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec; -import org.easymock.EasyMock; -import org.joda.time.Interval; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; - -public class MaterializedViewSupervisorTest -{ - @Rule - public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); - - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - private TaskStorage taskStorage; - private TaskMaster taskMaster; - private IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator; - private MetadataSupervisorManager metadataSupervisorManager; - private SqlSegmentsMetadataManager sqlSegmentsMetadataManager; - private TaskQueue taskQueue; - private MaterializedViewSupervisor supervisor; - private String derivativeDatasourceName; - private final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); - - @Before - public void setUp() - { - TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector(); - derbyConnector.createDataSourceTable(); - derbyConnector.createSegmentTable(); - taskStorage = EasyMock.createMock(TaskStorage.class); - taskMaster = EasyMock.createMock(TaskMaster.class); - indexerMetadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator( - objectMapper, - derbyConnectorRule.metadataTablesConfigSupplier().get(), - 
derbyConnector - ); - metadataSupervisorManager = EasyMock.createMock(MetadataSupervisorManager.class); - sqlSegmentsMetadataManager = EasyMock.createMock(SqlSegmentsMetadataManager.class); - taskQueue = EasyMock.createMock(TaskQueue.class); - taskQueue.start(); - objectMapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed")); - MaterializedViewSupervisorSpec spec = new MaterializedViewSupervisorSpec( - "base", - new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim")), null, null), - new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")}, - HadoopTuningConfig.makeDefaultTuningConfig(), - null, - null, - null, - null, - null, - false, - objectMapper, - taskMaster, - taskStorage, - metadataSupervisorManager, - sqlSegmentsMetadataManager, - indexerMetadataStorageCoordinator, - new MaterializedViewTaskConfig(), - EasyMock.createMock(AuthorizerMapper.class), - EasyMock.createMock(ChatHandlerProvider.class), - new SupervisorStateManagerConfig() - ); - derivativeDatasourceName = spec.getDataSourceName(); - supervisor = (MaterializedViewSupervisor) spec.createSupervisor(); - } - - @Test - public void testCheckSegments() throws IOException - { - Set baseSegments = Sets.newHashSet( - new DataSegment( - "base", - Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), - "2015-01-02", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ), - new DataSegment( - "base", - Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), - "2015-01-03", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ), - new DataSegment( - "base", - Intervals.of("2015-01-03T00Z/2015-01-04T00Z"), - "2015-01-04", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ) - ); - Set derivativeSegments = Sets.newHashSet( - new DataSegment( - derivativeDatasourceName, - Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), - "2015-01-02", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ), - new DataSegment( - derivativeDatasourceName, - Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), - "3015-01-01", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ) - ); - indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments); - indexerMetadataStorageCoordinator.announceHistoricalSegments(derivativeSegments); - EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - Pair, Map>> toBuildInterval = supervisor.checkSegments(); - Set expectedToBuildInterval = Sets.newHashSet(Intervals.of("2015-01-01T00Z/2015-01-02T00Z")); - Map> expectedSegments = new HashMap<>(); - expectedSegments.put( - Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), - Collections.singletonList( - new DataSegment( - "base", - Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), - "2015-01-02", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, 
null, null), - 9, - 1024 - ) - ) - ); - expectedSegments.put( - Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), - Collections.singletonList( - new DataSegment( - "base", - Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), - "2015-01-03", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ) - ) - ); - Assert.assertEquals(expectedToBuildInterval, toBuildInterval.lhs.keySet()); - Assert.assertEquals(expectedSegments, toBuildInterval.rhs); - } - - @Test - public void testCheckSegmentsAndSubmitTasks() throws IOException - { - Set baseSegments = Sets.newHashSet( - new DataSegment( - "base", - Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), - "2015-01-03", - ImmutableMap.of(), - ImmutableList.of("dim1", "dim2"), - ImmutableList.of("m1"), - new HashBasedNumberedShardSpec(0, 1, 0, 1, null, null), - 9, - 1024 - ) - ); - indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments); - EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - EasyMock.expect(taskStorage.getStatus("test_task1")) - .andReturn(Optional.of(TaskStatus.failure("test_task1"))) - .anyTimes(); - EasyMock.expect(taskStorage.getStatus("test_task2")) - .andReturn(Optional.of(TaskStatus.running("test_task2"))) - .anyTimes(); - EasyMock.replay(taskStorage); - - Pair, Map> runningTasksPair = supervisor.getRunningTasks(); - Map runningTasks = runningTasksPair.lhs; - Map runningVersion = runningTasksPair.rhs; - - DataSchema dataSchema = new DataSchema( - "test_datasource", - null, - null, - null, - TransformSpec.NONE, - objectMapper - ); - HadoopIOConfig hadoopIOConfig = new HadoopIOConfig(new HashMap<>(), null, null); - HadoopIngestionSpec spec = new HadoopIngestionSpec(dataSchema, hadoopIOConfig, null); - HadoopIndexTask task1 = new HadoopIndexTask( - "test_task1", - spec, - null, - null, - null, - objectMapper, - null, - null, - null - ); - runningTasks.put(Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), task1); - runningVersion.put(Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "test_version1"); - - HadoopIndexTask task2 = new HadoopIndexTask( - "test_task2", - spec, - null, - null, - null, - objectMapper, - null, - null, - null - ); - runningTasks.put(Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), task2); - runningVersion.put(Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), "test_version2"); - - supervisor.checkSegmentsAndSubmitTasks(); - - Map expectedRunningTasks = new HashMap<>(); - Map expectedRunningVersion = new HashMap<>(); - expectedRunningTasks.put(Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), task2); - expectedRunningVersion.put(Intervals.of("2015-01-02T00Z/2015-01-03T00Z"), "test_version2"); - - Assert.assertEquals(expectedRunningTasks, runningTasks); - Assert.assertEquals(expectedRunningVersion, runningVersion); - - } - - @Test - public void testSuspendedDoesntRun() - { - MaterializedViewSupervisorSpec suspended = new MaterializedViewSupervisorSpec( - "base", - new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim")), null, null), - new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")}, - HadoopTuningConfig.makeDefaultTuningConfig(), - null, - null, - null, - null, - null, - true, - objectMapper, - taskMaster, - taskStorage, - metadataSupervisorManager, - 
sqlSegmentsMetadataManager, - indexerMetadataStorageCoordinator, - new MaterializedViewTaskConfig(), - EasyMock.createMock(AuthorizerMapper.class), - EasyMock.createMock(ChatHandlerProvider.class), - new SupervisorStateManagerConfig() - ); - MaterializedViewSupervisor supervisor = (MaterializedViewSupervisor) suspended.createSupervisor(); - - // mock IndexerSQLMetadataStorageCoordinator to ensure that retrieveDataSourceMetadata is not called - // which will be true if truly suspended, since this is the first operation of the 'run' method otherwise - IndexerSQLMetadataStorageCoordinator mock = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class); - EasyMock.expect(mock.retrieveDataSourceMetadata(suspended.getDataSourceName())) - .andAnswer(() -> { - Assert.fail(); - return null; - }) - .anyTimes(); - - EasyMock.replay(mock); - supervisor.run(); - } -} diff --git a/extensions-contrib/materialized-view-selection/pom.xml b/extensions-contrib/materialized-view-selection/pom.xml deleted file mode 100644 index 427c8f9deb37..000000000000 --- a/extensions-contrib/materialized-view-selection/pom.xml +++ /dev/null @@ -1,155 +0,0 @@ - - - - - - druid - org.apache.druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - 4.0.0 - - org.apache.druid.extensions.contrib - materialized-view-selection - materialized-view-selection - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - org.apache.druid.extensions.contrib - materialized-view-maintenance - ${project.parent.version} - - - com.fasterxml.jackson.dataformat - jackson-dataformat-smile - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - joda-time - joda-time - provided - - - org.apache.curator - curator-framework - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.google.inject.extensions - guice-multibindings - provided - - - org.jdbi - jdbi - provided - - - com.google.guava - guava - provided - - - com.google.code.findbugs - jsr305 - provided - - - org.apache.druid - druid-core - ${project.parent.version} - test - test-jar - - - org.apache.druid - druid-processing - ${project.parent.version} - test - test-jar - - - org.apache.druid - druid-server - ${project.parent.version} - test - test-jar - - - org.easymock - easymock - test - - - org.apache.curator - curator-test - test - - - junit - junit - test - - - org.hamcrest - hamcrest-all - test - - - diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java deleted file mode 100644 index a3c03a6245c6..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.materializedview;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSortedSet;
-import com.google.inject.Inject;
-import org.apache.druid.client.TimelineServerView;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.query.Query;
-import org.apache.druid.query.TableDataSource;
-import org.apache.druid.query.groupby.GroupByQuery;
-import org.apache.druid.query.planning.DataSourceAnalysis;
-import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
-import org.apache.druid.query.timeseries.TimeseriesQuery;
-import org.apache.druid.query.topn.TopNQuery;
-import org.apache.druid.timeline.TimelineObjectHolder;
-import org.joda.time.Interval;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.stream.Collectors;
-
-public class DataSourceOptimizer
-{
-  private final ReadWriteLock lock = new ReentrantReadWriteLock();
-  private final TimelineServerView serverView;
-  private ConcurrentHashMap<String, AtomicLong> derivativesHitCount = new ConcurrentHashMap<>();
-  private ConcurrentHashMap<String, AtomicLong> totalCount = new ConcurrentHashMap<>();
-  private ConcurrentHashMap<String, AtomicLong> hitCount = new ConcurrentHashMap<>();
-  private ConcurrentHashMap<String, AtomicLong> costTime = new ConcurrentHashMap<>();
-  private ConcurrentHashMap<String, ConcurrentHashMap<Set<String>, AtomicLong>> missFields = new ConcurrentHashMap<>();
-
-  @Inject
-  public DataSourceOptimizer(TimelineServerView serverView)
-  {
-    this.serverView = serverView;
-  }
-
-  /**
-   * Does the main work of materialized view selection: transforms the user query into one or more sub-queries.
-   *
-   * In each sub-query, the dataSource is a derivative of the dataSource in the user query, and the union of all
-   * sub-queries' intervals equals the interval in the user query.
-   *
-   * Derived dataSources with the smallest average data size per segment granularity have the highest priority to
-   * replace the dataSource in the user query. (See the illustrative sketch after this class.)
-   *
-   * @param query only TopNQuery/TimeseriesQuery/GroupByQuery can be optimized
-   * @return a list of queries with specified derived dataSources and intervals
-   */
-  public List<Query> optimize(Query query)
-  {
-    long start = System.currentTimeMillis();
-    // Only topN/timeseries/groupBy queries on a TableDataSource can be optimized.
-    if (!(query instanceof TopNQuery || query instanceof TimeseriesQuery || query instanceof GroupByQuery)
-        || !(query.getDataSource() instanceof TableDataSource)) {
-      return Collections.singletonList(query);
-    }
-    String datasourceName = ((TableDataSource) query.getDataSource()).getName();
-    // Get all derivatives for the dataSource in the query. The derivative set is sorted by average size per
-    // segment granularity.
-    Set<DerivativeDataSource> derivatives = DerivativeDataSourceManager.getDerivatives(datasourceName);
-
-    if (derivatives.isEmpty()) {
-      return Collections.singletonList(query);
-    }
-    lock.readLock().lock();
-    try {
-      totalCount.computeIfAbsent(datasourceName, dsName -> new AtomicLong(0)).incrementAndGet();
-      hitCount.putIfAbsent(datasourceName, new AtomicLong(0));
-      AtomicLong costTimeOfDataSource = costTime.computeIfAbsent(datasourceName, dsName -> new AtomicLong(0));
-
-      // Get all fields required by the query.
-      Set<String> requiredFields = MaterializedViewUtils.getRequiredFields(query);
-
-      Set<DerivativeDataSource> derivativesWithRequiredFields = new HashSet<>();
-      for (DerivativeDataSource derivativeDataSource : derivatives) {
-        derivativesHitCount.putIfAbsent(derivativeDataSource.getName(), new AtomicLong(0));
-        if (derivativeDataSource.getColumns().containsAll(requiredFields)) {
-          derivativesWithRequiredFields.add(derivativeDataSource);
-        }
-      }
-      // If no derivative contains all required dimensions, materialized view selection fails.
-      if (derivativesWithRequiredFields.isEmpty()) {
-        missFields
-            .computeIfAbsent(datasourceName, dsName -> new ConcurrentHashMap<>())
-            .computeIfAbsent(requiredFields, rf -> new AtomicLong(0))
-            .incrementAndGet();
-        costTimeOfDataSource.addAndGet(System.currentTimeMillis() - start);
-        return Collections.singletonList(query);
-      }
-
-      List<Query> queries = new ArrayList<>();
-      List<Interval> remainingQueryIntervals = (List<Interval>) query.getIntervals();
-
-      for (DerivativeDataSource derivativeDataSource : ImmutableSortedSet.copyOf(derivativesWithRequiredFields)) {
-        final List<Interval> derivativeIntervals = remainingQueryIntervals.stream()
-            .flatMap(interval -> serverView
-                .getTimeline(DataSourceAnalysis.forDataSource(new TableDataSource(derivativeDataSource.getName())))
-                .orElseThrow(() -> new ISE("No timeline for dataSource: %s", derivativeDataSource.getName()))
-                .lookup(interval)
-                .stream()
-                .map(TimelineObjectHolder::getInterval)
-            )
-            .collect(Collectors.toList());
-        // If the derivative does not cover any part of the query intervals, it is not selected.
-        if (derivativeIntervals.isEmpty()) {
-          continue;
-        }
-
-        remainingQueryIntervals = MaterializedViewUtils.minus(remainingQueryIntervals, derivativeIntervals);
-        queries.add(
-            query.withDataSource(new TableDataSource(derivativeDataSource.getName()))
-                 .withQuerySegmentSpec(new MultipleIntervalSegmentSpec(derivativeIntervals))
-        );
-        derivativesHitCount.get(derivativeDataSource.getName()).incrementAndGet();
-        if (remainingQueryIntervals.isEmpty()) {
-          break;
-        }
-      }
-
-      if (queries.isEmpty()) {
-        costTime.get(datasourceName).addAndGet(System.currentTimeMillis() - start);
-        return Collections.singletonList(query);
-      }
-
-      // After materialized view selection, results for any remaining query intervals are computed from
-      // the original dataSource.
-      if (!remainingQueryIntervals.isEmpty()) {
-        queries.add(query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(remainingQueryIntervals)));
-      }
-      hitCount.get(datasourceName).incrementAndGet();
-      costTime.get(datasourceName).addAndGet(System.currentTimeMillis() - start);
-      return queries;
-    }
-    finally {
-      lock.readLock().unlock();
-    }
-  }
-
-  public List<DataSourceOptimizerStats> getAndResetStats()
-  {
-    ImmutableMap<String, AtomicLong> derivativesHitCountSnapshot;
-    ImmutableMap<String, AtomicLong> totalCountSnapshot;
-    ImmutableMap<String, AtomicLong> hitCountSnapshot;
-    ImmutableMap<String, AtomicLong> costTimeSnapshot;
-    ImmutableMap<String, ConcurrentHashMap<Set<String>, AtomicLong>> missFieldsSnapshot;
-    lock.writeLock().lock();
-    try {
-      derivativesHitCountSnapshot = ImmutableMap.copyOf(derivativesHitCount);
-      totalCountSnapshot = ImmutableMap.copyOf(totalCount);
-      hitCountSnapshot = ImmutableMap.copyOf(hitCount);
-      costTimeSnapshot = ImmutableMap.copyOf(costTime);
-      missFieldsSnapshot = ImmutableMap.copyOf(missFields);
-      derivativesHitCount.clear();
-      totalCount.clear();
-      hitCount.clear();
-      costTime.clear();
-      missFields.clear();
-    }
-    finally {
-      lock.writeLock().unlock();
-    }
-    List<DataSourceOptimizerStats> stats = new ArrayList<>();
-    Map<String, Set<DerivativeDataSource>> baseToDerivatives = DerivativeDataSourceManager.getAllDerivatives();
-    for (Map.Entry<String, Set<DerivativeDataSource>> entry : baseToDerivatives.entrySet()) {
-      Map<String, Long> derivativesStat = new HashMap<>();
-      for (DerivativeDataSource derivative : entry.getValue()) {
-        derivativesStat.put(
-            derivative.getName(),
-            derivativesHitCountSnapshot.getOrDefault(derivative.getName(), new AtomicLong(0)).get()
-        );
-      }
-      stats.add(
-          new DataSourceOptimizerStats(
-              entry.getKey(),
-              hitCountSnapshot.getOrDefault(entry.getKey(), new AtomicLong(0)).get(),
-              totalCountSnapshot.getOrDefault(entry.getKey(), new AtomicLong(0)).get(),
-              costTimeSnapshot.getOrDefault(entry.getKey(), new AtomicLong(0)).get(),
-              missFieldsSnapshot.getOrDefault(entry.getKey(), new ConcurrentHashMap<>()),
-              derivativesStat
-          )
-      );
-    }
-    return stats;
-  }
-}
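For reviewers: a standalone sketch of the selection idea implemented by optimize() above, serving as much of a queried interval as possible from a derivative and routing the remainder to the base dataSource. CoverageSplit and split are illustrative names rather than Druid APIs, and the arithmetic is simplified to a single contiguous covered interval, whereas the real code uses MaterializedViewUtils.minus over interval lists.

    // Illustrative only: not part of this patch and not a Druid API.
    // Simplification: the derivative covers one contiguous interval.
    import org.joda.time.Interval;
    import java.util.ArrayList;
    import java.util.List;

    public class CoverageSplit
    {
      // Returns the derivative-served slice first, then the remainder(s) for the base table.
      static List<Interval> split(Interval query, Interval covered)
      {
        List<Interval> result = new ArrayList<>();
        Interval overlap = query.overlap(covered);   // portion servable by the derivative
        if (overlap == null) {
          result.add(query);                         // no coverage: derivative not selected
          return result;
        }
        result.add(overlap);
        if (query.getStartMillis() < overlap.getStartMillis()) {
          result.add(new Interval(query.getStartMillis(), overlap.getStartMillis()));
        }
        if (overlap.getEndMillis() < query.getEndMillis()) {
          result.add(new Interval(overlap.getEndMillis(), query.getEndMillis()));
        }
        return result;
      }

      public static void main(String[] args)
      {
        Interval query = Interval.parse("2015-01-01T00:00Z/2015-01-04T00:00Z");
        Interval covered = Interval.parse("2015-01-02T00:00Z/2015-01-03T00:00Z");
        // Prints the derivative-served slice followed by the base-table remainders.
        System.out.println(split(query, covered));
      }
    }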
diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerMonitor.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerMonitor.java
deleted file mode 100644
index 08b0b25ebb27..000000000000
--- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerMonitor.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.materializedview;
-
-import com.google.inject.Inject;
-import org.apache.druid.java.util.emitter.service.ServiceEmitter;
-import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
-import org.apache.druid.java.util.metrics.AbstractMonitor;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class DataSourceOptimizerMonitor extends AbstractMonitor
-{
-  private final DataSourceOptimizer optimizer;
-
-  @Inject
-  public DataSourceOptimizerMonitor(DataSourceOptimizer optimizer)
-  {
-    this.optimizer = optimizer;
-  }
-
-  @Override
-  public boolean doMonitor(ServiceEmitter emitter)
-  {
-    final List<DataSourceOptimizerStats> stats = optimizer.getAndResetStats();
-    for (DataSourceOptimizerStats stat : stats) {
-      final ServiceMetricEvent.Builder builder = new ServiceMetricEvent.Builder();
-      builder.setDimension("dataSource", stat.getBase());
-      emitter.emit(builder.build("/materialized/view/query/totalNum", stat.getTotalcount()));
-      emitter.emit(builder.build("/materialized/view/query/hits", stat.getHitcount()));
-      emitter.emit(builder.build("/materialized/view/query/hitRate", stat.getHitRate()));
-      emitter.emit(builder.build("/materialized/view/select/avgCostMS", stat.getOptimizerCost()));
-      Map<String, Long> derivativesStats = stat.getDerivativesHitCount();
-      for (Map.Entry<String, Long> derivative : derivativesStats.entrySet()) {
-        builder.setDimension("derivative", derivative.getKey());
-        emitter.emit(builder.build("/materialized/view/derivative/numSelected", derivative.getValue()));
-      }
-      final ServiceMetricEvent.Builder builder2 = new ServiceMetricEvent.Builder();
-      builder2.setDimension("dataSource", stat.getBase());
-      for (Set<String> fields : stat.getMissFields().keySet()) {
-        builder2.setDimension("fields", fields.toString());
-        emitter.emit(builder2.build("/materialized/view/missNum", stat.getMissFields().get(fields).get()));
-      }
-    }
-    return true;
-  }
-}
diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerStats.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerStats.java
deleted file mode 100644
index 2c86df8fad7a..000000000000
--- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizerStats.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.apache.druid.query.materializedview; - -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicLong; - -public class DataSourceOptimizerStats -{ - private final String base; - private final long hitcount; - private final long totalcount; - private final long optimizerCost; - private final Map, AtomicLong> missFields; - private final Map derivativesHitCount; - - public DataSourceOptimizerStats( - String base, - long hitcount, - long totalcount, - long optimizerCost, - Map, AtomicLong> missFields, - Map derivativesHitCount - ) - { - this.base = base; - this.hitcount = hitcount; - this.totalcount = totalcount; - this.optimizerCost = optimizerCost; - this.missFields = missFields; - this.derivativesHitCount = derivativesHitCount; - } - - public Map, AtomicLong> getMissFields() - { - return missFields; - } - - public String getBase() - { - return base; - } - - public long getHitcount() - { - return hitcount; - } - - public long getTotalcount() - { - return totalcount; - } - - public double getOptimizerCost() - { - if (totalcount == 0L) { - return 0; - } - return ((double) optimizerCost) / totalcount; - } - - public double getHitRate() - { - if (totalcount == 0L) { - return 0; - } - return ((double) hitcount) / totalcount; - } - - public Map getDerivativesHitCount() - { - return derivativesHitCount; - } - -} diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSource.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSource.java deleted file mode 100644 index be9c5f833cb6..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSource.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.materializedview; - -import com.google.common.base.Preconditions; - -import java.util.Objects; -import java.util.Set; - -public class DerivativeDataSource implements Comparable -{ - private final String name; - private final String baseDataSource; - private final Set columns; - private final long avgSizeBasedGranularity; - - public DerivativeDataSource(String name, String baseDataSource, Set columns, long size) - { - this.name = Preconditions.checkNotNull(name, "name"); - this.baseDataSource = Preconditions.checkNotNull(baseDataSource, "baseDataSource"); - this.columns = Preconditions.checkNotNull(columns, "columns"); - this.avgSizeBasedGranularity = size; - } - - public String getName() - { - return name; - } - - public String getBaseDataSource() - { - return baseDataSource; - } - - public Set getColumns() - { - return columns; - } - - public long getAvgSizeBasedGranularity() - { - return avgSizeBasedGranularity; - } - - @Override - public int compareTo(DerivativeDataSource o) - { - if (this.avgSizeBasedGranularity > o.getAvgSizeBasedGranularity()) { - return 1; - } else if (this.avgSizeBasedGranularity == o.getAvgSizeBasedGranularity()) { - return 0; - } else { - return -1; - } - } - - @Override - public boolean equals(Object o) - { - if (o == null) { - return false; - } - if (!(o instanceof DerivativeDataSource)) { - return false; - } - DerivativeDataSource that = (DerivativeDataSource) o; - return name.equals(that.getName()) - && baseDataSource.equals(that.getBaseDataSource()) - && columns.equals(that.getColumns()); - } - - @Override - public int hashCode() - { - return Objects.hash(name, baseDataSource, columns); - } -} diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java deleted file mode 100644 index 3cff0da0075a..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.apache.druid.query.materializedview;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Supplier;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningScheduledExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.inject.Inject;
-import org.apache.druid.guice.ManageLifecycle;
-import org.apache.druid.indexing.materializedview.DerivativeDataSourceMetadata;
-import org.apache.druid.indexing.overlord.DataSourceMetadata;
-import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.Intervals;
-import org.apache.druid.java.util.common.Pair;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.concurrent.Execs;
-import org.apache.druid.java.util.common.jackson.JacksonUtils;
-import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
-import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
-import org.apache.druid.java.util.emitter.EmittingLogger;
-import org.apache.druid.metadata.MetadataStorageTablesConfig;
-import org.apache.druid.metadata.SQLMetadataConnector;
-import org.apache.druid.timeline.DataSegment;
-import org.joda.time.Duration;
-import org.joda.time.Interval;
-import org.skife.jdbi.v2.Handle;
-import org.skife.jdbi.v2.StatementContext;
-import org.skife.jdbi.v2.tweak.HandleCallback;
-
-import java.sql.ResultSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.Collectors;
-
-/**
- * Reads and stores derivative-datasource information from the dataSource metadata table on a regular schedule.
- * During query optimization, this manager supplies the information about available derivatives.
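 * <p>
 * For illustration only (an editorial sketch, not part of the original source): a consumer such as
 * the optimizer reads the cached state through the static accessors defined in this class; the
 * generic parameters shown here are inferred from the usages elsewhere in this patch:
 * <pre>{@code
 * // Derivatives of one base datasource, sorted by average size per segment granularity:
 * ImmutableSet<DerivativeDataSource> views = DerivativeDataSourceManager.getDerivatives("base");
 *
 * // Snapshot of every base-to-derivatives mapping currently cached:
 * ImmutableMap<String, SortedSet<DerivativeDataSource>> all = DerivativeDataSourceManager.getAllDerivatives();
 * }</pre>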
- */ -@ManageLifecycle -public class DerivativeDataSourceManager -{ - private static final EmittingLogger log = new EmittingLogger(DerivativeDataSourceManager.class); - private static final AtomicReference>> DERIVATIVES_REF = - new AtomicReference<>(new ConcurrentHashMap<>()); - private final MaterializedViewConfig config; - private final Supplier dbTables; - private final SQLMetadataConnector connector; - private final ObjectMapper objectMapper; - private final Object lock = new Object(); - - private boolean started = false; - private ListeningScheduledExecutorService exec = null; - private ListenableFuture future = null; - - @Inject - public DerivativeDataSourceManager( - MaterializedViewConfig config, - Supplier dbTables, - ObjectMapper objectMapper, - SQLMetadataConnector connector - ) - { - this.config = config; - this.dbTables = dbTables; - this.objectMapper = objectMapper; - this.connector = connector; - } - - @LifecycleStart - public void start() - { - log.info("starting derivatives manager."); - synchronized (lock) { - if (started) { - return; - } - exec = MoreExecutors.listeningDecorator(Execs.scheduledSingleThreaded("DerivativeDataSourceManager-Exec-%d")); - final Duration delay = config.getPollDuration().toStandardDuration(); - future = exec.scheduleWithFixedDelay( - new Runnable() { - @Override - public void run() - { - try { - updateDerivatives(); - } - catch (Exception e) { - log.makeAlert(e, "uncaught exception in derivatives manager updating thread").emit(); - } - } - }, - 0, - delay.getMillis(), - TimeUnit.MILLISECONDS - ); - started = true; - } - log.info("Derivatives manager started."); - } - - @LifecycleStop - public void stop() - { - synchronized (lock) { - if (!started) { - return; - } - started = false; - future.cancel(true); - future = null; - DERIVATIVES_REF.set(new ConcurrentHashMap<>()); - exec.shutdownNow(); - exec = null; - } - } - - public static ImmutableSet getDerivatives(String datasource) - { - return ImmutableSet.copyOf(DERIVATIVES_REF.get().getOrDefault(datasource, new TreeSet<>())); - } - - public static ImmutableMap> getAllDerivatives() - { - return ImmutableMap.copyOf(DERIVATIVES_REF.get()); - } - - private void updateDerivatives() - { - List> derivativesInDatabase = connector.retryWithHandle( - handle -> - handle - .createQuery( - StringUtils.format( - "SELECT DISTINCT dataSource,commit_metadata_payload FROM %1$s", - dbTables.get().getDataSourceTable() - ) - ) - .map((int index, ResultSet r, StatementContext ctx) -> { - String datasourceName = r.getString("dataSource"); - DataSourceMetadata payload = JacksonUtils.readValue( - objectMapper, - r.getBytes("commit_metadata_payload"), - DataSourceMetadata.class); - if (!(payload instanceof DerivativeDataSourceMetadata)) { - return null; - } - DerivativeDataSourceMetadata metadata = (DerivativeDataSourceMetadata) payload; - return new Pair<>(datasourceName, metadata); - }) - .list() - ); - - List derivativeDataSources = derivativesInDatabase.parallelStream() - .filter(data -> data != null) - .map(derivatives -> { - String name = derivatives.lhs; - DerivativeDataSourceMetadata metadata = derivatives.rhs; - String baseDataSource = metadata.getBaseDataSource(); - long avgSizePerGranularity = getAvgSizePerGranularity(name); - log.info("find derivatives: {bases=%s, derivative=%s, dimensions=%s, metrics=%s, avgSize=%s}", - baseDataSource, name, metadata.getDimensions(), metadata.getMetrics(), avgSizePerGranularity); - return new DerivativeDataSource(name, baseDataSource, metadata.getColumns(), 
avgSizePerGranularity);
- })
- .filter(derivatives -> derivatives.getAvgSizeBasedGranularity() > 0)
- .collect(Collectors.toList());
-
- ConcurrentHashMap<String, SortedSet<DerivativeDataSource>> newDerivatives = new ConcurrentHashMap<>();
- for (DerivativeDataSource derivative : derivativeDataSources) {
- newDerivatives.computeIfAbsent(derivative.getBaseDataSource(), ds -> new TreeSet<>()).add(derivative);
- }
- ConcurrentHashMap<String, SortedSet<DerivativeDataSource>> current;
- do {
- current = DERIVATIVES_REF.get();
- } while (!DERIVATIVES_REF.compareAndSet(current, newDerivatives));
- }
-
- /**
- * Calculates the average data size per segment granularity for a given datasource.
- *
- * e.g. for a datasource, there are 5 segments as follows:
- * interval = "2018-04-01/2018-04-02", segment size = 1024 * 1024 * 2
- * interval = "2018-04-01/2018-04-02", segment size = 1024 * 1024 * 2
- * interval = "2018-04-02/2018-04-03", segment size = 1024 * 1024 * 1
- * interval = "2018-04-02/2018-04-03", segment size = 1024 * 1024 * 1
- * interval = "2018-04-02/2018-04-03", segment size = 1024 * 1024 * 1
- * Then we get interval count = 2 and total segment size = 1024 * 1024 * 7,
- * so the result is 1024 * 1024 * 7 / 2 = 1024 * 1024 * 3.5.
- *
- * @param datasource the datasource name
- * @return average data size per segment granularity
- */
- private long getAvgSizePerGranularity(String datasource)
- {
- return connector.retryWithHandle(
- new HandleCallback<Long>() {
- Set<Interval> intervals = new HashSet<>();
- long totalSize = 0;
- @Override
- public Long withHandle(Handle handle)
- {
- handle.createQuery(
- StringUtils.format("SELECT start,%1$send%1$s,payload FROM %2$s WHERE used = true AND dataSource = :dataSource",
- connector.getQuoteString(), dbTables.get().getSegmentsTable()
- )
- )
- .bind("dataSource", datasource)
- .map(
- (int index, ResultSet r, StatementContext ctx) -> {
- intervals.add(
- Intervals.utc(
- DateTimes.of(r.getString("start")).getMillis(),
- DateTimes.of(r.getString("end")).getMillis()
- )
- );
- DataSegment segment =
- JacksonUtils.readValue(objectMapper, r.getBytes("payload"), DataSegment.class);
- totalSize += segment.getSize();
- return null;
- }
- )
- .list();
- return intervals.isEmpty() ? 0L : totalSize / intervals.size();
- }
- }
- );
- }
-}
diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewConfig.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewConfig.java
deleted file mode 100644
index ca58daf3c323..000000000000
--- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewConfig.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.materializedview;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.joda.time.Period;
-
-public class MaterializedViewConfig
-{
- @JsonProperty
- private Period pollDuration = new Period("PT1M");
-
- public Period getPollDuration()
- {
- return pollDuration;
- }
-
-}
diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java
deleted file mode 100644
index 2bb9938bcb0d..000000000000
--- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQuery.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.materializedview;
-
-import com.fasterxml.jackson.annotation.JacksonInject;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Ordering;
-import org.apache.druid.java.util.common.granularity.Granularity;
-import org.apache.druid.query.BaseQuery;
-import org.apache.druid.query.DataSource;
-import org.apache.druid.query.Query;
-import org.apache.druid.query.QueryRunner;
-import org.apache.druid.query.QuerySegmentWalker;
-import org.apache.druid.query.filter.DimFilter;
-import org.apache.druid.query.groupby.GroupByQuery;
-import org.apache.druid.query.spec.QuerySegmentSpec;
-import org.apache.druid.query.timeseries.TimeseriesQuery;
-import org.apache.druid.query.topn.TopNQuery;
-import org.joda.time.DateTimeZone;
-import org.joda.time.Duration;
-import org.joda.time.Interval;
-
-import javax.annotation.Nullable;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * MaterializedViewQuery performs materialized-view selection automatically.
- *
- * Each MaterializedViewQuery wraps a real query, whose type can be topN, timeseries, or groupBy.
- * The real query is optimized based on its dataSources and intervals: it is converted into one or more
- * sub-queries, in which dataSources and intervals are replaced by derived dataSources and related sub-intervals.
- *
- * Derived dataSources always have fewer dimensions, but they contain all of the dimensions the real query requires.
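 * <p>
 * A minimal construction sketch (editorial illustration modeled on the tests in this patch, not part
 * of the original source; {@code optimizer} is an in-scope DataSourceOptimizer, which in practice is
 * supplied via {@code @JacksonInject} during JSON deserialization):
 * <pre>{@code
 * TopNQuery topNQuery = new TopNQueryBuilder()
 *     .dataSource("base")
 *     .granularity(Granularities.ALL)
 *     .dimension("dim1")
 *     .metric("cost")
 *     .threshold(4)
 *     .intervals("2011-04-01/2011-04-06")
 *     .aggregators(new LongSumAggregatorFactory("cost", "cost"))
 *     .build();
 * MaterializedViewQuery viewQuery = new MaterializedViewQuery(topNQuery, optimizer);
 * // Serializes with queryType "view"; the wrapped query is rewritten at run time.
 * }</pre>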
- */ -public class MaterializedViewQuery implements Query -{ - public static final String TYPE = "view"; - private final Query query; - private final DataSourceOptimizer optimizer; - - @JsonCreator - public MaterializedViewQuery( - @JsonProperty("query") Query query, - @JacksonInject DataSourceOptimizer optimizer - ) - { - Preconditions.checkArgument( - query instanceof TopNQuery || query instanceof TimeseriesQuery || query instanceof GroupByQuery, - "Only topN/timeseries/groupby query are supported" - ); - this.query = query; - this.optimizer = optimizer; - } - - @JsonProperty("query") - public Query getQuery() - { - return query; - } - - public DataSourceOptimizer getOptimizer() - { - return optimizer; - } - - @Override - public DataSource getDataSource() - { - return query.getDataSource(); - } - - @Override - public boolean hasFilters() - { - return query.hasFilters(); - } - - @Override - public DimFilter getFilter() - { - return query.getFilter(); - } - - @Override - public String getType() - { - return query.getType(); - } - - @Override - public QueryRunner getRunner(QuerySegmentWalker walker) - { - return ((BaseQuery) query).getQuerySegmentSpec().lookup(this, walker); - } - - @Override - public List getIntervals() - - { - return query.getIntervals(); - } - - @Override - public Duration getDuration() - { - return query.getDuration(); - } - - @Override - public Granularity getGranularity() - { - return query.getGranularity(); - } - - @Override - public DateTimeZone getTimezone() - { - return query.getTimezone(); - } - - @Override - public Map getContext() - { - return query.getContext(); - } - - @Override - public ContextType getContextValue(String key) - { - return (ContextType) query.getContextValue(key); - } - - @Override - public ContextType getContextValue(String key, ContextType defaultValue) - { - return (ContextType) query.getContextValue(key, defaultValue); - } - - @Override - public boolean getContextBoolean(String key, boolean defaultValue) - { - return query.getContextBoolean(key, defaultValue); - } - - @Override - public boolean isDescending() - { - return query.isDescending(); - } - - @Override - public Ordering getResultOrdering() - { - return query.getResultOrdering(); - } - - @Override - public MaterializedViewQuery withOverriddenContext(Map contextOverride) - { - return new MaterializedViewQuery(query.withOverriddenContext(contextOverride), optimizer); - } - - @Override - public MaterializedViewQuery withQuerySegmentSpec(QuerySegmentSpec spec) - { - return new MaterializedViewQuery(query.withQuerySegmentSpec(spec), optimizer); - } - - @Override - public MaterializedViewQuery withId(String id) - { - return new MaterializedViewQuery(query.withId(id), optimizer); - } - - @Override - public String getId() - { - return query.getId(); - } - - @Override - public Query withSubQueryId(String subQueryId) - { - return new MaterializedViewQuery<>(query.withSubQueryId(subQueryId), optimizer); - } - - @Nullable - @Override - public String getSubQueryId() - { - return query.getSubQueryId(); - } - - @Override - public MaterializedViewQuery withDataSource(DataSource dataSource) - { - return new MaterializedViewQuery(query.withDataSource(dataSource), optimizer); - } - - @Override - public String toString() - { - return "MaterializedViewQuery{" + - "query=" + query + - "}"; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - MaterializedViewQuery other = 
(MaterializedViewQuery) o; - return other.getQuery().equals(query); - } - - @Override - public int hashCode() - { - return Objects.hash(TYPE, query); - } - -} diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChest.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChest.java deleted file mode 100644 index d6916e719ef4..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChest.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.materializedview; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Function; -import com.google.inject.Inject; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryMetrics; -import org.apache.druid.query.QueryPlus; -import org.apache.druid.query.QueryRunner; -import org.apache.druid.query.QueryToolChest; -import org.apache.druid.query.QueryToolChestWarehouse; -import org.apache.druid.query.aggregation.MetricManipulationFn; -import org.apache.druid.query.context.ResponseContext; - -import java.util.Comparator; -import java.util.function.BinaryOperator; - -public class MaterializedViewQueryQueryToolChest extends QueryToolChest -{ - private final QueryToolChestWarehouse warehouse; - private DataSourceOptimizer optimizer; - - @Inject - public MaterializedViewQueryQueryToolChest( - QueryToolChestWarehouse warehouse - ) - { - this.warehouse = warehouse; - } - - @Override - public QueryRunner mergeResults(QueryRunner runner) - { - return new QueryRunner() { - @Override - public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) - { - Query realQuery = getRealQuery(queryPlus.getQuery()); - return warehouse.getToolChest(realQuery).mergeResults(runner).run(queryPlus.withQuery(realQuery), responseContext); - } - }; - } - - @Override - public BinaryOperator createMergeFn(Query query) - { - final Query realQuery = getRealQuery(query); - return warehouse.getToolChest(realQuery).createMergeFn(realQuery); - } - - @Override - public Comparator createResultComparator(Query query) - { - final Query realQuery = getRealQuery(query); - return warehouse.getToolChest(realQuery).createResultComparator(realQuery); - } - - @Override - public QueryMetrics makeMetrics(Query query) - { - Query realQuery = getRealQuery(query); - return warehouse.getToolChest(realQuery).makeMetrics(realQuery); - } - - @Override - 
public Function makePreComputeManipulatorFn(Query query, MetricManipulationFn fn) - { - Query realQuery = getRealQuery(query); - return warehouse.getToolChest(realQuery).makePreComputeManipulatorFn(realQuery, fn); - } - - @Override - public Function makePostComputeManipulatorFn(Query query, MetricManipulationFn fn) - { - Query realQuery = getRealQuery(query); - return warehouse.getToolChest(realQuery).makePostComputeManipulatorFn(realQuery, fn); - } - - @Override - public ObjectMapper decorateObjectMapper(final ObjectMapper objectMapper, final Query query) - { - Query realQuery = getRealQuery(query); - return warehouse.getToolChest(realQuery).decorateObjectMapper(objectMapper, realQuery); - } - - @Override - public TypeReference getResultTypeReference() - { - return null; - } - - @Override - public QueryRunner preMergeQueryDecoration(final QueryRunner runner) - { - return new QueryRunner() { - @Override - public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) - { - Query realQuery = getRealQuery(queryPlus.getQuery()); - QueryToolChest realQueryToolChest = warehouse.getToolChest(realQuery); - QueryRunner realQueryRunner = realQueryToolChest.preMergeQueryDecoration( - new MaterializedViewQueryRunner(runner, optimizer) - ); - return realQueryRunner.run(queryPlus.withQuery(realQuery), responseContext); - } - }; - } - - public Query getRealQuery(Query query) - { - if (query instanceof MaterializedViewQuery) { - optimizer = ((MaterializedViewQuery) query).getOptimizer(); - return ((MaterializedViewQuery) query).getQuery(); - } - return query; - } -} diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryRunner.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryRunner.java deleted file mode 100644 index 9c72808045c3..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewQueryRunner.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.materializedview; - -import com.google.common.base.Function; -import com.google.common.collect.Lists; -import org.apache.druid.java.util.common.guava.MergeSequence; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.guava.Sequences; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryPlus; -import org.apache.druid.query.QueryRunner; -import org.apache.druid.query.context.ResponseContext; - - -public class MaterializedViewQueryRunner implements QueryRunner -{ - private final QueryRunner runner; - private final DataSourceOptimizer optimizer; - - public MaterializedViewQueryRunner(QueryRunner queryRunner, DataSourceOptimizer optimizer) - { - this.runner = queryRunner; - this.optimizer = optimizer; - } - - @Override - public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) - { - Query query = queryPlus.getQuery(); - return new MergeSequence<>( - query.getResultOrdering(), - Sequences.simple( - Lists.transform( - optimizer.optimize(query), - new Function>() - { - @Override - public Sequence apply(Query query) - { - return runner.run( - queryPlus.withQuery(query), - responseContext - ); - } - } - ) - ) - ); - } -} diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewSelectionDruidModule.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewSelectionDruidModule.java deleted file mode 100644 index 2527bb0b52cb..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewSelectionDruidModule.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.materializedview; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.google.inject.Singleton; -import org.apache.druid.guice.DruidBinders; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.LifecycleModule; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.server.metrics.MetricsModule; - -import java.util.List; - -public class MaterializedViewSelectionDruidModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule(getClass().getSimpleName()) - .registerSubtypes( - new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE)) - ); - } - - @Override - public void configure(Binder binder) - { - DruidBinders.queryToolChestBinder(binder) - .addBinding(MaterializedViewQuery.class) - .to(MaterializedViewQueryQueryToolChest.class); - LifecycleModule.register(binder, DerivativeDataSourceManager.class); - binder.bind(DataSourceOptimizer.class).in(Singleton.class); - MetricsModule.register(binder, DataSourceOptimizerMonitor.class); - JsonConfigProvider.bind(binder, "druid.manager.derivatives", MaterializedViewConfig.class); - } -} diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java deleted file mode 100644 index c0a5e9d089a1..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.materializedview; - -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.java.util.common.JodaUtils; -import org.apache.druid.query.Query; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.FilteredAggregatorFactory; -import org.apache.druid.query.dimension.DimensionSpec; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.timeseries.TimeseriesQuery; -import org.apache.druid.query.topn.TopNQuery; -import org.joda.time.Interval; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -public class MaterializedViewUtils -{ - /** - * extract all dimensions in query. 
- * only supports TopNQuery/TimeseriesQuery/GroupByQuery
- *
- * @param query the query to analyze
- * @return the set of dimension names required by the query
- */
- public static Set<String> getRequiredFields(Query query)
- {
- Set<String> dimsInFilter = null == query.getFilter() ? new HashSet<String>() : query.getFilter().getRequiredColumns();
- Set<String> dimensions = new HashSet<>(dimsInFilter);
-
- if (query instanceof TopNQuery) {
- TopNQuery q = (TopNQuery) query;
- dimensions.addAll(extractFieldsFromAggregations(q.getAggregatorSpecs()));
- dimensions.add(q.getDimensionSpec().getDimension());
- } else if (query instanceof TimeseriesQuery) {
- TimeseriesQuery q = (TimeseriesQuery) query;
- dimensions.addAll(extractFieldsFromAggregations(q.getAggregatorSpecs()));
- } else if (query instanceof GroupByQuery) {
- GroupByQuery q = (GroupByQuery) query;
- dimensions.addAll(extractFieldsFromAggregations(q.getAggregatorSpecs()));
- for (DimensionSpec spec : q.getDimensions()) {
- String dim = spec.getDimension();
- dimensions.add(dim);
- }
- } else {
- throw new UnsupportedOperationException("Method getRequiredFields only supports TopNQuery/TimeseriesQuery/GroupByQuery");
- }
- return dimensions;
- }
-
- private static Set<String> extractFieldsFromAggregations(List<AggregatorFactory> aggs)
- {
- Set<String> ret = new HashSet<>();
- for (AggregatorFactory agg : aggs) {
- if (agg instanceof FilteredAggregatorFactory) {
- FilteredAggregatorFactory fagg = (FilteredAggregatorFactory) agg;
- ret.addAll(fagg.getFilter().getRequiredColumns());
- }
- ret.addAll(agg.requiredFields());
- }
- return ret;
- }
-
- /**
- * Calculates the intervals that are covered by interval2 but not covered by interval1;
- * that is, result intervals = interval2 - (interval1 ∩ interval2).
- * e.g.
- * a list of interval2: ["2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z",
- * "2018-04-03T00:00:00.000Z/2018-04-10T00:00:00.000Z"]
- * a list of interval1: ["2018-04-04T00:00:00.000Z/2018-04-06T00:00:00.000Z"]
- * the result list of intervals: ["2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z",
- * "2018-04-03T00:00:00.000Z/2018-04-04T00:00:00.000Z",
- * "2018-04-06T00:00:00.000Z/2018-04-10T00:00:00.000Z"]
- * If interval2 is empty, an empty list is intended to be returned. (Note that, as implemented
- * below, the method actually returns interval1 whenever either input list is empty.)
- * @param interval2 list of intervals to subtract from
- * @param interval1 list of intervals to subtract
- * @return the list of intervals covered by interval2 but not by interval1
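 *
 * <p>
 * The documented example as a usage sketch (editorial illustration, not part of the original source;
 * the expected output follows the documented semantics):
 * <pre>{@code
 * List<Interval> interval2 = Arrays.asList(
 *     Intervals.of("2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z"),
 *     Intervals.of("2018-04-03T00:00:00.000Z/2018-04-10T00:00:00.000Z"));
 * List<Interval> interval1 = Collections.singletonList(
 *     Intervals.of("2018-04-04T00:00:00.000Z/2018-04-06T00:00:00.000Z"));
 * List<Interval> remaining = MaterializedViewUtils.minus(interval2, interval1);
 * // remaining: [2018-04-01/2018-04-02, 2018-04-03/2018-04-04, 2018-04-06/2018-04-10]
 * }</pre>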
- */ - public static List minus(List interval2, List interval1) - { - if (interval1.isEmpty() || interval2.isEmpty()) { - return interval1; - } - Iterator it1 = JodaUtils.condenseIntervals(interval1).iterator(); - Iterator it2 = JodaUtils.condenseIntervals(interval2).iterator(); - List remaining = new ArrayList<>(); - Interval currInterval1 = it1.next(); - Interval currInterval2 = it2.next(); - long start1 = currInterval1.getStartMillis(); - long end1 = currInterval1.getEndMillis(); - long start2 = currInterval2.getStartMillis(); - long end2 = currInterval2.getEndMillis(); - while (true) { - if (start2 < start1 && end2 <= start1) { - remaining.add(Intervals.utc(start2, end2)); - if (it2.hasNext()) { - currInterval2 = it2.next(); - start2 = currInterval2.getStartMillis(); - end2 = currInterval2.getEndMillis(); - } else { - break; - } - } - if (start2 < start1 && end2 > start1 && end2 < end1) { - remaining.add(Intervals.utc(start2, start1)); - start1 = end2; - if (it2.hasNext()) { - currInterval2 = it2.next(); - start2 = currInterval2.getStartMillis(); - end2 = currInterval2.getEndMillis(); - } else { - break; - } - } - if (start2 < start1 && end2 == end1) { - remaining.add(Intervals.utc(start2, start1)); - if (it2.hasNext() && it1.hasNext()) { - currInterval2 = it2.next(); - start2 = currInterval2.getStartMillis(); - end2 = currInterval2.getEndMillis(); - currInterval1 = it1.next(); - start1 = currInterval1.getStartMillis(); - end1 = currInterval1.getEndMillis(); - } else { - break; - } - } - if (start2 < start1 && end2 > end1) { - remaining.add(Intervals.utc(start2, start1)); - start2 = end1; - if (it1.hasNext()) { - currInterval1 = it1.next(); - start1 = currInterval1.getStartMillis(); - end1 = currInterval1.getEndMillis(); - } else { - remaining.add(Intervals.utc(end1, end2)); - break; - } - } - if (start2 == start1 && end2 >= start1 && end2 < end1) { - start1 = end2; - if (it2.hasNext()) { - currInterval2 = it2.next(); - start2 = currInterval2.getStartMillis(); - end2 = currInterval2.getEndMillis(); - } else { - break; - } - } - if (start2 == start1 && end2 > end1) { - start2 = end1; - if (it1.hasNext()) { - currInterval1 = it1.next(); - start1 = currInterval1.getStartMillis(); - end1 = currInterval1.getEndMillis(); - } else { - remaining.add(Intervals.utc(end1, end2)); - break; - } - } - if (start2 > start1 && start2 < end1 && end2 < end1) { - start1 = end2; - if (it2.hasNext()) { - currInterval2 = it2.next(); - start2 = currInterval2.getStartMillis(); - end2 = currInterval2.getEndMillis(); - } else { - break; - } - } - if (start2 > start1 && start2 < end1 && end2 > end1) { - start2 = end1; - if (it1.hasNext()) { - currInterval1 = it1.next(); - start1 = currInterval1.getStartMillis(); - end1 = currInterval1.getEndMillis(); - } else { - remaining.add(Intervals.utc(end1, end2)); - break; - } - } - if (start2 >= start1 && start2 <= end1 && end2 == end1) { - if (it2.hasNext() && it1.hasNext()) { - currInterval2 = it2.next(); - start2 = currInterval2.getStartMillis(); - end2 = currInterval2.getEndMillis(); - currInterval1 = it1.next(); - start1 = currInterval1.getStartMillis(); - end1 = currInterval1.getEndMillis(); - } else { - break; - } - } - if (start2 >= end1 && end2 > end1) { - if (it1.hasNext()) { - currInterval1 = it1.next(); - start1 = currInterval1.getStartMillis(); - end1 = currInterval1.getEndMillis(); - } else { - remaining.add(Intervals.utc(start2, end2)); - break; - } - } - } - - while (it2.hasNext()) { - remaining.add(Intervals.of(it2.next().toString())); - } - return 
remaining; - } -} diff --git a/extensions-contrib/materialized-view-selection/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/materialized-view-selection/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index a0aa8be4c8c1..000000000000 --- a/extensions-contrib/materialized-view-selection/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.query.materializedview.MaterializedViewSelectionDruidModule diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java deleted file mode 100644 index bd070e3b24dc..000000000000 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.materializedview; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.dataformat.smile.SmileFactory; -import com.fasterxml.jackson.dataformat.smile.SmileGenerator; -import com.google.common.base.Predicates; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import org.apache.druid.client.BatchServerInventoryView; -import org.apache.druid.client.BrokerSegmentWatcherConfig; -import org.apache.druid.client.BrokerServerView; -import org.apache.druid.client.DruidServer; -import org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy; -import org.apache.druid.client.selector.RandomServerSelectorStrategy; -import org.apache.druid.curator.CuratorTestBase; -import org.apache.druid.indexing.materializedview.DerivativeDataSourceMetadata; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.java.util.http.client.HttpClient; -import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator; -import org.apache.druid.metadata.TestDerbyConnector; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryRunnerTestHelper; -import org.apache.druid.query.QueryToolChestWarehouse; -import org.apache.druid.query.QueryWatcher; -import org.apache.druid.query.aggregation.LongSumAggregatorFactory; -import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; -import org.apache.druid.query.topn.TopNQuery; -import org.apache.druid.query.topn.TopNQueryBuilder; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.server.coordination.DruidServerMetadata; -import org.apache.druid.server.coordination.ServerType; -import org.apache.druid.server.initialization.ZkPathsConfig; -import org.apache.druid.server.metrics.NoopServiceEmitter; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.NoneShardSpec; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; - -public class DatasourceOptimizerTest extends CuratorTestBase -{ - @Rule - public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); - private DerivativeDataSourceManager derivativesManager; - private DruidServer druidServer; - private ObjectMapper jsonMapper; - private ZkPathsConfig zkPathsConfig; - private DataSourceOptimizer optimizer; - private IndexerSQLMetadataStorageCoordinator metadataStorageCoordinator; - private BatchServerInventoryView baseView; - private BrokerServerView brokerServerView; - - @Before - public void setUp() throws Exception - { - TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector(); - derbyConnector.createDataSourceTable(); - derbyConnector.createSegmentTable(); - MaterializedViewConfig viewConfig = new MaterializedViewConfig(); - jsonMapper = TestHelper.makeJsonMapper(); - jsonMapper.registerSubtypes(new NamedType(DerivativeDataSourceMetadata.class, "view")); - metadataStorageCoordinator = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class); - 
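// NOTE (editorial, not in the original source): the EasyMock stub created above is effectively dead
// code; it is overwritten by the real IndexerSQLMetadataStorageCoordinator constructed just below.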
derivativesManager = new DerivativeDataSourceManager( - viewConfig, - derbyConnectorRule.metadataTablesConfigSupplier(), - jsonMapper, - derbyConnector - ); - metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator( - jsonMapper, - derbyConnectorRule.metadataTablesConfigSupplier().get(), - derbyConnector - ); - - setupServerAndCurator(); - curator.start(); - curator.blockUntilConnected(); - - zkPathsConfig = new ZkPathsConfig(); - setupViews(); - - druidServer = new DruidServer( - "localhost:1234", - "localhost:1234", - null, - 10000000L, - ServerType.HISTORICAL, - "default_tier", - 0 - ); - setupZNodeForServer(druidServer, new ZkPathsConfig(), jsonMapper); - optimizer = new DataSourceOptimizer(brokerServerView); - } - - @After - public void tearDown() throws IOException - { - baseView.stop(); - tearDownServerAndCurator(); - } - - @Test(timeout = 60_000L) - public void testOptimize() throws InterruptedException - { - // insert datasource metadata - String dataSource = "derivative"; - String baseDataSource = "base"; - Set dims = Sets.newHashSet("dim1", "dim2", "dim3"); - Set metrics = Sets.newHashSet("cost"); - DerivativeDataSourceMetadata metadata = new DerivativeDataSourceMetadata(baseDataSource, dims, metrics); - metadataStorageCoordinator.insertDataSourceMetadata(dataSource, metadata); - // insert base datasource segments - List baseResult = Lists.transform( - ImmutableList.of( - "2011-04-01/2011-04-02", - "2011-04-02/2011-04-03", - "2011-04-03/2011-04-04", - "2011-04-04/2011-04-05", - "2011-04-05/2011-04-06" - ), - interval -> { - final DataSegment segment = createDataSegment( - "base", - interval, - "v1", - Lists.newArrayList("dim1", "dim2", "dim3", "dim4"), - 1024 * 1024 - ); - try { - metadataStorageCoordinator.announceHistoricalSegments(Sets.newHashSet(segment)); - announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper); - } - catch (IOException e) { - return false; - } - return true; - } - ); - // insert derivative segments - List derivativeResult = Lists.transform( - ImmutableList.of( - "2011-04-01/2011-04-02", - "2011-04-02/2011-04-03", - "2011-04-03/2011-04-04" - ), - interval -> { - final DataSegment segment = createDataSegment( - "derivative", - interval, - "v1", - Lists.newArrayList("dim1", "dim2", "dim3"), - 1024 - ); - try { - metadataStorageCoordinator.announceHistoricalSegments(Sets.newHashSet(segment)); - announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper); - } - catch (IOException e) { - return false; - } - return true; - } - ); - Assert.assertFalse(baseResult.contains(false)); - Assert.assertFalse(derivativeResult.contains(false)); - derivativesManager.start(); - while (DerivativeDataSourceManager.getAllDerivatives().isEmpty()) { - TimeUnit.SECONDS.sleep(1L); - } - // build user query - TopNQuery userQuery = new TopNQueryBuilder() - .dataSource("base") - .granularity(QueryRunnerTestHelper.ALL_GRAN) - .dimension("dim1") - .metric("cost") - .threshold(4) - .intervals("2011-04-01/2011-04-06") - .aggregators(new LongSumAggregatorFactory("cost", "cost")) - .build(); - - List expectedQueryAfterOptimizing = Lists.newArrayList( - new TopNQueryBuilder() - .dataSource("derivative") - .granularity(QueryRunnerTestHelper.ALL_GRAN) - .dimension("dim1") - .metric("cost") - .threshold(4) - .intervals(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-01/2011-04-04")))) - .aggregators(new LongSumAggregatorFactory("cost", "cost")) - .build(), - new TopNQueryBuilder() - .dataSource("base") - 
.granularity(QueryRunnerTestHelper.ALL_GRAN) - .dimension("dim1") - .metric("cost") - .threshold(4) - .intervals(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-04/2011-04-06")))) - .aggregators(new LongSumAggregatorFactory("cost", "cost")) - .build() - ); - Assert.assertEquals(expectedQueryAfterOptimizing, optimizer.optimize(userQuery)); - derivativesManager.stop(); - } - - private DataSegment createDataSegment(String name, String intervalStr, String version, List dims, long size) - { - return DataSegment.builder() - .dataSource(name) - .interval(Intervals.of(intervalStr)) - .loadSpec( - ImmutableMap.of( - "type", - "local", - "path", - "somewhere" - ) - ) - .version(version) - .dimensions(dims) - .metrics(ImmutableList.of("cost")) - .shardSpec(NoneShardSpec.instance()) - .binaryVersion(9) - .size(size) - .build(); - } - - private void setupViews() throws Exception - { - baseView = new BatchServerInventoryView(zkPathsConfig, curator, jsonMapper, Predicates.alwaysTrue()) - { - @Override - public void registerSegmentCallback(Executor exec, final SegmentCallback callback) - { - super.registerSegmentCallback( - exec, - new SegmentCallback() - { - @Override - public CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment) - { - return callback.segmentAdded(server, segment); - } - - @Override - public CallbackAction segmentRemoved(DruidServerMetadata server, DataSegment segment) - { - return callback.segmentRemoved(server, segment); - } - - @Override - public CallbackAction segmentViewInitialized() - { - return callback.segmentViewInitialized(); - } - } - ); - } - }; - - brokerServerView = new BrokerServerView( - EasyMock.createMock(QueryToolChestWarehouse.class), - EasyMock.createMock(QueryWatcher.class), - getSmileMapper(), - EasyMock.createMock(HttpClient.class), - baseView, - new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()), - new NoopServiceEmitter(), - new BrokerSegmentWatcherConfig() - ); - baseView.start(); - } - - private ObjectMapper getSmileMapper() - { - final SmileFactory smileFactory = new SmileFactory(); - smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); - smileFactory.delegateToTextual(true); - final ObjectMapper retVal = new DefaultObjectMapper(smileFactory); - retVal.getFactory().setCodec(retVal); - return retVal; - } -} diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java deleted file mode 100644 index b96ff17f670b..000000000000 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.materializedview; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Function; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.query.Druids; -import org.apache.druid.query.MapQueryToolChestWarehouse; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryRunnerTestHelper; -import org.apache.druid.query.QueryToolChest; -import org.apache.druid.query.Result; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.LongSumAggregatorFactory; -import org.apache.druid.query.aggregation.MetricManipulationFn; -import org.apache.druid.query.dimension.DefaultDimensionSpec; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.groupby.GroupByQueryConfig; -import org.apache.druid.query.groupby.GroupByQueryQueryToolChest; -import org.apache.druid.query.groupby.GroupByQueryRunnerTestHelper; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.query.timeseries.TimeseriesQuery; -import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest; -import org.apache.druid.query.timeseries.TimeseriesResultValue; -import org.junit.Assert; -import org.junit.Test; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -public class MaterializedViewQueryQueryToolChestTest -{ - private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); - - @Test - public void testMakePostComputeManipulatorFn() - { - TimeseriesQuery realQuery = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.DATA_SOURCE) - .granularity(QueryRunnerTestHelper.DAY_GRAN) - .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .aggregators(QueryRunnerTestHelper.ROWS_COUNT) - .descending(true) - .build(); - MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null); - - QueryToolChest materializedViewQueryQueryToolChest = - new MaterializedViewQueryQueryToolChest(new MapQueryToolChestWarehouse( - ImmutableMap., QueryToolChest>builder() - .put(TimeseriesQuery.class, new TimeseriesQueryQueryToolChest()) - .build() - )); - - Function postFn = - materializedViewQueryQueryToolChest.makePostComputeManipulatorFn( - materializedViewQuery, - new MetricManipulationFn() { - @Override - public Object manipulate(AggregatorFactory factory, Object object) - { - return "metricvalue1"; - } - }); - - - - Result result = new Result<>( - DateTimes.nowUtc(), - new TimeseriesResultValue(ImmutableMap.of("dim1", "dimvalue1")) - ); - - Result postResult = (Result) postFn.apply(result); - Map postResultMap = postResult.getValue().getBaseObject(); - - Assert.assertEquals(postResult.getTimestamp(), result.getTimestamp()); - Assert.assertEquals(postResultMap.size(), 2); - 
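// The post-compute manipulator should rewrite only the aggregator value; dimension entries must
// pass through unchanged, as the next two asserts verify.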
Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.ROWS_COUNT.getName()), "metricvalue1"); - Assert.assertEquals(postResultMap.get("dim1"), "dimvalue1"); - } - - @Test - public void testDecorateObjectMapper() throws IOException - { - GroupByQuery realQuery = GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) - .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) - .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs( - QueryRunnerTestHelper.ROWS_COUNT, - new LongSumAggregatorFactory("idx", "index") - ) - .setGranularity(QueryRunnerTestHelper.DAY_GRAN) - .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false)) - .build(); - - QueryToolChest queryToolChest = - new MaterializedViewQueryQueryToolChest(new MapQueryToolChestWarehouse( - ImmutableMap., QueryToolChest>builder() - .put(GroupByQuery.class, new GroupByQueryQueryToolChest(null)) - .build() - )); - - ObjectMapper objectMapper = queryToolChest.decorateObjectMapper(JSON_MAPPER, realQuery); - - List results = Arrays.asList( - GroupByQueryRunnerTestHelper.createExpectedRow( - realQuery, - "2011-04-01", - "alias", - "automotive", - "rows", - 1L, - "idx", - 135L - - ), - GroupByQueryRunnerTestHelper.createExpectedRow( - realQuery, - "2011-04-01", - "alias", - "business", - "rows", - 1L, - "idx", - 118L - ) - ); - List expectedResults = results.stream() - .map(resultRow -> resultRow.toMapBasedRow(realQuery)) - .collect(Collectors.toList()); - - Assert.assertEquals( - "decorate-object-mapper", - JSON_MAPPER.writerFor(new TypeReference>(){}).writeValueAsString(expectedResults), - objectMapper.writeValueAsString(results) - ); - } - - @Test - public void testDecorateObjectMapperMaterializedViewQuery() throws IOException - { - GroupByQuery realQuery = GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) - .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) - .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs( - QueryRunnerTestHelper.ROWS_COUNT, - new LongSumAggregatorFactory("idx", "index") - ) - .setGranularity(QueryRunnerTestHelper.DAY_GRAN) - .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false)) - .build(); - MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null); - - QueryToolChest materializedViewQueryQueryToolChest = - new MaterializedViewQueryQueryToolChest(new MapQueryToolChestWarehouse( - ImmutableMap., QueryToolChest>builder() - .put(GroupByQuery.class, new GroupByQueryQueryToolChest(null)) - .build() - )); - - ObjectMapper objectMapper = materializedViewQueryQueryToolChest.decorateObjectMapper(JSON_MAPPER, materializedViewQuery); - - List results = Arrays.asList( - GroupByQueryRunnerTestHelper.createExpectedRow( - realQuery, - "2011-04-01", - "alias", - "automotive", - "rows", - 1L, - "idx", - 135L - - ), - GroupByQueryRunnerTestHelper.createExpectedRow( - realQuery, - "2011-04-01", - "alias", - "business", - "rows", - 1L, - "idx", - 118L - ) - ); - List expectedResults = results.stream() - .map(resultRow -> resultRow.toMapBasedRow(realQuery)) - .collect(Collectors.toList()); - - Assert.assertEquals( - "decorate-object-mapper", - JSON_MAPPER.writerFor(new TypeReference>(){}).writeValueAsString(expectedResults), - objectMapper.writeValueAsString(results) - ); - } - - @Test - public void testGetRealQuery() - { - GroupByQuery realQuery = GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) - 
-        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
-        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
-        .setAggregatorSpecs(
-            QueryRunnerTestHelper.ROWS_COUNT,
-            new LongSumAggregatorFactory("idx", "index")
-        )
-        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
-        .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false))
-        .build();
-    MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null);
-
-    MaterializedViewQueryQueryToolChest materializedViewQueryQueryToolChest =
-        new MaterializedViewQueryQueryToolChest(new MapQueryToolChestWarehouse(
-            ImmutableMap.<Class<? extends Query>, QueryToolChest>builder()
-                .put(GroupByQuery.class, new GroupByQueryQueryToolChest(null))
-                .build()
-        ));
-
-    Assert.assertEquals(realQuery, materializedViewQueryQueryToolChest.getRealQuery(materializedViewQuery));
-  }
-}
diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java
deleted file mode 100644
index 1432f519ef73..000000000000
--- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
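
The three tests above all exercise one contract: MaterializedViewQuery is a thin wrapper, and its toolchest resolves every operation against the toolchest of the wrapped "real" query, looked up by concrete class in the warehouse. A minimal sketch of that delegation pattern with simplified stand-in types (none of these are the real Druid interfaces):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    public class DelegationSketch
    {
      // Stand-ins for Druid's Query / QueryToolChest; illustrative only.
      interface Query {}

      static class GroupByLikeQuery implements Query {}

      // The wrapper holds the real query, like MaterializedViewQuery.getRealQuery().
      static class WrapperQuery implements Query
      {
        final Query realQuery;

        WrapperQuery(Query realQuery)
        {
          this.realQuery = realQuery;
        }
      }

      public static void main(String[] args)
      {
        // The "warehouse": concrete query type -> handler, like MapQueryToolChestWarehouse.
        Map<Class<? extends Query>, Function<Query, String>> warehouse = new HashMap<>();
        warehouse.put(GroupByLikeQuery.class, q -> "handled by groupBy toolchest");

        // Unwrap first, then dispatch on the real query's class.
        WrapperQuery wrapper = new WrapperQuery(new GroupByLikeQuery());
        System.out.println(warehouse.get(wrapper.realQuery.getClass()).apply(wrapper.realQuery));
      }
    }
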
- */ - -package org.apache.druid.query.materializedview; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import org.apache.druid.math.expr.ExprMacroTable; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryRunnerTestHelper; -import org.apache.druid.query.TableDataSource; -import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory; -import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory; -import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable; -import org.apache.druid.query.topn.TopNQuery; -import org.apache.druid.query.topn.TopNQueryBuilder; -import org.apache.druid.segment.TestHelper; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -public class MaterializedViewQueryTest -{ - private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper(); - private DataSourceOptimizer optimizer; - - @Before - public void setUp() - { - JSON_MAPPER.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE)); - optimizer = EasyMock.createMock(DataSourceOptimizer.class); - JSON_MAPPER.setInjectableValues( - new InjectableValues.Std() - .addValue(ExprMacroTable.class.getName(), LookupEnabledTestExprMacroTable.INSTANCE) - .addValue(DataSourceOptimizer.class, optimizer) - ); - } - - @Test - public void testQuerySerialization() throws IOException - { - TopNQuery topNQuery = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.DATA_SOURCE) - .granularity(QueryRunnerTestHelper.ALL_GRAN) - .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) - .metric(QueryRunnerTestHelper.INDEX_METRIC) - .threshold(4) - .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) - .aggregators( - Lists.newArrayList( - Iterables.concat( - QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, - Lists.newArrayList( - new DoubleMaxAggregatorFactory("maxIndex", "index"), - new DoubleMinAggregatorFactory("minIndex", "index") - ) - ) - ) - ) - .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) - .build(); - MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer); - String json = JSON_MAPPER.writeValueAsString(query); - Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); - Assert.assertEquals(query, serdeQuery); - Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), query.getDataSource()); - Assert.assertEquals(QueryRunnerTestHelper.ALL_GRAN, query.getGranularity()); - Assert.assertEquals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(), query.getIntervals()); - } -} diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewUtilsTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewUtilsTest.java deleted file mode 100644 index fe00a2ce318c..000000000000 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewUtilsTest.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.materializedview; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import org.apache.druid.java.util.common.Intervals; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.timeseries.TimeseriesQuery; -import org.apache.druid.query.topn.TopNQuery; -import org.apache.druid.segment.TestHelper; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.joda.time.Interval; -import org.junit.Assert; -import org.junit.Test; - -import java.util.List; -import java.util.Set; - -public class MaterializedViewUtilsTest extends InitializedNullHandlingTest -{ - private static ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); - - @Test - public void testGetRequiredFieldsFromGroupByQuery() throws Exception - { - String queryStr = "{\n" + - " \"queryType\": \"groupBy\",\n" + - " \"dataSource\": \"sample_datasource\",\n" + - " \"granularity\": \"day\",\n" + - " \"dimensions\": [\"country\", \"device\"],\n" + - " \"limitSpec\": { \"type\": \"default\", \"limit\": 5000, \"columns\": [\"country\", \"data_transfer\"] },\n" + - " \"filter\": {\n" + - " \"type\": \"and\",\n" + - " \"fields\": [\n" + - " { \"type\": \"selector\", \"dimension\": \"carrier\", \"value\": \"AT&T\" },\n" + - " { \"type\": \"or\", \n" + - " \"fields\": [\n" + - " { \"type\": \"selector\", \"dimension\": \"make\", \"value\": \"Apple\" },\n" + - " { \"type\": \"selector\", \"dimension\": \"make\", \"value\": \"Samsung\" }\n" + - " ]\n" + - " }\n" + - " ]\n" + - " },\n" + - " \"aggregations\": [\n" + - " { \"type\": \"longSum\", \"name\": \"total_usage\", \"fieldName\": \"user_count\" },\n" + - " { \"type\": \"doubleSum\", \"name\": \"data_transfer\", \"fieldName\": \"data_transfer\" }\n" + - " ],\n" + - " \"postAggregations\": [\n" + - " { \"type\": \"arithmetic\",\n" + - " \"name\": \"avg_usage\",\n" + - " \"fn\": \"/\",\n" + - " \"fields\": [\n" + - " { \"type\": \"fieldAccess\", \"fieldName\": \"data_transfer\" },\n" + - " { \"type\": \"fieldAccess\", \"fieldName\": \"total_usage\" }\n" + - " ]\n" + - " }\n" + - " ],\n" + - " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ],\n" + - " \"having\": {\n" + - " \"type\": \"greaterThan\",\n" + - " \"aggregation\": \"total_usage\",\n" + - " \"value\": 100\n" + - " }\n" + - "}"; - GroupByQuery query = jsonMapper.readValue(queryStr, GroupByQuery.class); - Set fields = MaterializedViewUtils.getRequiredFields(query); - Assert.assertEquals( - Sets.newHashSet("country", "device", "carrier", "make", "user_count", "data_transfer"), - fields - ); - } - - @Test - public void testGetRequiredFieldsFromTopNQuery() throws Exception - { - String queryStr = "{\n" + - " \"queryType\": \"topN\",\n" + - " \"dataSource\": 
\"sample_data\",\n" + - " \"dimension\": \"sample_dim\",\n" + - " \"threshold\": 5,\n" + - " \"metric\": \"count\",\n" + - " \"granularity\": \"all\",\n" + - " \"filter\": {\n" + - " \"type\": \"and\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"type\": \"selector\",\n" + - " \"dimension\": \"dim1\",\n" + - " \"value\": \"some_value\"\n" + - " },\n" + - " {\n" + - " \"type\": \"selector\",\n" + - " \"dimension\": \"dim2\",\n" + - " \"value\": \"some_other_val\"\n" + - " }\n" + - " ]\n" + - " },\n" + - " \"aggregations\": [\n" + - " {\n" + - " \"type\": \"longSum\",\n" + - " \"name\": \"count\",\n" + - " \"fieldName\": \"count\"\n" + - " },\n" + - " {\n" + - " \"type\": \"doubleSum\",\n" + - " \"name\": \"some_metric\",\n" + - " \"fieldName\": \"some_metric\"\n" + - " }\n" + - " ],\n" + - " \"postAggregations\": [\n" + - " {\n" + - " \"type\": \"arithmetic\",\n" + - " \"name\": \"average\",\n" + - " \"fn\": \"/\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"type\": \"fieldAccess\",\n" + - " \"name\": \"some_metric\",\n" + - " \"fieldName\": \"some_metric\"\n" + - " },\n" + - " {\n" + - " \"type\": \"fieldAccess\",\n" + - " \"name\": \"count\",\n" + - " \"fieldName\": \"count\"\n" + - " }\n" + - " ]\n" + - " }\n" + - " ],\n" + - " \"intervals\": [\n" + - " \"2013-08-31T00:00:00.000/2013-09-03T00:00:00.000\"\n" + - " ]\n" + - "}"; - TopNQuery query = jsonMapper.readValue(queryStr, TopNQuery.class); - Set fields = MaterializedViewUtils.getRequiredFields(query); - Assert.assertEquals( - Sets.newHashSet("sample_dim", "dim1", "dim2", "count", "some_metric"), - fields - ); - } - - @Test - public void testGetRequiredFieldsFromTimeseriesQuery() throws Exception - { - String queryStr = "{\n" + - " \"queryType\": \"timeseries\",\n" + - " \"dataSource\": \"sample_datasource\",\n" + - " \"granularity\": \"day\",\n" + - " \"descending\": \"true\",\n" + - " \"filter\": {\n" + - " \"type\": \"and\",\n" + - " \"fields\": [\n" + - " { \"type\": \"selector\", \"dimension\": \"sample_dimension1\", \"value\": \"sample_value1\" },\n" + - " { \"type\": \"or\",\n" + - " \"fields\": [\n" + - " { \"type\": \"selector\", \"dimension\": \"sample_dimension2\", \"value\": \"sample_value2\" },\n" + - " { \"type\": \"selector\", \"dimension\": \"sample_dimension3\", \"value\": \"sample_value3\" }\n" + - " ]\n" + - " }\n" + - " ]\n" + - " },\n" + - " \"aggregations\": [\n" + - " { \"type\": \"longSum\", \"name\": \"sample_name1\", \"fieldName\": \"sample_fieldName1\" },\n" + - " { \"type\": \"doubleSum\", \"name\": \"sample_name2\", \"fieldName\": \"sample_fieldName2\" }\n" + - " ],\n" + - " \"postAggregations\": [\n" + - " { \"type\": \"arithmetic\",\n" + - " \"name\": \"sample_divide\",\n" + - " \"fn\": \"/\",\n" + - " \"fields\": [\n" + - " { \"type\": \"fieldAccess\", \"name\": \"postAgg__sample_name1\", \"fieldName\": \"sample_name1\" },\n" + - " { \"type\": \"fieldAccess\", \"name\": \"postAgg__sample_name2\", \"fieldName\": \"sample_name2\" }\n" + - " ]\n" + - " }\n" + - " ],\n" + - " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ]\n" + - "}"; - TimeseriesQuery query = jsonMapper.readValue(queryStr, TimeseriesQuery.class); - Set fields = MaterializedViewUtils.getRequiredFields(query); - Assert.assertEquals( - Sets.newHashSet("sample_dimension1", "sample_dimension2", "sample_dimension3", "sample_fieldName1", - "sample_fieldName2"), - fields - ); - } - - @Test - public void testIntervalMinus() - { - List intervalList1 = Lists.newArrayList( - 
        Intervals.of("2012-01-02T00:00:00.000/2012-01-03T00:00:00.000"),
-        Intervals.of("2012-01-08T00:00:00.000/2012-01-10T00:00:00.000"),
-        Intervals.of("2012-01-16T00:00:00.000/2012-01-17T00:00:00.000")
-    );
-    List intervalList2 = Lists.newArrayList(
-        Intervals.of("2012-01-01T00:00:00.000/2012-01-04T00:00:00.000"),
-        Intervals.of("2012-01-05T00:00:00.000/2012-01-10T00:00:00.000"),
-        Intervals.of("2012-01-16T00:00:00.000/2012-01-18T00:00:00.000"),
-        Intervals.of("2012-01-19T00:00:00.000/2012-01-20T00:00:00.000")
-    );
-
-    List result = MaterializedViewUtils.minus(intervalList2, intervalList1);
-    Assert.assertEquals(
-        Lists.newArrayList(
-            Intervals.of("2012-01-01T00:00:00.000/2012-01-02T00:00:00.000"),
-            Intervals.of("2012-01-03T00:00:00.000/2012-01-04T00:00:00.000"),
-            Intervals.of("2012-01-05T00:00:00.000/2012-01-08T00:00:00.000"),
-            Intervals.of("2012-01-17T00:00:00.000/2012-01-18T00:00:00.000"),
-            Intervals.of("2012-01-19T00:00:00.000/2012-01-20T00:00:00.000")
-        ),
-        result
-    );
-  }
-}
diff --git a/extensions-contrib/momentsketch/pom.xml b/extensions-contrib/momentsketch/pom.xml
deleted file mode 100644
index a0491bfc23c8..000000000000
--- a/extensions-contrib/momentsketch/pom.xml
+++ /dev/null
@@ -1,123 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>druid</artifactId>
-    <groupId>org.apache.druid</groupId>
-    <version>0.19.0-iap2-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-
-  <groupId>org.apache.druid.extensions.contrib</groupId>
-  <artifactId>druid-momentsketch</artifactId>
-  <name>druid-momentsketch</name>
-  <description>Aggregators for the approximate quantile moment sketch</description>
-
-  <dependencies>
-    <dependency>
-      <groupId>com.github.stanford-futuredata.momentsketch</groupId>
-      <artifactId>momentsketch-solver</artifactId>
-      <version>0.1.1</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>${guava.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.findbugs</groupId>
-      <artifactId>jsr305</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-core</artifactId>
-      <scope>provided</scope>
-    </dependency>
-
-    <!-- Tests -->
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>nl.jqno.equalsverifier</groupId>
-      <artifactId>equalsverifier</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-server</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-</project>
\ No newline at end of file
diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchComplexMetricSerde.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchComplexMetricSerde.java
deleted file mode 100644
index 4e631e53525b..000000000000
--- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchComplexMetricSerde.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch; - -import org.apache.druid.data.input.InputRow; -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchAggregatorFactory; -import org.apache.druid.segment.GenericColumnSerializer; -import org.apache.druid.segment.column.ColumnBuilder; -import org.apache.druid.segment.data.GenericIndexed; -import org.apache.druid.segment.data.ObjectStrategy; -import org.apache.druid.segment.serde.ComplexColumnPartSupplier; -import org.apache.druid.segment.serde.ComplexMetricExtractor; -import org.apache.druid.segment.serde.ComplexMetricSerde; -import org.apache.druid.segment.serde.LargeColumnSupportedComplexColumnSerializer; -import org.apache.druid.segment.writeout.SegmentWriteOutMedium; - -import java.nio.ByteBuffer; - -public class MomentSketchComplexMetricSerde extends ComplexMetricSerde -{ - private static final MomentSketchObjectStrategy STRATEGY = new MomentSketchObjectStrategy(); - - @Override - public String getTypeName() - { - return MomentSketchAggregatorFactory.TYPE_NAME; - } - - @Override - public ComplexMetricExtractor getExtractor() - { - return new ComplexMetricExtractor() - { - @Override - public Class extractedClass() - { - return MomentSketchWrapper.class; - } - - @Override - public Object extractValue(final InputRow inputRow, final String metricName) - { - return (MomentSketchWrapper) inputRow.getRaw(metricName); - } - }; - } - - @Override - public void deserializeColumn(ByteBuffer buffer, ColumnBuilder builder) - { - final GenericIndexed column = GenericIndexed.read( - buffer, - STRATEGY, - builder.getFileMapper() - ); - builder.setComplexColumnSupplier(new ComplexColumnPartSupplier(getTypeName(), column)); - } - - @Override - public ObjectStrategy getObjectStrategy() - { - return STRATEGY; - } - - @Override - public GenericColumnSerializer getSerializer(SegmentWriteOutMedium segmentWriteOutMedium, String column) - { - return LargeColumnSupportedComplexColumnSerializer.create( - segmentWriteOutMedium, - column, - this.getObjectStrategy() - ); - } - -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchJsonSerializer.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchJsonSerializer.java deleted file mode 100644 index 268c4017a7a5..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchJsonSerializer.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.JsonSerializer; -import com.fasterxml.jackson.databind.SerializerProvider; - -import java.io.IOException; - -public class MomentSketchJsonSerializer extends JsonSerializer -{ - @Override - public void serialize( - MomentSketchWrapper momentsSketch, - JsonGenerator jsonGenerator, - SerializerProvider serializerProvider - ) throws IOException - { - jsonGenerator.writeBinary(momentsSketch.toByteArray()); - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchModule.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchModule.java deleted file mode 100644 index 65c655bf811f..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchModule.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
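
MomentSketchJsonSerializer above writes the sketch out as its raw byte array, which Jackson base64-encodes into a JSON string; the aggregator factory's deserialize() (later in this patch) reverses that by decoding base64 strings back into sketch bytes. A self-contained illustration of the writeBinary round trip, using a hypothetical Payload type rather than the extension's own classes:

    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.JsonSerializer;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.SerializerProvider;
    import com.fasterxml.jackson.databind.module.SimpleModule;

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class BinaryFieldJsonDemo
    {
      // Hypothetical sketch-like value that serializes to a byte array.
      static class Payload
      {
        byte[] toByteArray()
        {
          return "power-sums".getBytes(StandardCharsets.UTF_8);
        }
      }

      static class PayloadSerializer extends JsonSerializer<Payload>
      {
        @Override
        public void serialize(Payload value, JsonGenerator gen, SerializerProvider provider) throws IOException
        {
          // writeBinary base64-encodes the bytes into a JSON string.
          gen.writeBinary(value.toByteArray());
        }
      }

      public static void main(String[] args) throws IOException
      {
        ObjectMapper mapper = new ObjectMapper()
            .registerModule(new SimpleModule().addSerializer(Payload.class, new PayloadSerializer()));
        // Prints a base64 JSON string, e.g. "cG93ZXItc3Vtcw=="
        System.out.println(mapper.writeValueAsString(new Payload()));
      }
    }
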
- */ - -package org.apache.druid.query.aggregation.momentsketch; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchAggregatorFactory; -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchMaxPostAggregator; -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchMergeAggregatorFactory; -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchMinPostAggregator; -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchQuantilePostAggregator; -import org.apache.druid.segment.serde.ComplexMetrics; - -import java.util.List; - -/** - * Module defining aggregators for the moments approximate quantiles sketch - * @see MomentSketchAggregatorFactory - */ -public class MomentSketchModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule( - getClass().getSimpleName() - ).registerSubtypes( - new NamedType( - MomentSketchAggregatorFactory.class, - MomentSketchAggregatorFactory.TYPE_NAME - ), - new NamedType( - MomentSketchMergeAggregatorFactory.class, - MomentSketchMergeAggregatorFactory.TYPE_NAME - ), - new NamedType( - MomentSketchQuantilePostAggregator.class, - MomentSketchQuantilePostAggregator.TYPE_NAME - ), - new NamedType( - MomentSketchMinPostAggregator.class, - MomentSketchMinPostAggregator.TYPE_NAME - ), - new NamedType( - MomentSketchMaxPostAggregator.class, - MomentSketchMaxPostAggregator.TYPE_NAME - ) - ).addSerializer(MomentSketchWrapper.class, new MomentSketchJsonSerializer()) - ); - } - - @Override - public void configure(Binder binder) - { - registerSerde(); - } - - @VisibleForTesting - public static void registerSerde() - { - ComplexMetrics.registerSerde(MomentSketchAggregatorFactory.TYPE_NAME, new MomentSketchComplexMetricSerde()); - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchObjectStrategy.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchObjectStrategy.java deleted file mode 100644 index 7a706f90399b..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchObjectStrategy.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch; - -import org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchAggregatorFactory; -import org.apache.druid.segment.data.ObjectStrategy; - -import javax.annotation.Nullable; -import java.nio.ByteBuffer; - -public class MomentSketchObjectStrategy implements ObjectStrategy -{ - private static final byte[] EMPTY_BYTES = new byte[0]; - - @Override - public Class getClazz() - { - return MomentSketchWrapper.class; - } - - @Override - public MomentSketchWrapper fromByteBuffer(ByteBuffer buffer, int numBytes) - { - if (numBytes == 0) { - return null; - } - buffer.limit(buffer.position() + numBytes); - return MomentSketchWrapper.fromBytes(buffer); - } - - @Override - public byte[] toBytes(@Nullable MomentSketchWrapper val) - { - if (val == null) { - return EMPTY_BYTES; - } - return val.toByteArray(); - } - - @Override - public int compare(MomentSketchWrapper o1, MomentSketchWrapper o2) - { - return MomentSketchAggregatorFactory.COMPARATOR.compare(o1, o2); - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapper.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapper.java deleted file mode 100644 index 41ecf30fb845..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapper.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch; - -import com.github.stanfordfuturedata.momentsketch.MomentSolver; -import com.github.stanfordfuturedata.momentsketch.MomentStruct; - -import java.nio.ByteBuffer; - -/** - * Class for wrapping the operations of the moments sketch for use in - * the moment sketch aggregator - * {@link org.apache.druid.query.aggregation.momentsketch.aggregator.MomentSketchAggregatorFactory}. - * - * k controls the size and accuracy provided by the sketch. - * The sinh function is used to compress the range of data to allow for more robust results - * on skewed and long-tailed metrics, but slightly reducing accuracy on metrics with more uniform - * distributions. - */ -public class MomentSketchWrapper -{ - // The MomentStruct object stores the relevant statistics about a metric distribution. 
- protected MomentStruct data; - // Whether we use arcsinh to compress the range - protected boolean useArcSinh = true; - - public MomentSketchWrapper(int k) - { - data = new MomentStruct(k); - } - - public MomentSketchWrapper(MomentStruct data) - { - this.data = data; - } - - public void setCompressed(boolean flag) - { - useArcSinh = flag; - } - - public boolean getCompressed() - { - return useArcSinh; - } - - public int getK() - { - return data.power_sums.length; - } - - public double[] getPowerSums() - { - return data.power_sums; - } - - public double getMin() - { - if (useArcSinh) { - return Math.sinh(data.min); - } else { - return data.min; - } - } - - public double getMax() - { - if (useArcSinh) { - return Math.sinh(data.max); - } else { - return data.max; - } - } - - public void add(double rawX) - { - double x = rawX; - if (useArcSinh) { - // Since Java does not have a native arcsinh implementation we - // compute it manually using the following formula. - // This is the inverse operation of Math.sinh - x = Math.log(rawX + Math.sqrt(1 + rawX * rawX)); - } - data.add(x); - } - - public void merge(MomentSketchWrapper other) - { - data.merge(other.data); - } - - public byte[] toByteArray() - { - ByteBuffer bb = ByteBuffer.allocate(2 * Integer.BYTES + (data.power_sums.length + 2) * Double.BYTES); - return toBytes(bb).array(); - } - - public MomentSolver getSolver() - { - MomentSolver ms = new MomentSolver(data); - return ms; - } - - /** - * Estimates quantiles given the statistics in a moments sketch. - * @param fractions real values between [0,1] for which we want to estimate quantiles - * - * @return estimated quantiles. - */ - public double[] getQuantiles(double[] fractions) - { - // The solver attempts to construct a distribution estimate which matches the - // statistics tracked by the moments sketch. We can then read off quantile estimates - // from the reconstructed distribution. - // This operation can be relatively expensive (~1 ms) so we set the parameters from distribution - // reconstruction to conservative values. - MomentSolver ms = new MomentSolver(data); - // Constants here are chosen to yield maximum precision while keeping solve times ~1ms on 2Ghz cpu - // Grid size can be increased if longer solve times are acceptable - ms.setGridSize(1024); - ms.setMaxIter(15); - ms.solve(); - double[] rawQuantiles = ms.getQuantiles(fractions); - for (int i = 0; i < fractions.length; i++) { - if (useArcSinh) { - rawQuantiles[i] = Math.sinh(rawQuantiles[i]); - } - } - return rawQuantiles; - } - - public ByteBuffer toBytes(ByteBuffer bb) - { - int compressedInt = getCompressed() ? 
1 : 0; - bb.putInt(data.power_sums.length); - bb.putInt(compressedInt); - bb.putDouble(data.min); - bb.putDouble(data.max); - for (double x : data.power_sums) { - bb.putDouble(x); - } - return bb; - } - - public static MomentSketchWrapper fromBytes(ByteBuffer bb) - { - int k = bb.getInt(); - int compressedInt = bb.getInt(); - boolean compressed = (compressedInt > 0); - MomentStruct m = new MomentStruct(k); - m.min = bb.getDouble(); - m.max = bb.getDouble(); - for (int i = 0; i < k; i++) { - m.power_sums[i] = bb.getDouble(); - } - MomentSketchWrapper mw = new MomentSketchWrapper(m); - mw.setCompressed(compressed); - return mw; - } - - public static MomentSketchWrapper fromByteArray(byte[] input) - { - ByteBuffer bb = ByteBuffer.wrap(input); - return fromBytes(bb); - } - - @Override - public String toString() - { - return data.toString(); - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactory.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactory.java deleted file mode 100644 index 951c532e4a7d..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactory.java +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.AggregatorFactoryNotMergeableException; -import org.apache.druid.query.aggregation.AggregatorUtil; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.query.cache.CacheKeyBuilder; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.apache.druid.segment.column.ColumnCapabilities; -import org.apache.druid.segment.column.ValueType; - -import javax.annotation.Nullable; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -/** - * Aggregation operations over the moment-based quantile sketch - * available on github and described - * in the paper Moment-based quantile sketches. 
- * - * This sketch stores a set of (k) statistics about univariate metrics that can be used to - * solve for approximate quantiles of the original distribution at query time after aggregating - * the statistics. - */ -public class MomentSketchAggregatorFactory extends AggregatorFactory -{ - // Default number of moments (k) chosen for ~1% quantile error. - public static final int DEFAULT_K = 13; - // Safer to compress data with unknown ranges by default, but reduces accuracy on uniform data - public static final boolean DEFAULT_COMPRESS = true; - - private final String name; - private final String fieldName; - // Number of moments tracked. Larger k allows for better estimates but greater resource usage - private final int k; - // Controls whether or not data is compressed onto a smaller range using arcsinh - private final boolean compress; - private final byte cacheTypeId; - - public static final String TYPE_NAME = "momentSketch"; - - @JsonCreator - public MomentSketchAggregatorFactory( - @JsonProperty("name") final String name, - @JsonProperty("fieldName") final String fieldName, - @JsonProperty("k") @Nullable final Integer k, - @JsonProperty("compress") @Nullable final Boolean compress - ) - { - this(name, fieldName, k, compress, AggregatorUtil.MOMENTS_SKETCH_BUILD_CACHE_TYPE_ID); - } - - MomentSketchAggregatorFactory( - final String name, - final String fieldName, - @Nullable final Integer k, - @Nullable final Boolean compress, - final byte cacheTypeId - ) - { - Objects.requireNonNull(name, "Must have a valid, non-null aggregator name"); - this.name = name; - Objects.requireNonNull(fieldName, "Parameter fieldName must be specified"); - this.fieldName = fieldName; - this.k = k == null ? DEFAULT_K : k; - this.compress = compress == null ? DEFAULT_COMPRESS : compress; - this.cacheTypeId = cacheTypeId; - } - - - @Override - public byte[] getCacheKey() - { - return new CacheKeyBuilder( - cacheTypeId - ).appendString(fieldName).appendInt(k).appendBoolean(compress).build(); - } - - - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) - { - ColumnCapabilities cap = metricFactory.getColumnCapabilities(fieldName); - if (cap == null || ValueType.isNumeric(cap.getType())) { - return new MomentSketchBuildAggregator(metricFactory.makeColumnValueSelector(fieldName), k, getCompress()); - } else { - return new MomentSketchMergeAggregator(metricFactory.makeColumnValueSelector(fieldName), k, getCompress()); - } - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - ColumnCapabilities cap = metricFactory.getColumnCapabilities(fieldName); - if (cap == null || ValueType.isNumeric(cap.getType())) { - return new MomentSketchBuildBufferAggregator(metricFactory.makeColumnValueSelector(fieldName), k, getCompress()); - } else { - return new MomentSketchMergeBufferAggregator(metricFactory.makeColumnValueSelector(fieldName), k, getCompress()); - } - } - - public static final Comparator COMPARATOR = Comparator.nullsFirst( - Comparator.comparingDouble(a -> a.getPowerSums()[0]) - ); - - @Override - public Comparator getComparator() - { - return COMPARATOR; - } - - @Override - public Object combine(@Nullable Object lhs, @Nullable Object rhs) - { - if (lhs == null) { - return rhs; - } - if (rhs == null) { - return lhs; - } - MomentSketchWrapper union = (MomentSketchWrapper) lhs; - union.merge((MomentSketchWrapper) rhs); - return union; - } - - @Override - public AggregatorFactory getCombiningFactory() - { - return new 
MomentSketchMergeAggregatorFactory(name, k, compress); - } - - @Override - public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException - { - if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { - return getCombiningFactory(); - } else { - throw new AggregatorFactoryNotMergeableException(this, other); - } - } - - @Override - public List getRequiredColumns() - { - return Collections.singletonList( - new MomentSketchAggregatorFactory( - fieldName, - fieldName, - k, - compress - ) - ); - } - - private MomentSketchWrapper deserializeFromByteArray(byte[] bytes) - { - return MomentSketchWrapper.fromByteArray(bytes); - } - - @Override - public Object deserialize(Object serializedSketch) - { - if (serializedSketch instanceof String) { - String str = (String) serializedSketch; - return deserializeFromByteArray(StringUtils.decodeBase64(StringUtils.toUtf8(str))); - } else if (serializedSketch instanceof byte[]) { - return deserializeFromByteArray((byte[]) serializedSketch); - } else if (serializedSketch instanceof MomentSketchWrapper) { - return serializedSketch; - } - throw new ISE( - "Object cannot be deserialized to a Moments Sketch: " - + serializedSketch.getClass() - ); - } - - @Nullable - @Override - public Object finalizeComputation(@Nullable Object object) - { - return object; - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public String getFieldName() - { - return fieldName; - } - - @JsonProperty - public int getK() - { - return k; - } - - @JsonProperty - public boolean getCompress() - { - return compress; - } - - @Override - public List requiredFields() - { - return Collections.singletonList(fieldName); - } - - @Override - public String getTypeName() - { - return TYPE_NAME; - } - - @Override - public int getMaxIntermediateSize() - { - // k double precision moments, 2 doubles for the min and max - // one integer to specify the number of moments - // one integer to specify whether data range is compressed - return (k + 2) * Double.BYTES + 2 * Integer.BYTES; - } - - @Override - public boolean equals(final Object o) - { - if (this == o) { - return true; - } - if (o == null || !getClass().equals(o.getClass())) { - return false; - } - final MomentSketchAggregatorFactory that = (MomentSketchAggregatorFactory) o; - - return Objects.equals(name, that.name) && - Objects.equals(fieldName, that.fieldName) && - k == that.k && - compress == that.compress; - } - - @Override - public int hashCode() - { - return Objects.hash(name, fieldName, k, compress); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" - + "name=" + name - + ", fieldName=" + fieldName - + ", k=" + k - + ", compress=" + compress - + "}"; - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildAggregator.java deleted file mode 100644 index aa779cadf4a7..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildAggregator.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.segment.BaseDoubleColumnValueSelector; - -public class MomentSketchBuildAggregator implements Aggregator -{ - private final BaseDoubleColumnValueSelector valueSelector; - private final int k; - private final boolean compress; - - private MomentSketchWrapper momentsSketch; - - public MomentSketchBuildAggregator( - final BaseDoubleColumnValueSelector valueSelector, - final int k, - final boolean compress - ) - { - this.valueSelector = valueSelector; - this.k = k; - this.compress = compress; - momentsSketch = new MomentSketchWrapper(k); - momentsSketch.setCompressed(compress); - } - - @Override - public void aggregate() - { - if (valueSelector.isNull()) { - return; - } - momentsSketch.add(valueSelector.getDouble()); - } - - @Override - public Object get() - { - return momentsSketch; - } - - @Override - public float getFloat() - { - throw new UnsupportedOperationException("not implemented"); - } - - @Override - public long getLong() - { - throw new UnsupportedOperationException("not implemented"); - } - - @Override - public Aggregator clone() - { - return new MomentSketchBuildAggregator(valueSelector, k, compress); - } - - @Override - public void close() - { - momentsSketch = null; - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildBufferAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildBufferAggregator.java deleted file mode 100644 index 79e88cc67ae8..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchBuildBufferAggregator.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
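
The buffer aggregator that follows keeps the sketch serialized inside the shared ByteBuffer, using the layout written by MomentSketchWrapper.toBytes(): an int k, an int compression flag, min, max, then k power sums. The size arithmetic matches getMaxIntermediateSize() in the factory above. A standalone sketch of that layout with sample values (simplified, no solver dependency):

    import java.nio.ByteBuffer;

    public class MomentLayoutDemo
    {
      public static void main(String[] args)
      {
        int k = 13;
        // Mirrors getMaxIntermediateSize(): k power sums + min + max as doubles,
        // plus two ints for k and the compression flag.
        int size = (k + 2) * Double.BYTES + 2 * Integer.BYTES;
        ByteBuffer buf = ByteBuffer.allocate(size);

        // Write in the same order as MomentSketchWrapper.toBytes().
        buf.putInt(k);        // number of power sums
        buf.putInt(1);        // compression flag (1 = arcsinh compression on)
        buf.putDouble(-2.5);  // min (sample value)
        buf.putDouble(7.25);  // max (sample value)
        for (int i = 0; i < k; i++) {
          buf.putDouble(0.0); // power sums; a fresh sketch starts at zero
        }

        // Read back in the same order, as fromBytes() does.
        buf.flip();
        System.out.println("k=" + buf.getInt() + " compressed=" + (buf.getInt() > 0)
                           + " min=" + buf.getDouble() + " max=" + buf.getDouble());
      }
    }
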
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.segment.BaseDoubleColumnValueSelector; - -import java.nio.ByteBuffer; - -public class MomentSketchBuildBufferAggregator implements BufferAggregator -{ - private final BaseDoubleColumnValueSelector selector; - private final int k; - private final boolean compress; - - public MomentSketchBuildBufferAggregator( - final BaseDoubleColumnValueSelector valueSelector, - final int k, - final boolean compress - ) - { - this.selector = valueSelector; - this.k = k; - this.compress = compress; - } - - @Override - public synchronized void init(final ByteBuffer buffer, final int position) - { - ByteBuffer mutationBuffer = buffer.duplicate(); - mutationBuffer.position(position); - - MomentSketchWrapper emptyStruct = new MomentSketchWrapper(k); - emptyStruct.setCompressed(compress); - emptyStruct.toBytes(mutationBuffer); - } - - @Override - public synchronized void aggregate(final ByteBuffer buffer, final int position) - { - if (selector.isNull()) { - return; - } - ByteBuffer mutationBuffer = buffer.duplicate(); - mutationBuffer.position(position); - - MomentSketchWrapper ms0 = MomentSketchWrapper.fromBytes(mutationBuffer); - double x = selector.getDouble(); - ms0.add(x); - - mutationBuffer.position(position); - ms0.toBytes(mutationBuffer); - } - - @Override - public synchronized Object get(final ByteBuffer buffer, final int position) - { - ByteBuffer mutationBuffer = buffer.duplicate(); - mutationBuffer.position(position); - return MomentSketchWrapper.fromBytes(mutationBuffer); - } - - @Override - public float getFloat(final ByteBuffer buffer, final int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public long getLong(final ByteBuffer buffer, final int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public void close() - { - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregator.java deleted file mode 100644 index b7fe1f812708..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregator.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Doubles; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.query.aggregation.post.PostAggregatorIds; -import org.apache.druid.query.cache.CacheKeyBuilder; - -import java.util.Comparator; -import java.util.Map; -import java.util.Set; - -public class MomentSketchMaxPostAggregator implements PostAggregator -{ - private final String name; - private final PostAggregator field; - - public static final String TYPE_NAME = "momentSketchMax"; - - @JsonCreator - public MomentSketchMaxPostAggregator( - @JsonProperty("name") final String name, - @JsonProperty("field") final PostAggregator field - ) - { - this.name = Preconditions.checkNotNull(name, "name is null"); - this.field = Preconditions.checkNotNull(field, "field is null"); - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public PostAggregator getField() - { - return field; - } - - @Override - public Object compute(final Map combinedAggregators) - { - final MomentSketchWrapper sketch = (MomentSketchWrapper) field.compute(combinedAggregators); - return sketch.getMax(); - } - - @Override - public Comparator getComparator() - { - return Doubles::compare; - } - - @Override - public Set getDependentFields() - { - return field.getDependentFields(); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" + - "name='" + name + '\'' + - ", field=" + field + - "}"; - } - - @Override - public boolean equals(final Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final MomentSketchMaxPostAggregator that = (MomentSketchMaxPostAggregator) o; - if (!name.equals(that.name)) { - return false; - } - return field.equals(that.field); - } - - @Override - public int hashCode() - { - return (name.hashCode() * 31 + field.hashCode()); - } - - @Override - public byte[] getCacheKey() - { - final CacheKeyBuilder builder = new CacheKeyBuilder( - PostAggregatorIds.MOMENTS_SKETCH_TO_MAX_CACHE_TYPE_ID - ).appendCacheable(field); - return builder.build(); - } - - @Override - public PostAggregator decorate(final Map map) - { - return this; - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregator.java deleted file mode 100644 index c03dd369065c..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregator.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.segment.ColumnValueSelector; - -public class MomentSketchMergeAggregator implements Aggregator -{ - private final ColumnValueSelector selector; - private MomentSketchWrapper momentsSketch; - - public MomentSketchMergeAggregator( - ColumnValueSelector selector, - final int k, - final boolean compress - ) - { - this.selector = selector; - this.momentsSketch = new MomentSketchWrapper(k); - momentsSketch.setCompressed(compress); - } - - @Override - public void aggregate() - { - final MomentSketchWrapper sketch = selector.getObject(); - if (sketch == null) { - return; - } - this.momentsSketch.merge(sketch); - } - - @Override - public Object get() - { - return momentsSketch; - } - - @Override - public float getFloat() - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public long getLong() - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public void close() - { - momentsSketch = null; - } -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregatorFactory.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregatorFactory.java deleted file mode 100644 index 274445897984..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeAggregatorFactory.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.AggregatorUtil; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.apache.druid.segment.ColumnValueSelector; - -public class MomentSketchMergeAggregatorFactory extends MomentSketchAggregatorFactory -{ - public static final String TYPE_NAME = "momentSketchMerge"; - - @JsonCreator - public MomentSketchMergeAggregatorFactory( - @JsonProperty("name") final String name, - @JsonProperty("k") final Integer k, - @JsonProperty("compress") final Boolean compress - ) - { - super(name, name, k, compress, AggregatorUtil.MOMENTS_SKETCH_MERGE_CACHE_TYPE_ID); - } - - @Override - public Aggregator factorize(final ColumnSelectorFactory metricFactory) - { - final ColumnValueSelector selector = metricFactory.makeColumnValueSelector( - getFieldName()); - return new MomentSketchMergeAggregator(selector, getK(), getCompress()); - } - - @Override - public BufferAggregator factorizeBuffered(final ColumnSelectorFactory metricFactory) - { - final ColumnValueSelector selector = metricFactory.makeColumnValueSelector( - getFieldName() - ); - return new MomentSketchMergeBufferAggregator(selector, getK(), getCompress()); - } - -} diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java deleted file mode 100644 index 505d1ebed97d..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java deleted file mode 100644 index 505d1ebed97d..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMergeBufferAggregator.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import org.apache.druid.segment.ColumnValueSelector; - -import java.nio.ByteBuffer; - -public class MomentSketchMergeBufferAggregator implements BufferAggregator -{ - private final ColumnValueSelector<MomentSketchWrapper> selector; - private final int size; - private final boolean compress; - - public MomentSketchMergeBufferAggregator( - ColumnValueSelector<MomentSketchWrapper> selector, - int size, - boolean compress - ) - { - this.selector = selector; - this.size = size; - this.compress = compress; - } - - @Override - public void init(ByteBuffer buf, int position) - { - MomentSketchWrapper h = new MomentSketchWrapper(size); - h.setCompressed(compress); - - ByteBuffer mutationBuffer = buf.duplicate(); - mutationBuffer.position(position); - h.toBytes(mutationBuffer); - } - - @Override - public void aggregate(ByteBuffer buf, int position) - { - MomentSketchWrapper msNext = selector.getObject(); - if (msNext == null) { - return; - } - ByteBuffer mutationBuffer = buf.duplicate(); - mutationBuffer.position(position); - - MomentSketchWrapper ms0 = MomentSketchWrapper.fromBytes(mutationBuffer); - ms0.merge(msNext); - - mutationBuffer.position(position); - ms0.toBytes(mutationBuffer); - } - - @Override - public Object get(ByteBuffer buf, int position) - { - ByteBuffer mutationBuffer = buf.asReadOnlyBuffer(); - mutationBuffer.position(position); - return MomentSketchWrapper.fromBytes(mutationBuffer); - } - - @Override - public float getFloat(ByteBuffer buf, int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public long getLong(ByteBuffer buf, int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public double getDouble(ByteBuffer buf, int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public void close() - { - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("selector", selector); - } -}
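The duplicate()/position() idiom in the buffer aggregator above is the standard way to address one aggregator's slot inside a shared query buffer without disturbing the shared buffer's own cursor. A self-contained java.nio illustration of the same pattern:

    import java.nio.ByteBuffer;

    public class PositionalBufferDemo
    {
      public static void main(String[] args)
      {
        ByteBuffer shared = ByteBuffer.allocate(64);
        int slotA = 0;
        int slotB = 8;
        // Write through duplicates so the shared buffer's cursor is untouched,
        // the same idiom MomentSketchMergeBufferAggregator#init uses.
        ByteBuffer dup = shared.duplicate();
        dup.position(slotA);
        dup.putLong(42L);
        dup = shared.duplicate();
        dup.position(slotB);
        dup.putLong(7L);
        // Read through a read-only view, as in #get.
        ByteBuffer ro = shared.asReadOnlyBuffer();
        ro.position(slotB);
        System.out.println(ro.getLong());      // 7
        System.out.println(shared.position()); // still 0
      }
    }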
diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregator.java deleted file mode 100644 index 2c454405768b..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregator.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Doubles; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.query.aggregation.post.PostAggregatorIds; -import org.apache.druid.query.cache.CacheKeyBuilder; - -import java.util.Comparator; -import java.util.Map; -import java.util.Set; - -public class MomentSketchMinPostAggregator implements PostAggregator -{ - private final String name; - private final PostAggregator field; - public static final String TYPE_NAME = "momentSketchMin"; - - @JsonCreator - public MomentSketchMinPostAggregator( - @JsonProperty("name") final String name, - @JsonProperty("field") final PostAggregator field - ) - { - this.name = Preconditions.checkNotNull(name, "name is null"); - this.field = Preconditions.checkNotNull(field, "field is null"); - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public PostAggregator getField() - { - return field; - } - - @Override - public Object compute(final Map<String, Object> combinedAggregators) - { - final MomentSketchWrapper sketch = (MomentSketchWrapper) field.compute(combinedAggregators); - return sketch.getMin(); - } - - @Override - public Comparator<Double> getComparator() - { - return Doubles::compare; - } - - @Override - public Set<String> getDependentFields() - { - return field.getDependentFields(); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" + - "name='" + name + '\'' + - ", field=" + field + - "}"; - } - - @Override - public boolean equals(final Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final MomentSketchMinPostAggregator that = (MomentSketchMinPostAggregator) o; - if (!name.equals(that.name)) { - return false; - } - return field.equals(that.field); - } - - @Override - public int hashCode() - { - return (name.hashCode() * 31 + field.hashCode()); - } - - @Override - public byte[] getCacheKey() - { - final CacheKeyBuilder builder = new CacheKeyBuilder( - PostAggregatorIds.MOMENTS_SKETCH_TO_MIN_CACHE_TYPE_ID - ).appendCacheable(field); - return builder.build(); - } - - @Override - public PostAggregator decorate(final Map<String, AggregatorFactory> map) - { - return this; - } -}
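A hedged sketch of how a post-aggregator such as the momentSketchMin above consumes finished aggregator outputs by name at query time. FieldAccessPostAggregator is Druid's stock field-access post-aggregator; the example assumes the deleted classes on the classpath and is not part of the deleted sources.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.druid.query.aggregation.PostAggregator;
    import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper;
    import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;

    // Assumes the same package as the deleted post-aggregator (or an import for it).
    public class MinPostAggExample
    {
      public static void main(String[] args)
      {
        MomentSketchWrapper sketch = new MomentSketchWrapper(10);
        sketch.add(1.0);
        sketch.add(2.0);

        // Keyed by the aggregator's output name, as in a query result row.
        Map<String, Object> combined = new HashMap<>();
        combined.put("sketch", sketch);

        PostAggregator min = new MomentSketchMinPostAggregator(
            "min",
            new FieldAccessPostAggregator("sketch", "sketch")
        );
        System.out.println(min.compute(combined)); // ~1.0
      }
    }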
diff --git a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregator.java b/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregator.java deleted file mode 100644 index 81f5b63acf08..000000000000 --- a/extensions-contrib/momentsketch/src/main/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregator.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.query.aggregation.post.PostAggregatorIds; -import org.apache.druid.query.cache.CacheKeyBuilder; - -import java.util.Arrays; -import java.util.Comparator; -import java.util.Map; -import java.util.Set; - -public class MomentSketchQuantilePostAggregator implements PostAggregator -{ - private final String name; - private final PostAggregator field; - private final double[] fractions; - - public static final String TYPE_NAME = "momentSketchSolveQuantiles"; - - @JsonCreator - public MomentSketchQuantilePostAggregator( - @JsonProperty("name") final String name, - @JsonProperty("field") final PostAggregator field, - @JsonProperty("fractions") final double[] fractions - ) - { - this.name = Preconditions.checkNotNull(name, "name is null"); - this.field = Preconditions.checkNotNull(field, "field is null"); - this.fractions = Preconditions.checkNotNull(fractions, "array of fractions is null"); - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public PostAggregator getField() - { - return field; - } - - @JsonProperty - public double[] getFractions() - { - return fractions; - } - - @Override - public Object compute(final Map<String, Object> combinedAggregators) - { - final MomentSketchWrapper sketch = (MomentSketchWrapper) field.compute(combinedAggregators); - double[] quantiles = sketch.getQuantiles(fractions); - return quantiles; - } - - @Override - public Comparator getComparator() - { - throw new IAE("Comparing arrays of quantiles is not supported"); - } - - @Override - public Set<String> getDependentFields() - { - return field.getDependentFields(); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" + - "name='" + name + '\'' + - ", field=" + field + - ", fractions=" + Arrays.toString(fractions) + - "}"; - } - - @Override - public boolean equals(final Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final MomentSketchQuantilePostAggregator that = (MomentSketchQuantilePostAggregator) o; - if (!name.equals(that.name)) { - return false; - } - if (!Arrays.equals(fractions, that.fractions)) { - return false; - } - return field.equals(that.field); - } - - @Override - public int hashCode() - { - return (name.hashCode() * 31 + field.hashCode()) * 31 + Arrays.hashCode(fractions); - } - - @Override - public byte[] getCacheKey() - { - final CacheKeyBuilder builder = new CacheKeyBuilder( - PostAggregatorIds.MOMENTS_SKETCH_TO_QUANTILES_CACHE_TYPE_ID - ) - .appendCacheable(field) - .appendDoubleArray(fractions); - return builder.build(); - } - - @Override - public PostAggregator decorate(final Map<String, AggregatorFactory> map) - { - return this; - } - -}
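The META-INF/services entry deleted just below is how Druid discovers DruidModule implementations at startup: java.util.ServiceLoader scans every file with that name on the classpath, so removing the file unregisters MomentSketchModule. A minimal illustration of the mechanism (assuming druid-core on the classpath):

    import java.util.ServiceLoader;
    import org.apache.druid.initialization.DruidModule;

    public class ListModules
    {
      public static void main(String[] args)
      {
        // Instantiates every module listed in a
        // META-INF/services/org.apache.druid.initialization.DruidModule file.
        for (DruidModule module : ServiceLoader.load(DruidModule.class)) {
          System.out.println(module.getClass().getName());
        }
      }
    }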
diff --git a/extensions-contrib/momentsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/momentsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 8fb117369b01..000000000000 --- a/extensions-contrib/momentsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.query.aggregation.momentsketch.MomentSketchModule diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java deleted file mode 100644 index 75b2abb7ad13..000000000000 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.druid.query.aggregation.momentsketch; - -import org.junit.Assert; -import org.junit.Test; - -public class MomentSketchWrapperTest -{ - @Test - public void testDeserialize() - { - MomentSketchWrapper mw = new MomentSketchWrapper(10); - mw.setCompressed(false); - mw.add(10); - byte[] bs = mw.toByteArray(); - MomentSketchWrapper mw2 = MomentSketchWrapper.fromByteArray(bs); - - Assert.assertEquals(10, mw2.getPowerSums()[1], 1e-10); - } - - @Test - public void testSimpleSolve() - { - MomentSketchWrapper mw = new MomentSketchWrapper(13); - mw.setCompressed(true); - for (int x = 0; x < 101; x++) { - mw.add((double) x); - } - double[] ps = {0.0, 0.5, 1.0}; - double[] qs = mw.getQuantiles(ps); - Assert.assertEquals(0, qs[0], 1.0); - Assert.assertEquals(50, qs[1], 1.0); - } -} diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactorySerdeTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactorySerdeTest.java deleted file mode 100644 index 7c00ea80be2a..000000000000 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchAggregatorFactorySerdeTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Test; - -public class MomentSketchAggregatorFactorySerdeTest -{ - @Test - public void serializeDeserializeFactoryWithFieldName() throws Exception - { - ObjectMapper objectMapper = new DefaultObjectMapper(); - MomentSketchAggregatorFactory factory = new MomentSketchAggregatorFactory( - "name", "fieldName", 128, true - ); - - MomentSketchAggregatorFactory other = objectMapper.readValue( - objectMapper.writeValueAsString(factory), - MomentSketchAggregatorFactory.class - ); - - Assert.assertEquals(factory, other); - } -} diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregatorTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregatorTest.java deleted file mode 100644 index 83e1aab0d4fa..000000000000 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMaxPostAggregatorTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import nl.jqno.equalsverifier.EqualsVerifier; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.ConstantPostAggregator; -import org.junit.Assert; -import org.junit.Test; - -public class MomentSketchMaxPostAggregatorTest -{ - @Test - public void testSerde() throws Exception - { - MomentSketchMaxPostAggregator there = - new MomentSketchMaxPostAggregator("post", new ConstantPostAggregator("", 100)); - - DefaultObjectMapper mapper = new DefaultObjectMapper(); - MomentSketchMaxPostAggregator andBackAgain = mapper.readValue( - mapper.writeValueAsString(there), - MomentSketchMaxPostAggregator.class - ); - - Assert.assertEquals(there, andBackAgain); - Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey()); - Assert.assertEquals(there.getDependentFields(), andBackAgain.getDependentFields()); - } - - @Test - public void testToString() - { - PostAggregator postAgg = - new MomentSketchMaxPostAggregator("post", new ConstantPostAggregator("", 100)); - - Assert.assertEquals( - "MomentSketchMaxPostAggregator{name='post', field=ConstantPostAggregator{name='', constantValue=100}}", - postAgg.toString() - ); - } - - @Test - public void testEquals() - { - EqualsVerifier.forClass(MomentSketchMaxPostAggregator.class) - .withNonnullFields("name", "field") - .usingGetClass() - .verify(); - } -} diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregatorTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregatorTest.java deleted file mode 100644 index 6613d5f2aef2..000000000000 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchMinPostAggregatorTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import nl.jqno.equalsverifier.EqualsVerifier; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.ConstantPostAggregator; -import org.junit.Assert; -import org.junit.Test; - -public class MomentSketchMinPostAggregatorTest -{ - @Test - public void testSerde() throws Exception - { - MomentSketchMinPostAggregator there = - new MomentSketchMinPostAggregator("post", new ConstantPostAggregator("", 100)); - - DefaultObjectMapper mapper = new DefaultObjectMapper(); - MomentSketchMinPostAggregator andBackAgain = mapper.readValue( - mapper.writeValueAsString(there), - MomentSketchMinPostAggregator.class - ); - - Assert.assertEquals(there, andBackAgain); - Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey()); - Assert.assertEquals(there.getDependentFields(), andBackAgain.getDependentFields()); - } - - @Test - public void testToString() - { - PostAggregator postAgg = - new MomentSketchMinPostAggregator("post", new ConstantPostAggregator("", 100)); - - Assert.assertEquals( - "MomentSketchMinPostAggregator{name='post', field=ConstantPostAggregator{name='', constantValue=100}}", - postAgg.toString() - ); - } - - @Test - public void testEquals() - { - EqualsVerifier.forClass(MomentSketchMinPostAggregator.class) - .withNonnullFields("name", "field") - .usingGetClass() - .verify(); - } -} diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregatorTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregatorTest.java deleted file mode 100644 index bc2179d55d51..000000000000 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentSketchQuantilePostAggregatorTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - -import nl.jqno.equalsverifier.EqualsVerifier; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.ConstantPostAggregator; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class MomentSketchQuantilePostAggregatorTest -{ - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testSerde() throws Exception - { - MomentSketchQuantilePostAggregator there = - new MomentSketchQuantilePostAggregator("post", new ConstantPostAggregator("", 100), new double[]{0.25, 0.75}); - - DefaultObjectMapper mapper = new DefaultObjectMapper(); - MomentSketchQuantilePostAggregator andBackAgain = mapper.readValue( - mapper.writeValueAsString(there), - MomentSketchQuantilePostAggregator.class - ); - - Assert.assertEquals(there, andBackAgain); - Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey()); - Assert.assertEquals(there.getDependentFields(), andBackAgain.getDependentFields()); - } - - @Test - public void testToString() - { - PostAggregator postAgg = - new MomentSketchQuantilePostAggregator("post", new ConstantPostAggregator("", 100), new double[]{0.25, 0.75}); - - Assert.assertEquals( - "MomentSketchQuantilePostAggregator{name='post', field=ConstantPostAggregator{name='', constantValue=100}, fractions=[0.25, 0.75]}", - postAgg.toString() - ); - } - - @Test - public void testComparator() - { - expectedException.expect(IAE.class); - expectedException.expectMessage("Comparing arrays of quantiles is not supported"); - PostAggregator postAgg = - new MomentSketchQuantilePostAggregator("post", new ConstantPostAggregator("", 100), new double[]{0.25, 0.75}); - postAgg.getComparator(); - } - @Test - public void testEquals() - { - EqualsVerifier.forClass(MomentSketchQuantilePostAggregator.class) - .withNonnullFields("name", "field", "fractions") - .usingGetClass() - .verify(); - } -} diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java deleted file mode 100644 index c764620d763f..000000000000 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.momentsketch.aggregator; - - -import org.apache.druid.common.config.NullHandling; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.query.aggregation.AggregationTestHelper; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchModule; -import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; -import org.apache.druid.query.groupby.GroupByQueryConfig; -import org.apache.druid.query.groupby.GroupByQueryRunnerTest; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.io.File; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -@RunWith(Parameterized.class) -public class MomentsSketchAggregatorTest extends InitializedNullHandlingTest -{ - private final boolean hasNulls = !NullHandling.replaceWithDefault(); - private final AggregationTestHelper helper; - - @Rule - public final TemporaryFolder tempFolder = new TemporaryFolder(); - - public MomentsSketchAggregatorTest(final GroupByQueryConfig config) - { - MomentSketchModule.registerSerde(); - DruidModule module = new MomentSketchModule(); - helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( - module.getJacksonModules(), config, tempFolder); - } - - @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() - { - final List constructors = new ArrayList<>(); - for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { - constructors.add(new Object[]{config}); - } - return constructors; - } - - @Test - public void buildingSketchesAtIngestionTime() throws Exception - { - Sequence seq = helper.createIndexAndRunQueryOnSegment( - new File(this.getClass().getClassLoader().getResource("doubles_build_data.tsv").getFile()), - String.join( - "\n", - "{", - " \"type\": \"string\",", - " \"parseSpec\": {", - " \"format\": \"tsv\",", - " \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},", - " \"dimensionsSpec\": {", - " \"dimensions\": [\"product\"],", - " \"dimensionExclusions\": [ \"sequenceNumber\"],", - " \"spatialDimensions\": []", - " },", - " \"columns\": [\"timestamp\", \"sequenceNumber\", \"product\", \"value\", \"valueWithNulls\"]", - " }", - "}" - ), - "[" - + "{\"type\": \"momentSketch\", \"name\": \"sketch\", \"fieldName\": \"value\", \"k\": 10, \"compress\": true}," - + "{\"type\": \"momentSketch\", \"name\": \"sketchWithNulls\", \"fieldName\": \"valueWithNulls\", \"k\": 10, \"compress\": true}" - + "]", - 0, - // minTimestamp - Granularities.NONE, - 10, - // maxRowCount - String.join( - "\n", - "{", - " \"queryType\": \"groupBy\",", - " \"dataSource\": \"test_datasource\",", - " \"granularity\": \"ALL\",", - " \"dimensions\": [],", - " \"aggregations\": [", - " {\"type\": \"momentSketchMerge\", \"name\": \"sketch\", \"fieldName\": \"sketch\", \"k\": 10, \"compress\": true},", - " {\"type\": \"momentSketchMerge\", \"name\": \"sketchWithNulls\", \"fieldName\": \"sketchWithNulls\", \"k\": 10, \"compress\": true}", - " ],", - " \"postAggregations\": [", - " {\"type\": \"momentSketchSolveQuantiles\", \"name\": \"quantiles\", 
\"fractions\": [0, 0.5, 1], \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},", - " {\"type\": \"momentSketchMin\", \"name\": \"min\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},", - " {\"type\": \"momentSketchMax\", \"name\": \"max\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},", - " {\"type\": \"momentSketchSolveQuantiles\", \"name\": \"quantilesWithNulls\", \"fractions\": [0, 0.5, 1], \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketchWithNulls\"}},", - " {\"type\": \"momentSketchMin\", \"name\": \"minWithNulls\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketchWithNulls\"}},", - " {\"type\": \"momentSketchMax\", \"name\": \"maxWithNulls\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketchWithNulls\"}}", - " ],", - " \"intervals\": [\"2016-01-01T00:00:00.000Z/2016-01-31T00:00:00.000Z\"]", - "}" - ) - ); - List results = seq.toList(); - Assert.assertEquals(1, results.size()); - ResultRow row = results.get(0); - MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.get(0); // "sketch" - // 400 total products since this is pre-rollup - Assert.assertEquals(400.0, sketchObject.getPowerSums()[0], 1e-10); - - MomentSketchWrapper sketchObjectWithNulls = (MomentSketchWrapper) row.get(1); // "sketchWithNulls" - // 23 null values (377 when nulls are not replaced with default) - Assert.assertEquals( - NullHandling.replaceWithDefault() ? 400.0 : 377.0, - sketchObjectWithNulls.getPowerSums()[0], - 1e-10 - ); - - double[] quantilesArray = (double[]) row.get(2); // "quantiles" - Assert.assertEquals(0, quantilesArray[0], 0.05); - Assert.assertEquals(.5, quantilesArray[1], 0.05); - Assert.assertEquals(1.0, quantilesArray[2], 0.05); - - Double minValue = (Double) row.get(3); // "min" - Assert.assertEquals(0.0011, minValue, 0.0001); - - Double maxValue = (Double) row.get(4); // "max" - Assert.assertEquals(0.9969, maxValue, 0.0001); - - double[] quantilesArrayWithNulls = (double[]) row.get(5); // "quantilesWithNulls" - Assert.assertEquals(NullHandling.replaceWithDefault() ? 0.0 : 5.0, quantilesArrayWithNulls[0], 0.05); - Assert.assertEquals( - NullHandling.replaceWithDefault() ? 7.721400294818661d : 7.57, - quantilesArrayWithNulls[1], - 0.05 - ); - Assert.assertEquals(10.0, quantilesArrayWithNulls[2], 0.05); - - Double minValueWithNulls = (Double) row.get(6); // "minWithNulls" - Assert.assertEquals(NullHandling.replaceWithDefault() ? 
0.0 : 5.0164, minValueWithNulls, 0.0001); - - Double maxValueWithNulls = (Double) row.get(7); // "maxWithNulls" - Assert.assertEquals(9.9788, maxValueWithNulls, 0.0001); - - } - - @Test - public void buildingSketchesAtQueryTime() throws Exception - { - Sequence seq = helper.createIndexAndRunQueryOnSegment( - new File(this.getClass().getClassLoader().getResource("doubles_build_data.tsv").getFile()), - String.join( - "\n", - "{", - " \"type\": \"string\",", - " \"parseSpec\": {", - " \"format\": \"tsv\",", - " \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},", - " \"dimensionsSpec\": {", - " \"dimensions\": [ \"product\", {\"name\":\"valueWithNulls\", \"type\":\"double\"}],", - " \"dimensionExclusions\": [\"sequenceNumber\"],", - " \"spatialDimensions\": []", - " },", - " \"columns\": [\"timestamp\", \"sequenceNumber\", \"product\", \"value\", \"valueWithNulls\"]", - " }", - "}" - ), - "[{\"type\": \"doubleSum\", \"name\": \"value\", \"fieldName\": \"value\"}]", - 0, // minTimestamp - Granularities.NONE, - 10, // maxRowCount - String.join( - "\n", - "{", - " \"queryType\": \"groupBy\",", - " \"dataSource\": \"test_datasource\",", - " \"granularity\": \"ALL\",", - " \"dimensions\": [],", - " \"aggregations\": [", - " {\"type\": \"momentSketch\", \"name\": \"sketch\", \"fieldName\": \"value\", \"k\": 10},", - " {\"type\": \"momentSketch\", \"name\": \"sketchWithNulls\", \"fieldName\": \"valueWithNulls\", \"k\": 10}", - " ],", - " \"intervals\": [\"2016-01-01T00:00:00.000Z/2016-01-31T00:00:00.000Z\"]", - "}" - ) - ); - - List results = seq.toList(); - Assert.assertEquals(1, results.size()); - ResultRow row = results.get(0); - - MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.get(0); // "sketch" - // 385 total products since roll-up limited by valueWithNulls column - Assert.assertEquals(385.0, sketchObject.getPowerSums()[0], 1e-10); - - MomentSketchWrapper sketchObjectWithNulls = (MomentSketchWrapper) row.get(1); // "sketchWithNulls" - - // in default mode, all 385 rows have a number value so will be computed, but only 377 rows have actual values in - // sql null mode - Assert.assertEquals(hasNulls ? 
377.0 : 385.0, sketchObjectWithNulls.getPowerSums()[0], 1e-10); - } -} - diff --git a/extensions-contrib/momentsketch/src/test/resources/doubles_build_data.tsv b/extensions-contrib/momentsketch/src/test/resources/doubles_build_data.tsv deleted file mode 100644 index b0cf04d09aaf..000000000000 --- a/extensions-contrib/momentsketch/src/test/resources/doubles_build_data.tsv +++ /dev/null @@ -1,400 +0,0 @@ -2016010101 0 0 0.6529403005319299 6.4640 -2016010101 1 0 0.9270214958987323 5.6748 -2016010101 2 0 0.6383273609981486 9.0873 -2016010101 3 0 0.8088289215633632 8.6046 -2016010101 4 0 0.8163864917598281 5.2844 -2016010101 5 0 0.38484848588530784 6.7631 -2016010101 6 0 0.7690020468986823 9.4987 -2016010101 7 0 0.6212078833139824 -2016010101 8 0 0.4915825094949512 -2016010101 9 0 0.688004059332008 5.2246 -2016010101 10 0 0.2536908275250508 5.8375 -2016010101 11 0 0.6618435914290263 8.0502 -2016010101 12 0 0.7892773595797635 -2016010101 13 0 0.08857624134076048 9.3025 -2016010101 14 0 0.11992633801904151 9.8775 -2016010101 15 0 0.4959192800105586 5.3096 -2016010101 16 0 0.5564893557708243 5.7811 -2016010101 17 0 0.7755547456799993 -2016010101 18 0 0.06420706406984311 7.5113 -2016010101 19 0 0.23085639094262378 6.6375 -2016010101 20 7 0.012013916725163498 5.5032 -2016010101 21 7 0.34077219818209503 6.0330 -2016010101 22 7 0.8445966884204918 6.9012 -2016010101 23 7 0.6466142718287953 7.2324 -2016010101 24 7 0.43959032391415487 8.5575 -2016010101 25 7 0.7768829233737787 -2016010101 26 7 0.5899544206136442 7.9103 -2016010101 27 7 0.017782361911801825 9.5395 -2016010101 28 7 0.5431916165782864 9.0030 -2016010101 29 7 0.8218253174439416 8.2596 -2016010101 30 7 0.6372788284951859 8.4348 -2016010101 31 7 0.41403671834680933 7.0427 -2016010101 32 7 0.042508330730374855 8.4631 -2016010101 33 7 0.7416290691530969 5.8021 -2016010101 34 7 0.6990557213726277 6.9269 -2016010101 35 7 0.6302154208823348 8.6705 -2016010101 36 7 0.021053567154993402 8.8914 -2016010101 37 7 0.770280353784988 7.3932 -2016010101 38 7 0.08205576978448703 6.8809 -2016010101 39 7 0.2049660800682488 5.9201 -2016010101 40 5 0.08129304678049831 -2016010101 41 5 0.17754747271638005 8.7760 -2016010101 42 5 0.8441702357096768 6.2093 -2016010101 43 5 0.9060464737257796 6.5394 -2016010101 44 5 0.5970595512785409 9.9788 -2016010101 45 5 0.843859346312315 7.1848 -2016010101 46 5 0.1649847892987305 8.4503 -2016010101 47 5 0.5279903496999094 8.9682 -2016010101 48 5 0.08758749830556767 6.4161 -2016010101 49 5 0.6088480522002063 7.1579 -2016010101 50 5 0.31079133043670004 9.3775 -2016010101 51 5 0.43062105356651226 5.2508 -2016010101 52 5 0.8542989852099488 7.4714 -2016010101 53 5 0.42443162807834045 7.9648 -2016010101 54 5 0.5020327054358468 7.4182 -2016010101 55 5 0.36453920012074237 9.4505 -2016010101 56 5 0.9884597580348689 6.1475 -2016010101 57 5 0.3770559586575706 5.1045 -2016010101 58 5 0.5989237303385875 -2016010101 59 5 0.9926342802399872 7.7604 -2016010101 60 4 0.7813961047849703 5.3715 -2016010101 61 4 0.062171533805525425 6.2639 -2016010101 62 4 0.5284977503473608 8.4169 -2016010101 63 4 0.5924687065581794 9.3528 -2016010101 64 4 0.06305234223879275 9.4684 -2016010101 65 4 0.4959562731747129 9.9342 -2016010101 66 4 0.6336733165353365 5.1156 -2016010101 67 4 0.48860263540869875 8.3483 -2016010101 68 4 0.9387610528974851 5.8623 -2016010101 69 4 0.3391271652731308 6.8404 -2016010101 70 4 0.5962837638971421 5.8733 -2016010101 71 4 0.9190447294921896 5.6447 -2016010101 72 4 0.33082943548872534 6.5562 -2016010101 73 4 0.6236359023672029 9.6535 
-2016010101 74 4 0.27134427542016615 7.4968 -2016010101 75 4 0.11665530238761901 8.7194 -2016010101 76 4 0.10469260335277608 8.7135 -2016010101 77 4 0.6824658847771211 5.5886 -2016010101 78 4 0.6131047630496756 8.4490 -2016010101 79 4 0.9838171536972515 9.2168 -2016010101 80 4 0.7484669110852756 9.7527 -2016010101 81 4 0.797620888697219 8.2709 -2016010101 82 4 0.7166673353657907 9.7910 -2016010101 83 4 0.46968710353176557 7.6986 -2016010101 84 4 0.3998491199643106 9.0208 -2016010101 85 4 0.6314883585976869 8.5947 -2016010101 86 4 0.8305617875577815 7.5846 -2016010101 87 4 0.6867651870284084 7.4681 -2016010101 88 4 0.9961677044887979 8.9175 -2016010101 89 4 0.19745766301180412 8.3892 -2016010101 90 4 0.2737652043079263 9.0234 -2016010101 91 4 0.2954503444695358 6.1687 -2016010101 92 4 0.6191902196833489 8.1738 -2016010101 93 4 0.6828058006233482 7.5305 -2016010101 94 4 0.7967115641510757 8.8036 -2016010101 95 4 0.5485460823820962 9.0837 -2016010101 96 4 0.4278132830938558 -2016010101 97 4 0.32194908458166194 5.7173 -2016010101 98 4 0.07094920295725238 6.8724 -2016010101 99 4 0.4351839393889565 7.1156 -2016010101 100 1 0.6160833396611648 5.9507 -2016010101 101 1 0.4652667787803648 7.3727 -2016010101 102 1 0.5026953463132913 5.9615 -2016010101 103 1 0.4103237191034753 6.0366 -2016010101 104 1 0.3298554666697301 8.3332 -2016010101 105 1 0.16907537273919138 8.9892 -2016010101 106 1 0.6945260598989513 -2016010101 107 1 0.917138530496438 -2016010101 108 1 0.8810129148605083 6.9952 -2016010101 109 1 0.11845626048380542 5.6466 -2016010101 110 1 0.8848971155827816 9.2285 -2016010101 111 1 0.9969103769603667 8.1092 -2016010101 112 1 0.06274198529295416 7.7878 -2016010101 113 1 0.2923616769686519 5.9326 -2016010101 114 1 0.12621083638328634 7.1302 -2016010101 115 1 0.9655188575577313 -2016010101 116 1 0.6074995164352884 9.0669 -2016010101 117 1 0.5501887988201414 5.2207 -2016010101 118 1 0.9406914128003497 7.7406 -2016010101 119 1 0.03264873659277656 6.2651 -2016010101 120 6 0.004852543443656487 8.5527 -2016010101 121 6 0.11161194329252788 9.1995 -2016010101 122 6 0.9403527002796559 8.0145 -2016010101 123 6 0.8951866979503953 9.5080 -2016010101 124 6 0.07629846897033454 8.6304 -2016010101 125 6 0.9898485014275873 -2016010101 126 6 0.42827377712188075 9.9497 -2016010101 127 6 0.4274796777951825 9.6071 -2016010101 128 6 0.5569522946332676 6.2189 -2016010101 129 6 0.028195121559112635 7.7296 -2016010101 130 6 0.8599127909482382 5.9382 -2016010101 131 6 0.3516112293128607 -2016010101 132 6 0.3888868189342449 9.2290 -2016010101 133 6 0.644589126160206 5.0507 -2016010101 134 6 0.7398741071492928 8.2090 -2016010101 135 6 0.1998479248216123 8.0746 -2016010101 136 6 0.8803215884594476 -2016010101 137 6 0.7079531966558515 9.2607 -2016010101 138 6 0.7904290564015343 5.1416 -2016010101 139 6 0.475671788742007 9.0617 -2016010101 140 3 0.034708334899357096 5.8831 -2016010101 141 3 0.4134637419532796 9.3853 -2016010101 142 3 0.9757934592902832 7.6605 -2016010101 143 3 0.37422347371609666 7.6535 -2016010101 144 3 0.5904996168737154 7.1212 -2016010101 145 3 0.5883259679727514 7.2821 -2016010101 146 3 0.3380286015499171 6.0115 -2016010101 147 3 0.42174393035143043 8.4855 -2016010101 148 3 0.4764900074141757 9.0136 -2016010101 149 3 0.01864239537224921 9.5688 -2016010101 150 3 0.9124007087743986 5.0372 -2016010101 151 3 0.8951275235699193 5.8328 -2016010101 152 3 0.7037272142266654 9.6007 -2016010101 153 3 0.5685506209266902 5.0164 -2016010101 154 3 0.4104883958833594 6.5091 -2016010101 155 3 0.7794005551450208 5.5946 
-2016010101 156 3 0.2879354697088996 8.5981 -2016010101 157 3 0.5243215707259823 9.5059 -2016010101 158 3 0.22238840286136063 8.6117 -2016010101 159 3 0.11336472553284738 5.7849 -2016010101 160 4 0.9800770037725316 6.4036 -2016010101 161 4 0.7628237317889158 8.9102 -2016010101 162 4 0.5355335935170453 7.7302 -2016010101 163 4 0.9676939330565402 6.2399 -2016010101 164 4 0.657825753108034 9.4161 -2016010101 165 4 0.9175328548944673 9.4530 -2016010101 166 4 0.6834666043257283 7.0117 -2016010101 167 4 0.08580759367942314 8.9389 -2016010101 168 4 0.3134740602060899 7.4747 -2016010101 169 4 0.3218818254752742 7.3339 -2016010101 170 4 0.6119297354994999 5.5100 -2016010101 171 4 0.07086832750773142 5.5108 -2016010101 172 4 0.2700864307032772 7.0204 -2016010101 173 4 0.7497315076673637 6.7076 -2016010101 174 4 0.4959921300968493 9.5705 -2016010101 175 4 0.09294825796093753 5.6014 -2016010101 176 4 0.4954515904444161 9.8482 -2016010101 177 4 0.8820366880191506 7.3854 -2016010101 178 4 0.17978298283728522 6.1864 -2016010101 179 4 0.05259679741524781 6.5391 -2016010101 180 5 0.4711892966981096 6.9655 -2016010101 181 5 0.5965662941715105 7.1904 -2016010101 182 5 0.4775201668966973 6.1565 -2016010101 183 5 0.05084576687030873 7.3423 -2016010101 184 5 0.16680660677593928 6.8420 -2016010101 185 5 0.9342287333653685 -2016010101 186 5 0.8153161893769392 7.7256 -2016010101 187 5 0.9362517669519288 9.3776 -2016010101 188 5 0.10865218471840699 5.7953 -2016010101 189 5 0.44665378915111065 6.9584 -2016010101 190 5 0.8804454791937898 5.4257 -2016010101 191 5 0.20666928346935398 6.5840 -2016010101 192 5 0.7052479677101612 8.9173 -2016010101 193 5 0.5006205470200923 7.7161 -2016010101 194 5 0.23220501028575968 7.7501 -2016010101 195 5 0.11776507130391467 6.6947 -2016010101 196 5 0.592011744069295 9.7699 -2016010101 197 5 0.7089191450076786 6.4001 -2016010101 198 5 0.7269340552231702 -2016010101 199 5 0.7049554871226075 5.1937 -2016010101 200 1 0.44078367400761076 6.1463 -2016010101 201 1 0.7715264806037321 6.8473 -2016010101 202 1 0.10151701902103971 5.9633 -2016010101 203 1 0.661891806135609 6.4671 -2016010101 204 1 0.23095745116331567 6.7930 -2016010101 205 1 0.46625278601359255 6.3031 -2016010101 206 1 0.5912486124707177 5.1771 -2016010101 207 1 0.963946871892115 5.0794 -2016010101 208 1 0.8172596270687692 6.2385 -2016010101 209 1 0.05745699928199144 8.5527 -2016010101 210 1 0.40612684342877337 8.1107 -2016010101 211 1 0.6330844777969608 7.1172 -2016010101 212 1 0.3148973406065705 9.1816 -2016010101 213 1 0.23230462811318175 6.4461 -2016010101 214 1 0.9960772952945196 9.5017 -2016010101 215 1 0.4581376339786414 6.2413 -2016010101 216 1 0.7181494575770677 6.9744 -2016010101 217 1 0.04277917580280799 9.0779 -2016010101 218 1 0.11137419446625674 8.5031 -2016010101 219 1 0.014716278313423037 7.8452 -2016010101 220 2 0.8988603727313186 7.2137 -2016010101 221 2 0.8192124226306603 5.3630 -2016010101 222 2 0.9304683598956597 7.2404 -2016010101 223 2 0.4375546733938238 9.7816 -2016010101 224 2 0.7676359685332207 -2016010101 225 2 0.30977859822027964 6.9535 -2016010101 226 2 0.008595955287459267 7.2172 -2016010101 227 2 0.6790605343724216 8.9225 -2016010101 228 2 0.36949588946147993 9.7846 -2016010101 229 2 0.3826798435706562 6.8578 -2016010101 230 2 0.13836513167087128 6.2374 -2016010101 231 2 0.4451570472364902 5.0264 -2016010101 232 2 0.8944067771338549 8.4037 -2016010101 233 2 0.6068095655362902 6.1181 -2016010101 234 2 0.7084870042917992 9.3126 -2016010101 235 2 0.5867363290655241 8.8871 -2016010101 236 2 
0.6903863088381504 9.8005 -2016010101 237 2 0.30984947936089124 6.0620 -2016010101 238 2 0.31561088279452665 7.1647 -2016010101 239 2 0.006286479849849758 -2016010101 240 5 0.34397466439693725 7.5199 -2016010101 241 5 0.052476003295899964 7.3817 -2016010101 242 5 0.726106045184451 7.6509 -2016010101 243 5 0.01559115401009159 9.7002 -2016010101 244 5 0.9219270739836661 8.0615 -2016010101 245 5 0.5147917330760431 9.4082 -2016010101 246 5 0.41919804470784205 5.7261 -2016010101 247 5 0.4145101775865617 6.2074 -2016010101 248 5 0.34153038022995796 8.7753 -2016010101 249 5 0.9503817180587767 8.8932 -2016010101 250 5 0.6958354849389804 9.1141 -2016010101 251 5 0.46000811480536297 8.8439 -2016010101 252 5 0.18379911670616378 8.2403 -2016010101 253 5 0.20973108758556713 7.5995 -2016010101 254 5 0.5979201603287885 6.0502 -2016010101 255 5 0.5552419362393491 5.1349 -2016010101 256 5 0.10996555307297629 8.9563 -2016010101 257 5 0.3591453585622102 8.7323 -2016010101 258 5 0.06098055111386691 7.9408 -2016010101 259 5 0.5227270267924988 8.5808 -2016010101 260 0 0.8492702312836989 7.9958 -2016010101 261 0 0.5941242001151825 8.6743 -2016010101 262 0 0.6840733026822607 7.1121 -2016010101 263 0 0.8109777000249937 5.2360 -2016010101 264 0 0.8599286045013937 7.7809 -2016010101 265 0 0.7828806670746145 7.3934 -2016010101 266 0 0.8102260971867188 5.6508 -2016010101 267 0 0.38306094770114385 9.4577 -2016010101 268 0 0.7093609268723879 8.8535 -2016010101 269 0 0.4806583187577358 7.8049 -2016010101 270 0 0.5766489331365172 9.4820 -2016010101 271 0 0.7565067278238041 8.1346 -2016010101 272 0 0.8262768908267573 5.4734 -2016010101 273 0 0.7951015619138146 9.8669 -2016010101 274 0 0.1938448910588796 6.4710 -2016010101 275 0 0.8884608583839426 7.8987 -2016010101 276 0 0.7046203516594505 5.1656 -2016010101 277 0 0.5951074760704175 8.9059 -2016010101 278 0 0.38207409719784036 6.5022 -2016010101 279 0 0.2445271560830221 8.0526 -2016010101 280 7 0.6032919624054952 9.5785 -2016010101 281 7 0.1473220747987144 7.7168 -2016010101 282 7 0.38396643099307604 8.0813 -2016010101 283 7 0.4431561135554619 9.6805 -2016010101 284 7 0.896578318093225 9.3298 -2016010101 285 7 0.6729206122043515 5.1569 -2016010101 286 7 0.8498821349478478 8.8960 -2016010101 287 7 0.48231924024179784 9.8923 -2016010101 288 7 0.005379480238994816 6.2797 -2016010101 289 7 0.8017936717647264 9.5651 -2016010101 290 7 0.08193232952990348 9.1733 -2016010101 291 7 0.3422943366454193 9.8168 -2016010101 292 7 0.6081556855207957 6.5901 -2016010101 293 7 0.641193222941943 5.9771 -2016010101 294 7 0.3716858024654186 7.8948 -2016010101 295 7 0.0011169303830090849 9.4816 -2016010101 296 7 0.4698784438339285 8.1062 -2016010101 297 7 0.958198841287214 7.1534 -2016010101 298 7 0.730945048929339 6.3048 -2016010101 299 7 0.1858601884405512 7.0620 -2016010101 300 5 0.1020825694779407 9.9499 -2016010101 301 5 0.5742385074938443 7.1200 -2016010101 302 5 0.9846817584978909 8.1874 -2016010101 303 5 0.3858694391491331 9.3786 -2016010101 304 5 0.9822246873202894 9.2787 -2016010101 305 5 0.39822015482143314 8.0036 -2016010101 306 5 0.6575924137957005 8.2140 -2016010101 307 5 0.02359557062746842 5.6017 -2016010101 308 5 0.42059510563039115 8.4333 -2016010101 309 5 0.5970764856116284 5.2788 -2016010101 310 5 0.2817399870096221 9.5843 -2016010101 311 5 0.5334091165258412 7.4521 -2016010101 312 5 0.31199853410796585 5.9777 -2016010101 313 5 0.3156991306990594 6.9610 -2016010101 314 5 0.9560285139855889 5.8305 -2016010101 315 5 0.7846951771498516 6.9552 -2016010101 316 5 
0.009731486767097897 6.1948 -2016010101 317 5 0.22625857375026215 5.4469 -2016010101 318 5 0.8580955944724618 6.5356 -2016010101 319 5 0.9622008926137687 9.0020 -2016010101 320 5 0.023872302930851297 9.1422 -2016010101 321 5 0.3580981601151092 6.3018 -2016010101 322 5 0.9120442264954038 8.0827 -2016010101 323 5 0.5968491989965334 5.1635 -2016010101 324 5 0.5028516120506729 8.0053 -2016010101 325 5 0.30590552314314 5.2736 -2016010101 326 5 0.5566430714368423 5.1079 -2016010101 327 5 0.6441099124064397 6.4961 -2016010101 328 5 0.8765287851559298 8.6211 -2016010101 329 5 0.38405928947408385 9.0091 -2016010101 330 5 0.29654203975364 6.4320 -2016010101 331 5 0.3606921959261904 8.5929 -2016010101 332 5 0.9617038824842609 5.9154 -2016010101 333 5 0.3103700669261584 5.7548 -2016010101 334 5 0.4935170174690311 9.8212 -2016010101 335 5 0.34757561267296444 6.2293 -2016010101 336 5 0.1236918485545484 7.7914 -2016010101 337 5 0.24925258973306597 8.1718 -2016010101 338 5 0.4104821367672965 6.1997 -2016010101 339 5 0.3621850216936935 8.4722 -2016010101 340 6 0.3816099229918041 8.0530 -2016010101 341 6 0.9496667754823915 8.9414 -2016010101 342 6 0.5594605720642025 8.1693 -2016010101 343 6 0.8537860901562698 9.0562 -2016010101 344 6 0.74787202967909 5.2490 -2016010101 345 6 0.29699361421249604 8.7462 -2016010101 346 6 0.035943527086235605 8.6117 -2016010101 347 6 0.20106098029261277 8.8491 -2016010101 348 6 0.6589994525818863 7.2742 -2016010101 349 6 0.3851541727199762 5.4651 -2016010101 350 6 0.12262059605539744 5.6784 -2016010101 351 6 0.33383436408012057 9.0930 -2016010101 352 6 0.5087733967157267 6.8228 -2016010101 353 6 0.34978350071897446 6.9314 -2016010101 354 6 0.9171509423859847 6.5113 -2016010101 355 6 0.6395164525815664 5.0802 -2016010101 356 6 0.659637993918835 8.8696 -2016010101 357 6 0.5689746534857604 6.7891 -2016010101 358 6 0.03266513163571427 9.0242 -2016010101 359 6 0.5863675010868861 9.3277 -2016010101 360 9 0.8665167898047901 7.7439 -2016010101 361 9 0.7933960420424948 -2016010101 362 9 0.8409667771425247 6.2808 -2016010101 363 9 0.9544310598825743 9.5424 -2016010101 364 9 0.36206869840549716 -2016010101 365 9 0.253957983880155 9.2830 -2016010101 366 9 0.08496022679431525 7.7179 -2016010101 367 9 0.5483782518766319 6.7984 -2016010101 368 9 0.41440902281408365 7.7158 -2016010101 369 9 0.2947889064970717 6.1741 -2016010101 370 9 0.659477180019486 7.2289 -2016010101 371 9 0.9016744422830162 8.8920 -2016010101 372 9 0.4692828259677926 5.0643 -2016010101 373 9 0.4221974527778145 8.4176 -2016010101 374 9 0.26318360778150285 5.4449 -2016010101 375 9 0.10064081807071767 9.5148 -2016010101 376 9 0.7781802619858804 6.3367 -2016010101 377 9 0.529215767115243 9.5807 -2016010101 378 9 0.21094147073619007 5.9806 -2016010101 379 9 0.18894985078463877 9.8089 -2016010101 380 5 0.20683422198832369 7.8759 -2016010101 381 5 0.9506923735546904 8.2207 -2016010101 382 5 0.25734447316063913 7.5610 -2016010101 383 5 0.6439025323539892 6.4487 -2016010101 384 5 0.9099080819805052 9.4225 -2016010101 385 5 0.9331714165375404 -2016010101 386 5 0.24979840404324272 -2016010101 387 5 0.40270120064812764 7.5996 -2016010101 388 5 0.35895113537427137 5.4774 -2016010101 389 5 0.44814114645480074 8.5523 -2016010101 390 5 0.437368419580639 -2016010101 391 5 0.2777496228001308 7.1315 -2016010101 392 5 0.09350862521048608 6.0913 -2016010101 393 5 0.10366624548706516 5.9504 -2016010101 394 5 0.8715309310993357 6.6814 -2016010101 395 5 0.8953111125914557 7.4980 -2016010101 396 5 0.9410866942183567 6.1849 -2016010101 397 5 
0.16367286942347592 6.3734 -2016010101 398 5 0.6995415361957786 9.1507 -2016010101 399 5 0.7170527361072194 8.5733 diff --git a/extensions-contrib/moving-average-query/README.md b/extensions-contrib/moving-average-query/README.md deleted file mode 100644 index 918f1750ec06..000000000000 --- a/extensions-contrib/moving-average-query/README.md +++ /dev/null @@ -1,29 +0,0 @@ - - -druid-moving-average-query -============= - -Overview -============= -**Moving Average Query** is an extension which provides support for [Moving Average](https://en.wikipedia.org/wiki/Moving_average) and other Aggregate [Window Functions](https://en.wikibooks.org/wiki/Structured_Query_Language/Window_functions) in Druid queries. - -Documentation -============= -See the druid.apache.org website or under [Druid Github Repo](https://github.com/apache/druid/tree/master/docs/development/extensions-contrib/moving-average-query.md). diff --git a/extensions-contrib/moving-average-query/pom.xml b/extensions-contrib/moving-average-query/pom.xml deleted file mode 100644 index dbb75dcf7078..000000000000 --- a/extensions-contrib/moving-average-query/pom.xml +++ /dev/null @@ -1,131 +0,0 @@ - - - - 4.0.0 - - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - org.apache.druid.extensions.contrib - druid-moving-average-query - druid-moving-average-query - - - UTF-8 - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - com.google.code.findbugs - jsr305 - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - joda-time - joda-time - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.google.inject.extensions - guice-multibindings - provided - - - com.google.guava - guava - provided - - - - - org.apache.druid - druid-core - ${project.parent.version} - test-jar - test - - - org.apache.druid - druid-server - ${project.parent.version} - test-jar - test - - - junit - junit - test - - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - ${jackson.version} - test - - - org.hamcrest - hamcrest-core - test - - - diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/AveragerFactoryWrapper.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/AveragerFactoryWrapper.java deleted file mode 100644 index 42cfc3a4a6a0..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/AveragerFactoryWrapper.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.movingaverage.averagers.AveragerFactory; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.apache.druid.segment.column.ValueType; - -import javax.annotation.Nullable; -import java.util.Comparator; -import java.util.List; - -/** - * A wrapper around averagers that makes them appear to be aggregators. - * This is necessary purely to allow existing common druid code that only knows - * about aggregators to work with the MovingAverageQuery query as well. - * - * NOTE: The {@link AggregatorFactory} abstract class is only partially extended. - * Most methods are not implemented and throw {@link UnsupportedOperationException} if called. - * This is because these methods are invalid for the AveragerFactoryWrapper. - * - * @param <T> Result type - * @param <R> Finalized Result type - */ -public class AveragerFactoryWrapper<T, R> extends AggregatorFactory -{ - - private final AveragerFactory<T, R> af; - private final String prefix; - - /** - * Simple constructor - * - * @param af the averager factory to wrap - * @param prefix prefix prepended to the averager's name - */ - public AveragerFactoryWrapper(AveragerFactory<T, R> af, String prefix) - { - this.af = af; - this.prefix = prefix; - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) throws UnsupportedOperationException - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /* (non-Javadoc) - * @see org.apache.druid.query.aggregation.AggregatorFactory#getComparator() - */ - @Override - public Comparator getComparator() - { - return af.getComparator(); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public Object combine(Object lhs, Object rhs) - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public AggregatorFactory getCombiningFactory() - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public List<AggregatorFactory> getRequiredColumns() - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public Object deserialize(Object object) - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * Delegates to the wrapped {@link AveragerFactory#finalizeComputation}. - */ - @SuppressWarnings("unchecked") - @Nullable - @Override - public Object finalizeComputation(@Nullable Object object) - { - return af.finalizeComputation((T) object); - } - - /* (non-Javadoc) - * @see org.apache.druid.query.aggregation.AggregatorFactory#getName() - */ - @Override - public String getName() - { - return prefix + af.getName(); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public List<String> requiredFields() - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public byte[] getCacheKey() - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - - /** - * This method must be implemented since it is called by - * {@link org.apache.druid.query.groupby.GroupByQuery#computeResultRowSignature}. Returning "COMPLEX" causes the - * return type to be treated as unknown. - */ - @Override - public String getTypeName() - { - return ValueType.COMPLEX.name(); - } - - /** - * Not implemented. Throws UnsupportedOperationException. - */ - @Override - public int getMaxIntermediateSize() - { - throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper."); - } - -}
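The javadoc above describes a deliberate partial adapter: AveragerFactoryWrapper satisfies the broad AggregatorFactory contract but implements only the paths MovingAverageQuery actually exercises, failing fast everywhere else. A generic, self-contained illustration of that pattern (the names here are hypothetical, not Druid APIs):

    // Hypothetical interface standing in for a broad contract like AggregatorFactory.
    interface Codec
    {
      byte[] encode(String s);
      String decode(byte[] bytes);
    }

    class EncodeOnlyCodec implements Codec
    {
      @Override
      public byte[] encode(String s)
      {
        return s.getBytes(java.nio.charset.StandardCharsets.UTF_8);
      }

      @Override
      public String decode(byte[] bytes)
      {
        // Paths the caller never exercises fail fast, exactly like
        // "Invalid operation for AveragerFactoryWrapper." above.
        throw new UnsupportedOperationException("Invalid operation for EncodeOnlyCodec.");
      }
    }

The trade-off is the same as in the wrapper: the unimplemented surface is checked at run time rather than compile time, so it only works when the calling code's usage is known and narrow.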
-   */
-  @SuppressWarnings("unchecked")
-  @Nullable
-  @Override
-  public Object finalizeComputation(@Nullable Object object)
-  {
-    return af.finalizeComputation((T) object);
-  }
-
-  /* (non-Javadoc)
-   * @see org.apache.druid.query.aggregation.AggregatorFactory#getName()
-   */
-  @Override
-  public String getName()
-  {
-    return prefix + af.getName();
-  }
-
-  /**
-   * Not implemented. Throws UnsupportedOperationException.
-   */
-  @Override
-  public List<String> requiredFields()
-  {
-    throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper.");
-  }
-
-  /**
-   * Not implemented. Throws UnsupportedOperationException.
-   */
-  @Override
-  public byte[] getCacheKey()
-  {
-    throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper.");
-  }
-
-  /**
-   * This method must be implemented since it is called by
-   * {@link org.apache.druid.query.groupby.GroupByQuery#computeResultRowSignature}. Returning "COMPLEX" causes the
-   * return type to be treated as unknown.
-   */
-  @Override
-  public String getTypeName()
-  {
-    return ValueType.COMPLEX.name();
-  }
-
-  /**
-   * Not implemented. Throws UnsupportedOperationException.
-   */
-  @Override
-  public int getMaxIntermediateSize()
-  {
-    throw new UnsupportedOperationException("Invalid operation for AveragerFactoryWrapper.");
-  }
-
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/BucketingAccumulator.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/BucketingAccumulator.java
deleted file mode 100644
index a79e24bf3d97..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/BucketingAccumulator.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage;
-
-import org.apache.druid.data.input.Row;
-import org.apache.druid.java.util.common.guava.YieldingAccumulator;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Groups all the rows for a specific period together.
- * Rows of each period are placed in a single {@link RowBucket} (keyed by the dateTime field).
- * (Assumption: Input arrives sorted by timestamp.)
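A minimal driving sketch (assumptions: rows is a Sequence<Row> already sorted by timestamp, and consume(...) is a placeholder for whatever processes a completed bucket); this mirrors how RowBucketIterable.RowBucketIterator drives the accumulator later in this patch:

    Yielder<RowBucket> yielder = rows.toYielder(null, new BucketingAccumulator());
    while (!yielder.isDone()) {
      RowBucket bucket = yielder.get();  // all rows whose timestamps fall in the current period
      consume(bucket.getRows());         // placeholder consumer
      yielder = yielder.next(bucket);    // resume accumulating at the next period boundary
    }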
- */ -public class BucketingAccumulator extends YieldingAccumulator -{ - - /* (non-Javadoc) - * @see YieldingAccumulator#accumulate(java.lang.Object, java.lang.Object) - */ - @Override - public RowBucket accumulate(RowBucket accumulated, Row in) - { - List rows; - - if (accumulated == null) { - // first row, initializing - rows = new ArrayList<>(); - accumulated = new RowBucket(in.getTimestamp(), rows); - } else if (accumulated.getNextBucket() != null) { - accumulated = accumulated.getNextBucket(); - } - - if (!accumulated.getDateTime().equals(in.getTimestamp())) { - // day change detected - rows = new ArrayList<>(); - rows.add(in); - RowBucket nextBucket = new RowBucket(in.getTimestamp(), rows); - accumulated.setNextBucket(nextBucket); - yield(); - } else { - // still on the same day - rows = accumulated.getRows(); - rows.add(in); - } - - return accumulated; - } - -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetrics.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetrics.java deleted file mode 100644 index 8ff5ade430e9..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetrics.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage; - -import org.apache.druid.query.DefaultQueryMetrics; -import org.apache.druid.query.DruidMetrics; - -public class DefaultMovingAverageQueryMetrics extends DefaultQueryMetrics implements - MovingAverageQueryMetrics -{ - @Override - public void query(MovingAverageQuery query) - { - super.query(query); - numDimensions(query); - numMetrics(query); - numComplexMetrics(query); - } - - @Override - public void numDimensions(MovingAverageQuery query) - { - setDimension("numDimensions", String.valueOf(query.getDimensions().size())); - } - - @Override - public void numMetrics(MovingAverageQuery query) - { - setDimension("numMetrics", String.valueOf(query.getAggregatorSpecs().size())); - } - - @Override - public void numComplexMetrics(MovingAverageQuery query) - { - int numComplexAggs = DruidMetrics.findNumComplexAggs(query.getAggregatorSpecs()); - setDimension("numComplexMetrics", String.valueOf(numComplexAggs)); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetricsFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetricsFactory.java deleted file mode 100644 index 6f2cc3446be8..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/DefaultMovingAverageQueryMetricsFactory.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.druid.guice.LazySingleton; - -@LazySingleton -public class DefaultMovingAverageQueryMetricsFactory implements MovingAverageQueryMetricsFactory -{ - - private static final MovingAverageQueryMetricsFactory INSTANCE = - new DefaultMovingAverageQueryMetricsFactory(); - - /** - * Should be used only in tests, directly or indirectly (via {@link - * MovingAverageQueryToolChest#MovingAverageQueryToolChest}). 
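For instance, a test-only usage sketch (assuming query is an already-built MovingAverageQuery):

    MovingAverageQueryMetrics metrics = DefaultMovingAverageQueryMetricsFactory.instance().makeMetrics();
    metrics.query(query);  // also records the numDimensions/numMetrics/numComplexMetrics dimensions shown above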
-   */
-  @VisibleForTesting
-  public static MovingAverageQueryMetricsFactory instance()
-  {
-    return INSTANCE;
-  }
-
-  @Override
-  public MovingAverageQueryMetrics makeMetrics()
-  {
-    return new DefaultMovingAverageQueryMetrics();
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageHelper.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageHelper.java
deleted file mode 100644
index f0cfb0bd15bf..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageHelper.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage;
-
-import org.apache.druid.data.input.MapBasedRow;
-import org.apache.druid.data.input.Row;
-import org.apache.druid.query.dimension.DimensionSpec;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MovingAverageHelper
-{
-
-  /**
-   * @param dimensions The DimensionSpecs specified in the query
-   * @param row        The Row to be used for looking up dimension values
-   *
-   * @return A Map of dimension/value pairs from the row
-   */
-
-  public static Map<String, Object> getDimKeyFromRow(Collection<DimensionSpec> dimensions, Row row)
-  {
-
-    Map<String, Object> key = new HashMap<>();
-    Map<String, Object> event = ((MapBasedRow) row).getEvent();
-
-    for (DimensionSpec dimension : dimensions) {
-      key.put(dimension.getOutputName(), event.get(dimension.getOutputName()));
-    }
-
-    return key;
-  }
-
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageIterable.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageIterable.java
deleted file mode 100644
index b92604d9f23c..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageIterable.java
+++ /dev/null
@@ -1,312 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.data.input.Row; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.guava.Yielder; -import org.apache.druid.java.util.common.guava.Yielders; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.dimension.DimensionSpec; -import org.apache.druid.query.movingaverage.averagers.Averager; -import org.apache.druid.query.movingaverage.averagers.AveragerFactory; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.apache.druid.segment.ColumnValueSelector; -import org.apache.druid.segment.DimensionSelector; -import org.apache.druid.segment.NilColumnValueSelector; -import org.apache.druid.segment.column.ColumnCapabilities; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * {@link MovingAverageIterable} iterates over days {@link RowBucket}, producing rows for each dimension combination, - * filling in missing entries with "empty" rows so that the averaging buckets have enough data to operate on. - * It then computes the moving average on the buckets and returns the row. - * See computeMovingAverage for more details. - */ -public class MovingAverageIterable implements Iterable -{ - - private final Sequence seq; - private final List dims; - private final List> factories; - private final Map postAggMap; - private final Map aggMap; - private final Map emptyEvents; - - public MovingAverageIterable( - Sequence buckets, - List dims, - List> factories, - List postAggList, - List aggList - ) - { - this.dims = dims; - this.factories = factories; - this.seq = buckets; - - postAggMap = postAggList.stream().collect(Collectors.toMap(postAgg -> postAgg.getName(), postAgg -> postAgg)); - aggMap = aggList.stream().collect(Collectors.toMap(agg -> agg.getName(), agg -> agg)); - emptyEvents = generateEmptyEventsFromAggregators(aggMap, postAggMap); - } - - // Build a list of empty events from Aggregators/PostAggregators to be used by Iterator to build fake rows. - // These fake rows will be used by computeMovingAverage() in skip=true mode. - // See emptyEventsCopy in internalNext() and computeMovingAverage() documentation. - private Map generateEmptyEventsFromAggregators(Map aggMap, - Map postAggMap) - { - Map emptyEvents = new LinkedHashMap<>(); - aggMap.values().forEach(agg -> { - Aggregator aggFactorized = agg.factorize(getEmptyColumnSelectorFactory()); - emptyEvents.put(agg.getName(), aggFactorized.get()); - }); - postAggMap.values().forEach(postAgg -> emptyEvents.put(postAgg.getName(), postAgg.compute(emptyEvents))); - return emptyEvents; - } - - @Nonnull - private ColumnSelectorFactory getEmptyColumnSelectorFactory() - { - return new ColumnSelectorFactory() - { - @Override - public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) - { - // Generating empty records while aggregating on Filtered aggregators requires a dimension selector - // for initialization. 
This dimension selector is not actually used for generating values.
-          return DimensionSelector.constant(null);
-        }
-
-        @Override
-        public ColumnValueSelector makeColumnValueSelector(String s)
-        {
-          return NilColumnValueSelector.instance();
-        }
-
-        @Override
-        public ColumnCapabilities getColumnCapabilities(String s)
-        {
-          return null;
-        }
-      };
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Iterable#iterator()
-   */
-  @Override
-  public Iterator<Row> iterator()
-  {
-    return new MovingAverageIterator(seq, dims, factories, emptyEvents, aggMap);
-  }
-
-  static class MovingAverageIterator implements Iterator<Row>
-  {
-
-    private final List<DimensionSpec> dims;
-    // Key: Row's dimension set. Value: Averager. See MovingAverageIterator#computeMovingAverage for more details.
-    private final Map<Map<String, Object>, List<Averager<?>>> averagers = new HashMap<>();
-    private final List<AveragerFactory<?, ?>> averagerFactories;
-
-    private Yielder<RowBucket> yielder;
-    private RowBucket cache = null;
-    private Iterator<Row> cacheIter;
-    private Iterator<Map<String, Object>> averagersKeysIter;
-    private Set<Map<String, Object>> seenKeys = new HashSet<>();
-    private Row saveNext;
-    private Map<String, AggregatorFactory> aggMap;
-    private Map<String, Object> emptyEvents;
-
-    public MovingAverageIterator(
-        Sequence<RowBucket> rows,
-        List<DimensionSpec> dims,
-        List<AveragerFactory<?, ?>> averagerFactories,
-        Map<String, Object> emptyEvents,
-        Map<String, AggregatorFactory> aggMap
-    )
-    {
-      this.dims = dims;
-      this.averagerFactories = averagerFactories;
-      this.emptyEvents = emptyEvents;
-      this.aggMap = aggMap;
-
-      yielder = Yielders.each(rows);
-    }
-
-    /* (non-Javadoc)
-     * @see java.util.Iterator#hasNext()
-     */
-    @Override
-    public boolean hasNext()
-    {
-      if (saveNext != null) {
-        return true;
-      }
-
-      saveNext = internalNext();
-      return (saveNext != null);
-    }
-
-    /* (non-Javadoc)
-     * @see java.util.Iterator#next()
-     */
-    @Override
-    public Row next()
-    {
-      if (!hasNext()) {
-        throw new NoSuchElementException();
-      }
-
-      Row retVal = saveNext;
-      saveNext = null;
-      return retVal;
-    }
-
-    private Row internalNext()
-    {
-      // Iterate until there is a row to return or the Yielder is exhausted, in which case return null.
-      // This is used in order to skip empty buckets (iterate to the next one).
-      while (true) {
-        if (cache == null && !yielder.isDone()) {
-          cache = yielder.get();
-          yielder = yielder.next(cache);
-
-          cacheIter = cache.getRows().iterator();
-        }
-
-        Row r;
-
-        // return rows from the cached RowBucket
-        if (cacheIter != null) {
-          if (cacheIter.hasNext()) {
-            r = cacheIter.next();
-            // Convert full event (key + metrics) to key
-            Map<String, Object> key = MovingAverageHelper.getDimKeyFromRow(dims, r);
-            seenKeys.add(key);
-            r = computeMovingAverage((MapBasedRow) r, false);
-            if (r != null) {
-              return r;
-            } else {
-              throw new NoSuchElementException();
-            }
-          } else {
-            Set<Map<String, Object>> averagerKeys = new HashSet<>(averagers.keySet());
-            averagerKeys.removeAll(seenKeys);
-            averagersKeysIter = averagerKeys.iterator();
-            cacheIter = null;
-          }
-        }
-
-        // return empty rows for unseen dimension combinations
-        if (averagersKeysIter != null) {
-          while (averagersKeysIter.hasNext()) {
-            Map<String, Object> dims = averagersKeysIter.next();
-            Map<String, Object> emptyEventsCopy = new HashMap<>(emptyEvents);
-
-            // Convert key to a full dummy event (key + dummy metrics).
-            dims.forEach((dim, value) -> emptyEventsCopy.put(dim, value));
-
-            r = computeMovingAverage(new MapBasedRow(cache.getDateTime(), emptyEventsCopy), true);
-            if (r != null) {
-              return r;
-            }
-          }
-
-          seenKeys.clear();
-          averagersKeysIter = null;
-          cache = null;
-        }
-
-        if (cacheIter == null && yielder.isDone()) {
-          // we should never get here.
-          // For some reason, there is no more work to do,
-          // so continuing to iterate would loop forever.
-          return null;
-        }
-      }
-    }
-
-    /**
-     * Compute and add any moving average columns.
-     *

-     * Normally, the row passed in will be added to all the {@link Averager}s and then results pulled
-     * from each averager. If skip is true, then the incoming row is actually a dummy value due to
-     * no data being present for this dimension combination in the current bucket. When this happens,
-     * {@link Averager#skip()} should be called instead of {@link Averager#addElement(Map, Map)} to force proper
-     * decaying of the average values.
-     *
-     *

Usually, the contents of key will be contained by the row R being passed in, but in the case of a - * dummy row, it's possible that the dimensions will be known but the row empty. Hence, the values are - * passed as two separate arguments. - * - * @param r The Row to operate on - * @param skip Indicates whether skip or add should be called - * - * @return The updated row containing averager results, or null if no averagers computed a result - */ - @Nullable - private Row computeMovingAverage(MapBasedRow r, boolean skip) - { - Map event = r.getEvent(); - Map result = new HashMap<>(event); - Map key = MovingAverageHelper.getDimKeyFromRow(dims, r); - - List> avg = averagers.get(key); - - // Initialize key's averagers. - if (avg == null) { - avg = averagerFactories.stream().map(af -> af.createAverager()).collect(Collectors.toList()); - averagers.put(key, avg); - } - - if (!skip) { - avg.forEach(af -> af.addElement(event, aggMap)); - } else { - avg.forEach(af -> af.skip()); - } - - avg.forEach(af -> result.put(af.getName(), af.getResult())); - - // At least one non-dimension value must be in the record for it to be valid. - if (result.entrySet().stream().anyMatch(e -> !key.containsKey(e.getKey()) && e.getValue() != null)) { - result.putAll(event); - return new MapBasedRow(r.getTimestamp(), result); - } else { - // No averagers returned anything. All buckets must be empty. - // skip this row. - return null; - } - } - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java deleted file mode 100644 index 5ac36de51047..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQuery.java +++ /dev/null @@ -1,387 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage; - - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.google.common.base.Function; -import com.google.common.base.Functions; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterables; -import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.Row; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.java.util.common.granularity.Granularity; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.guava.Sequences; -import org.apache.druid.query.BaseQuery; -import org.apache.druid.query.DataSource; -import org.apache.druid.query.Query; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.dimension.DimensionSpec; -import org.apache.druid.query.filter.DimFilter; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.query.groupby.having.HavingSpec; -import org.apache.druid.query.groupby.orderby.LimitSpec; -import org.apache.druid.query.groupby.orderby.NoopLimitSpec; -import org.apache.druid.query.movingaverage.averagers.AveragerFactory; -import org.apache.druid.query.spec.QuerySegmentSpec; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * Class that defines druid MovingAverage query fields - */ -@JsonTypeName("movingAverage") -public class MovingAverageQuery extends BaseQuery -{ - - public static final String MOVING_AVG_QUERY_TYPE = "movingAverage"; - public static final String CTX_KEY_SORT_BY_DIMS_FIRST = "sortByDimsFirst"; - - private final LimitSpec limitSpec; - private final HavingSpec havingSpec; - private final DimFilter dimFilter; - private final Granularity granularity; - private final List dimensions; - private final List aggregatorSpecs; - private final List postAggregatorSpecs; - private final List> averagerSpecs; - private final List postAveragerSpecs; - - /** - * This GroupByQuery is used by {@link #applyLimit(Sequence)} to convert between Rows and ResultRows. - */ - private final GroupByQuery groupByQueryForLimitSpec; - - /** - * This Function is used by {@link #applyLimit(Sequence)} to apply having and limit specs. - */ - private final Function, Sequence> limitFn; - - @JsonCreator - public MovingAverageQuery( - @JsonProperty("dataSource") DataSource dataSource, - @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec, - @JsonProperty("filter") DimFilter dimFilter, - @JsonProperty("granularity") Granularity granularity, - @JsonProperty("dimensions") List dimensions, - @JsonProperty("aggregations") List aggregatorSpecs, - @JsonProperty("postAggregations") List postAggregatorSpecs, - @JsonProperty("having") HavingSpec havingSpec, - @JsonProperty("averagers") List> averagerSpecs, - @JsonProperty("postAveragers") List postAveragerSpecs, - @JsonProperty("limitSpec") LimitSpec limitSpec, - @JsonProperty("context") Map context - ) - { - super(dataSource, querySegmentSpec, false, context); - - //TBD: Implement null awareness to respect the contract of this flag. 
- Preconditions.checkArgument( - NullHandling.replaceWithDefault(), - "movingAverage does not support druid.generic.useDefaultValueForNull=false" - ); - - this.dimFilter = dimFilter; - this.granularity = granularity; - this.dimensions = dimensions == null ? ImmutableList.of() : dimensions; - for (DimensionSpec spec : this.dimensions) { - Preconditions.checkArgument(spec != null, "dimensions has null DimensionSpec"); - } - this.aggregatorSpecs = aggregatorSpecs == null ? ImmutableList.of() : aggregatorSpecs; - this.postAggregatorSpecs = postAggregatorSpecs == null ? ImmutableList.of() : postAggregatorSpecs; - this.averagerSpecs = averagerSpecs == null ? ImmutableList.of() : averagerSpecs; - this.postAveragerSpecs = postAveragerSpecs == null ? ImmutableList.of() : postAveragerSpecs; - this.havingSpec = havingSpec; - this.limitSpec = (limitSpec == null) ? NoopLimitSpec.INSTANCE : limitSpec; - - Preconditions.checkNotNull(this.granularity, "Must specify a granularity"); - - verifyOutputNames(this.dimensions, this.aggregatorSpecs, this.postAggregatorSpecs); - - // build combined list of aggregators and averagers so that limit spec building is happy - List combinedAggregatorSpecs = new ArrayList<>(this.aggregatorSpecs); - for (AveragerFactory avg : this.averagerSpecs) { - combinedAggregatorSpecs.add(new AveragerFactoryWrapper(avg, "")); - } - - this.groupByQueryForLimitSpec = GroupByQuery - .builder() - .setDataSource(dataSource) - .setInterval(getQuerySegmentSpec()) - .setDimensions(this.dimensions) - .setAggregatorSpecs(combinedAggregatorSpecs) - .setPostAggregatorSpecs( - ImmutableList.copyOf(Iterables.concat(this.postAggregatorSpecs, this.postAveragerSpecs)) - ) - .setGranularity(this.granularity) - .overrideContext(ImmutableMap.of(GroupByQuery.CTX_KEY_SORT_BY_DIMS_FIRST, true)) - .build(); - - Function, Sequence> postProcFn = this.limitSpec.build(groupByQueryForLimitSpec); - - if (havingSpec != null) { - postProcFn = Functions.compose( - postProcFn, - sequence -> Sequences.filter(sequence, MovingAverageQuery.this.havingSpec::eval) - ); - } - - this.limitFn = postProcFn; - } - - private static void verifyOutputNames( - List dimensions, - List aggregators, - List postAggregators - ) - { - - final Set outputNames = new HashSet<>(); - for (DimensionSpec dimension : dimensions) { - if (!outputNames.add(dimension.getOutputName())) { - throw new IAE("Duplicate output name[%s]", dimension.getOutputName()); - } - } - - for (AggregatorFactory aggregator : aggregators) { - if (!outputNames.add(aggregator.getName())) { - throw new IAE("Duplicate output name[%s]", aggregator.getName()); - } - } - - for (PostAggregator postAggregator : postAggregators) { - if (!outputNames.add(postAggregator.getName())) { - throw new IAE("Duplicate output name[%s]", postAggregator.getName()); - } - } - } - - /** - * A private constructor that avoids all of the various state checks. Used by the with*() methods where the checks - * have already passed in order for the object to exist. 
- */ - private MovingAverageQuery( - DataSource dataSource, - QuerySegmentSpec querySegmentSpec, - DimFilter dimFilter, - Granularity granularity, - List dimensions, - List aggregatorSpecs, - List> averagerSpecs, - List postAggregatorSpecs, - List postAveragerSpecs, - HavingSpec havingSpec, - LimitSpec orderBySpec, - GroupByQuery groupByQueryForLimitSpec, - Function, Sequence> limitFn, - Map context - ) - { - super(dataSource, querySegmentSpec, false, context); - - this.dimFilter = dimFilter; - this.granularity = granularity; - this.dimensions = dimensions; - this.aggregatorSpecs = aggregatorSpecs; - this.averagerSpecs = averagerSpecs; - this.postAggregatorSpecs = postAggregatorSpecs; - this.postAveragerSpecs = postAveragerSpecs; - this.havingSpec = havingSpec; - this.limitSpec = orderBySpec; - this.groupByQueryForLimitSpec = groupByQueryForLimitSpec; - this.limitFn = limitFn; - } - - @Override - public boolean hasFilters() - { - return dimFilter != null; - } - - @Override - public String getType() - { - return MOVING_AVG_QUERY_TYPE; - } - - @JsonIgnore - public boolean getContextSortByDimsFirst() - { - return getContextBoolean(CTX_KEY_SORT_BY_DIMS_FIRST, false); - } - - @Override - @JsonProperty - public DimFilter getFilter() - { - return dimFilter; - } - - @Override - @JsonProperty - public Granularity getGranularity() - { - return granularity; - } - - @JsonProperty - public List getDimensions() - { - return dimensions; - } - - @JsonProperty("aggregations") - public List getAggregatorSpecs() - { - return aggregatorSpecs; - } - - @JsonProperty("averagers") - public List> getAveragerSpecs() - { - return averagerSpecs; - } - - @JsonProperty("postAggregations") - public List getPostAggregatorSpecs() - { - return postAggregatorSpecs; - } - - @JsonProperty("postAveragers") - public List getPostAveragerSpecs() - { - return postAveragerSpecs; - } - - @JsonProperty("having") - public HavingSpec getHavingSpec() - { - return havingSpec; - } - - @JsonProperty - public LimitSpec getLimitSpec() - { - return limitSpec; - } - - @Override - public MovingAverageQuery withOverriddenContext(Map contextOverride) - { - return new MovingAverageQuery( - getDataSource(), - getQuerySegmentSpec(), - dimFilter, - granularity, - dimensions, - aggregatorSpecs, - averagerSpecs, - postAggregatorSpecs, - postAveragerSpecs, - havingSpec, - limitSpec, - groupByQueryForLimitSpec, - limitFn, - computeOverridenContext(contextOverride) - ); - } - - @Override - public MovingAverageQuery withQuerySegmentSpec(QuerySegmentSpec spec) - { - return new MovingAverageQuery( - getDataSource(), - spec, - dimFilter, - granularity, - dimensions, - aggregatorSpecs, - averagerSpecs, - postAggregatorSpecs, - postAveragerSpecs, - havingSpec, - limitSpec, - groupByQueryForLimitSpec, - limitFn, - getContext() - ); - } - - @Override - public Query withDataSource(DataSource dataSource) - { - return new MovingAverageQuery( - dataSource, - getQuerySegmentSpec(), - dimFilter, - granularity, - dimensions, - aggregatorSpecs, - averagerSpecs, - postAggregatorSpecs, - postAveragerSpecs, - havingSpec, - limitSpec, - groupByQueryForLimitSpec, - limitFn, - getContext() - ); - } - - public Query withPostAveragers(List postAveragerSpecs) - { - return new MovingAverageQuery( - getDataSource(), - getQuerySegmentSpec(), - dimFilter, - granularity, - dimensions, - aggregatorSpecs, - averagerSpecs, - postAggregatorSpecs, - postAveragerSpecs, - havingSpec, - limitSpec, - groupByQueryForLimitSpec, - limitFn, - getContext() - ); - } - - public Sequence 
applyLimit(Sequence results) - { - return limitFn.apply(results.map(row -> ResultRow.fromLegacyRow(row, groupByQueryForLimitSpec))) - .map(row -> row.toMapBasedRow(groupByQueryForLimitSpec)); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetrics.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetrics.java deleted file mode 100644 index 6b9f39ad0f2e..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetrics.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import org.apache.druid.query.QueryMetrics; - -public interface MovingAverageQueryMetrics extends QueryMetrics -{ - /** - * Sets the size of {@link MovingAverageQuery#getDimensions()} of the given query as dimension. - */ - void numDimensions(MovingAverageQuery query); - - /** - * Sets the number of metrics of the given groupBy query as dimension. - */ - void numMetrics(MovingAverageQuery query); - - /** - * Sets the number of "complex" metrics of the given groupBy query as dimension. By default it is assumed that - * "complex" metric is a metric of not long or double type, but it could be redefined in the implementation of this - * method. - */ - void numComplexMetrics(MovingAverageQuery query); -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetricsFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetricsFactory.java deleted file mode 100644 index db344a0f0ec5..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryMetricsFactory.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -/** - * Implementations could be injected using - *

- * PolyBind - * .optionBinder(binder, Key.get(MovingAverageQueryMetricsFactory.class)) - * .addBinding("myCustomMovingAverageQueryMetricsFactory") - * .to(MyCustomMovingAverageQueryMetricsFactory.class); - *

- * And then setting property: - * druid.query.movingavgquery.queryMetricsFactory=myCustomMovingAverageQueryMetricsFactory - */ -public interface MovingAverageQueryMetricsFactory -{ - MovingAverageQueryMetrics makeMetrics(); -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java deleted file mode 100644 index 9655678680da..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryModule.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.inject.Binder; -import com.google.inject.multibindings.MapBinder; -import org.apache.druid.guice.DruidBinders; -import org.apache.druid.guice.LazySingleton; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryToolChest; - -import java.util.Collections; -import java.util.List; - -public class MovingAverageQueryModule implements DruidModule -{ - - @Override - public void configure(Binder binder) - { - MapBinder, QueryToolChest> toolChests = DruidBinders.queryToolChestBinder(binder); - - //Bind the query toolchest to the query class and add the binding to toolchest - toolChests.addBinding(MovingAverageQuery.class).to(MovingAverageQueryToolChest.class); - - //Bind the query toolchest to binder - binder.bind(MovingAverageQueryToolChest.class).in(LazySingleton.class); - } - - @Override - public List getJacksonModules() - { - return Collections.singletonList(new SimpleModule("MovingAverageQueryModule") - .registerSubtypes(new NamedType( - MovingAverageQuery.class, - "movingAverage" - ))); - } - -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryRunner.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryRunner.java deleted file mode 100644 index 3d704dea3a0a..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryRunner.java +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage;
-
-import com.google.common.base.Function;
-import com.google.common.base.Throwables;
-import com.google.common.collect.ImmutableMap;
-import org.apache.druid.data.input.MapBasedRow;
-import org.apache.druid.data.input.Row;
-import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.java.util.common.granularity.PeriodGranularity;
-import org.apache.druid.java.util.common.guava.Sequence;
-import org.apache.druid.java.util.common.guava.Sequences;
-import org.apache.druid.query.DataSource;
-import org.apache.druid.query.QueryContexts;
-import org.apache.druid.query.QueryDataSource;
-import org.apache.druid.query.QueryPlus;
-import org.apache.druid.query.QueryRunner;
-import org.apache.druid.query.QuerySegmentWalker;
-import org.apache.druid.query.Result;
-import org.apache.druid.query.TableDataSource;
-import org.apache.druid.query.UnionDataSource;
-import org.apache.druid.query.context.ResponseContext;
-import org.apache.druid.query.groupby.GroupByQuery;
-import org.apache.druid.query.groupby.ResultRow;
-import org.apache.druid.query.movingaverage.averagers.AveragerFactory;
-import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
-import org.apache.druid.query.timeseries.TimeseriesQuery;
-import org.apache.druid.query.timeseries.TimeseriesResultValue;
-import org.apache.druid.server.QueryStats;
-import org.apache.druid.server.RequestLogLine;
-import org.apache.druid.server.log.RequestLogger;
-import org.joda.time.Interval;
-import org.joda.time.Period;
-
-import javax.annotation.Nullable;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.stream.Collectors;
-
-/**
- * The QueryRunner for MovingAverage queries.
- * High level flow:
- * 1. Invokes an inner groupBy query (or a timeseries query when there are no dimensions) to get aggregations/post-aggregations.
- * 2. The result is passed to {@link RowBucketIterable}, which groups rows of all dimension combinations into period-based (e.g. daily) buckets of rows ({@link RowBucket}).
- * 3. The sequence is passed to {@link MovingAverageIterable}, which performs the main part of the query: adding averager computations to the records.
- * 4. Finishes up by applying post averagers, removing redundant dates, and applying the post phases (having, sorting, limits).
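Condensed into code, that flow reads roughly as follows (a sketch reusing the names from run() below; runInnerQuery stands in for the groupBy/timeseries step and is hypothetical):

    Sequence<Row> resultsSeq = runInnerQuery(maq);                                          // step 1 (placeholder)
    Sequence<RowBucket> buckets =
        Sequences.simple(new RowBucketIterable(resultsSeq, intervals, period));             // step 2
    Sequence<Row> averaged = Sequences.simple(new MovingAverageIterable(
        buckets, maq.getDimensions(), maq.getAveragerSpecs(),
        maq.getPostAggregatorSpecs(), maq.getAggregatorSpecs()));                           // step 3
    Sequence<Row> withPostAveragers =
        Sequences.map(averaged, new PostAveragerAggregatorCalculator(maq));                 // step 4: post averagers
    Sequence<Row> trimmed = Sequences.filter(
        withPostAveragers,
        row -> maq.getIntervals().stream().anyMatch(i -> i.contains(row.getTimestamp()))); // step 4: drop warm-up rows
    Sequence<Row> finished = maq.applyLimit(trimmed);                                       // step 4: having/sort/limit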
- */
-public class MovingAverageQueryRunner implements QueryRunner<Row>
-{
-  private final QuerySegmentWalker walker;
-  private final RequestLogger requestLogger;
-
-  public MovingAverageQueryRunner(
-      @Nullable QuerySegmentWalker walker,
-      RequestLogger requestLogger
-  )
-  {
-    this.walker = walker;
-    this.requestLogger = requestLogger;
-  }
-
-  @Override
-  public Sequence<Row> run(QueryPlus<Row> query, ResponseContext responseContext)
-  {
-
-    MovingAverageQuery maq = (MovingAverageQuery) query.getQuery();
-    List<Interval> intervals;
-    final Period period;
-
-    // Get the largest bucket from the list of averagers
-    Optional<Integer> opt =
-        maq.getAveragerSpecs().stream().map(AveragerFactory::getNumBuckets).max(Integer::compare);
-    int buckets = opt.orElse(0);
-
-    // Extend the interval beginning by (buckets - 1) periods
-    if (maq.getGranularity() instanceof PeriodGranularity) {
-      period = ((PeriodGranularity) maq.getGranularity()).getPeriod();
-      int offset = buckets <= 0 ? 0 : (1 - buckets);
-      intervals = maq.getIntervals()
-                     .stream()
-                     .map(i -> new Interval(i.getStart().withPeriodAdded(period, offset), i.getEnd()))
-                     .collect(Collectors.toList());
-    } else {
-      throw new ISE("Only PeriodGranularity is supported for movingAverage queries");
-    }
-
-    Sequence<Row> resultsSeq;
-    DataSource dataSource = maq.getDataSource();
-    if (maq.getDimensions() != null && !maq.getDimensions().isEmpty() &&
-        (dataSource instanceof TableDataSource || dataSource instanceof UnionDataSource ||
-         dataSource instanceof QueryDataSource)) {
-      // build groupBy query from movingAverage query
-      GroupByQuery.Builder builder = GroupByQuery.builder()
-                                                 .setDataSource(dataSource)
-                                                 .setInterval(intervals)
-                                                 .setDimFilter(maq.getFilter())
-                                                 .setGranularity(maq.getGranularity())
-                                                 .setDimensions(maq.getDimensions())
-                                                 .setAggregatorSpecs(maq.getAggregatorSpecs())
-                                                 .setPostAggregatorSpecs(maq.getPostAggregatorSpecs())
-                                                 .setContext(maq.getContext());
-      GroupByQuery gbq = builder.build();
-
-      ResponseContext gbqResponseContext = ResponseContext.createEmpty();
-      gbqResponseContext.put(
-          ResponseContext.Key.QUERY_FAIL_DEADLINE_MILLIS,
-          System.currentTimeMillis() + QueryContexts.getTimeout(gbq)
-      );
-      gbqResponseContext.put(ResponseContext.Key.QUERY_TOTAL_BYTES_GATHERED, new AtomicLong());
-
-      Sequence<ResultRow> results = gbq.getRunner(walker).run(QueryPlus.wrap(gbq), gbqResponseContext);
-      try {
-        // use localhost for remote address
-        requestLogger.logNativeQuery(RequestLogLine.forNative(
-            gbq,
-            DateTimes.nowUtc(),
-            "127.0.0.1",
-            new QueryStats(
-                ImmutableMap.of(
-                    "query/time", 0,
-                    "query/bytes", 0,
-                    "success", true
-                ))
-        ));
-      }
-      catch (Exception e) {
-        throw Throwables.propagate(e);
-      }
-
-      resultsSeq = results.map(row -> row.toMapBasedRow(gbq));
-    } else {
-      // no dimensions, so optimize this as a TimeSeries
-      TimeseriesQuery tsq = new TimeseriesQuery(
-          dataSource,
-          new MultipleIntervalSegmentSpec(intervals),
-          false,
-          null,
-          maq.getFilter(),
-          maq.getGranularity(),
-          maq.getAggregatorSpecs(),
-          maq.getPostAggregatorSpecs(),
-          0,
-          maq.getContext()
-      );
-      ResponseContext tsqResponseContext = ResponseContext.createEmpty();
-      tsqResponseContext.put(
-          ResponseContext.Key.QUERY_FAIL_DEADLINE_MILLIS,
-          System.currentTimeMillis() + QueryContexts.getTimeout(tsq)
-      );
-      tsqResponseContext.put(ResponseContext.Key.QUERY_TOTAL_BYTES_GATHERED, new AtomicLong());
-
-      Sequence<Result<TimeseriesResultValue>> results = tsq.getRunner(walker).run(QueryPlus.wrap(tsq), tsqResponseContext);
-      try {
-        // use localhost for remote address
-        requestLogger.logNativeQuery(RequestLogLine.forNative(
-            tsq,
DateTimes.nowUtc(), - "127.0.0.1", - new QueryStats( - ImmutableMap.of( - "query/time", 0, - "query/bytes", 0, - "success", true - )) - )); - } - catch (Exception e) { - throw Throwables.propagate(e); - } - - resultsSeq = Sequences.map(results, new TimeseriesResultToRow()); - } - - // Process into period buckets - Sequence bucketedMovingAvgResults = - Sequences.simple(new RowBucketIterable(resultsSeq, intervals, period)); - - // Apply the windows analysis functions - Sequence movingAvgResults = Sequences.simple( - new MovingAverageIterable( - bucketedMovingAvgResults, - maq.getDimensions(), - maq.getAveragerSpecs(), - maq.getPostAggregatorSpecs(), - maq.getAggregatorSpecs() - ) - ); - - // Apply any postAveragers - Sequence movingAvgResultsWithPostAveragers = - Sequences.map(movingAvgResults, new PostAveragerAggregatorCalculator(maq)); - - // remove rows outside the reporting window - List reportingIntervals = maq.getIntervals(); - movingAvgResults = - Sequences.filter( - movingAvgResultsWithPostAveragers, - row -> reportingIntervals.stream().anyMatch(i -> i.contains(row.getTimestamp())) - ); - - // Apply any having, sorting, and limits - movingAvgResults = maq.applyLimit(movingAvgResults); - - return movingAvgResults; - - } - - static class TimeseriesResultToRow implements Function, Row> - { - @Override - public Row apply(Result lookbackResult) - { - Map event = lookbackResult.getValue().getBaseObject(); - MapBasedRow row = new MapBasedRow(lookbackResult.getTimestamp(), event); - return row; - } - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryToolChest.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryToolChest.java deleted file mode 100644 index c421c2933add..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/MovingAverageQueryToolChest.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.google.common.base.Function; -import com.google.common.base.Functions; -import com.google.inject.Inject; -import com.google.inject.Provider; -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.data.input.Row; -import org.apache.druid.query.QueryMetrics; -import org.apache.druid.query.QueryRunner; -import org.apache.druid.query.QuerySegmentWalker; -import org.apache.druid.query.QueryToolChest; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.MetricManipulationFn; -import org.apache.druid.query.movingaverage.averagers.AveragerFactory; -import org.apache.druid.server.log.RequestLogger; - -import java.util.HashMap; -import java.util.Map; - -/** - * The QueryToolChest for MovingAverage Query - */ -public class MovingAverageQueryToolChest extends QueryToolChest -{ - - private final Provider walkerProvider; - private final RequestLogger requestLogger; - - private final MovingAverageQueryMetricsFactory movingAverageQueryMetricsFactory; - - /** - * Construct a MovingAverageQueryToolChest for processing moving-average queries. - * MovingAverage queries are expected to be processed on broker nodes and never hit historical nodes. - * - * @param walkerProvider - * @param requestLogger - */ - @Inject - public MovingAverageQueryToolChest(Provider walkerProvider, RequestLogger requestLogger) - { - this.walkerProvider = walkerProvider; - this.requestLogger = requestLogger; - this.movingAverageQueryMetricsFactory = DefaultMovingAverageQueryMetricsFactory.instance(); - } - - @Override - public QueryRunner mergeResults(QueryRunner runner) - { - return new MovingAverageQueryRunner(walkerProvider.get(), requestLogger); - } - - @Override - public QueryMetrics makeMetrics(MovingAverageQuery query) - { - MovingAverageQueryMetrics movingAverageQueryMetrics = movingAverageQueryMetricsFactory.makeMetrics(); - movingAverageQueryMetrics.query(query); - return movingAverageQueryMetrics; - } - - @Override - public Function makePostComputeManipulatorFn(MovingAverageQuery query, MetricManipulationFn fn) - { - - return new Function() - { - - @Override - public Row apply(Row result) - { - MapBasedRow mRow = (MapBasedRow) result; - final Map values = new HashMap<>(mRow.getEvent()); - - for (AggregatorFactory agg : query.getAggregatorSpecs()) { - Object aggVal = values.get(agg.getName()); - if (aggVal != null) { - values.put(agg.getName(), fn.manipulate(agg, aggVal)); - } else { - values.put(agg.getName(), null); - } - } - - for (AveragerFactory avg : query.getAveragerSpecs()) { - Object aggVal = values.get(avg.getName()); - if (aggVal != null) { - values.put(avg.getName(), fn.manipulate(new AveragerFactoryWrapper<>(avg, avg.getName() + "_"), aggVal)); - } else { - values.put(avg.getName(), null); - } - } - - return new MapBasedRow(result.getTimestamp(), values); - - } - }; - - } - - - @Override - public TypeReference getResultTypeReference() - { - return new TypeReference() - { - }; - } - - @Override - public Function makePreComputeManipulatorFn(MovingAverageQuery query, MetricManipulationFn fn) - { - return Functions.identity(); - } - -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculator.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculator.java deleted file mode 
100644 index 5af34871dc4c..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculator.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import com.google.common.base.Function; -import com.google.common.collect.Maps; -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.data.input.Row; -import org.apache.druid.query.aggregation.PostAggregator; - -import java.util.List; -import java.util.Map; - -/** - * Function that can be applied to a Sequence to calculate PostAverager results - */ -public class PostAveragerAggregatorCalculator implements Function -{ - - private final List postAveragers; - - public PostAveragerAggregatorCalculator(MovingAverageQuery maq) - { - this.postAveragers = maq.getPostAveragerSpecs(); - } - - @Override - public Row apply(final Row row) - { - if (postAveragers.isEmpty()) { - return row; - } - - final Map newMap; - - newMap = Maps.newLinkedHashMap(((MapBasedRow) row).getEvent()); - - for (PostAggregator postAverager : postAveragers) { - boolean allColsPresent = postAverager.getDependentFields().stream().allMatch(c -> newMap.get(c) != null); - newMap.put(postAverager.getName(), allColsPresent ? postAverager.compute(newMap) : null); - } - - return new MapBasedRow(row.getTimestamp(), newMap); - } - -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucket.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucket.java deleted file mode 100644 index fa614fa4218e..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucket.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.apache.druid.query.movingaverage;
-
-import org.apache.druid.data.input.Row;
-import org.joda.time.DateTime;
-
-import java.util.List;
-
-/**
- * Represents the set of rows for a specific date.
- * Each RowBucket is an element in a linked list (it holds a pointer to the next RowBucket).
- */
-public class RowBucket
-{
-  private final DateTime dateTime;
-  private final List<Row> rows;
-  private RowBucket nextBucket = null;
-
-  public RowBucket(DateTime dateTime, List<Row> rows)
-  {
-    this.dateTime = dateTime;
-    this.rows = rows;
-  }
-
-  public DateTime getDateTime()
-  {
-    return dateTime;
-  }
-
-  public List<Row> getRows()
-  {
-    return rows;
-  }
-
-  public RowBucket getNextBucket()
-  {
-    return nextBucket;
-  }
-
-  public void setNextBucket(RowBucket nextRow)
-  {
-    this.nextBucket = nextRow;
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java
deleted file mode 100644
index 308d5551c866..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage;
-
-import org.apache.druid.data.input.Row;
-import org.apache.druid.java.util.common.guava.Sequence;
-import org.apache.druid.java.util.common.guava.Yielder;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-import org.joda.time.Period;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * An iterable that takes a {@link Sequence} of rows and generates {@link RowBucket}s from it.
- *
- * It delegates to {@link BucketingAccumulator} for the naive bucketing into periods,
- * but adds logic to cover edge cases, such as:
- * - Handling periods with no rows.
- * - Handling the last record.
- *
- * Note that this is driven by {@link MovingAverageIterable.MovingAverageIterator#internalNext()},
- * and the logic for skipping records arises from the interaction between the two classes.
- */
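A small usage sketch (assuming resultsSeq, intervals and period as in MovingAverageQueryRunner; the println is illustrative only). Periods without input rows surface as buckets with an empty row list, which is what lets the averagers upstream decay via skip() rather than stall:

    for (RowBucket bucket : new RowBucketIterable(resultsSeq, intervals, period)) {
      // an empty getRows() list marks a period that had no data
      System.out.println(bucket.getDateTime() + " -> " + bucket.getRows().size() + " rows");
    }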
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java
deleted file mode 100644
index 308d5551c866..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/RowBucketIterable.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage;
-
-import org.apache.druid.data.input.Row;
-import org.apache.druid.java.util.common.guava.Sequence;
-import org.apache.druid.java.util.common.guava.Yielder;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-import org.joda.time.Period;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * An iterable which takes a list of rows ({@link Sequence}) and generates a new list of {@link RowBucket}s from it.
- *
- * It calls {@link BucketingAccumulator} for naive bucketing into buckets of periods,
- * but applies more subtle logic to cover edge cases, such as:
- * - Handling periods with no rows.
- * - Handling the last record.
- *
- * Please notice this is being called by {@link MovingAverageIterable.MovingAverageIterator#internalNext()}
- * and the logic for skipping records arises from the interaction between the two classes.
- */
-public class RowBucketIterable implements Iterable<RowBucket>
-{
-
-  public final Sequence<Row> seq;
-  private List<Interval> intervals;
-  private Period period;
-
-  public RowBucketIterable(Sequence<Row> seq, List<Interval> intervals, Period period)
-  {
-    this.seq = seq;
-    this.period = period;
-    this.intervals = intervals;
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Iterable#iterator()
-   */
-  @Override
-  public Iterator<RowBucket> iterator()
-  {
-    return new RowBucketIterator(seq, intervals, period);
-  }
-
-  static class RowBucketIterator implements Iterator<RowBucket>
-  {
-    private Yielder<RowBucket> yielder;
-    private DateTime endTime;
-    private DateTime expectedBucket;
-    private Period period;
-    private int intervalIndex = 0;
-    private List<Interval> intervals;
-    private boolean processedLastRow = false;
-    private boolean processedExtraRow = false;
-
-    public RowBucketIterator(Sequence<Row> rows, List<Interval> intervals, Period period)
-    {
-      this.period = period;
-      this.intervals = intervals;
-      expectedBucket = intervals.get(intervalIndex).getStart();
-      endTime = intervals.get(intervals.size() - 1).getEnd();
-      yielder = rows.toYielder(null, new BucketingAccumulator());
-    }
-
-    /* (non-Javadoc)
-     * @see java.util.Iterator#hasNext()
-     */
-    @Override
-    public boolean hasNext()
-    {
-      return expectedBucket.compareTo(endTime) < 0 || !this.yielder.isDone();
-    }
-
-    /* (non-Javadoc)
-     * @see java.util.Iterator#next()
-     */
-    @Override
-    public RowBucket next()
-    {
-      RowBucket currentBucket = yielder.get();
-
-      // Iterate to next interval
-      if (expectedBucket.compareTo(intervals.get(intervalIndex).getEnd()) >= 0) {
-        intervalIndex++;
-        if (intervalIndex < intervals.size()) {
-          expectedBucket = intervals.get(intervalIndex).getStart();
-        }
-      }
-      // currentBucket > expectedBucket (No rows found for period). Iterate to next period.
-      if (currentBucket != null && currentBucket.getDateTime().compareTo(expectedBucket) > 0) {
-        currentBucket = new RowBucket(expectedBucket, Collections.emptyList());
-        expectedBucket = expectedBucket.plus(period);
-        return currentBucket;
-      }
-
-      if (!yielder.isDone()) {
-        // standard case. return regular row
-        yielder = yielder.next(currentBucket);
-        expectedBucket = expectedBucket.plus(period);
-        return currentBucket;
-      } else if (!processedLastRow && yielder.get() != null && yielder.get().getNextBucket() == null) {
-        // yielder.isDone, processing last row
-        processedLastRow = true;
-        expectedBucket = expectedBucket.plus(period);
-        return currentBucket;
-      } else if (!processedExtraRow && yielder.get() != null && yielder.get().getNextBucket() != null) {
-        RowBucket lastRow = yielder.get().getNextBucket();
-
-        if (lastRow.getDateTime().compareTo(expectedBucket) > 0) {
-          lastRow = new RowBucket(expectedBucket, Collections.emptyList());
-          expectedBucket = expectedBucket.plus(period);
-          return lastRow;
-        }
-
-        // yielder is done, processing newBucket
-        processedExtraRow = true;
-        expectedBucket = expectedBucket.plus(period);
-        return lastRow;
-      } else if (expectedBucket.compareTo(endTime) < 0) {
-        // add any trailing blank rows
-        currentBucket = new RowBucket(expectedBucket, Collections.emptyList());
-        expectedBucket = expectedBucket.plus(period);
-        return currentBucket;
-      } else {
-        // we should never get here
-        throw new NoSuchElementException();
-      }
-
-    }
-  }
-
-}
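A usage sketch for the iterable above (not from the removed sources; rows and intervals are assumed to be in scope as a Sequence<Row> and a List<Interval>):

    // Group a sorted row sequence into one RowBucket per day.
    RowBucketIterable buckets = new RowBucketIterable(rows, intervals, Period.days(1));
    for (RowBucket bucket : buckets) {
      handle(bucket.getDateTime(), bucket.getRows()); // handle() is hypothetical
    }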
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/Averager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/Averager.java
deleted file mode 100644
index 506380cac1bb..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/Averager.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.apache.druid.query.aggregation.AggregatorFactory;
-
-import java.util.Map;
-
-/**
- * Interface for an averager
- *
- * @param <R> The return type of the averager
- */
-public interface Averager<R>
-{
-  /**
-   * Add a row to the window being operated on
-   *
-   * @param e      The row to add
-   * @param aggMap The Map of AggregatorFactory used to determine if the metric should be finalized
-   */
-  void addElement(Map<String, Object> e, Map<String, AggregatorFactory> aggMap);
-
-  /**
-   * There is a missing row, so record a missing entry in the window
-   */
-  void skip();
-
-  /**
-   * Compute the resulting "average" over the collected window
-   *
-   * @return the "average" over the window of buckets
-   */
-  R getResult();
-
-  /**
-   * @return the name
-   */
-  String getName();
-}
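The expected calling pattern for the interface above, sketched under the assumption of a caller that walks one window of period buckets (windowEvents and aggMap are hypothetical):

    for (Map<String, Object> event : windowEvents) {
      if (event != null) {
        averager.addElement(event, aggMap); // the bucket has a row
      } else {
        averager.skip();                    // missing period in the window
      }
    }
    Object result = averager.getResult();   // the windowed "average"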
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/AveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/AveragerFactory.java
deleted file mode 100644
index f605fb51606a..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/AveragerFactory.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * Interface representing Averager in the movingAverage query.
- *
- * @param <R> Type returned by the underlying averager.
- * @param <F> Type of finalized value.
- */
-@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-@JsonSubTypes(value = {
-    @JsonSubTypes.Type(name = "constant", value = ConstantAveragerFactory.class),
-    @JsonSubTypes.Type(name = "doubleMean", value = DoubleMeanAveragerFactory.class),
-    @JsonSubTypes.Type(name = "doubleMeanNoNulls", value = DoubleMeanNoNullAveragerFactory.class),
-    @JsonSubTypes.Type(name = "doubleSum", value = DoubleSumAveragerFactory.class),
-    @JsonSubTypes.Type(name = "doubleMax", value = DoubleMaxAveragerFactory.class),
-    @JsonSubTypes.Type(name = "doubleMin", value = DoubleMinAveragerFactory.class),
-    @JsonSubTypes.Type(name = "longMean", value = LongMeanAveragerFactory.class),
-    @JsonSubTypes.Type(name = "longMeanNoNulls", value = LongMeanNoNullAveragerFactory.class),
-    @JsonSubTypes.Type(name = "longSum", value = LongSumAveragerFactory.class),
-    @JsonSubTypes.Type(name = "longMax", value = LongMaxAveragerFactory.class),
-    @JsonSubTypes.Type(name = "longMin", value = LongMinAveragerFactory.class)
-})
-public interface AveragerFactory<R, F>
-{
-  int DEFAULT_PERIOD = 1;
-
-  /**
-   * Gets the column name that will be populated by the Averager
-   *
-   * @return The column name
-   */
-  String getName();
-
-  /**
-   * Returns the window size over which the averaging calculations will be
-   * performed. Size is computed in terms of buckets rather than absolute time.
-   *
-   * @return The window size
-   */
-  int getNumBuckets();
-
-  /**
-   * Returns the cycle size (number of periods to skip during averaging calculations).
-   *
-   * @return The cycle size
-   */
-  int getCycleSize();
-
-  /**
-   * Create an Averager for a specific dimension combination.
-   *
-   * @return The {@link Averager}
-   */
-  Averager<R> createAverager();
-
-  /**
-   * Gets the list of dependent fields that will be used by this Averager. Most
-   * {@link Averager}s depend on only a single field from the underlying query, but
-   * that is not required. This method allows the required fields to be communicated
-   * back to the main query so that validation to enforce the fields' presence can
-   * be accomplished.
-   *
-   * @return A list of field names
-   */
-  List<String> getDependentFields();
-
-  /**
-   * Returns a {@link Comparator} that can be used to compare result values for
-   * purposes of sorting the end result of the query.
-   *
-   * @return A {@link Comparator}
-   */
-  Comparator<R> getComparator();
-
-  /**
-   * Finalize result value.
-   *
-   * @param val the value to finalize.
-   *
-   * @return The finalized value.
-   */
-  F finalizeComputation(R val);
-}
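Since the type names above bind JSON specs to concrete factories, a "doubleMean" spec over a seven-bucket window corresponds to roughly the following construction (a sketch; the metric name "avgDelta7" and field name "delta" are hypothetical):

    AveragerFactory<Double, Double> factory =
        new DoubleMeanAveragerFactory("avgDelta7", 7, 1, "delta");
    Averager<Double> averager = factory.createAverager();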
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAverager.java
deleted file mode 100644
index 0c236b899ea2..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAverager.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.apache.druid.query.aggregation.AggregatorFactory;
-
-import java.lang.reflect.Array;
-import java.util.Map;
-
-/**
- * Common base class available for use by averagers. The base class implements methods that
- * capture incoming and skipped rows and store them in an array, to be used later for
- * calculating the actual value.
- *
- * @param <I> The type of intermediate value to be retrieved from the row and stored
- * @param <R> The type of result the averager is expected to produce
- */
-public abstract class BaseAverager<I, R> implements Averager<R>
-{
-
-  final int numBuckets;
-  final int cycleSize;
-  private final String name;
-  private final String fieldName;
-  final I[] buckets;
-  private int index;
-
-  /**
-   * {@link BaseAverager#startFrom} is needed because the `buckets` field is a fixed array, not a list.
-   * It makes computeResult() start from the correct bucket in the array.
-   */
-  int startFrom = 0;
-
-  /**
-   * @param storageType The class to use for storing intermediate values
-   * @param numBuckets  The number of buckets to include in the window being aggregated
-   * @param name        The name of the resulting metric
-   * @param fieldName   The field to extract from incoming rows and store in the window cache
-   * @param cycleSize   Cycle group size. Used to calculate day-of-week option. Default=1 (single element in group).
-   */
-  public BaseAverager(Class<I> storageType, int numBuckets, String name, String fieldName, int cycleSize)
-  {
-    this.numBuckets = numBuckets;
-    this.name = name;
-    this.fieldName = fieldName;
-    this.index = 0;
-    @SuppressWarnings("unchecked")
-    final I[] array = (I[]) Array.newInstance(storageType, numBuckets);
-    this.buckets = array;
-    this.cycleSize = cycleSize;
-  }
-
-
-  /* (non-Javadoc)
-   * @see Averager#addElement(java.util.Map, java.util.Map)
-   */
-  @SuppressWarnings("unchecked")
-  @Override
-  public void addElement(Map<String, Object> e, Map<String, AggregatorFactory> a)
-  {
-    Object metric = e.get(fieldName);
-    I finalMetric;
-    if (a.containsKey(fieldName)) {
-      AggregatorFactory af = a.get(fieldName);
-      finalMetric = metric != null ? (I) af.finalizeComputation(metric) : null;
-    } else {
-      finalMetric = (I) metric;
-    }
-    buckets[index++] = finalMetric;
-    index %= numBuckets;
-  }
-
-  /* (non-Javadoc)
-   * @see Averager#skip()
-   */
-  @Override
-  public void skip()
-  {
-    buckets[index++] = null;
-    index %= numBuckets;
-  }
-
-  /* (non-Javadoc)
-   * @see Averager#getResult()
-   */
-  @Override
-  public R getResult()
-  {
-    if (!hasData()) {
-      return null;
-    }
-    return computeResult();
-  }
-
-  /**
-   * Compute the result value to be returned by getResult.
-   *
-   * <p>This routine will only be called when there is valid data within the window
-   * and doesn't need to worry about detecting the case where no data should be returned.
-   *
-   * <p>
-   * The method typically should use {@link #getBuckets()} to retrieve the set of buckets
-   * within the window and then compute a value based on those. It should expect nulls within
-   * the array, indicating buckets where no row was found for the dimension combination. It is
-   * up to the actual implementation to determine how to evaluate those nulls.
-   *
-   * <p>
-   * The type returned is NOT required to be the same type as the intermediary value. For example,
-   * the intermediate value could be a Sketch, but the result a Long.
-   *
-   * @return the computed result
-   */
-  protected abstract R computeResult();

-
-  /* (non-Javadoc)
-   * @see Averager#getName()
-   */
-  @Override
-  public String getName()
-  {
-    return name;
-  }
-
-  /**
-   * Returns the fieldname to be extracted from any event rows passed in and stored
-   * for use computing the windowed function.
-   *
-   * @return the fieldName
-   */
-  public String getFieldName()
-  {
-    return fieldName;
-  }
-
-  /**
-   * @return the numBuckets
-   */
-  public int getNumBuckets()
-  {
-    return numBuckets;
-  }
-
-  /**
-   * @return the cycleSize
-   */
-  public int getCycleSize()
-  {
-    return cycleSize;
-  }
-
-  /**
-   * @return the array of buckets
-   */
-  protected I[] getBuckets()
-  {
-    return buckets;
-  }

-
-  /**
-   * Determines whether any data is present. If all the buckets are empty (not "0"), then
-   * no value should be returned from the Averager, as there were no valid rows within the window.
-   *
-   * @return true if any non-null values available
-   */
-  protected boolean hasData()
-  {
-    for (Object b : buckets) {
-      if (b != null) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-}
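To make the window semantics above concrete, a worked example with hypothetical values (cycleSize = 1, four buckets), contrasting the two mean variants defined later in this patch:

    // window contents after four addElement()/skip() calls: [1.0, 2.0, null, 4.0]
    // doubleMean        -> (1.0 + 2.0 + 0.0 + 4.0) / 4 = 1.75  (null bucket counted as zero)
    // doubleMeanNoNulls -> (1.0 + 2.0 + 4.0) / 3 ~= 2.33       (null bucket excluded)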

diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactory.java
deleted file mode 100644
index 831000fed5c1..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactory.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Common base class for AveragerFactories
- *
- * @param <R> Base type that the averager should return as a result
- * @param <F> Type that is returned from finalization
- */
-public abstract class BaseAveragerFactory<R, F> implements AveragerFactory<R, F>
-{
-
-  protected String name;
-  protected String fieldName;
-  protected int numBuckets;
-  protected int cycleSize;
-
-  /**
-   * Constructor.
-   *
-   * @param name       Name of the Averager
-   * @param numBuckets Number of buckets in the analysis window
-   * @param fieldName  Field from incoming events to include in the analysis
-   * @param cycleSize  Cycle group size. Used to calculate day-of-week option. Default=1 (single element in group).
-   */
-  public BaseAveragerFactory(String name, int numBuckets, String fieldName, Integer cycleSize)
-  {
-    this.name = name;
-    this.numBuckets = numBuckets;
-    this.fieldName = fieldName;
-    this.cycleSize = (cycleSize != null) ? cycleSize : DEFAULT_PERIOD;
-    Preconditions.checkNotNull(name, "Must have a valid, non-null averager name");
-    Preconditions.checkNotNull(fieldName, "Must have a valid, non-null field name");
-    Preconditions.checkArgument(this.cycleSize > 0, "Cycle size must be greater than zero");
-    Preconditions.checkArgument(numBuckets > 0, "Bucket size must be greater than zero");
-    Preconditions.checkArgument(!(this.cycleSize > numBuckets), "Cycle size must not be greater than the bucket size");
-    Preconditions.checkArgument(numBuckets % this.cycleSize == 0, "cycleSize must divide numBuckets without a remainder");
-  }
-
-  @Override
-  @JsonProperty
-  public String getName()
-  {
-    return name;
-  }
-
-  @JsonProperty
-  public String getFieldName()
-  {
-    return fieldName;
-  }
-
-  @Override
-  @JsonProperty("buckets")
-  public int getNumBuckets()
-  {
-    return numBuckets;
-  }
-
-  @Override
-  @JsonProperty("cycleSize")
-  public int getCycleSize()
-  {
-    return cycleSize;
-  }
-
-  @Override
-  public List<String> getDependentFields()
-  {
-    return Collections.singletonList(fieldName);
-  }
-
-  @SuppressWarnings("unchecked")
-  @Override
-  public F finalizeComputation(R val)
-  {
-    return (F) val;
-  }
-}
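The divisibility checks above imply the window is consumed in same-phase strides. For example (hypothetical values): numBuckets = 14 with cycleSize = 7 satisfies 14 % 7 == 0, and one computeResult() pass visits buckets (0 + startFrom) % 14 and (7 + startFrom) % 14, i.e. the same day of week from two consecutive weeks, with startFrom advancing the phase after each call.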
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ComparableAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ComparableAveragerFactory.java
deleted file mode 100644
index 0463d55b97da..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ComparableAveragerFactory.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import java.util.Comparator;
-
-/**
- * Base averager factory that adds a default natural-order comparator.
- *
- * @param <R> return type
- * @param <F> finalized type
- */
-public abstract class ComparableAveragerFactory<R extends Comparable<R>, F> extends BaseAveragerFactory<R, F>
-{
-  /**
-   * Constructor.
-   *
-   * @param name       Name of the Averager
-   * @param numBuckets Number of buckets in the analysis window
-   * @param fieldName  Field from incoming events to include in the analysis
-   * @param cycleSize  Cycle group size. Used to calculate day-of-week option. Default=1 (single element in group).
-   */
-  public ComparableAveragerFactory(String name, int numBuckets, String fieldName, Integer cycleSize)
-  {
-    super(name, numBuckets, fieldName, cycleSize);
-  }
-
-  @Override
-  public Comparator<R> getComparator()
-  {
-    return Comparator.naturalOrder();
-  }
-
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAverager.java
deleted file mode 100644
index bc76c99610f9..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAverager.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.apache.druid.query.aggregation.AggregatorFactory;
-
-import java.util.Map;
-
-/**
- * The constant averager. Created solely for incremental development and wiring things up.
- */
-public class ConstantAverager implements Averager<Float>
-{
-
-  private String name;
-  private float retval;
-
-  /**
-   * @param n
-   * @param name
-   * @param retval
-   */
-  public ConstantAverager(int n, String name, float retval)
-  {
-    this.name = name;
-    this.retval = retval;
-  }
-
-  /* (non-Javadoc)
-   * @see Averager#getResult()
-   */
-  @Override
-  public Float getResult()
-  {
-    return retval;
-  }
-
-  /* (non-Javadoc)
-   * @see Averager#getName()
-   */
-  @Override
-  public String getName()
-  {
-    return name;
-  }
-
-  /* (non-Javadoc)
-   * @see Averager#addElement(java.util.Map, java.util.Map)
-   */
-  @Override
-  public void addElement(Map<String, Object> e, Map<String, AggregatorFactory> a)
-  {
-    // since we return a constant, no need to read from the event
-  }
-
-  /* (non-Javadoc)
-   * @see Averager#skip()
-   */
-  @Override
-  public void skip()
-  {
-  }
-
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAveragerFactory.java
deleted file mode 100644
index 45339c37058b..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/ConstantAveragerFactory.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * Implementation of AveragerFactory created solely for incremental development
- */
-
-public class ConstantAveragerFactory implements AveragerFactory<Float, Float>
-{
-
-  private String name;
-  private int numBuckets;
-  private float retval;
-
-  @JsonCreator
-  public ConstantAveragerFactory(
-      @JsonProperty("name") String name,
-      @JsonProperty("buckets") int numBuckets,
-      @JsonProperty("retval") float retval
-  )
-  {
-    this.name = name;
-    this.numBuckets = numBuckets;
-    this.retval = retval;
-  }
-
-  @Override
-  @JsonProperty
-  public String getName()
-  {
-    return name;
-  }
-
-  @Override
-  @JsonProperty("buckets")
-  public int getNumBuckets()
-  {
-    return numBuckets;
-  }
-
-  @JsonProperty
-  public float getRetval()
-  {
-    return retval;
-  }
-
-  @Override
-  public Averager<Float> createAverager()
-  {
-    return new ConstantAverager(numBuckets, name, retval);
-  }
-
-  @Override
-  public List<String> getDependentFields()
-  {
-    return Collections.emptyList();
-  }
-
-  @Override
-  public Comparator<Float> getComparator()
-  {
-    return Comparator.naturalOrder();
-  }
-
-  @Override
-  public int getCycleSize()
-  {
-    return 1;
-  }
-
-  @Override
-  public Float finalizeComputation(Float val)
-  {
-    return val;
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAverager.java
deleted file mode 100644
index 5e25617025b6..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAverager.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.
- */ - -package org.apache.druid.query.movingaverage.averagers; - -public class DoubleMaxAverager extends BaseAverager -{ - - public DoubleMaxAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - double result = Double.NEGATIVE_INFINITY; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result = Double.max(result, (buckets[(i + startFrom) % numBuckets]).doubleValue()); - } - } - - startFrom++; - return result; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactory.java deleted file mode 100644 index 1e82f09e9e95..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class DoubleMaxAveragerFactory extends ComparableAveragerFactory -{ - - @JsonCreator - public DoubleMaxAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new DoubleMaxAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAverager.java deleted file mode 100644 index be9292c94c7b..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAverager.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -public class DoubleMeanAverager extends BaseAverager -{ - - public DoubleMeanAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - double result = 0.0; - int validBuckets = 0; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result += (buckets[(i + startFrom) % numBuckets]).doubleValue(); - } else { - result += 0.0; - } - validBuckets++; - } - - startFrom++; - return result / validBuckets; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactory.java deleted file mode 100644 index 58f544671a96..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class DoubleMeanAveragerFactory extends ComparableAveragerFactory -{ - - @JsonCreator - public DoubleMeanAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new DoubleMeanAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAverager.java deleted file mode 100644 index 573f12a9e8b8..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAverager.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -public class DoubleMeanNoNullAverager extends BaseAverager -{ - - public DoubleMeanNoNullAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - double result = 0.0; - int validBuckets = 0; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result += (buckets[(i + startFrom) % numBuckets]).doubleValue(); - validBuckets++; - } - } - - startFrom++; - return result / validBuckets; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactory.java deleted file mode 100644 index d6e11893a5e7..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class DoubleMeanNoNullAveragerFactory extends ComparableAveragerFactory -{ - @JsonCreator - public DoubleMeanNoNullAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new DoubleMeanNoNullAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAverager.java deleted file mode 100644 index d108feed0224..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAverager.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -public class DoubleMinAverager extends BaseAverager -{ - - public DoubleMinAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - double result = Double.POSITIVE_INFINITY; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result = Double.min(result, (buckets[(i + startFrom) % numBuckets]).doubleValue()); - } - } - - startFrom++; - return result; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactory.java deleted file mode 100644 index 35a783b2235d..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class DoubleMinAveragerFactory extends ComparableAveragerFactory -{ - @JsonCreator - public DoubleMinAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new DoubleMinAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAverager.java deleted file mode 100644 index 87523f40052b..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAverager.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -public class DoubleSumAverager extends BaseAverager -{ - public DoubleSumAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - double result = 0.0; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result += (buckets[(i + startFrom) % numBuckets]).doubleValue(); - } else { - result += 0.0; - } - } - - startFrom++; - return result; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactory.java deleted file mode 100644 index f26b2ab00116..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class DoubleSumAveragerFactory extends ComparableAveragerFactory -{ - - @JsonCreator - public DoubleSumAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new DoubleSumAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAverager.java deleted file mode 100644 index a45503ca58c1..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAverager.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -public class LongMaxAverager extends BaseAverager -{ - - public LongMaxAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Long computeResult() - { - long result = Long.MIN_VALUE; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result = Long.max(result, (buckets[(i + startFrom) % numBuckets]).longValue()); - } - } - - startFrom++; - return result; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactory.java deleted file mode 100644 index 847bbcb9e341..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class LongMaxAveragerFactory extends ComparableAveragerFactory -{ - @JsonCreator - public LongMaxAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new LongMaxAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAverager.java deleted file mode 100644 index a5919d727f78..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAverager.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -public class LongMeanAverager extends BaseAverager -{ - - public LongMeanAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - long result = 0; - int validBuckets = 0; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result += (buckets[(i + startFrom) % numBuckets]).longValue(); - } else { - result += 0; - } - validBuckets++; - } - - startFrom++; - return ((double) result) / validBuckets; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactory.java deleted file mode 100644 index d02e06d96173..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -public class LongMeanAveragerFactory extends ComparableAveragerFactory -{ - - @JsonCreator - public LongMeanAveragerFactory( - @JsonProperty("name") String name, - @JsonProperty("buckets") int numBuckets, - @JsonProperty("cycleSize") Integer cycleSize, - @JsonProperty("fieldName") String fieldName - ) - { - super(name, numBuckets, fieldName, cycleSize); - } - - @Override - public Averager createAverager() - { - return new LongMeanAverager(numBuckets, name, fieldName, cycleSize); - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAverager.java deleted file mode 100644 index ecdd17a6f265..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAverager.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -public class LongMeanNoNullAverager extends BaseAverager -{ - - public LongMeanNoNullAverager(int numBuckets, String name, String fieldName, int cycleSize) - { - super(Number.class, numBuckets, name, fieldName, cycleSize); - } - - @Override - protected Double computeResult() - { - long result = 0; - int validBuckets = 0; - - for (int i = 0; i < numBuckets; i += cycleSize) { - if (buckets[(i + startFrom) % numBuckets] != null) { - result += (buckets[(i + startFrom) % numBuckets]).longValue(); - validBuckets++; - } - } - - startFrom++; - return ((double) result) / validBuckets; - } -} diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java deleted file mode 100644 index 03ad7d1e654c..000000000000 --- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java
deleted file mode 100644
index 03ad7d1e654c..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LongMeanNoNullAveragerFactory extends ComparableAveragerFactory
-{
-
-  @JsonCreator
-  public LongMeanNoNullAveragerFactory(
-      @JsonProperty("name") String name,
-      @JsonProperty("buckets") int numBuckets,
-      @JsonProperty("cycleSize") Integer cycleSize,
-      @JsonProperty("fieldName") String fieldName
-  )
-  {
-    super(name, numBuckets, fieldName, cycleSize);
-  }
-
-  @Override
-  public Averager<Double> createAverager()
-  {
-    return new LongMeanNoNullAverager(numBuckets, name, fieldName, cycleSize);
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAverager.java
deleted file mode 100644
index cc999e6b9abf..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAverager.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-public class LongMinAverager extends BaseAverager<Number, Long>
-{
-
-  public LongMinAverager(int numBuckets, String name, String fieldName, int cycleSize)
-  {
-    super(Number.class, numBuckets, name, fieldName, cycleSize);
-  }
-
-  @Override
-  protected Long computeResult()
-  {
-    long result = Long.MAX_VALUE;
-
-    for (int i = 0; i < numBuckets; i += cycleSize) {
-      if (buckets[(i + startFrom) % numBuckets] != null) {
-        result = Long.min(result, (buckets[(i + startFrom) % numBuckets]).longValue());
-      }
-    }
-
-    startFrom++;
-    return result;
-  }
-
-}
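Note that the computeResult() just removed seeds its accumulator with Long.MAX_VALUE and only lowers it for non-null buckets, so a window containing no data returns Long.MAX_VALUE rather than null. A self-contained sketch of that behavior with made-up values:

    public class MinWindowDemo
    {
      public static void main(String[] args)
      {
        Long[] window = {42L, null, 17L, 99L};

        long min = Long.MAX_VALUE;
        for (Long v : window) {
          if (v != null) {
            min = Long.min(min, v);
          }
        }

        // Prints 17; had every bucket been null, it would print
        // 9223372036854775807 (Long.MAX_VALUE), mirroring the deleted code.
        System.out.println(min);
      }
    }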
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactory.java
deleted file mode 100644
index ff2562541172..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LongMinAveragerFactory extends ComparableAveragerFactory
-{
-
-  @JsonCreator
-  public LongMinAveragerFactory(
-      @JsonProperty("name") String name,
-      @JsonProperty("buckets") int numBuckets,
-      @JsonProperty("cycleSize") int cycleSize,
-      @JsonProperty("fieldName") String fieldName
-  )
-  {
-    super(name, numBuckets, fieldName, cycleSize);
-  }
-
-  @Override
-  public Averager<Long> createAverager()
-  {
-    return new LongMinAverager(numBuckets, name, fieldName, cycleSize);
-  }
-}
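All of these averagers share the same stride logic: the computeResult() loop advances by cycleSize, so only every cycleSize-th bucket in the window contributes to a given result, and startFrom rotates the window by one bucket per emitted row. A sketch of which indexes a 14-bucket window with cycleSize 7 would visit; the values are illustrative, not from this patch:

    public class StrideDemo
    {
      public static void main(String[] args)
      {
        int numBuckets = 14;
        int cycleSize = 7;   // e.g. "same weekday over the last two weeks"
        int startFrom = 0;   // rotates by one bucket after each emitted row

        for (int i = 0; i < numBuckets; i += cycleSize) {
          // Visits buckets 0 and 7 on the first pass, 1 and 8 on the next, ...
          System.out.println("bucket " + ((i + startFrom) % numBuckets) + " contributes");
        }
      }
    }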
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAverager.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAverager.java
deleted file mode 100644
index c4baf84f617b..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAverager.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-public class LongSumAverager extends BaseAverager<Number, Long>
-{
-  public LongSumAverager(int numBuckets, String name, String fieldName, int cycleSize)
-  {
-    super(Number.class, numBuckets, name, fieldName, cycleSize);
-  }
-
-  @Override
-  protected Long computeResult()
-  {
-    long result = 0;
-
-    for (int i = 0; i < numBuckets; i += cycleSize) {
-      if (buckets[(i + startFrom) % numBuckets] != null) {
-        result += (buckets[(i + startFrom) % numBuckets]).longValue();
-      } else {
-        result += 0;
-      }
-    }
-
-    startFrom++;
-    return result;
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactory.java b/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactory.java
deleted file mode 100644
index 0a603fe9c9f3..000000000000
--- a/extensions-contrib/moving-average-query/src/main/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LongSumAveragerFactory extends ComparableAveragerFactory
-{
-
-  @JsonCreator
-  public LongSumAveragerFactory(
-      @JsonProperty("name") String name,
-      @JsonProperty("buckets") int numBuckets,
-      @JsonProperty("cycleSize") Integer cycleSize,
-      @JsonProperty("fieldName") String fieldName
-  )
-  {
-    super(name, numBuckets, fieldName, cycleSize);
-  }
-
-  @Override
-  public Averager<Long> createAverager()
-  {
-    return new LongSumAverager(numBuckets, name, fieldName, cycleSize);
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/moving-average-query/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
deleted file mode 100644
index ec70e7d9c464..000000000000
--- a/extensions-contrib/moving-average-query/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.
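The META-INF/services file whose deletion begins here is what actually unplugs the extension: Druid discovers DruidModule implementations through the standard java.util.ServiceLoader mechanism, which reads files of exactly this name from the classpath. A minimal sketch of that discovery, assuming the Druid interfaces are on the classpath:

    import org.apache.druid.initialization.DruidModule;

    import java.util.ServiceLoader;

    public class ModuleDiscoveryDemo
    {
      public static void main(String[] args)
      {
        // Instantiates every class listed, one per line, in any
        // META-INF/services/org.apache.druid.initialization.DruidModule resource.
        for (DruidModule module : ServiceLoader.load(DruidModule.class)) {
          System.out.println("discovered: " + module.getClass().getName());
        }
      }
    }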
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.query.movingaverage.MovingAverageQueryModule diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java deleted file mode 100644 index 589b03d6d20e..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java +++ /dev/null @@ -1,810 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.data.input.Row; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.guava.Sequences; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.FilteredAggregatorFactory; -import org.apache.druid.query.aggregation.LongSumAggregatorFactory; -import org.apache.druid.query.dimension.DefaultDimensionSpec; -import org.apache.druid.query.dimension.DimensionSpec; -import org.apache.druid.query.filter.DimFilter; -import org.apache.druid.query.filter.SelectorDimFilter; -import org.apache.druid.query.movingaverage.averagers.AveragerFactory; -import org.apache.druid.query.movingaverage.averagers.ConstantAveragerFactory; -import org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; -import org.joda.time.chrono.ISOChronology; -import org.junit.Assert; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -public class MovingAverageIterableTest extends InitializedNullHandlingTest -{ - private static final DateTime JAN_1 = new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); - private static final DateTime JAN_2 = new DateTime(2017, 1, 2, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); - private static final DateTime JAN_3 = new DateTime(2017, 1, 3, 0, 0, 0, 0, 
ISOChronology.getInstanceUTC()); - private static final DateTime JAN_4 = new DateTime(2017, 1, 4, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); - private static final DateTime JAN_5 = new DateTime(2017, 1, 5, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); - private static final DateTime JAN_6 = new DateTime(2017, 1, 6, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); - - private static final String GENDER = "gender"; - private static final String AGE = "age"; - private static final String COUNTRY = "country"; - - private static final Map DIMS1 = new HashMap<>(); - private static final Map DIMS2 = new HashMap<>(); - private static final Map DIMS3 = new HashMap<>(); - - static { - DIMS1.put(GENDER, "m"); - DIMS1.put(AGE, "10"); - DIMS1.put(COUNTRY, "US"); - - DIMS2.put(GENDER, "f"); - DIMS2.put(AGE, "8"); - DIMS2.put(COUNTRY, "US"); - - DIMS3.put(GENDER, "u"); - DIMS3.put(AGE, "5"); - DIMS3.put(COUNTRY, "UK"); - } - - @Test - public void testNext() - { - - List dims = Arrays.asList( - new DefaultDimensionSpec(GENDER, GENDER), - new DefaultDimensionSpec(AGE, AGE), - new DefaultDimensionSpec(COUNTRY, COUNTRY) - ); - - Sequence dayBuckets = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Arrays.asList( - new MapBasedRow(JAN_1, DIMS1), - new MapBasedRow(JAN_1, DIMS2) - )), - new RowBucket(JAN_2, Collections.singletonList( - new MapBasedRow(JAN_2, DIMS1) - )), - new RowBucket(JAN_3, Collections.emptyList()), - new RowBucket(JAN_4, Arrays.asList( - new MapBasedRow(JAN_4, DIMS2), - new MapBasedRow(JAN_4, DIMS3) - )) - )); - - Iterable iterable = new MovingAverageIterable( - dayBuckets, - dims, - Collections.singletonList(new ConstantAveragerFactory("noop", 1, 1.1f)), - Collections.emptyList(), - Collections.emptyList() - ); - - Iterator iter = iterable.iterator(); - - Assert.assertTrue(iter.hasNext()); - Row r = iter.next(); - Assert.assertEquals(JAN_1, r.getTimestamp()); - Assert.assertEquals("m", r.getRaw(GENDER)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_1, r.getTimestamp()); - Assert.assertEquals("f", r.getRaw(GENDER)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_2, r.getTimestamp()); - Assert.assertEquals("m", r.getRaw(GENDER)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_2, r.getTimestamp()); - Assert.assertEquals("f", r.getRaw(GENDER)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Row r2 = r; - Assert.assertEquals(JAN_3, r.getTimestamp()); - Assert.assertEquals("US", r.getRaw(COUNTRY)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_3, r.getTimestamp()); - Assert.assertEquals("US", r.getRaw(COUNTRY)); - Assert.assertThat(r.getRaw(AGE), CoreMatchers.not(CoreMatchers.equalTo(r2.getRaw(AGE)))); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_4, r.getTimestamp()); - Assert.assertEquals("f", r.getRaw(GENDER)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_4, r.getTimestamp()); - Assert.assertEquals("u", r.getRaw(GENDER)); - - Assert.assertTrue(iter.hasNext()); - r = iter.next(); - Assert.assertEquals(JAN_4, r.getTimestamp()); - Assert.assertEquals("m", r.getRaw(GENDER)); - - Assert.assertFalse(iter.hasNext()); - } - - @Test - public void testAveraging() - { - - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - Map event3 = new HashMap<>(); - Map event4 = new HashMap<>(); - - List ds = new ArrayList<>(); - ds.add(new 
DefaultDimensionSpec("gender", "gender")); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - Row row1 = new MapBasedRow(JAN_1, event1); - - event2.put("gender", "m"); - event2.put("pageViews", 20L); - Row row2 = new MapBasedRow(JAN_2, event2); - - event3.put("gender", "m"); - event3.put("pageViews", 30L); - Row row3 = new MapBasedRow(JAN_3, event3); - - event4.put("gender", "f"); - event4.put("pageViews", 40L); - Row row4 = new MapBasedRow(JAN_3, event4); - - float retval = 14.5f; - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Collections.singletonList(row1)), - new RowBucket(JAN_2, Collections.singletonList(row2)), - new RowBucket(JAN_3, Arrays.asList(row3, row4)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Arrays.asList( - new ConstantAveragerFactory("costPageViews", 7, retval), - new LongMeanAveragerFactory("movingAvgPageViews", 7, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row caResult = iter.next(); - - Assert.assertEquals(JAN_1, caResult.getTimestamp()); - Assert.assertEquals("m", (caResult.getDimension("gender")).get(0)); - Assert.assertEquals(retval, caResult.getMetric("costPageViews").floatValue(), 0.0f); - Assert.assertEquals(1.4285715f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - caResult = iter.next(); - Assert.assertEquals("m", (caResult.getDimension("gender")).get(0)); - Assert.assertEquals(4.285714f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - caResult = iter.next(); - Assert.assertEquals("m", (caResult.getDimension("gender")).get(0)); - Assert.assertEquals(8.571428f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - caResult = iter.next(); - Assert.assertEquals("f", (caResult.getDimension("gender")).get(0)); - Assert.assertEquals(5.714285850f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertFalse(iter.hasNext()); - - } - - - @Test - public void testCompleteData() - { - - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - Map event3 = new HashMap<>(); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - event2.put("gender", "f"); - event2.put("pageViews", 20L); - event3.put("gender", "u"); - event3.put("pageViews", 30L); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - Row jan1Row1 = new MapBasedRow(JAN_1, event1); - Row jan1Row2 = new MapBasedRow(JAN_1, event2); - Row jan1Row3 = new MapBasedRow(JAN_1, event3); - - Row jan2Row1 = new MapBasedRow(JAN_2, event1); - Row jan2Row2 = new MapBasedRow(JAN_2, event2); - Row jan2Row3 = new MapBasedRow(JAN_2, event3); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Arrays.asList(jan1Row1, jan1Row2, jan1Row3)), - new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, 
(result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertFalse(iter.hasNext()); - - } - - // no injection if the data missing at the begining - @Test - public void testMissingDataAtBeginning() - { - - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - Map event3 = new HashMap<>(); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - event2.put("gender", "f"); - event2.put("pageViews", 20L); - event3.put("gender", "u"); - event3.put("pageViews", 30L); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - Row jan1Row1 = new MapBasedRow(JAN_1, event1); - - Row jan2Row1 = new MapBasedRow(JAN_2, event1); - Row jan2Row2 = new MapBasedRow(JAN_2, event2); - Row jan2Row3 = new MapBasedRow(JAN_2, event3); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Collections.singletonList(jan1Row1)), - new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertFalse(iter.hasNext()); - } - - // test injection when the data is missing at the end - @Test - public void testMissingDataAtTheEnd() - { - - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - Map event3 = new HashMap<>(); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - event2.put("gender", "f"); - event2.put("pageViews", 20L); - event3.put("gender", "u"); - event3.put("pageViews", 30L); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - Row jan1Row1 = new MapBasedRow(JAN_1, event1); - Row jan1Row2 = new MapBasedRow(JAN_1, event2); - Row jan1Row3 = 
new MapBasedRow(JAN_1, event3); - Row jan2Row1 = new MapBasedRow(JAN_2, event1); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Arrays.asList(jan1Row1, jan1Row2, jan1Row3)), - new RowBucket(JAN_2, Collections.singletonList(jan2Row1)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertFalse(iter.hasNext()); - } - - // test injection when the data is missing in the middle - @Test - public void testMissingDataAtMiddle() - { - - Map eventM = new HashMap<>(); - Map eventF = new HashMap<>(); - Map eventU = new HashMap<>(); - - eventM.put("gender", "m"); - eventM.put("pageViews", 10L); - eventF.put("gender", "f"); - eventF.put("pageViews", 20L); - eventU.put("gender", "u"); - eventU.put("pageViews", 30L); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - Row jan1Row1M = new MapBasedRow(JAN_1, eventM); - Row jan1Row2F = new MapBasedRow(JAN_1, eventF); - Row jan1Row3U = new MapBasedRow(JAN_1, eventU); - Row jan2Row1M = new MapBasedRow(JAN_2, eventM); - Row jan3Row1M = new MapBasedRow(JAN_3, eventM); - Row jan3Row2F = new MapBasedRow(JAN_3, eventF); - Row jan3Row3U = new MapBasedRow(JAN_3, eventU); - Row jan4Row1M = new MapBasedRow(JAN_4, eventM); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Arrays.asList(jan1Row1M, jan1Row2F, jan1Row3U)), - new RowBucket(JAN_2, Collections.singletonList(jan2Row1M)), - new RowBucket(JAN_3, Arrays.asList(jan3Row1M, jan3Row2F, jan3Row3U)), - new RowBucket(JAN_4, Collections.singletonList(jan4Row1M)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 3, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - // Jan 1 - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", 
(result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_1, (result.getTimestamp())); - - // Jan 2 - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_2, (result.getTimestamp())); - - // Jan 3 - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_3, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_3, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_3, (result.getTimestamp())); - - // Jan 4 - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_4, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("u", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_4, (result.getTimestamp())); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("f", (result.getDimension("gender")).get(0)); - Assert.assertEquals(JAN_4, (result.getTimestamp())); - - Assert.assertFalse(iter.hasNext()); - } - - @Test - public void testMissingDaysAtBegining() - { - - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - Row row1 = new MapBasedRow(JAN_3, event1); - - event2.put("gender", "m"); - event2.put("pageViews", 20L); - Row row2 = new MapBasedRow(JAN_4, event2); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Collections.emptyList()), - new RowBucket(JAN_2, Collections.emptyList()), - new RowBucket(JAN_3, Collections.singletonList(row1)), - new RowBucket(JAN_4, Collections.singletonList(row2)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertFalse(iter.hasNext()); - } - - @Test - public void 
testMissingDaysInMiddle() - { - System.setProperty("druid.generic.useDefaultValueForNull", "true"); - NullHandling.initializeForTests(); - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - Row row1 = new MapBasedRow(JAN_1, event1); - - event2.put("gender", "m"); - event2.put("pageViews", 20L); - Row row2 = new MapBasedRow(JAN_4, event2); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Collections.singletonList(row1)), - new RowBucket(JAN_2, Collections.emptyList()), - new RowBucket(JAN_3, Collections.emptyList()), - new RowBucket(JAN_4, Collections.singletonList(row2)) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertFalse(iter.hasNext()); - } - - @Test - public void testWithFilteredAggregation() - { - - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - Row row1 = new MapBasedRow(JAN_1, event1); - - event2.put("gender", "m"); - event2.put("pageViews", 20L); - Row row2 = new MapBasedRow(JAN_4, event2); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Collections.singletonList(row1)), - new RowBucket(JAN_2, Collections.emptyList()), - new RowBucket(JAN_3, Collections.emptyList()), - new RowBucket(JAN_4, Collections.singletonList(row2)) - )); - - AveragerFactory averagerfactory = new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews"); - AggregatorFactory aggregatorFactory = new LongSumAggregatorFactory("pageViews", "pageViews"); - DimFilter filter = new SelectorDimFilter("gender", "m", null); - FilteredAggregatorFactory filteredAggregatorFactory = new FilteredAggregatorFactory(aggregatorFactory, filter); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList(averagerfactory), - Collections.emptyList(), - Collections.singletonList(filteredAggregatorFactory) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = 
iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertFalse(iter.hasNext()); - } - - @Test - public void testMissingDaysAtEnd() - { - System.setProperty("druid.generic.useDefaultValueForNull", "true"); - NullHandling.initializeForTests(); - Map event1 = new HashMap<>(); - Map event2 = new HashMap<>(); - - List ds = new ArrayList<>(); - ds.add(new DefaultDimensionSpec("gender", "gender")); - - event1.put("gender", "m"); - event1.put("pageViews", 10L); - Row row1 = new MapBasedRow(JAN_1, event1); - - event2.put("gender", "m"); - event2.put("pageViews", 20L); - Row row2 = new MapBasedRow(JAN_2, event2); - - Sequence seq = Sequences.simple(Arrays.asList( - new RowBucket(JAN_1, Collections.singletonList(row1)), - new RowBucket(JAN_2, Collections.singletonList(row2)), - new RowBucket(JAN_3, Collections.emptyList()), - new RowBucket(JAN_4, Collections.emptyList()), - new RowBucket(JAN_5, Collections.emptyList()), - new RowBucket(JAN_6, Collections.emptyList()) - )); - - Iterator iter = new MovingAverageIterable( - seq, - ds, - Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) - ).iterator(); - - Assert.assertTrue(iter.hasNext()); - Row result = iter.next(); - - Assert.assertEquals(JAN_1, result.getTimestamp()); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals(JAN_2, result.getTimestamp()); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals(JAN_3, result.getTimestamp()); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals(JAN_4, result.getTimestamp()); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals(JAN_5, result.getTimestamp()); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(5.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertTrue(iter.hasNext()); - result = iter.next(); - Assert.assertEquals(JAN_6, result.getTimestamp()); - Assert.assertEquals("m", (result.getDimension("gender")).get(0)); - Assert.assertEquals(0.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - Assert.assertFalse(iter.hasNext()); - } -} diff --git 
a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java deleted file mode 100644 index 23dc65f5235c..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java +++ /dev/null @@ -1,403 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; -import com.google.inject.Injector; -import com.google.inject.Module; -import com.google.inject.name.Names; -import com.google.inject.util.Providers; -import org.apache.druid.client.CachingClusteredClient; -import org.apache.druid.client.DruidServer; -import org.apache.druid.client.ImmutableDruidServer; -import org.apache.druid.client.TimelineServerView; -import org.apache.druid.client.cache.CacheConfig; -import org.apache.druid.client.cache.CachePopulatorStats; -import org.apache.druid.client.cache.ForegroundCachePopulator; -import org.apache.druid.client.cache.MapCache; -import org.apache.druid.client.selector.ServerSelector; -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.guice.DruidProcessingModule; -import org.apache.druid.guice.GuiceInjectors; -import org.apache.druid.guice.QueryRunnerFactoryModule; -import org.apache.druid.guice.QueryableModule; -import org.apache.druid.guice.http.DruidHttpClientConfig; -import org.apache.druid.initialization.Initialization; -import org.apache.druid.java.util.common.guava.Accumulators; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.java.util.common.guava.Sequences; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.ServiceEmitter; -import org.apache.druid.query.DruidProcessingConfig; -import org.apache.druid.query.Query; -import org.apache.druid.query.QueryPlus; -import org.apache.druid.query.QueryRunner; -import org.apache.druid.query.QuerySegmentWalker; -import org.apache.druid.query.QueryToolChestWarehouse; -import org.apache.druid.query.Result; -import org.apache.druid.query.RetryQueryRunnerConfig; -import org.apache.druid.query.SegmentDescriptor; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.query.movingaverage.test.TestConfig; -import 
org.apache.druid.query.planning.DataSourceAnalysis; -import org.apache.druid.query.timeseries.TimeseriesQuery; -import org.apache.druid.query.timeseries.TimeseriesResultValue; -import org.apache.druid.segment.join.MapJoinableFactory; -import org.apache.druid.server.ClientQuerySegmentWalker; -import org.apache.druid.server.QueryStackTests; -import org.apache.druid.server.initialization.ServerConfig; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.apache.druid.timeline.TimelineLookup; -import org.hamcrest.core.IsInstanceOf; -import org.joda.time.Interval; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.Executor; -import java.util.concurrent.ForkJoinPool; - -/** - * Base class for implementing MovingAverageQuery tests - */ -@RunWith(Parameterized.class) -public class MovingAverageQueryTest extends InitializedNullHandlingTest -{ - private final ObjectMapper jsonMapper; - private final QueryToolChestWarehouse warehouse; - private final RetryQueryRunnerConfig retryConfig; - private final ServerConfig serverConfig; - - private final List groupByResults = new ArrayList<>(); - private final List> timeseriesResults = new ArrayList<>(); - - private final TestConfig config; - - @Parameters(name = "{0}") - public static Iterable data() throws IOException - { - BufferedReader testReader = new BufferedReader( - new InputStreamReader(MovingAverageQueryTest.class.getResourceAsStream("/queryTests"), StandardCharsets.UTF_8)); - List tests = new ArrayList<>(); - - for (String line = testReader.readLine(); line != null; line = testReader.readLine()) { - tests.add(new String[]{line}); - } - - return tests; - } - - public MovingAverageQueryTest(String yamlFile) throws IOException - { - - List modules = getRequiredModules(); - modules.add( - binder -> { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("queryTest"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(1); - binder.bind(QuerySegmentWalker.class).toProvider(Providers.of(new QuerySegmentWalker() - { - @Override - public QueryRunner getQueryRunnerForIntervals(Query query, Iterable intervals) - { - return (queryPlus, responseContext) -> { - if (query instanceof GroupByQuery) { - return (Sequence) Sequences.simple(groupByResults); - } else if (query instanceof TimeseriesQuery) { - return (Sequence) Sequences.simple(timeseriesResults); - } - throw new UnsupportedOperationException("unexpected query type " + query.getType()); - }; - } - - @Override - public QueryRunner getQueryRunnerForSegments(Query query, Iterable specs) - { - return getQueryRunnerForIntervals(query, null); - } - })); - } - ); - - System.setProperty("druid.generic.useDefaultValueForNull", "true"); - System.setProperty("druid.processing.buffer.sizeBytes", "655360"); - Injector baseInjector = GuiceInjectors.makeStartupInjector(); - Injector injector = Initialization.makeInjectorWithModules(baseInjector, modules); - - jsonMapper = injector.getInstance(ObjectMapper.class); - warehouse = 
injector.getInstance(QueryToolChestWarehouse.class); - retryConfig = injector.getInstance(RetryQueryRunnerConfig.class); - serverConfig = injector.getInstance(ServerConfig.class); - - InputStream is = getClass().getResourceAsStream("/queryTests/" + yamlFile); - ObjectMapper reader = new ObjectMapper(new YAMLFactory()); - config = reader.readValue(is, TestConfig.class); - } - - /** - * Returns the JSON query that should be used in the test. - * - * @return The JSON query - */ - private String getQueryString() - { - return config.query.toString(); - } - - /** - * Returns the JSON result that should be expected from the query. - * - * @return The JSON result - */ - private String getExpectedResultString() - { - return config.expectedOutput.toString(); - } - - /** - * Returns the JSON result that the nested groupby query should produce. - * Either this method or {@link #getTimeseriesResultJson()} must be defined - * by the subclass. - * - * @return The JSON result from the groupby query - */ - private String getGroupByResultJson() - { - ArrayNode node = config.intermediateResults.get("groupBy"); - return node == null ? null : node.toString(); - } - - /** - * Returns the JSON result that the nested timeseries query should produce. - * Either this method or {@link #getGroupByResultJson()} must be defined - * by the subclass. - * - * @return The JSON result from the timeseries query - */ - private String getTimeseriesResultJson() - { - ArrayNode node = config.intermediateResults.get("timeseries"); - return node == null ? null : node.toString(); - } - - /** - * Returns the expected query type. - * - * @return The Query type - */ - private Class getExpectedQueryType() - { - return MovingAverageQuery.class; - } - - private TypeReference> getExpectedResultType() - { - return new TypeReference>() - { - }; - } - - /** - * Returns a list of any additional Druid Modules necessary to run the test. - */ - private List getRequiredModules() - { - List list = new ArrayList<>(); - - list.add(new QueryRunnerFactoryModule()); - list.add(new QueryableModule()); - list.add(new DruidProcessingModule()); - - return list; - } - - /** - * Set up any needed mocks to stub out backend query behavior. - */ - private void defineMocks() throws IOException - { - groupByResults.clear(); - timeseriesResults.clear(); - - if (getGroupByResultJson() != null) { - groupByResults.addAll(jsonMapper.readValue(getGroupByResultJson(), new TypeReference>() {})); - } - - if (getTimeseriesResultJson() != null) { - timeseriesResults.addAll( - jsonMapper.readValue( - getTimeseriesResultJson(), - new TypeReference>>() {} - ) - ); - } - } - - /** - * converts Int to Long, Float to Double in the actual and expected result - */ - private List consistentTypeCasting(List result) - { - List newResult = new ArrayList<>(); - for (MapBasedRow row : result) { - final Map event = Maps.newLinkedHashMap((row).getEvent()); - event.forEach((key, value) -> { - if (value instanceof Integer) { - event.put(key, ((Integer) value).longValue()); - } - if (value instanceof Float) { - event.put(key, ((Float) value).doubleValue()); - } - }); - newResult.add(new MapBasedRow(row.getTimestamp(), event)); - } - - return newResult; - } - - /** - * Validate that the specified query behaves correctly. 
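The consistentTypeCasting helper above exists because Jackson may deserialize the expected results with narrower numeric types (Integer, Float) than the engine produces (Long, Double), which would fail Map equality in the assertion. A standalone sketch of the same widening, with made-up values:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class WideningDemo
    {
      public static void main(String[] args)
      {
        Map<String, Object> event = new LinkedHashMap<>();
        event.put("pageViews", 10);  // Integer after a JSON round trip
        event.put("avgRatio", 2.5f); // Float

        // Replacing the value of an existing key is not a structural
        // modification, so doing it inside forEach is safe here.
        event.forEach((key, value) -> {
          if (value instanceof Integer) {
            event.put(key, ((Integer) value).longValue());
          }
          if (value instanceof Float) {
            event.put(key, ((Float) value).doubleValue());
          }
        });

        System.out.println(event); // values are now Long and Double
      }
    }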
- */ - @SuppressWarnings({"unchecked", "rawtypes"}) - @Test - public void testQuery() throws IOException - { - Query query = jsonMapper.readValue(getQueryString(), Query.class); - Assert.assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType())); - - List expectedResults = jsonMapper.readValue(getExpectedResultString(), getExpectedResultType()); - Assert.assertNotNull(expectedResults); - Assert.assertThat(expectedResults, IsInstanceOf.instanceOf(List.class)); - - CachingClusteredClient baseClient = new CachingClusteredClient( - warehouse, - new TimelineServerView() - { - @Override - public Optional> getTimeline(DataSourceAnalysis analysis) - { - return Optional.empty(); - } - - @Override - public List getDruidServers() - { - return null; - } - - @Override - public QueryRunner getQueryRunner(DruidServer server) - { - return null; - } - - @Override - public void registerTimelineCallback(Executor exec, TimelineCallback callback) - { - - } - - @Override - public void registerSegmentCallback(Executor exec, SegmentCallback callback) - { - - } - - @Override - public void registerServerRemovedCallback(Executor exec, ServerRemovedCallback callback) - { - - } - }, - MapCache.create(100000), - jsonMapper, - new ForegroundCachePopulator(jsonMapper, new CachePopulatorStats(), -1), - new CacheConfig(), - new DruidHttpClientConfig() - { - @Override - public long getMaxQueuedBytes() - { - return 0L; - } - }, - new DruidProcessingConfig() - { - @Override - public String getFormatString() - { - return null; - } - }, - ForkJoinPool.commonPool(), - QueryStackTests.DEFAULT_NOOP_SCHEDULER - ); - - ClientQuerySegmentWalker walker = new ClientQuerySegmentWalker( - new ServiceEmitter("", "", null) - { - @Override - public void emit(Event event) - { - } - }, - baseClient, - null /* local client; unused in this test, so pass in null */, - warehouse, - new MapJoinableFactory(ImmutableMap.of()), - retryConfig, - jsonMapper, - serverConfig, - null, - new CacheConfig() - ); - - defineMocks(); - - QueryPlus queryPlus = QueryPlus.wrap(query); - final Sequence res = query.getRunner(walker).run(queryPlus); - - List actualResults = new ArrayList(); - actualResults = (List) res.accumulate(actualResults, Accumulators.list()); - - expectedResults = consistentTypeCasting(expectedResults); - actualResults = consistentTypeCasting(actualResults); - - Assert.assertEquals(expectedResults, actualResults); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java deleted file mode 100644 index 280cedd1088c..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage; - -import org.apache.druid.common.config.NullHandling; -import org.apache.druid.data.input.MapBasedRow; -import org.apache.druid.data.input.Row; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.query.TableDataSource; -import org.apache.druid.query.aggregation.CountAggregatorFactory; -import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator; -import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator; -import org.apache.druid.query.movingaverage.averagers.DoubleMeanAveragerFactory; -import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; -import org.joda.time.chrono.ISOChronology; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -public class PostAveragerAggregatorCalculatorTest -{ - private PostAveragerAggregatorCalculator pac; - private Map event; - private MapBasedRow row; - - @Before - public void setup() - { - System.setProperty("druid.generic.useDefaultValueForNull", "true"); - NullHandling.initializeForTests(); - MovingAverageQuery query = new MovingAverageQuery( - new TableDataSource("d"), - new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval( - "2017-01-01/2017-01-01", - ISOChronology.getInstanceUTC() - ))), - null, - Granularities.DAY, - null, - Collections.singletonList(new CountAggregatorFactory("count")), - Collections.emptyList(), - null, - Collections.singletonList(new DoubleMeanAveragerFactory("avgCount", 7, 1, "count")), - Collections.singletonList(new ArithmeticPostAggregator( - "avgCountRatio", - "/", - Arrays.asList( - new FieldAccessPostAggregator("count", "count"), - new FieldAccessPostAggregator("avgCount", "avgCount") - ) - )), - null, - null - ); - - pac = new PostAveragerAggregatorCalculator(query); - event = new HashMap<>(); - row = new MapBasedRow(new DateTime(ISOChronology.getInstanceUTC()), event); - } - - @Test - public void testApply() - { - event.put("count", 10.0); - event.put("avgCount", 12.0); - - Row result = pac.apply(row); - - Assert.assertEquals(10.0f / 12.0f, result.getMetric("avgCountRatio").floatValue(), 0.0); - } - - @Test - public void testApplyMissingColumn() - { - event.put("count", 10.0); - - Row result = pac.apply(row); - - Assert.assertEquals(0.0, result.getMetric("avgCountRatio").floatValue(), 0.0); - Assert.assertNull(result.getRaw("avgCountRatio")); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java deleted file mode 100644 index b49e4d950331..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java +++ /dev/null @@ -1,630 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) 
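The test just removed pins down the post-averager arithmetic: avgCountRatio is the raw count divided by the moving average (an ArithmeticPostAggregator with operator "/"), and when the avgCount column is absent the calculator leaves avgCountRatio unset, so getRaw() returns null while getMetric() falls back to zero in the default-value null-handling mode the test enables. The core arithmetic as a trivial sketch, using the test's own values:

    double count = 10.0;
    double avgCount = 12.0;
    System.out.println(count / avgCount); // 0.8333..., asserted above as 10.0f / 12.0f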
under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage;
-
-import org.apache.druid.data.input.MapBasedRow;
-import org.apache.druid.data.input.Row;
-import org.apache.druid.java.util.common.guava.Sequence;
-import org.apache.druid.java.util.common.guava.Sequences;
-import org.joda.time.DateTime;
-import org.joda.time.Interval;
-import org.joda.time.Period;
-import org.joda.time.chrono.ISOChronology;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-public class RowBucketIterableTest
-{
-  private static final DateTime JAN_1 = new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-  private static final DateTime JAN_2 = new DateTime(2017, 1, 2, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-  private static final DateTime JAN_3 = new DateTime(2017, 1, 3, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-  private static final DateTime JAN_4 = new DateTime(2017, 1, 4, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-  private static final DateTime JAN_5 = new DateTime(2017, 1, 5, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-  private static final DateTime JAN_6 = new DateTime(2017, 1, 6, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-  private static final DateTime JAN_9 = new DateTime(2017, 1, 9, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
-
-  private static final Map<String, Object> EVENT_M_10 = new HashMap<>();
-  private static final Map<String, Object> EVENT_F_20 = new HashMap<>();
-  private static final Map<String, Object> EVENT_U_30 = new HashMap<>();
-
-  private static final Row JAN_1_M_10 = new MapBasedRow(new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_M_10);
-  private static final Row JAN_1_F_20 = new MapBasedRow(new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_F_20);
-  private static final Row JAN_1_U_30 = new MapBasedRow(new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_U_30);
-  private static final Row JAN_2_M_10 = new MapBasedRow(new DateTime(2017, 1, 2, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_M_10);
-  private static final Row JAN_3_M_10 = new MapBasedRow(new DateTime(2017, 1, 3, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_M_10);
-  private static final Row JAN_3_F_20 = new MapBasedRow(new DateTime(2017, 1, 3, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_F_20);
-  private static final Row JAN_4_M_10 = new MapBasedRow(new DateTime(2017, 1, 4, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_M_10);
-  private static final Row JAN_4_F_20 = new MapBasedRow(new DateTime(2017, 1, 4, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_F_20);
-  private static final Row JAN_4_U_30 = new MapBasedRow(new DateTime(2017, 1, 4, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_U_30);
-  private static final Row JAN_5_M_10 = new MapBasedRow(new DateTime(2017, 1, 5, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_M_10);
-  private static final Row JAN_6_M_10 = new MapBasedRow(new DateTime(2017, 1, 6, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_M_10);
-  private static final Row JAN_7_F_20 = new MapBasedRow(new DateTime(2017, 1, 7, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_F_20);
-  private static final Row JAN_8_U_30 = new MapBasedRow(new DateTime(2017, 1, 8, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), EVENT_U_30);
-
-  private static final Interval INTERVAL_JAN_1_1 = new Interval(JAN_1, JAN_2);
-  private static final Interval INTERVAL_JAN_1_2 = new Interval(JAN_1, JAN_3);
-  private static final Interval INTERVAL_JAN_1_4 = new Interval(JAN_1, JAN_5);
-  private static final Interval INTERVAL_JAN_1_5 = new Interval(JAN_1, JAN_6);
-  private static final Interval INTERVAL_JAN_6_8 = new Interval(JAN_6, JAN_9);
-  private static final Period ONE_DAY = Period.days(1);
-
-  private List<Row> rows = null;
-  private List<Interval> intervals = new ArrayList<>();
-
-  @BeforeClass
-  public static void setupClass()
-  {
-    EVENT_M_10.put("gender", "m");
-    EVENT_M_10.put("pageViews", 10L);
-    EVENT_F_20.put("gender", "f");
-    EVENT_F_20.put("pageViews", 20L);
-    EVENT_U_30.put("gender", "u");
-    EVENT_U_30.put("pageViews", 30L);
-  }
-
-  @Test
-  public void testCompleteData()
-  {
-    intervals = new ArrayList<>();
-    intervals.add(INTERVAL_JAN_1_4);
-
-    rows = new ArrayList<>();
-    rows.add(JAN_1_M_10);
-    rows.add(JAN_2_M_10);
-    rows.add(JAN_3_M_10);
-    rows.add(JAN_4_M_10);
-
-    List<Row> expectedDay1 = Collections.singletonList(JAN_1_M_10);
-    List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
-    List<Row> expectedDay3 = Collections.singletonList(JAN_3_M_10);
-    List<Row> expectedDay4 = Collections.singletonList(JAN_4_M_10);
-
-    Sequence<Row> seq = Sequences.simple(rows);
-    RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY);
-    Iterator<RowBucket> iter = rbi.iterator();
-
-    RowBucket actual = iter.next();
-    Assert.assertEquals(JAN_1, actual.getDateTime());
-    Assert.assertEquals(expectedDay1, actual.getRows());
-
-    actual = iter.next();
-    Assert.assertEquals(JAN_2, actual.getDateTime());
-    Assert.assertEquals(expectedDay2, actual.getRows());
-
-    actual = iter.next();
-    Assert.assertEquals(JAN_3, actual.getDateTime());
-    Assert.assertEquals(expectedDay3, actual.getRows());
-
-    actual = iter.next();
-    Assert.assertEquals(JAN_4, actual.getDateTime());
-    Assert.assertEquals(expectedDay4, actual.getRows());
-  }
-
-  @Test
-  public void testApplyLastDaySingleRow()
-  {
-    intervals = new ArrayList<>();
-    intervals.add(INTERVAL_JAN_1_4);
-
-    List<Row> expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20);
-    List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
-    List<Row> expectedDay3 = Collections.singletonList(JAN_3_F_20);
-    List<Row> expectedDay4 = Collections.singletonList(JAN_4_M_10);
-
-    rows = new ArrayList<>();
-    rows.add(JAN_1_M_10);
-    rows.add(JAN_1_F_20);
-    rows.add(JAN_2_M_10);
-    rows.add(JAN_3_F_20);
-    rows.add(JAN_4_M_10);
-
-    Sequence<Row> seq = Sequences.simple(rows);
-    RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY);
-    Iterator<RowBucket> iter = rbi.iterator();
-
-    RowBucket actual = iter.next();
-    Assert.assertEquals(expectedDay1, actual.getRows());
-
-    actual = iter.next();
-    Assert.assertEquals(expectedDay2, actual.getRows());
-
-    actual = iter.next();
-    Assert.assertEquals(expectedDay3,
actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testApplyLastDayMultipleRows() - { - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - List expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.singletonList(JAN_3_F_20); - List expectedDay4 = Arrays.asList(JAN_4_M_10, JAN_4_F_20, JAN_4_U_30); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_1_F_20); - rows.add(JAN_2_M_10); - rows.add(JAN_3_F_20); - rows.add(JAN_4_M_10); - rows.add(JAN_4_F_20); - rows.add(JAN_4_U_30); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testSingleDaySingleRow() - { - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_1); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - - List expectedDay1 = Collections.singletonList(JAN_1_M_10); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(expectedDay1, actual.getRows()); - Assert.assertEquals(JAN_1, actual.getDateTime()); - } - - @Test - public void testSingleDayMultipleRow() - { - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_1); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_1_F_20); - rows.add(JAN_1_U_30); - - List expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20, JAN_1_U_30); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - } - - @Test - public void testMissingDaysAtBegining() - { - List expectedDay1 = Collections.emptyList(); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_2); - - rows = new ArrayList<>(); - rows.add(JAN_2_M_10); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_2, actual.getDateTime()); - Assert.assertEquals(expectedDay2, actual.getRows()); - } - - @Test - public void testMissingDaysAtBeginingFollowedByMultipleRow() - { - List expectedDay1 = Collections.emptyList(); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.singletonList(JAN_3_M_10); - List expectedDay4 = Collections.singletonList(JAN_4_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - rows = new ArrayList<>(); - rows.add(JAN_2_M_10); - rows.add(JAN_3_M_10); - rows.add(JAN_4_M_10); - - Sequence seq = Sequences.simple(rows); - 
RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_2, actual.getDateTime()); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testMissingDaysAtBeginingAndAtTheEnd() - { - List expectedDay1 = Collections.emptyList(); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.singletonList(JAN_3_M_10); - List expectedDay4 = Collections.emptyList(); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - rows = new ArrayList<>(); - rows.add(JAN_2_M_10); - rows.add(JAN_3_M_10); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_2, actual.getDateTime()); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testMultipleMissingDays() - { - List expectedDay1 = Collections.emptyList(); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.emptyList(); - List expectedDay4 = Collections.singletonList(JAN_4_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - rows = new ArrayList<>(); - rows.add(JAN_2_M_10); - rows.add(JAN_4_M_10); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_2, actual.getDateTime()); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testMultipleMissingDaysMultipleRowAtTheEnd() - { - List expectedDay1 = Collections.emptyList(); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.emptyList(); - List expectedDay4 = Collections.singletonList(JAN_4_M_10); - List expectedDay5 = Collections.singletonList(JAN_5_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_5); - - rows = new ArrayList<>(); - rows.add(JAN_2_M_10); - rows.add(JAN_4_M_10); - rows.add(JAN_5_M_10); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = 
rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_2, actual.getDateTime()); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_5, actual.getDateTime()); - Assert.assertEquals(expectedDay5, actual.getRows()); - } - - @Test - public void testMissingDaysInMiddleOneRow() - { - List expectedDay1 = Collections.singletonList(JAN_1_M_10); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.emptyList(); - List expectedDay4 = Collections.singletonList(JAN_4_M_10); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_2_M_10); - rows.add(JAN_4_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testMissingDaysInMiddleMultipleRow() - { - List expectedDay1 = Collections.singletonList(JAN_1_M_10); - List expectedDay2 = Collections.emptyList(); - List expectedDay3 = Collections.singletonList(JAN_3_M_10); - List expectedDay4 = Collections.singletonList(JAN_4_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_3_M_10); - rows.add(JAN_4_M_10); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(JAN_1, actual.getDateTime()); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_2, actual.getDateTime()); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testApplyLastDayNoRows() - { - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - List expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.singletonList(JAN_3_F_20); - List expectedDay4 = Collections.emptyList(); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_1_F_20); - rows.add(JAN_2_M_10); - rows.add(JAN_3_F_20); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - 
Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testApplyLastTwoDayNoRows() - { - List expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.emptyList(); - List expectedDay4 = Collections.emptyList(); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_1_F_20); - rows.add(JAN_2_M_10); - - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_3, actual.getDateTime()); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(JAN_4, actual.getDateTime()); - Assert.assertEquals(expectedDay4, actual.getRows()); - } - - @Test - public void testApplyMultipleInterval() - { - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - intervals.add(INTERVAL_JAN_6_8); - - List expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20); - List expectedDay2 = Collections.singletonList(JAN_2_M_10); - List expectedDay3 = Collections.singletonList(JAN_3_F_20); - List expectedDay4 = Arrays.asList(JAN_4_M_10, JAN_4_F_20, JAN_4_U_30); - List expectedDay6 = Collections.singletonList(JAN_6_M_10); - List expectedDay7 = Collections.singletonList(JAN_7_F_20); - List expectedDay8 = Collections.singletonList(JAN_8_U_30); - - rows = new ArrayList<>(); - rows.add(JAN_1_M_10); - rows.add(JAN_1_F_20); - rows.add(JAN_2_M_10); - rows.add(JAN_3_F_20); - rows.add(JAN_4_M_10); - rows.add(JAN_4_F_20); - rows.add(JAN_4_U_30); - rows.add(JAN_6_M_10); - rows.add(JAN_7_F_20); - rows.add(JAN_8_U_30); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - RowBucket actual = iter.next(); - Assert.assertEquals(expectedDay1, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay2, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay3, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay4, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay6, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay7, actual.getRows()); - - actual = iter.next(); - Assert.assertEquals(expectedDay8, actual.getRows()); - } - - @Test - public void testNodata() - { - intervals = new ArrayList<>(); - intervals.add(INTERVAL_JAN_1_4); - intervals.add(INTERVAL_JAN_6_8); - - rows = new ArrayList<>(); - - Sequence seq = Sequences.simple(rows); - RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); - Iterator iter = rbi.iterator(); - - Assert.assertTrue(iter.hasNext()); - RowBucket actual = iter.next(); - Assert.assertEquals(Collections.emptyList(), actual.getRows()); - } -} diff --git 
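The deleted test class above pins down RowBucketIterable's grouping contract: rows arrive in time order, every period of every query interval yields exactly one RowBucket, and periods with no matching rows still yield a bucket whose row list is empty (testMissingDaysAtBegining, testMultipleMissingDays, testNodata). A self-contained sketch of that contract, with hypothetical names, java.time in place of Joda-Time, and a plain Queue in place of Druid's Sequence:

    import java.time.LocalDate;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.Queue;

    // Illustrative stand-in for the bucketing logic: one bucket per day from
    // start (inclusive) to end (exclusive), with empty days represented
    // explicitly rather than skipped.
    final class DailyBucketer
    {
      static List<List<Map<String, Object>>> bucket(
          Queue<Map<String, Object>> rows,  // time-ordered; "day" holds a LocalDate
          LocalDate start,
          LocalDate end
      )
      {
        List<List<Map<String, Object>>> buckets = new ArrayList<>();
        for (LocalDate day = start; day.isBefore(end); day = day.plusDays(1)) {
          List<Map<String, Object>> bucketRows = new ArrayList<>();
          // Drain every row stamped with the current day; a day with no rows
          // falls through and contributes an empty bucket.
          while (!rows.isEmpty() && day.equals(rows.peek().get("day"))) {
            bucketRows.add(rows.poll());
          }
          buckets.add(bucketRows);
        }
        return buckets;
      }
    }

Under those assumptions, an interval of Jan 1-4 with rows only on Jan 2 and Jan 4 produces [[], [jan2Row], [], [jan4Row]] — the empty/singleton pattern the assertions above expect.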
a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java
deleted file mode 100644
index 30d97c81eb30..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.util.Comparator;
-import java.util.List;
-
-public class BaseAveragerFactoryTest
-{
-  private AveragerFactory fac;
-
-  @Before
-  public void setup()
-  {
-    fac = new BaseAveragerFactory("test", 5, "field", 1)
-    {
-      @Override
-      public Averager createAverager()
-      {
-        return null;
-      }
-
-      @Override
-      public Comparator getComparator()
-      {
-        return null;
-      }
-    };
-  }
-
-  @Test
-  public void testGetDependentFields()
-  {
-    List dependentFields = fac.getDependentFields();
-    Assert.assertEquals(1, dependentFields.size());
-    Assert.assertEquals("field", dependentFields.get(0));
-  }
-
-  @Test
-  public void testFinalization()
-  {
-    Long input = 5L;
-    Assert.assertEquals(input, fac.finalizeComputation(input));
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java
deleted file mode 100644
index b51f3e72fb96..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Collections;
-
-public class BaseAveragerTest
-{
-
-  public static class TestAverager extends BaseAverager
-  {
-    TestAverager(Class clazz, int b, String name, String field, int cycleSize)
-    {
-      super(clazz, b, name, field, cycleSize);
-    }
-
-    @Override
-    protected Integer computeResult()
-    {
-      return 1;
-    }
-  }
-
-  @Test
-  public void testBaseAverager()
-  {
-    BaseAverager avg = new TestAverager(Integer.class, 5, "test", "field", 1);
-
-    Assert.assertEquals("test", avg.getName());
-    Assert.assertEquals(5, avg.getNumBuckets());
-    Assert.assertEquals(5, avg.getBuckets().length);
-    Assert.assertTrue(avg.getBuckets().getClass().isArray());
-  }
-
-  @Test
-  public void testAddElement()
-  {
-    BaseAverager avg = new TestAverager(Integer.class, 3, "test", "field", 1);
-    Object[] buckets = avg.getBuckets();
-
-    avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
-    Assert.assertEquals(1, buckets[0]);
-    Assert.assertNull(buckets[1]);
-    Assert.assertNull(buckets[2]);
-
-    avg.addElement(Collections.singletonMap("field", 2), Collections.emptyMap());
-    Assert.assertEquals(1, buckets[0]);
-    Assert.assertEquals(2, buckets[1]);
-    Assert.assertNull(buckets[2]);
-
-    avg.addElement(Collections.singletonMap("field", 3), Collections.emptyMap());
-    Assert.assertEquals(1, buckets[0]);
-    Assert.assertEquals(2, buckets[1]);
-    Assert.assertEquals(3, buckets[2]);
-
-    avg.addElement(Collections.singletonMap("field", 4), Collections.emptyMap());
-    Assert.assertEquals(4, buckets[0]);
-    Assert.assertEquals(2, buckets[1]);
-    Assert.assertEquals(3, buckets[2]);
-  }
-
-  @Test
-  public void testSkip()
-  {
-    BaseAverager avg = new TestAverager(Integer.class, 3, "test", "field", 1);
-    Object[] buckets = avg.getBuckets();
-
-    avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
-    avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
-    avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
-
-    Assert.assertEquals(1, buckets[0]);
-    Assert.assertEquals(1, buckets[1]);
-    Assert.assertEquals(1, buckets[2]);
-
-    avg.skip();
-    Assert.assertNull(buckets[0]);
-    Assert.assertNotNull(buckets[1]);
-    Assert.assertNotNull(buckets[2]);
-
-    avg.skip();
-    Assert.assertNull(buckets[0]);
-    Assert.assertNull(buckets[1]);
-    Assert.assertNotNull(buckets[2]);
-
-    avg.skip();
-    Assert.assertNull(buckets[0]);
-    Assert.assertNull(buckets[1]);
-    Assert.assertNull(buckets[2]);
-
-    // poke some test data into the array
-    buckets[0] = 1;
-
-    avg.skip();
-    Assert.assertNull(buckets[0]);
-    Assert.assertNull(buckets[1]);
-    Assert.assertNull(buckets[2]);
-  }
-
-  @Test
-  public void testHasData()
-  {
-    BaseAverager avg = new TestAverager(Integer.class, 3, "test", "field", 1);
-
-    Assert.assertFalse(avg.hasData());
-
-    avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
-    Assert.assertTrue(avg.hasData());
-
-    avg.skip();
-    avg.skip();
-    avg.skip();
-
-    Assert.assertFalse(avg.hasData());
-  }
-
-  @Test
-  public void testGetResult()
-  {
-    BaseAverager avg = new TestAverager(Integer.class, 3, "test", "field", 1);
-
-    Assert.assertNull(avg.getResult());
-
-    avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
-    Assert.assertEquals(Integer.valueOf(1), avg.getResult());
-  }
-}
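testAddElement and testSkip above fix BaseAverager's storage discipline: a fixed-size bucket array written at a wrapping cursor (the fourth add on a three-bucket averager overwrites slot 0), with skip() clearing the slot the cursor would reuse next. A minimal, hypothetical ring buffer that reproduces exactly those assertions — not the extension's actual class:

    // Illustrative ring buffer; "slots" plays the role of getBuckets().
    final class RingBuffer<T>
    {
      private final Object[] slots;
      private int cursor; // next slot to write or clear

      RingBuffer(int size)
      {
        this.slots = new Object[size];
      }

      void add(T value)
      {
        slots[cursor] = value; // fourth add on size 3 lands in slot 0 again
        cursor = (cursor + 1) % slots.length;
      }

      void skip()
      {
        slots[cursor] = null; // evicts the oldest value, as testSkip asserts
        cursor = (cursor + 1) % slots.length;
      }

      boolean hasData()
      {
        for (Object slot : slots) {
          if (slot != null) {
            return true;
          }
        }
        return false; // matches testHasData after three skips
      }
    }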
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java
deleted file mode 100644
index ef8c8a760408..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.hamcrest.CoreMatchers;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class DoubleMaxAveragerFactoryTest
-{
-  @Test
-  public void testCreateAverager()
-  {
-    AveragerFactory fac = new DoubleMaxAveragerFactory("test", 5, 1, "field");
-    Assert.assertThat(fac.createAverager(), CoreMatchers.instanceOf(DoubleMaxAverager.class));
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java
deleted file mode 100644
index 95034d021016..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Collections;
-import java.util.HashMap;
-
-public class DoubleMaxAveragerTest
-{
-  @Test
-  public void testComputeResult()
-  {
-    BaseAverager avg = new DoubleMaxAverager(3, "test", "field", 1);
-
-    Assert.assertEquals(Double.NEGATIVE_INFINITY, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", -1.1e100), new HashMap<>());
-    Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    Assert.assertEquals(1.0, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 1), new HashMap<>());
-    Assert.assertEquals(1.0, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
-    avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    Assert.assertEquals(5.0, avg.computeResult(), 0.0);
-
-    avg.skip();
-    Assert.assertEquals(3.0, avg.computeResult(), 0.0);
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java
deleted file mode 100644
index d4d85f445508..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.hamcrest.core.IsInstanceOf;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class DoubleMeanAveragerFactoryTest
-{
-  @Test
-  public void testCreateAverager()
-  {
-    AveragerFactory fac = new DoubleMeanAveragerFactory("test", 5, 1, "field");
-    Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMeanAverager.class));
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java
deleted file mode 100644
index bc3d20d19ddc..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Collections;
-import java.util.HashMap;
-
-public class DoubleMeanAveragerTest
-{
-  @Test
-  public void testComputeResult()
-  {
-    BaseAverager avg = new DoubleMeanAverager(3, "test", "field", 1);
-
-    Assert.assertEquals(0.0, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    Assert.assertEquals(1.0, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    Assert.assertEquals(2.0, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 0), new HashMap<>());
-    Assert.assertEquals(2.0, avg.computeResult(), 0.0);
-
-    avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    Assert.assertEquals(2.0, avg.computeResult(), 0.0);
-
-    avg.skip();
-    Assert.assertEquals(4.0 / 3, avg.computeResult(), 0.0);
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java
deleted file mode 100644
index ec3ffdacea98..000000000000
--- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.query.movingaverage.averagers;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Collections;
-import java.util.HashMap;
-
-public class DoubleMeanAveragerWithPeriodTest
-{
-  @Test
-  public void testComputeResult()
-  {
-    BaseAverager averager = new DoubleMeanAverager(14, "test", "field", 7);
-
-    averager.addElement(Collections.singletonMap("field", 7.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 4.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 6.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 7.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 4.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
-    averager.addElement(Collections.singletonMap("field", 6.0), new HashMap<>());
-
-    Assert.assertEquals(7, averager.computeResult(), 0.0); // (7+7)/2
-
-    averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    Assert.assertEquals(1, averager.computeResult(), 0.0); // (1+1)/2
-
-    BaseAverager averager1 = new DoubleMeanAverager(14, "test", "field", 3);
-
-    averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
-    averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
-
-    Assert.assertEquals(1, averager1.computeResult(), 0.0); // (1+1+1+1+1)/5
-
-    Assert.assertEquals(2, averager1.computeResult(), 0.0); // (2+2+2+2+2)/5
-
-    Assert.assertEquals(13.0 / 5, averager1.computeResult(), 0.0); // (3+3+3+3+1)/5
-  }
-}
diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java
deleted file mode 100644
index 6afbd478b521..000000000000
---
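The period test above is the most subtle of the set: with 14 buckets and cycleSize 7, only every seventh bucket participates in the mean, and the three back-to-back computeResult() calls on averager1 (1, then 2, then 13.0/5) imply that each call advances the cycle offset and wraps around the buffer. One way to reproduce those exact numbers, under the assumption — inferred from the assertions, not from the extension's source — that the averager samples ceil(numBuckets / cycleSize) slots, stepping by cycleSize from a rotating offset:

    // Hypothetical periodic mean over a filled window (oldest value first).
    final class PeriodicMean
    {
      private final double[] window;
      private final int cycleSize;
      private int offset; // advances on every computeResult() call

      PeriodicMean(double[] window, int cycleSize)
      {
        this.window = window;
        this.cycleSize = cycleSize;
      }

      double computeResult()
      {
        int samples = (window.length + cycleSize - 1) / cycleSize; // ceil
        double sum = 0;
        for (int k = 0; k < samples; k++) {
          sum += window[(offset + k * cycleSize) % window.length]; // wraps, like the 13.0/5 case
        }
        offset = (offset + 1) % cycleSize;
        return sum / samples;
      }
    }

With window {1,2,3,1,2,3,1,2,3,1,2,3,1,2} and cycleSize 3, three successive calls visit indices {0,3,6,9,12}, {1,4,7,10,13} and {2,5,8,11,0}, giving 1, 2 and (3+3+3+3+1)/5 — the commented expectations in the deleted test.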
a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class DoubleMeanNoNullAveragerFactoryTest -{ - @Test - public void testCreateAverager() - { - AveragerFactory fac = new DoubleMeanNoNullAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMeanNoNullAverager.class)); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java deleted file mode 100644 index 190fc84e1eb5..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class DoubleMeanNoNullAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new DoubleMeanNoNullAverager(3, "test", "field", 1); - - Assert.assertEquals(Double.NaN, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 0), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.skip(); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - // testing cycleSize functionality - BaseAverager averager = new DoubleMeanNoNullAverager(14, "test", "field", 7); - - averager.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - Assert.assertEquals(2.0, averager.computeResult(), 0.0); - - averager.addElement(Collections.singletonMap("field", 4.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 6.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 7.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 8.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 9.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", null), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 11.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 12.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 13.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 14.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 15.0), new HashMap<>()); - averager.addElement(Collections.singletonMap("field", 16.0), new HashMap<>()); - - Assert.assertEquals(7.5, averager.computeResult(), 0.0); - - averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - Assert.assertEquals(8.5, averager.computeResult(), 0.0); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java deleted file mode 100644 index 61250215afea..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
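DoubleMeanNoNullAveragerTest above separates the two mean variants: the plain mean divides by the full bucket count (one 3.0 across three buckets averages to 1.0), while the no-null mean divides only by the buckets that actually hold a value, so an empty window comes out as NaN rather than 0.0. A one-method sketch of the no-null rule, with hypothetical naming:

    // Illustrative "no null" mean: unfilled buckets are excluded from both
    // the sum and the divisor; 0.0 / 0 yields NaN for an empty window.
    static double meanNoNull(Double[] window)
    {
      double sum = 0;
      int count = 0;
      for (Double value : window) {
        if (value != null) {
          sum += value;
          count++;
        }
      }
      return sum / count; // count == 0 -> NaN, matching the first assertion
    }

The same exclusion also explains the cycleSize portion of the test: a null slot inside the sampled cycle simply drops out of the divisor instead of dragging the mean toward zero.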
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class DoubleMinAveragerFactoryTest -{ - @Test - public void testCreateAverager() - { - AveragerFactory fac = new DoubleMinAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMinAverager.class)); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java deleted file mode 100644 index 0fcabf52f6f0..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class DoubleMinAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new DoubleMinAverager(3, "test", "field", 1); - - Assert.assertEquals(Double.POSITIVE_INFINITY, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", -1.1e100), new HashMap<>()); - Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 1.0), new HashMap<>()); - Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>()); - Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 5.0), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.skip(); - avg.skip(); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactoryTest.java deleted file mode 100644 index bb65e963afa6..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerFactoryTest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
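A detail worth noticing across DoubleMinAveragerTest and its siblings: the events deliberately mix boxed types — 1, 1L, 1.0, even the long-deprecated new Integer(1) — so the averagers evidently read the metric as a generic java.lang.Number and widen it, rather than requiring a single box type. A hedged sketch of that access pattern (hypothetical helper, not the extension's code):

    import java.util.Map;

    final class FieldReader
    {
      // Reads a metric the way the mixed-type assertions suggest: whatever
      // Number is present is widened explicitly. Null handling varies by
      // averager (see the NoNull tests); 0.0 here is only a placeholder.
      static double readAsDouble(Map<String, Object> event, String field)
      {
        Object raw = event.get(field);
        return raw == null ? 0.0 : ((Number) raw).doubleValue(); // Integer 1 and Double 1.0 both read as 1.0
      }
    }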
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class DoubleSumAveragerFactoryTest -{ - - @Test - public void testCreateAverager() - { - AveragerFactory fac = new DoubleSumAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleSumAverager.class)); - } - -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerTest.java deleted file mode 100644 index e08896c4e2b8..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleSumAveragerTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class DoubleSumAveragerTest -{ - - @Test - public void testComputeResult() - { - BaseAverager avg = new DoubleSumAverager(3, "test", "field", 1); - - Assert.assertEquals(0.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - Assert.assertEquals(6.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>()); - Assert.assertEquals(6.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 2.5), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - Assert.assertEquals(6.5, avg.computeResult(), 0.0); - - avg.skip(); - Assert.assertEquals(4.0, avg.computeResult(), 0.0); - - } - -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java deleted file mode 100644 index 0f429e787cff..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class LongMaxAveragerFactoryTest -{ - @Test - public void testCreateAverager() - { - AveragerFactory fac = new LongMaxAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMaxAverager.class)); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java deleted file mode 100644 index 825d1c1cac2b..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class LongMaxAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new LongMaxAverager(3, "test", "field", 1); - - Assert.assertEquals(Long.MIN_VALUE, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", -1000000L), new HashMap<>()); - Assert.assertEquals(-1000000, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", 1L), new HashMap<>()); - Assert.assertEquals(1, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", 1), new HashMap<>()); - Assert.assertEquals(1, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", 5L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - Assert.assertEquals(5, (long) avg.computeResult()); - - avg.skip(); - Assert.assertEquals(3, (long) avg.computeResult()); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java deleted file mode 100644 index 3b5b3e56be7a..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class LongMeanAveragerFactoryTest -{ - @Test - public void testCreateAverager() - { - AveragerFactory fac = new LongMeanAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMeanAverager.class)); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java deleted file mode 100644 index 8ff63de2e800..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class LongMeanAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new LongMeanAverager(3, "test", "field", 1); - - Assert.assertEquals(0.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(1.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.skip(); - Assert.assertEquals(4.0 / 3, avg.computeResult(), 0.0); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java deleted file mode 100644 index fb3e33eaa5e2..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class LongMeanNoNullAveragerFactoryTest -{ - @Test - public void testCreateAverager() - { - AveragerFactory fac = new LongMeanNoNullAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMeanNoNullAverager.class)); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java deleted file mode 100644 index 2c16d052f715..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class LongMeanNoNullAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new LongMeanNoNullAverager(3, "test", "field", 1); - - Assert.assertEquals(Double.NaN, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 0), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - - avg.skip(); - Assert.assertEquals(2.0, avg.computeResult(), 0.0); - } - -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java deleted file mode 100644 index 9dca156bc63b..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
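For readers comparing the two mean tests above: LongMeanAverager divides by the full bucket count and treats unfilled buckets as zero (a single element of 3 in a 3-bucket window yields 1.0), while LongMeanNoNullAverager divides by the number of buckets actually filled (the same element yields 3.0, and a fully empty window yields NaN). A minimal sketch of the two semantics in plain Java, not the extension's own classes:

import java.util.Arrays;

class MeanSemanticsSketch
{
  public static void main(String[] args)
  {
    Long[] buckets = {3L, null, null}; // a 3-bucket window with one filled bucket

    // LongMeanAverager-style: unfilled buckets count as zero -> 3 / 3 = 1.0
    double zeroFilledMean = Arrays.stream(buckets)
        .mapToLong(v -> v == null ? 0L : v)
        .sum() / (double) buckets.length;

    // LongMeanNoNullAverager-style: only filled buckets are averaged -> 3.0
    double noNullMean = Arrays.stream(buckets)
        .filter(v -> v != null)
        .mapToLong(Long::longValue)
        .average()
        .orElse(Double.NaN); // NaN for an empty window, as the test asserts

    System.out.println(zeroFilledMean + " vs " + noNullMean); // 1.0 vs 3.0
  }
}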
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class LongMinAveragerFactoryTest -{ - @Test - public void testCreateAverager() - { - AveragerFactory fac = new LongMinAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMinAverager.class)); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java deleted file mode 100644 index cdf80d76ea85..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class LongMinAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new LongMinAverager(3, "test", "field", 1); - - Assert.assertEquals(Long.MAX_VALUE, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", -10000L), new HashMap<>()); - Assert.assertEquals(-10000, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", 1L), new HashMap<>()); - Assert.assertEquals(-10000, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", 1000), new HashMap<>()); - Assert.assertEquals(-10000, (long) avg.computeResult()); - - avg.addElement(Collections.singletonMap("field", 5L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(2, (long) avg.computeResult()); - - avg.skip(); - avg.skip(); - Assert.assertEquals(3, (long) avg.computeResult()); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactoryTest.java deleted file mode 100644 index fb297adf8c5a..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerFactoryTest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.hamcrest.core.IsInstanceOf; -import org.junit.Assert; -import org.junit.Test; - -public class LongSumAveragerFactoryTest -{ - - @Test - public void testCreateAverager() - { - AveragerFactory fac = new LongSumAveragerFactory("test", 5, 1, "field"); - Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongSumAverager.class)); - } - -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerTest.java deleted file mode 100644 index b4631b5b4bf5..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongSumAveragerTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.averagers; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Collections; -import java.util.HashMap; - -public class LongSumAveragerTest -{ - @Test - public void testComputeResult() - { - BaseAverager avg = new LongSumAverager(3, "test", "field", 1); - - Assert.assertEquals(0.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(3.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - Assert.assertEquals(6.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 3), new HashMap<>()); - Assert.assertEquals(9.0, avg.computeResult(), 0.0); - - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - Assert.assertEquals(6.0, avg.computeResult(), 0.0); - - avg.skip(); - Assert.assertEquals(4.0, avg.computeResult(), 0.0); - } -} diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/test/TestConfig.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/test/TestConfig.java deleted file mode 100644 index 792394e2723e..000000000000 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/test/TestConfig.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.movingaverage.test; - -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.Map; - -/** - * Configuration for a unit test. 
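Each YAML fixture that follows binds to this TestConfig: query holds the movingAverage query, intermediateResults holds the mocked inner groupBy or timeseries rows, and expectedOutput is what the test asserts against (for example, with buckets: 2 the expected trailing7DayAvgTimeSpent of 3.0 is just (2.0 + 4.0) / 2 over the two mocked days). A hedged sketch of how a harness might load one fixture; the jackson-dataformat-yaml dependency and the file path are assumptions, as the actual runner is not shown in this patch:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import org.apache.druid.query.movingaverage.test.TestConfig;

import java.io.File;
import java.io.IOException;

class LoadFixtureSketch
{
  public static void main(String[] args) throws IOException
  {
    // Jackson binds the three public fields of TestConfig directly from YAML.
    ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory());
    TestConfig config = yamlMapper.readValue(
        new File("src/test/resources/queryTests/basicGroupByMovingAverage.yaml"),
        TestConfig.class
    );
    System.out.println(config.query.get("queryType")); // "movingAverage"
  }
}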
- */ -public class TestConfig -{ - public ObjectNode query; - public ArrayNode expectedOutput; - public Map intermediateResults; -} diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage.yaml deleted file mode 100644 index fba40e85a642..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage.yaml +++ /dev/null @@ -1,46 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_pl_dt_os - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 2 - name: trailing7DayAvgTimeSpent - fieldName: timeSpent - type: doubleMean - aggregations: - - name: timespent_secs - fieldName: timespent - type: longSum - postAggregations: - - type: arithmetic - name: timeSpent - fn: / - fields: - - type: fieldAccess - fieldName: timespent_secs - - type: constant - name: seconds_per_minute - value: 60.0 - postAveragers: [] -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - timespent_secs: 240.0 - timeSpent: 4.0 - trailing7DayAvgTimeSpent: 3.0 -intermediateResults: - groupBy: - - [1483228800000, m, 120.0, 2.0] - - [1483315200000, m, 240.0, 4.0] diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage2.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage2.yaml deleted file mode 100644 index ac422089925a..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicGroupByMovingAverage2.yaml +++ /dev/null @@ -1,47 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_pl_dt_os - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 2 - name: trailing7DayAvgTimeSpent - fieldName: timeSpent - type: doubleMean - aggregations: - - name: timespent_secs - fieldName: timespent - type: longSum - postAggregations: - - type: arithmetic - name: timeSpent - fn: / - fields: - - type: fieldAccess - fieldName: timespent_secs - - type: constant - name: seconds_per_minute - value: 60.0 - postAveragers: [ - ] -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - timespent_secs: 240.0 - timeSpent: 4.0 - trailing7DayAvgTimeSpent: 3.0 -intermediateResults: - groupBy: - - [1483228800000, m, 120.0, 2.0] - - [1483315200000, m, 240.0, 4.0] diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicTimeseriesMovingAverage.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicTimeseriesMovingAverage.yaml deleted file mode 100644 index 1458ed8c2af6..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/basicTimeseriesMovingAverage.yaml +++ /dev/null @@ -1,51 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_pl_dt_os - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: [] - averagers: - - buckets: 2 - name: trailing7DayAvgTimeSpent - fieldName: timeSpent - type: doubleMean - aggregations: - - name: timespent_secs - fieldName: timespent - type: longSum 
- postAggregations: - - type: arithmetic - name: timeSpent - fn: / - fields: - - type: fieldAccess - fieldName: timespent_secs - - type: constant - name: seconds_per_minute - value: 60.0 - postAveragers: [ - ] -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - timespent_secs: 240.0 - timeSpent: 4.0 - trailing7DayAvgTimeSpent: 3.0 -intermediateResults: - timeseries: - - timestamp: 2017-01-01T00:00Z - result: - timespent_secs: 120.0 - timeSpent: 2.0 - - timestamp: 2017-01-02T00:00Z - result: - timespent_secs: 240.0 - timeSpent: 4.0 diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/missingGroupByValues.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/missingGroupByValues.yaml deleted file mode 100644 index c4bb0c5ef253..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/missingGroupByValues.yaml +++ /dev/null @@ -1,60 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_pl_dt_os - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 2 - name: trailing7DayAvgTimeSpent - fieldName: timeSpent - type: doubleMean - aggregations: - - name: timespent_secs - fieldName: timespent - type: longSum - - name: someSum - fieldName: someSum_field - type: doubleSum - postAggregations: - - type: arithmetic - name: timeSpent - fn: / - fields: - - type: fieldAccess - fieldName: timespent_secs - - type: constant - name: seconds_per_minute - value: 60.0 - postAveragers: [ - ] -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - timespent_secs: 240 - timeSpent: 4.0 - trailing7DayAvgTimeSpent: 3.0 - someSum: 3.0 -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: f - timespent_secs: 0 - timeSpent: 0.0 - trailing7DayAvgTimeSpent: 1.0 - someSum: 0.0 -intermediateResults: - groupBy: - - [1483228800000, m, 120, 5.0, 2.0] - - [1483228800000, f, 120, 2.0, 2.0] - - [1483315200000, m, 240, 3.0, 4.0] diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersAsc.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersAsc.yaml deleted file mode 100644 index 161861c38164..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersAsc.yaml +++ /dev/null @@ -1,60 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_pl_dt_os - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 2 - name: trailing7DayAvgTimeSpent - fieldName: timeSpent - type: doubleMean - aggregations: - - name: timespent_secs - fieldName: timespent - type: longSum - postAggregations: - - type: arithmetic - name: timeSpent - fn: / - fields: - - type: fieldAccess - fieldName: timespent_secs - - type: constant - name: seconds_per_minute - value: 60.0 - postAveragers: [ - ] - limitSpec: - type: default - columns: - - dimension: trailing7DayAvgTimeSpent -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - timespent_secs: 240.0 - timeSpent: 4.0 - trailing7DayAvgTimeSpent: 3.0 -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: f - timespent_secs: 480.0 - timeSpent: 8.0 - trailing7DayAvgTimeSpent: 6.0 -intermediateResults: - groupBy: - - 
[1483228800000, m, 120.0, 2.0] - - [1483228800000, f, 240.0, 4.0] - - [1483315200000, m, 240.0, 4.0] - - [1483315200000, f, 480.0, 8.0] diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersDesc.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersDesc.yaml deleted file mode 100644 index 08bf53cfee25..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingAveragersDesc.yaml +++ /dev/null @@ -1,61 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_pl_dt_os - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 2 - name: trailing7DayAvgTimeSpent - fieldName: timeSpent - type: doubleMean - aggregations: - - name: timespent_secs - fieldName: timespent - type: longSum - postAggregations: - - type: arithmetic - name: timeSpent - fn: / - fields: - - type: fieldAccess - fieldName: timespent_secs - - type: constant - name: seconds_per_minute - value: 60.0 - postAveragers: [ - ] - limitSpec: - type: default - columns: - - dimension: trailing7DayAvgTimeSpent - direction: DESC -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: f - timespent_secs: 480.0 - timeSpent: 8.0 - trailing7DayAvgTimeSpent: 6.0 -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - timespent_secs: 240.0 - timeSpent: 4.0 - trailing7DayAvgTimeSpent: 3.0 -intermediateResults: - groupBy: - - [1483228800000, m, 120.0, 2.0] - - [1483228800000, f, 240.0, 4.0] - - [1483315200000, m, 240.0, 4.0] - - [1483315200000, f, 480.0, 8.0] diff --git a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAndMovingAvgMetric.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAndMovingAvgMetric.yaml deleted file mode 100644 index 4b438c441324..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAndMovingAvgMetric.yaml +++ /dev/null @@ -1,66 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_uc_ud - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 7 - name: trailing7DayAvgTotalPageViews - fieldName: totalPageViews - type: doubleMean - aggregations: - - name: addPageViews - fieldName: additive_page_views - type: longSum - - name: pageViews - fieldName: other_page_views - type: longSum - postAggregations: - - type: arithmetic - name: totalPageViews - fn: + - fields: - - type: fieldAccess - fieldName: addPageViews - - type: fieldAccess - fieldName: pageViews - postAveragers: [ - ] - limitSpec: - type: default - columns: - - dimension: addPageViews - direction: DESC - dimension: trailing7DayAvgTotalPageViews - direction: DESC -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: f - addPageViews: 1.0 - pageViews: 2.0 - totalPageViews: 3.0 - trailing7DayAvgTotalPageViews: 3.0 -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - addPageViews: 0 - pageViews: 0 - totalPageViews: 0.0 - trailing7DayAvgTotalPageViews: 2.142857142857143 -intermediateResults: - groupBy: - - [1483228800000, m, 5.0, 10.0, 15.0] - - [1483228800000, f, 6.0, 12.0, 18.0] - - [1483315200000, f, 1.0, 2.0, 3.0] diff --git 
a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAvgMetric.yaml b/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAvgMetric.yaml deleted file mode 100644 index 0e8c635103b9..000000000000 --- a/extensions-contrib/moving-average-query/src/test/resources/queryTests/sortingWithNonMovingAvgMetric.yaml +++ /dev/null @@ -1,64 +0,0 @@ -query: - queryType: movingAverage - dataSource: - type: table - name: slice_pf_us_uc_ud - context: { - } - granularity: - type: period - period: P1D - intervals: - - 2017-01-02T00:00Z/2017-01-03T00:00Z - dimensions: - - gender - averagers: - - buckets: 7 - name: trailing7DayAvgTotalPageViews - fieldName: totalPageViews - type: doubleMean - aggregations: - - name: addPageViews - fieldName: additive_page_views - type: longSum - - name: pageViews - fieldName: other_page_views - type: longSum - postAggregations: - - type: arithmetic - name: totalPageViews - fn: + - fields: - - type: fieldAccess - fieldName: addPageViews - - type: fieldAccess - fieldName: pageViews - postAveragers: [ - ] - limitSpec: - type: default - columns: - - dimension: addPageViews - direction: DESC -expectedOutput: -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: f - addPageViews: 1.0 - pageViews: 2.0 - totalPageViews: 3.0 - trailing7DayAvgTotalPageViews: 3.0 -- version: v1 - timestamp: 2017-01-02T00:00Z - event: - gender: m - addPageViews: 0 - pageViews: 0 - totalPageViews: 0.0 - trailing7DayAvgTotalPageViews: 2.142857142857143 -intermediateResults: - groupBy: - - [1483228800000, m, 5.0, 10.0, 15.0] - - [1483228800000, f, 6.0, 12.0, 18.0] - - [1483315200000, f, 1.0, 2.0, 3.0] diff --git a/extensions-contrib/opentsdb-emitter/pom.xml b/extensions-contrib/opentsdb-emitter/pom.xml deleted file mode 100644 index d67574436162..000000000000 --- a/extensions-contrib/opentsdb-emitter/pom.xml +++ /dev/null @@ -1,102 +0,0 @@ - - - - - - 4.0.0 - - org.apache.druid.extensions.contrib - druid-opentsdb-emitter - druid-opentsdb-emitter - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - com.sun.jersey - jersey-client - ${jersey.version} - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - provided - - - joda-time - joda-time - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - javax.ws.rs - jsr311-api - - - - - junit - junit - test - - - diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/EventConverter.java b/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/EventConverter.java deleted file mode 100644 index c7bfe305d381..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/EventConverter.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Strings; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.regex.Pattern; - -public class EventConverter -{ - private static final Logger log = new Logger(EventConverter.class); - private static final Pattern WHITESPACE = Pattern.compile("[\\s]+"); - - private final Map<String, Set<String>> metricMap; - private final String namespacePrefix; - - public EventConverter(ObjectMapper mapper, String metricMapPath, String namespacePrefix) - { - metricMap = readMap(mapper, metricMapPath); - this.namespacePrefix = namespacePrefix; - } - - protected String sanitize(String metric) - { - return WHITESPACE.matcher(metric.trim()).replaceAll("_").replace('/', '.'); - } - - private String buildMetric(String metric) - { - final String sanitized = sanitize(metric); - if (namespacePrefix == null) { - return sanitized; - } else { - return StringUtils.format("%s.%s", sanitize(namespacePrefix), sanitized); - } - } - - /** - * This function converts a Druid event to an OpenTSDB event. - * It also acts as a filter: it returns null if the event is not supposed to be emitted to OpenTSDB, - * and it filters out dimensions that are not supposed to be emitted. - * - * @param serviceMetricEvent Druid event of type {@link ServiceMetricEvent} - * - * @return {@link OpentsdbEvent} or null - */ - public OpentsdbEvent convert(ServiceMetricEvent serviceMetricEvent) - { - String metric = serviceMetricEvent.getMetric(); - if (!metricMap.containsKey(metric)) { - return null; - } - - long timestamp = serviceMetricEvent.getCreatedTime().getMillis() / 1000L; - Number value = serviceMetricEvent.getValue(); - - Map<String, Object> tags = new HashMap<>(); - String service = serviceMetricEvent.getService().replace(':', '_'); - String host = serviceMetricEvent.getHost().replace(':', '_'); - tags.put("service", service); - tags.put("host", host); - - Map<String, Object> userDims = serviceMetricEvent.getUserDims(); - for (String dim : metricMap.get(metric)) { - if (userDims.containsKey(dim)) { - Object dimValue = userDims.get(dim); - if (dimValue instanceof String) { - dimValue = ((String) dimValue).replace(':', '_'); - } - tags.put(dim, dimValue); - } - } - - return new OpentsdbEvent(buildMetric(metric), timestamp, value, tags); - } - - private Map<String, Set<String>> readMap(ObjectMapper mapper, String metricMapPath) - { - try { - InputStream is; - if (Strings.isNullOrEmpty(metricMapPath)) { - log.info("Using default metric map"); - is = this.getClass().getClassLoader().getResourceAsStream("defaultMetrics.json"); - } else { - log.info("Using metric map located at [%s]", metricMapPath); - is = new FileInputStream(new File(metricMapPath)); - } - return mapper.readerFor(new TypeReference<Map<String, Set<String>>>() - { - }).readValue(is); - } - catch (IOException e) { - throw new ISE(e, "Failed to parse metrics and dimensions"); - } - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitter.java b/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitter.java deleted file mode 100644 index 41c2413cbb6b..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitter.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; - -import java.util.concurrent.atomic.AtomicBoolean; - -public class OpentsdbEmitter implements Emitter -{ - private static final Logger log = new Logger(OpentsdbEmitter.class); - - private final OpentsdbSender sender; - private final EventConverter converter; - private final AtomicBoolean started = new AtomicBoolean(false); - - public OpentsdbEmitter(OpentsdbEmitterConfig config, ObjectMapper mapper) - { - this.sender = new OpentsdbSender( - config.getHost(), - config.getPort(), - config.getConnectionTimeout(), - config.getReadTimeout(), - config.getFlushThreshold(), - config.getMaxQueueSize(), - config.getConsumeDelay() - ); - this.converter = new EventConverter(mapper, config.getMetricMapPath(), config.getNamespacePrefix()); - } - - @Override - public void start() - { - synchronized (started) { - if (!started.get()) { - log.info("Starting Opentsdb Emitter."); - sender.start(); - started.set(true); - } - } - } - - @Override - public void emit(Event event) - { - if (!started.get()) { - throw new ISE("WTF emit was called while service is not started yet"); - } - if (event instanceof ServiceMetricEvent) { - OpentsdbEvent opentsdbEvent = converter.convert((ServiceMetricEvent) event); - if (opentsdbEvent != null) { - sender.enqueue(opentsdbEvent); - } else { - log.debug( - "Metric=[%s] has not been configured to be emitted to opentsdb", - ((ServiceMetricEvent) event).getMetric() - ); - } - } - } - - @Override - public void flush() - { - if (started.get()) { - sender.flush(); - } - } - - @Override - public void close() - { - if (started.get()) { - sender.close(); - started.set(false); - } - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfig.java b/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfig.java deleted file mode 100644 index 1d876e252f24..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfig.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
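The OpentsdbEmitterConfig that follows is bound by OpentsdbEmitterModule (further below) under the druid.emitter.opentsdb prefix. A hedged runtime.properties sketch, assuming the standard druid.emitter selection property; the host and metricMapPath values are illustrative, and the defaults shown come from the config class itself:

druid.emitter=opentsdb
druid.emitter.opentsdb.host=tsdb.example.com
druid.emitter.opentsdb.port=4242
# optional; defaults: flushThreshold=100, maxQueueSize=1000, consumeDelay=10000 (ms)
druid.emitter.opentsdb.flushThreshold=100
druid.emitter.opentsdb.maxQueueSize=1000
druid.emitter.opentsdb.consumeDelay=10000
# optional; falls back to the bundled defaultMetrics.json when unset
druid.emitter.opentsdb.metricMapPath=/etc/druid/opentsdbMetrics.json
druid.emitter.opentsdb.namespacePrefix=druid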
- */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import java.util.Objects; - -public class OpentsdbEmitterConfig -{ - private static final int DEFAULT_FLUSH_THRESHOLD = 100; - private static final int DEFAULT_MAX_QUEUE_SIZE = 1000; - private static final long DEFAULT_CONSUME_DELAY_MILLIS = 10000; - private static final int DEFAULT_CONNECTION_TIMEOUT_MILLIS = 2000; - private static final int DEFAULT_READ_TIMEOUT_MILLIS = 2000; - - @JsonProperty - private final String host; - - @JsonProperty - private final int port; - - @JsonProperty - private final int connectionTimeout; - - @JsonProperty - private final int readTimeout; - - @JsonProperty - private final int flushThreshold; - - @JsonProperty - private final int maxQueueSize; - - @JsonProperty - private final long consumeDelay; - - @JsonProperty - private final String metricMapPath; - - @JsonProperty - private final String namespacePrefix; - - @JsonCreator - public OpentsdbEmitterConfig( - @JsonProperty("host") String host, - @JsonProperty("port") Integer port, - @JsonProperty("connectionTimeout") Integer connectionTimeout, - @JsonProperty("readTimeout") Integer readTimeout, - @JsonProperty("flushThreshold") Integer flushThreshold, - @JsonProperty("maxQueueSize") Integer maxQueueSize, - @JsonProperty("consumeDelay") Long consumeDelay, - @JsonProperty("metricMapPath") String metricMapPath, - @JsonProperty("namespacePrefix") String namespacePrefix - ) - { - this.host = Preconditions.checkNotNull(host, "host can not be null."); - this.port = Preconditions.checkNotNull(port, "port can not be null"); - this.connectionTimeout = (connectionTimeout == null || connectionTimeout < 0) - ? DEFAULT_CONNECTION_TIMEOUT_MILLIS - : connectionTimeout; - this.readTimeout = - (readTimeout == null || readTimeout < 0) ? DEFAULT_READ_TIMEOUT_MILLIS : readTimeout; - this.flushThreshold = (flushThreshold == null || flushThreshold < 0) ? DEFAULT_FLUSH_THRESHOLD : flushThreshold; - this.maxQueueSize = (maxQueueSize == null || maxQueueSize < 0) ? DEFAULT_MAX_QUEUE_SIZE : maxQueueSize; - this.consumeDelay = (consumeDelay == null || consumeDelay < 0) ? DEFAULT_CONSUME_DELAY_MILLIS : consumeDelay; - this.metricMapPath = metricMapPath; - this.namespacePrefix = "".equals(namespacePrefix) ? null : namespacePrefix; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - OpentsdbEmitterConfig that = (OpentsdbEmitterConfig) o; - - if (!host.equals(that.host)) { - return false; - } - if (port != that.port) { - return false; - } - if (connectionTimeout != that.connectionTimeout) { - return false; - } - if (readTimeout != that.readTimeout) { - return false; - } - if (flushThreshold != that.flushThreshold) { - return false; - } - if (maxQueueSize != that.maxQueueSize) { - return false; - } - if (consumeDelay != that.consumeDelay) { - return false; - } - if (!Objects.equals(namespacePrefix, that.namespacePrefix)) { - return false; - } - return metricMapPath != null ? 
metricMapPath.equals(that.metricMapPath) - : that.metricMapPath == null; - } - - @Override - public int hashCode() - { - int result = host.hashCode(); - result = 31 * result + port; - result = 31 * result + connectionTimeout; - result = 31 * result + readTimeout; - result = 31 * result + flushThreshold; - result = 31 * result + maxQueueSize; - result = 31 * result + (int) consumeDelay; - result = 31 * result + (metricMapPath != null ? metricMapPath.hashCode() : 0); - result = 31 * result + (namespacePrefix != null ? namespacePrefix.hashCode() : 0); - return result; - } - - public String getHost() - { - return host; - } - - public int getPort() - { - return port; - } - - public int getConnectionTimeout() - { - return connectionTimeout; - } - - public int getReadTimeout() - { - return readTimeout; - } - - public int getFlushThreshold() - { - return flushThreshold; - } - - public int getMaxQueueSize() - { - return maxQueueSize; - } - - public long getConsumeDelay() - { - return consumeDelay; - } - - public String getMetricMapPath() - { - return metricMapPath; - } - - public String getNamespacePrefix() - { - return namespacePrefix; - } - -} diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterModule.java b/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterModule.java deleted file mode 100644 index a46d7150a54f..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterModule.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.Binder; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; - -public class OpentsdbEmitterModule implements DruidModule -{ - private static final String EMITTER_TYPE = "opentsdb"; - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." 
+ EMITTER_TYPE, OpentsdbEmitterConfig.class); - } - - @Provides - @ManageLifecycle - @Named(EMITTER_TYPE) - public Emitter getEmitter(OpentsdbEmitterConfig config, ObjectMapper mapper) - { - return new OpentsdbEmitter(config, mapper); - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEvent.java b/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEvent.java deleted file mode 100644 index e0979f228a4f..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbEvent.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -import java.util.Map; - -public class OpentsdbEvent -{ - - @JsonProperty - private final String metric; - - // timestamp in seconds - @JsonProperty - private final long timestamp; - - @JsonProperty - private final Object value; - - @JsonProperty - private final Map tags; - - public OpentsdbEvent( - @JsonProperty("metric") String metric, - @JsonProperty("timestamp") Long timestamp, - @JsonProperty("value") Object value, - @JsonProperty("tags") Map tags - ) - { - this.metric = Preconditions.checkNotNull(metric, "metric can not be null."); - this.timestamp = Preconditions.checkNotNull(timestamp, "timestamp can not be null."); - this.value = Preconditions.checkNotNull(value, "value can not be null."); - this.tags = Preconditions.checkNotNull(tags, "tags can not be null."); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - OpentsdbEvent that = (OpentsdbEvent) o; - - if (!metric.equals(that.metric)) { - return false; - } - if (timestamp != that.timestamp) { - return false; - } - if (!value.equals(that.value)) { - return false; - } - return tags.equals(that.tags); - } - - @Override - public int hashCode() - { - int result = metric.hashCode(); - result = 31 * result + (int) timestamp; - result = 31 * result + value.hashCode(); - result = 31 * result + tags.hashCode(); - return result; - } - - public String getMetric() - { - return metric; - } - - public long getTimestamp() - { - return timestamp; - } - - public Object getValue() - { - return value; - } - - public Map getTags() - { - return tags; - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbSender.java b/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbSender.java deleted file mode 100644 index 8f7e3f7a5060..000000000000 
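Serialized with the @JsonProperty names of OpentsdbEvent above, the OpentsdbSender that follows batches events and POSTs them as a JSON array to OpenTSDB's /api/put endpoint. An illustrative payload for a query/time event that passed the metric-map filter; the values echo the converter test near the end of this patch, and timestamps are in seconds:

[
  {
    "metric": "druid.query.time",
    "timestamp": 1483315200,
    "value": 10,
    "tags": {
      "service": "druid_broker",
      "host": "127.0.0.1_8080",
      "dataSource": "foo_bar",
      "type": "groupBy"
    }
  }
]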
--- a/extensions-contrib/opentsdb-emitter/src/main/java/org/apache/druid/emitter/opentsdb/OpentsdbSender.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.opentsdb; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.WebResource; -import org.apache.druid.java.util.common.logger.Logger; - -import javax.ws.rs.core.MediaType; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -public class OpentsdbSender -{ - /** - * @see Opentsdb - /api/put - */ - private static final String PATH = "/api/put"; - private static final Logger log = new Logger(OpentsdbSender.class); - private static final long FLUSH_TIMEOUT = 60000; // default flush wait 1 min - - private final AtomicLong countLostEvents = new AtomicLong(0); - private final int flushThreshold; - private final BlockingQueue<OpentsdbEvent> eventQueue; - private final ScheduledExecutorService scheduler; - private final EventConsumer eventConsumer; - private final long consumeDelay; - private final Client client; - private final WebResource webResource; - - public OpentsdbSender( - String host, - int port, - int connectionTimeout, - int readTimeout, - int flushThreshold, - int maxQueueSize, - long consumeDelay - ) - { - this.flushThreshold = flushThreshold; - this.consumeDelay = consumeDelay; - eventQueue = new ArrayBlockingQueue<>(maxQueueSize); - scheduler = Executors.newScheduledThreadPool(2, new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("OpentsdbEventSender-%s") - .build()); - eventConsumer = new EventConsumer(); - - client = Client.create(); - client.setConnectTimeout(connectionTimeout); - client.setReadTimeout(readTimeout); - webResource = client.resource("http://" + host + ":" + port + PATH); - } - - public void enqueue(OpentsdbEvent event) - { - if (!eventQueue.offer(event)) { - if (countLostEvents.getAndIncrement() % 1000 == 0) { - log.error( - "Lost a total of [%s] events because the emitter queue is full. Please increase its capacity.", - countLostEvents.get() - ); - } - } - } - - public void start() - { - scheduler.scheduleWithFixedDelay( - eventConsumer, - consumeDelay, - consumeDelay, - TimeUnit.MILLISECONDS - ); - } - - public void flush() - { - try { - EventConsumer flushConsumer = new EventConsumer(); - Future<?> future = scheduler.schedule(flushConsumer, 0, TimeUnit.MILLISECONDS); - future.get(FLUSH_TIMEOUT, TimeUnit.MILLISECONDS); - // send remaining events, whose count may be less than flushThreshold - eventConsumer.sendEvents(); - flushConsumer.sendEvents(); - } - catch (Exception e) { - log.warn(e, e.getMessage()); - } - } - - public void close() - { - flush(); - client.destroy(); - scheduler.shutdown(); - } - - private class EventConsumer implements Runnable - { - private final List<OpentsdbEvent> events; - - public EventConsumer() - { - events = new ArrayList<>(flushThreshold); - } - - @Override - public void run() - { - while (!eventQueue.isEmpty() && !scheduler.isShutdown()) { - OpentsdbEvent event = eventQueue.poll(); - events.add(event); - if (events.size() >= flushThreshold) { - sendEvents(); - } - } - } - - public void sendEvents() - { - if (!events.isEmpty()) { - try { - webResource.entity(events, MediaType.APPLICATION_JSON_TYPE).post(); - } - catch (Exception e) { - log.error(e, "Error occurred when sending metrics to the OpenTSDB server."); - } - finally { - events.clear(); - } - } - } - } - - @VisibleForTesting - WebResource getWebResource() - { - return webResource; - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/opentsdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 61ca3e643d12..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
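The one-line services file that follows is how Druid discovers the module: the standard java.util.ServiceLoader mechanism scans META-INF/services entries on the extension classpath. A minimal sketch of that discovery:

import org.apache.druid.initialization.DruidModule;

import java.util.ServiceLoader;

class ListDruidModulesSketch
{
  public static void main(String[] args)
  {
    // With this extension on the classpath, OpentsdbEmitterModule is listed here.
    for (DruidModule module : ServiceLoader.load(DruidModule.class)) {
      System.out.println(module.getClass().getName());
    }
  }
}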
- -org.apache.druid.emitter.opentsdb.OpentsdbEmitterModule diff --git a/extensions-contrib/opentsdb-emitter/src/main/resources/defaultMetrics.json b/extensions-contrib/opentsdb-emitter/src/main/resources/defaultMetrics.json deleted file mode 100644 index d00b79265420..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/main/resources/defaultMetrics.json +++ /dev/null @@ -1,211 +0,0 @@ -{ - "query/time": [ - "dataSource", - "type" - ], - "query/bytes": [ - "dataSource", - "type" - ], - "query/node/time": [ - "server" - ], - "query/node/bytes": [ - "server" - ], - "query/node/ttfb": [ - "server" - ], - "query/success/count": [], - "query/failed/count": [], - "query/interrupted/count": [], - "query/wait/time": [ - "segment" - ], - "query/cpu/time": [ - "dataSource", - "type" - ], - "jetty/numOpenConnections": [], - "query/cache/delta/numEntries": [], - "query/cache/delta/sizeBytes": [], - "query/cache/delta/hits": [], - "query/cache/delta/misses": [], - "query/cache/delta/evictions": [], - "query/cache/delta/hitRate": [], - "query/cache/delta/averageBytes": [], - "query/cache/delta/timeouts": [], - "query/cache/delta/errors": [], - "query/cache/total/numEntries": [], - "query/cache/total/sizeBytes": [], - "query/cache/total/hits": [], - "query/cache/total/misses": [], - "query/cache/total/evictions": [], - "query/cache/total/hitRate": [], - "query/cache/total/averageBytes": [], - "query/cache/total/timeouts": [], - "query/cache/total/errors": [], - "ingest/events/thrownAway": [ - "dataSource" - ], - "ingest/events/unparseable": [ - "dataSource" - ], - "ingest/events/duplicate": [ - "dataSource" - ], - "ingest/events/processed": [ - "dataSource" - ], - "ingest/rows/output": [ - "dataSource" - ], - "ingest/persists/count": [ - "dataSource" - ], - "ingest/persists/time": [ - "dataSource" - ], - "ingest/persists/cpu": [ - "dataSource" - ], - "ingest/persists/backPressure": [ - "dataSource" - ], - "ingest/persists/failed": [ - "dataSource" - ], - "ingest/handoff/failed": [ - "dataSource" - ], - "ingest/merge/time": [ - "dataSource" - ], - "ingest/merge/cpu": [ - "dataSource" - ], - "ingest/handoff/count": [ - "dataSource" - ], - "ingest/sink/count": [ - "dataSource" - ], - "ingest/events/messageGap": [ - "dataSource" - ], - "ingest/kafka/lag": [ - "dataSource" - ], - "ingest/kafka/maxLag": [ - "dataSource" - ], - "ingest/kafka/avgLag": [ - "dataSource" - ], - "task/run/time": [ - "dataSource" - ], - "segment/added/bytes": [ - "dataSource" - ], - "segment/moved/bytes": [ - "dataSource" - ], - "segment/nuked/bytes": [ - "dataSource" - ], - "segment/assigned/count": [ - "tier" - ], - "segment/moved/count": [ - "tier" - ], - "segment/dropped/count": [ - "tier" - ], - "segment/deleted/count": [ - "tier" - ], - "segment/unneeded/count": [ - "tier" - ], - "segment/cost/raw": [ - "tier" - ], - "segment/cost/normalization": [ - "tier" - ], - "segment/cost/normalized": [ - "tier" - ], - "segment/loadQueue/size": [ - "server" - ], - "segment/loadQueue/failed": [ - "server" - ], - "segment/loadQueue/count": [ - "server" - ], - "segment/dropQueue/count": [ - "server" - ], - "segment/size": [ - "dataSource" - ], - "segment/count": [ - "dataSource" - ], - "segment/overShadowed/count": [], - "segment/unavailable/count": [ - "dataSource" - ], - "segment/underReplicated/count": [ - "dataSource" - ], - "segment/max": [], - "segment/used": [ - "dataSource" - ], - "segment/usedPercent": [ - "dataSource" - ], - "segment/pendingDelete": [], - "jvm/pool/committed": [], - "jvm/pool/init": [], - "jvm/pool/max": [], 
- "jvm/pool/used": [], - "jvm/bufferpool/count": [], - "jvm/bufferpool/used": [], - "jvm/bufferpool/capacity": [], - "jvm/mem/init": [], - "jvm/mem/max": [], - "jvm/mem/used": [], - "jvm/mem/committed": [], - "jvm/gc/count": [], - "jvm/gc/cpu": [], - "ingest/events/buffered": [ - "dataSource" - ], - "ingest/bytes/received": [ - "dataSource" - ], - "sys/swap/free": [], - "sys/swap/max": [], - "sys/swap/pageIn": [], - "sys/swap/pageOut": [], - "sys/disk/write/count": [], - "sys/disk/read/count": [], - "sys/disk/write/size": [], - "sys/disk/read/size": [], - "sys/net/write/size": [], - "sys/net/read/size": [], - "sys/fs/used": [], - "sys/fs/max": [], - "sys/mem/used": [], - "sys/mem/max": [], - "sys/storage/used": [], - "sys/cpu": [], - "coordinator-segment/count": [], - "historical-segment/count": [] -} \ No newline at end of file diff --git a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java deleted file mode 100644 index 1380417f16d9..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.joda.time.DateTime; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -public class EventConverterTest -{ - private EventConverter converterWithNamespacePrefix; - private EventConverter converterWithNamespacePrefixContainingSpace; - private EventConverter converterWithoutNamespacePrefix; - - @Before - public void setUp() - { - converterWithNamespacePrefix = new EventConverter(new ObjectMapper(), null, "druid"); - converterWithNamespacePrefixContainingSpace = new EventConverter(new ObjectMapper(), null, "legendary druid"); - converterWithoutNamespacePrefix = new EventConverter(new ObjectMapper(), null, null); - } - - @Test - public void testSanitize() - { - String metric = " foo bar/baz"; - Assert.assertEquals("foo_bar.baz", converterWithNamespacePrefix.sanitize(metric)); - Assert.assertEquals("foo_bar.baz", converterWithNamespacePrefixContainingSpace.sanitize(metric)); - Assert.assertEquals("foo_bar.baz", converterWithoutNamespacePrefix.sanitize(metric)); - } - - @Test - public void testConvertWithNamespacePrefix() - { - DateTime dateTime = DateTimes.nowUtc(); - ServiceMetricEvent configuredEvent = new ServiceMetricEvent.Builder() - .setDimension("dataSource", "foo:bar") - .setDimension("type", "groupBy") - .build(dateTime, "query/time", 10) - .build("druid:broker", "127.0.0.1:8080"); - - Map expectedTags = new HashMap<>(); - expectedTags.put("service", "druid_broker"); - expectedTags.put("host", "127.0.0.1_8080"); - expectedTags.put("dataSource", "foo_bar"); - expectedTags.put("type", "groupBy"); - - OpentsdbEvent opentsdbEvent = converterWithNamespacePrefix.convert(configuredEvent); - Assert.assertEquals("druid.query.time", opentsdbEvent.getMetric()); - Assert.assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp()); - Assert.assertEquals(10, opentsdbEvent.getValue()); - Assert.assertEquals(expectedTags, opentsdbEvent.getTags()); - - ServiceMetricEvent notConfiguredEvent = new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .build(dateTime, "foo/bar", 10) - .build("broker", "brokerHost1"); - Assert.assertNull(converterWithNamespacePrefix.convert(notConfiguredEvent)); - } - - @Test - public void testConvertWithNamespacePrefixContainingSpace() - { - DateTime dateTime = DateTimes.nowUtc(); - ServiceMetricEvent configuredEvent = new ServiceMetricEvent.Builder() - .setDimension("dataSource", "foo:bar") - .setDimension("type", "groupBy") - .build(dateTime, "query/time", 10) - .build("druid:broker", "127.0.0.1:8080"); - - Map expectedTags = new HashMap<>(); - expectedTags.put("service", "druid_broker"); - expectedTags.put("host", "127.0.0.1_8080"); - expectedTags.put("dataSource", "foo_bar"); - expectedTags.put("type", "groupBy"); - - OpentsdbEvent opentsdbEvent = converterWithNamespacePrefixContainingSpace.convert(configuredEvent); - Assert.assertEquals("legendary_druid.query.time", opentsdbEvent.getMetric()); - Assert.assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp()); - Assert.assertEquals(10, opentsdbEvent.getValue()); - Assert.assertEquals(expectedTags, opentsdbEvent.getTags()); - - ServiceMetricEvent notConfiguredEvent = new ServiceMetricEvent.Builder() - 
.setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .build(dateTime, "foo/bar", 10) - .build("broker", "brokerHost1"); - Assert.assertNull(converterWithNamespacePrefixContainingSpace.convert(notConfiguredEvent)); - } - - @Test - public void testConvertWithoutNamespacePrefix() - { - DateTime dateTime = DateTimes.nowUtc(); - ServiceMetricEvent configuredEvent = new ServiceMetricEvent.Builder() - .setDimension("dataSource", "foo:bar") - .setDimension("type", "groupBy") - .build(dateTime, "query/time", 10) - .build("druid:broker", "127.0.0.1:8080"); - - Map expectedTags = new HashMap<>(); - expectedTags.put("service", "druid_broker"); - expectedTags.put("host", "127.0.0.1_8080"); - expectedTags.put("dataSource", "foo_bar"); - expectedTags.put("type", "groupBy"); - - OpentsdbEvent opentsdbEvent = converterWithoutNamespacePrefix.convert(configuredEvent); - Assert.assertEquals("query.time", opentsdbEvent.getMetric()); - Assert.assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp()); - Assert.assertEquals(10, opentsdbEvent.getValue()); - Assert.assertEquals(expectedTags, opentsdbEvent.getTags()); - - ServiceMetricEvent notConfiguredEvent = new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .build(dateTime, "foo/bar", 10) - .build("broker", "brokerHost1"); - Assert.assertNull(converterWithoutNamespacePrefix.convert(notConfiguredEvent)); - } - -} diff --git a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfigTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfigTest.java deleted file mode 100644 index 66c8f62e3320..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEmitterConfigTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
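Taken together, the assertions above pin down EventConverter's naming contract: runs of whitespace and colons become underscores, slashes become dots, and an optional namespace prefix is itself sanitized before being prepended with a dot. A minimal standalone sketch of just that contract (class and method names here are illustrative, not the extension's API):

import java.util.regex.Pattern;

// Illustrative re-implementation of the sanitize/prefix rules asserted above.
public class MetricNameSanitizer
{
  private static final Pattern BLANK_OR_COLON = Pattern.compile("[\\s:]+");

  // " foo bar/baz" -> "foo_bar.baz"
  public static String sanitize(String metric)
  {
    return BLANK_OR_COLON.matcher(metric.trim()).replaceAll("_").replace('/', '.');
  }

  // qualifiedName("legendary druid", "query/time") -> "legendary_druid.query.time"
  public static String qualifiedName(String namespacePrefix, String metric)
  {
    String sanitized = sanitize(metric);
    return (namespacePrefix == null || namespacePrefix.isEmpty())
           ? sanitized
           : sanitize(namespacePrefix) + "." + sanitized;
  }
}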
- */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class OpentsdbEmitterConfigTest -{ - private ObjectMapper mapper = new DefaultObjectMapper(); - - @Before - public void setUp() - { - mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper())); - } - - @Test - public void testSerDeserOpentsdbEmitterConfig() throws Exception - { - OpentsdbEmitterConfig opentsdbEmitterConfig = new OpentsdbEmitterConfig("localhost", 9999, 2000, 2000, 200, 2000, 10000L, null, "druid"); - String opentsdbEmitterConfigString = mapper.writeValueAsString(opentsdbEmitterConfig); - OpentsdbEmitterConfig expectedOpentsdbEmitterConfig = mapper.readerFor(OpentsdbEmitterConfig.class) - .readValue(opentsdbEmitterConfigString); - Assert.assertEquals(expectedOpentsdbEmitterConfig, opentsdbEmitterConfig); - } - - @Test - public void testSerDeserOpentsdbEmitterConfigWithNamespacePrefixContainingSpace() throws Exception - { - OpentsdbEmitterConfig opentsdbEmitterConfig = new OpentsdbEmitterConfig("localhost", 9999, 2000, 2000, 200, 2000, 10000L, null, "legendary druid"); - String opentsdbEmitterConfigString = mapper.writeValueAsString(opentsdbEmitterConfig); - OpentsdbEmitterConfig expectedOpentsdbEmitterConfig = mapper.readerFor(OpentsdbEmitterConfig.class) - .readValue(opentsdbEmitterConfigString); - Assert.assertEquals(expectedOpentsdbEmitterConfig, opentsdbEmitterConfig); - } - - @Test - public void testSerDeserOpentsdbEmitterConfigWithNullNamespacePrefix() throws Exception - { - OpentsdbEmitterConfig opentsdbEmitterConfig = new OpentsdbEmitterConfig("localhost", 9999, 2000, 2000, 200, 2000, 10000L, null, null); - String opentsdbEmitterConfigString = mapper.writeValueAsString(opentsdbEmitterConfig); - OpentsdbEmitterConfig expectedOpentsdbEmitterConfig = mapper.readerFor(OpentsdbEmitterConfig.class) - .readValue(opentsdbEmitterConfigString); - Assert.assertEquals(expectedOpentsdbEmitterConfig, opentsdbEmitterConfig); - } - - @Test - public void testSerDeserOpentsdbEmitterConfigWithEmptyNamespacePrefix() throws Exception - { - OpentsdbEmitterConfig opentsdbEmitterConfig = new OpentsdbEmitterConfig("localhost", 9999, 2000, 2000, 200, 2000, 10000L, null, ""); - String opentsdbEmitterConfigString = mapper.writeValueAsString(opentsdbEmitterConfig); - OpentsdbEmitterConfig expectedOpentsdbEmitterConfig = mapper.readerFor(OpentsdbEmitterConfig.class) - .readValue(opentsdbEmitterConfigString); - Assert.assertEquals(expectedOpentsdbEmitterConfig, opentsdbEmitterConfig); - } - - @Test - public void testJacksonModules() - { - Assert.assertTrue(new OpentsdbEmitterModule().getJacksonModules().isEmpty()); - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEventTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEventTest.java deleted file mode 100644 index 4e19a2562617..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbEventTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.opentsdb; - -import com.fasterxml.jackson.databind.InjectableValues; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -public class OpentsdbEventTest -{ - private ObjectMapper mapper = new DefaultObjectMapper(); - - @Before - public void setUp() - { - mapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper())); - } - - @Test - public void testSerDeserOpentsdbEvent() throws Exception - { - Map tags = new HashMap<>(); - tags.put("foo", "bar"); - tags.put("baz", 1); - OpentsdbEvent opentsdbEvent = new OpentsdbEvent("foo.bar", 1000L, 20, tags); - String opentsdbString = mapper.writeValueAsString(opentsdbEvent); - OpentsdbEvent expectedOpentsdbEvent = mapper.readerFor(OpentsdbEvent.class) - .readValue(opentsdbString); - Assert.assertEquals(expectedOpentsdbEvent, opentsdbEvent); - } -} diff --git a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbSenderTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbSenderTest.java deleted file mode 100644 index 79c8ae52ce4f..000000000000 --- a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/OpentsdbSenderTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.opentsdb; - -import org.junit.Assert; -import org.junit.Test; - -public class OpentsdbSenderTest -{ - @Test - public void testUrl() - { - OpentsdbSender sender = new OpentsdbSender("localhost", 9999, 2000, 2000, 100, 1000, 10000L); - String expectedUrl = "http://localhost:9999/api/put"; - Assert.assertEquals(expectedUrl, sender.getWebResource().getURI().toString()); - } -} diff --git a/extensions-contrib/redis-cache/pom.xml b/extensions-contrib/redis-cache/pom.xml deleted file mode 100644 index dfaa74c4bec7..000000000000 --- a/extensions-contrib/redis-cache/pom.xml +++ /dev/null @@ -1,95 +0,0 @@ - - - - - - 4.0.0 - - org.apache.druid.extensions.contrib - druid-redis-cache - druid-redis-cache - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - redis.clients - jedis - 2.9.0 - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - - - junit - junit - test - - - com.fiftyonred - mock-jedis - 0.4.0 - test - - - - diff --git a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCache.java b/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCache.java deleted file mode 100644 index d85f1e0f70af..000000000000 --- a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCache.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
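OpentsdbSenderTest above pins the endpoint to OpenTSDB's HTTP API at http://host:port/api/put, and OpentsdbEventTest pins the payload shape: a metric name, a timestamp in seconds, a numeric value, and a map of tags. Purely to illustrate that wire contract — the extension itself goes through a Jersey WebResource, as the test's getWebResource() call shows, so this is not its code — a single event could be posted with nothing but the JDK:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Illustrative only: one OpenTSDB put with the field layout OpentsdbEventTest asserts.
public class OpentsdbPutExample
{
  public static void main(String[] args) throws Exception
  {
    String body = "{\"metric\":\"druid.query.time\",\"timestamp\":1596120000,\"value\":10,"
                  + "\"tags\":{\"service\":\"druid_broker\",\"host\":\"127.0.0.1_8080\"}}";

    HttpURLConnection conn = (HttpURLConnection) new URL("http://localhost:9999/api/put").openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP " + conn.getResponseCode()); // OpenTSDB answers 204 on success
    conn.disconnect();
  }
}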
- */ - -package org.apache.druid.client.cache; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import org.apache.druid.java.util.common.lifecycle.LifecycleStop; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.service.ServiceEmitter; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; -import redis.clients.jedis.JedisPoolConfig; -import redis.clients.jedis.exceptions.JedisException; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; - -public class RedisCache implements Cache -{ - private static final Logger log = new Logger(RedisCache.class); - - private JedisPool pool; - private RedisCacheConfig config; - - private final AtomicLong hitCount = new AtomicLong(0); - private final AtomicLong missCount = new AtomicLong(0); - private final AtomicLong timeoutCount = new AtomicLong(0); - private final AtomicLong errorCount = new AtomicLong(0); - - private final AtomicLong priorRequestCount = new AtomicLong(0); - // both get、put and getBulk will increase request count by 1 - private final AtomicLong totalRequestCount = new AtomicLong(0); - - private RedisCache(JedisPool pool, RedisCacheConfig config) - { - this.pool = pool; - this.config = config; - } - - public static RedisCache create(final RedisCacheConfig config) - { - JedisPoolConfig poolConfig = new JedisPoolConfig(); - poolConfig.setMaxTotal(config.getMaxTotalConnections()); - poolConfig.setMaxIdle(config.getMaxIdleConnections()); - poolConfig.setMinIdle(config.getMinIdleConnections()); - - JedisPool pool = new JedisPool(poolConfig, config.getHost(), config.getPort(), config.getTimeout()); - return new RedisCache(pool, config); - } - - @Override - public byte[] get(NamedKey key) - { - totalRequestCount.incrementAndGet(); - - try (Jedis jedis = pool.getResource()) { - byte[] bytes = jedis.get(key.toByteArray()); - if (bytes == null) { - missCount.incrementAndGet(); - return null; - } else { - hitCount.incrementAndGet(); - return bytes; - } - } - catch (JedisException e) { - if (e.getMessage().contains("Read timed out")) { - timeoutCount.incrementAndGet(); - } else { - errorCount.incrementAndGet(); - } - log.warn(e, "Exception pulling item from cache"); - return null; - } - } - - @Override - public void put(NamedKey key, byte[] value) - { - totalRequestCount.incrementAndGet(); - - try (Jedis jedis = pool.getResource()) { - jedis.psetex(key.toByteArray(), config.getExpiration(), value); - } - catch (JedisException e) { - errorCount.incrementAndGet(); - log.warn(e, "Exception pushing item to cache"); - } - } - - @Override - public Map getBulk(Iterable keys) - { - totalRequestCount.incrementAndGet(); - - Map results = new HashMap<>(); - - try (Jedis jedis = pool.getResource()) { - List namedKeys = Lists.newArrayList(keys); - List byteKeys = Lists.transform(namedKeys, NamedKey::toByteArray); - - List byteValues = jedis.mget(byteKeys.toArray(new byte[0][])); - - for (int i = 0; i < byteValues.size(); ++i) { - if (byteValues.get(i) != null) { - results.put(namedKeys.get(i), byteValues.get(i)); - } - } - - hitCount.addAndGet(results.size()); - missCount.addAndGet(namedKeys.size() - results.size()); - } - catch (JedisException e) { - if (e.getMessage().contains("Read timed out")) { - timeoutCount.incrementAndGet(); - } else { - errorCount.incrementAndGet(); - } - log.warn(e, 
"Exception pulling items from cache"); - } - - return results; - } - - @Override - public void close(String namespace) - { - // no resources to cleanup - } - - @Override - @LifecycleStop - public void close() - { - pool.close(); - } - - @Override - public CacheStats getStats() - { - return new CacheStats( - hitCount.get(), - missCount.get(), - 0, - 0, - 0, - timeoutCount.get(), - errorCount.get() - ); - } - - @Override - public boolean isLocal() - { - return false; - } - - @Override - public void doMonitor(ServiceEmitter emitter) - { - final long priorCount = priorRequestCount.get(); - final long totalCount = totalRequestCount.get(); - final ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder(); - emitter.emit(builder.build("query/cache/redis/total/requests", totalCount)); - emitter.emit(builder.build("query/cache/redis/delta/requests", totalCount - priorCount)); - if (!priorRequestCount.compareAndSet(priorCount, totalCount)) { - log.error("Prior value changed while I was reporting! updating anyways"); - priorRequestCount.set(totalCount); - } - } - - @VisibleForTesting - static RedisCache create(final JedisPool pool, final RedisCacheConfig config) - { - return new RedisCache(pool, config); - } -} diff --git a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheConfig.java b/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheConfig.java deleted file mode 100644 index 0db1b3a352b9..000000000000 --- a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheConfig.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.client.cache; - -import com.fasterxml.jackson.annotation.JsonProperty; - -public class RedisCacheConfig -{ - @JsonProperty - private String host; - - @JsonProperty - private int port; - - // milliseconds, default to one day - @JsonProperty - private long expiration = 24 * 3600 * 1000; - - // milliseconds, the type is 'int' because current Jedis only accept 'int' for timeout - @JsonProperty - private int timeout = 2000; - - // max connections of redis connection pool - @JsonProperty - private int maxTotalConnections = 8; - - // max idle connections of redis connection pool - @JsonProperty - private int maxIdleConnections = 8; - - // min idle connections of redis connection pool - @JsonProperty - private int minIdleConnections = 0; - - public String getHost() - { - return host; - } - - public int getPort() - { - return port; - } - - public long getExpiration() - { - return expiration; - } - - public int getTimeout() - { - return timeout; - } - - public int getMaxTotalConnections() - { - return maxTotalConnections; - } - - public int getMaxIdleConnections() - { - return maxIdleConnections; - } - - public int getMinIdleConnections() - { - return minIdleConnections; - } -} diff --git a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheProvider.java b/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheProvider.java deleted file mode 100644 index fc05031a033b..000000000000 --- a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheProvider.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.client.cache; - -import com.fasterxml.jackson.annotation.JsonTypeName; - -@JsonTypeName("redis") -public class RedisCacheProvider extends RedisCacheConfig implements CacheProvider -{ - @Override - public Cache get() - { - return RedisCache.create(this); - } -} diff --git a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisDruidModule.java b/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisDruidModule.java deleted file mode 100644 index fae1b453ed65..000000000000 --- a/extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisDruidModule.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
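RedisCacheConfig above binds plain @JsonProperty fields, most of them defaulted, so a working configuration needs only the values that differ. A hedged sketch of deserializing one directly (the values are examples; in a Druid cluster these would normally arrive as druid.cache.* runtime properties rather than a JSON document):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.client.cache.RedisCacheConfig;

// Illustrative: Jackson binds the private @JsonProperty fields declared above.
public class RedisCacheConfigExample
{
  public static void main(String[] args) throws Exception
  {
    String json = "{\"host\":\"localhost\",\"port\":6379,\"expiration\":86400000,\"timeout\":2000}";
    RedisCacheConfig config = new ObjectMapper().readValue(json, RedisCacheConfig.class);
    System.out.println(config.getHost() + ":" + config.getPort()); // localhost:6379
  }
}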
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.client.cache; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -public class RedisDruidModule implements DruidModule -{ - @Override - public void configure(Binder binder) - { - // do nothing - } - - @Override - public List getJacksonModules() - { - return ImmutableList.of(new SimpleModule("DruidRedisCache").registerSubtypes(RedisCacheProvider.class)); - } -} diff --git a/extensions-contrib/redis-cache/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/redis-cache/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index d5dab6c57423..000000000000 --- a/extensions-contrib/redis-cache/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.client.cache.RedisDruidModule diff --git a/extensions-contrib/redis-cache/src/test/java/org/apache/druid/client/cache/RedisCacheTest.java b/extensions-contrib/redis-cache/src/test/java/org/apache/druid/client/cache/RedisCacheTest.java deleted file mode 100644 index e5699de971b1..000000000000 --- a/extensions-contrib/redis-cache/src/test/java/org/apache/druid/client/cache/RedisCacheTest.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
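RedisDruidModule above contributes no Guice bindings at all; registering RedisCacheProvider as a Jackson subtype is the whole integration, and the META-INF/services entry is what lets Druid discover the module on the classpath. A sketch of the subtype mechanism in isolation, assuming (as the test's uuid + ".type" wiring suggests) that CacheProvider carries a @JsonTypeInfo(property = "type") annotation:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.apache.druid.client.cache.CacheProvider;
import org.apache.druid.client.cache.RedisCacheProvider;

// Illustrative: once the subtype is registered, {"type": "redis", ...}
// resolves to RedisCacheProvider during polymorphic deserialization.
public class SubtypeExample
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new SimpleModule("DruidRedisCache").registerSubtypes(RedisCacheProvider.class));

    CacheProvider provider = mapper.readValue(
        "{\"type\":\"redis\",\"host\":\"localhost\",\"port\":6379}",
        CacheProvider.class
    );
    System.out.println(provider.getClass().getSimpleName()); // RedisCacheProvider
  }
}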
- */ - -package org.apache.druid.client.cache; - -import com.fiftyonred.mock_jedis.MockJedis; -import com.fiftyonred.mock_jedis.MockJedisPool; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.primitives.Ints; -import com.google.inject.Inject; -import com.google.inject.Injector; -import com.google.inject.name.Names; -import org.apache.druid.guice.GuiceInjectors; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.Initialization; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.lifecycle.Lifecycle; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import redis.clients.jedis.JedisPoolConfig; - -import java.util.Map; -import java.util.UUID; - -public class RedisCacheTest -{ - private static final byte[] HI = StringUtils.toUtf8("hiiiiiiiiiiiiiiiiiii"); - private static final byte[] HO = StringUtils.toUtf8("hooooooooooooooooooo"); - - private RedisCache cache; - private final RedisCacheConfig cacheConfig = new RedisCacheConfig() - { - @Override - public int getTimeout() - { - return 10; - } - - @Override - public long getExpiration() - { - return 3600000; - } - }; - - @Before - public void setUp() - { - JedisPoolConfig poolConfig = new JedisPoolConfig(); - poolConfig.setMaxTotal(cacheConfig.getMaxTotalConnections()); - poolConfig.setMaxIdle(cacheConfig.getMaxIdleConnections()); - poolConfig.setMinIdle(cacheConfig.getMinIdleConnections()); - - MockJedisPool pool = new MockJedisPool(poolConfig, "localhost"); - // orginal MockJedis do not support 'milliseconds' in long type, - // for test we override to support it - pool.setClient(new MockJedis("localhost") - { - @Override - public String psetex(byte[] key, long milliseconds, byte[] value) - { - return this.psetex(key, (int) milliseconds, value); - } - }); - - cache = RedisCache.create(pool, cacheConfig); - } - - @Test - public void testBasicInjection() throws Exception - { - final RedisCacheConfig config = new RedisCacheConfig(); - Injector injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), ImmutableList.of( - binder -> { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - - binder.bind(RedisCacheConfig.class).toInstance(config); - binder.bind(Cache.class).toProvider(RedisCacheProviderWithConfig.class).in(ManageLifecycle.class); - } - ) - ); - Lifecycle lifecycle = injector.getInstance(Lifecycle.class); - lifecycle.start(); - try { - Cache cache = injector.getInstance(Cache.class); - Assert.assertEquals(RedisCache.class, cache.getClass()); - } - finally { - lifecycle.stop(); - } - } - - @Test - public void testSimpleInjection() - { - final String uuid = UUID.randomUUID().toString(); - System.setProperty(uuid + ".type", "redis"); - final Injector injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), ImmutableList.of( - binder -> { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test/redis"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - - binder.bind(Cache.class).toProvider(CacheProvider.class); - JsonConfigProvider.bind(binder, 
uuid, CacheProvider.class); - } - ) - ); - final CacheProvider cacheProvider = injector.getInstance(CacheProvider.class); - Assert.assertNotNull(cacheProvider); - Assert.assertEquals(RedisCacheProvider.class, cacheProvider.getClass()); - } - - @Test - public void testSanity() - { - Assert.assertNull(cache.get(new Cache.NamedKey("a", HI))); - put(cache, "a", HI, 0); - Assert.assertEquals(0, get(cache, "a", HI)); - Assert.assertNull(cache.get(new Cache.NamedKey("the", HI))); - - put(cache, "the", HI, 1); - Assert.assertEquals(0, get(cache, "a", HI)); - Assert.assertEquals(1, get(cache, "the", HI)); - - put(cache, "the", HO, 10); - Assert.assertEquals(0, get(cache, "a", HI)); - Assert.assertNull(cache.get(new Cache.NamedKey("a", HO))); - Assert.assertEquals(1, get(cache, "the", HI)); - Assert.assertEquals(10, get(cache, "the", HO)); - - cache.close("the"); - Assert.assertEquals(0, get(cache, "a", HI)); - Assert.assertNull(cache.get(new Cache.NamedKey("a", HO))); - } - - @Test - public void testGetBulk() - { - Assert.assertNull(cache.get(new Cache.NamedKey("the", HI))); - - put(cache, "the", HI, 1); - put(cache, "the", HO, 10); - - Cache.NamedKey key1 = new Cache.NamedKey("the", HI); - Cache.NamedKey key2 = new Cache.NamedKey("the", HO); - Cache.NamedKey key3 = new Cache.NamedKey("a", HI); - - Map result = cache.getBulk( - Lists.newArrayList( - key1, - key2, - key3 - ) - ); - - Assert.assertEquals(1, Ints.fromByteArray(result.get(key1))); - Assert.assertEquals(10, Ints.fromByteArray(result.get(key2))); - Assert.assertEquals(null, result.get(key3)); - } - - public void put(Cache cache, String namespace, byte[] key, Integer value) - { - cache.put(new Cache.NamedKey(namespace, key), Ints.toByteArray(value)); - } - - public int get(Cache cache, String namespace, byte[] key) - { - return Ints.fromByteArray(cache.get(new Cache.NamedKey(namespace, key))); - } -} - -class RedisCacheProviderWithConfig extends RedisCacheProvider -{ - private final RedisCacheConfig config; - - @Inject - public RedisCacheProviderWithConfig(RedisCacheConfig config) - { - this.config = config; - } - - @Override - public Cache get() - { - return RedisCache.create(config); - } -} - diff --git a/extensions-contrib/sqlserver-metadata-storage/pom.xml b/extensions-contrib/sqlserver-metadata-storage/pom.xml deleted file mode 100644 index 6f696e8e213a..000000000000 --- a/extensions-contrib/sqlserver-metadata-storage/pom.xml +++ /dev/null @@ -1,88 +0,0 @@ - - - - 4.0.0 - - org.apache.druid.extensions.contrib - sqlserver-metadata-storage - sqlserver-metadata-storage - sqlserver-metadata-storage - - - org.apache.druid - druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-server - ${project.parent.version} - provided - - - - org.jdbi - jdbi - provided - - - com.google.guava - guava - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.google.inject.extensions - guice-multibindings - provided - - - org.apache.commons - commons-dbcp2 - provided - - - - junit - junit - test - - - - diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnector.java b/extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnector.java deleted file mode 100644 index 152501dc3c4c..000000000000 --- 
a/extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnector.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.metadata.storage.sqlserver; - -import com.google.common.base.Supplier; -import com.google.inject.Inject; -import org.apache.commons.dbcp2.BasicDataSource; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.metadata.MetadataStorageConnectorConfig; -import org.apache.druid.metadata.MetadataStorageTablesConfig; -import org.apache.druid.metadata.SQLMetadataConnector; -import org.skife.jdbi.v2.Binding; -import org.skife.jdbi.v2.ColonPrefixNamedParamStatementRewriter; -import org.skife.jdbi.v2.DBI; -import org.skife.jdbi.v2.Handle; -import org.skife.jdbi.v2.StatementContext; -import org.skife.jdbi.v2.tweak.HandleCallback; -import org.skife.jdbi.v2.tweak.RewrittenStatement; -import org.skife.jdbi.v2.tweak.StatementRewriter; -import org.skife.jdbi.v2.util.StringMapper; - -import java.sql.SQLException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Pattern; - -@SuppressWarnings("nls") -public class SQLServerConnector extends SQLMetadataConnector -{ - private static final Logger log = new Logger(SQLServerConnector.class); - - /** - *

- * <p>
- * <blockquote>
-   *
-   * <pre>
-   *
-   * Sql Server equivalent to the PAYLOAD_TYPE value in other SQLMetadataConnectors.
-   *
-   * SqlServer - PAYLOAD_TYPE = "VARBINARY(MAX)"
-   *     Variable-length binary data
-   *
-   * PostgreSQL - PAYLOAD_TYPE = "BYTEA"
-   *     variable-length binary string
-   *
-   * MySQL - PAYLOAD_TYPE = "LONGBLOB"
-   *     a binary large object that can hold a variable amount of data
-   *
-   * </pre>
-   *
- * </blockquote>
- * <p>
- *
- * See also PostgreSQLConnector and MySQLConnector in corresponding modules in Druid.
- *
- * @see MS SQL Server Numeric Types
- */
- private static final String PAYLOAD_TYPE = "VARBINARY(MAX)";
-
- /**
- * <p>
- * <blockquote>
-   *
-   * <pre>
-   *
-   * Sql Server equivalent to the SERIAL_TYPE value in other SQLMetadataConnectors.
-   *
-   * SqlServer - SERIAL_TYPE = "[bigint] IDENTITY (1, 1)"
-   *     The bigint range is from -9,223,372,036,854,775,808 to 9,223,372,036,854,775,807
-   *
-   * PostgreSQL - SERIAL_TYPE = "BIGSERIAL"
-   *     The BIGSERIAL range is from 1 to 9223372036854775807
-   *
-   * MySQL - SERIAL_TYPE = "BIGINT(20) AUTO_INCREMENT"
-   *     The BIGINT range is from -9223372036854775808 to 9223372036854775807
-   *     Also note that "SERIAL" is an alias for "BIGINT UNSIGNED NOT NULL AUTO_INCREMENT UNIQUE"
-   *
-   * </pre>
-   *
- * </blockquote>
- * <p>
- *
- * See also PostgreSQLConnector and MySQLConnector in corresponding modules in Druid.
- *
- * @see MS SQL Server Numeric Types
- */
- private static final String SERIAL_TYPE = "[bigint] IDENTITY (1, 1)";
-
- private static final String QUOTE_STRING = "\\\"";
- public static final int DEFAULT_STREAMING_RESULT_SIZE = 100;
-
- private final DBI dbi;
-
- /**
- * <p>
- * <blockquote>
-   *
-   * <pre>
-   * Classify Transient Sql State Codes
-   * </pre>
-   *
- * </blockquote>
- * <p>
- * - * @see Spring Framework SQLStateSQLExceptionTranslator - * @see SQLException#getSQLState() - */ - private final Set TRANSIENT_SQL_CLASS_CODES = new HashSet<>(Arrays.asList( - "08", "53", "54", "57", "58", // Resource Failures - "JW", "JZ", "S1", // Transient Failures - "40" // Transaction Rollback - )); - - @Inject - public SQLServerConnector(Supplier config, Supplier dbTables) - { - super(config, dbTables); - - final BasicDataSource datasource = getDatasource(); - datasource.setDriverClassLoader(getClass().getClassLoader()); - datasource.setDriverClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); - - this.dbi = new DBI(datasource); - - this.dbi.setStatementRewriter(new CustomStatementRewriter()); - - log.info("Configured Sql Server as metadata storage"); - } - - public static class CustomStatementRewriter implements StatementRewriter - { - - private static final Pattern REWRITE_PATTERN1 = Pattern.compile("(?i)BOOLEAN NOT NULL DEFAULT FALSE"); - private static final Pattern REWRITE_PATTERN2 = Pattern.compile("(?i)BOOLEAN NOT NULL DEFAULT TRUE"); - private static final Pattern REWRITE_PATTERN3 = Pattern.compile("(?i)BOOLEAN DEFAULT FALSE"); - private static final Pattern REWRITE_PATTERN4 = Pattern.compile("(?i)BOOLEAN DEFAULT TRUE"); - private static final Pattern REWRITE_PATTERN5 = Pattern.compile("(?i)BOOLEAN"); - private static final Pattern REWRITE_PATTERN6 = Pattern.compile("(?i)TRUE"); - private static final Pattern REWRITE_PATTERN7 = Pattern.compile("(?i)FALSE"); - - private ColonPrefixNamedParamStatementRewriter colonPrefixNamedParamStatementRewriter = new ColonPrefixNamedParamStatementRewriter(); - - @Override - public RewrittenStatement rewrite(String sql, Binding params, StatementContext ctx) - { - - String currentSql = sql; - currentSql = REWRITE_PATTERN1.matcher(currentSql).replaceAll("BIT NOT NULL DEFAULT (0)"); - currentSql = REWRITE_PATTERN2.matcher(currentSql).replaceAll("BIT NOT NULL DEFAULT (1)"); - currentSql = REWRITE_PATTERN3.matcher(currentSql).replaceAll("BIT NOT NULL DEFAULT (0)"); - currentSql = REWRITE_PATTERN4.matcher(currentSql).replaceAll("BIT NOT NULL DEFAULT (1)"); - currentSql = REWRITE_PATTERN5.matcher(currentSql).replaceAll("BIT"); - currentSql = REWRITE_PATTERN6.matcher(currentSql).replaceAll("1"); - currentSql = REWRITE_PATTERN7.matcher(currentSql).replaceAll("0"); - - return (colonPrefixNamedParamStatementRewriter).rewrite(currentSql, params, ctx); - - } - } - - @Override - protected String getPayloadType() - { - return PAYLOAD_TYPE; - } - - @Override - protected String getSerialType() - { - return SERIAL_TYPE; - } - - @Override - public String getQuoteString() - { - return QUOTE_STRING; - } - - @Override - protected int getStreamingFetchSize() - { - return DEFAULT_STREAMING_RESULT_SIZE; - } - - @Override - public boolean tableExists(final Handle handle, final String tableName) - { - return !handle.createQuery("SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = :tableName") - .bind("tableName", tableName) - .map(StringMapper.FIRST) - .list() - .isEmpty(); - } - - /** - * - * {@inheritDoc} - * - * @see http://stackoverflow.com/questions/1197733/does-sql-server-offer-anything-like-mysqls-on-duplicate-key-update - * - */ - @Override - public Void insertOrUpdate( - final String tableName, - final String keyColumn, - final String valueColumn, - final String key, - final byte[] value) - { - return getDBI().withHandle( - new HandleCallback() - { - @Override - public Void withHandle(Handle handle) - { - 
handle.createStatement(StringUtils.format( - "MERGE INTO %1$s WITH (UPDLOCK, HOLDLOCK) as target" - + " USING " - + " (:key, :value) as source (%2$s, %3$s)" - + " ON" - + " (target.%2$s = source.%2$s)" - + " WHEN MATCHED THEN UPDATE SET %3$s = :value" - + " WHEN NOT MATCHED THEN INSERT (%2$s, %3$s) VALUES (:key, :value)", - tableName, - keyColumn, - valueColumn)) - .bind("key", key) - .bind("value", value) - .execute(); - - return null; - } - }); - } - - @Override - public DBI getDBI() - { - return dbi; - } - - /** - * - * {@inheritDoc} - * - * @see SQLException#getSQLState() - * - */ - @Override - protected boolean connectorIsTransientException(Throwable e) - { - if (e instanceof SQLException) { - final String sqlState = ((SQLException) e).getSQLState(); - if (sqlState == null) { - return false; - } - - final String sqlClassCode = sqlState.substring(0, 2); - if (TRANSIENT_SQL_CLASS_CODES.contains(sqlClassCode)) { - return true; - } - } - return false; - } -} diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerMetadataStorageModule.java b/extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerMetadataStorageModule.java deleted file mode 100644 index c4ac67217826..000000000000 --- a/extensions-contrib/sqlserver-metadata-storage/src/main/java/org/apache/druid/metadata/storage/sqlserver/SQLServerMetadataStorageModule.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
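connectorIsTransientException above decides retryability from the first two characters of the SQLSTATE — the "class code" — rather than from vendor-specific error numbers. The same check in isolation, with the class-code set copied from the connector (compare the expectations in SQLServerConnectorTest below):

import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Illustrative: SQLSTATE class-code classification as used above.
public class TransientSqlStateCheck
{
  private static final Set<String> TRANSIENT_CLASS_CODES = new HashSet<>(Arrays.asList(
      "08", "53", "54", "57", "58", // resource failures
      "JW", "JZ", "S1",             // transient failures
      "40"                          // transaction rollback
  ));

  public static boolean isTransient(Throwable e)
  {
    if (e instanceof SQLException) {
      String sqlState = ((SQLException) e).getSQLState();
      return sqlState != null && sqlState.length() >= 2
             && TRANSIENT_CLASS_CODES.contains(sqlState.substring(0, 2));
    }
    return false;
  }

  public static void main(String[] args)
  {
    System.out.println(isTransient(new SQLException("deadlock victim", "40001"))); // true
    System.out.println(isTransient(new SQLException("syntax error", "42000")));    // false
  }
}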
- */ - -package org.apache.druid.metadata.storage.sqlserver; - -import com.fasterxml.jackson.databind.Module; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.google.inject.Key; -import org.apache.druid.guice.LazySingleton; -import org.apache.druid.guice.PolyBind; -import org.apache.druid.guice.SQLMetadataStorageDruidModule; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.metadata.MetadataStorageActionHandlerFactory; -import org.apache.druid.metadata.MetadataStorageConnector; -import org.apache.druid.metadata.MetadataStorageProvider; -import org.apache.druid.metadata.NoopMetadataStorageProvider; -import org.apache.druid.metadata.SQLMetadataConnector; -import org.apache.druid.metadata.SQLServerMetadataStorageActionHandlerFactory; - -import java.util.List; - -@SuppressWarnings("nls") -public class SQLServerMetadataStorageModule extends SQLMetadataStorageDruidModule implements DruidModule -{ - - public static final String TYPE = "sqlserver"; - - public SQLServerMetadataStorageModule() - { - super(TYPE); - } - - @Override - public List getJacksonModules() - { - return ImmutableList.of(); - } - - @Override - public void configure(Binder binder) - { - super.configure(binder); - - PolyBind - .optionBinder(binder, Key.get(MetadataStorageProvider.class)) - .addBinding(TYPE) - .to(NoopMetadataStorageProvider.class) - .in(LazySingleton.class); - - PolyBind - .optionBinder(binder, Key.get(MetadataStorageConnector.class)) - .addBinding(TYPE) - .to(SQLServerConnector.class) - .in(LazySingleton.class); - - PolyBind - .optionBinder(binder, Key.get(SQLMetadataConnector.class)) - .addBinding(TYPE) - .to(SQLServerConnector.class) - .in(LazySingleton.class); - - PolyBind.optionBinder(binder, Key.get(MetadataStorageActionHandlerFactory.class)) - .addBinding(TYPE) - .to(SQLServerMetadataStorageActionHandlerFactory.class) - .in(LazySingleton.class); - } -} diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/sqlserver-metadata-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 7d325254c33c..000000000000 --- a/extensions-contrib/sqlserver-metadata-storage/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -org.apache.druid.metadata.storage.sqlserver.SQLServerMetadataStorageModule diff --git a/extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java b/extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java deleted file mode 100644 index 7dc3ae300827..000000000000 --- a/extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.metadata.storage.sqlserver; - -import junit.framework.Assert; -import org.apache.druid.metadata.storage.sqlserver.SQLServerConnector.CustomStatementRewriter; -import org.junit.Before; -import org.junit.Test; -import org.skife.jdbi.v2.Binding; -import org.skife.jdbi.v2.StatementContext; -import org.skife.jdbi.v2.exceptions.UnableToCreateStatementException; -import org.skife.jdbi.v2.tweak.RewrittenStatement; - -@SuppressWarnings("nls") -public class CustomStatementRewriterTest -{ - - private CustomStatementRewriter customStatementRewriter; - private Binding params; - private StatementContext ctx; - - @Before - public void setUp() - { - customStatementRewriter = new CustomStatementRewriter(); - - params = null; - ctx = null; - } - - private String rewrite(String sql) - { - RewrittenStatement rewrittenStatement = customStatementRewriter.rewrite(sql, params, ctx); - return rewrittenStatement.getSql(); - } - - @Test - public void testExactPatternReplacement() - { - - Assert.assertEquals("BIT NOT NULL DEFAULT (0)", rewrite("BOOLEAN NOT NULL DEFAULT FALSE")); - Assert.assertEquals("BIT NOT NULL DEFAULT (1)", rewrite("BOOLEAN NOT NULL DEFAULT TRUE")); - Assert.assertEquals("BIT NOT NULL DEFAULT (0)", rewrite("BOOLEAN DEFAULT FALSE")); - Assert.assertEquals("BIT NOT NULL DEFAULT (1)", rewrite("BOOLEAN DEFAULT TRUE")); - Assert.assertEquals("BIT", rewrite("BOOLEAN")); - Assert.assertEquals("1", rewrite("TRUE")); - Assert.assertEquals("0", rewrite("FALSE")); - } - - /** - * See https://github.com/jdbi/jdbi/blob/jdbi2/src/test/java/org/skife/jdbi/v2/TestColonStatementRewriter.java - */ - @Test - public void testCustomStatementRewriter() - { - - Assert.assertEquals("select column# from table1 where id = ?", - rewrite("select column# from table1 where id = :id")); - - Assert.assertEquals("select * from table2\n where id = ?", rewrite("select * from table2\n where id = :id")); - - try { - rewrite("select * from table3 where id = :\u0091\u009c"); // Control codes - // - - // https://en.wikipedia.org/wiki/List_of_Unicode_characters - Assert.fail("Expected 
'UnableToCreateStatementException'"); - } - catch (UnableToCreateStatementException e) { - // expected - } - - } - - /** - * - * @see org.apache.druid.metadata.SQLMetadataConnector#createTable(String, Iterable) - * - */ - @Test - public void testSQLMetadataConnectorCreateTable() - { - String sqlIn = "CREATE TABLE %1$s (\n" - + " id VARCHAR(255) NOT NULL,\n" - + " dataSource VARCHAR(255) NOT NULL,\n" - + " created_date VARCHAR(255) NOT NULL,\n" - + " start VARCHAR(255) NOT NULL,\n" - + " `end` VARCHAR(255) NOT NULL,\n" - + " partitioned BOOLEAN NOT NULL,\n" - + " version VARCHAR(255) NOT NULL,\n" - + " used BOOLEAN NOT NULL,\n" - + " payload %2$s NOT NULL,\n" - + " PRIMARY KEY (id)\n" - + ")"; - - String sqlOut = "CREATE TABLE %1$s (\n" + - " id VARCHAR(255) NOT NULL,\n" + - " dataSource VARCHAR(255) NOT NULL,\n" + - " created_date VARCHAR(255) NOT NULL,\n" + - " start VARCHAR(255) NOT NULL,\n" + - " `end` VARCHAR(255) NOT NULL,\n" + - " partitioned BIT NOT NULL,\n" + - " version VARCHAR(255) NOT NULL,\n" + - " used BIT NOT NULL,\n" + - " payload %2$s NOT NULL,\n" + - " PRIMARY KEY (id)\n" + - ")"; - - Assert.assertEquals(sqlOut, rewrite(sqlIn)); - - } - - /** - * - * @see org.apache.druid.metadata.SQLMetadataStorageActionHandler#setStatus(String, - * boolean, Object) - * - */ - @Test - public void testSQLMetadataStorageActionHandlerSetStatus() - { - Assert.assertEquals("UPDATE %s SET active = ?, status_payload = ? WHERE id = ? AND active = 1", - rewrite("UPDATE %s SET active = :active, status_payload = :status_payload WHERE id = :id AND active = TRUE")); - - } -} diff --git a/extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java b/extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java deleted file mode 100644 index d061b99701de..000000000000 --- a/extensions-contrib/sqlserver-metadata-storage/src/test/java/org/apache/druid/metadata/storage/sqlserver/SQLServerConnectorTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.metadata.storage.sqlserver; - -import com.google.common.base.Suppliers; -import org.apache.druid.metadata.MetadataStorageConnectorConfig; -import org.apache.druid.metadata.MetadataStorageTablesConfig; -import org.junit.Assert; -import org.junit.Test; - -import java.sql.SQLException; - -@SuppressWarnings("nls") -public class SQLServerConnectorTest -{ - - @Test - public void testIsTransientException() - { - SQLServerConnector connector = new SQLServerConnector( - Suppliers.ofInstance(new MetadataStorageConnectorConfig()), - Suppliers.ofInstance( - new MetadataStorageTablesConfig( - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ) - ) - ); - - Assert.assertTrue(connector.isTransientException(new SQLException("Resource Failure!", "08DIE"))); - Assert.assertTrue(connector.isTransientException(new SQLException("Resource Failure as well!", "53RES"))); - Assert.assertTrue(connector.isTransientException(new SQLException("Transient Failures", "JW001"))); - Assert.assertTrue(connector.isTransientException(new SQLException("Transient Rollback", "40001"))); - - Assert.assertFalse(connector.isTransientException(new SQLException("SQLException with reason only"))); - Assert.assertFalse(connector.isTransientException(new SQLException())); - Assert.assertFalse(connector.isTransientException(new Exception("Exception with reason only"))); - Assert.assertFalse(connector.isTransientException(new Throwable("Throwable with reason only"))); - } - -} diff --git a/extensions-contrib/statsd-emitter/pom.xml b/extensions-contrib/statsd-emitter/pom.xml deleted file mode 100644 index 650325cb88c9..000000000000 --- a/extensions-contrib/statsd-emitter/pom.xml +++ /dev/null @@ -1,111 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!-- - ~ Licensed to the Apache Software Foundation (ASF) under one - ~ or more contributor license agreements. See the NOTICE file - ~ distributed with this work for additional information - ~ regarding copyright ownership. The ASF licenses this file - ~ to you under the Apache License, Version 2.0 (the - ~ "License"); you may not use this file except in compliance - ~ with the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, - ~ software distributed under the License is distributed on an - ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - ~ KIND, either express or implied. See the License for the - ~ specific language governing permissions and limitations - ~ under the License. - --> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <parent> - <artifactId>druid</artifactId> - <groupId>org.apache.druid</groupId> - <version>0.19.0-iap2-SNAPSHOT</version> - <relativePath>../../pom.xml</relativePath> - </parent> - <modelVersion>4.0.0</modelVersion> - - <groupId>org.apache.druid.extensions.contrib</groupId> - <artifactId>statsd-emitter</artifactId> - <name>statsd-emitter</name> - <description>Extension support for emitting Druid metrics to StatsD</description> - - <dependencies> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-core</artifactId> - <version>${project.parent.version}</version> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.datadoghq</groupId> - <artifactId>java-dogstatsd-client</artifactId> - <version>2.6.1</version> - </dependency> - <dependency> - <groupId>com.google.code.findbugs</groupId> - <artifactId>jsr305</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-annotations</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>joda-time</groupId> - <artifactId>joda-time</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.google.inject</groupId> - <artifactId>guice</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-databind</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-core</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.easymock</groupId> - <artifactId>easymock</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>pl.pragmatists</groupId> - <artifactId>JUnitParams</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-server</artifactId> - <version>${project.parent.version}</version> - <type>test-jar</type> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.apache.druid</groupId> - <artifactId>druid-processing</artifactId> - <version>${project.parent.version}</version> - <type>test-jar</type> - <scope>test</scope> - </dependency> - </dependencies> -</project> diff --git a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/DimensionConverter.java b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/DimensionConverter.java deleted file mode 100644 index 0386472ded59..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/DimensionConverter.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
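The transient-failure contract covered by the test above groups SQL Server error states by SQLState. A rough sketch of such a classifier; the prefix set is inferred from the assertions, not taken from the deleted connector:

    import java.sql.SQLException;
    import java.util.Set;

    final class TransientStateSketch
    {
      // "08DIE", "53RES", "JW001" and "40001" from the test all match these prefixes.
      private static final Set<String> TRANSIENT_PREFIXES = Set.of("08", "53", "JW", "40");

      static boolean isTransient(Throwable t)
      {
        if (!(t instanceof SQLException)) {
          return false;  // a plain Exception/Throwable is never transient
        }
        String state = ((SQLException) t).getSQLState();
        return state != null
               && state.length() >= 2
               && TRANSIENT_PREFIXES.contains(state.substring(0, 2));
      }
    }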
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.logger.Logger; - -import javax.annotation.Nullable; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Map; - -/** - */ -public class DimensionConverter -{ - - private static final Logger log = new Logger(DimensionConverter.class); - private Map<String, StatsDMetric> metricMap; - - public DimensionConverter(ObjectMapper mapper, String dimensionMapPath) - { - metricMap = readMap(mapper, dimensionMapPath); - } - - @Nullable - public StatsDMetric addFilteredUserDims( - String service, - String metric, - Map<String, Object> userDims, - ImmutableMap.Builder<String, String> builder - ) - { - /* - Find the metric in the map. If we can't find it, try to look it up prefixed by the service name. - This is because some metrics are reported differently, but with the same name, from different services. - */ - StatsDMetric statsDMetric = null; - if (metricMap.containsKey(metric)) { - statsDMetric = metricMap.get(metric); - } else if (metricMap.containsKey(service + "-" + metric)) { - statsDMetric = metricMap.get(service + "-" + metric); - } - if (statsDMetric != null) { - for (String dim : statsDMetric.dimensions) { - if (userDims.containsKey(dim)) { - builder.put(dim, userDims.get(dim).toString()); - } - } - return statsDMetric; - } else { - return null; - } - } - - private Map<String, StatsDMetric> readMap(ObjectMapper mapper, String dimensionMapPath) - { - try { - InputStream is; - if (Strings.isNullOrEmpty(dimensionMapPath)) { - log.info("Using default metric dimensions and types"); - is = this.getClass().getClassLoader().getResourceAsStream("defaultMetricDimensions.json"); - } else { - log.info("Using metric dimensions and types at [%s]", dimensionMapPath); - is = new FileInputStream(new File(dimensionMapPath)); - } - return mapper.readerFor(new TypeReference<Map<String, StatsDMetric>>() - { - }).readValue(is); - } - catch (IOException e) { - throw new ISE(e, "Failed to parse metric dimensions and types"); - } - } -} diff --git a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java deleted file mode 100644 index b93926046378..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitter.java +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
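The dimensionMapPath file read above maps a Druid metric name to the dimensions worth keeping, its StatsD type, and an optional convertRange flag; the bundled defaultMetricDimensions.json (later in this patch) follows the same shape. A small assumed example of parsing such a file, reusing the extension's own StatsDMetric type:

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Map;

    // Assumes org.apache.druid.emitter.statsd.StatsDMetric (defined later in
    // this patch) is on the classpath; the two entries are illustrative.
    String json = "{"
        + "\"query/time\": {\"dimensions\": [\"dataSource\", \"type\"], \"type\": \"timer\"},"
        + "\"query/cache/total/hitRate\": {\"dimensions\": [], \"type\": \"gauge\", \"convertRange\": true}"
        + "}";
    Map<String, StatsDMetric> map = new ObjectMapper().readValue(
        json,
        new TypeReference<Map<String, StatsDMetric>>() {}
    );
    // map.get("query/time").type == StatsDMetric.Type.timer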
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.timgroup.statsd.Event.AlertType; -import com.timgroup.statsd.NonBlockingStatsDClient; -import com.timgroup.statsd.StatsDClient; -import com.timgroup.statsd.StatsDClientErrorHandler; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.emitter.core.Emitter; -import org.apache.druid.java.util.emitter.core.Event; -import org.apache.druid.java.util.emitter.service.AlertEvent; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; - -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -/** - */ -public class StatsDEmitter implements Emitter -{ - - private static final Logger log = new Logger(StatsDEmitter.class); - private static final char DRUID_METRIC_SEPARATOR = '/'; - private static final String DRUID_DEFAULT_PREFIX = "druid"; - private static final Pattern STATSD_SEPARATOR = Pattern.compile("[:|]"); - private static final Pattern BLANK = Pattern.compile("\\s+"); - private static final String[] EMPTY_ARRAY = new String[0]; - private static final String TAG_HOSTNAME = "hostname"; - private static final String TAG_SERVICE = "druid_service"; - private static final String TAG_FEED = "feed"; - private static final String TAG_SEVERITY = "severity"; - - static StatsDEmitter of(StatsDEmitterConfig config, ObjectMapper mapper) - { - NonBlockingStatsDClient client = new NonBlockingStatsDClient( - config.getPrefix(), - config.getHostname(), - config.getPort(), - config.isDogstatsd() ? 
config.getDogstatsdConstantTags().toArray(new String[0]) : EMPTY_ARRAY, - new StatsDClientErrorHandler() - { - private int exceptionCount = 0; - - @Override - public void handle(Exception exception) - { - if (exceptionCount % 1000 == 0) { - log.error(exception, "Error sending metric to StatsD."); - } - exceptionCount += 1; - } - } - ); - return new StatsDEmitter(config, mapper, client); - } - - private final StatsDClient statsd; - private final StatsDEmitterConfig config; - private final DimensionConverter converter; - private final ObjectMapper mapper; - - public StatsDEmitter(StatsDEmitterConfig config, ObjectMapper mapper, StatsDClient client) - { - this.config = config; - this.converter = new DimensionConverter(mapper, config.getDimensionMapPath()); - this.statsd = client; - this.mapper = mapper; - } - - @Override - public void start() - { - } - - @Override - public void emit(Event event) - { - if (event instanceof ServiceMetricEvent) { - emitMetric((ServiceMetricEvent) event); - } else if (event instanceof AlertEvent && config.isDogstatsd() && config.isDogstatsdEvents()) { - emitAlert((AlertEvent) event); - } - } - - void emitMetric(ServiceMetricEvent metricEvent) - { - String host = metricEvent.getHost(); - String service = metricEvent.getService(); - String metric = metricEvent.getMetric(); - Map userDims = metricEvent.getUserDims(); - Number value = metricEvent.getValue(); - - ImmutableList.Builder nameBuilder = new ImmutableList.Builder<>(); - ImmutableMap.Builder dimsBuilder = new ImmutableMap.Builder<>(); - - if (config.isDogstatsd() && config.isDogstatsdServiceAsTag()) { - dimsBuilder.put(TAG_SERVICE, service); - nameBuilder.add(DRUID_DEFAULT_PREFIX); - } else { - nameBuilder.add(service); - } - nameBuilder.add(metric); - - StatsDMetric statsDMetric = converter.addFilteredUserDims(service, metric, userDims, dimsBuilder); - - if (statsDMetric != null) { - List fullNameList; - String[] tags; - if (config.isDogstatsd()) { - if (config.getIncludeHost()) { - dimsBuilder.put(TAG_HOSTNAME, host); - } - - fullNameList = nameBuilder.build(); - tags = tagsFromMap(dimsBuilder.build()); - } else { - ImmutableList.Builder fullNameBuilder = new ImmutableList.Builder<>(); - if (config.getIncludeHost()) { - fullNameBuilder.add(host); - } - fullNameBuilder.addAll(nameBuilder.build()); - fullNameBuilder.addAll(dimsBuilder.build().values()); - - fullNameList = fullNameBuilder.build(); - tags = EMPTY_ARRAY; - } - - String fullName = Joiner.on(config.getSeparator()).join(fullNameList); - fullName = StringUtils.replaceChar(fullName, DRUID_METRIC_SEPARATOR, config.getSeparator()); - fullName = STATSD_SEPARATOR.matcher(fullName).replaceAll(config.getSeparator()); - fullName = BLANK.matcher(fullName).replaceAll(config.getBlankHolder()); - - if (config.isDogstatsd() && (value instanceof Float || value instanceof Double)) { - switch (statsDMetric.type) { - case count: - statsd.count(fullName, value.doubleValue(), tags); - break; - case timer: - statsd.time(fullName, value.longValue(), tags); - break; - case gauge: - statsd.gauge(fullName, value.doubleValue(), tags); - break; - } - } else { - long val = statsDMetric.convertRange && !config.isDogstatsd() ? 
- Math.round(value.doubleValue() * 100) : - value.longValue(); - - switch (statsDMetric.type) { - case count: - statsd.count(fullName, val, tags); - break; - case timer: - statsd.time(fullName, val, tags); - break; - case gauge: - statsd.gauge(fullName, val, tags); - break; - } - } - } else { - log.debug("Service=[%s], Metric=[%s] has no StatsD type mapping", service, metric); - } - } - - void emitAlert(AlertEvent alertEvent) - { - ImmutableMap.Builder tagBuilder = ImmutableMap.builder(); - - tagBuilder - .put(TAG_FEED, alertEvent.getFeed()) - .put(TAG_SERVICE, alertEvent.getService()) - .put(TAG_SEVERITY, alertEvent.getSeverity().toString()); - if (config.getIncludeHost()) { - tagBuilder.put(TAG_HOSTNAME, alertEvent.getHost()); - } - - String text; - try { - text = mapper.writeValueAsString(alertEvent.getDataMap()); - } - catch (JsonProcessingException e) { - log.error(e, "Unable to convert alert data to json"); - text = "Unable to convert alert data to JSON: " + e.getMessage(); - } - statsd.recordEvent( - com.timgroup.statsd.Event - .builder() - .withDate(alertEvent.getCreatedTime().getMillis()) - .withAlertType(alertType(alertEvent.getSeverity())) - .withPriority(com.timgroup.statsd.Event.Priority.NORMAL) - .withTitle(alertEvent.getDescription()) - .withText(text) - .build(), - tagsFromMap(tagBuilder.build()) - ); - } - - private static String[] tagsFromMap(Map tags) - { - return tags.entrySet() - .stream() - .map(e -> e.getKey() + ":" + e.getValue()) - .toArray(String[]::new); - } - - private static AlertType alertType(AlertEvent.Severity severity) - { - switch (severity) { - case ANOMALY: - return AlertType.WARNING; - case COMPONENT_FAILURE: - case SERVICE_FAILURE: - return AlertType.ERROR; - default: - return AlertType.INFO; - } - } - - @Override - public void flush() - { - } - - @Override - public void close() - { - statsd.stop(); - } - -} diff --git a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterConfig.java b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterConfig.java deleted file mode 100644 index 64c9ce6a61fc..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterConfig.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
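Two output shapes fall out of emitMetric above; a worked example under the fixtures used by the tests further below (includeHost=true, default separator):

    // Plain StatsD: name segments and whitelisted dimension values are joined,
    // then '/' (Druid's metric separator) and ':'/'|' (reserved by the StatsD
    // wire format) become the separator, and blanks become the blankHolder:
    String name = String.join(".", "brokerHost1", "broker", "query/time", "data-source", "groupBy")
        .replace('/', '.');
    // -> "brokerHost1.broker.query.time.data-source.groupBy"

    // DogStatsD: dimensions travel as tags instead of name segments:
    //   name "broker.query.time",
    //   tags ["dataSource:data-source", "type:groupBy", "hostname:brokerHost1"]

Note also the convertRange branch just above: on plain StatsD a ratio metric such as query/cache/total/hitRate is scaled to an integer percentage (0.54 is emitted as 54), while under DogStatsD the raw double is kept.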
- */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; - -import javax.annotation.Nullable; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - */ -public class StatsDEmitterConfig -{ - - @JsonProperty - private final String hostname; - @JsonProperty - private final Integer port; - @JsonProperty - private final String prefix; - @JsonProperty - private final String separator; - @JsonProperty - private final Boolean includeHost; - @JsonProperty - @Nullable - private final String dimensionMapPath; - @JsonProperty - private final String blankHolder; - @JsonProperty - private final Boolean dogstatsd; - @JsonProperty - private final List dogstatsdConstantTags; - @JsonProperty - private final Boolean dogstatsdServiceAsTag; - @JsonProperty - private final Boolean dogstatsdEvents; - - @JsonCreator - public StatsDEmitterConfig( - @JsonProperty("hostname") String hostname, - @JsonProperty("port") Integer port, - @JsonProperty("prefix") @Nullable String prefix, - @JsonProperty("separator") @Nullable String separator, - @JsonProperty("includeHost") @Nullable Boolean includeHost, - @JsonProperty("dimensionMapPath") @Nullable String dimensionMapPath, - @JsonProperty("blankHolder") @Nullable String blankHolder, - @JsonProperty("dogstatsd") @Nullable Boolean dogstatsd, - @JsonProperty("dogstatsdConstantTags") @Nullable List dogstatsdConstantTags, - @JsonProperty("dogstatsdServiceAsTag") @Nullable Boolean dogstatsdServiceAsTag, - @JsonProperty("dogstatsdEvents") @Nullable Boolean dogstatsdEvents - ) - { - this.hostname = Preconditions.checkNotNull(hostname, "StatsD hostname cannot be null."); - this.port = Preconditions.checkNotNull(port, "StatsD port cannot be null."); - this.prefix = prefix != null ? prefix : ""; - this.separator = separator != null ? separator : "."; - this.includeHost = includeHost != null ? includeHost : false; - this.dimensionMapPath = dimensionMapPath; - this.blankHolder = blankHolder != null ? blankHolder : "-"; - this.dogstatsd = dogstatsd != null ? dogstatsd : false; - this.dogstatsdConstantTags = dogstatsdConstantTags != null ? dogstatsdConstantTags : Collections.emptyList(); - this.dogstatsdServiceAsTag = dogstatsdServiceAsTag != null ? dogstatsdServiceAsTag : false; - this.dogstatsdEvents = dogstatsdEvents != null ? 
dogstatsdEvents : false; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - StatsDEmitterConfig that = (StatsDEmitterConfig) o; - - if (!Objects.equals(hostname, that.hostname)) { - return false; - } - if (!Objects.equals(port, that.port)) { - return false; - } - if (!Objects.equals(prefix, that.prefix)) { - return false; - } - if (!Objects.equals(separator, that.separator)) { - return false; - } - if (!Objects.equals(includeHost, that.includeHost)) { - return false; - } - if (!Objects.equals(dimensionMapPath, that.dimensionMapPath)) { - return false; - } - if (!Objects.equals(dogstatsd, that.dogstatsd)) { - return false; - } - if (!Objects.equals(dogstatsdServiceAsTag, that.dogstatsdServiceAsTag)) { - return false; - } - return Objects.equals(dogstatsdConstantTags, that.dogstatsdConstantTags); - } - - @Override - public int hashCode() - { - return Objects.hash(hostname, port, prefix, separator, includeHost, dimensionMapPath, - blankHolder, dogstatsd, dogstatsdConstantTags, dogstatsdServiceAsTag); - } - - @JsonProperty - public String getHostname() - { - return hostname; - } - - @JsonProperty - public int getPort() - { - return port; - } - - @JsonProperty - public String getPrefix() - { - return prefix; - } - - @JsonProperty - public String getSeparator() - { - return separator; - } - - @JsonProperty - public Boolean getIncludeHost() - { - return includeHost; - } - - @JsonProperty - @Nullable - public String getDimensionMapPath() - { - return dimensionMapPath; - } - - @JsonProperty - public String getBlankHolder() - { - return blankHolder; - } - - @JsonProperty - public Boolean isDogstatsd() - { - return dogstatsd; - } - - @JsonProperty - public List getDogstatsdConstantTags() - { - return dogstatsdConstantTags; - } - - @JsonProperty - public Boolean isDogstatsdServiceAsTag() - { - return dogstatsdServiceAsTag; - } - - @JsonProperty - public Boolean isDogstatsdEvents() - { - return dogstatsdEvents; - } -} diff --git a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterModule.java b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterModule.java deleted file mode 100644 index 50bebdf0c447..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDEmitterModule.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
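In a deployment these values are bound from druid.emitter.statsd.* runtime properties by the module that follows; for a test or a sketch the config can also be built directly against the constructor above. The host, port and tag below are placeholders, not recommendations:

    StatsDEmitterConfig config = new StatsDEmitterConfig(
        "statsd.example.internal",   // hostname (placeholder)
        8125,                        // port (the conventional StatsD port)
        "druid",                     // prefix
        null,                        // separator        -> defaults to "."
        true,                        // includeHost
        null,                        // dimensionMapPath -> bundled defaultMetricDimensions.json
        null,                        // blankHolder      -> defaults to "-"
        true,                        // dogstatsd
        java.util.Collections.singletonList("env:example"),  // dogstatsdConstantTags (placeholder)
        true,                        // dogstatsdServiceAsTag
        true                         // dogstatsdEvents
    );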
- */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.Binder; -import com.google.inject.Provides; -import com.google.inject.name.Named; -import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.ManageLifecycle; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.java.util.emitter.core.Emitter; - -import java.util.Collections; -import java.util.List; - -/** - */ -public class StatsDEmitterModule implements DruidModule -{ - private static final String EMITTER_TYPE = "statsd"; - - @Override - public List getJacksonModules() - { - return Collections.emptyList(); - } - - @Override - public void configure(Binder binder) - { - JsonConfigProvider.bind(binder, "druid.emitter." + EMITTER_TYPE, StatsDEmitterConfig.class); - } - - @Provides - @ManageLifecycle - @Named(EMITTER_TYPE) - public Emitter getEmitter(StatsDEmitterConfig config, ObjectMapper mapper) - { - return StatsDEmitter.of(config, mapper); - } -} diff --git a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDMetric.java b/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDMetric.java deleted file mode 100644 index 3036d3e1aacb..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/java/org/apache/druid/emitter/statsd/StatsDMetric.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.util.SortedSet; - -/** - */ -public class StatsDMetric -{ - public final SortedSet dimensions; - public final Type type; - public final boolean convertRange; - - @JsonCreator - public StatsDMetric( - @JsonProperty("dimensions") SortedSet dimensions, - @JsonProperty("type") Type type, - @JsonProperty("convertRange") boolean convertRange - ) - { - this.dimensions = dimensions; - this.type = type; - this.convertRange = convertRange; - } - - public enum Type - { - count, gauge, timer - } -} diff --git a/extensions-contrib/statsd-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/statsd-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index c19ee1ea93d5..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.emitter.statsd.StatsDEmitterModule diff --git a/extensions-contrib/statsd-emitter/src/main/resources/defaultMetricDimensions.json b/extensions-contrib/statsd-emitter/src/main/resources/defaultMetricDimensions.json deleted file mode 100644 index 859a9c688a7a..000000000000 --- a/extensions-contrib/statsd-emitter/src/main/resources/defaultMetricDimensions.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "query/time" : { "dimensions" : ["dataSource", "type"], "type" : "timer"}, - "query/bytes" : { "dimensions" : ["dataSource", "type"], "type" : "count"}, - "query/node/time" : { "dimensions" : ["server"], "type" : "timer"}, - "query/node/ttfb" : { "dimensions" : ["server"], "type" : "timer"}, - "query/node/bytes" : { "dimensions" : ["server"], "type" : "count"}, - "query/node/backpressure": { "dimensions" : ["server"], "type" : "timer"}, - - "query/segment/time" : { "dimensions" : [], "type" : "timer"}, - "query/wait/time" : { "dimensions" : [], "type" : "timer"}, - "segment/scan/pending" : { "dimensions" : [], "type" : "gauge"}, - "query/segmentAndCache/time" : { "dimensions" : [], "type" : "timer" }, - "query/cpu/time" : { "dimensions" : ["dataSource", "type"], "type" : "timer" }, - - "query/count" : { "dimensions" : [], "type" : "count" }, - "query/success/count" : { "dimensions" : [], "type" : "count" }, - "query/failed/count" : { "dimensions" : [], "type" : "count" }, - "query/interrupted/count" : { "dimensions" : [], "type" : "count" }, - - "query/cache/delta/numEntries" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/sizeBytes" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/hits" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/misses" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/evictions" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/hitRate" : { "dimensions" : [], "type" : "count", "convertRange" : true }, - "query/cache/delta/averageBytes" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/timeouts" : { "dimensions" : [], "type" : "count" }, - "query/cache/delta/errors" : { "dimensions" : [], "type" : "count" }, - - "query/cache/total/numEntries" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/sizeBytes" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/hits" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/misses" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/evictions" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/hitRate" : { "dimensions" : [], "type" : "gauge", "convertRange" : true }, - "query/cache/total/averageBytes" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/timeouts" : { "dimensions" : [], "type" : "gauge" }, - "query/cache/total/errors" : { "dimensions" : [], "type" : "gauge" }, - - 
"ingest/events/thrownAway" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/events/unparseable" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/events/duplicate" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/events/processed" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/events/messageGap" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "ingest/rows/output" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/persists/count" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/persists/time" : { "dimensions" : ["dataSource"], "type" : "timer" }, - "ingest/persists/cpu" : { "dimensions" : ["dataSource"], "type" : "timer" }, - "ingest/persists/backPressure" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "ingest/persists/failed" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/handoff/failed" : { "dimensions" : ["dataSource"], "type" : "count" }, - "ingest/merge/time" : { "dimensions" : ["dataSource"], "type" : "timer" }, - "ingest/merge/cpu" : { "dimensions" : ["dataSource"], "type" : "timer" }, - - "ingest/kafka/lag" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "ingest/kafka/maxLag" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "ingest/kafka/avgLag" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - - "task/success/count" : { "dimensions" : ["dataSource"], "type" : "count" }, - "task/failed/count" : { "dimensions" : ["dataSource"], "type" : "count" }, - "task/running/count" : { "dimensions" : ["dataSource"], "type" : "count" }, - "task/pending/count" : { "dimensions" : ["dataSource"], "type" : "count" }, - "task/waiting/count" : { "dimensions" : ["dataSource"], "type" : "count" }, - - "task/run/time" : { "dimensions" : ["dataSource", "taskType"], "type" : "timer" }, - "segment/added/bytes" : { "dimensions" : ["dataSource", "taskType"], "type" : "count" }, - "segment/moved/bytes" : { "dimensions" : ["dataSource", "taskType"], "type" : "count" }, - "segment/nuked/bytes" : { "dimensions" : ["dataSource", "taskType"], "type" : "count" }, - - "segment/assigned/count" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/moved/count" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/dropped/count" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/deleted/count" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/unneeded/count" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/unavailable/count" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "segment/underReplicated/count" : { "dimensions" : ["dataSource", "tier"], "type" : "gauge" }, - "segment/cost/raw" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/cost/normalization" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/cost/normalized" : { "dimensions" : ["tier"], "type" : "count" }, - "segment/loadQueue/size" : { "dimensions" : ["server"], "type" : "gauge" }, - "segment/loadQueue/failed" : { "dimensions" : ["server"], "type" : "gauge" }, - "segment/loadQueue/count" : { "dimensions" : ["server"], "type" : "gauge" }, - "segment/dropQueue/count" : { "dimensions" : ["server"], "type" : "gauge" }, - "segment/size" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "segment/overShadowed/count" : { "dimensions" : [], "type" : "gauge" }, - - "segment/max" : { "dimensions" : [], "type" : "gauge"}, - "segment/used" : { "dimensions" : ["dataSource", "tier", "priority"], "type" : "gauge" }, - "segment/usedPercent" : { 
"dimensions" : ["dataSource", "tier", "priority"], "type" : "gauge", "convertRange" : true }, - "segment/pendingDelete" : { "dimensions" : [], "type" : "gauge"}, - - "jvm/pool/committed" : { "dimensions" : ["poolKind", "poolName"], "type" : "gauge" }, - "jvm/pool/init" : { "dimensions" : ["poolKind", "poolName"], "type" : "gauge" }, - "jvm/pool/max" : { "dimensions" : ["poolKind", "poolName"], "type" : "gauge" }, - "jvm/pool/used" : { "dimensions" : ["poolKind", "poolName"], "type" : "gauge" }, - "jvm/bufferpool/count" : { "dimensions" : ["bufferpoolName"], "type" : "gauge" }, - "jvm/bufferpool/used" : { "dimensions" : ["bufferpoolName"], "type" : "gauge" }, - "jvm/bufferpool/capacity" : { "dimensions" : ["bufferpoolName"], "type" : "gauge" }, - "jvm/mem/init" : { "dimensions" : ["memKind"], "type" : "gauge" }, - "jvm/mem/max" : { "dimensions" : ["memKind"], "type" : "gauge" }, - "jvm/mem/used" : { "dimensions" : ["memKind"], "type" : "gauge" }, - "jvm/mem/committed" : { "dimensions" : ["memKind"], "type" : "gauge" }, - "jvm/gc/count" : { "dimensions" : ["gcName", "gcGen"], "type" : "count" }, - "jvm/gc/cpu" : { "dimensions" : ["gcName", "gcGen"], "type" : "count" }, - - "ingest/events/buffered" : { "dimensions" : ["serviceName, bufferCapacity"], "type" : "gauge"}, - - "sys/swap/free" : { "dimensions" : [], "type" : "gauge"}, - "sys/swap/max" : { "dimensions" : [], "type" : "gauge"}, - "sys/swap/pageIn" : { "dimensions" : [], "type" : "gauge"}, - "sys/swap/pageOut" : { "dimensions" : [], "type" : "gauge"}, - "sys/disk/write/count" : { "dimensions" : ["fsDevName"], "type" : "count"}, - "sys/disk/read/count" : { "dimensions" : ["fsDevName"], "type" : "count"}, - "sys/disk/write/size" : { "dimensions" : ["fsDevName"], "type" : "count"}, - "sys/disk/read/size" : { "dimensions" : ["fsDevName"], "type" : "count"}, - "sys/net/write/size" : { "dimensions" : [], "type" : "count"}, - "sys/net/read/size" : { "dimensions" : [], "type" : "count"}, - "sys/fs/used" : { "dimensions" : ["fsDevName", "fsDirName", "fsTypeName", "fsSysTypeName", "fsOptions"], "type" : "gauge"}, - "sys/fs/max" : { "dimensions" : ["fsDevName", "fsDirName", "fsTypeName", "fsSysTypeName", "fsOptions"], "type" : "gauge"}, - "sys/mem/used" : { "dimensions" : [], "type" : "gauge"}, - "sys/mem/max" : { "dimensions" : [], "type" : "gauge"}, - "sys/storage/used" : { "dimensions" : ["fsDirName"], "type" : "gauge"}, - "sys/cpu" : { "dimensions" : ["cpuName", "cpuTime"], "type" : "gauge"}, - - "coordinator-segment/count" : { "dimensions" : ["dataSource"], "type" : "gauge" }, - "historical-segment/count" : { "dimensions" : ["dataSource", "tier", "priority"], "type" : "gauge" } -} diff --git a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java b/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java deleted file mode 100644 index c6bb65281bb0..000000000000 --- a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.junit.Assert; -import org.junit.Test; - -public class DimensionConverterTest -{ - @Test - public void testConvert() - { - DimensionConverter dimensionConverter = new DimensionConverter(new ObjectMapper(), null); - ServiceMetricEvent event = new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("interval", "2013/2015") - .setDimension("some_random_dim1", "random_dim_value1") - .setDimension("some_random_dim2", "random_dim_value2") - .setDimension("hasFilters", "no") - .setDimension("duration", "P1D") - .setDimension("remoteAddress", "194.0.90.2") - .setDimension("id", "ID") - .setDimension("context", "{context}") - .build(DateTimes.nowUtc(), "query/time", 10) - .build("broker", "brokerHost1"); - - ImmutableMap.Builder actual = new ImmutableMap.Builder<>(); - StatsDMetric statsDMetric = dimensionConverter.addFilteredUserDims( - event.getService(), - event.getMetric(), - event.getUserDims(), - actual - ); - Assert.assertEquals("correct StatsDMetric.Type", StatsDMetric.Type.timer, statsDMetric.type); - ImmutableMap.Builder expected = new ImmutableMap.Builder<>(); - expected.put("dataSource", "data-source"); - expected.put("type", "groupBy"); - Assert.assertEquals("correct Dimensions", expected.build(), actual.build()); - } -} diff --git a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java b/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java deleted file mode 100644 index ebab54e47a55..000000000000 --- a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.emitter.statsd; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import com.timgroup.statsd.Event; -import com.timgroup.statsd.StatsDClient; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.emitter.service.AlertBuilder; -import org.apache.druid.java.util.emitter.service.AlertEvent; -import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; -import org.easymock.Capture; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Test; - -public class StatsDEmitterTest -{ - @Test - public void testConvertRange() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, null, null, null, null), - new ObjectMapper(), - client - ); - client.gauge("broker.query.cache.total.hitRate", 54); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54) - .build("broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testConvertRangeWithDogstatsd() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, true, null, null, null), - new ObjectMapper(), - client - ); - client.gauge("broker.query.cache.total.hitRate", 0.54); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54) - .build("broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testNoConvertRange() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, null, null, null, null), - new ObjectMapper(), - client - ); - client.time("broker.query.time.data-source.groupBy", 10); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("interval", "2013/2015") - .setDimension("some_random_dim1", "random_dim_value1") - .setDimension("some_random_dim2", "random_dim_value2") - .setDimension("hasFilters", "no") - .setDimension("duration", "P1D") - .setDimension("remoteAddress", "194.0.90.2") - .setDimension("id", "ID") - .setDimension("context", "{context}") - .build(DateTimes.nowUtc(), "query/time", 10) - .build("broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testConfigOptions() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, "#", true, null, null, null, null, null, null), - new ObjectMapper(), - client - ); - client.time("brokerHost1#broker#query#time#data-source#groupBy", 10); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("interval", "2013/2015") - .setDimension("some_random_dim1", "random_dim_value1") - .setDimension("some_random_dim2", "random_dim_value2") - .setDimension("hasFilters", "no") - .setDimension("duration", 
"P1D") - .setDimension("remoteAddress", "194.0.90.2") - .setDimension("id", "ID") - .setDimension("context", "{context}") - .build(DateTimes.nowUtc(), "query/time", 10) - .build("broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testDogstatsdEnabled() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, "#", true, null, null, true, null, null, null), - new ObjectMapper(), - client - ); - client.time("broker#query#time", 10, - "dataSource:data-source", "type:groupBy", "hostname:brokerHost1" - ); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .setDimension("interval", "2013/2015") - .setDimension("some_random_dim1", "random_dim_value1") - .setDimension("some_random_dim2", "random_dim_value2") - .setDimension("hasFilters", "no") - .setDimension("duration", "P1D") - .setDimension("remoteAddress", "194.0.90.2") - .setDimension("id", "ID") - .setDimension("context", "{context}") - .build(DateTimes.nowUtc(), "query/time", 10) - .build("broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testBlankHolderOptions() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, null, true, null, null, null, null, null, null), - new ObjectMapper(), - client - ); - client.count("brokerHost1.broker.jvm.gc.count.G1-GC", 1); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("gcName", "G1 GC") - .build(DateTimes.nowUtc(), "jvm/gc/count", 1) - .build("broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testServiceAsTagOption() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, null, true, null, null, true, null, true, null), - new ObjectMapper(), - client - ); - client.time("druid.query.time", 10, - "druid_service:druid/broker", "dataSource:data-source", "type:groupBy", "hostname:brokerHost1" - ); - EasyMock.replay(client); - emitter.emit(new ServiceMetricEvent.Builder() - .setDimension("dataSource", "data-source") - .setDimension("type", "groupBy") - .build(DateTimes.nowUtc(), "query/time", 10) - .build("druid/broker", "brokerHost1") - ); - EasyMock.verify(client); - } - - @Test - public void testAlertEvent() - { - StatsDClient client = EasyMock.createMock(StatsDClient.class); - StatsDEmitter emitter = new StatsDEmitter( - new StatsDEmitterConfig("localhost", 8888, null, null, true, null, null, true, null, true, true), - new ObjectMapper(), - client - ); - Event expectedEvent = Event - .builder() - .withPriority(Event.Priority.NORMAL) - .withAlertType(Event.AlertType.WARNING) - .withTitle("something bad happened [exception]") - .withText("{\"exception\":\"NPE\"}") - .build(); - - Capture eventCapture = EasyMock.newCapture(); - client.recordEvent( - EasyMock.capture(eventCapture), - EasyMock.eq("feed:alerts"), EasyMock.eq("druid_service:druid/broker"), - EasyMock.eq("severity:anomaly"), EasyMock.eq("hostname:brokerHost1") - ); - EasyMock.replay(client); - emitter.emit(AlertBuilder.create("something bad happened [%s]", "exception") - .severity(AlertEvent.Severity.ANOMALY) - .addData(ImmutableMap.of("exception", "NPE")) - .build("druid/broker", 
"brokerHost1") - ); - EasyMock.verify(client); - Event actualEvent = eventCapture.getValue(); - Assert.assertTrue(actualEvent.getMillisSinceEpoch() > 0); - Assert.assertEquals(expectedEvent.getPriority(), actualEvent.getPriority()); - Assert.assertEquals(expectedEvent.getAlertType(), actualEvent.getAlertType()); - Assert.assertEquals(expectedEvent.getTitle(), actualEvent.getTitle()); - Assert.assertEquals(expectedEvent.getText(), actualEvent.getText()); - } - - @Test - public void testJacksonModules() - { - Assert.assertTrue(new StatsDEmitterModule().getJacksonModules().isEmpty()); - } -} diff --git a/extensions-contrib/tdigestsketch/pom.xml b/extensions-contrib/tdigestsketch/pom.xml deleted file mode 100644 index 0c33b29e6c38..000000000000 --- a/extensions-contrib/tdigestsketch/pom.xml +++ /dev/null @@ -1,184 +0,0 @@ - - - - - - druid - org.apache.druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - 4.0.0 - - org.apache.druid.extensions.contrib - druid-tdigestsketch - tdigestsketch - Druid extension for generating tdigest backed sketches - - - - com.tdunning - t-digest - 3.2 - - - com.google.guava - guava - ${guava.version} - provided - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.druid - druid-processing - ${project.parent.version} - provided - - - com.google.code.findbugs - jsr305 - provided - - - com.google.inject - guice - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.fasterxml.jackson.datatype - jackson-datatype-guava - provided - - - com.fasterxml.jackson.datatype - jackson-datatype-joda - provided - - - com.fasterxml.jackson.dataformat - jackson-dataformat-smile - provided - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - provided - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-smile-provider - provided - - - it.unimi.dsi - fastutil - provided - - - com.fasterxml.jackson.core - jackson-core - provided - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.errorprone - error_prone_annotations - provided - - - org.apache.druid - druid-sql - ${project.parent.version} - provided - - - org.apache.calcite - calcite-core - provided - - - org.apache.druid - druid-server - provided - ${project.parent.version} - - - - - junit - junit - test - - - org.easymock - easymock - test - - - nl.jqno.equalsverifier - equalsverifier - test - - - org.apache.druid - druid-core - ${project.parent.version} - test-jar - test - - - org.apache.druid - druid-processing - ${project.parent.version} - test-jar - test - - - org.apache.druid - druid-server - ${project.parent.version} - test-jar - test - - - org.apache.druid - druid-sql - ${project.parent.version} - test-jar - test - - - - - diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregator.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregator.java deleted file mode 100644 index d43b19ca0652..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregator.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.google.errorprone.annotations.concurrent.GuardedBy; -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.segment.ColumnValueSelector; - -import javax.annotation.Nullable; - - -/** - * Aggregator to build T-Digest sketches on numeric values. - * It generally makes sense to use this aggregator during the ingestion time. - *

- * One can use this aggregator to build these sketches during query time too, just - * that it will be slower and more resource intensive. - */ -public class TDigestSketchAggregator implements Aggregator -{ - - private final ColumnValueSelector selector; - - @GuardedBy("this") - private MergingDigest histogram; - - - public TDigestSketchAggregator(ColumnValueSelector selector, @Nullable Integer compression) - { - this.selector = selector; - if (compression != null) { - this.histogram = new MergingDigest(compression); - } else { - this.histogram = new MergingDigest(TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION); - } - } - - @Override - public void aggregate() - { - Object obj = selector.getObject(); - if (obj == null) { - return; - } - if (obj instanceof Number) { - synchronized (this) { - histogram.add(((Number) obj).doubleValue()); - } - } else if (obj instanceof MergingDigest) { - synchronized (this) { - histogram.add((MergingDigest) obj); - } - } else { - throw new IAE( - "Expected a number or an instance of MergingDigest, but received [%s] of type [%s]", - obj, - obj.getClass() - ); - } - } - - @Nullable - @Override - public synchronized Object get() - { - return histogram; - } - - @Override - public float getFloat() - { - throw new UnsupportedOperationException("Casting to float type is not supported"); - } - - @Override - public long getLong() - { - throw new UnsupportedOperationException("Casting to long type is not supported"); - } - - @Override - public synchronized void close() - { - histogram = null; - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorFactory.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorFactory.java deleted file mode 100644 index dc93f75fc061..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorFactory.java +++ /dev/null @@ -1,299 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
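The aggregate() path above accepts either raw numbers or pre-built MergingDigest instances, because at merge time Druid feeds intermediate sketches back through the same aggregator. The underlying operation is digest-into-digest addition, the same union the factory's combine() below relies on:

    MergingDigest left = new MergingDigest(100);
    MergingDigest right = new MergingDigest(100);
    left.add(1.0);
    right.add(2.0);
    left.add(right);  // union: left now summarizes both streams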
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.tdunning.math.stats.MergingDigest; -import com.tdunning.math.stats.TDigest; -import org.apache.druid.query.aggregation.AggregateCombiner; -import org.apache.druid.query.aggregation.Aggregator; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.AggregatorFactoryNotMergeableException; -import org.apache.druid.query.aggregation.AggregatorUtil; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.query.aggregation.ObjectAggregateCombiner; -import org.apache.druid.query.cache.CacheKeyBuilder; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.apache.druid.segment.ColumnValueSelector; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -/** - * Aggregation operations over the tdigest-based quantile sketch - * available on github and described - * in the paper - * - * Computing extremely accurate quantiles using t-digests. - *

- * <p>

- * At the time of writing this implementation, there are two flavors of {@link TDigest} - * available - {@link MergingDigest} and {@link com.tdunning.math.stats.AVLTreeDigest}. - * This implementation uses {@link MergingDigest} since it is more suited for the cases - * when we have to merge intermediate aggregations which Druid needs to do as - * part of query processing. - */ -@JsonTypeName(TDigestSketchAggregatorFactory.TYPE_NAME) -public class TDigestSketchAggregatorFactory extends AggregatorFactory -{ - - // Default compression - public static final int DEFAULT_COMPRESSION = 100; - - @Nonnull - private final String name; - @Nonnull - private final String fieldName; - - private final int compression; - - @Nonnull - private final byte cacheTypeId; - - public static final String TYPE_NAME = "tDigestSketch"; - - @JsonCreator - public TDigestSketchAggregatorFactory( - @JsonProperty("name") final String name, - @JsonProperty("fieldName") final String fieldName, - @JsonProperty("compression") @Nullable final Integer compression - ) - { - this(name, fieldName, compression, AggregatorUtil.TDIGEST_BUILD_SKETCH_CACHE_TYPE_ID); - } - - TDigestSketchAggregatorFactory( - final String name, - final String fieldName, - @Nullable final Integer compression, - final byte cacheTypeId - ) - { - this.name = Objects.requireNonNull(name, "Must have a valid, non-null aggregator name"); - this.fieldName = Objects.requireNonNull(fieldName, "Parameter fieldName must be specified"); - this.compression = compression == null ? DEFAULT_COMPRESSION : compression; - this.cacheTypeId = cacheTypeId; - } - - - @Override - public byte[] getCacheKey() - { - return new CacheKeyBuilder( - cacheTypeId - ).appendString(fieldName).appendInt(compression).build(); - } - - - @Override - public Aggregator factorize(ColumnSelectorFactory metricFactory) - { - return new TDigestSketchAggregator(metricFactory.makeColumnValueSelector(fieldName), compression); - } - - @Override - public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) - { - return new TDigestSketchBufferAggregator(metricFactory.makeColumnValueSelector(fieldName), compression); - } - - public static final Comparator COMPARATOR = Comparator.nullsFirst( - Comparator.comparingLong(a -> a.size()) - ); - - @Override - public Comparator getComparator() - { - return COMPARATOR; - } - - @Override - public Object combine(@Nullable Object lhs, @Nullable Object rhs) - { - if (lhs == null) { - return rhs; - } - if (rhs == null) { - return lhs; - } - TDigest union = (TDigest) lhs; - union.add((TDigest) rhs); - return union; - } - - @Override - public AggregatorFactory getCombiningFactory() - { - return new TDigestSketchAggregatorFactory(name, name, compression); - } - - @Override - public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException - { - if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { - return getCombiningFactory(); - } else { - throw new AggregatorFactoryNotMergeableException(this, other); - } - } - - @Override - public List getRequiredColumns() - { - return Collections.singletonList( - new TDigestSketchAggregatorFactory( - fieldName, - fieldName, - compression - ) - ); - } - - @Override - public Object deserialize(Object serializedSketch) - { - return TDigestSketchUtils.deserialize(serializedSketch); - } - - @Nullable - @Override - public Object finalizeComputation(@Nullable Object object) - { - return object; - } - - @Override - @JsonProperty - public String 
getName() - { - return name; - } - - @JsonProperty - public String getFieldName() - { - return fieldName; - } - - @JsonProperty - public int getCompression() - { - return compression; - } - - @Override - public List requiredFields() - { - return Collections.singletonList(fieldName); - } - - @Override - public String getTypeName() - { - return TYPE_NAME; - } - - @Override - public int getMaxIntermediateSize() - { - return TDigestSketchUtils.getMaxIntermdiateTDigestSize(compression); - } - - @Override - public AggregateCombiner makeAggregateCombiner() - { - return new ObjectAggregateCombiner() - { - private MergingDigest combined = new MergingDigest(compression); - - @Override - public void reset(final ColumnValueSelector selector) - { - combined = null; - fold(selector); - } - - @Override - public void fold(final ColumnValueSelector selector) - { - MergingDigest other = (MergingDigest) selector.getObject(); - if (other == null) { - return; - } - if (combined == null) { - combined = new MergingDigest(compression); - } - combined.add(other); - } - - @Nullable - @Override - public MergingDigest getObject() - { - return combined; - } - - @Override - public Class classOfObject() - { - return MergingDigest.class; - } - }; - } - - @Override - public boolean equals(final Object o) - { - if (this == o) { - return true; - } - if (o == null || !getClass().equals(o.getClass())) { - return false; - } - final TDigestSketchAggregatorFactory that = (TDigestSketchAggregatorFactory) o; - - return Objects.equals(name, that.name) && - Objects.equals(fieldName, that.fieldName) && - compression == that.compression; - } - - @Override - public int hashCode() - { - return Objects.hash(name, fieldName, compression); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" - + "name=" + name - + ", fieldName=" + fieldName - + ", compression=" + compression - + "}"; - } - -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchBufferAggregator.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchBufferAggregator.java deleted file mode 100644 index be65896239c2..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchBufferAggregator.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.google.common.base.Preconditions; -import com.tdunning.math.stats.MergingDigest; -import it.unimi.dsi.fastutil.ints.Int2ObjectMap; -import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.query.aggregation.BufferAggregator; -import org.apache.druid.segment.ColumnValueSelector; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.nio.ByteBuffer; -import java.util.IdentityHashMap; - -/** - * Aggregator that builds T-Digest backed sketch using numeric values read from {@link ByteBuffer} - */ -public class TDigestSketchBufferAggregator implements BufferAggregator -{ - - @Nonnull - private final ColumnValueSelector selector; - private final int compression; - private final IdentityHashMap> sketchCache = new IdentityHashMap(); - - public TDigestSketchBufferAggregator( - final ColumnValueSelector valueSelector, - @Nullable final Integer compression - ) - { - Preconditions.checkNotNull(valueSelector); - this.selector = valueSelector; - if (compression != null) { - this.compression = compression; - } else { - this.compression = TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION; - } - } - - @Override - public void init(ByteBuffer buffer, int position) - { - MergingDigest emptyDigest = new MergingDigest(compression); - addToCache(buffer, position, emptyDigest); - } - - @Override - public void aggregate(ByteBuffer buffer, int position) - { - Object x = selector.getObject(); - if (x == null) { - return; - } - MergingDigest sketch = sketchCache.get(buffer).get(position); - if (x instanceof Number) { - sketch.add(((Number) x).doubleValue()); - } else if (x instanceof MergingDigest) { - sketch.add((MergingDigest) x); - } else { - throw new IAE( - "Expected a number or an instance of MergingDigest, but received [%s] of type [%s]", - x, - x.getClass() - ); - } - } - - @Override - public Object get(final ByteBuffer buffer, final int position) - { - // sketchCache is an IdentityHashMap where the reference of buffer is used for equality checks. - // So the returned object isn't impacted by the changes in the buffer object made by concurrent threads. 
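- // (The sketches themselves are plain heap objects; the map is keyed by buffer identity
- // plus position, so only the lookup key, never the sketch state, involves the ByteBuffer.)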
- return sketchCache.get(buffer).get(position); - } - - @Override - public float getFloat(final ByteBuffer buffer, final int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public long getLong(final ByteBuffer buffer, final int position) - { - throw new UnsupportedOperationException("Not implemented"); - } - - @Override - public void close() - { - sketchCache.clear(); - } - - @Override - public void relocate(int oldPosition, int newPosition, ByteBuffer oldBuffer, ByteBuffer newBuffer) - { - MergingDigest sketch = sketchCache.get(oldBuffer).get(oldPosition); - addToCache(newBuffer, newPosition, sketch); - final Int2ObjectMap map = sketchCache.get(oldBuffer); - map.remove(oldPosition); - if (map.isEmpty()) { - sketchCache.remove(oldBuffer); - } - } - - private void addToCache(final ByteBuffer buffer, final int position, final MergingDigest sketch) - { - Int2ObjectMap map = sketchCache.computeIfAbsent(buffer, b -> new Int2ObjectOpenHashMap<>()); - map.put(position, sketch); - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchComplexMetricSerde.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchComplexMetricSerde.java deleted file mode 100644 index d44ce80fc7e3..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchComplexMetricSerde.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.data.input.InputRow; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.segment.GenericColumnSerializer; -import org.apache.druid.segment.column.ColumnBuilder; -import org.apache.druid.segment.data.GenericIndexed; -import org.apache.druid.segment.data.ObjectStrategy; -import org.apache.druid.segment.serde.ComplexColumnPartSupplier; -import org.apache.druid.segment.serde.ComplexMetricExtractor; -import org.apache.druid.segment.serde.ComplexMetricSerde; -import org.apache.druid.segment.serde.LargeColumnSupportedComplexColumnSerializer; -import org.apache.druid.segment.writeout.SegmentWriteOutMedium; - -import java.nio.ByteBuffer; - -public class TDigestSketchComplexMetricSerde extends ComplexMetricSerde -{ - private static final TDigestSketchObjectStrategy STRATEGY = new TDigestSketchObjectStrategy(); - - @Override - public String getTypeName() - { - return TDigestSketchAggregatorFactory.TYPE_NAME; - } - - @Override - public ComplexMetricExtractor getExtractor() - { - return new ComplexMetricExtractor() - { - @Override - public Class extractedClass() - { - return MergingDigest.class; - } - - @Override - public Object extractValue(final InputRow inputRow, final String metricName) - { - final Object object = inputRow.getRaw(metricName); - if (object == null || object instanceof Number || object instanceof MergingDigest) { - return object; - } - if (object instanceof String) { - String objectString = (String) object; - if (Character.isDigit((objectString).charAt(0))) { - // Base64 representation of MergingDigest starts with A. So if it's a - // string that starts with a digit, we assume it is a number. - try { - Double doubleValue = Double.parseDouble(objectString); - return doubleValue; - } - catch (NumberFormatException e) { - throw new IAE("Expected a string with a number, received value " + objectString); - } - } - } - return TDigestSketchUtils.deserialize(object); - } - }; - } - - @Override - public void deserializeColumn(ByteBuffer buffer, ColumnBuilder builder) - { - final GenericIndexed column = GenericIndexed.read( - buffer, - STRATEGY, - builder.getFileMapper() - ); - builder.setComplexColumnSupplier(new ComplexColumnPartSupplier(getTypeName(), column)); - } - - @Override - public ObjectStrategy getObjectStrategy() - { - return STRATEGY; - } - - @Override - public GenericColumnSerializer getSerializer(SegmentWriteOutMedium segmentWriteOutMedium, String column) - { - return LargeColumnSupportedComplexColumnSerializer.create( - segmentWriteOutMedium, - column, - this.getObjectStrategy() - ); - } - -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchJsonSerializer.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchJsonSerializer.java deleted file mode 100644 index b8a3ebd34a1a..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchJsonSerializer.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.JsonSerializer; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.tdunning.math.stats.MergingDigest; - -import java.io.IOException; - -public class TDigestSketchJsonSerializer extends JsonSerializer -{ - @Override - public void serialize( - MergingDigest tDigest, - JsonGenerator jsonGenerator, - SerializerProvider serializerProvider - ) throws IOException - { - jsonGenerator.writeBinary(TDigestSketchUtils.toBytes(tDigest)); - } - -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchModule.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchModule.java deleted file mode 100644 index 1a5150fdf746..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchModule.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.initialization.DruidModule; -import org.apache.druid.query.aggregation.tdigestsketch.sql.TDigestGenerateSketchSqlAggregator; -import org.apache.druid.query.aggregation.tdigestsketch.sql.TDigestSketchQuantileSqlAggregator; -import org.apache.druid.segment.serde.ComplexMetrics; -import org.apache.druid.sql.guice.SqlBindings; - -import java.util.List; - -/** - * Module defining aggregators for the T-Digest based sketches - */ -public class TDigestSketchModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule( - getClass().getSimpleName() - ).registerSubtypes( - new NamedType( - TDigestSketchAggregatorFactory.class, - TDigestSketchAggregatorFactory.TYPE_NAME - ), - new NamedType( - TDigestSketchToQuantilesPostAggregator.class, - TDigestSketchToQuantilesPostAggregator.TYPE_NAME - ), - new NamedType( - TDigestSketchToQuantilePostAggregator.class, - TDigestSketchToQuantilePostAggregator.TYPE_NAME - ) - ).addSerializer(MergingDigest.class, new TDigestSketchJsonSerializer()) - ); - } - - @Override - public void configure(Binder binder) - { - registerSerde(); - SqlBindings.addAggregator(binder, TDigestSketchQuantileSqlAggregator.class); - SqlBindings.addAggregator(binder, TDigestGenerateSketchSqlAggregator.class); - } - - @VisibleForTesting - public static void registerSerde() - { - ComplexMetrics.registerSerde(TDigestSketchAggregatorFactory.TYPE_NAME, new TDigestSketchComplexMetricSerde()); - ComplexMetrics.registerSerde("TDIGEST_GENERATE_SKETCH", new TDigestSketchComplexMetricSerde()); - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchObjectStrategy.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchObjectStrategy.java deleted file mode 100644 index 706e3e1ac866..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchObjectStrategy.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.segment.data.ObjectStrategy; - -import javax.annotation.Nullable; -import java.nio.ByteBuffer; - -public class TDigestSketchObjectStrategy implements ObjectStrategy -{ - private static final byte[] EMPTY_BYTES = new byte[0]; - - @Override - public Class getClazz() - { - return MergingDigest.class; - } - - @Override - public MergingDigest fromByteBuffer(ByteBuffer buffer, int numBytes) - { - if (numBytes == 0) { - return null; - } - ByteBuffer readOnlyBuffer = buffer.asReadOnlyBuffer(); - readOnlyBuffer.limit(buffer.position() + numBytes); - return MergingDigest.fromBytes(readOnlyBuffer); - } - - @Override - public byte[] toBytes(@Nullable MergingDigest val) - { - if (val == null) { - return EMPTY_BYTES; - } - return TDigestSketchUtils.toBytes(val); - } - - @Override - public int compare(MergingDigest o1, MergingDigest o2) - { - return TDigestSketchAggregatorFactory.COMPARATOR.compare(o1, o2); - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregator.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregator.java deleted file mode 100644 index e41cb3ac3b8c..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregator.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Doubles; -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.PostAggregatorIds; -import org.apache.druid.query.cache.CacheKeyBuilder; - -import java.util.Comparator; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -/** - * Post aggregation operator that can take in aggregated T-Digest sketches and - * generate quantiles from it. 
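- *
- * For illustration, a spec using this post aggregator might look roughly like the
- * following, assuming an upstream "tDigestSketch" aggregation named "sketch" (the
- * names here are hypothetical):
- *
- * <pre>
- * {
- *   "type": "quantileFromTDigestSketch",
- *   "name": "median",
- *   "field": {"type": "fieldAccess", "fieldName": "sketch"},
- *   "fraction": 0.5
- * }
- * </pre>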
- */ -public class TDigestSketchToQuantilePostAggregator implements PostAggregator -{ - private final String name; - private final PostAggregator field; - - private final double fraction; - - public static final String TYPE_NAME = "quantileFromTDigestSketch"; - - @JsonCreator - public TDigestSketchToQuantilePostAggregator( - @JsonProperty("name") final String name, - @JsonProperty("field") final PostAggregator field, - @JsonProperty("fraction") final double fraction - ) - { - this.name = Preconditions.checkNotNull(name, "name is null"); - this.field = Preconditions.checkNotNull(field, "field is null"); - this.fraction = fraction; - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public PostAggregator getField() - { - return field; - } - - @JsonProperty - public double getFraction() - { - return fraction; - } - - @Override - public Object compute(final Map combinedAggregators) - { - final MergingDigest sketch = (MergingDigest) field.compute(combinedAggregators); - return sketch.quantile(fraction); - } - - @Override - public Comparator getComparator() - { - return Doubles::compare; - } - - @Override - public Set getDependentFields() - { - return field.getDependentFields(); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" + - "name='" + name + '\'' + - ", field=" + field + - ", fraction=" + fraction + - "}"; - } - - @Override - public byte[] getCacheKey() - { - final CacheKeyBuilder builder = new CacheKeyBuilder( - PostAggregatorIds.TDIGEST_SKETCH_TO_QUANTILE_CACHE_TYPE_ID).appendCacheable(field); - builder.appendDouble(fraction); - return builder.build(); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TDigestSketchToQuantilePostAggregator that = (TDigestSketchToQuantilePostAggregator) o; - return Double.compare(that.fraction, fraction) == 0 && - Objects.equals(name, that.name) && - Objects.equals(field, that.field); - } - - @Override - public int hashCode() - { - return Objects.hash(name, field, fraction); - } - - @Override - public PostAggregator decorate(final Map map) - { - return this; - } - -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregator.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregator.java deleted file mode 100644 index 245f648cc3cf..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregator.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.PostAggregatorIds; -import org.apache.druid.query.cache.CacheKeyBuilder; - -import java.util.Arrays; -import java.util.Comparator; -import java.util.Map; -import java.util.Set; - -/** - * Post aggregation operator that can take in aggregated T-Digest sketches and - * generate quantiles from it. - */ -public class TDigestSketchToQuantilesPostAggregator implements PostAggregator -{ - - private final String name; - private final PostAggregator field; - private final double[] fractions; - - public static final String TYPE_NAME = "quantilesFromTDigestSketch"; - - @JsonCreator - public TDigestSketchToQuantilesPostAggregator( - @JsonProperty("name") final String name, - @JsonProperty("field") final PostAggregator field, - @JsonProperty("fractions") final double[] fractions - ) - { - this.name = Preconditions.checkNotNull(name, "name is null"); - this.field = Preconditions.checkNotNull(field, "field is null"); - this.fractions = Preconditions.checkNotNull(fractions, "array of fractions is null"); - Preconditions.checkArgument(this.fractions.length >= 1, "Array of fractions cannot be empty"); - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public PostAggregator getField() - { - return field; - } - - @JsonProperty - public double[] getFractions() - { - return fractions; - } - - @Override - public Object compute(final Map combinedAggregators) - { - final MergingDigest sketch = (MergingDigest) field.compute(combinedAggregators); - double[] quantiles = new double[fractions.length]; - int i = 0; - - for (double f : fractions) { - quantiles[i++] = sketch.quantile(f); - } - return quantiles; - } - - @Override - public Comparator getComparator() - { - throw new IAE("Comparing arrays of quantiles is not supported"); - } - - @Override - public Set getDependentFields() - { - return field.getDependentFields(); - } - - @Override - public String toString() - { - return getClass().getSimpleName() + "{" + - "name='" + name + '\'' + - ", field=" + field + - ", fractions=" + Arrays.toString(fractions) + - "}"; - } - - @Override - public boolean equals(final Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final TDigestSketchToQuantilesPostAggregator that = (TDigestSketchToQuantilesPostAggregator) o; - if (!name.equals(that.name)) { - return false; - } - if (!Arrays.equals(fractions, that.fractions)) { - return false; - } - return field.equals(that.field); - } - - @Override - public int hashCode() - { - return (name.hashCode() * 31 + field.hashCode()) * 31 + Arrays.hashCode(fractions); - } - - @Override - public byte[] getCacheKey() - { - final CacheKeyBuilder builder = new CacheKeyBuilder( - PostAggregatorIds.TDIGEST_SKETCH_TO_QUANTILES_CACHE_TYPE_ID).appendCacheable(field); - for (final double value : fractions) { - builder.appendDouble(value); - } - return builder.build(); - } - - @Override - public 
PostAggregator decorate(final Map map) - { - return this; - } - -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchUtils.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchUtils.java deleted file mode 100644 index 3a5be1310497..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchUtils.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.segment.VirtualColumn; -import org.apache.druid.segment.virtual.ExpressionVirtualColumn; -import org.apache.druid.sql.calcite.aggregation.Aggregation; -import org.apache.druid.sql.calcite.expression.DruidExpression; - -import java.nio.ByteBuffer; - -public class TDigestSketchUtils -{ - // Class is not meant to be instantiated - private TDigestSketchUtils() - { - } - - public static MergingDigest deserialize(Object serializedSketch) - { - if (serializedSketch instanceof String) { - String str = (String) serializedSketch; - return MergingDigest.fromBytes(ByteBuffer.wrap(StringUtils.decodeBase64(StringUtils.toUtf8(str)))); - } else if (serializedSketch instanceof byte[]) { - return MergingDigest.fromBytes(ByteBuffer.wrap((byte[]) serializedSketch)); - } else if (serializedSketch instanceof MergingDigest) { - return (MergingDigest) serializedSketch; - } - throw new IAE( - "Object cannot be deserialized to a TDigest Sketch: " - + serializedSketch.getClass() - ); - } - - static byte[] toBytes(MergingDigest tDigest) - { - byte[] arr = new byte[tDigest.byteSize()]; - ByteBuffer result = ByteBuffer.wrap(arr); - tDigest.asBytes(result); - return result.array(); - } - - /** - * This method computes an estimate of the max intermediate size of a {@link MergingDigest}. - * Since there is no utility available in the T-Digest library to compute this size, - * the code below is based on - * {@link MergingDigest#MergingDigest(double, int, int)}. - * This estimate is current as of version 3.2 and may need to change in the future.
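- *
- * As a worked example, the default compression of 100 gives a centroid array size of
- * 2 * ceil(100) + 10 = 210 entries, so 2 * 210 * 8 = 3,360 bytes for the double arrays;
- * a temp buffer size of 5 * ceil(100) = 500 entries, so 2 * 500 * 8 = 8,000 bytes of
- * doubles plus 500 * 4 = 2,000 bytes of ints; with the extra 1,000 byte cushion the
- * estimate comes to 14,360 bytes.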
- */ - static int getMaxIntermdiateTDigestSize(int compression) - { - int intermediateSize = 0; - int centroidArraySize = (int) (2 * Math.ceil(compression)) + 10; - intermediateSize += 2 * centroidArraySize * Double.BYTES; - int tempBufferSize = (int) (5 * Math.ceil(compression)); - intermediateSize += 2 * tempBufferSize * Double.BYTES; - intermediateSize += tempBufferSize * Integer.BYTES; - // Adding an extra buffer of 1K for overhead - return intermediateSize + 1000; - } - - public static boolean matchingAggregatorFactoryExists( - final DruidExpression input, - final Integer compression, - final Aggregation existing, - final TDigestSketchAggregatorFactory factory - ) - { - // Check input for equivalence. - final boolean inputMatches; - final VirtualColumn virtualInput = existing.getVirtualColumns() - .stream() - .filter( - virtualColumn -> - virtualColumn.getOutputName() - .equals(factory.getFieldName()) - ) - .findFirst() - .orElse(null); - - if (virtualInput == null) { - inputMatches = input.isDirectColumnAccess() - && input.getDirectColumn().equals(factory.getFieldName()); - } else { - inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression() - .equals(input.getExpression()); - } - return inputMatches && compression == factory.getCompression(); - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestGenerateSketchSqlAggregator.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestGenerateSketchSqlAggregator.java deleted file mode 100644 index a7346ffb0ffb..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestGenerateSketchSqlAggregator.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch.sql; - -import org.apache.calcite.rel.core.AggregateCall; -import org.apache.calcite.rel.core.Project; -import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexLiteral; -import org.apache.calcite.rex.RexNode; -import org.apache.calcite.sql.SqlAggFunction; -import org.apache.calcite.sql.SqlFunctionCategory; -import org.apache.calcite.sql.SqlKind; -import org.apache.calcite.sql.type.OperandTypes; -import org.apache.calcite.sql.type.ReturnTypes; -import org.apache.calcite.sql.type.SqlTypeFamily; -import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchUtils; -import org.apache.druid.segment.VirtualColumn; -import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.sql.calcite.aggregation.Aggregation; -import org.apache.druid.sql.calcite.aggregation.SqlAggregator; -import org.apache.druid.sql.calcite.expression.DruidExpression; -import org.apache.druid.sql.calcite.expression.Expressions; -import org.apache.druid.sql.calcite.planner.PlannerContext; -import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry; - -import javax.annotation.Nullable; -import java.util.ArrayList; -import java.util.List; - -public class TDigestGenerateSketchSqlAggregator implements SqlAggregator -{ - private static final SqlAggFunction FUNCTION_INSTANCE = new TDigestGenerateSketchSqlAggregator.TDigestGenerateSketchSqlAggFunction(); - private static final String NAME = "TDIGEST_GENERATE_SKETCH"; - - @Override - public SqlAggFunction calciteFunction() - { - return FUNCTION_INSTANCE; - } - - @Nullable - @Override - public Aggregation toDruidAggregation( - final PlannerContext plannerContext, - final RowSignature rowSignature, - final VirtualColumnRegistry virtualColumnRegistry, - final RexBuilder rexBuilder, - final String name, - final AggregateCall aggregateCall, - final Project project, - final List existingAggregations, - final boolean finalizeAggregations - ) - { - final RexNode inputOperand = Expressions.fromFieldAccess( - rowSignature, - project, - aggregateCall.getArgList().get(0) - ); - final DruidExpression input = Expressions.toDruidExpression( - plannerContext, - rowSignature, - inputOperand - ); - if (input == null) { - return null; - } - - final AggregatorFactory aggregatorFactory; - final String aggName = StringUtils.format("%s:agg", name); - - Integer compression = TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION; - if (aggregateCall.getArgList().size() > 1) { - RexNode compressionOperand = Expressions.fromFieldAccess( - rowSignature, - project, - aggregateCall.getArgList().get(1) - ); - if (!compressionOperand.isA(SqlKind.LITERAL)) { - // compressionOperand must be a literal in order to plan. - return null; - } - compression = ((Number) RexLiteral.value(compressionOperand)).intValue(); - } - - // Look for existing matching aggregatorFactory. 
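- // (If a matching factory is found, it is reused so that the same input column and
- // compression are only sketched once per query.)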
- for (final Aggregation existing : existingAggregations) { - for (AggregatorFactory factory : existing.getAggregatorFactories()) { - if (factory instanceof TDigestSketchAggregatorFactory) { - final TDigestSketchAggregatorFactory theFactory = (TDigestSketchAggregatorFactory) factory; - final boolean matches = TDigestSketchUtils.matchingAggregatorFactoryExists( - input, - compression, - existing, - (TDigestSketchAggregatorFactory) factory - ); - - if (matches) { - // Found existing one. Use this. - return Aggregation.create( - theFactory - ); - } - } - } - } - - // No existing match found. Create a new one. - final List virtualColumns = new ArrayList<>(); - - if (input.isDirectColumnAccess()) { - aggregatorFactory = new TDigestSketchAggregatorFactory( - aggName, - input.getDirectColumn(), - compression - ); - } else { - VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression( - plannerContext, - input, - SqlTypeName.FLOAT - ); - virtualColumns.add(virtualColumn); - aggregatorFactory = new TDigestSketchAggregatorFactory( - aggName, - virtualColumn.getOutputName(), - compression - ); - } - - return Aggregation.create( - virtualColumns, - aggregatorFactory - ); - } - - private static class TDigestGenerateSketchSqlAggFunction extends SqlAggFunction - { - private static final String SIGNATURE_WITH_COMPRESSION = "'" + NAME + "(column, compression)'\n"; - - TDigestGenerateSketchSqlAggFunction() - { - super( - NAME, - null, - SqlKind.OTHER_FUNCTION, - ReturnTypes.explicit(SqlTypeName.OTHER), - null, - OperandTypes.or( - OperandTypes.ANY, - OperandTypes.and( - OperandTypes.sequence(SIGNATURE_WITH_COMPRESSION, OperandTypes.ANY, OperandTypes.LITERAL), - OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC) - ) - ), - SqlFunctionCategory.USER_DEFINED_FUNCTION, - false, - false - ); - } - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchQuantileSqlAggregator.java b/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchQuantileSqlAggregator.java deleted file mode 100644 index 09d8b02fecab..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchQuantileSqlAggregator.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch.sql; - -import com.google.common.collect.ImmutableList; -import org.apache.calcite.rel.core.AggregateCall; -import org.apache.calcite.rel.core.Project; -import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexLiteral; -import org.apache.calcite.rex.RexNode; -import org.apache.calcite.sql.SqlAggFunction; -import org.apache.calcite.sql.SqlFunctionCategory; -import org.apache.calcite.sql.SqlKind; -import org.apache.calcite.sql.type.OperandTypes; -import org.apache.calcite.sql.type.ReturnTypes; -import org.apache.calcite.sql.type.SqlTypeFamily; -import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchToQuantilePostAggregator; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchUtils; -import org.apache.druid.segment.VirtualColumn; -import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.sql.calcite.aggregation.Aggregation; -import org.apache.druid.sql.calcite.aggregation.SqlAggregator; -import org.apache.druid.sql.calcite.expression.DruidExpression; -import org.apache.druid.sql.calcite.expression.Expressions; -import org.apache.druid.sql.calcite.planner.PlannerContext; -import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry; - -import javax.annotation.Nullable; -import java.util.ArrayList; -import java.util.List; - -public class TDigestSketchQuantileSqlAggregator implements SqlAggregator -{ - private static final SqlAggFunction FUNCTION_INSTANCE = new TDigestSketchQuantileSqlAggFunction(); - private static final String NAME = "TDIGEST_QUANTILE"; - - @Override - public SqlAggFunction calciteFunction() - { - return FUNCTION_INSTANCE; - } - - @Nullable - @Override - public Aggregation toDruidAggregation( - final PlannerContext plannerContext, - final RowSignature rowSignature, - final VirtualColumnRegistry virtualColumnRegistry, - final RexBuilder rexBuilder, - final String name, - final AggregateCall aggregateCall, - final Project project, - final List existingAggregations, - final boolean finalizeAggregations - ) - { - // This is expected to be a tdigest sketch - final DruidExpression input = Expressions.toDruidExpression( - plannerContext, - rowSignature, - Expressions.fromFieldAccess( - rowSignature, - project, - aggregateCall.getArgList().get(0) - ) - ); - if (input == null) { - return null; - } - - final AggregatorFactory aggregatorFactory; - final String sketchName = StringUtils.format("%s:agg", name); - - // this is expected to be quantile fraction - final RexNode quantileArg = Expressions.fromFieldAccess( - rowSignature, - project, - aggregateCall.getArgList().get(1) - ); - - if (!quantileArg.isA(SqlKind.LITERAL)) { - // Quantile must be a literal in order to plan. 
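- // (The fraction is baked into the TDigestSketchToQuantilePostAggregator at planning
- // time, so a per-row quantile expression cannot be supported.)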
- return null; - } - - final double quantile = ((Number) RexLiteral.value(quantileArg)).floatValue(); - Integer compression = TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION; - if (aggregateCall.getArgList().size() > 2) { - final RexNode compressionArg = Expressions.fromFieldAccess( - rowSignature, - project, - aggregateCall.getArgList().get(2) - ); - compression = ((Number) RexLiteral.value(compressionArg)).intValue(); - } - - // Look for existing matching aggregatorFactory. - for (final Aggregation existing : existingAggregations) { - for (AggregatorFactory factory : existing.getAggregatorFactories()) { - if (factory instanceof TDigestSketchAggregatorFactory) { - final boolean matches = TDigestSketchUtils.matchingAggregatorFactoryExists( - input, - compression, - existing, - (TDigestSketchAggregatorFactory) factory - ); - - if (matches) { - // Found existing one. Use this. - return Aggregation.create( - ImmutableList.of(), - new TDigestSketchToQuantilePostAggregator( - name, - new FieldAccessPostAggregator( - factory.getName(), - factory.getName() - ), - quantile - ) - ); - } - } - } - } - - // No existing match found. Create a new one. - final List virtualColumns = new ArrayList<>(); - - if (input.isDirectColumnAccess()) { - aggregatorFactory = new TDigestSketchAggregatorFactory( - sketchName, - input.getDirectColumn(), - compression - ); - } else { - VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression( - plannerContext, - input, - SqlTypeName.FLOAT - ); - virtualColumns.add(virtualColumn); - aggregatorFactory = new TDigestSketchAggregatorFactory( - sketchName, - virtualColumn.getOutputName(), - compression - ); - } - - return Aggregation.create( - virtualColumns, - ImmutableList.of(aggregatorFactory), - new TDigestSketchToQuantilePostAggregator( - name, - new FieldAccessPostAggregator( - sketchName, - sketchName - ), - quantile - ) - ); - } - - private static class TDigestSketchQuantileSqlAggFunction extends SqlAggFunction - { - private static final String SIGNATURE1 = "'" + NAME + "(column, quantile)'\n"; - private static final String SIGNATURE2 = "'" + NAME + "(column, quantile, compression)'\n"; - - TDigestSketchQuantileSqlAggFunction() - { - super( - NAME, - null, - SqlKind.OTHER_FUNCTION, - ReturnTypes.explicit(SqlTypeName.DOUBLE), - null, - OperandTypes.or( - OperandTypes.and( - OperandTypes.sequence(SIGNATURE1, OperandTypes.ANY, OperandTypes.LITERAL), - OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC) - ), - OperandTypes.and( - OperandTypes.sequence(SIGNATURE2, OperandTypes.ANY, OperandTypes.LITERAL, OperandTypes.LITERAL), - OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.NUMERIC, SqlTypeFamily.NUMERIC) - ) - ), - SqlFunctionCategory.USER_DEFINED_FUNCTION, - false, - false - ); - } - } -} diff --git a/extensions-contrib/tdigestsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/tdigestsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 89a23890b8ad..000000000000 --- a/extensions-contrib/tdigestsketch/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchModule diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/GenerateTestData.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/GenerateTestData.java deleted file mode 100644 index 0d5f66a5fa1e..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/GenerateTestData.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.tdunning.math.stats.MergingDigest; -import org.apache.druid.java.util.common.StringUtils; - -import java.io.BufferedWriter; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Random; -import java.util.concurrent.ThreadLocalRandom; - -/** - * This class is largely a copy of the GenerateTestData class from the DataSketches project. - * It is used for generating test data for {@link TDigestSketchAggregatorTest}.
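- *
- * It writes two TSV files: doubles_build_data.tsv, holding raw double values (with
- * roughly 20% of them left empty) for building sketches at ingestion time, and
- * doubles_sketch_data.tsv, holding Base64-encoded pre-built MergingDigest sketches
- * for testing ingestion of sketches.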
- */ -public class GenerateTestData -{ - - public static void main(String[] args) throws Exception - { - Path buildPath = FileSystems.getDefault().getPath("doubles_build_data.tsv"); - Path sketchPath = FileSystems.getDefault().getPath("doubles_sketch_data.tsv"); - BufferedWriter buildData = Files.newBufferedWriter(buildPath, StandardCharsets.UTF_8); - BufferedWriter sketchData = Files.newBufferedWriter(sketchPath, StandardCharsets.UTF_8); - Random rand = ThreadLocalRandom.current(); - int sequenceNumber = 0; - for (int i = 0; i < 20; i++) { - int product = rand.nextInt(10); - MergingDigest sketch = new MergingDigest(100); - for (int j = 0; j < 20; j++) { - double value = rand.nextDouble(); - buildData.write("2016010101"); - buildData.write('\t'); - buildData.write(Integer.toString(sequenceNumber)); // dimension with unique numbers for ingesting raw data - buildData.write('\t'); - buildData.write(Integer.toString(product)); // product dimension - buildData.write('\t'); - // make 20% of the values null - buildData.write((rand.nextInt(10) % 5 == 0) ? "" : Double.toString(value)); - buildData.newLine(); - sketch.add(value); - sequenceNumber++; - } - sketchData.write("2016010101"); - sketchData.write('\t'); - sketchData.write(Integer.toString(product)); // product dimension - sketchData.write('\t'); - byte[] bytes = new byte[sketch.byteSize()]; - ByteBuffer buffer = ByteBuffer.wrap(bytes); - sketch.asBytes(buffer); - sketchData.write(StringUtils.encodeBase64String(buffer.array())); - sketchData.newLine(); - } - buildData.close(); - sketchData.close(); - } - -} diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorTest.java deleted file mode 100644 index a2220e60c3d8..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchAggregatorTest.java +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.common.config.NullHandling; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.query.aggregation.AggregationTestHelper; -import org.apache.druid.query.aggregation.AggregatorFactory; -import org.apache.druid.query.groupby.GroupByQueryConfig; -import org.apache.druid.query.groupby.GroupByQueryRunnerTest; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.testing.InitializedNullHandlingTest; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.io.File; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -@RunWith(Parameterized.class) -public class TDigestSketchAggregatorTest extends InitializedNullHandlingTest -{ - private final AggregationTestHelper helper; - - @Rule - public final TemporaryFolder tempFolder = new TemporaryFolder(); - - public TDigestSketchAggregatorTest(final GroupByQueryConfig config) - { - TDigestSketchModule.registerSerde(); - TDigestSketchModule module = new TDigestSketchModule(); - helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( - module.getJacksonModules(), config, tempFolder); - } - - @Parameterized.Parameters(name = "{0}") - public static Collection constructorFeeder() - { - final List constructors = new ArrayList<>(); - for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { - constructors.add(new Object[]{config}); - } - return constructors; - } - - // this is to test Json properties and equals - @Test - public void serializeDeserializeFactoryWithFieldName() throws Exception - { - ObjectMapper objectMapper = new DefaultObjectMapper(); - new TDigestSketchModule().getJacksonModules().forEach(objectMapper::registerModule); - TDigestSketchAggregatorFactory factory = new TDigestSketchAggregatorFactory("name", "filedName", 128); - - AggregatorFactory other = objectMapper.readValue( - objectMapper.writeValueAsString(factory), - AggregatorFactory.class - ); - - Assert.assertEquals(factory, other); - } - - @Test - public void buildingSketchesAtIngestionTime() throws Exception - { - Sequence seq = helper.createIndexAndRunQueryOnSegment( - new File(this.getClass().getClassLoader().getResource("doubles_build_data.tsv").getFile()), - String.join( - "\n", - "{", - " \"type\": \"string\",", - " \"parseSpec\": {", - " \"format\": \"tsv\",", - " \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},", - " \"dimensionsSpec\": {", - " \"dimensions\": [\"product\"],", - " \"dimensionExclusions\": [ \"sequenceNumber\"],", - " \"spatialDimensions\": []", - " },", - " \"columns\": [\"timestamp\", \"sequenceNumber\", \"product\", \"value\"]", - " }", - "}" - ), - "[{\"type\": \"tDigestSketch\", \"name\": \"sketch\", \"fieldName\": \"value\", \"compression\": 200}]", - 0, // minTimestamp - Granularities.NONE, - 10, // maxRowCount - String.join( - "\n", - "{", - " \"queryType\": \"groupBy\",", - " \"dataSource\": \"test_datasource\",", - " \"granularity\": \"ALL\",", - " \"dimensions\": [],", - " \"aggregations\": [", - " {\"type\": \"tDigestSketch\", \"name\": \"merged_sketch\", \"fieldName\": \"sketch\", " - + 
"\"compression\": " - + "200}", - " ],", - " \"postAggregations\": [", - " {\"type\": \"quantilesFromTDigestSketch\", \"name\": \"quantiles\", \"fractions\": [0, 0.5, 1], " - + "\"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"merged_sketch\"}}", - " ],", - " \"intervals\": [\"2016-01-01T00:00:00.000Z/2016-01-31T00:00:00.000Z\"]", - "}" - ) - ); - List results = seq.toList(); - Assert.assertEquals(1, results.size()); - ResultRow row = results.get(0); - - // post agg - Object quantilesObject = row.get(1); // "quantiles" - Assert.assertTrue(quantilesObject instanceof double[]); - double[] quantiles = (double[]) quantilesObject; - Assert.assertEquals(0.001, quantiles[0], 0.0006); // min value - Assert.assertEquals(NullHandling.replaceWithDefault() ? 0.47 : 0.5, quantiles[1], 0.05); // median value - Assert.assertEquals(1, quantiles[2], 0.05); // max value - } - - @Test - public void buildingSketchesAtQueryTime() throws Exception - { - Sequence seq = helper.createIndexAndRunQueryOnSegment( - new File(this.getClass().getClassLoader().getResource("doubles_build_data.tsv").getFile()), - String.join( - "\n", - "{", - " \"type\": \"string\",", - " \"parseSpec\": {", - " \"format\": \"tsv\",", - " \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},", - " \"dimensionsSpec\": {", - " \"dimensions\": [\"sequenceNumber\", \"product\"],", - " \"dimensionExclusions\": [],", - " \"spatialDimensions\": []", - " },", - " \"columns\": [\"timestamp\", \"sequenceNumber\", \"product\", \"value\"]", - " }", - "}" - ), - "[{\"type\": \"doubleSum\", \"name\": \"value\", \"fieldName\": \"value\"}]", - 0, // minTimestamp - Granularities.NONE, - 10, // maxRowCount - String.join( - "\n", - "{", - " \"queryType\": \"groupBy\",", - " \"dataSource\": \"test_datasource\",", - " \"granularity\": \"ALL\",", - " \"dimensions\": [],", - " \"aggregations\": [", - " {\"type\": \"tDigestSketch\", \"name\": \"sketch\", \"fieldName\": \"value\", \"compression\": 200}", - " ],", - " \"postAggregations\": [", - " {\"type\": \"quantilesFromTDigestSketch\", \"name\": \"quantiles\", \"fractions\": [0, 0.5, 1], \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}}", - " ],", - " \"intervals\": [\"2016-01-01T00:00:00.000Z/2016-01-31T00:00:00.000Z\"]", - "}" - ) - ); - List results = seq.toList(); - Assert.assertEquals(1, results.size()); - ResultRow row = results.get(0); - - - // post agg - Object quantilesObject = row.get(1); // "quantiles" - Assert.assertTrue(quantilesObject instanceof double[]); - double[] quantiles = (double[]) quantilesObject; - Assert.assertEquals(NullHandling.replaceWithDefault() ? 0.0 : 0.001, quantiles[0], 0.0006); // min value - Assert.assertEquals(NullHandling.replaceWithDefault() ? 
0.35 : 0.5, quantiles[1], 0.05); // median value - Assert.assertEquals(1, quantiles[2], 0.05); // max value - } - - @Test - public void testIngestingSketches() throws Exception - { - Sequence seq = helper.createIndexAndRunQueryOnSegment( - new File(this.getClass().getClassLoader().getResource("doubles_sketch_data.tsv").getFile()), - String.join( - "\n", - "{", - " \"type\": \"string\",", - " \"parseSpec\": {", - " \"format\": \"tsv\",", - " \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},", - " \"dimensionsSpec\": {", - " \"dimensions\": [\"product\"],", - " \"dimensionExclusions\": [],", - " \"spatialDimensions\": []", - " },", - " \"columns\": [\"timestamp\", \"product\", \"sketch\"]", - " }", - "}" - ), - String.join( - "\n", - "[", - " {\"type\": \"tDigestSketch\", \"name\": \"first_level_merge_sketch\", \"fieldName\": \"sketch\", " - + "\"compression\": " - + "200}", - "]" - ), - 0, // minTimestamp - Granularities.NONE, - 10, // maxRowCount - String.join( - "\n", - "{", - " \"queryType\": \"groupBy\",", - " \"dataSource\": \"test_datasource\",", - " \"granularity\": \"ALL\",", - " \"dimensions\": [],", - " \"aggregations\": [", - " {\"type\": \"tDigestSketch\", \"name\": \"second_level_merge_sketch\", \"fieldName\": " - + "\"first_level_merge_sketch\", \"compression\": " - + "200}", - " ],", - " \"postAggregations\": [", - " {\"type\": \"quantilesFromTDigestSketch\", \"name\": \"quantiles\", \"fractions\": [0, 0.5, 1], \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"second_level_merge_sketch\"}}", - " ],", - " \"intervals\": [\"2016-01-01T00:00:00.000Z/2016-01-31T00:00:00.000Z\"]", - "}" - ) - ); - List results = seq.toList(); - Assert.assertEquals(1, results.size()); - ResultRow row = results.get(0); - - // post agg - Object quantilesObject = row.get(1); // "quantiles" - Assert.assertTrue(quantilesObject instanceof double[]); - double[] quantiles = (double[]) quantilesObject; - Assert.assertEquals(0.001, quantiles[0], 0.0006); // min value - Assert.assertEquals(NullHandling.replaceWithDefault() ? 0.47 : 0.5, quantiles[1], 0.05); // median value - Assert.assertEquals(1, quantiles[2], 0.05); // max value - } -} diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregatorTest.java deleted file mode 100644 index db85a4cbe297..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilePostAggregatorTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import nl.jqno.equalsverifier.EqualsVerifier; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.ConstantPostAggregator; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class TDigestSketchToQuantilePostAggregatorTest -{ - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testSerde() throws Exception - { - TDigestSketchToQuantilePostAggregator there = - new TDigestSketchToQuantilePostAggregator("post", new ConstantPostAggregator("", 100), 0.5); - - DefaultObjectMapper mapper = new DefaultObjectMapper(); - TDigestSketchToQuantilePostAggregator andBackAgain = mapper.readValue( - mapper.writeValueAsString(there), - TDigestSketchToQuantilePostAggregator.class - ); - - Assert.assertEquals(there, andBackAgain); - Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey()); - Assert.assertEquals(there.getDependentFields(), andBackAgain.getDependentFields()); - } - - @Test - public void testToString() - { - PostAggregator postAgg = - new TDigestSketchToQuantilePostAggregator("post", new ConstantPostAggregator("", 100), 0.5); - - Assert.assertEquals( - "TDigestSketchToQuantilePostAggregator{name='post', field=ConstantPostAggregator{name='', constantValue=100}, fraction=0.5}", - postAgg.toString() - ); - } - - @Test - public void testEquals() - { - EqualsVerifier.forClass(TDigestSketchToQuantilePostAggregator.class) - .withNonnullFields("name", "field", "fraction") - .usingGetClass() - .verify(); - } -} diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregatorTest.java deleted file mode 100644 index 7660a3b10125..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/TDigestSketchToQuantilesPostAggregatorTest.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch; - -import nl.jqno.equalsverifier.EqualsVerifier; -import org.apache.druid.jackson.DefaultObjectMapper; -import org.apache.druid.java.util.common.IAE; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.ConstantPostAggregator; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -public class TDigestSketchToQuantilesPostAggregatorTest -{ - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void testSerde() throws Exception - { - TDigestSketchToQuantilesPostAggregator there = - new TDigestSketchToQuantilesPostAggregator("post", new ConstantPostAggregator("", 100), new double[]{0.25, 0.75}); - - DefaultObjectMapper mapper = new DefaultObjectMapper(); - TDigestSketchToQuantilesPostAggregator andBackAgain = mapper.readValue( - mapper.writeValueAsString(there), - TDigestSketchToQuantilesPostAggregator.class - ); - - Assert.assertEquals(there, andBackAgain); - Assert.assertArrayEquals(there.getCacheKey(), andBackAgain.getCacheKey()); - Assert.assertEquals(there.getDependentFields(), andBackAgain.getDependentFields()); - } - - @Test - public void testToString() - { - PostAggregator postAgg = new TDigestSketchToQuantilesPostAggregator( - "post", - new ConstantPostAggregator("", 100), - new double[]{0.25, 0.75} - ); - - Assert.assertEquals( - "TDigestSketchToQuantilesPostAggregator{name='post', field=ConstantPostAggregator{name='', constantValue=100}, fractions=[0.25, 0.75]}", - postAgg.toString() - ); - } - - @Test - public void testComparator() - { - expectedException.expect(IAE.class); - expectedException.expectMessage("Comparing arrays of quantiles is not supported"); - PostAggregator postAgg = new TDigestSketchToQuantilesPostAggregator( - "post", - new ConstantPostAggregator("", 100), - new double[]{0.25, 0.75} - ); - postAgg.getComparator(); - } - - @Test - public void testEquals() - { - EqualsVerifier.forClass(TDigestSketchToQuantilesPostAggregator.class) - .withNonnullFields("name", "field", "fractions") - .usingGetClass() - .verify(); - } -} diff --git a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java b/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java deleted file mode 100644 index dcf1d0f225c6..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/java/org/apache/druid/query/aggregation/tdigestsketch/sql/TDigestSketchSqlAggregatorTest.java +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation.tdigestsketch.sql; - -import com.fasterxml.jackson.databind.Module; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import org.apache.calcite.schema.SchemaPlus; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.java.util.common.io.Closer; -import org.apache.druid.query.Druids; -import org.apache.druid.query.QueryDataSource; -import org.apache.druid.query.QueryRunnerFactoryConglomerate; -import org.apache.druid.query.aggregation.CountAggregatorFactory; -import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; -import org.apache.druid.query.aggregation.PostAggregator; -import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchModule; -import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchToQuantilePostAggregator; -import org.apache.druid.query.dimension.DefaultDimensionSpec; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; -import org.apache.druid.segment.IndexBuilder; -import org.apache.druid.segment.QueryableIndex; -import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; -import org.apache.druid.server.QueryStackTests; -import org.apache.druid.server.security.AuthTestUtils; -import org.apache.druid.server.security.AuthenticationResult; -import org.apache.druid.sql.SqlLifecycle; -import org.apache.druid.sql.SqlLifecycleFactory; -import org.apache.druid.sql.calcite.filtration.Filtration; -import org.apache.druid.sql.calcite.planner.DruidOperatorTable; -import org.apache.druid.sql.calcite.planner.PlannerConfig; -import org.apache.druid.sql.calcite.planner.PlannerContext; -import org.apache.druid.sql.calcite.planner.PlannerFactory; -import org.apache.druid.sql.calcite.util.CalciteTestBase; -import org.apache.druid.sql.calcite.util.CalciteTests; -import org.apache.druid.sql.calcite.util.QueryLogHook; -import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; -import org.apache.druid.timeline.DataSegment; -import org.apache.druid.timeline.partition.LinearShardSpec; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.stream.Stream; - -public class TDigestSketchSqlAggregatorTest extends CalciteTestBase -{ - private static final String DATA_SOURCE = "foo"; - - private static QueryRunnerFactoryConglomerate conglomerate; - private static Closer resourceCloser; - private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT; - private static final Map<String, Object> QUERY_CONTEXT_DEFAULT = ImmutableMap.of( - PlannerContext.CTX_SQL_QUERY_ID, "dummy" - ); - - @BeforeClass - public static void setUpClass() - { - resourceCloser = Closer.create(); - conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser); - } - - @AfterClass - public static void tearDownClass() throws IOException
- { - resourceCloser.close(); - } - - @Rule - public TemporaryFolder temporaryFolder = new TemporaryFolder(); - - @Rule - public QueryLogHook queryLogHook = QueryLogHook.create(); - - private SpecificSegmentsQuerySegmentWalker walker; - private SqlLifecycleFactory sqlLifecycleFactory; - - @Before - public void setUp() throws Exception - { - TDigestSketchModule.registerSerde(); - for (Module mod : new TDigestSketchModule().getJacksonModules()) { - CalciteTests.getJsonMapper().registerModule(mod); - } - - final QueryableIndex index = - IndexBuilder.create() - .tmpDir(temporaryFolder.newFolder()) - .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance()) - .schema( - new IncrementalIndexSchema.Builder() - .withMetrics( - new CountAggregatorFactory("cnt"), - new DoubleSumAggregatorFactory("m1", "m1"), - new TDigestSketchAggregatorFactory( - "qsketch_m1", - "m1", - 128 - ) - ) - .withRollup(false) - .build() - ) - .rows(CalciteTests.ROWS1) - .buildMMappedIndex(); - - walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add( - DataSegment.builder() - .dataSource(DATA_SOURCE) - .interval(index.getDataInterval()) - .version("1") - .shardSpec(new LinearShardSpec(0)) - .size(0) - .build(), - index - ); - - final PlannerConfig plannerConfig = new PlannerConfig(); - final DruidOperatorTable operatorTable = new DruidOperatorTable( - ImmutableSet.of(new TDigestSketchQuantileSqlAggregator(), new TDigestGenerateSketchSqlAggregator()), - ImmutableSet.of() - ); - SchemaPlus rootSchema = - CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER); - sqlLifecycleFactory = CalciteTests.createSqlLifecycleFactory( - new PlannerFactory( - rootSchema, - CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate), - operatorTable, - CalciteTests.createExprMacroTable(), - plannerConfig, - AuthTestUtils.TEST_AUTHORIZER_MAPPER, - CalciteTests.getJsonMapper(), - CalciteTests.DRUID_SCHEMA_NAME - ) - ); - } - - @After - public void tearDown() throws Exception - { - walker.close(); - walker = null; - } - - @Test - public void testComputingSketchOnNumericValues() throws Exception - { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - final String sql = "SELECT\n" - + "TDIGEST_GENERATE_SKETCH(m1, 200)" - + "FROM foo"; - - // Verify results - final List<Object[]> results = sqlLifecycle.runSimple( - sql, - QUERY_CONTEXT_DEFAULT, - DEFAULT_PARAMETERS, - authenticationResult - ).toList(); - final List<String[]> expectedResults = ImmutableList.of( - new String[]{ - "\"AAAAAT/wAAAAAAAAQBgAAAAAAABAaQAAAAAAAAAAAAY/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAABAAAAAAAAAAD/wAAAAAAAAQAgAAAAAAAA/8AAAAAAAAEAQAAAAAAAAP/AAAAAAAABAFAAAAAAAAD/wAAAAAAAAQBgAAAAAAAA=\"" - } - ); - - Assert.assertEquals(expectedResults.size(), results.size()); - - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularities.ALL) - .aggregators(ImmutableList.of( - new TDigestSketchAggregatorFactory("a0:agg", "m1", 200) - )) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); - } - - @Test - public void testDefaultCompressionForTDigestGenerateSketchAgg() throws Exception - { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - final String sql = "SELECT\n" - + "TDIGEST_GENERATE_SKETCH(m1)" -
+ "FROM foo"; - - // Log query - sqlLifecycle.runSimple( - sql, - QUERY_CONTEXT_DEFAULT, - DEFAULT_PARAMETERS, - authenticationResult - ).toList(); - - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularities.ALL) - .aggregators(ImmutableList.of( - new TDigestSketchAggregatorFactory("a0:agg", "m1", TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION) - )) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); - } - - @Test - public void testComputingQuantileOnPreAggregatedSketch() throws Exception - { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - final String sql = "SELECT\n" - + "TDIGEST_QUANTILE(qsketch_m1, 0.1),\n" - + "TDIGEST_QUANTILE(qsketch_m1, 0.4),\n" - + "TDIGEST_QUANTILE(qsketch_m1, 0.8),\n" - + "TDIGEST_QUANTILE(qsketch_m1, 1.0)\n" - + "FROM foo"; - - // Verify results - final List results = sqlLifecycle.runSimple( - sql, - QUERY_CONTEXT_DEFAULT, - DEFAULT_PARAMETERS, - authenticationResult - ).toList(); - final List expectedResults = ImmutableList.of( - new double[]{ - 1.1, - 2.9, - 5.3, - 6.0 - } - ); - - Assert.assertEquals(expectedResults.size(), results.size()); - for (int i = 0; i < expectedResults.size(); i++) { - Object[] objects = results.get(i); - Assert.assertArrayEquals( - expectedResults.get(i), - Stream.of(objects).mapToDouble(value -> ((Double) value).doubleValue()).toArray(), - 0.000001 - ); - } - - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularities.ALL) - .aggregators(ImmutableList.of( - new TDigestSketchAggregatorFactory("a0:agg", "qsketch_m1", 100) - )) - .postAggregators( - new TDigestSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.1f), - new TDigestSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a0:agg"), 0.4f), - new TDigestSketchToQuantilePostAggregator("a2", makeFieldAccessPostAgg("a0:agg"), 0.8f), - new TDigestSketchToQuantilePostAggregator("a3", makeFieldAccessPostAgg("a0:agg"), 1.0f) - ) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); - } - - @Test - public void testGeneratingSketchAndComputingQuantileOnFly() throws Exception - { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - final String sql = "SELECT TDIGEST_QUANTILE(x, 0.0), TDIGEST_QUANTILE(x, 0.5), TDIGEST_QUANTILE(x, 1.0)\n" - + "FROM (SELECT dim1, TDIGEST_GENERATE_SKETCH(m1, 200) AS x FROM foo group by dim1)"; - - // Verify results - final List results = sqlLifecycle.runSimple( - sql, - QUERY_CONTEXT_DEFAULT, - DEFAULT_PARAMETERS, - authenticationResult - ).toList(); - final List expectedResults = ImmutableList.of( - new double[]{ - 1.0, - 3.5, - 6.0 - } - ); - Assert.assertEquals(expectedResults.size(), results.size()); - for (int i = 0; i < expectedResults.size(); i++) { - Object[] objects = results.get(i); - Assert.assertArrayEquals( - expectedResults.get(i), - Stream.of(objects).mapToDouble(value -> ((Double) value).doubleValue()).toArray(), - 0.000001 - ); - } - - // Verify query - Assert.assertEquals( - 
GroupByQuery.builder() - .setDataSource( - new QueryDataSource( - GroupByQuery.builder() - .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .setGranularity(Granularities.ALL) - .setDimensions(new DefaultDimensionSpec("dim1", "d0")) - .setAggregatorSpecs( - ImmutableList.of( - new TDigestSketchAggregatorFactory("a0:agg", "m1", 200) - ) - ) - .setContext(ImmutableMap.of(PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build() - ) - ) - .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .setGranularity(Granularities.ALL) - .setAggregatorSpecs( - ImmutableList.of( - new TDigestSketchAggregatorFactory("_a0:agg", "a0:agg", 100) - ) - ) - .setPostAggregatorSpecs( - ImmutableList.of( - new TDigestSketchToQuantilePostAggregator("_a0", makeFieldAccessPostAgg("_a0:agg"), 0.0f), - new TDigestSketchToQuantilePostAggregator("_a1", makeFieldAccessPostAgg("_a0:agg"), 0.5f), - new TDigestSketchToQuantilePostAggregator("_a2", makeFieldAccessPostAgg("_a0:agg"), 1.0f) - ) - ) - .setContext(ImmutableMap.of(PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); - } - - @Test - public void testQuantileOnNumericValues() throws Exception - { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - final String sql = "SELECT\n" - + "TDIGEST_QUANTILE(m1, 0.0), TDIGEST_QUANTILE(m1, 0.5), TDIGEST_QUANTILE(m1, 1.0)\n" - + "FROM foo"; - - // Verify results - final List<Object[]> results = sqlLifecycle.runSimple( - sql, - QUERY_CONTEXT_DEFAULT, - DEFAULT_PARAMETERS, - authenticationResult - ).toList(); - final List<double[]> expectedResults = ImmutableList.of( - new double[]{ - 1.0, - 3.5, - 6.0 - } - ); - Assert.assertEquals(expectedResults.size(), results.size()); - for (int i = 0; i < expectedResults.size(); i++) { - Object[] objects = results.get(i); - Assert.assertArrayEquals( - expectedResults.get(i), - Stream.of(objects).mapToDouble(value -> ((Double) value).doubleValue()).toArray(), - 0.000001 - ); - } - - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularities.ALL) - .aggregators(ImmutableList.of( - new TDigestSketchAggregatorFactory("a0:agg", "m1", null) - )) - .postAggregators( - new TDigestSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.0f), - new TDigestSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a0:agg"), 0.5f), - new TDigestSketchToQuantilePostAggregator("a2", makeFieldAccessPostAgg("a0:agg"), 1.0f) - ) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); - } - - @Test - public void testCompressionParamForTDigestQuantileAgg() throws Exception - { - SqlLifecycle sqlLifecycle = sqlLifecycleFactory.factorize(); - final String sql = "SELECT\n" - + "TDIGEST_QUANTILE(m1, 0.0), TDIGEST_QUANTILE(m1, 0.5, 200), TDIGEST_QUANTILE(m1, 1.0, 300)\n" - + "FROM foo"; - - // Log query - sqlLifecycle.runSimple( - sql, - QUERY_CONTEXT_DEFAULT, - DEFAULT_PARAMETERS, - authenticationResult - ).toList(); - - // Verify query - Assert.assertEquals( - Druids.newTimeseriesQueryBuilder() - .dataSource(CalciteTests.DATASOURCE1) - .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) -
.granularity(Granularities.ALL) - .aggregators(ImmutableList.of( - new TDigestSketchAggregatorFactory("a0:agg", "m1", - TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION - ), - new TDigestSketchAggregatorFactory("a1:agg", "m1", - 200 - ), - new TDigestSketchAggregatorFactory("a2:agg", "m1", - 300 - ) - )) - .postAggregators( - new TDigestSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.0f), - new TDigestSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.5f), - new TDigestSketchToQuantilePostAggregator("a2", makeFieldAccessPostAgg("a2:agg"), 1.0f) - ) - .context(ImmutableMap.of("skipEmptyBuckets", true, PlannerContext.CTX_SQL_QUERY_ID, "dummy")) - .build(), - Iterables.getOnlyElement(queryLogHook.getRecordedQueries()) - ); - } - - - private static PostAggregator makeFieldAccessPostAgg(String name) - { - return new FieldAccessPostAggregator(name, name); - } -} diff --git a/extensions-contrib/tdigestsketch/src/test/resources/doubles_build_data.tsv b/extensions-contrib/tdigestsketch/src/test/resources/doubles_build_data.tsv deleted file mode 100644 index 3f370597fdca..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/resources/doubles_build_data.tsv +++ /dev/null @@ -1,400 +0,0 @@ -2016010101 0 3 0.4806453614322793 -2016010101 1 3 0.6419678871373409 -2016010101 2 3 -2016010101 3 3 0.7020558734824461 -2016010101 4 3 0.22613138189319204 -2016010101 5 3 0.06909583208106174 -2016010101 6 3 0.5376346416013744 -2016010101 7 3 -2016010101 8 3 0.8149467521679741 -2016010101 9 3 0.7251559245004248 -2016010101 10 3 0.49855832099546715 -2016010101 11 3 0.5798820881203658 -2016010101 12 3 0.5677866915980911 -2016010101 13 3 -2016010101 14 3 0.8516993764638755 -2016010101 15 3 -2016010101 16 3 0.8109196906946308 -2016010101 17 3 0.7791330245692353 -2016010101 18 3 0.5499336255807227 -2016010101 19 3 0.23268682623354076 -2016010101 20 4 -2016010101 21 4 0.6399367705457312 -2016010101 22 4 0.6484832626361673 -2016010101 23 4 0.1662318456280305 -2016010101 24 4 0.19354936279837376 -2016010101 25 4 0.5802200573751369 -2016010101 26 4 0.4269381252768615 -2016010101 27 4 0.3070593197796049 -2016010101 28 4 0.35382552338561346 -2016010101 29 4 0.7119445442397593 -2016010101 30 4 0.7869733865139936 -2016010101 31 4 0.0805272768355706 -2016010101 32 4 0.03922392755570692 -2016010101 33 4 0.5840598980488145 -2016010101 34 4 0.20511232154618975 -2016010101 35 4 -2016010101 36 4 0.07710156514815447 -2016010101 37 4 0.23290085820099904 -2016010101 38 4 -2016010101 39 4 0.5733869947288424 -2016010101 40 1 0.9858872798659482 -2016010101 41 1 -2016010101 42 1 0.14839505115144447 -2016010101 43 1 0.09840489123150087 -2016010101 44 1 0.5479691071569414 -2016010101 45 1 0.0023559980599913155 -2016010101 46 1 0.7896086868719593 -2016010101 47 1 0.0319014726614526 -2016010101 48 1 0.842015759464531 -2016010101 49 1 0.6604873440176756 -2016010101 50 1 -2016010101 51 1 0.6615216129493895 -2016010101 52 1 0.8367143245438604 -2016010101 53 1 0.9758944303783366 -2016010101 54 1 0.5709096686275406 -2016010101 55 1 0.21653637585091645 -2016010101 56 1 0.3261165064735497 -2016010101 57 1 0.5334133677491381 -2016010101 58 1 0.8620204683908819 -2016010101 59 1 0.5484128559617791 -2016010101 60 4 0.12033006888377096 -2016010101 61 4 -2016010101 62 4 -2016010101 63 4 -2016010101 64 4 0.9858000457292002 -2016010101 65 4 0.19210448139852032 -2016010101 66 4 0.3974718376343238 -2016010101 67 4 0.9732376558043744 -2016010101 68 4 0.11356123536860396 -2016010101 69 4 -2016010101 70 4 
0.8076862670275398 -2016010101 71 4 0.09236347669493417 -2016010101 72 4 -2016010101 73 4 0.854189677115464 -2016010101 74 4 0.7396706282809763 -2016010101 75 4 0.31708622509985873 -2016010101 76 4 0.43648603480196757 -2016010101 77 4 0.589213905769546 -2016010101 78 4 0.7415426026749161 -2016010101 79 4 0.7358282894757189 -2016010101 80 9 -2016010101 81 9 0.4255911372929798 -2016010101 82 9 0.9331910249355163 -2016010101 83 9 0.06445900439995578 -2016010101 84 9 0.8115413172888221 -2016010101 85 9 -2016010101 86 9 0.0963857458400349 -2016010101 87 9 0.06153408750057188 -2016010101 88 9 0.44027926988833244 -2016010101 89 9 -2016010101 90 9 0.5233141932162747 -2016010101 91 9 0.32621493954167546 -2016010101 92 9 0.34647299592637026 -2016010101 93 9 0.15192824813669525 -2016010101 94 9 0.644889890933122 -2016010101 95 9 -2016010101 96 9 0.3015940264437008 -2016010101 97 9 0.435933310145303 -2016010101 98 9 0.7965720726264395 -2016010101 99 9 0.6948764513522069 -2016010101 100 8 -2016010101 101 8 0.9541985425546318 -2016010101 102 8 0.4078660397769671 -2016010101 103 8 0.3761817754153792 -2016010101 104 8 0.4630916538268274 -2016010101 105 8 0.17184297531868054 -2016010101 106 8 0.616480413683724 -2016010101 107 8 0.9958850843107127 -2016010101 108 8 0.29264803594577704 -2016010101 109 8 0.9282101022070045 -2016010101 110 8 0.12103623749465953 -2016010101 111 8 0.5756846725738404 -2016010101 112 8 0.4986956805169892 -2016010101 113 8 0.16368254315504893 -2016010101 114 8 0.8246939105217244 -2016010101 115 8 -2016010101 116 8 0.2802464651921067 -2016010101 117 8 0.14533675338382146 -2016010101 118 8 0.1550635453509872 -2016010101 119 8 0.9764250057102191 -2016010101 120 3 0.5639798746175808 -2016010101 121 3 0.5252357184891421 -2016010101 122 3 0.4224461843890118 -2016010101 123 3 0.8623764079415396 -2016010101 124 3 0.23821471344004463 -2016010101 125 3 0.6562988643211294 -2016010101 126 3 0.6045632944796968 -2016010101 127 3 0.9216081547045152 -2016010101 128 3 0.9111787373931876 -2016010101 129 3 -2016010101 130 3 0.22309670266127934 -2016010101 131 3 0.5610286454514603 -2016010101 132 3 0.6449829420830484 -2016010101 133 3 0.47359871694806055 -2016010101 134 3 0.4456546777062259 -2016010101 135 3 0.3233370634627728 -2016010101 136 3 0.535941427413942 -2016010101 137 3 0.1465746735321063 -2016010101 138 3 0.1619540222600243 -2016010101 139 3 -2016010101 140 1 0.017761763118174123 -2016010101 141 1 0.19652899466185436 -2016010101 142 1 0.8918577110251682 -2016010101 143 1 0.5483769339947813 -2016010101 144 1 0.35583586810262346 -2016010101 145 1 0.7872104182932219 -2016010101 146 1 0.4708873523759258 -2016010101 147 1 -2016010101 148 1 0.5782684331898654 -2016010101 149 1 0.9493458125552832 -2016010101 150 1 0.34871248062641946 -2016010101 151 1 0.12964044092772886 -2016010101 152 1 0.7565381339014415 -2016010101 153 1 0.722119729581673 -2016010101 154 1 0.35411310281363473 -2016010101 155 1 0.4485837785057891 -2016010101 156 1 -2016010101 157 1 0.29365186469373317 -2016010101 158 1 0.28026386528276104 -2016010101 159 1 0.04245162399196889 -2016010101 160 3 0.17387064034440958 -2016010101 161 3 0.08578972205632507 -2016010101 162 3 0.14307939514143686 -2016010101 163 3 -2016010101 164 3 0.05790520846514535 -2016010101 165 3 0.5340068761562542 -2016010101 166 3 0.700106038458213 -2016010101 167 3 0.38998802776748753 -2016010101 168 3 0.5050112412495604 -2016010101 169 3 0.4923503731785702 -2016010101 170 3 0.09763476584855624 -2016010101 171 3 0.9181948066342877 -2016010101 172 3 -2016010101 
173 3 0.845482260534406 -2016010101 174 3 0.43828851854546647 -2016010101 175 3 0.753761527760726 -2016010101 176 3 0.04212838877462455 -2016010101 177 3 0.691468086019305 -2016010101 178 3 0.5740697793884527 -2016010101 179 3 -2016010101 180 2 -2016010101 181 2 -2016010101 182 2 0.6391276620023679 -2016010101 183 2 0.18995634100597447 -2016010101 184 2 -2016010101 185 2 0.3601348485475453 -2016010101 186 2 0.5898107379081887 -2016010101 187 2 0.7654577155215041 -2016010101 188 2 0.9861893898445978 -2016010101 189 2 -2016010101 190 2 -2016010101 191 2 -2016010101 192 2 0.2502642896132842 -2016010101 193 2 0.007316463522836103 -2016010101 194 2 0.7995874341737429 -2016010101 195 2 0.8767428241522481 -2016010101 196 2 0.20911811774820832 -2016010101 197 2 0.6944454810391126 -2016010101 198 2 0.6138639733419406 -2016010101 199 2 0.5340384213931233 -2016010101 200 4 0.610341783366869 -2016010101 201 4 0.8095356348162531 -2016010101 202 4 0.14576711100717 -2016010101 203 4 0.324791997579967 -2016010101 204 4 0.7012027438404959 -2016010101 205 4 0.6855125265148104 -2016010101 206 4 0.725721465888873 -2016010101 207 4 0.37334253570089415 -2016010101 208 4 0.7033238012522983 -2016010101 209 4 0.6289935861560849 -2016010101 210 4 0.22100961408197517 -2016010101 211 4 0.5361822265452533 -2016010101 212 4 0.23524963349934325 -2016010101 213 4 -2016010101 214 4 0.2151612160248132 -2016010101 215 4 0.27034057325897454 -2016010101 216 4 0.45788942603194727 -2016010101 217 4 0.1900006529735202 -2016010101 218 4 0.21761539728764212 -2016010101 219 4 -2016010101 220 9 0.11191094372411481 -2016010101 221 9 0.7257156210111483 -2016010101 222 9 -2016010101 223 9 0.09767407184252375 -2016010101 224 9 -2016010101 225 9 0.8016522677725126 -2016010101 226 9 0.8944075176139713 -2016010101 227 9 0.7071810476904448 -2016010101 228 9 0.7425380900058187 -2016010101 229 9 -2016010101 230 9 0.1031211607034147 -2016010101 231 9 0.38694779402631885 -2016010101 232 9 0.6121565493162887 -2016010101 233 9 0.08826787524008717 -2016010101 234 9 0.34982652581050666 -2016010101 235 9 0.294468865237702 -2016010101 236 9 -2016010101 237 9 0.5190906777357499 -2016010101 238 9 0.2424354751098784 -2016010101 239 9 0.7584304131139413 -2016010101 240 7 0.35704199266816017 -2016010101 241 7 0.6213205251006355 -2016010101 242 7 0.6794778377157997 -2016010101 243 7 0.9660152207885527 -2016010101 244 7 0.746230867578865 -2016010101 245 7 -2016010101 246 7 0.6637336893540101 -2016010101 247 7 0.527025000973831 -2016010101 248 7 -2016010101 249 7 0.3689478346414077 -2016010101 250 7 0.1046606291981873 -2016010101 251 7 0.42368572552625094 -2016010101 252 7 0.10870686807188557 -2016010101 253 7 0.06569693633418128 -2016010101 254 7 0.29873141724229657 -2016010101 255 7 0.29158560982689863 -2016010101 256 7 0.7678017218931323 -2016010101 257 7 0.8900303350507414 -2016010101 258 7 0.4419580092209411 -2016010101 259 7 0.6381120775261563 -2016010101 260 3 -2016010101 261 3 -2016010101 262 3 0.4227980856443392 -2016010101 263 3 -2016010101 264 3 0.8755401132173695 -2016010101 265 3 0.5275377089199973 -2016010101 266 3 0.12424387758622746 -2016010101 267 3 0.01547071944810885 -2016010101 268 3 0.37451206779305857 -2016010101 269 3 0.1989423043276275 -2016010101 270 3 0.5949853939670747 -2016010101 271 3 -2016010101 272 3 -2016010101 273 3 0.6788434157726136 -2016010101 274 3 0.4138070035489033 -2016010101 275 3 0.3262153201368553 -2016010101 276 3 -2016010101 277 3 0.43177816031851957 -2016010101 278 3 0.551450932204876 -2016010101 279 3 
0.7282741792330263 -2016010101 280 3 0.9122069202680759 -2016010101 281 3 0.7413285089462801 -2016010101 282 3 0.03663726371403986 -2016010101 283 3 0.23947998113921076 -2016010101 284 3 0.9362838173143953 -2016010101 285 3 0.28291781154121487 -2016010101 286 3 0.9083170701852669 -2016010101 287 3 0.6955809083495521 -2016010101 288 3 0.9102559703489196 -2016010101 289 3 0.5856005115807994 -2016010101 290 3 -2016010101 291 3 0.7547680618605328 -2016010101 292 3 0.3635413762669889 -2016010101 293 3 0.5060093676499698 -2016010101 294 3 0.748493032129933 -2016010101 295 3 0.36173218418100006 -2016010101 296 3 0.8017631866114252 -2016010101 297 3 0.09251208639535535 -2016010101 298 3 0.3956843833130532 -2016010101 299 3 0.8965031193765175 -2016010101 300 8 0.06138169953397199 -2016010101 301 8 0.22791862853999423 -2016010101 302 8 0.4246825688431949 -2016010101 303 8 0.7695915902917281 -2016010101 304 8 -2016010101 305 8 -2016010101 306 8 0.15549809858942576 -2016010101 307 8 0.3236736994444922 -2016010101 308 8 -2016010101 309 8 0.44112090310236873 -2016010101 310 8 0.28658459361862487 -2016010101 311 8 0.9346348774247973 -2016010101 312 8 -2016010101 313 8 0.32921840037119676 -2016010101 314 8 -2016010101 315 8 0.5619515224721092 -2016010101 316 8 0.5436607404043168 -2016010101 317 8 0.13126793260709302 -2016010101 318 8 -2016010101 319 8 0.08486286173372692 -2016010101 320 2 0.9380754465335691 -2016010101 321 2 0.8698491012104429 -2016010101 322 2 0.2141986220865666 -2016010101 323 2 0.8191428099424286 -2016010101 324 2 0.5374397266436216 -2016010101 325 2 0.8014642292436202 -2016010101 326 2 0.2454633759035828 -2016010101 327 2 0.2659186693999648 -2016010101 328 2 0.12215527116473579 -2016010101 329 2 0.23512672887844477 -2016010101 330 2 0.17317858307470202 -2016010101 331 2 0.014761951009997776 -2016010101 332 2 -2016010101 333 2 -2016010101 334 2 -2016010101 335 2 0.4839228057946262 -2016010101 336 2 0.13113562836707116 -2016010101 337 2 0.5776063788487777 -2016010101 338 2 0.18353702932146465 -2016010101 339 2 0.9550943323447759 -2016010101 340 3 0.010294130457498829 -2016010101 341 3 -2016010101 342 3 -2016010101 343 3 0.043939221631064784 -2016010101 344 3 0.468615649016912 -2016010101 345 3 0.8182318625708176 -2016010101 346 3 -2016010101 347 3 -2016010101 348 3 0.13438311444894857 -2016010101 349 3 0.9612868278105434 -2016010101 350 3 0.8957083777498813 -2016010101 351 3 0.49303068183606236 -2016010101 352 3 0.3907574108316315 -2016010101 353 3 0.7609044660129155 -2016010101 354 3 0.0015306502862820759 -2016010101 355 3 -2016010101 356 3 0.0777103319482042 -2016010101 357 3 0.040512845904230654 -2016010101 358 3 -2016010101 359 3 0.8615439676963182 -2016010101 360 0 0.541631191849709 -2016010101 361 0 0.38839056303777064 -2016010101 362 0 -2016010101 363 0 -2016010101 364 0 0.25282693081575114 -2016010101 365 0 -2016010101 366 0 0.8088940492058253 -2016010101 367 0 0.23287052835067323 -2016010101 368 0 0.2388792692348808 -2016010101 369 0 -2016010101 370 0 0.047812293417679674 -2016010101 371 0 0.5904656835670964 -2016010101 372 0 -2016010101 373 0 0.21010216933405235 -2016010101 374 0 0.6128169315116692 -2016010101 375 0 0.0021979088847082773 -2016010101 376 0 -2016010101 377 0 0.029233195772592535 -2016010101 378 0 -2016010101 379 0 0.13492455955229932 -2016010101 380 7 0.45162411597798047 -2016010101 381 7 0.6017062629482749 -2016010101 382 7 -2016010101 383 7 0.6347243397708097 -2016010101 384 7 -2016010101 385 7 0.3679411384173339 -2016010101 386 7 0.11111298782358625 
-2016010101 387 7 0.848348012358186 -2016010101 388 7 0.20181516171015812 -2016010101 389 7 -2016010101 390 7 -2016010101 391 7 0.03477179524923002 -2016010101 392 7 -2016010101 393 7 -2016010101 394 7 -2016010101 395 7 0.4974934049704761 -2016010101 396 7 0.6947690881973858 -2016010101 397 7 0.21185369837139645 -2016010101 398 7 -2016010101 399 7 0.6859065700191138 diff --git a/extensions-contrib/tdigestsketch/src/test/resources/doubles_sketch_data.tsv b/extensions-contrib/tdigestsketch/src/test/resources/doubles_sketch_data.tsv deleted file mode 100644 index b6dbb511f1bb..000000000000 --- a/extensions-contrib/tdigestsketch/src/test/resources/doubles_sketch_data.tsv +++ /dev/null @@ -1,20 +0,0 @@ -2016010101 3 AAAAAT+xsEOzE/xAP+tBHwz98zNAWQAAAAAAAAAAABQ/8AAAAAAAAD+xsEOzE/xAP/AAAAAAAAA/zPHfhOpPWD/wAAAAAAAAP83IrpJxCSw/8AAAAAAAAD/QXr1zLi6iP/AAAAAAAAA/3sLkwxTY6j/wAAAAAAAAP999yNeVpKY/8AAAAAAAAD/fseTgIDWMP/AAAAAAAAA/3+hhKPS/uD/wAAAAAAAAP+E0TZBb/Jg/8AAAAAAAAD/hmQ5nGuCRP/AAAAAAAAA/4itO/vCSJD/wAAAAAAAAP+KOZOGAa9E/8AAAAAAAAD/kiwA9CsmCP/AAAAAAAAA/5nc94RJMWT/wAAAAAAAAP+c0ejKHW+k/8AAAAAAAAD/o7qhheEUPP/AAAAAAAAA/6RJmipS0YD/wAAAAAAAAP+nzDdnm6HI/8AAAAAAAAD/qFAs2EV8oP/AAAAAAAAA/60EfDP3zMw== -2016010101 4 AAAAAT+kFSicJRYgP+ku4s+8zI5AWQAAAAAAAAAAABQ/8AAAAAAAAD+kFSicJRYgP/AAAAAAAAA/s7ztnMgf6D/wAAAAAAAAP7Sdb4Rx2Mg/8AAAAAAAAD/DaXVAMS+EP/AAAAAAAAA/xUcVykNXoD/wAAAAAAAAP8jGObuwuUg/8AAAAAAAAD/KQR7chhUsP/AAAAAAAAA/zc+yAJeBAD/wAAAAAAAAP9Om3CIYqoo/8AAAAAAAAD/WpRPO25pOP/AAAAAAAAA/21L0SV6y7j/wAAAAAAAAP+BzIym5Ip8/8AAAAAAAAD/iWS+uyfy+P/AAAAAAAAA/4pEpp10aRT/wAAAAAAAAP+KwnmIgysY/8AAAAAAAAD/kelytoA6DP/AAAAAAAAA/5MBf+KDSrT/wAAAAAAAAP+Ut1lrG3Tw/8AAAAAAAAD/myD/swmmbP/AAAAAAAAA/6S7iz7zMjg== -2016010101 1 AAAAAT9jTOLTv00AP++MY3sSIfhAWQAAAAAAAAAAABQ/8AAAAAAAAD9jTOLTv00AP/AAAAAAAAA/oFVjy4zkAD/wAAAAAAAAP7QIPYZEaBg/8AAAAAAAAD+5MRAdmxFYP/AAAAAAAAA/wv6b6cq2uD/wAAAAAAAAP8u3dsZWR0g/8AAAAAAAAD/U3xfEf1a6P/AAAAAAAAA/3LsvjOo31j/wAAAAAAAAP+ERuOk3aqc/8AAAAAAAAD/hiPaCTqLhP/AAAAAAAAA/4YyZHiH6RD/wAAAAAAAAP+JE5Fp3Nyo/8AAAAAAAAD/lIrZav0pJP/AAAAAAAAA/5SsvX6bfoD/wAAAAAAAAP+lEeW/YFQ0/8AAAAAAAAD/qxl0egFbsP/AAAAAAAAA/6vHLCLO8hj/wAAAAAAAAP+uVq/MHFgg/8AAAAAAAAD/vOob02lomP/AAAAAAAAA/74xjexIh+A== -2016010101 4 AAAAAT+3pSH/v+XQP++LrImZq8hAWQAAAAAAAAAAABQ/8AAAAAAAAD+3pSH/v+XQP/AAAAAAAAA/t9AJhnsVeD/wAAAAAAAAP70SWWAAYyA/8AAAAAAAAD++zfOOlMsQP/AAAAAAAAA/yJbhMILGlD/wAAAAAAAAP9A2VdMNF14/8AAAAAAAAD/USyQFtDkkP/AAAAAAAAA/1WkV3NUchD/wAAAAAAAAP9lwLbfuG7w/8AAAAAAAAD/b72MfKKaaP/AAAAAAAAA/4l6iqiRjxT/wAAAAAAAAP+La1x70HbQ/8AAAAAAAAD/ni+fE2KJ6P/AAAAAAAAA/56thvMjqAz/wAAAAAAAAP+e6t41iiyg/8AAAAAAAAD/p2JDeyfiuP/AAAAAAAAA/61WFlvlVwD/wAAAAAAAAP+yTZZqLLAk/8AAAAAAAAD/vJMNL3Uu3P/AAAAAAAAA/74usiZmryA== -2016010101 9 AAAAAT+vgWVazu3gP+3cs2ygmbVAWQAAAAAAAAAAABQ/8AAAAAAAAD+vgWVazu3gP/AAAAAAAAA/sIBio9SdoD/wAAAAAAAAP7isvHou+wg/8AAAAAAAAD/DcmKEiveIP/AAAAAAAAA/001RCA+oaj/wAAAAAAAAP9TgtKAzFEo/8AAAAAAAAD/WLJ0SnNuSP/AAAAAAAAA/2zzinAkCxj/wAAAAAAAAP9vmVNOT6RY/8AAAAAAAAD/cLYkaUbzAP/AAAAAAAAA/3Up9Y8hcuD/wAAAAAAAAP91y9a0/yY4/8AAAAAAAAD/giQL+z+AZP/AAAAAAAAA/4L79aCywVT/wAAAAAAAAP+Si8B/ihUA/8AAAAAAAAD/mPG2KKi83P/AAAAAAAAA/6IZBcInfYT/wAAAAAAAAP+l9hLcazME/8AAAAAAAAD/p+CV/I3bJP/AAAAAAAAA/7dyzbKCZtQ== -2016010101 8 
AAAAAT++/DsZq6K4P+/eSmV2CfFAWQAAAAAAAAAAABQ/8AAAAAAAAD++/DsZq6K4P/AAAAAAAAA/wpplDVhcwD/wAAAAAAAAP8PZH0wKynA/8AAAAAAAAD/E84yw43YsP/AAAAAAAAA/xf7zW4VBuD/wAAAAAAAAP9Hvjt60dfo/8AAAAAAAAD/Sur7T6Aw0P/AAAAAAAAA/2BNcubCoRj/wAAAAAAAAP9oaeil/blQ/8AAAAAAAAD/do0stDydaP/AAAAAAAAA/3+qhSZ6GEj/wAAAAAAAAP+FFqmG3uBw/8AAAAAAAAD/ibAJDMGtrP/AAAAAAAAA/47o1IeytOT/wAAAAAAAAP+pj5Hvc1kY/8AAAAAAAAD/ssfXmciNJP/AAAAAAAAA/7bPlrBl4Bz/wAAAAAAAAP+6Iy2HFN1w/8AAAAAAAAD/vPt+nULRCP/AAAAAAAAA/795KZXYJ8Q== -2016010101 3 AAAAAT+Re3LE3bVAP+190GKF2YJAWQAAAAAAAAAAABQ/8AAAAAAAAD+Re3LE3bVAP/AAAAAAAAA/wsL1ep8C0D/wAAAAAAAAP8S66M6H/rQ/8AAAAAAAAD/Mjm7I40fgP/AAAAAAAAA/zn3R2dNUFD/wAAAAAAAAP9SxjfBKEPg/8AAAAAAAAD/bCVu4kVSmP/AAAAAAAAA/3IWbMoOwtj/wAAAAAAAAP91IqdyrmNA/8AAAAAAAAD/eT3D+Lgv6P/AAAAAAAAA/4M67IzNC/j/wAAAAAAAAP+EmbqLqD+I/8AAAAAAAAD/h8/JYiqmVP/AAAAAAAAA/4gwfhaK1yD/wAAAAAAAAP+NYlR9E38Q/8AAAAAAAAD/ko7NEVy+OP/AAAAAAAAA/5QBmedUofD/wAAAAAAAAP+uYlmiebYE/8AAAAAAAAD/tKGBPvUINP/AAAAAAAAA/7X3QYoXZgg== -2016010101 1 AAAAAT+SMCO+2PEAP+5hCngwn6RAWQAAAAAAAAAAABQ/8AAAAAAAAD+SMCO+2PEAP/AAAAAAAAA/pbw4IWpJ8D/wAAAAAAAAP7dVtWNupgA/8AAAAAAAAD/AmA7XAwXUP/AAAAAAAAA/ySfcsmTrDD/wAAAAAAAAP9Hv19no9eY/8AAAAAAAAD/SyzEw0TdQP/AAAAAAAAA/1lFOJv/XID/wAAAAAAAAP9apygDq234/8AAAAAAAAD/WxgPOD6aAP/AAAAAAAAA/3LWYvIyzxD/wAAAAAAAAP94jBLSjfww/8AAAAAAAAD/hjE3IrG1AP/AAAAAAAAA/4oEszRuCHT/wAAAAAAAAP+U/HSqtIks/8AAAAAAAAD/nG5rVyzBUP/AAAAAAAAA/6DWPdekS7T/wAAAAAAAAP+kw0+c0ec0/8AAAAAAAAD/sihkusTuGP/AAAAAAAAA/7mEKeDCfpA== -2016010101 3 AAAAAT+lkdooClXQP+1h2hM7Pe1AWQAAAAAAAAAAABQ/8AAAAAAAAD+lkdooClXQP/AAAAAAAAA/raXAYUBw8D/wAAAAAAAAP7X2ULKQmcA/8AAAAAAAAD+4/peORay4P/AAAAAAAAA/wlBs9W6VWD/wAAAAAAAAP8ZBZKUBxMQ/8AAAAAAAAD/SuFZxcpXCP/AAAAAAAAA/1i+HOa7COD/wAAAAAAAAP9j1kFhF8g4/8AAAAAAAAD/cDOtJV11GP/AAAAAAAAA/3FQTaYlSWD/wAAAAAAAAP9+CqyO+bDY/8AAAAAAAAD/gKQ1VqO9wP/AAAAAAAAA/4RaVlp3Eej/wAAAAAAAAP+Jex5YDDQU/8AAAAAAAAD/mIIGt9ce4P/AAAAAAAAA/5mdEx10f5D/wAAAAAAAAP+ge0H7W49s/8AAAAAAAAD/rDjDQSv1XP/AAAAAAAAA/7WHaEzs97Q== -2016010101 2 AAAAAT9999442OqAP++O3Q0hbodAWQAAAAAAAAAAABQ/8AAAAAAAAD9999442OqAP/AAAAAAAAA/yFB9SCTtDD/wAAAAAAAAP8rEYepdZJQ/8AAAAAAAAD/QBFSCz7oCP/AAAAAAAAA/0wEEESPJpj/wAAAAAAAAP9cMcwkqV/w/8AAAAAAAAD/cFhjUNpfQP/AAAAAAAAA/4RbXvlYcHD/wAAAAAAAAP+LfusTEpkg/8AAAAAAAAD/jpMYPNkewP/AAAAAAAAA/5HO72sickT/wAAAAAAAAP+SotU5UAuM/8AAAAAAAAD/mOOW6vWARP/AAAAAAAAA/6H6hLdRX6j/wAAAAAAAAP+mWOGMCM28/8AAAAAAAAD/rvxpFgM51P/AAAAAAAAA/7A5G95eSGj/wAAAAAAAAP+1XL4NZ1x0/8AAAAAAAAD/ux0oguTaqP/AAAAAAAAA/747dDSFuhw== -2016010101 4 AAAAAT+m3C8b+aEwP+nnt0aPb99AWQAAAAAAAAAAABQ/8AAAAAAAAD+m3C8b+aEwP/AAAAAAAAA/wqh/J03ZZD/wAAAAAAAAP8hR8P9etYQ/8AAAAAAAAD/LimcZGNpcP/AAAAAAAAA/y9rSQzptnD/wAAAAAAAAP8xKCwRKtYQ/8AAAAAAAAD/OHKj1I0kAP/AAAAAAAAA/0U1CjDt30D/wAAAAAAAAP9TJZF/m8So/8AAAAAAAAD/X5NgXQqCcP/AAAAAAAAA/3U4Pc39zgD/wAAAAAAAAP+EoZ6D2qps/8AAAAAAAAD/jh+t93i02P/AAAAAAAAA/5CC3KD3kgD/wAAAAAAAAP+XLXomaYzU/8AAAAAAAAD/l77f3TCRnP/AAAAAAAAA/5nBAvJUh9T/wAAAAAAAAP+aBoOqcEJc/8AAAAAAAAD/nORw5P+61P/AAAAAAAAA/6ee3Ro9v3w== -2016010101 9 AAAAAT94j9Ko3+8AP+ye/IOuWodAWQAAAAAAAAAAABQ/8AAAAAAAAD94j9Ko3+8AP/AAAAAAAAA/tpi5NXGOoD/wAAAAAAAAP7kBKwA7FVA/8AAAAAAAAD+6ZiX8vyl4P/AAAAAAAAA/vKYyE1wMwD/wAAAAAAAAP88IICp+Wuw/8AAAAAAAAD/Rf1DR/nYCP/AAAAAAAAA/0tiT8Hi23j/wAAAAAAAAP9ZjjsvoSIQ/8AAAAAAAAD/Yw8CuJo10P/AAAAAAAAA/4JxkDZESmz/wAAAAAAAAP+OWyVTrD70/8AAAAAAAAD/lCywmoa8pP/AAAAAAAAA/5qE6JgXOqD/wAAAAAAAAP+c5D/dODgc/8AAAAAAAAD/na/LamNpTP/AAAAAAAAA/58LfPZN/Wj/wAAAAAAAAP+hFD9uTt3s/8AAAAAAAAD/ppyKoGxwSP/AAAAAAAAA/7J78g65ahw== -2016010101 7 
AAAAAT+w0YOxALEgP+7pmMCXM6NAWQAAAAAAAAAAABQ/8AAAAAAAAD+w0YOxALEgP/AAAAAAAAA/ussJ+5XCYD/wAAAAAAAAP7vUNps4Jlg/8AAAAAAAAD/SqVawjDUuP/AAAAAAAAA/0x5qYNX6LD/wAAAAAAAAP9bZxqhzvSY/8AAAAAAAAD/XnNdg7cBMP/AAAAAAAAA/2x2qu7q0uD/wAAAAAAAAP9xJCj7zyhI/8AAAAAAAAD/g3WOI62xRP/AAAAAAAAA/4+HblPSBPj/wAAAAAAAAP+RragUFCeU/8AAAAAAAAD/lPU5vIOy5P/AAAAAAAAA/5b5ITmsTtD/wAAAAAAAAP+dDXlx0KII/8AAAAAAAAD/nqnp9eJ8IP/AAAAAAAAA/5+EfjnCKtT/wAAAAAAAAP+iR1Oqq/eA/8AAAAAAAAD/seyDlr7UZP/AAAAAAAAA/7umYwJczow== -2016010101 3 AAAAAT+PrxzQmgxAP+yI5iOn0R9AWQAAAAAAAAAAABQ/8AAAAAAAAD+PrxzQmgxAP/AAAAAAAAA/usZCS3p5+D/wAAAAAAAAP7/Ocl72CPg/8AAAAAAAAD/JdvEBcGOYP/AAAAAAAAA/y8dhB3biJD/wAAAAAAAAP8/6diire1w/8AAAAAAAAD/Ss7JjSMrKP/AAAAAAAAA/1OC2ONxJIj/wAAAAAAAAP9f4AXbIObI/8AAAAAAAAD/aL6okrWAeP/AAAAAAAAA/2nvQXsZKej/wAAAAAAAAP9sPH7Op2JQ/8AAAAAAAAD/bokDdbHxMP/AAAAAAAAA/4OGWwub7Ij/wAAAAAAAAP+GlfGzlZaA/8AAAAAAAAD/jCh7PFfXbP/AAAAAAAAA/5bkV07slUz/wAAAAAAAAP+dOBabKd4Y/8AAAAAAAAD/sBGyzE1oGP/AAAAAAAAA/7IjmI6fRHw== -2016010101 3 AAAAAT+iwh6S7vcQP+32CXrkdbBAWQAAAAAAAAAAABQ/8AAAAAAAAD+iwh6S7vcQP/AAAAAAAAA/t67fQY2C2D/wAAAAAAAAP86nR6+FEdg/8AAAAAAAAD/SG1NPAaBOP/AAAAAAAAA/1yaevz30bj/wAAAAAAAAP9dEQwxzz6I/8AAAAAAAAD/ZUuSXd4T8P/AAAAAAAAA/34pBg2b31j/wAAAAAAAAP+AxOo6w1yE/8AAAAAAAAD/ivT1IuFU4P/AAAAAAAAA/5kIy5KKqZD/wAAAAAAAAP+e49pCwiPE/8AAAAAAAAD/n86eoyQWmP/AAAAAAAAA/6CcPWbgzjj/wAAAAAAAAP+moC0U0QAg/8AAAAAAAAD/ssCdPT333P/AAAAAAAAA/7RDu9dsEKT/wAAAAAAAAP+0g0SD0ZTc/8AAAAAAAAD/tMMyRN499P/AAAAAAAAA/7fYJeuR1sA== -2016010101 8 AAAAAT+vbWwQJIKAP+3oh2cHsfVAWQAAAAAAAAAAABQ/8AAAAAAAAD+vbWwQJIKAP/AAAAAAAAA/sD2W5LsTwD/wAAAAAAAAP7W5ko/KlGg/8AAAAAAAAD/AzWM6x868P/AAAAAAAAA/w+dcmAQQ9D/wAAAAAAAAP80scAfdPcQ/8AAAAAAAAD/SV2boSEjQP/AAAAAAAAA/1LcR5GwflD/wAAAAAAAAP9UR6g21fII/8AAAAAAAAD/a3gGas/4GP/AAAAAAAAA/2y3/zBc39j/wAAAAAAAAP9w7UysaBrI/8AAAAAAAAD/hZas1hPtiP/AAAAAAAAA/4YSE6Xzv8z/wAAAAAAAAP+H7gcJekFQ/8AAAAAAAAD/iw9La42+dP/AAAAAAAAA/47nu8/tkND/wAAAAAAAAP+igforyjBw/8AAAAAAAAD/tQfieUskQP/AAAAAAAAA/7eiHZwex9Q== -2016010101 2 AAAAAT+OO4OGgTfAP+6QIf1AgStAWQAAAAAAAAAAABQ/8AAAAAAAAD+OO4OGgTfAP/AAAAAAAAA/vb9gdDyVyD/wAAAAAAAAP79FkV6vweA/8AAAAAAAAD/AyQ1hlqdMP/AAAAAAAAA/xiq3P1Y0FD/wAAAAAAAAP8d+JDFFNSA/8AAAAAAAAD/LatxGWuN4P/AAAAAAAAA/zhih9XlkfD/wAAAAAAAAP89rWAnvkfw/8AAAAAAAAD/RBM+9Hf7uP/AAAAAAAAA/3viXXCtLbj/wAAAAAAAAP98Cuvp1PWo/8AAAAAAAAD/hMrTMMC1XP/AAAAAAAAA/4nvAX2O6Qj/wAAAAAAAAP+MIMzc5k54/8AAAAAAAAD/ppZhPsHx7P/AAAAAAAAA/6jZq+26ZzT/wAAAAAAAAP+vVzchE7E8/8AAAAAAAAD/uBLbMgVpTP/AAAAAAAAA/7pAh/UCBKw== -2016010101 3 AAAAAT9ZFAM69NoAP+7C3JfwsyNAWQAAAAAAAAAAABQ/8AAAAAAAAD9ZFAM69NoAP/AAAAAAAAA/hRUWzT7ngD/wAAAAAAAAP6S+GYhz/3A/8AAAAAAAAD+mfzOf1ZCgP/AAAAAAAAA/s+TTBkdiID/wAAAAAAAAP8Ezd0TYr5w/8AAAAAAAAD/WeqxSn9JsP/AAAAAAAAA/2JQy3vAB1j/wAAAAAAAAP9kCK18MQW4/8AAAAAAAAD/d/cx9uvkQP/AAAAAAAAA/343Qj5pFID/wAAAAAAAAP+EZfDhmZ0Q/8AAAAAAAAD/hx1Fh5B0NP/AAAAAAAAA/52S6S92nrT/wAAAAAAAAP+hZVFKc/HE/8AAAAAAAAD/pikIpuRuUP/AAAAAAAAA/6i70lkktsj/wAAAAAAAAP+uRxKeqS+s/8AAAAAAAAD/sqaSdpgvqP/AAAAAAAAA/7sLcl/CzIw== -2016010101 0 AAAAAT9iAVlY8O0AP+n+Tr9bC/ZAWQAAAAAAAAAAABQ/8AAAAAAAAD9iAVlY8O0AP/AAAAAAAAA/ne9OjzQSID/wAAAAAAAAP54+gQvEzwA/8AAAAAAAAD+gt+lQ1DIAP/AAAAAAAAA/qHraWSbA4D/wAAAAAAAAP8FFNT1aJ3w/8AAAAAAAAD/IC7GNR4BIP/AAAAAAAAA/yuSgvQ3gWD/wAAAAAAAAP8wsFl5eTkQ/8AAAAAAAAD/NzrOTu/P0P/AAAAAAAAA/zpOYjIcxTD/wAAAAAAAAP9AuUQHZtYw/8AAAAAAAAD/X/YXsSa9WP/AAAAAAAAA/2E6WGGV2WD/wAAAAAAAAP9jbZBeUn8Q/8AAAAAAAAD/hVQrv7522P/AAAAAAAAA/4uUYSgqXQT/wAAAAAAAAP+OcMkDo4tg/8AAAAAAAAD/p4nXF6JO9P/AAAAAAAAA/6f5Ov1sL9g== -2016010101 7 
AAAAAT+hzZvW1llwP+5/TQhG10RAWQAAAAAAAAAAABQ/8AAAAAAAAD+hzZvW1llwP/AAAAAAAAA/vHHmmNz3WD/wAAAAAAAAP8a/FYscQ/g/8AAAAAAAAD/J1RRHsOgEP/AAAAAAAAA/ypIAO1TrBD/wAAAAAAAAP8seBaEFWZQ/8AAAAAAAAD/R+kIoQAXoP/AAAAAAAAA/0qaIewbYAj/wAAAAAAAAP9eMWP0Wv9Q/8AAAAAAAAD/c52jWDXYqP/AAAAAAAAA/39bulBS6bj/wAAAAAAAAP+GEPfm4qVs/8AAAAAAAAD/iaPd5T7uoP/AAAAAAAAA/40EtfiUosT/wAAAAAAAAP+RPqWspTmU/8AAAAAAAAD/l8vJVyv8AP/AAAAAAAAA/5juMYgKLXD/wAAAAAAAAP+slqrsWj3c/8AAAAAAAAD/tTWV1+bJJP/AAAAAAAAA/7n9NCEbXRA== diff --git a/extensions-contrib/thrift-extensions/example/books.json b/extensions-contrib/thrift-extensions/example/books.json deleted file mode 100644 index 6f1dc54c00b6..000000000000 --- a/extensions-contrib/thrift-extensions/example/books.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "type" : "index_hadoop", - "spec" : { - "dataSchema" : { - "dataSource" : "test", - "parser" : { - "type" : "thrift", - "jarPath" : "example/book.jar", - "thriftClass": "org.apache.druid.data.input.thrift.Book", - "protocol" : "compact", - "parseSpec" : { - "format" : "json", - "timestampSpec" : { - "column" : "date", - "format" : "auto" - }, - "flattenSpec" : { - "useFieldDiscovery" : true, - "fields" : [ { - "type" : "path", - "name" : "lastName", - "expr" : "$.author.lastName" - }, "title" ] - }, - "dimensionsSpec" : { - "dimensions" : [ "title", "lastName" ] - } - } - }, - "metricsSpec" : [ { - "type" : "count", - "name" : "count" - }, { - "type" : "doubleSum", - "name" : "cost", - "fieldName" : "price" - } ], - "granularitySpec" : { - "type" : "uniform", - "segmentGranularity" : "DAY", - "queryGranularity" : "DAY", - "intervals" : [ "2015-09-01/2015-10-01" ] - } - }, - "ioConfig" : { - "type" : "hadoop", - "inputSpec" : { - "type" : "static", - "inputFormat" : "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat", - "paths" : "example/book.seq" - } - }, - "tuningConfig" : { - "type" : "hadoop", - "partitionsSpec" : { - "type" : "hashed", - "targetPartitionSize" : 5000000 - }, - "jobProperties" : { - "tmpjars": "/user/xxx/druid/test/book.jar" - } - } - }, - "dataSource" : "test" -} diff --git a/extensions-contrib/thrift-extensions/pom.xml b/extensions-contrib/thrift-extensions/pom.xml deleted file mode 100644 index ea791715d478..000000000000 --- a/extensions-contrib/thrift-extensions/pom.xml +++ /dev/null @@ -1,206 +0,0 @@ - - - - - - org.apache.druid.extensions.contrib - druid-thrift-extensions - druid-thrift-extensions - druid-thrift-extensions - - - druid - org.apache.druid - 0.19.0-iap2-SNAPSHOT - ../../pom.xml - - 4.0.0 - - - 0.13.0 - 4.17 - 19.10.0 - - - - - org.apache.druid - druid-core - ${project.parent.version} - provided - - - org.apache.thrift - libthrift - ${thrift.version} - - - commons-logging - commons-logging - - - httpclient - org.apache.httpcomponents - - - httpcore - org.apache.httpcomponents - - - - - org.apache.druid - druid-indexing-hadoop - ${project.parent.version} - provided - - - org.apache.hadoop - hadoop-client - provided - - - com.twitter.elephantbird - elephant-bird-core - ${elephantbird.version} - - - org.apache.thrift - libthrift - - - hadoop-lzo - com.hadoop.gplcompression - - - - - com.twitter.elephantbird - elephant-bird-hadoop-compat - ${elephantbird.version} - runtime - - - org.slf4j - slf4j-api - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - com.google.guava - guava - - - com.google.inject - guice - provided - - - org.apache.hadoop - hadoop-common - provided - - - com.fasterxml.jackson.core - jackson-databind - provided - - - com.twitter - scrooge-core_2.11 - 
${scrooge.version} - test - - - junit - junit - test - - - org.hamcrest - hamcrest-core - test - - - - - - - com.twitter - scrooge-maven-plugin - ${scrooge.version} - - java - - - - thrift-test-sources - generate-test-sources - - testCompile - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 3.0.0 - - - add-thrift-test-source - generate-test-sources - - add-test-source - - - - ${project.build.directory}/generated-test-sources/thrift/scrooge - - - - - - - - - - - strict - - - - org.apache.maven.plugins - maven-compiler-plugin - - - - -Xep:MissingOverride:WARN - - - - - - - - diff --git a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftDeserialization.java b/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftDeserialization.java deleted file mode 100644 index 90b802788499..000000000000 --- a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftDeserialization.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.data.input.thrift; - -import com.google.common.base.Preconditions; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.thrift.TBase; -import org.apache.thrift.TDeserializer; -import org.apache.thrift.TException; -import org.apache.thrift.TSerializer; -import org.apache.thrift.protocol.TBinaryProtocol; -import org.apache.thrift.protocol.TCompactProtocol; -import org.apache.thrift.protocol.TJSONProtocol; -import org.apache.thrift.protocol.TProtocolFactory; -import org.apache.thrift.protocol.TProtocolUtil; -import org.apache.thrift.protocol.TSimpleJSONProtocol; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ThriftDeserialization -{ - private static final Logger log = LoggerFactory.getLogger(ThriftDeserialization.class); - - - private static final ThreadLocal<TDeserializer> DESERIALIZER_COMPACT = new ThreadLocal<TDeserializer>() - { - @Override - protected TDeserializer initialValue() - { - return new TDeserializer(new TCompactProtocol.Factory()); - } - }; - - private static final ThreadLocal<TDeserializer> DESERIALIZER_BINARY = new ThreadLocal<TDeserializer>() - { - @Override - protected TDeserializer initialValue() - { - return new TDeserializer(new TBinaryProtocol.Factory()); - } - }; - - private static final ThreadLocal<TDeserializer> DESERIALIZER_JSON = new ThreadLocal<TDeserializer>() - { - @Override - protected TDeserializer initialValue() - { - return new TDeserializer(new TJSONProtocol.Factory()); - } - }; - - public static final ThreadLocal<TSerializer> SERIALIZER_SIMPLE_JSON = new ThreadLocal<TSerializer>() - { - @Override - protected TSerializer initialValue() - { - return new TSerializer(new TSimpleJSONProtocol.Factory()); - } - }; - - - private static final byte[] EMPTY_BYTES = new byte[0]; - - public static byte[] decodeB64IfNeeded(final byte[] src) - { - Preconditions.checkNotNull(src, "src bytes cannot be null"); - if (src.length <= 0) { - return EMPTY_BYTES; - } - // A raw thrift payload ends with a stop byte (0) and raw JSON ends with '}'; any other trailing byte is assumed to mean Base64 encoding. - final byte last = src[src.length - 1]; - return (0 == last || '}' == last) ? src : StringUtils.decodeBase64(src); - } - - /** - * Deserializes a byte array into a thrift object. - * <p> - * Supports the binary, compact and JSON protocols; - * the byte array may or may not be Base64-encoded. - * - * @param bytes the byte array to deserialize - * @param thriftObj the output thrift object - * - * @return the populated thrift object; a TException is thrown if deserialization fails - */ - public static <T extends TBase> T detectAndDeserialize(final byte[] bytes, final T thriftObj) throws TException - { - Preconditions.checkNotNull(thriftObj); - try { - final byte[] src = decodeB64IfNeeded(bytes); - final TProtocolFactory protocolFactory = TProtocolUtil.guessProtocolFactory(src, null); - Preconditions.checkNotNull(protocolFactory); - if (protocolFactory instanceof TCompactProtocol.Factory) { - DESERIALIZER_COMPACT.get().deserialize(thriftObj, src); - } else if (protocolFactory instanceof TBinaryProtocol.Factory) { - DESERIALIZER_BINARY.get().deserialize(thriftObj, src); - } else { - DESERIALIZER_JSON.get().deserialize(thriftObj, src); - } - } - catch (final IllegalArgumentException e) { - throw new TException(e); - } - return thriftObj; - } -} diff --git a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java b/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java deleted file mode 100644 index bea2a913e39e..000000000000 --- a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements.  See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.  The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.  You may obtain a copy of the License at - * - *   http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.  See the License for the - * specific language governing permissions and limitations - * under the License.
diff --git a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java b/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java
deleted file mode 100644
index bea2a913e39e..000000000000
--- a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftExtensionsModule.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.data.input.thrift;
-
-import com.fasterxml.jackson.databind.Module;
-import com.fasterxml.jackson.databind.jsontype.NamedType;
-import com.fasterxml.jackson.databind.module.SimpleModule;
-import com.google.inject.Binder;
-import org.apache.druid.initialization.DruidModule;
-
-import java.util.Collections;
-import java.util.List;
-
-public class ThriftExtensionsModule implements DruidModule
-{
-
-  @Override
-  public List<? extends Module> getJacksonModules()
-  {
-    return Collections.singletonList(
-        new SimpleModule("ThriftInputRowParserModule")
-            .registerSubtypes(
-                new NamedType(ThriftInputRowParser.class, "thrift")
-            )
-    );
-  }
-
-  @Override
-  public void configure(Binder binder)
-  {
-  }
-}
diff --git a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftInputRowParser.java b/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftInputRowParser.java
deleted file mode 100644
index 8e148d207d9d..000000000000
--- a/extensions-contrib/thrift-extensions/src/main/java/org/apache/druid/data/input/thrift/ThriftInputRowParser.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.data.input.thrift;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import com.twitter.elephantbird.mapreduce.io.ThriftWritable;
-import org.apache.druid.data.input.InputRow;
-import org.apache.druid.data.input.MapBasedInputRow;
-import org.apache.druid.data.input.impl.InputRowParser;
-import org.apache.druid.data.input.impl.ParseSpec;
-import org.apache.druid.java.util.common.IAE;
-import org.apache.druid.java.util.common.parsers.Parser;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.thrift.TBase;
-import org.apache.thrift.TException;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.Map;
-
-/**
- * 1. Load the thrift class from the classpath or a provided jar.
- * 2. Deserialize the content bytes and re-serialize them to JSON.
- * 3. Delegate the remaining parsing to the JSON ParseSpec.
- */
-public class ThriftInputRowParser implements InputRowParser<Object>
-{
-  private final ParseSpec parseSpec;
-  private final String jarPath;
-  private final String thriftClassName;
-
-  private Parser<String, Object> parser;
-  private volatile Class<? extends TBase> thriftClass = null;
-  private final List<String> dimensions;
-
-  @JsonCreator
-  public ThriftInputRowParser(
-      @JsonProperty("parseSpec") ParseSpec parseSpec,
-      @JsonProperty("thriftJar") String jarPath,
-      @JsonProperty("thriftClass") String thriftClassName
-  )
-  {
-    this.jarPath = jarPath;
-    this.thriftClassName = thriftClassName;
-    Preconditions.checkNotNull(thriftClassName, "thrift class name");
-
-    this.parseSpec = parseSpec;
-    this.dimensions = parseSpec.getDimensionsSpec().getDimensionNames();
-  }
-
-  public Class<? extends TBase> getThriftClass()
-      throws IOException, ClassNotFoundException, IllegalAccessException, InstantiationException
-  {
-    final Class<? extends TBase> thrift;
-    if (jarPath != null) {
-      File jar = new File(jarPath);
-      URLClassLoader child = new URLClassLoader(
-          new URL[]{jar.toURI().toURL()},
-          this.getClass().getClassLoader()
-      );
-      thrift = (Class<? extends TBase>) Class.forName(thriftClassName, true, child);
-    } else {
-      thrift = (Class<? extends TBase>) Class.forName(thriftClassName);
-    }
-    // Fail fast if the class cannot be instantiated with a no-arg constructor.
-    thrift.newInstance();
-    return thrift;
-  }
-
-  @Override
-  public List<InputRow> parseBatch(Object input)
-  {
-    if (parser == null) {
-      // The parser is created lazily, on first use, to avoid unnecessary initialization of the underlying
-      // parseSpec.
-      parser = parseSpec.makeParser();
-    }
-
-    // The Parser is checked in phase 2 of the MapReduce job, and the thrift jar may not be present on the peon
-    // side. Placing this initialization in the constructor would throw ClassNotFoundException there.
-    try {
-      if (thriftClass == null) {
-        thriftClass = getThriftClass();
-      }
-    }
-    catch (IOException e) {
-      throw new IAE(e, "failed to load jar [%s]", jarPath);
-    }
-    catch (ClassNotFoundException e) {
-      throw new IAE(e, "class [%s] not found in jar", thriftClassName);
-    }
-    catch (InstantiationException | IllegalAccessException e) {
-      throw new IAE(e, "failed to instantiate thrift class");
-    }
-
-    final String json;
-    try {
-      if (input instanceof ByteBuffer) { // realtime stream
-        final byte[] bytes = ((ByteBuffer) input).array();
-        TBase o = thriftClass.newInstance();
-        ThriftDeserialization.detectAndDeserialize(bytes, o);
-        json = ThriftDeserialization.SERIALIZER_SIMPLE_JSON.get().toString(o);
-      } else if (input instanceof BytesWritable) { // sequence file
-        final byte[] bytes = ((BytesWritable) input).getBytes();
-        TBase o = thriftClass.newInstance();
-        ThriftDeserialization.detectAndDeserialize(bytes, o);
-        json = ThriftDeserialization.SERIALIZER_SIMPLE_JSON.get().toString(o);
-      } else if (input instanceof ThriftWritable) { // LzoBlockThrift file
-        TBase o = (TBase) ((ThriftWritable) input).get();
-        json = ThriftDeserialization.SERIALIZER_SIMPLE_JSON.get().toString(o);
-      } else {
-        throw new IAE("unsupported input class [%s]", input.getClass());
-      }
-    }
-    catch (IllegalAccessException | InstantiationException | TException e) {
-      throw new IAE(e, "failed to deserialize thrift object");
-    }
-
-    Map<String, Object> record = parser.parseToMap(json);
-    final List<String> dimensions;
-    if (!this.dimensions.isEmpty()) {
-      dimensions = this.dimensions;
-    } else {
-      dimensions = Lists.newArrayList(
-          Sets.difference(record.keySet(), parseSpec.getDimensionsSpec().getDimensionExclusions())
-      );
-    }
-    return ImmutableList.of(new MapBasedInputRow(
-        parseSpec.getTimestampSpec().extractTimestamp(record),
-        dimensions,
-        record
-    ));
-  }
-
-  @Override
-  public ParseSpec getParseSpec()
-  {
-    return parseSpec;
-  }
-
-  @Override
-  public InputRowParser withParseSpec(ParseSpec parseSpec)
-  {
-    return new ThriftInputRowParser(parseSpec, jarPath, thriftClassName);
-  }
-}
diff --git a/extensions-contrib/thrift-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/thrift-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
deleted file mode 100755
index 723da5238eaf..000000000000
--- a/extensions-contrib/thrift-extensions/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-org.apache.druid.data.input.thrift.ThriftExtensionsModule
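Because the module registers the parser under the type name "thrift", an ingestion spec selects it by that type. A minimal sketch of the parser block, using the jar path and fields from the test below (the surrounding ioConfig and tuningConfig are omitted; "format": "json" is the JSONParseSpec built in the test's setUp):

    "parser" : {
      "type" : "thrift",
      "thriftJar" : "example/book.jar",
      "thriftClass" : "org.apache.druid.data.input.thrift.Book",
      "parseSpec" : {
        "format" : "json",
        "timestampSpec" : { "column" : "date", "format" : "auto" },
        "dimensionsSpec" : { "dimensions" : ["title", "lastName"] }
      }
    }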
diff --git a/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java b/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java
deleted file mode 100644
index c6d9c975f1d8..000000000000
--- a/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.data.input.thrift;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import org.apache.druid.data.input.InputRow;
-import org.apache.druid.data.input.impl.DimensionsSpec;
-import org.apache.druid.data.input.impl.JSONParseSpec;
-import org.apache.druid.data.input.impl.JavaScriptParseSpec;
-import org.apache.druid.data.input.impl.ParseSpec;
-import org.apache.druid.data.input.impl.StringDimensionSchema;
-import org.apache.druid.data.input.impl.TimestampSpec;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
-import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
-import org.apache.druid.java.util.common.parsers.JSONPathSpec;
-import org.apache.druid.js.JavaScriptConfig;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.thrift.TSerializer;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TJSONProtocol;
-import org.hamcrest.CoreMatchers;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.nio.ByteBuffer;
-
-public class ThriftInputRowParserTest
-{
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
-  private ParseSpec parseSpec;
-
-  @Before
-  public void setUp()
-  {
-    parseSpec = new JSONParseSpec(
-        new TimestampSpec("date", "auto", null),
-        new DimensionsSpec(
-            Lists.newArrayList(
-                new StringDimensionSchema("title"),
-                new StringDimensionSchema("lastName")
-            ),
-            null,
-            null
-        ),
-        new JSONPathSpec(
-            true,
-            Lists.newArrayList(
-                new JSONPathFieldSpec(JSONPathFieldType.ROOT, "title", "title"),
-                new JSONPathFieldSpec(JSONPathFieldType.PATH, "lastName", "$.author.lastName")
-            )
-        ),
-        null,
-        null
-    );
-  }
-
-  @Test
-  public void testGetThriftClass() throws Exception
-  {
-    ThriftInputRowParser parser1 = new ThriftInputRowParser(
-        parseSpec,
-        "example/book.jar",
-        "org.apache.druid.data.input.thrift.Book"
-    );
-    Assert.assertEquals("org.apache.druid.data.input.thrift.Book", parser1.getThriftClass().getName());
-
-    ThriftInputRowParser parser2 = new ThriftInputRowParser(parseSpec, null, "org.apache.druid.data.input.thrift.Book");
-    Assert.assertEquals("org.apache.druid.data.input.thrift.Book", parser2.getThriftClass().getName());
-  }
-
-  @Test
-  public void testParse() throws Exception
-  {
-    ThriftInputRowParser parser = new ThriftInputRowParser(
-        parseSpec,
-        "example/book.jar",
-        "org.apache.druid.data.input.thrift.Book"
-    );
-    Book book = new Book().setDate("2016-08-29").setPrice(19.9).setTitle("title")
-                          .setAuthor(new Author().setFirstName("first").setLastName("last"));
-
-    TSerializer serializer;
-    byte[] bytes;
-
-    // 1. compact
-    serializer = new TSerializer(new TCompactProtocol.Factory());
-    bytes = serializer.serialize(book);
-    serializationAndTest(parser, bytes);
-
-    // 2. binary + base64
-    serializer = new TSerializer(new TBinaryProtocol.Factory());
-    serializationAndTest(parser, StringUtils.encodeBase64(serializer.serialize(book)));
-
-    // 3.
json - serializer = new TSerializer(new TJSONProtocol.Factory()); - bytes = serializer.serialize(book); - serializationAndTest(parser, bytes); - } - - @Test - public void testDisableJavaScript() - { - final JavaScriptParseSpec parseSpec = new JavaScriptParseSpec( - new TimestampSpec("timestamp", "auto", null), - new DimensionsSpec( - DimensionsSpec.getDefaultSchemas( - ImmutableList.of( - "dim1", - "dim2" - ) - ), - null, - null - ), - "func", - new JavaScriptConfig(false) - ); - ThriftInputRowParser parser = new ThriftInputRowParser( - parseSpec, - "example/book.jar", - "org.apache.druid.data.input.thrift.Book" - ); - - expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class)); - expectedException.expectMessage("JavaScript is disabled"); - - //noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception) - parser.parseBatch(ByteBuffer.allocate(1)).get(0); - } - - private void serializationAndTest(ThriftInputRowParser parser, byte[] bytes) - { - ByteBuffer buffer = ByteBuffer.wrap(bytes); - - InputRow row1 = parser.parseBatch(buffer).get(0); - Assert.assertEquals("title", row1.getDimension("title").get(0)); - - InputRow row2 = parser.parseBatch(new BytesWritable(bytes)).get(0); - Assert.assertEquals("last", row2.getDimension("lastName").get(0)); - } -} diff --git a/extensions-contrib/thrift-extensions/src/test/thrift/book.thrift b/extensions-contrib/thrift-extensions/src/test/thrift/book.thrift deleted file mode 100644 index 735da87a1733..000000000000 --- a/extensions-contrib/thrift-extensions/src/test/thrift/book.thrift +++ /dev/null @@ -1,28 +0,0 @@ -namespace java org.apache.druid.data.input.thrift - -// Licensed to the Apache Software Foundation (ASF) under one or more -// contributor license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright ownership. -// The ASF licenses this file to You under the Apache License, Version 2.0 -// (the "License"); you may not use this file except in compliance with -// the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-
-struct Author {
-  1: string firstName;
-  2: string lastName;
-}
-
-struct Book {
-  1: string date;
-  2: double price;
-  3: string title;
-  4: Author author;
-}
diff --git a/extensions-contrib/time-min-max/pom.xml b/extensions-contrib/time-min-max/pom.xml
deleted file mode 100644
index c1e08716c354..000000000000
--- a/extensions-contrib/time-min-max/pom.xml
+++ /dev/null
@@ -1,113 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements. See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership. The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License. You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied. See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>druid</artifactId>
-    <groupId>org.apache.druid</groupId>
-    <version>0.19.0-iap2-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-
-  <groupId>org.apache.druid.extensions.contrib</groupId>
-  <artifactId>druid-time-min-max</artifactId>
-  <name>druid-time-min-max</name>
-  <description>Min/Max of timestamp</description>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.findbugs</groupId>
-      <artifactId>jsr305</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>joda-time</groupId>
-      <artifactId>joda-time</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>it.unimi.dsi</groupId>
-      <artifactId>fastutil</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-server</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregator.java b/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregator.java
deleted file mode 100644
index c11b50dcece5..000000000000
--- a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregator.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.apache.druid.query.aggregation; - -import org.apache.druid.data.input.impl.TimestampSpec; -import org.apache.druid.segment.BaseObjectColumnValueSelector; - -import java.util.Comparator; - -public class TimestampAggregator implements Aggregator -{ - static final Comparator COMPARATOR = Comparator.comparingLong(n -> ((Number) n).longValue()); - - static Object combineValues(Comparator comparator, Object lhs, Object rhs) - { - if (comparator.compare(((Number) lhs).longValue(), ((Number) rhs).longValue()) > 0) { - return lhs; - } else { - return rhs; - } - } - - private final BaseObjectColumnValueSelector selector; - private final String name; - private final TimestampSpec timestampSpec; - private final Comparator comparator; - private final Long initValue; - - private long most; - - public TimestampAggregator( - String name, - BaseObjectColumnValueSelector selector, - TimestampSpec timestampSpec, - Comparator comparator, - Long initValue - ) - { - this.name = name; - this.selector = selector; - this.timestampSpec = timestampSpec; - this.comparator = comparator; - this.initValue = initValue; - - most = this.initValue; - } - - @Override - public void aggregate() - { - Long value = TimestampAggregatorFactory.convertLong(timestampSpec, selector.getObject()); - - if (value != null) { - most = comparator.compare(most, value) > 0 ? most : value; - } - } - - @Override - public Object get() - { - return most; - } - - @Override - public float getFloat() - { - return (float) most; - } - - @Override - public double getDouble() - { - return (double) most; - } - - @Override - public long getLong() - { - return most; - } - - @Override - public void close() - { - // no resource to cleanup - } -} diff --git a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregatorFactory.java deleted file mode 100644 index b161f3449e34..000000000000 --- a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampAggregatorFactory.java +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.apache.druid.query.aggregation;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.druid.data.input.impl.TimestampSpec;
-import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.segment.ColumnSelectorFactory;
-import org.apache.druid.segment.ColumnValueSelector;
-import org.joda.time.DateTime;
-
-import javax.annotation.Nullable;
-import java.nio.ByteBuffer;
-import java.sql.Timestamp;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Objects;
-
-public class TimestampAggregatorFactory extends AggregatorFactory
-{
-  final String name;
-  final String fieldName;
-  final String timeFormat;
-  private final Comparator comparator;
-  private final Long initValue;
-
-  private TimestampSpec timestampSpec;
-
-  TimestampAggregatorFactory(
-      String name,
-      String fieldName,
-      String timeFormat,
-      Comparator comparator,
-      Long initValue
-  )
-  {
-    this.name = name;
-    this.fieldName = fieldName;
-    this.timeFormat = timeFormat;
-    this.comparator = comparator;
-    this.initValue = initValue;
-
-    this.timestampSpec = new TimestampSpec(fieldName, timeFormat, null);
-  }
-
-  @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
-  {
-    return new TimestampAggregator(name, metricFactory.makeColumnValueSelector(fieldName), timestampSpec, comparator, initValue);
-  }
-
-  @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
-  {
-    return new TimestampBufferAggregator(metricFactory.makeColumnValueSelector(fieldName), timestampSpec, comparator, initValue);
-  }
-
-  @Override
-  public Comparator getComparator()
-  {
-    return TimestampAggregator.COMPARATOR;
-  }
-
-  @Override
-  public Object combine(Object lhs, Object rhs)
-  {
-    return TimestampAggregator.combineValues(comparator, lhs, rhs);
-  }
-
-  @Override
-  public AggregateCombiner makeAggregateCombiner()
-  {
-    // TimestampAggregatorFactory.combine() delegates to TimestampAggregator.combineValues(), which doesn't check
-    // for nulls, so this AggregateCombiner doesn't either.
- return new LongAggregateCombiner() - { - private long result; - - @Override - public void reset(ColumnValueSelector selector) - { - result = getTimestamp(selector); - } - - private long getTimestamp(ColumnValueSelector selector) - { - if (Long.class.equals(selector.classOfObject())) { - return selector.getLong(); - } else { - Object input = selector.getObject(); - return convertLong(timestampSpec, input); - } - } - - @Override - public void fold(ColumnValueSelector selector) - { - long other = getTimestamp(selector); - if (comparator.compare(result, other) <= 0) { - result = other; - } - } - - @Override - public long getLong() - { - return result; - } - }; - } - - @Override - public AggregatorFactory getCombiningFactory() - { - return new TimestampAggregatorFactory(name, name, timeFormat, comparator, initValue); - } - - @Override - public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException - { - if (other.getName().equals(this.getName()) && this.getClass() == other.getClass()) { - return getCombiningFactory(); - } else { - throw new AggregatorFactoryNotMergeableException(this, other); - } - } - - @Override - public List getRequiredColumns() - { - return Collections.singletonList( - new TimestampAggregatorFactory(fieldName, fieldName, timeFormat, comparator, initValue) - ); - } - - @Override - public Object deserialize(Object object) - { - return object; - } - - @Nullable - @Override - public Object finalizeComputation(@Nullable Object object) - { - return object == null ? null : DateTimes.utc((long) object); - } - - @Override - @JsonProperty - public String getName() - { - return name; - } - - @JsonProperty - public String getFieldName() - { - return fieldName; - } - - @JsonProperty - public String getTimeFormat() - { - return timeFormat; - } - - @Override - public List requiredFields() - { - return Collections.singletonList(fieldName); - } - - @Override - public byte[] getCacheKey() - { - byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - - return ByteBuffer.allocate(1 + fieldNameBytes.length) - .put(AggregatorUtil.TIMESTAMP_CACHE_TYPE_ID).put(fieldNameBytes).array(); - } - - @Override - public String getTypeName() - { - return "long"; - } - - @Override - public int getMaxIntermediateSize() - { - return Long.BYTES; - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - TimestampAggregatorFactory that = (TimestampAggregatorFactory) o; - - if (!Objects.equals(fieldName, that.fieldName)) { - return false; - } - if (!Objects.equals(name, that.name)) { - return false; - } - if (!Objects.equals(comparator, that.comparator)) { - return false; - } - if (!Objects.equals(initValue, that.initValue)) { - return false; - } - - return true; - } - - @Override - public int hashCode() - { - int result = fieldName != null ? fieldName.hashCode() : 0; - result = 31 * result + (name != null ? name.hashCode() : 0); - result = 31 * result + (comparator != null ? comparator.hashCode() : 0); - result = 31 * result + (initValue != null ? 
initValue.hashCode() : 0); - return result; - } - - @Nullable - static Long convertLong(TimestampSpec timestampSpec, Object input) - { - if (input instanceof Number) { - return ((Number) input).longValue(); - } else if (input instanceof DateTime) { - return ((DateTime) input).getMillis(); - } else if (input instanceof Timestamp) { - return ((Timestamp) input).getTime(); - } else if (input instanceof String) { - return timestampSpec.parseDateTime(input).getMillis(); - } - - return null; - } -} diff --git a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampBufferAggregator.java b/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampBufferAggregator.java deleted file mode 100644 index 196f434720f4..000000000000 --- a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampBufferAggregator.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation; - -import org.apache.druid.data.input.impl.TimestampSpec; -import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import org.apache.druid.segment.BaseObjectColumnValueSelector; - -import java.nio.ByteBuffer; -import java.util.Comparator; - -public class TimestampBufferAggregator implements BufferAggregator -{ - private final BaseObjectColumnValueSelector selector; - private final TimestampSpec timestampSpec; - private final Comparator comparator; - private final Long initValue; - - public TimestampBufferAggregator( - BaseObjectColumnValueSelector selector, - TimestampSpec timestampSpec, - Comparator comparator, - Long initValue) - { - this.selector = selector; - this.timestampSpec = timestampSpec; - this.comparator = comparator; - this.initValue = initValue; - } - - @Override - public void init(ByteBuffer buf, int position) - { - buf.putLong(position, initValue); - } - - @Override - public void aggregate(ByteBuffer buf, int position) - { - Long newTime = TimestampAggregatorFactory.convertLong(timestampSpec, selector.getObject()); - if (newTime != null) { - long prev = buf.getLong(position); - buf.putLong(position, comparator.compare(prev, newTime) > 0 ? 
prev : newTime); - } - } - - @Override - public Object get(ByteBuffer buf, int position) - { - return buf.getLong(position); - } - - @Override - public float getFloat(ByteBuffer buf, int position) - { - return (float) buf.getLong(position); - } - - @Override - public long getLong(ByteBuffer buf, int position) - { - return buf.getLong(position); - } - - @Override - public double getDouble(ByteBuffer buf, int position) - { - return (double) buf.getLong(position); - } - - @Override - public void close() - { - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("selector", selector); - inspector.visit("comparator", comparator); - } -} diff --git a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMaxAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMaxAggregatorFactory.java deleted file mode 100644 index 4514c28c6328..000000000000 --- a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMaxAggregatorFactory.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.collect.Ordering; - -public class TimestampMaxAggregatorFactory extends TimestampAggregatorFactory -{ - @JsonCreator - public TimestampMaxAggregatorFactory( - @JsonProperty("name") String name, - @JsonProperty("fieldName") String fieldName, - @JsonProperty("timeFormat") String timeFormat - ) - { - super(name, fieldName, timeFormat, Ordering.natural(), Long.MIN_VALUE); - Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); - Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); - } - - @Override - public String toString() - { - return "TimestampMaxAggregatorFactory{" + - "fieldName='" + fieldName + '\'' + - ", name='" + name + '\'' + - '}'; - } -} diff --git a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinAggregatorFactory.java deleted file mode 100644 index 6e4a0ead0485..000000000000 --- a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinAggregatorFactory.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.query.aggregation; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Preconditions; -import com.google.common.collect.Ordering; - -public class TimestampMinAggregatorFactory extends TimestampAggregatorFactory -{ - @JsonCreator - public TimestampMinAggregatorFactory( - @JsonProperty("name") String name, - @JsonProperty("fieldName") String fieldName, - @JsonProperty("timeFormat") String timeFormat - ) - { - super(name, fieldName, timeFormat, Ordering.natural().reverse(), Long.MAX_VALUE); - Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); - Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); - } - - @Override - public String toString() - { - return "TimestampMinAggregatorFactory{" + - "fieldName='" + fieldName + '\'' + - ", name='" + name + '\'' + - '}'; - } -} diff --git a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinMaxModule.java b/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinMaxModule.java deleted file mode 100644 index edb6f06a0503..000000000000 --- a/extensions-contrib/time-min-max/src/main/java/org/apache/druid/query/aggregation/TimestampMinMaxModule.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.Collections; -import java.util.List; - -public class TimestampMinMaxModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return Collections.singletonList( - new SimpleModule("TimestampMinMaxModule") - .registerSubtypes( - new NamedType(TimestampMaxAggregatorFactory.class, "timeMax"), - new NamedType(TimestampMinAggregatorFactory.class, "timeMin") - ) - ); - } - - @Override - public void configure(Binder binder) - { - - } -} diff --git a/extensions-contrib/time-min-max/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/time-min-max/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule deleted file mode 100644 index 07037ee63c55..000000000000 --- a/extensions-contrib/time-min-max/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -org.apache.druid.query.aggregation.TimestampMinMaxModule diff --git a/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java b/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java deleted file mode 100644 index 72eed0dadb7f..000000000000 --- a/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import org.apache.druid.java.util.common.DateTimes; -import org.apache.druid.java.util.common.granularity.Granularities; -import org.apache.druid.java.util.common.guava.Sequence; -import org.apache.druid.query.groupby.GroupByQuery; -import org.apache.druid.query.groupby.GroupByQueryConfig; -import org.apache.druid.query.groupby.GroupByQueryRunnerTest; -import org.apache.druid.query.groupby.ResultRow; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.io.File; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.List; -import java.util.zip.ZipFile; - -@RunWith(Parameterized.class) -public class TimestampGroupByAggregationTest -{ - private AggregationTestHelper helper; - - @Rule - public final TemporaryFolder temporaryFolder = new TemporaryFolder(); - - private ColumnSelectorFactory selectorFactory; - private TestObjectColumnSelector selector; - - private Timestamp[] values = new Timestamp[10]; - - @Parameterized.Parameters(name = "{index}: Test for {0}, config = {1}") - public static Iterable constructorFeeder() - { - final List constructors = new ArrayList<>(); - - final List> partialConstructors = ImmutableList.of( - ImmutableList.of("timeMin", "tmin", "time_min", DateTimes.of("2011-01-12T01:00:00.000Z")), - ImmutableList.of("timeMax", "tmax", "time_max", DateTimes.of("2011-01-31T01:00:00.000Z")) - ); - - for (final List partialConstructor : partialConstructors) { - for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { - final List constructor = Lists.newArrayList(partialConstructor); - constructor.add(config); - constructors.add(constructor.toArray()); - } - } - - return constructors; - } - - private final String aggType; - private final String aggField; - private final String groupByField; - private final DateTime expected; - private final GroupByQueryConfig config; - - public TimestampGroupByAggregationTest( - String aggType, - String aggField, - String groupByField, - DateTime expected, - GroupByQueryConfig config - ) - { - this.aggType = aggType; - this.aggField = aggField; - this.groupByField = groupByField; - this.expected = expected; - this.config = config; - } - - @Before - public void setup() - { - helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( - new TimestampMinMaxModule().getJacksonModules(), - config, - temporaryFolder - ); - - selector = new TestObjectColumnSelector<>(values); - selectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); - EasyMock.expect(selectorFactory.makeColumnValueSelector("test")).andReturn(selector); - EasyMock.replay(selectorFactory); - } - - @After - public void teardown() throws IOException - { - helper.close(); - } - - @Test - public void testSimpleDataIngestionAndGroupByTest() throws Exception - { - String recordParser = "{\n" + - " \"type\": \"string\",\n" + - " \"parseSpec\": {\n" + - " \"format\": \"tsv\",\n" + - " \"timestampSpec\": {\n" + - " \"column\": \"timestamp\",\n" + - " \"format\": \"auto\"\n" + - " },\n" + - " \"dimensionsSpec\": {\n" + - " \"dimensions\": [\n" + - " 
\"product\"\n" + - " ],\n" + - " \"dimensionExclusions\": [],\n" + - " \"spatialDimensions\": []\n" + - " },\n" + - " \"columns\": [\n" + - " \"timestamp\",\n" + - " \"cat\",\n" + - " \"product\",\n" + - " \"prefer\",\n" + - " \"prefer2\",\n" + - " \"pty_country\"\n" + - " ]\n" + - " }\n" + - "}"; - String aggregator = "[\n" + - " {\n" + - " \"type\": \"" + aggType + "\",\n" + - " \"name\": \"" + aggField + "\",\n" + - " \"fieldName\": \"timestamp\"\n" + - " }\n" + - "]"; - String groupBy = "{\n" + - " \"queryType\": \"groupBy\",\n" + - " \"dataSource\": \"test_datasource\",\n" + - " \"granularity\": \"MONTH\",\n" + - " \"dimensions\": [\"product\"],\n" + - " \"aggregations\": [\n" + - " {\n" + - " \"type\": \"" + aggType + "\",\n" + - " \"name\": \"" + groupByField + "\",\n" + - " \"fieldName\": \"" + aggField + "\"\n" + - " }\n" + - " ],\n" + - " \"intervals\": [\n" + - " \"2011-01-01T00:00:00.000Z/2011-05-01T00:00:00.000Z\"\n" + - " ]\n" + - "}"; - ZipFile zip = new ZipFile(new File(this.getClass().getClassLoader().getResource("druid.sample.tsv.zip").toURI())); - Sequence seq = helper.createIndexAndRunQueryOnSegment( - zip.getInputStream(zip.getEntry("druid.sample.tsv")), - recordParser, - aggregator, - 0, - Granularities.MONTH, - 100, - groupBy - ); - - int groupByFieldNumber = ((GroupByQuery) helper.readQuery(groupBy)).getResultRowSignature() - .indexOf(groupByField); - - List results = seq.toList(); - Assert.assertEquals(36, results.size()); - Assert.assertEquals(expected, results.get(0).get(groupByFieldNumber)); - } -} diff --git a/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampMinMaxAggregatorTest.java b/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampMinMaxAggregatorTest.java deleted file mode 100644 index 4aa3476098a8..000000000000 --- a/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampMinMaxAggregatorTest.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.query.aggregation; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import com.google.inject.Binder; -import com.google.inject.Injector; -import com.google.inject.Module; -import com.google.inject.name.Names; -import org.apache.druid.guice.GuiceInjectors; -import org.apache.druid.initialization.Initialization; -import org.apache.druid.segment.ColumnSelectorFactory; -import org.easymock.EasyMock; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import javax.annotation.Nullable; -import java.nio.ByteBuffer; -import java.sql.Timestamp; -import java.util.List; - -@RunWith(Parameterized.class) -public class TimestampMinMaxAggregatorTest -{ - Injector injector; - ObjectMapper mapper; - - private TimestampAggregatorFactory aggregatorFactory; - private ColumnSelectorFactory selectorFactory; - private TestObjectColumnSelector selector; - - private Timestamp[] values = { - Timestamp.valueOf("2014-01-02 11:00:00"), - Timestamp.valueOf("2014-01-02 01:00:00"), - Timestamp.valueOf("2014-01-02 05:00:00"), - Timestamp.valueOf("2014-01-02 12:00:00"), - Timestamp.valueOf("2014-01-02 12:00:00"), - Timestamp.valueOf("2014-01-02 13:00:00"), - Timestamp.valueOf("2014-01-02 06:00:00"), - Timestamp.valueOf("2014-01-02 17:00:00"), - Timestamp.valueOf("2014-01-02 12:00:00"), - Timestamp.valueOf("2014-01-02 02:00:00") - }; - - @Parameterized.Parameters(name = "{index}: Test for {0}") - public static Iterable constructorFeeder() - { - return Iterables.transform( - ImmutableList.of( - ImmutableList.of("timeMin", TimestampMinAggregatorFactory.class, Long.MAX_VALUE, Timestamp.valueOf("2014-01-02 01:00:00")), - ImmutableList.of("timeMax", TimestampMaxAggregatorFactory.class, Long.MIN_VALUE, Timestamp.valueOf("2014-01-02 17:00:00")) - ), - new Function, Object[]>() - { - @Nullable - @Override - public Object[] apply(List input) - { - return input.toArray(); - } - } - ); - } - - private String aggType; - private Class aggClass; - private Long initValue; - private Timestamp expected; - - public TimestampMinMaxAggregatorTest(String aggType, Class aggClass, Long initValue, Timestamp expected) - { - this.aggType = aggType; - this.aggClass = aggClass; - this.expected = expected; - this.initValue = initValue; - } - - @Before - public void setup() throws Exception - { - injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), - ImmutableList.of( - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); - binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - } - }, - new TimestampMinMaxModule() - ) - ); - mapper = injector.getInstance(ObjectMapper.class); - - String json = "{\"type\":\"" + aggType + "\",\"name\":\"" + aggType + "\",\"fieldName\":\"test\"}"; - - aggregatorFactory = mapper.readValue(json, aggClass); - selector = new TestObjectColumnSelector<>(values); - selectorFactory = EasyMock.createMock(ColumnSelectorFactory.class); - EasyMock.expect(selectorFactory.makeColumnValueSelector("test")).andReturn(selector); - EasyMock.replay(selectorFactory); - } - - @Test - public void testAggregator() - { - TimestampAggregator aggregator 
= (TimestampAggregator) aggregatorFactory.factorize(selectorFactory);
-
-    Assert.assertEquals(initValue, aggregator.get());
-
-    for (Timestamp value : values) {
-      aggregate(selector, aggregator);
-    }
-
-    Assert.assertEquals(expected, new Timestamp(aggregator.getLong()));
-  }
-
-  @Test
-  public void testBufferAggregator()
-  {
-    TimestampBufferAggregator aggregator = (TimestampBufferAggregator) aggregatorFactory.factorizeBuffered(selectorFactory);
-
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES]);
-    aggregator.init(buffer, 0);
-
-    for (Timestamp value : values) {
-      aggregate(selector, aggregator, buffer, 0);
-    }
-
-    Assert.assertEquals(expected, new Timestamp(aggregator.getLong(buffer, 0)));
-
-    aggregator.init(buffer, 0);
-
-    Assert.assertEquals(initValue, aggregator.get(buffer, 0));
-  }
-
-  private void aggregate(TestObjectColumnSelector<Timestamp> selector, TimestampAggregator agg)
-  {
-    agg.aggregate();
-    selector.increment();
-  }
-
-  private void aggregate(TestObjectColumnSelector<Timestamp> selector, TimestampBufferAggregator agg, ByteBuffer buf, int pos)
-  {
-    agg.aggregate(buf, pos);
-    selector.increment();
-  }
-}
diff --git a/extensions-contrib/time-min-max/src/test/resources/druid.sample.tsv.zip b/extensions-contrib/time-min-max/src/test/resources/druid.sample.tsv.zip
deleted file mode 100644
index 36fa4671c7c7fa045a8422d8410640e806f29649..0000000000000000000000000000000000000000
Binary files a/extensions-contrib/time-min-max/src/test/resources/druid.sample.tsv.zip and /dev/null differ
diff --git a/extensions-contrib/virtual-columns/pom.xml b/extensions-contrib/virtual-columns/pom.xml
deleted file mode 100644
index d3811c1a8d3f..000000000000
--- a/extensions-contrib/virtual-columns/pom.xml
+++ /dev/null
@@ -1,107 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements. See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership. The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License. You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied. See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <groupId>org.apache.druid.extensions.contrib</groupId>
-  <artifactId>druid-virtual-columns</artifactId>
-  <name>druid-virtual-columns</name>
-  <description>druid-virtual-columns</description>
-
-  <parent>
-    <groupId>org.apache.druid</groupId>
-    <artifactId>druid</artifactId>
-    <version>0.19.0-iap2-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.findbugs</groupId>
-      <artifactId>jsr305</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>joda-time</groupId>
-      <artifactId>joda-time</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-core</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.druid</groupId>
-      <artifactId>druid-processing</artifactId>
-      <version>${project.parent.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-</project>
- */ - -package org.apache.druid.segment; - -import com.fasterxml.jackson.databind.Module; -import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.common.collect.ImmutableList; -import com.google.inject.Binder; -import org.apache.druid.initialization.DruidModule; - -import java.util.List; - -/** - */ -public class DruidVirtualColumnsModule implements DruidModule -{ - @Override - public List getJacksonModules() - { - return ImmutableList.of( - new SimpleModule(getClass().getSimpleName()) - .registerSubtypes( - new NamedType(MapVirtualColumn.class, "map") - ) - ); - } - - @Override - public void configure(Binder binder) - { - } -} diff --git a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java b/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java deleted file mode 100644 index 4fc361f33253..000000000000 --- a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.segment; - -import com.google.common.base.Predicate; -import org.apache.druid.query.filter.ValueMatcher; -import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import org.apache.druid.segment.data.IndexedInts; - -import javax.annotation.Nullable; -import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -/** - * {@link DimensionSelector} for {@link Map} type {@link MapVirtualColumn}. This dimensionSelector only supports - * {@link #getObject()} currently. 
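For context: the module above is what let a native query spec declare a virtual column with "type": "map". A minimal sketch of that Jackson round-trip, assuming the deleted classes are still on a classpath somewhere; the bare ObjectMapper stands in for Druid's injected mapper and is not how Druid wires this up in production, it only illustrates the subtype registration:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import org.apache.druid.segment.MapVirtualColumn;
import org.apache.druid.segment.VirtualColumn;

public class MapVirtualColumnJsonDemo
{
  public static void main(String[] args) throws Exception
  {
    // Register the subtype exactly as DruidVirtualColumnsModule does above.
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerSubtypes(new NamedType(MapVirtualColumn.class, "map"));

    // The JSON a query would carry in its "virtualColumns" array.
    String json = "{\"type\": \"map\", \"keyDimension\": \"keys\", "
                  + "\"valueDimension\": \"values\", \"outputName\": \"params\"}";

    // Resolution by the "type" property relies on VirtualColumn's Jackson
    // type annotations, which is how Druid resolves named subtypes.
    VirtualColumn column = mapper.readValue(json, VirtualColumn.class);
    System.out.println(column); // MapVirtualColumn{keyDimension='keys', ...}
  }
}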
diff --git a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java b/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java
deleted file mode 100644
index 4fc361f33253..000000000000
--- a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapTypeMapVirtualColumnDimensionSelector.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import com.google.common.base.Predicate;
-import org.apache.druid.query.filter.ValueMatcher;
-import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import org.apache.druid.segment.data.IndexedInts;
-
-import javax.annotation.Nullable;
-import java.util.Map;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-
-/**
- * {@link DimensionSelector} for {@link Map} type {@link MapVirtualColumn}. This dimension selector currently
- * supports only {@link #getObject()}.
- */
-final class MapTypeMapVirtualColumnDimensionSelector extends MapVirtualColumnDimensionSelector
-{
-  MapTypeMapVirtualColumnDimensionSelector(
-      DimensionSelector keySelector,
-      DimensionSelector valueSelector
-  )
-  {
-    super(keySelector, valueSelector);
-  }
-
-  @Override
-  public IndexedInts getRow()
-  {
-    throw new UnsupportedOperationException("Map column doesn't support getRow()");
-  }
-
-  @Override
-  public ValueMatcher makeValueMatcher(@Nullable String value)
-  {
-    return new ValueMatcher()
-    {
-      @Override
-      public boolean matches()
-      {
-        // Map column doesn't match with any string
-        return false;
-      }
-
-      @Override
-      public void inspectRuntimeShape(RuntimeShapeInspector inspector)
-      {
-      }
-    };
-  }
-
-  @Override
-  public ValueMatcher makeValueMatcher(Predicate<String> predicate)
-  {
-    return new ValueMatcher()
-    {
-      @Override
-      public boolean matches()
-      {
-        return false;
-      }
-
-      @Override
-      public void inspectRuntimeShape(RuntimeShapeInspector inspector)
-      {
-      }
-    };
-  }
-
-  @Override
-  public int getValueCardinality()
-  {
-    return CARDINALITY_UNKNOWN;
-  }
-
-  @Nullable
-  @Override
-  public String lookupName(int id)
-  {
-    throw new UnsupportedOperationException("Map column doesn't support lookupName()");
-  }
-
-  @Override
-  public boolean nameLookupPossibleInAdvance()
-  {
-    return false;
-  }
-
-  @Nullable
-  @Override
-  public IdLookup idLookup()
-  {
-    throw new UnsupportedOperationException("Map column doesn't support idLookup()");
-  }
-
-  @Override
-  public Object getObject()
-  {
-    final DimensionSelector keySelector = getKeySelector();
-    final DimensionSelector valueSelector = getValueSelector();
-
-    final IndexedInts keyIndices = keySelector.getRow();
-    final IndexedInts valueIndices = valueSelector.getRow();
-
-    final int limit = Math.min(keyIndices.size(), valueIndices.size());
-    return IntStream
-        .range(0, limit)
-        .boxed()
-        .collect(
-            Collectors.toMap(
-                i -> keySelector.lookupName(keyIndices.get(i)),
-                i -> valueSelector.lookupName(valueIndices.get(i))
-            )
-        );
-  }
-
-  @Override
-  public Class classOfObject()
-  {
-    return Map.class;
-  }
-}
diff --git a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumn.java b/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumn.java
deleted file mode 100644
index 7451398baa1e..000000000000
--- a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumn.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.query.dimension.DefaultDimensionSpec;
-import org.apache.druid.query.dimension.DimensionSpec;
-import org.apache.druid.query.filter.DimFilterUtils;
-import org.apache.druid.segment.column.ColumnCapabilities;
-import org.apache.druid.segment.column.ColumnCapabilitiesImpl;
-import org.apache.druid.segment.column.ValueType;
-import org.apache.druid.segment.data.IndexedInts;
-import org.apache.druid.segment.virtual.VirtualColumnCacheHelper;
-
-import javax.annotation.Nullable;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- */
-public class MapVirtualColumn implements VirtualColumn
-{
-  private final String outputName;
-  private final String keyDimension;
-  private final String valueDimension;
-
-  @JsonCreator
-  public MapVirtualColumn(
-      @JsonProperty("keyDimension") String keyDimension,
-      @JsonProperty("valueDimension") String valueDimension,
-      @JsonProperty("outputName") String outputName
-  )
-  {
-    Preconditions.checkArgument(keyDimension != null, "key dimension should not be null");
-    Preconditions.checkArgument(valueDimension != null, "value dimension should not be null");
-    Preconditions.checkArgument(outputName != null, "output name should not be null");
-
-    this.keyDimension = keyDimension;
-    this.valueDimension = valueDimension;
-    this.outputName = outputName;
-  }
-
-  @Override
-  public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec, ColumnSelectorFactory factory)
-  {
-    final DimensionSelector keySelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(keyDimension));
-    final DimensionSelector valueSelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(valueDimension));
-    final String subColumnName = VirtualColumns.splitColumnName(dimensionSpec.getDimension()).rhs;
-    if (subColumnName == null) {
-      return dimensionSpec.decorate(new MapTypeMapVirtualColumnDimensionSelector(keySelector, valueSelector));
-    } else {
-      return dimensionSpec.decorate(
-          new StringTypeMapVirtualColumnDimensionSelector(keySelector, valueSelector, subColumnName)
-      );
-    }
-  }
-
-  @Override
-  public ColumnValueSelector makeColumnValueSelector(String columnName, ColumnSelectorFactory factory)
-  {
-    final DimensionSelector keySelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(keyDimension));
-    final DimensionSelector valueSelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(valueDimension));
-
-    final String subColumnName = VirtualColumns.splitColumnName(columnName).rhs;
-
-    if (subColumnName == null) {
-      return new MapVirtualColumnValueSelector(keySelector, valueSelector)
-      {
-        @Override
-        public Class classOfObject()
-        {
-          return Map.class;
-        }
-
-        @Override
-        public Map getObject()
-        {
-          final IndexedInts keyIndices = keySelector.getRow();
-          final IndexedInts valueIndices = valueSelector.getRow();
-          final int limit = Math.min(keyIndices.size(), valueIndices.size());
-          final Map<String, String> map = Maps.newHashMapWithExpectedSize(limit);
-          for (int i = 0; i < limit; i++) {
-            map.put(
-                keySelector.lookupName(keyIndices.get(i)),
-                valueSelector.lookupName(valueIndices.get(i))
-            );
-          }
-          return map;
-        }
-      };
-    }
-
-    IdLookup keyIdLookup = keySelector.idLookup();
-    if (keyIdLookup != null) {
-      final int keyId = keyIdLookup.lookupId(subColumnName);
-      if (keyId < 0) {
-        return NilColumnValueSelector.instance();
-      }
-      return new MapVirtualColumnValueSelector(keySelector, valueSelector)
-      {
-        @Override
-        public Class classOfObject()
-        {
-          return String.class;
-        }
-
-        @Nullable
-        @Override
-        public String getObject()
-        {
-          final IndexedInts keyIndices = keySelector.getRow();
-          final IndexedInts valueIndices = valueSelector.getRow();
-          final int limit = Math.min(keyIndices.size(), valueIndices.size());
-          for (int i = 0; i < limit; i++) {
-            if (keyIndices.get(i) == keyId) {
-              return valueSelector.lookupName(valueIndices.get(i));
-            }
-          }
-          return null;
-        }
-      };
-    } else {
-      return new MapVirtualColumnValueSelector(keySelector, valueSelector)
-      {
-        @Override
-        public Class classOfObject()
-        {
-          return String.class;
-        }
-
-        @Nullable
-        @Override
-        public String getObject()
-        {
-          final IndexedInts keyIndices = keySelector.getRow();
-          final IndexedInts valueIndices = valueSelector.getRow();
-          final int limit = Math.min(keyIndices.size(), valueIndices.size());
-          for (int i = 0; i < limit; i++) {
-            if (Objects.equals(keySelector.lookupName(keyIndices.get(i)), subColumnName)) {
-              return valueSelector.lookupName(valueIndices.get(i));
-            }
-          }
-          return null;
-        }
-      };
-    }
-  }
-
-  @Override
-  public ColumnCapabilities capabilities(String columnName)
-  {
-    final ValueType valueType = columnName.indexOf('.') < 0 ? ValueType.COMPLEX : ValueType.STRING;
-    return new ColumnCapabilitiesImpl().setType(valueType);
-  }
-
-  @Override
-  public List<String> requiredColumns()
-  {
-    return ImmutableList.of(keyDimension, valueDimension);
-  }
-
-  @Override
-  public boolean usesDotNotation()
-  {
-    return true;
-  }
-
-  @Override
-  public byte[] getCacheKey()
-  {
-    byte[] key = StringUtils.toUtf8(keyDimension);
-    byte[] value = StringUtils.toUtf8(valueDimension);
-    byte[] output = StringUtils.toUtf8(outputName);
-
-    return ByteBuffer.allocate(3 + key.length + value.length + output.length)
-                     .put(VirtualColumnCacheHelper.CACHE_TYPE_ID_MAP)
-                     .put(key).put(DimFilterUtils.STRING_SEPARATOR)
-                     .put(value).put(DimFilterUtils.STRING_SEPARATOR)
-                     .put(output)
-                     .array();
-  }
-
-  @JsonProperty
-  public String getKeyDimension()
-  {
-    return keyDimension;
-  }
-
-  @JsonProperty
-  public String getValueDimension()
-  {
-    return valueDimension;
-  }
-
-  @Override
-  @JsonProperty
-  public String getOutputName()
-  {
-    return outputName;
-  }
-
-  @Override
-  public boolean equals(Object o)
-  {
-    if (this == o) {
-      return true;
-    }
-    if (!(o instanceof MapVirtualColumn)) {
-      return false;
-    }
-
-    MapVirtualColumn that = (MapVirtualColumn) o;
-
-    if (!keyDimension.equals(that.keyDimension)) {
-      return false;
-    }
-    if (!valueDimension.equals(that.valueDimension)) {
-      return false;
-    }
-    if (!outputName.equals(that.outputName)) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode()
-  {
-    int result = keyDimension.hashCode();
-    result = 31 * result + valueDimension.hashCode();
-    result = 31 * result + outputName.hashCode();
-    return result;
-  }
-
-  @Override
-  public String toString()
-  {
-    return "MapVirtualColumn{" +
-           "keyDimension='" + keyDimension + '\'' +
-           ", valueDimension='" + valueDimension + '\'' +
-           ", outputName='" + outputName + '\'' +
-           '}';
-  }
-}
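For context: every accessor in MapVirtualColumn above reduces to one pairing rule, the key row and the value row are zipped positionally and truncated to the shorter of the two. A dependency-free sketch of that rule; the MapPairingDemo class and its toMap helper are illustrative only, not part of the extension:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class MapPairingDemo
{
  // Mirrors MapVirtualColumn#makeColumnValueSelector: zip keys with values,
  // dropping trailing keys that have no matching value (and vice versa).
  static Map<String, String> toMap(List<String> keys, List<String> values)
  {
    final int limit = Math.min(keys.size(), values.size());
    final Map<String, String> map = new LinkedHashMap<>();
    for (int i = 0; i < limit; i++) {
      map.put(keys.get(i), values.get(i));
    }
    return map;
  }

  public static void main(String[] args)
  {
    // Row "b" from the test fixture later in this patch: three keys, one value.
    System.out.println(toMap(List.of("key4", "key5", "key6"), List.of("value4")));
    // -> {key4=value4}; "params.key5" and "params.key6" would read as null.
  }
}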
diff --git a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnDimensionSelector.java b/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnDimensionSelector.java
deleted file mode 100644
index 3117cf849143..000000000000
--- a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnDimensionSelector.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import org.apache.druid.common.config.NullHandling;
-import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-
-abstract class MapVirtualColumnDimensionSelector implements DimensionSelector
-{
-  private final DimensionSelector keySelector;
-  private final DimensionSelector valueSelector;
-
-  MapVirtualColumnDimensionSelector(
-      DimensionSelector keySelector,
-      DimensionSelector valueSelector
-  )
-  {
-    this.keySelector = keySelector;
-    this.valueSelector = valueSelector;
-  }
-
-  protected DimensionSelector getKeySelector()
-  {
-    return keySelector;
-  }
-
-  protected DimensionSelector getValueSelector()
-  {
-    return valueSelector;
-  }
-
-  @Override
-  public double getDouble()
-  {
-    assert NullHandling.replaceWithDefault();
-    return 0.0;
-  }
-
-  @Override
-  public float getFloat()
-  {
-    assert NullHandling.replaceWithDefault();
-    return 0.0f;
-  }
-
-  @Override
-  public long getLong()
-  {
-    assert NullHandling.replaceWithDefault();
-    return 0L;
-  }
-
-  @Override
-  public void inspectRuntimeShape(RuntimeShapeInspector inspector)
-  {
-    inspector.visit("keySelector", keySelector);
-    inspector.visit("valueSelector", valueSelector);
-  }
-}
diff --git a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnValueSelector.java b/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnValueSelector.java
deleted file mode 100644
index 5c154d310d0d..000000000000
--- a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/MapVirtualColumnValueSelector.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import org.apache.druid.common.config.NullHandling;
-import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-
-abstract class MapVirtualColumnValueSelector implements ColumnValueSelector
-{
-  private final DimensionSelector keySelector;
-  private final DimensionSelector valueSelector;
-
-  MapVirtualColumnValueSelector(DimensionSelector keySelector, DimensionSelector valueSelector)
-  {
-    this.keySelector = keySelector;
-    this.valueSelector = valueSelector;
-  }
-
-  @Override
-  public double getDouble()
-  {
-    assert NullHandling.replaceWithDefault();
-    return 0.0;
-  }
-
-  @Override
-  public float getFloat()
-  {
-    assert NullHandling.replaceWithDefault();
-    return 0.0f;
-  }
-
-  @Override
-  public long getLong()
-  {
-    assert NullHandling.replaceWithDefault();
-    return 0L;
-  }
-
-  @Override
-  public boolean isNull()
-  {
-    return false;
-  }
-
-  @Override
-  public void inspectRuntimeShape(RuntimeShapeInspector inspector)
-  {
-    inspector.visit("keySelector", keySelector);
-    inspector.visit("valueSelector", valueSelector);
-  }
-}
diff --git a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/StringTypeMapVirtualColumnDimensionSelector.java b/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/StringTypeMapVirtualColumnDimensionSelector.java
deleted file mode 100644
index 37a005203692..000000000000
--- a/extensions-contrib/virtual-columns/src/main/java/org/apache/druid/segment/StringTypeMapVirtualColumnDimensionSelector.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import org.apache.druid.query.filter.ValueMatcher;
-import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import org.apache.druid.segment.data.IndexedInts;
-import org.apache.druid.segment.data.SingleIndexedInt;
-import org.apache.druid.segment.data.ZeroIndexedInts;
-
-import javax.annotation.Nullable;
-import java.util.Objects;
-import java.util.stream.IntStream;
-
-/**
- * {@link DimensionSelector} for String type {@link MapVirtualColumn}. Performance has not been considered yet,
- * so it may need to be improved later.
- */
-final class StringTypeMapVirtualColumnDimensionSelector extends MapVirtualColumnDimensionSelector
-{
-  private final String subColumnName;
-  private final SingleIndexedInt indexedInt = new SingleIndexedInt();
-
-  StringTypeMapVirtualColumnDimensionSelector(
-      DimensionSelector keySelector,
-      DimensionSelector valueSelector,
-      String subColumnName
-  )
-  {
-    super(keySelector, valueSelector);
-    this.subColumnName = Preconditions.checkNotNull(subColumnName, "subColumnName");
-  }
-
-  @Override
-  public IndexedInts getRow()
-  {
-    final int valueIndex = findValueIndicesIndexForSubColumn();
-    if (valueIndex < 0) {
-      return ZeroIndexedInts.instance();
-    } else {
-      indexedInt.setValue(valueIndex);
-      return indexedInt;
-    }
-  }
-
-  @Override
-  public ValueMatcher makeValueMatcher(@Nullable String value)
-  {
-    return new ValueMatcher()
-    {
-      @Override
-      public boolean matches()
-      {
-        return Objects.equals(value, getObject());
-      }
-
-      @Override
-      public void inspectRuntimeShape(RuntimeShapeInspector inspector)
-      {
-        inspector.visit("keySelector", getKeySelector());
-        inspector.visit("valueSelector", getValueSelector());
-        inspector.visit("subColumnName", subColumnName);
-      }
-    };
-  }
-
-  @Override
-  public ValueMatcher makeValueMatcher(Predicate<String> predicate)
-  {
-    return new ValueMatcher()
-    {
-      @Override
-      public boolean matches()
-      {
-        return predicate.apply((String) getObject());
-      }
-
-      @Override
-      public void inspectRuntimeShape(RuntimeShapeInspector inspector)
-      {
-        inspector.visit("keySelector", getKeySelector());
-        inspector.visit("valueSelector", getValueSelector());
-        inspector.visit("subColumnName", subColumnName);
-      }
-    };
-  }
-
-  @Override
-  public int getValueCardinality()
-  {
-    // To get the value cardinality, we would need to first check all keys and values to find valid pairs, and
-    // then find the number of distinct values among them.
-    return CARDINALITY_UNKNOWN;
-  }
-
-  @Nullable
-  @Override
-  public String lookupName(int id)
-  {
-    final int valueIndex = findValueIndicesIndexForSubColumn();
-
-    if (valueIndex == id) {
-      return getValueSelector().lookupName(id);
-    } else {
-      return null;
-    }
-  }
-
-  @Override
-  public boolean nameLookupPossibleInAdvance()
-  {
-    return false;
-  }
-
-  @Nullable
-  @Override
-  public IdLookup idLookup()
-  {
-    final DimensionSelector valueSelector = getValueSelector();
-    final IdLookup valueLookup = valueSelector.idLookup();
-
-    if (valueLookup != null) {
-      final int valueIndex = findValueIndicesIndexForSubColumn();
-      return name -> {
-        final int candidate = valueLookup.lookupId(name);
-        if (candidate == valueIndex) {
-          return candidate;
-        }
-        return -1;
-      };
-    } else {
-      return null;
-    }
-  }
-
-  @Nullable
-  @Override
-  public Object getObject()
-  {
-    final int valueIndex = findValueIndicesIndexForSubColumn();
-
-    if (valueIndex < 0) {
-      return null;
-    } else {
-      final DimensionSelector valueSelector = getValueSelector();
-      final IndexedInts valueIndices = valueSelector.getRow();
-      return valueSelector.lookupName(valueIndices.get(valueIndex));
-    }
-  }
-
-  /**
-   * Finds the index into valueIndices (the {@link IndexedInts} returned from the value selector's
-   * {@code getRow()}) that corresponds to {@link #subColumnName}.
-   *
-   * @return index into valueIndices if found, -1 otherwise.
-   */
-  private int findValueIndicesIndexForSubColumn()
-  {
-    final DimensionSelector keySelector = getKeySelector();
-    final DimensionSelector valueSelector = getValueSelector();
-
-    final IndexedInts keyIndices = keySelector.getRow();
-    final IndexedInts valueIndices = valueSelector.getRow();
-
-    final int limit = Math.min(keyIndices.size(), valueIndices.size());
-
-    return IntStream
        .range(0, limit)
-        .filter(i -> subColumnName.equals(keySelector.lookupName(keyIndices.get(i)))) // subColumnName is never null
-        .findAny()
-        .orElse(-1);
-  }
-
-  @Override
-  public Class classOfObject()
-  {
-    return String.class;
-  }
-}
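For context: the string-typed selector above resolves "params.<key>" by scanning the key row for the first position whose key matches, then reading the same position from the value row. The same logic without the Druid selector machinery; class and method names here are hypothetical:

import java.util.List;

public class SubColumnLookupDemo
{
  // Mirrors findValueIndicesIndexForSubColumn: return the position of the
  // first matching key, bounded by the shorter of the two rows.
  static int findValueIndex(List<String> keys, List<String> values, String subColumnName)
  {
    final int limit = Math.min(keys.size(), values.size());
    for (int i = 0; i < limit; i++) {
      if (subColumnName.equals(keys.get(i))) {
        return i;
      }
    }
    return -1;
  }

  public static void main(String[] args)
  {
    List<String> keys = List.of("key1", "key5");
    List<String> values = List.of("value1", "value5", "value9");
    System.out.println(findValueIndex(keys, values, "key5")); // 1 -> "value5"
    System.out.println(findValueIndex(keys, values, "key9")); // -1 -> null value
  }
}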
diff --git a/extensions-contrib/virtual-columns/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule b/extensions-contrib/virtual-columns/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
deleted file mode 100644
index 5f384116a91d..000000000000
--- a/extensions-contrib/virtual-columns/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-org.apache.druid.segment.DruidVirtualColumnsModule
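For context: this service file is what made the extension discoverable, since Druid's extension loading builds on java.util.ServiceLoader. A simplified sketch of the discovery step; the real path goes through Druid's Initialization code with a per-extension class loader, so this is illustrative only:

import java.util.ServiceLoader;
import org.apache.druid.initialization.DruidModule;

public class ServiceLoaderDemo
{
  public static void main(String[] args)
  {
    // Finds every DruidModule named in a META-INF/services file on the
    // classpath, including DruidVirtualColumnsModule when this jar is present.
    for (DruidModule module : ServiceLoader.load(DruidModule.class)) {
      System.out.println(module.getClass().getName());
    }
  }
}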
diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java
deleted file mode 100644
index 3e0cda4aedbd..000000000000
--- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.druid.collections.DefaultBlockingPool;
-import org.apache.druid.collections.StupidPool;
-import org.apache.druid.data.input.MapBasedRow;
-import org.apache.druid.jackson.DefaultObjectMapper;
-import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.Intervals;
-import org.apache.druid.java.util.common.granularity.Granularities;
-import org.apache.druid.query.DruidProcessingConfig;
-import org.apache.druid.query.QueryPlus;
-import org.apache.druid.query.QueryRunner;
-import org.apache.druid.query.QueryRunnerTestHelper;
-import org.apache.druid.query.TableDataSource;
-import org.apache.druid.query.aggregation.CountAggregatorFactory;
-import org.apache.druid.query.dimension.DefaultDimensionSpec;
-import org.apache.druid.query.groupby.GroupByQuery;
-import org.apache.druid.query.groupby.GroupByQueryConfig;
-import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
-import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
-import org.apache.druid.query.groupby.ResultRow;
-import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
-import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
-import org.apache.druid.segment.incremental.IncrementalIndex;
-import org.apache.druid.testing.InitializedNullHandlingTest;
-import org.apache.druid.timeline.SegmentId;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.stream.Collectors;
-
-public class MapVirtualColumnGroupByTest extends InitializedNullHandlingTest
-{
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
-  private QueryRunner<ResultRow> runner;
-
-  @Before
-  public void setup() throws IOException
-  {
-    final IncrementalIndex incrementalIndex = MapVirtualColumnTestBase.generateIndex();
-
-    final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
-        GroupByQueryConfig::new,
-        null,
-        new GroupByStrategyV2(
-            new DruidProcessingConfig()
-            {
-              @Override
-              public String getFormatString()
-              {
-                return null;
-              }
-
-              @Override
-              public int intermediateComputeSizeBytes()
-              {
-                return 10 * 1024 * 1024;
-              }
-
-              @Override
-              public int getNumMergeBuffers()
-              {
-                return 1;
-              }
-
-              @Override
-              public int getNumThreads()
-              {
-                return 1;
-              }
-            },
-            GroupByQueryConfig::new,
-            new StupidPool<>("map-virtual-column-groupby-test", () -> ByteBuffer.allocate(1024)),
-            new DefaultBlockingPool<>(() -> ByteBuffer.allocate(1024), 1),
-            new DefaultObjectMapper(),
-            QueryRunnerTestHelper.NOOP_QUERYWATCHER
-        )
-    );
-
-    final GroupByQueryRunnerFactory factory = new GroupByQueryRunnerFactory(
-        strategySelector,
-        new GroupByQueryQueryToolChest(strategySelector)
-    );
-
-    runner = QueryRunnerTestHelper.makeQueryRunner(
-        factory,
-        SegmentId.dummy("index"),
-        new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("index")),
-        "incremental"
-    );
-  }
-
-  @Test
-  public void testWithMapColumn()
-  {
-    final GroupByQuery query = new GroupByQuery(
-        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
-        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
-        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
-        null,
-        Granularities.ALL,
-        ImmutableList.of(new DefaultDimensionSpec("params", "params")),
-        ImmutableList.of(new CountAggregatorFactory("count")),
-        null,
-        null,
-        null,
-        null,
-        null
-    );
-
-    expectedException.expect(UnsupportedOperationException.class);
-    expectedException.expectMessage("Map column doesn't support getRow()");
-    runner.run(QueryPlus.wrap(query)).toList();
-  }
-
-  @Test
-  public void testWithSubColumn()
-  {
-    final GroupByQuery query = new GroupByQuery(
-        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
-        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
-        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
-        null,
-        Granularities.ALL,
-        ImmutableList.of(new DefaultDimensionSpec("params.key3", "params.key3")),
-        ImmutableList.of(new CountAggregatorFactory("count")),
-        null,
-        null,
-        null,
-        null,
-        null
-    );
-
-    final List<ResultRow> result = runner.run(QueryPlus.wrap(query)).toList();
-    final List<ResultRow> expected = ImmutableList.of(
-        new MapBasedRow(
-            DateTimes.of("2011-01-12T00:00:00.000Z"),
-            MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3")
-        ),
-        new MapBasedRow(DateTimes.of("2011-01-12T00:00:00.000Z"), MapVirtualColumnTestBase.mapOf("count", 2L))
-    ).stream().map(row -> ResultRow.fromLegacyRow(row, query)).collect(Collectors.toList());
-
-    Assert.assertEquals(expected, result);
-  }
-}
diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java
deleted file mode 100644
index 4d2164d7ec21..000000000000
--- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTestBase.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import com.google.common.io.CharSource;
-import org.apache.druid.data.input.impl.DelimitedParseSpec;
-import org.apache.druid.data.input.impl.DimensionsSpec;
-import org.apache.druid.data.input.impl.StringInputRowParser;
-import org.apache.druid.data.input.impl.TimestampSpec;
-import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.segment.incremental.IncrementalIndex;
-import org.apache.druid.segment.incremental.IncrementalIndexSchema;
-import org.apache.druid.testing.InitializedNullHandlingTest;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MapVirtualColumnTestBase extends InitializedNullHandlingTest
-{
-  static IncrementalIndex generateIndex() throws IOException
-  {
-    final CharSource input = CharSource.wrap(
-        "2011-01-12T00:00:00.000Z\ta\tkey1,key2,key3\tvalue1,value2,value3\n" +
-        "2011-01-12T00:00:00.000Z\tb\tkey4,key5,key6\tvalue4\n" +
-        "2011-01-12T00:00:00.000Z\tc\tkey1,key5\tvalue1,value5,value9\n"
-    );
-
-    final StringInputRowParser parser = new StringInputRowParser(
-        new DelimitedParseSpec(
-            new TimestampSpec("ts", "auto", null),
-            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim", "keys", "values")), null, null),
-            "\t",
-            ",",
-            Arrays.asList("ts", "dim", "keys", "values"),
-            false,
-            0
-        ),
-        "utf8"
-    );
-
-    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
-        .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
-        .build();
-
-    return TestIndex.loadIncrementalIndex(
-        () -> new IncrementalIndex.Builder()
-            .setIndexSchema(schema)
-            .setMaxRowCount(10000)
-            .buildOnheap(),
-        input,
-        parser
-    );
-  }
-
-  static <K, V> Map<K, V> mapOf(Object... elements)
-  {
-    final Map<K, V> map = new HashMap<>();
-    for (int i = 0; i < elements.length; i += 2) {
-      //noinspection unchecked
-      map.put((K) elements[i], (V) elements[i + 1]);
-    }
-    return map;
-  }
-}
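For context: applying the pairing rule from the earlier MapPairingDemo sketch to the three fixture rows above shows why the tests in this patch expect what they do; rows "b" and "c" have no key3, so they fall into a null sub-column group with count 2. This checker is an illustrative helper, not part of the deleted sources; it assumes MapPairingDemo is on the classpath and asserts are enabled with -ea:

import java.util.List;
import java.util.Map;

public class FixtureExpectationsDemo
{
  public static void main(String[] args)
  {
    // Row "a": all three keys have values.
    assert MapPairingDemo.toMap(List.of("key1", "key2", "key3"), List.of("value1", "value2", "value3"))
                         .equals(Map.of("key1", "value1", "key2", "value2", "key3", "value3"));
    // Row "b": a single value, so key5 and key6 pair with nothing and are dropped.
    assert MapPairingDemo.toMap(List.of("key4", "key5", "key6"), List.of("value4"))
                         .equals(Map.of("key4", "value4"));
    // Row "c": the trailing value9 has no key and is dropped.
    assert MapPairingDemo.toMap(List.of("key1", "key5"), List.of("value1", "value5", "value9"))
                         .equals(Map.of("key1", "value1", "key5", "value5"));
    // Only row "a" contains key3, hence grouping on "params.key3" yields
    // {params.key3=value3, count=1} plus a null group with count=2.
    System.out.println("fixture expectations hold");
  }
}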
diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java
deleted file mode 100644
index 81e169b811ca..000000000000
--- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.segment;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.druid.collections.StupidPool;
-import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.Intervals;
-import org.apache.druid.java.util.common.granularity.Granularities;
-import org.apache.druid.query.QueryPlus;
-import org.apache.druid.query.QueryRunner;
-import org.apache.druid.query.QueryRunnerTestHelper;
-import org.apache.druid.query.Result;
-import org.apache.druid.query.TableDataSource;
-import org.apache.druid.query.aggregation.CountAggregatorFactory;
-import org.apache.druid.query.dimension.DefaultDimensionSpec;
-import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
-import org.apache.druid.query.topn.DimensionAndMetricValueExtractor;
-import org.apache.druid.query.topn.NumericTopNMetricSpec;
-import org.apache.druid.query.topn.TopNQuery;
-import org.apache.druid.query.topn.TopNQueryConfig;
-import org.apache.druid.query.topn.TopNQueryQueryToolChest;
-import org.apache.druid.query.topn.TopNQueryRunnerFactory;
-import org.apache.druid.query.topn.TopNResultValue;
-import org.apache.druid.segment.incremental.IncrementalIndex;
-import org.apache.druid.testing.InitializedNullHandlingTest;
-import org.apache.druid.timeline.SegmentId;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Collections;
-import java.util.List;
-
-public class MapVirtualColumnTopNTest extends InitializedNullHandlingTest
-{
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
-  private QueryRunner<Result<TopNResultValue>> runner;
-
-  @Before
-  public void setup() throws IOException
-  {
-    final IncrementalIndex incrementalIndex = MapVirtualColumnTestBase.generateIndex();
-
-    final TopNQueryRunnerFactory factory = new TopNQueryRunnerFactory(
-        new StupidPool<>("map-virtual-column-test", () -> ByteBuffer.allocate(1024)),
-        new TopNQueryQueryToolChest(new TopNQueryConfig()),
-        QueryRunnerTestHelper.NOOP_QUERYWATCHER
-    );
-
-    runner = QueryRunnerTestHelper.makeQueryRunner(
-        factory,
-        SegmentId.dummy("index1"),
-        new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("index1")),
-        "incremental"
-    );
-  }
-
-  @Test
-  public void testWithMapColumn()
-  {
-    final TopNQuery query = new TopNQuery(
-        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
-        VirtualColumns.create(
-            ImmutableList.of(
-                new MapVirtualColumn("keys", "values", "params")
-            )
-        ),
-        new DefaultDimensionSpec("params", "params"), // params is the map type
-        new NumericTopNMetricSpec("count"),
-        1,
-        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
-        null,
-        Granularities.ALL,
-        ImmutableList.of(new CountAggregatorFactory("count")),
-        null,
-        null
-    );
-
-    expectedException.expect(UnsupportedOperationException.class);
-    expectedException.expectMessage("Map column doesn't support getRow()");
-    runner.run(QueryPlus.wrap(query)).toList();
-  }
-
-  @Test
-  public void testWithSubColumn()
-  {
-    final TopNQuery query = new TopNQuery(
-        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
-        VirtualColumns.create(
-            ImmutableList.of(
-                new MapVirtualColumn("keys", "values", "params")
-            )
-        ),
-        new DefaultDimensionSpec("params.key3", "params.key3"), // params.key3 is string
-        new NumericTopNMetricSpec("count"),
-        2,
-        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
-        null,
-        Granularities.ALL,
-        ImmutableList.of(new CountAggregatorFactory("count")),
-        null,
-        null
-    );
-
-    final List<Result<TopNResultValue>> result = runner.run(QueryPlus.wrap(query)).toList();
-    final List<Result<TopNResultValue>> expected = Collections.singletonList(
-        new Result<>(
-            DateTimes.of("2011-01-12T00:00:00.000Z"),
-            new TopNResultValue(
-                ImmutableList.of(
-                    new DimensionAndMetricValueExtractor(MapVirtualColumnTestBase.mapOf("count", 2L, "params.key3", null)),
-                    new DimensionAndMetricValueExtractor(MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3"))
-                )
-            )
-        )
-    );
-
-    Assert.assertEquals(expected, result);
-  }
-}
diff --git a/pom.xml b/pom.xml
index eafde8e0f039..a92f7358dafc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -169,30 +169,6 @@
         <module>extensions-core/druid-basic-security</module>
         <module>extensions-core/google-extensions</module>
         <module>extensions-core/druid-ranger-security</module>
-
-        <module>extensions-contrib/influx-extensions</module>
-        <module>extensions-contrib/cassandra-storage</module>
-        <module>extensions-contrib/dropwizard-emitter</module>
-        <module>extensions-contrib/cloudfiles-extensions</module>
-        <module>extensions-contrib/graphite-emitter</module>
-        <module>extensions-contrib/distinctcount</module>
-        <module>extensions-contrib/statsd-emitter</module>
-        <module>extensions-contrib/time-min-max</module>
-        <module>extensions-contrib/virtual-columns</module>
-        <module>extensions-contrib/thrift-extensions</module>
-        <module>extensions-contrib/ambari-metrics-emitter</module>
-        <module>extensions-contrib/sqlserver-metadata-storage</module>
-        <module>extensions-contrib/kafka-emitter</module>
-        <module>extensions-contrib/redis-cache</module>
-        <module>extensions-contrib/opentsdb-emitter</module>
-        <module>extensions-contrib/materialized-view-maintenance</module>
-        <module>extensions-contrib/materialized-view-selection</module>
-        <module>extensions-contrib/momentsketch</module>
-        <module>extensions-contrib/moving-average-query</module>
-        <module>extensions-contrib/tdigestsketch</module>
-        <module>extensions-contrib/influxdb-emitter</module>
-        <module>extensions-contrib/gce-extensions</module>
-        <module>extensions-contrib/aliyun-oss-extensions</module>
         <module>distribution</module>