Commit c605a03

Increase code cov
Signed-off-by: bowenlan-amzn <[email protected]>
bowenlan-amzn committed May 2, 2024
1 parent 28492b5 commit c605a03
Showing 2 changed files with 101 additions and 18 deletions.
File 1 of 2:
@@ -613,7 +613,6 @@ private void visitPoints(byte[] packedValue, CheckedRunnable<IOException> collect

    private boolean pointCompare(byte[] lower, byte[] upper, byte[] packedValue) {
        if (compareByteValue(packedValue, lower) < 0) {
-
            return false;
        }
        return compareByteValue(packedValue, upper) <= 0;
@@ -633,7 +632,6 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue
                if (collector.iterateRangeEnd(minPackedValue, this::compareByteValue)) {
                    throw new CollectionTerminatedException();
                }
-
                // compare the next range with this node's min max again
                // new rangeMin = previous rangeMax + 1 <= min
                rangeMax = collector.activeRangeAsByteArray[1];
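
Editor's note (not part of the commit): the two hunks above touch pointCompare's inclusive bounds check and the step that advances the traversal to the next range ("new rangeMin = previous rangeMax + 1"). The sketch below mirrors that logic with plain long values instead of packed byte[] bounds; all names are illustrative.

// Illustration only: simplified, long-based analogue of pointCompare and the range-advance step.
public final class RangeTraversalSketch {

    // Same shape as pointCompare above: reject values below the lower bound,
    // accept values up to and including the upper bound.
    static boolean pointInRange(long lower, long upper, long value) {
        if (value < lower) {
            return false;
        }
        return value <= upper;
    }

    public static void main(String[] args) {
        long rangeMin = 0L;
        long rangeMax = 9L;                                          // active range [0, 9]
        System.out.println(pointInRange(rangeMin, rangeMax, 9L));    // true, upper bound is inclusive
        System.out.println(pointInRange(rangeMin, rangeMax, 10L));   // false, beyond the active range

        // Advance to the next range once the current one is exhausted:
        // the new range starts right after the previous range's max.
        rangeMin = rangeMax + 1;                                     // 10
        rangeMax = 19L;                                              // e.g. the next bucket's upper bound
        System.out.println(pointInRange(rangeMin, rangeMax, 10L));   // true
    }
}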
File 2 of 2:
@@ -38,12 +38,15 @@
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.apache.lucene.tests.util.TestUtil;
import org.opensearch.common.time.DateFormatters;
import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
@@ -1461,7 +1464,6 @@ private void testSearchCase(
    }

    public void testMultiRangeTraversal() throws IOException {
-
        Map<String, Integer> dataset = new HashMap<>();
        dataset.put("2017-02-01T09:02:00.000Z", randomIntBetween(100, 2000));
        dataset.put("2017-02-01T09:59:59.999Z", randomIntBetween(100, 2000));
@@ -1508,8 +1510,67 @@ public void testMultiRangeTraversal() throws IOException {
                assertEquals(expected, bucket.getDocCount());
            },
            false,
-            collectorCount -> assertEquals(0, (int) collectorCount)
+            collectorCount -> assertEquals(0, (int) collectorCount),
+            true
        );
+    }

+    public void testMultiRangeTraversalFixedData() throws IOException {
+        Map<String, Integer> dataset = new HashMap<>();
+        dataset.put("2017-02-01T09:02:00.000Z", 512);
+        dataset.put("2017-02-01T09:59:59.999Z", 256);
+        dataset.put("2017-02-01T10:00:00.001Z", 256);
+        dataset.put("2017-02-01T13:06:00.000Z", 512);
+        dataset.put("2017-02-01T14:04:00.000Z", 256);
+        dataset.put("2017-02-01T14:05:00.000Z", 256);
+        dataset.put("2017-02-01T15:59:00.000Z", 768);
+
+        testFilterRewriteCase(
+            LongPoint.newRangeQuery(AGGREGABLE_DATE, asLong("2017-01-01T09:00:00.000Z"), asLong("2017-02-01T14:04:01.000Z")),
+            dataset,
+            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60m")).field(AGGREGABLE_DATE).minDocCount(1L),
+            histogram -> {
+                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
+                assertEquals(4, buckets.size());
+
+                Histogram.Bucket bucket = buckets.get(0);
+                assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString());
+                int expected = dataset.get("2017-02-01T09:02:00.000Z") + dataset.get("2017-02-01T09:59:59.999Z");
+                assertEquals(expected, bucket.getDocCount());
+
+                bucket = buckets.get(1);
+                assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString());
+                expected = dataset.get("2017-02-01T10:00:00.001Z");
+                assertEquals(expected, bucket.getDocCount());
+
+                bucket = buckets.get(2);
+                assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString());
+                expected = dataset.get("2017-02-01T13:06:00.000Z");
+                assertEquals(expected, bucket.getDocCount());
+
+                bucket = buckets.get(3);
+                assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString());
+                expected = dataset.get("2017-02-01T14:04:00.000Z");
+                assertEquals(expected, bucket.getDocCount());
+            },
+            false,
+            collectorCount -> assertEquals(0, (int) collectorCount),
+            false
+        );
+    }
+
+    public void testMultiRangeTraversalNotApplicable() throws IOException {
+        Map<String, Integer> dataset = new HashMap<>();
+        dataset.put("2017-02-01T09:02:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T09:59:59.999Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T10:00:00.001Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T13:06:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T14:04:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T14:05:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T15:59:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T16:06:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T16:48:00.000Z", randomIntBetween(100, 2000));
+        dataset.put("2017-02-01T16:59:00.000Z", randomIntBetween(100, 2000));
+
        testFilterRewriteCase(
            new MatchAllDocsQuery(),
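
Editor's note (not part of the commit): as a quick cross-check of testMultiRangeTraversalFixedData above, with the 60m fixed interval and the query capped at 2017-02-01T14:04:01.000Z, the fixed doc counts resolve to exactly the four buckets the test asserts.

// Illustration only: the per-bucket doc counts asserted by testMultiRangeTraversalFixedData.
public final class FixedDataExpectations {
    public static void main(String[] args) {
        int bucket0900 = 512 + 256; // 09:02:00.000Z and 09:59:59.999Z -> 768
        int bucket1000 = 256;       // 10:00:00.001Z
        int bucket1300 = 512;       // 13:06:00.000Z
        int bucket1400 = 256;       // 14:04:00.000Z; 14:05 and 15:59 fall past the query's upper bound
        System.out.println(bucket0900 + ", " + bucket1000 + ", " + bucket1300 + ", " + bucket1400);
    }
}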
@@ -1552,7 +1613,8 @@ public void testMultiRangeTraversal() throws IOException {
                assertEquals(expected, bucket.getDocCount());
            },
            true,
-            collectCount -> assertTrue(collectCount > 0)
+            collectCount -> assertTrue(collectCount > 0),
+            true
        );
    }

@@ -1562,25 +1624,48 @@ private void testFilterRewriteCase(
        Consumer<DateHistogramAggregationBuilder> configure,
        Consumer<InternalDateHistogram> verify,
        boolean useDocCountField,
-        Consumer<Integer> verifyCollectCount
+        Consumer<Integer> verifyCollectCount,
+        boolean randomWrite
    ) throws IOException {
        DateFieldMapper.DateFieldType fieldType = aggregableDateFieldType(false, true);

        try (Directory directory = newDirectory()) {
-            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
-                Document document = new Document();
-                if (useDocCountField) {
-                    // add the doc count field to the first document
-                    document.add(new NumericDocValuesField(DocCountFieldMapper.NAME, 5));
-                }
-                for (Map.Entry<String, Integer> date : dataset.entrySet()) {
-                    for (int i = 0; i < date.getValue(); i++) {
-                        long instant = asLong(date.getKey(), fieldType);
-                        document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant));
-                        document.add(new LongPoint(AGGREGABLE_DATE, instant));
-                        indexWriter.addDocument(document);
-                        document.clear();
-                    }
-                }
-            }
+            if (randomWrite) {
+                try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+                    Document document = new Document();
+                    if (useDocCountField) {
+                        // add the doc count field to the first document
+                        document.add(new NumericDocValuesField(DocCountFieldMapper.NAME, 5));
+                    }
+                    for (Map.Entry<String, Integer> date : dataset.entrySet()) {
+                        for (int i = 0; i < date.getValue(); i++) {
+                            long instant = asLong(date.getKey(), fieldType);
+                            document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant));
+                            document.add(new LongPoint(AGGREGABLE_DATE, instant));
+                            indexWriter.addDocument(document);
+                            document.clear();
+                        }
+                    }
+                }
+            } else {
+                // use default codec so max points in leaf is fixed to 512, to cover the node level visit and compare logic
+                try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig().setCodec(TestUtil.getDefaultCodec()))) {
+                    List<Document> documents = new ArrayList<>();
+                    for (Map.Entry<String, Integer> date : dataset.entrySet()) {
+                        for (int i = 0; i < date.getValue(); i++) {
+                            Document document = new Document();
+                            if (useDocCountField) {
+                                // add the doc count field once
+                                document.add(new NumericDocValuesField(DocCountFieldMapper.NAME, 5));
+                                useDocCountField = false;
+                            }
+                            long instant = asLong(date.getKey(), fieldType);
+                            document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant));
+                            document.add(new LongPoint(AGGREGABLE_DATE, instant));
+                            documents.add(document);
+                        }
+                    }
+                    indexWriter.addDocuments(documents);
+                }
+            }

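Editor's note (not part of the commit): a standalone sketch of the non-random indexing path added above. The directory, field name, and document contents here are assumptions for illustration; the point is that a plain IndexWriter with Lucene's default codec keeps the BKD leaf size at its default of 512 points, which is what the in-diff comment relies on to reach the node-level visit and compare logic.

// Illustration only: deterministic writes through IndexWriter with the default codec,
// mirroring the else branch introduced in testFilterRewriteCase above.
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.util.TestUtil;

public final class DefaultCodecWriteSketch {
    public static void main(String[] args) throws Exception {
        try (Directory directory = new ByteBuffersDirectory()) {
            // Default codec keeps max points per BKD leaf at 512, unlike randomized test codecs.
            IndexWriterConfig config = new IndexWriterConfig().setCodec(TestUtil.getDefaultCodec());
            try (IndexWriter writer = new IndexWriter(directory, config)) {
                List<Document> documents = new ArrayList<>();
                for (long instant = 0; instant < 1024; instant++) {
                    Document document = new Document();
                    document.add(new LongPoint("timestamp", instant)); // illustrative field name
                    documents.add(document);
                }
                // Add everything through one plain writer; RandomIndexWriter would instead
                // randomize flushing and segment layout.
                writer.addDocuments(documents);
            }
        }
    }
}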