Skip to content

Commit 0df456c

Browse files
committed
Test: Protect auto_date_histo from 0 buckets
The test for `auto_date_histogram` was trying to round `Long.MAX_VALUE` if there were 0 buckets. That doesn't work. Also, this replaces all of the class variables created to make consistent random results when testing `InternalAutoDateHistogram` with the newer `randomResultsToReduce` which is a little simpler to understand.
1 parent de9b91f commit 0df456c

File tree

1 file changed

+91
-69
lines changed

1 file changed

+91
-69
lines changed

server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java

+91-69
Original file line numberDiff line numberDiff line change
@@ -53,35 +53,19 @@
5353
import static org.hamcrest.Matchers.hasSize;
5454

5555
public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
56-
57-
private DocValueFormat format;
58-
private RoundingInfo[] roundingInfos;
59-
private long defaultStart;
60-
private int roundingIndex;
61-
62-
@Override
63-
public void setUp() throws Exception {
64-
super.setUp();
65-
format = randomNumericDocValueFormat();
66-
defaultStart = randomLongBetween(0, utcMillis("2050-01-01"));
67-
roundingIndex = between(0, AutoDateHistogramAggregationBuilder.buildRoundings(null, null).length - 1);
68-
}
69-
70-
@Override
71-
protected InternalAutoDateHistogram createTestInstance(String name,
72-
Map<String, Object> metadata,
73-
InternalAggregations aggregations) {
74-
75-
roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
56+
protected InternalAutoDateHistogram createTestInstance(
57+
String name,
58+
Map<String, Object> metadata,
59+
InternalAggregations aggregations,
60+
long startingDate,
61+
RoundingInfo[] roundingInfos,
62+
int roundingIndex,
63+
DocValueFormat format
64+
) {
7665
int nbBuckets = randomNumberOfBuckets();
7766
int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1);
7867
List<InternalAutoDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
7968

80-
long startingDate = defaultStart;
81-
if (rarely()) {
82-
startingDate += randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000));
83-
}
84-
8569
long interval = randomIntBetween(1, 3);
8670
long intervalMillis = roundingInfos[roundingIndex].roughEstimateDurationMillis * interval;
8771

@@ -94,9 +78,26 @@ protected InternalAutoDateHistogram createTestInstance(String name,
9478
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1);
9579
}
9680

81+
@Override
82+
protected InternalAutoDateHistogram createTestInstance(String name,
83+
Map<String, Object> metadata,
84+
InternalAggregations aggregations) {
85+
RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
86+
int roundingIndex = between(0, roundingInfos.length - 1);
87+
return createTestInstance(
88+
name,
89+
metadata,
90+
aggregations,
91+
randomLongBetween(0, utcMillis("2050-01-01")),
92+
roundingInfos,
93+
roundingIndex,
94+
randomNumericDocValueFormat()
95+
);
96+
}
97+
9798
/*
98-
This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals
99-
passed in, instead of using the interval associated with the loop.
99+
* This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals
100+
* passed in, instead of using the interval associated with the loop.
100101
*/
101102
public void testGetAppropriateRoundingUsesCorrectIntervals() {
102103
RoundingInfo[] roundings = new RoundingInfo[6];
@@ -121,8 +122,23 @@ public void testGetAppropriateRoundingUsesCorrectIntervals() {
121122
}
122123

123124
@Override
124-
protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
125+
protected List<InternalAutoDateHistogram> randomResultsToReduce(String name, int size) {
126+
long startingDate = randomLongBetween(0, utcMillis("2050-01-01"));
127+
RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
128+
int roundingIndex = between(0, roundingInfos.length - 1);
129+
DocValueFormat format = randomNumericDocValueFormat();
130+
List<InternalAutoDateHistogram> result = new ArrayList<>(size);
131+
for (int i = 0; i < size; i++) {
132+
long thisResultStart = startingDate;
133+
thisResultStart += usually() ? 0 :randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000));
134+
result.add(createTestInstance(name, null, InternalAggregations.EMPTY, thisResultStart, roundingInfos, roundingIndex, format));
135+
}
136+
return result;
137+
}
125138

139+
@Override
140+
protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
141+
int totalBucketConut = 0;
126142
long lowest = Long.MAX_VALUE;
127143
long highest = 0;
128144

@@ -135,11 +151,12 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut
135151
if (bucketKey > highest) {
136152
highest = bucketKey;
137153
}
154+
totalBucketConut++;
138155
}
139156
}
140157

141158
int roundingIndex = reduced.getBucketInfo().roundingIdx;
142-
RoundingInfo roundingInfo = roundingInfos[roundingIndex];
159+
RoundingInfo roundingInfo = AutoDateHistogramAggregationBuilder.buildRoundings(null, null)[roundingIndex];
143160

144161
long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
145162
int innerIntervalIndex = 0;
@@ -163,53 +180,58 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut
163180
* number of buckets.
164181
*/
165182
int innerIntervalToUse;
166-
do {
167-
innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex];
168-
int bucketCount = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse);
169-
if (bucketCount == reduced.getBuckets().size()) {
170-
break;
171-
}
172-
if (bucketCount < reduced.getBuckets().size()) {
173-
innerIntervalToUse = roundingInfo.innerIntervals[Math.max(0, innerIntervalIndex - 1)];
174-
break;
175-
}
176-
} while (++innerIntervalIndex < roundingInfo.innerIntervals.length);
183+
if (totalBucketConut == 0) {
184+
innerIntervalToUse = roundingInfo.innerIntervals[0];
185+
} else {
186+
do {
187+
innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex];
188+
int bucketCountAtInterval = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse);
189+
if (bucketCountAtInterval == reduced.getBuckets().size()) {
190+
break;
191+
}
192+
if (bucketCountAtInterval < reduced.getBuckets().size()) {
193+
innerIntervalToUse = roundingInfo.innerIntervals[Math.max(0, innerIntervalIndex - 1)];
194+
break;
195+
}
196+
} while (++innerIntervalIndex < roundingInfo.innerIntervals.length);
197+
}
177198

178199
assertThat(reduced.getInterval().toString(), equalTo(innerIntervalToUse + roundingInfo.unitAbbreviation));
179200
Map<Instant, Long> expectedCounts = new TreeMap<>();
180-
long keyForBucket = roundingInfo.rounding.round(lowest);
181-
while (keyForBucket <= roundingInfo.rounding.round(highest)) {
182-
long nextKey = keyForBucket;
183-
for (int i = 0; i < innerIntervalToUse; i++) {
184-
nextKey = roundingInfo.rounding.nextRoundingValue(nextKey);
185-
}
186-
Instant key = Instant.ofEpochMilli(keyForBucket);
187-
expectedCounts.put(key, 0L);
188-
189-
// Iterate through the input buckets, and for each bucket, determine if it's inside
190-
// the range of the bucket in the outer loop. if it is, add the doc count to the total
191-
// for that bucket.
192-
193-
for (InternalAutoDateHistogram histogram : inputs) {
194-
for (Histogram.Bucket bucket : histogram.getBuckets()) {
195-
long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
196-
long docCount = bucket.getDocCount();
197-
if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) {
198-
expectedCounts.compute(key,
199-
(k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount);
201+
if (totalBucketConut > 0) {
202+
long keyForBucket = roundingInfo.rounding.round(lowest);
203+
while (keyForBucket <= roundingInfo.rounding.round(highest)) {
204+
long nextKey = keyForBucket;
205+
for (int i = 0; i < innerIntervalToUse; i++) {
206+
nextKey = roundingInfo.rounding.nextRoundingValue(nextKey);
207+
}
208+
Instant key = Instant.ofEpochMilli(keyForBucket);
209+
expectedCounts.put(key, 0L);
210+
211+
// Iterate through the input buckets, and for each bucket, determine if it's inside
212+
// the range of the bucket in the outer loop. if it is, add the doc count to the total
213+
// for that bucket.
214+
215+
for (InternalAutoDateHistogram histogram : inputs) {
216+
for (Histogram.Bucket bucket : histogram.getBuckets()) {
217+
long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
218+
long docCount = bucket.getDocCount();
219+
if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) {
220+
expectedCounts.compute(key,
221+
(k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount);
222+
}
200223
}
201224
}
225+
keyForBucket = nextKey;
202226
}
203-
keyForBucket = nextKey;
204-
}
205227

206-
// If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
207-
// this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
208-
if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
209-
expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L);
228+
// If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
229+
// this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
230+
if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
231+
expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L);
232+
}
210233
}
211234

212-
213235
// pick out the actual reduced values to the make the assertion more readable
214236
Map<Instant, Long> actualCounts = new TreeMap<>();
215237
for (Histogram.Bucket bucket : reduced.getBuckets()) {
@@ -257,7 +279,7 @@ protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram ins
257279
break;
258280
case 1:
259281
buckets = new ArrayList<>(buckets);
260-
buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format,
282+
buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), instance.getFormatter(),
261283
InternalAggregations.EMPTY));
262284
break;
263285
case 2:
@@ -275,7 +297,7 @@ protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram ins
275297
default:
276298
throw new AssertionError("Illegal randomisation branch");
277299
}
278-
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1);
300+
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, instance.getFormatter(), metadata, 1);
279301
}
280302

281303
public void testReduceSecond() {

0 commit comments

Comments
 (0)