Skip to content

Add interval response parameter to AutoDateInterval histogram #33254

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,8 @@ Response:
"key": 1425168000000,
"doc_count": 2
}
]
],
"interval": "1M"
}
}
}
Expand Down Expand Up @@ -174,7 +175,8 @@ starting at midnight UTC on 1 October 2015:
"key": 1443664800000,
"doc_count": 1
}
]
],
"interval": "1h"
}
}
}
Expand Down Expand Up @@ -229,7 +231,8 @@ the specified time zone.
"key": 1443664800000,
"doc_count": 1
}
]
],
"interval": "1h"
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,17 +73,17 @@ public class AutoDateHistogramAggregationBuilder
static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
RoundingInfo[] roundings = new RoundingInfo[6];
roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, 1, 5, 10, 30);
1000L, "s" , 1, 5, 10, 30);
roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, 1, 5, 10, 30);
60 * 1000L, "m", 1, 5, 10, 30);
roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, 1, 3, 12);
60 * 60 * 1000L, "h", 1, 3, 12);
roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
24 * 60 * 60 * 1000L, 1, 7);
24 * 60 * 60 * 1000L, "d", 1, 7);
roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
30 * 24 * 60 * 60 * 1000L, 1, 3);
30 * 24 * 60 * 60 * 1000L, "M", 1, 3);
roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100);
return roundings;
}

Expand Down Expand Up @@ -186,24 +186,28 @@ public static class RoundingInfo implements Writeable {
final Rounding rounding;
final int[] innerIntervals;
final long roughEstimateDurationMillis;
final String unitAbbreviation;

/**
 * Creates a RoundingInfo.
 *
 * @param rounding the rounding used to bucket keys at this granularity
 * @param roughEstimateDurationMillis approximate width of one bucket in
 *        milliseconds, used to estimate how many buckets an interval yields
 * @param unitAbbreviation date-math unit suffix ("s", "m", "h", "d", "M", "y")
 *        combined with the inner interval to report the chosen interval
 * @param innerIntervals the allowed multiples of this unit, in increasing order
 */
public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, String unitAbbreviation, int... innerIntervals) {
    this.rounding = rounding;
    this.roughEstimateDurationMillis = roughEstimateDurationMillis;
    this.unitAbbreviation = unitAbbreviation;
    this.innerIntervals = innerIntervals;
}

/**
 * Reads a {@code RoundingInfo} from the wire. The read order must stay in
 * sync with {@link #writeTo}: rounding, rough duration, inner intervals,
 * unit abbreviation.
 * <p>
 * NOTE(review): the unit abbreviation is read unconditionally — presumably
 * a version check is needed for mixed-version clusters where older nodes do
 * not send this field; confirm against the target release branch.
 */
public RoundingInfo(StreamInput in) throws IOException {
rounding = Rounding.Streams.read(in);
roughEstimateDurationMillis = in.readVLong();
innerIntervals = in.readIntArray();
unitAbbreviation = in.readString();
}

/**
 * Serializes this {@code RoundingInfo}; the write order mirrors the read
 * order in the {@code StreamInput} constructor (rounding, rough duration,
 * inner intervals, unit abbreviation).
 */
@Override
public void writeTo(StreamOutput out) throws IOException {
Rounding.Streams.write(rounding, out);
out.writeVLong(roughEstimateDurationMillis);
out.writeIntArray(innerIntervals);
out.writeString(unitAbbreviation);
}

public int getMaximumInnerInterval() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -181,15 +181,16 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE
InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
buildEmptySubAggregations());

return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo, formatter, pipelineAggregators(), metaData());
return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo,
formatter, pipelineAggregators(), metaData(), 1);
}

@Override
public InternalAggregation buildEmptyAggregation() {
InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
buildEmptySubAggregations());
return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, emptyBucketInfo, formatter,
pipelineAggregators(), metaData());
pipelineAggregators(), metaData(), 1);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -208,15 +208,16 @@ public int hashCode() {
private final DocValueFormat format;
private final BucketInfo bucketInfo;
private final int targetBuckets;

private long bucketInnerInterval;

/**
 * Main constructor.
 *
 * @param bucketInnerInterval the multiple of the selected rounding's unit
 *        that each bucket spans (always 1 at shard level; recomputed during
 *        reduce when consecutive buckets are merged)
 */
InternalAutoDateHistogram(String name, List<Bucket> buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData, long bucketInnerInterval) {
    super(name, pipelineAggregators, metaData);
    this.buckets = buckets;
    this.bucketInfo = emptyBucketInfo;
    this.format = formatter;
    this.targetBuckets = targetBuckets;
    this.bucketInnerInterval = bucketInnerInterval;
}

/**
Expand All @@ -238,6 +239,13 @@ protected void doWriteTo(StreamOutput out) throws IOException {
out.writeVInt(targetBuckets);
}

/**
 * The interval this aggregation ended up using, e.g. "1h" or "3d": the inner
 * interval recorded when the result was built, combined with the unit
 * abbreviation of the rounding that was finally selected.
 */
public DateHistogramInterval getInterval() {
    RoundingInfo selected = bucketInfo.roundingInfos[bucketInfo.roundingIdx];
    return new DateHistogramInterval(bucketInnerInterval + selected.unitAbbreviation);
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure we should calculate the interval here, as we could end up with bugs where this is different from the actual interval used. Instead, could we have a field for the interval stored in this class, which is populated in AutoDateHistogramAggregator.buildAggregation() and also in doReduce() when we build the new instance following a reduce? I think we will have all the information we need in both those places.


@Override
public String getWriteableName() {
return AutoDateHistogramAggregationBuilder.NAME;
Expand All @@ -262,7 +270,7 @@ public BucketInfo getBucketInfo() {

@Override
public InternalAutoDateHistogram create(List<Bucket> buckets) {
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData, 1);
}

@Override
Expand All @@ -279,7 +287,6 @@ private static class IteratorAndCurrent {
this.iterator = iterator;
current = iterator.next();
}

}

/**
Expand Down Expand Up @@ -365,7 +372,7 @@ private BucketReduceResult mergeBucketsIfNeeded(List<Bucket> reducedBuckets, int
reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx];
reducedBuckets = mergeBuckets(reducedBuckets, reduceRoundingInfo.rounding, reduceContext);
}
return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx);
return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx, 1);
}

private List<Bucket> mergeBuckets(List<Bucket> reducedBuckets, Rounding reduceRounding, ReduceContext reduceContext) {
Expand Down Expand Up @@ -403,12 +410,13 @@ private static class BucketReduceResult {
List<Bucket> buckets;
RoundingInfo roundingInfo;
int roundingIdx;
long innerInterval;

/**
 * Captures one step of the reduce pipeline.
 *
 * @param innerInterval the multiple of the rounding's unit the buckets span
 *        after any merging of consecutive buckets
 */
BucketReduceResult(List<Bucket> buckets, RoundingInfo roundingInfo, int roundingIdx, long innerInterval) {
    this.buckets = buckets;
    this.roundingInfo = roundingInfo;
    this.roundingIdx = roundingIdx;
    this.innerInterval = innerInterval;
}
}

Expand Down Expand Up @@ -444,7 +452,7 @@ private BucketReduceResult addEmptyBuckets(BucketReduceResult currentResult, Red
}
lastBucket = iter.next();
}
return new BucketReduceResult(list, roundingInfo, roundingIdx);
return new BucketReduceResult(list, roundingInfo, roundingIdx, currentResult.innerInterval);
}

static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
Expand Down Expand Up @@ -507,7 +515,7 @@ public InternalAggregation doReduce(List<InternalAggregation> aggregations, Redu
this.bucketInfo.emptySubAggregations);

return new InternalAutoDateHistogram(getName(), reducedBucketsResult.buckets, targetBuckets, bucketInfo, format,
pipelineAggregators(), getMetaData());
pipelineAggregators(), getMetaData(), reducedBucketsResult.innerInterval);
}

private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult,
Expand Down Expand Up @@ -547,7 +555,7 @@ private BucketReduceResult mergeConsecutiveBuckets(List<Bucket> reducedBuckets,
reduceContext.consumeBucketsAndMaybeBreak(1);
mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext));
}
return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx);
return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx, mergeInterval);
}

@Override
Expand All @@ -557,6 +565,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
bucket.toXContent(builder, params);
}
builder.endArray();
builder.field("interval", getInterval().toString());
return builder;
}

Expand All @@ -580,7 +589,7 @@ public InternalAggregation createAggregation(List<MultiBucketsAggregation.Bucket
buckets2.add((Bucket) b);
}
buckets2 = Collections.unmodifiableList(buckets2);
return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, pipelineAggregators(), getMetaData());
return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, pipelineAggregators(), getMetaData(), 1);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
Expand All @@ -36,6 +37,16 @@ public String getType() {
return AutoDateHistogramAggregationBuilder.NAME;
}

// The "interval" string echoed back in the response (e.g. "1h"); populated
// by the XContent parser, so it stays null if the response omits the field.
private String interval;

/** Returns the interval the server reported it used, or null if absent. */
public String getInterval() {
return interval;
}

public void setInterval(String interval) {
this.interval = interval;
}

@Override
public List<? extends Histogram.Bucket> getBuckets() {
return buckets;
Expand All @@ -47,6 +58,8 @@ public List<? extends Histogram.Bucket> getBuckets() {
declareMultiBucketAggregationFields(PARSER,
parser -> ParsedBucket.fromXContent(parser, false),
parser -> ParsedBucket.fromXContent(parser, true));
PARSER.declareString((parsed, value) -> parsed.interval = value,
new ParseField("interval"));
}

public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException {
Expand All @@ -55,6 +68,14 @@ public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String
return aggregation;
}

/**
 * Renders the buckets (via the superclass) followed by the "interval" field,
 * mirroring the server-side rendering in InternalAutoDateHistogram.
 * NOTE(review): if the interval was never parsed this emits "interval": null —
 * confirm that round-trips cleanly against the server-side output.
 */
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder = super.doXContentBody(builder, params);
builder.field("interval", getInterval());
return builder;
}


public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {

private Long key;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ protected InternalAutoDateHistogram createTestInstance(String name,
}
InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData, 1);
}

/*
Expand All @@ -94,11 +94,11 @@ public void testGetAppropriateRoundingUsesCorrectIntervals() {
// an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval()
// will be larger than the estimate.
roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, 1000);
1000L, "s", 1000);
roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, 1, 5, 10, 30);
60 * 1000L, "m", 1, 5, 10, 30);
roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, 1, 3, 12);
60 * 60 * 1000L, "h", 1, 3, 12);

OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC);
// We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function
Expand Down Expand Up @@ -198,6 +198,14 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
assertEquals(expectedCounts, actualCounts);

DateHistogramInterval expectedInterval;
if (reduced.getBuckets().size() == 1) {
expectedInterval = reduced.getInterval();
} else {
expectedInterval = new DateHistogramInterval(innerIntervalToUse+roundingInfo.unitAbbreviation);
}
assertThat(reduced.getInterval(), equalTo(expectedInterval));
}

private int getBucketCount(long lowest, long highest, RoundingInfo roundingInfo, long intervalInMillis) {
Expand Down Expand Up @@ -252,6 +260,6 @@ protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram ins
default:
throw new AssertionError("Illegal randomisation branch");
}
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData, 1);
}
}