Skip to content

add explicit tests for geo_shape with doc values on old indices #52861

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Mar 5, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/
package org.elasticsearch.index.mapper;

import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
Expand All @@ -28,6 +29,7 @@
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.Collection;
Expand All @@ -40,6 +42,11 @@

public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {

// Allow tests to set private index settings such as index.version.created,
// needed below to create indices as if by an older (pre-8.0) version.
@Override
protected boolean forbidPrivateIndexSettings() {
return false;
}

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
Expand All @@ -65,6 +72,25 @@ public void testDefaultConfiguration() throws IOException {
assertTrue(geoShapeFieldMapper.fieldType().hasDocValues());
}

/**
 * Checks that a {@code geo_shape} field mapped on an index created by a
 * pre-8.0 version defaults to doc values being disabled: the setting is
 * neither explicit nor enabled, and the field type reports no doc values.
 */
public void testDefaultDocValueConfigurationOnPre8() throws IOException {
    String mappingJson = Strings.toString(XContentFactory.jsonBuilder()
        .startObject().startObject("type1").startObject("properties")
        .startObject("location").field("type", "geo_shape").endObject()
        .endObject().endObject().endObject());

    Version preEightVersion = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
    DocumentMapper docMapper = createIndex("test", settings(preEightVersion).build())
        .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mappingJson));

    Mapper mapper = docMapper.mappers().getMapper("location");
    assertThat(mapper, instanceOf(GeoShapeFieldMapper.class));

    GeoShapeFieldMapper shapeMapper = (GeoShapeFieldMapper) mapper;
    assertFalse(shapeMapper.docValues().explicit());
    assertFalse(shapeMapper.docValues().value());
    assertFalse(shapeMapper.fieldType().hasDocValues());
}

/**
* Test that orientation parameter correctly parses
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,16 @@
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.geometry.MultiPoint;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.GeographyValidator;
import org.elasticsearch.geometry.utils.WellKnownText;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
Expand All @@ -43,6 +47,7 @@
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.geometry.utils.Geohash.PRECISION;
import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
Expand All @@ -52,44 +57,46 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan;

@ESIntegTestCase.SuiteScopeTestCase
public class GeoHashGridIT extends ESIntegTestCase {
private static WellKnownText WKT = new WellKnownText(false, new GeographyValidator(true));

// Allow tests to set private index settings such as index.version.created,
// needed to create the 7.x-versioned test index below.
@Override
protected boolean forbidPrivateIndexSettings() {
return false;
}

private Version version = VersionUtils.randomIndexCompatibleVersion(random());

static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null;
static int numDocs = 100;

static String smallestGeoHash = null;

private static IndexRequestBuilder indexCity(String index, String name, List<String> latLon) throws Exception {
private static IndexRequestBuilder indexCity(String index, String name, List<Point> points) throws Exception {
XContentBuilder source = jsonBuilder().startObject().field("city", name);
if (latLon != null) {
source = source.field("location", latLon);
if (points != null) {
List<String> latLonAsStrings = points.stream().map(ll -> ll.getLat() + ", " + ll.getLon()).collect(Collectors.toList());
source = source.field("location", latLonAsStrings);
source = source.field("location_as_shape", points.isEmpty() ? null : WKT.toWKT(new MultiPoint(points)));
}
source = source.endObject();
return client().prepareIndex(index).setSource(source);
}

private static IndexRequestBuilder indexCity(String index, String name, String latLon) throws Exception {
return indexCity(index, name, Arrays.<String>asList(latLon));
private static IndexRequestBuilder indexCity(String index, String name, double lng, double lat) throws Exception {
return indexCity(index, name, Arrays.<Point>asList(new Point(lng, lat)));
}

@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex("idx_unmapped");

Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();

assertAcked(prepareCreate("idx").setSettings(settings)
.setMapping("location", "type=geo_point", "city", "type=keyword"));
assertAcked(prepareCreate("idx")
.setMapping("location", "type=geo_point", "location_as_shape", "type=geo_shape", "city", "type=keyword"));
assertAcked(prepareCreate("idx7x", settings(VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0)))
.setMapping("location", "type=geo_point", "location_as_shape", "type=geo_shape", "city", "type=keyword"));

List<IndexRequestBuilder> cities = new ArrayList<>();
Random random = random();
Expand All @@ -100,7 +107,8 @@ public void setupSuiteScopeCluster() throws Exception {
double lng = (360d * random.nextDouble()) - 180d;
String randomGeoHash = stringEncode(lng, lat, PRECISION);
//Index at the highest resolution
cities.add(indexCity("idx", randomGeoHash, lat + ", " + lng));
cities.add(indexCity("idx", randomGeoHash, lng, lat));
cities.add(indexCity("idx7x", randomGeoHash, lng, lat));
expectedDocCountsForGeoHash.put(randomGeoHash, expectedDocCountsForGeoHash.getOrDefault(randomGeoHash, 0) + 1);
//Update expected doc counts for all resolutions..
for (int precision = PRECISION - 1; precision > 0; precision--) {
Expand All @@ -113,19 +121,19 @@ public void setupSuiteScopeCluster() throws Exception {
}
indexRandom(true, cities);

assertAcked(prepareCreate("multi_valued_idx").setSettings(settings)
assertAcked(prepareCreate("multi_valued_idx").setSettings(settings(VersionUtils.randomIndexCompatibleVersion(random())))
.setMapping("location", "type=geo_point", "city", "type=keyword"));

cities = new ArrayList<>();
multiValuedExpectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
for (int i = 0; i < numDocs; i++) {
final int numPoints = random.nextInt(4);
List<String> points = new ArrayList<>();
List<Point> points = new ArrayList<>();
Set<String> geoHashes = new HashSet<>();
for (int j = 0; j < numPoints; ++j) {
double lat = (180d * random.nextDouble()) - 90d;
double lng = (360d * random.nextDouble()) - 180d;
points.add(lat + "," + lng);
points.add(new Point(lng, lat));
// Update expected doc counts for all resolutions..
for (int precision = PRECISION; precision > 0; precision--) {
final String geoHash = stringEncode(lng, lat, precision);
Expand All @@ -142,6 +150,42 @@ public void setupSuiteScopeCluster() throws Exception {
ensureSearchable();
}

/**
 * A geohash_grid aggregation over a {@code geo_shape} field on a 7.x-created
 * index (no doc values) must fail, because fielddata is unsupported on
 * geo_shape fields.
 */
public void test7xIndexOnly() {
    SearchPhaseExecutionException failure = expectThrows(SearchPhaseExecutionException.class,
        () -> client().prepareSearch("idx7x")
            .addAggregation(geohashGrid("aggName").field("location_as_shape"))
            .get());
    Throwable rootCause = failure.getRootCause();
    assertNotNull(rootCause);
    assertThat(rootCause.getMessage(),
        equalTo("Can't load fielddata on [location_as_shape] because fielddata is unsupported on fields of type [geo_shape]."
            + " Use doc values instead."));
}

/**
 * Aggregating over both the 8.0 index (doc values) and the 7.x index (no doc
 * values) succeeds overall: the shards of the 7.x index fail, but the result
 * is still computed from the valid shards and stays consistent with the
 * expected per-geohash doc counts.
 */
public void test7xIndexWith8Index() {
    int precision = randomIntBetween(1, PRECISION);
    SearchResponse response = client().prepareSearch("idx", "idx7x")
        .addAggregation(geohashGrid("aggName").field("location_as_shape").precision(precision))
        .get();
    assertThat(response.status(), equalTo(RestStatus.OK));
    // The shards of the 7.x index are expected to fail.
    assertThat(response.getSuccessfulShards(), lessThan(response.getTotalShards()));
    GeoGrid geoGrid = response.getAggregations().get("aggName");
    List<? extends Bucket> buckets = geoGrid.getBuckets();
    Object[] propertiesKeys = (Object[]) ((InternalAggregation) geoGrid).getProperty("_key");
    Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) geoGrid).getProperty("_count");
    for (int i = 0; i < buckets.size(); i++) {
        GeoGrid.Bucket cell = buckets.get(i);
        String geohash = cell.getKeyAsString();

        long bucketCount = cell.getDocCount();
        int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash);
        // assertNotSame(bucketCount, 0) compared a boxed Long against a boxed
        // Integer by reference and could never fail; compare values instead.
        assertNotEquals(0, bucketCount);
        assertEquals("Geohash " + geohash + " has wrong doc count ",
            expectedBucketCount, bucketCount);
        GeoPoint geoPoint = (GeoPoint) propertiesKeys[i];
        assertThat(stringEncode(geoPoint.lon(), geoPoint.lat(), precision), equalTo(geohash));
        assertThat(propertiesDocCounts[i], equalTo(bucketCount));
    }
}

public void testSimple() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.hppc.ObjectObjectMap;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
Expand All @@ -41,6 +42,7 @@
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.geo.RandomGeoGenerator;

import java.util.ArrayList;
Expand All @@ -64,6 +66,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
protected static final String DATELINE_IDX_NAME = "dateline_idx";
protected static final String HIGH_CARD_IDX_NAME = "high_card_idx";
protected static final String IDX_ZERO_NAME = "idx_zero";
protected static final String IDX_NAME_7x = "idx_7x";

protected static int numDocs;
protected static int numUniqueGeoPoints;
Expand All @@ -74,13 +77,22 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
protected static ObjectObjectMap<String, GeoPoint> expectedCentroidsForGeoHash = null;
protected static final double GEOHASH_TOLERANCE = 1E-5D;

// Allow tests to set private index settings such as index.version.created,
// needed to create the 7.x-versioned index in setupSuiteScopeCluster.
@Override
protected boolean forbidPrivateIndexSettings() {
return false;
}

@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex(UNMAPPED_IDX_NAME);
assertAcked(prepareCreate(IDX_NAME)
.setMapping(SINGLE_VALUED_GEOPOINT_FIELD_NAME, "type=geo_point",
.setMapping(SINGLE_VALUED_GEOPOINT_FIELD_NAME, "type=geo_point", SINGLE_VALUED_GEOSHAPE_FIELD_NAME, "type=geo_shape",
MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=keyword"));

assertAcked(prepareCreate(IDX_NAME_7x)
.setSettings(settings(VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0)))
.setMapping(SINGLE_VALUED_GEOSHAPE_FIELD_NAME, "type=geo_shape"));

singleTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
multiTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
Expand Down Expand Up @@ -119,9 +131,16 @@ public void setupSuiteScopeCluster() throws Exception {
singleVal = singleValues[i % numUniqueGeoPoints];
multiVal[0] = multiValues[i % numUniqueGeoPoints];
multiVal[1] = multiValues[(i+1) % numUniqueGeoPoints];
String singleValWKT = "POINT(" + singleVal.lon() + " " + singleVal.lat() + ")";
builders.add(client().prepareIndex(IDX_NAME_7x).setSource(jsonBuilder()
.startObject()
.field(SINGLE_VALUED_GEOSHAPE_FIELD_NAME, singleValWKT)
.endObject()
));
builders.add(client().prepareIndex(IDX_NAME).setSource(jsonBuilder()
.startObject()
.array(SINGLE_VALUED_GEOPOINT_FIELD_NAME, singleVal.lon(), singleVal.lat())
.field(SINGLE_VALUED_GEOSHAPE_FIELD_NAME, singleValWKT)
.startArray(MULTI_VALUED_FIELD_NAME)
.startArray().value(multiVal[0].lon()).value(multiVal[0].lat()).endArray()
.startArray().value(multiVal[1].lon()).value(multiVal[1].lat()).endArray()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
Expand All @@ -43,6 +44,7 @@
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
Expand All @@ -52,6 +54,30 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
private static final String geoPointAggName = "geoPointBounds";
private static final String geoShapeAggName = "geoShapeBounds";

/**
 * A geo_bounds aggregation over a {@code geo_shape} field on a 7.x-created
 * index (no doc values) must fail, because fielddata is unsupported on
 * geo_shape fields.
 */
public void test7xIndexOnly() {
    SearchPhaseExecutionException failure = expectThrows(SearchPhaseExecutionException.class,
        () -> client().prepareSearch(IDX_NAME_7x)
            .addAggregation(geoBounds(geoShapeAggName).field(SINGLE_VALUED_GEOSHAPE_FIELD_NAME))
            .get());
    Throwable rootCause = failure.getRootCause();
    assertNotNull(rootCause);
    assertThat(rootCause.getMessage(),
        equalTo("Can't load fielddata on [geoshape_value] because fielddata is unsupported on fields of type [geo_shape]."
            + " Use doc values instead."));
}

/**
 * geo_bounds across both the 8.0 index and the 7.x index: the 7.x shards
 * fail, but the bounds computed from the doc-values-backed shards are still
 * returned and match the expected single-valued extents.
 */
public void test7xIndexWith8Index() {
    SearchResponse response = client().prepareSearch(IDX_NAME_7x, IDX_NAME)
        .addAggregation(geoBounds(geoShapeAggName).field(SINGLE_VALUED_GEOSHAPE_FIELD_NAME).wrapLongitude(false))
        .get();
    assertThat(response.status(), equalTo(RestStatus.OK));
    // Shards of the 7.x index are expected to fail.
    assertThat(response.getSuccessfulShards(), lessThan(response.getTotalShards()));
    GeoBounds geoBounds = response.getAggregations().get(geoShapeAggName);
    GeoPoint upperLeft = geoBounds.topLeft();
    GeoPoint lowerRight = geoBounds.bottomRight();
    assertThat(upperLeft.lat(), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE));
    assertThat(upperLeft.lon(), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE));
    assertThat(lowerRight.lat(), closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE));
    assertThat(lowerRight.lon(), closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE));
}

public void testSingleValuedField() throws Exception {
SearchResponse response = client().prepareSearch(IDX_NAME)
.addAggregation(geoBounds(geoPointAggName).field(SINGLE_VALUED_GEOPOINT_FIELD_NAME)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,10 @@

package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGrid;
import org.elasticsearch.search.aggregations.bucket.global.Global;
Expand All @@ -35,6 +37,7 @@
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;

Expand All @@ -45,6 +48,31 @@
public class GeoCentroidIT extends AbstractGeoTestCase {
private static final String aggName = "geoCentroid";

/**
 * A geo_centroid aggregation over a {@code geo_shape} field on a 7.x-created
 * index (no doc values) must fail, because fielddata is unsupported on
 * geo_shape fields.
 */
public void test7xIndexOnly() {
    SearchPhaseExecutionException failure = expectThrows(SearchPhaseExecutionException.class,
        () -> client().prepareSearch(IDX_NAME_7x)
            .addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_GEOSHAPE_FIELD_NAME))
            .get());
    Throwable rootCause = failure.getRootCause();
    assertNotNull(rootCause);
    assertThat(rootCause.getMessage(),
        equalTo("Can't load fielddata on [geoshape_value] because fielddata is unsupported on fields of type [geo_shape]."
            + " Use doc values instead."));
}

public void test7xIndexWith8Index() {
SearchResponse response = client().prepareSearch(IDX_NAME_7x, IDX_NAME)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not sure if I am reading this correctly. If you mix indices with different versions, then we do not fail? Is this the expected behavior?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is intentional? We fail shards that do not have doc-values, but since the other index is valid, the aggregation result is consistent.

It errors out if queried against an index with no geo_shape field at all.

I will double check other aggregations to see what is the most consistent behavior. I assume it is the same as is shown here

Copy link
Contributor

@iverase iverase Mar 2, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see, and this assertion checks that some shards were not successful:

  assertThat(response.getSuccessfulShards(), lessThan(response.getTotalShards()));

.addAggregation(geoCentroid(aggName).field(SINGLE_VALUED_GEOSHAPE_FIELD_NAME))
.get();
assertThat(response.status(), equalTo(RestStatus.OK));
assertThat(response.getSuccessfulShards(), lessThan(response.getTotalShards()));
GeoCentroid geoCentroid = response.getAggregations().get(aggName);
assertThat(geoCentroid, notNullValue());
assertThat(geoCentroid.getName(), equalTo(aggName));
GeoPoint centroid = geoCentroid.centroid();
assertThat(centroid.lat(), closeTo(singleCentroid.lat(), GEOHASH_TOLERANCE));
assertThat(centroid.lon(), closeTo(singleCentroid.lon(), GEOHASH_TOLERANCE));
assertEquals(numDocs, geoCentroid.count());
}

public void testEmptyAggregation() throws Exception {
SearchResponse response = client().prepareSearch(EMPTY_IDX_NAME)
.setQuery(matchAllQuery())
Expand Down