Skip to content

Migrate some more mapper test cases (#61507) #61552

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Aug 25, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,9 @@
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.hamcrest.Matchers;
import org.junit.Before;
Expand All @@ -42,29 +38,23 @@
import java.util.Collection;
import java.util.Set;

public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeatureFieldMapper.Builder> {

IndexService indexService;
DocumentMapperParser parser;

public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatureFieldMapper.Builder> {
// Mapper properties that do not apply to rank_feature fields; presumably the
// base test case asserts that setting each of these is rejected — confirm in
// FieldMapperTestCase2.
@Override
protected Set<String> unsupportedProperties() {
// org.elasticsearch.common.collect.Set is the codebase's backport shim of JDK 9+ Set.of.
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "store", "doc_values", "index");
}

@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("positive_score_impact", false, (a, b) -> {
a.positiveScoreImpact(true);
b.positiveScoreImpact(false);
});
}

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
// Registers the mapper-extras plugin so the rank_feature field type is available
// to the test mapper service. "List" here is org.elasticsearch.common.collect.List
// (see imports), the codebase's backport shim of JDK 9+ List.of.
protected Collection<? extends Plugin> getPlugins() {
return List.of(new MapperExtrasPlugin());
}

static int getFrequency(TokenStream tk) throws IOException {
Expand All @@ -81,34 +71,27 @@ protected RankFeatureFieldMapper.Builder newBuilder() {
return new RankFeatureFieldMapper.Builder("rank-feature");
}

public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()
.endObject().endObject());

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
// Writes the smallest valid mapping for this field type: just {"type": "rank_feature"}.
// The base test case wraps this in the surrounding mapping JSON.
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "rank_feature");
}

assertEquals(mapping, mapper.mappingSource().toString());
// rank_feature does not accept the "meta" mapping parameter, so the base
// class's meta-parameter test is disabled for this mapper.
@Override
protected boolean supportsMeta() {
return false;
}

ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 10)
.endObject()),
XContentType.JSON));
public void testDefaults() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());

ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
assertEquals(1, fields.length);
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
FeatureField featureField1 = (FeatureField) fields[0];

ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 12)
.endObject()),
XContentType.JSON));

ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", 12)));
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];

int freq1 = getFrequency(featureField1.tokenStream(null, null));
Expand All @@ -117,34 +100,17 @@ public void testDefaults() throws Exception {
}

public void testNegativeScoreImpact() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature")
.field("positive_score_impact", false).endObject().endObject()
.endObject().endObject());

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 10)
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "rank_feature").field("positive_score_impact", false))
);

ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
assertEquals(1, fields.length);
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
FeatureField featureField1 = (FeatureField) fields[0];

ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 12)
.endObject()),
XContentType.JSON));

ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", 12)));
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];

int freq1 = getFrequency(featureField1.tokenStream(null, null));
Expand All @@ -153,39 +119,30 @@ public void testNegativeScoreImpact() throws Exception {
}

public void testRejectMultiValuedFields() throws MapperParsingException, IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().startObject("foo")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()
.endObject().endObject().endObject().endObject());

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", Arrays.asList(10, 20))
.endObject()),
XContentType.JSON)));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("field").field("type", "rank_feature").endObject();
b.startObject("foo").startObject("properties");
{
b.startObject("field").field("type", "rank_feature").endObject();
}
b.endObject().endObject();
}));

MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper.parse(source(b -> b.field("field", Arrays.asList(10, 20))))
);
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [field] in the same document",
e.getCause().getMessage());

e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("foo")
.startObject()
.field("field", 10)
.endObject()
.startObject()
.field("field", 20)
.endObject()
.endArray()
.endObject()),
XContentType.JSON)));
e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> {
b.startArray("foo");
{
b.startObject().field("field", 10).endObject();
b.startObject().field("field", 20).endObject();
}
b.endArray();
})));
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,59 +22,42 @@
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.hamcrest.Matchers;
import org.junit.Before;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;

public class RankFeaturesFieldMapperTests extends FieldMapperTestCase<RankFeaturesFieldMapper.Builder> {
public class RankFeaturesFieldMapperTests extends FieldMapperTestCase2<RankFeaturesFieldMapper.Builder> {

// Mapper properties that do not apply to rank_features fields; presumably the
// base test case asserts that setting each of these is rejected — confirm in
// FieldMapperTestCase2.
@Override
protected Set<String> unsupportedProperties() {
// org.elasticsearch.common.collect.Set is the codebase's backport shim of JDK 9+ Set.of.
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "store", "doc_values", "index");
}

IndexService indexService;
DocumentMapperParser parser;
// Registers the mapper-extras plugin so the rank_features field type is
// available to the test mapper service. Uses the codebase's backport shim of
// JDK 9+ List.of (fully qualified).
@Override
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new MapperExtrasPlugin());
}

@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
// Writes the smallest valid mapping for this field type: just {"type": "rank_features"}.
// The base test case wraps this in the surrounding mapping JSON.
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "rank_features");
}

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
// rank_features does not accept the "meta" mapping parameter, so the base
// class's meta-parameter test is disabled for this mapper.
protected boolean supportsMeta() {
return false;
}

public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_features").endObject().endObject()
.endObject().endObject());

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());

ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field("foo", 10)
.field("bar", 20)
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc1 = mapper.parse(source(b -> b.startObject("field").field("foo", 10).field("bar", 20).endObject()));

IndexableField[] fields = doc1.rootDoc().getFields("field");
assertEquals(2, fields.length);
Expand All @@ -90,45 +73,30 @@ public void testDefaults() throws Exception {
}

public void testRejectMultiValuedFields() throws MapperParsingException, IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_features").endObject().startObject("foo")
.startObject("properties").startObject("field").field("type", "rank_features").endObject().endObject()
.endObject().endObject().endObject().endObject());

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field("foo", Arrays.asList(10, 20))
.endObject()
.endObject()),
XContentType.JSON)));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("field").field("type", "rank_features").endObject();
b.startObject("foo").startObject("properties");
{
b.startObject("field").field("type", "rank_features").endObject();
}
b.endObject().endObject();
}));

MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper.parse(source(b -> b.startObject("field").field("foo", Arrays.asList(10, 20)).endObject()))
);
assertEquals("[rank_features] fields take hashes that map a feature to a strictly positive float, but got unexpected token " +
"START_ARRAY", e.getCause().getMessage());

e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("foo")
.startObject()
.startObject("field")
.field("bar", 10)
.endObject()
.endObject()
.startObject()
.startObject("field")
.field("bar", 20)
.endObject()
.endObject()
.endArray()
.endObject()),
XContentType.JSON)));
e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> {
b.startArray("foo");
{
b.startObject().startObject("field").field("bar", 10).endObject().endObject();
b.startObject().startObject("field").field("bar", 20).endObject().endObject();
}
b.endArray();
})));
assertEquals("[rank_features] fields do not support indexing multiple values for the same rank feature [foo.field.bar] in " +
"the same document", e.getCause().getMessage());
}
Expand Down
Loading