
remove backcompat handling of 6.1.x versions #42032


Merged · 8 commits · May 15, 2019
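The hunks below all remove the same backcompat pattern: branches on Version.V_6_1_0 (on the wire version of the remote node, or on the version an index was created with), plus the 6.1.x constants themselves, presumably because this branch no longer needs to interoperate with 6.1.x nodes or read 6.1.x-created indices. As an illustration only, a simplified sketch (not code from this PR, class and method names are invented) of the serialization gate shape seen in the first hunk, before and after:

import java.io.IOException;
import java.util.List;

import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamOutput;

class DocumentsWireFormat {

    // Before: the sender checks the receiver's wire version, because nodes
    // older than 6.1.0 only understand a single optional document.
    static void writeWithBackcompat(StreamOutput out, List<BytesReference> documents) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
            out.writeVInt(documents.size());
            for (BytesReference document : documents) {
                out.writeBytesReference(document);
            }
        } else {
            if (documents.size() > 1) {
                throw new IllegalArgumentException("Nodes prior to 6.1.0 cannot accept multiple documents");
            }
            out.writeOptionalBytesReference(documents.isEmpty() ? null : documents.get(0));
        }
    }

    // After: 6.1.x can no longer appear on the wire, so the gate collapses to
    // the unconditional multi-document encoding.
    static void write(StreamOutput out, List<BytesReference> documents) throws IOException {
        out.writeVInt(documents.size());
        for (BytesReference document : documents) {
            out.writeBytesReference(document);
        }
    }
}

The same collapse happens on both the write and read sides in the LicensesMetaData and ML request hunks further down: once the version gate is gone, the stream is read and written unconditionally.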
@@ -293,17 +293,9 @@ protected void doWriteTo(StreamOutput out) throws IOException {
         } else {
             out.writeBoolean(false);
         }
-        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
-            out.writeVInt(documents.size());
-            for (BytesReference document : documents) {
-                out.writeBytesReference(document);
-            }
-        } else {
-            if (documents.size() > 1) {
-                throw new IllegalArgumentException("Nodes prior to 6.1.0 cannot accept multiple documents");
-            }
-            BytesReference doc = documents.isEmpty() ? null : documents.iterator().next();
-            out.writeOptionalBytesReference(doc);
+        out.writeVInt(documents.size());
+        for (BytesReference document : documents) {
+            out.writeBytesReference(document);
         }
         if (documents.isEmpty() == false) {
             out.writeEnum(documentXContentType);
@@ -285,7 +285,7 @@ Tuple<BooleanQuery, Boolean> createCandidateQuery(IndexReader indexReader, Versi
         }

         BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder();
-        if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(Version.V_6_1_0)) {
+        if (canUseMinimumShouldMatchField) {
             LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name());
             for (BytesRef extractedTerm : extractedTerms) {
                 subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm)));
@@ -471,9 +471,7 @@ void processQuery(Query query, ParseContext context) {
         for (IndexableField field : fields) {
             context.doc().add(field);
         }
-        if (indexVersionCreated.onOrAfter(Version.V_6_1_0)) {
-            doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
-        }
+        doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
     }

     static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException {
@@ -73,7 +73,7 @@ static void innerHitsExecute(Query mainQuery,
         String fieldName = singlePercolateQuery ? FIELD_NAME_PREFIX : FIELD_NAME_PREFIX + "_" + percolateQuery.getName();
         IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
         Weight weight = percolatorIndexSearcher.createWeight(percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter()),
-            ScoreMode.COMPLETE_NO_SCORES, 1f);
+                ScoreMode.COMPLETE_NO_SCORES, 1f);
         Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0));
         int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc();
         BitSet rootDocs = BitSet.of(s.iterator(), memoryIndexMaxDoc);
@@ -97,6 +97,7 @@
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.VersionUtils;
 import org.junit.After;
 import org.junit.Before;

@@ -593,7 +594,7 @@ public void testRangeQueries() throws Exception {
         IndexSearcher shardSearcher = newSearcher(directoryReader);
         shardSearcher.setQueryCache(null);

-        Version v = Version.V_6_1_0;
+        Version v = VersionUtils.randomIndexCompatibleVersion(random());
         MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer());
         IndexSearcher percolateSearcher = memoryIndex.createSearcher();
         Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")),
20 changes: 0 additions & 20 deletions server/src/main/java/org/elasticsearch/Version.java
@@ -46,16 +46,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
      */
     public static final int V_EMPTY_ID = 0;
     public static final Version V_EMPTY = new Version(V_EMPTY_ID, org.apache.lucene.util.Version.LATEST);
-    public static final int V_6_1_0_ID = 6010099;
-    public static final Version V_6_1_0 = new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final int V_6_1_1_ID = 6010199;
-    public static final Version V_6_1_1 = new Version(V_6_1_1_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final int V_6_1_2_ID = 6010299;
-    public static final Version V_6_1_2 = new Version(V_6_1_2_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final int V_6_1_3_ID = 6010399;
-    public static final Version V_6_1_3 = new Version(V_6_1_3_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final int V_6_1_4_ID = 6010499;
-    public static final Version V_6_1_4 = new Version(V_6_1_4_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
     // The below version is missing from the 7.3 JAR
     private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1);
     public static final int V_6_2_0_ID = 6020099;
@@ -196,16 +186,6 @@ public static Version fromId(int id) {
                 return V_6_2_1;
             case V_6_2_0_ID:
                 return V_6_2_0;
-            case V_6_1_4_ID:
-                return V_6_1_4;
-            case V_6_1_3_ID:
-                return V_6_1_3;
-            case V_6_1_2_ID:
-                return V_6_1_2;
-            case V_6_1_1_ID:
-                return V_6_1_1;
-            case V_6_1_0_ID:
-                return V_6_1_0;
             case V_EMPTY_ID:
                 return V_EMPTY;
             default:
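With the V_6_1_x constants and their fromId cases gone, code can no longer name those releases directly. A minimal sketch of the replacement patterns used by the test hunks in this PR that switch from pinned 6.1.x versions to VersionUtils (the test class and assertions below are illustrative, not from the PR):

import org.elasticsearch.Version;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

public class VersionChoiceExampleTests extends ESTestCase {

    public void testVersionChoices() {
        // Tests that used to pin Version.V_6_1_0 now draw a randomized version instead.
        Version indexCompatible = VersionUtils.randomIndexCompatibleVersion(random());
        assertTrue(indexCompatible.onOrAfter(Version.CURRENT.minimumIndexCompatibilityVersion()));

        // A concrete 6.1.x value can still be spelled out without a constant;
        // the parsed id matches the removed V_6_1_0_ID literal above.
        Version sixOneZero = Version.fromString("6.1.0");
        assertEquals(6010099, sixOneZero.id);
    }
}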
@@ -24,7 +24,6 @@
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
@@ -202,10 +201,7 @@ public void preParse(ParseContext context) {
     }

     @Override
-    public void postParse(ParseContext context) throws IOException {
-        if (context.indexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) {
-            super.parse(context);
-        }
+    public void postParse(ParseContext context) {
     }

     @Override
@@ -28,7 +28,6 @@
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -255,15 +254,9 @@ public void postParse(ParseContext context) throws IOException {
         // we share the parent docs fields to ensure good compression
         SequenceIDFields seqID = context.seqID();
         assert seqID != null;
-        final Version versionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated();
-        final boolean includePrimaryTerm = versionCreated.before(Version.V_6_1_0);
         for (Document doc : context.nonRootDocuments()) {
             doc.add(seqID.seqNo);
             doc.add(seqID.seqNoDocValue);
-            if (includePrimaryTerm) {
-                // primary terms are used to distinguish between parent and nested docs since 6.1.0
-                doc.add(seqID.primaryTerm);
-            }
         }
     }

@@ -19,14 +19,11 @@

 package org.elasticsearch.index.query;

-import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
@@ -148,10 +145,6 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) {
             fields = context.simpleMatchToIndexNames(fieldPattern);
         }

-        if (context.indexVersionCreated().before(Version.V_6_1_0)) {
-            return newLegacyExistsQuery(context, fields);
-        }
-
         if (fields.size() == 1) {
             String field = fields.iterator().next();
             return newFieldExistsQuery(context, field);
@@ -164,28 +157,6 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) {
         return new ConstantScoreQuery(boolFilterBuilder.build());
     }

-    private static Query newLegacyExistsQuery(QueryShardContext context, Collection<String> fields) {
-        // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery()
-        // so we don't end up with deprecation warnings
-        if (fields.size() == 1) {
-            Query filter = newLegacyExistsQuery(context, fields.iterator().next());
-            return new ConstantScoreQuery(filter);
-        }
-
-        BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
-        for (String field : fields) {
-            Query filter = newLegacyExistsQuery(context, field);
-            boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
-        }
-        return new ConstantScoreQuery(boolFilterBuilder.build());
-    }
-
-    private static Query newLegacyExistsQuery(QueryShardContext context, String field) {
-        MappedFieldType fieldType = context.fieldMapper(field);
-        String fieldName = fieldType != null ? fieldType.name() : field;
-        return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName));
-    }
-
     private static Query newFieldExistsQuery(QueryShardContext context, String field) {
         MappedFieldType fieldType = context.getMapperService().fullName(field);
         if (fieldType == null) {
@@ -27,7 +27,6 @@
 import org.apache.lucene.search.NormsFieldExistsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
-import org.elasticsearch.Version;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.AbstractQueryTestCase;

@@ -65,26 +64,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query,
         Collection<String> fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern);
         Collection<String> mappedFields = fields.stream().filter((field) -> context.getQueryShardContext().getObjectMapper(field) != null
                 || context.getQueryShardContext().getMapperService().fullName(field) != null).collect(Collectors.toList());
-        if (context.mapperService().getIndexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) {
-            if (fields.size() == 1) {
-                assertThat(query, instanceOf(ConstantScoreQuery.class));
-                ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
-                String field = expectedFieldName(fields.iterator().next());
-                assertThat(constantScoreQuery.getQuery(), instanceOf(TermQuery.class));
-                TermQuery termQuery = (TermQuery) constantScoreQuery.getQuery();
-                assertEquals(field, termQuery.getTerm().text());
-            } else {
-                assertThat(query, instanceOf(ConstantScoreQuery.class));
-                ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
-                assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class));
-                BooleanQuery booleanQuery = (BooleanQuery) constantScoreQuery.getQuery();
-                assertThat(booleanQuery.clauses().size(), equalTo(mappedFields.size()));
-                for (int i = 0; i < mappedFields.size(); i++) {
-                    BooleanClause booleanClause = booleanQuery.clauses().get(i);
-                    assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
-                }
-            }
-        } else if (fields.size() == 1 && mappedFields.size() == 0) {
+        if (fields.size() == 1 && mappedFields.size() == 0) {
             assertThat(query, instanceOf(MatchNoDocsQuery.class));
             MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
             assertThat(matchNoDocsQuery.toString(null),
@@ -50,7 +50,6 @@
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Strings;
@@ -1033,8 +1032,7 @@ public void testExistsFieldQuery() throws Exception {
         QueryShardContext context = createShardContext();
         QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(STRING_FIELD_NAME + ":*");
         Query query = queryBuilder.toQuery(context);
-        if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
-            && (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) {
+        if (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false) {
             assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME))));
         } else {
             assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME)))));
@@ -1044,8 +1042,7 @@ public void testExistsFieldQuery() throws Exception {
         String value = (quoted ? "\"" : "") + STRING_FIELD_NAME + (quoted ? "\"" : "");
         queryBuilder = new QueryStringQueryBuilder("_exists_:" + value);
         query = queryBuilder.toQuery(context);
-        if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
-            && (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) {
+        if (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false) {
             assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME))));
         } else {
             assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME)))));
@@ -32,7 +32,6 @@
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.lucene.BytesRefs;
@@ -139,11 +138,9 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query,
         String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
         if (queryBuilder.from() == null && queryBuilder.to() == null) {
             final Query expectedQuery;
-            if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
-                    && context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) {
+            if (context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) {
                 expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(expectedFieldName));
-            } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) &&
-                    context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) {
+            } else if (context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) {
                 expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName));
             } else {
                 expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName)));
@@ -425,8 +422,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC
         // Range query with open bounds rewrite to an exists query
         Query luceneQuery = rewrittenRange.toQuery(queryShardContext);
         final Query expectedQuery;
-        if (queryShardContext.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
-            && queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) {
+        if (queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) {
             expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(query.fieldName()));
         } else {
             expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, query.fieldName())));
@@ -54,15 +54,16 @@ public void testMessage() {
     }

     public void testParseSkipSectionVersionNoFeature() throws Exception {
+        Version version = VersionUtils.randomVersion(random());
         parser = createParser(YamlXContent.yamlXContent,
-            "version: \" - 6.1.1\"\n" +
+            "version: \" - " + version + "\"\n" +
             "reason: Delete ignores the parent param"
         );

         SkipSection skipSection = SkipSection.parse(parser);
         assertThat(skipSection, notNullValue());
         assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion()));
-        assertThat(skipSection.getUpperVersion(), equalTo(Version.V_6_1_1));
+        assertThat(skipSection.getUpperVersion(), equalTo(version));
         assertThat(skipSection.getFeatures().size(), equalTo(0));
         assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param"));
     }
@@ -166,13 +166,11 @@ public void writeTo(StreamOutput streamOutput) throws IOException {
             streamOutput.writeBoolean(true); // has a license
             license.writeTo(streamOutput);
         }
-        if (streamOutput.getVersion().onOrAfter(Version.V_6_1_0)) {
-            if (trialVersion == null) {
-                streamOutput.writeBoolean(false);
-            } else {
-                streamOutput.writeBoolean(true);
-                Version.writeVersion(trialVersion, streamOutput);
-            }
+        if (trialVersion == null) {
+            streamOutput.writeBoolean(false);
+        } else {
+            streamOutput.writeBoolean(true);
+            Version.writeVersion(trialVersion, streamOutput);
         }
     }

@@ -182,11 +180,9 @@ public LicensesMetaData(StreamInput streamInput) throws IOException {
         } else {
             license = LICENSE_TOMBSTONE;
         }
-        if (streamInput.getVersion().onOrAfter(Version.V_6_1_0)) {
-            boolean hasExercisedTrial = streamInput.readBoolean();
-            if (hasExercisedTrial) {
-                this.trialVersion = Version.readVersion(streamInput);
-            }
+        boolean hasExercisedTrial = streamInput.readBoolean();
+        if (hasExercisedTrial) {
+            this.trialVersion = Version.readVersion(streamInput);
         }
     }

@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.xpack.core.ml.action;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.support.tasks.BaseTasksRequest;
@@ -91,9 +90,7 @@ public Request(StreamInput in) throws IOException {
             force = in.readBoolean();
             openJobIds = in.readStringArray();
             local = in.readBoolean();
-            if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
-                allowNoJobs = in.readBoolean();
-            }
+            allowNoJobs = in.readBoolean();
         }

         @Override
@@ -104,9 +101,7 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeBoolean(force);
             out.writeStringArray(openJobIds);
             out.writeBoolean(local);
-            if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
-                out.writeBoolean(allowNoJobs);
-            }
+            out.writeBoolean(allowNoJobs);
         }

         public Request(String jobId) {