
Commit 547de71

Revert "Integrates soft-deletes into Elasticsearch (#33222)"
Revert to correct co-author tags. This reverts commit 6dd0aa5.
1 parent 273c82d · commit 547de71

File tree

63 files changed: +499, -3432 lines


modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java

Lines changed: 1 addition & 7 deletions
@@ -77,7 +77,6 @@
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -88,7 +87,6 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
@@ -1111,11 +1109,7 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd
     }

     private void addQuery(Query query, List<ParseContext.Document> docs) {
-        IndexMetaData build = IndexMetaData.builder("")
-            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
-            .numberOfShards(1).numberOfReplicas(0).build();
-        IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
             mapperService.documentMapperParser(), documentMapper, null, null);
         fieldMapper.processQuery(query, parseContext);
         ParseContext.Document queryDocument = parseContext.doc();
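Note: the reverted change had these tests construct a real IndexSettings (via IndexMetaData) to feed ParseContext.InternalParseContext; the revert returns to the Settings.EMPTY overload. The same substitution repeats throughout PercolatorFieldMapperTests.java below. Reassembled from the hunk above, the post-revert helper reads roughly as follows (mapperService, documentMapper and fieldMapper are fields of the surrounding test class; only the lines visible in the diff are shown):

// Post-revert shape of the test helper, reassembled from the hunk above.
private void addQuery(Query query, List<ParseContext.Document> docs) {
    ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
        mapperService.documentMapperParser(), documentMapper, null, null);
    fieldMapper.processQuery(query, parseContext);
    ParseContext.Document queryDocument = parseContext.doc();
    // ... remainder of the helper is not part of this diff
}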

modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java

Lines changed: 6 additions & 24 deletions
@@ -42,7 +42,6 @@
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.support.PlainActionFuture;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -59,7 +58,6 @@
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -184,11 +182,7 @@ public void testExtractTerms() throws Exception {

         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        IndexMetaData build = IndexMetaData.builder("")
-            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
-            .numberOfShards(1).numberOfReplicas(0).build();
-        IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
             mapperService.documentMapperParser(), documentMapper, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         ParseContext.Document document = parseContext.doc();
@@ -210,7 +204,7 @@ public void testExtractTerms() throws Exception {
         bq.add(termQuery1, Occur.MUST);
         bq.add(termQuery2, Occur.MUST);

-        parseContext = new ParseContext.InternalParseContext(settings, mapperService.documentMapperParser(),
+        parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(),
             documentMapper, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         document = parseContext.doc();
@@ -238,12 +232,8 @@ public void testExtractRanges() throws Exception {
         bq.add(rangeQuery2, Occur.MUST);

         DocumentMapper documentMapper = mapperService.documentMapper("doc");
-        IndexMetaData build = IndexMetaData.builder("")
-            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
-            .numberOfShards(1).numberOfReplicas(0).build();
-        IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
             mapperService.documentMapperParser(), documentMapper, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         ParseContext.Document document = parseContext.doc();
@@ -269,7 +259,7 @@ public void testExtractRanges() throws Exception {
             .rangeQuery(15, 20, true, true, null, null, null, null);
         bq.add(rangeQuery2, Occur.MUST);

-        parseContext = new ParseContext.InternalParseContext(settings,
+        parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
             mapperService.documentMapperParser(), documentMapper, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         document = parseContext.doc();
@@ -293,11 +283,7 @@ public void testExtractTermsAndRanges_failed() throws Exception {
         TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true);
         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        IndexMetaData build = IndexMetaData.builder("")
-            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
-            .numberOfShards(1).numberOfReplicas(0).build();
-        IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
             mapperService.documentMapperParser(), documentMapper, null, null);
         fieldMapper.processQuery(query, parseContext);
         ParseContext.Document document = parseContext.doc();
@@ -312,11 +298,7 @@ public void testExtractTermsAndRanges_partial() throws Exception {
         PhraseQuery phraseQuery = new PhraseQuery("field", "term");
         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        IndexMetaData build = IndexMetaData.builder("")
-            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
-            .numberOfShards(1).numberOfReplicas(0).build();
-        IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
             mapperService.documentMapperParser(), documentMapper, null, null);
         fieldMapper.processQuery(phraseQuery, parseContext);
         ParseContext.Document document = parseContext.doc();

server/src/main/java/org/elasticsearch/common/lucene/Lucene.java

Lines changed: 1 addition & 85 deletions
@@ -27,10 +27,8 @@
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.document.LatLonDocValuesField;
-import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.FilterLeafReader;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexFileNames;
@@ -98,8 +96,6 @@ public class Lucene {
         assert annotation == null : "DocValuesFormat " + LATEST_DOC_VALUES_FORMAT + " is deprecated" ;
     }

-    public static final String SOFT_DELETES_FIELD = "__soft_deletes";
-
     public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer());
     public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer());

@@ -144,7 +140,7 @@ public static Iterable<String> files(SegmentInfos infos) throws IOException {
     public static int getNumDocs(SegmentInfos info) {
         int numDocs = 0;
         for (SegmentCommitInfo si : info) {
-            numDocs += si.info.maxDoc() - si.getDelCount() - si.getSoftDelCount();
+            numDocs += si.info.maxDoc() - si.getDelCount();
         }
         return numDocs;
     }
@@ -201,7 +197,6 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Direc
         }
         final CommitPoint cp = new CommitPoint(si, directory);
         try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
-            .setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
             .setIndexCommit(cp)
             .setCommitOnClose(false)
             .setMergePolicy(NoMergePolicy.INSTANCE)
@@ -225,7 +220,6 @@ public static void cleanLuceneIndex(Directory directory) throws IOException {
             }
         }
         try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
-            .setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
             .setMergePolicy(NoMergePolicy.INSTANCE) // no merges
             .setCommitOnClose(false) // no commits
             .setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
@@ -835,82 +829,4 @@ public int length() {
             }
         };
     }
-
-    /**
-     * Wraps a directory reader to make all documents live except those were rolled back
-     * or hard-deleted due to non-aborting exceptions during indexing.
-     * The wrapped reader can be used to query all documents.
-     *
-     * @param in the input directory reader
-     * @return the wrapped reader
-     */
-    public static DirectoryReader wrapAllDocsLive(DirectoryReader in) throws IOException {
-        return new DirectoryReaderWithAllLiveDocs(in);
-    }
-
-    private static final class DirectoryReaderWithAllLiveDocs extends FilterDirectoryReader {
-        static final class LeafReaderWithLiveDocs extends FilterLeafReader {
-            final Bits liveDocs;
-            final int numDocs;
-            LeafReaderWithLiveDocs(LeafReader in, Bits liveDocs, int numDocs) {
-                super(in);
-                this.liveDocs = liveDocs;
-                this.numDocs = numDocs;
-            }
-            @Override
-            public Bits getLiveDocs() {
-                return liveDocs;
-            }
-            @Override
-            public int numDocs() {
-                return numDocs;
-            }
-            @Override
-            public CacheHelper getCoreCacheHelper() {
-                return in.getCoreCacheHelper();
-            }
-            @Override
-            public CacheHelper getReaderCacheHelper() {
-                return null; // Modifying liveDocs
-            }
-        }
-
-        DirectoryReaderWithAllLiveDocs(DirectoryReader in) throws IOException {
-            super(in, new SubReaderWrapper() {
-                @Override
-                public LeafReader wrap(LeafReader leaf) {
-                    SegmentReader segmentReader = segmentReader(leaf);
-                    Bits hardLiveDocs = segmentReader.getHardLiveDocs();
-                    if (hardLiveDocs == null) {
-                        return new LeafReaderWithLiveDocs(leaf, null, leaf.maxDoc());
-                    }
-                    // TODO: Can we avoid calculate numDocs by using SegmentReader#getSegmentInfo with LUCENE-8458?
-                    int numDocs = 0;
-                    for (int i = 0; i < hardLiveDocs.length(); i++) {
-                        if (hardLiveDocs.get(i)) {
-                            numDocs++;
-                        }
-                    }
-                    return new LeafReaderWithLiveDocs(segmentReader, hardLiveDocs, numDocs);
-                }
-            });
-        }
-
-        @Override
-        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-            return wrapAllDocsLive(in);
-        }
-
-        @Override
-        public CacheHelper getReaderCacheHelper() {
-            return null; // Modifying liveDocs
-        }
-    }
-
-    /**
-     * Returns a numeric docvalues which can be used to soft-delete documents.
-     */
-    public static NumericDocValuesField newSoftDeletesField() {
-        return new NumericDocValuesField(SOFT_DELETES_FIELD, 1);
-    }
 }
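For context on what is being backed out here: the removed code relied on Lucene's built-in soft-deletes support, where a "delete" is recorded by tagging the old document with a numeric doc-values field (the reverted constant named it __soft_deletes) instead of hard-deleting it, and the IndexWriter is told about that field via IndexWriterConfig#setSoftDeletesField. A minimal, self-contained sketch of that plain-Lucene pattern (illustration only, not the Elasticsearch integration this commit removes; RAMDirectory and the "_id" field name are chosen for the example):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;

public class SoftDeletesSketch {
    public static void main(String[] args) throws Exception {
        String softDeletesField = "__soft_deletes"; // same field name the reverted constant used
        try (RAMDirectory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir,
                 new IndexWriterConfig(new StandardAnalyzer()).setSoftDeletesField(softDeletesField))) {

            // Index the first version of a document.
            Document v1 = new Document();
            v1.add(new StringField("_id", "1", Field.Store.NO));
            writer.addDocument(v1);

            // "Update" it: the old version is soft-deleted by attaching the doc-values
            // field to it rather than being hard-deleted, so it stays in the segment.
            Document v2 = new Document();
            v2.add(new StringField("_id", "1", Field.Store.NO));
            writer.softUpdateDocument(new Term("_id", "1"), v2,
                new NumericDocValuesField(softDeletesField, 1));

            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                // numDocs() hides soft-deleted documents, maxDoc() still counts them:
                // prints "1 live of 2 total".
                System.out.println(reader.numDocs() + " live of " + reader.maxDoc() + " total");
            }
        }
    }
}

The Elasticsearch integration removed by this commit layered more on top of this (retention of soft-deleted operations, the wrapAllDocsLive reader, per-index settings), but the field-plus-IndexWriterConfig wiring above is the core mechanism.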

server/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java

Lines changed: 6 additions & 15 deletions
@@ -28,7 +28,6 @@
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
 import org.elasticsearch.index.mapper.SeqNoFieldMapper;
@@ -67,22 +66,15 @@ final class PerThreadIDVersionAndSeqNoLookup {
      */
     PerThreadIDVersionAndSeqNoLookup(LeafReader reader, String uidField) throws IOException {
         this.uidField = uidField;
-        final Terms terms = reader.terms(uidField);
+        Terms terms = reader.terms(uidField);
         if (terms == null) {
-            // If a segment contains only no-ops, it does not have _uid but has both _soft_deletes and _tombstone fields.
-            final NumericDocValues softDeletesDV = reader.getNumericDocValues(Lucene.SOFT_DELETES_FIELD);
-            final NumericDocValues tombstoneDV = reader.getNumericDocValues(SeqNoFieldMapper.TOMBSTONE_NAME);
-            if (softDeletesDV == null || tombstoneDV == null) {
-                throw new IllegalArgumentException("reader does not have _uid terms but not a no-op segment; " +
-                    "_soft_deletes [" + softDeletesDV + "], _tombstone [" + tombstoneDV + "]");
-            }
-            termsEnum = null;
-        } else {
-            termsEnum = terms.iterator();
+            throw new IllegalArgumentException("reader misses the [" + uidField + "] field");
         }
+        termsEnum = terms.iterator();
         if (reader.getNumericDocValues(VersionFieldMapper.NAME) == null) {
-            throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field; _uid terms [" + terms + "]");
+            throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field");
         }
+
         Object readerKey = null;
         assert (readerKey = reader.getCoreCacheHelper().getKey()) != null;
         this.readerKey = readerKey;
@@ -119,8 +111,7 @@ public DocIdAndVersion lookupVersion(BytesRef id, LeafReaderContext context)
      * {@link DocIdSetIterator#NO_MORE_DOCS} is returned if not found
      * */
     private int getDocID(BytesRef id, Bits liveDocs) throws IOException {
-        // termsEnum can possibly be null here if this leaf contains only no-ops.
-        if (termsEnum != null && termsEnum.seekExact(id)) {
+        if (termsEnum.seekExact(id)) {
             int docID = DocIdSetIterator.NO_MORE_DOCS;
             // there may be more than one matching docID, in the case of nested docs, so we want the last one:
             docsEnum = termsEnum.postings(docsEnum, 0);
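With the revert, the lookup is strict again: a leaf without the uid field is an error, and termsEnum can never be null in getDocID. Reassembled from the hunk above, the post-revert constructor reads roughly like this (termsEnum, readerKey and the other fields are declared elsewhere in the class):

PerThreadIDVersionAndSeqNoLookup(LeafReader reader, String uidField) throws IOException {
    this.uidField = uidField;
    Terms terms = reader.terms(uidField);
    if (terms == null) {
        // A segment without the uid field is always an error again; the no-op-only
        // segment case existed only with soft deletes.
        throw new IllegalArgumentException("reader misses the [" + uidField + "] field");
    }
    termsEnum = terms.iterator();
    if (reader.getNumericDocValues(VersionFieldMapper.NAME) == null) {
        throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field");
    }

    Object readerKey = null;
    assert (readerKey = reader.getCoreCacheHelper().getKey()) != null;
    this.readerKey = readerKey;
}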

server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java

Lines changed: 0 additions & 2 deletions
@@ -129,8 +129,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
         IndexSettings.MAX_REGEX_LENGTH_SETTING,
         ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING,
         IndexSettings.INDEX_GC_DELETES_SETTING,
-        IndexSettings.INDEX_SOFT_DELETES_SETTING,
-        IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING,
         IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING,
         UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
         EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING,
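The two entries dropped from this registry were the per-index soft-deletes switches. For illustration only, this is roughly how index-scoped settings of that shape are declared with Elasticsearch's Setting API; the keys mirror the reverted setting names, but the defaults, bounds and properties below are assumptions, not the removed code:

// Hypothetical sketch of index-scoped setting declarations, for illustration.
// Setting.boolSetting/longSetting and Property.IndexScope/Dynamic/Final are existing
// parts of the Settings API; the defaults and properties here are assumed values.
public static final Setting<Boolean> INDEX_SOFT_DELETES_SETTING =
    Setting.boolSetting("index.soft_deletes.enabled", false,
        Setting.Property.IndexScope, Setting.Property.Final);

public static final Setting<Long> INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING =
    Setting.longSetting("index.soft_deletes.retention.operations", 0, 0,
        Setting.Property.IndexScope, Setting.Property.Dynamic);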
