Commit 3fd142f

Remove hppc from some "common" classes (#85957)
This commit removes hppc from two trivial cases under o.e.common. Relates #84735
1 parent aafd2f9 · commit 3fd142f

2 files changed: +12 −15 lines


server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java

Lines changed: 5 additions & 5 deletions
@@ -8,8 +8,6 @@
 
 package org.elasticsearch.common.lucene.search;
 
-import com.carrotsearch.hppc.ObjectHashSet;
-
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
@@ -27,10 +25,12 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Objects;
+import java.util.Set;
 
 public class MultiPhrasePrefixQuery extends Query {
 
@@ -146,7 +146,7 @@ public Query rewrite(IndexReader reader) throws IOException {
         }
         Term[] suffixTerms = termArrays.get(sizeMinus1);
         int position = positions.get(sizeMinus1);
-        ObjectHashSet<Term> terms = new ObjectHashSet<>();
+        Set<Term> terms = new HashSet<>();
         for (Term term : suffixTerms) {
             getPrefixTerms(terms, term, reader);
             if (terms.size() > maxExpansions) {
@@ -169,11 +169,11 @@ public Query rewrite(IndexReader reader) throws IOException {
                 )
                 .build();
         }
-        query.add(terms.toArray(Term.class), position);
+        query.add(terms.toArray(new Term[0]), position);
         return query.build();
     }
 
-    private void getPrefixTerms(ObjectHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
+    private void getPrefixTerms(Set<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
         // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
         // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
         List<LeafReaderContext> leaves = reader.leaves();
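
As an aside, a minimal, self-contained sketch of the collection swap above (illustrative only, not part of the commit): hppc's ObjectHashSet exposes a toArray(Class) overload, whereas java.util.Set uses the standard toArray(T[]) overload, which is why the call site now passes new Term[0].

import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.index.Term;

class TermSetSketch {
    public static void main(String[] args) {
        // Plain java.util.HashSet replaces hppc's ObjectHashSet<Term>.
        Set<Term> terms = new HashSet<>();
        terms.add(new Term("body", "foo"));
        terms.add(new Term("body", "foobar"));

        // toArray(new Term[0]) lets the set allocate a correctly sized Term[],
        // standing in for hppc's toArray(Term.class).
        Term[] asArray = terms.toArray(new Term[0]);
        System.out.println(asArray.length); // 2
    }
}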

server/src/main/java/org/elasticsearch/common/transport/PortsRange.java

Lines changed: 7 additions & 10 deletions
@@ -8,8 +8,8 @@
 
 package org.elasticsearch.common.transport;
 
-import com.carrotsearch.hppc.IntArrayList;
-
+import java.util.ArrayList;
+import java.util.List;
 import java.util.StringTokenizer;
 
 public class PortsRange {
@@ -25,15 +25,12 @@ public String getPortRangeString() {
     }
 
     public int[] ports() throws NumberFormatException {
-        final IntArrayList ports = new IntArrayList();
-        iterate(new PortCallback() {
-            @Override
-            public boolean onPortNumber(int portNumber) {
-                ports.add(portNumber);
-                return false;
-            }
+        final List<Integer> ports = new ArrayList<>();
+        iterate(portNumber -> {
+            ports.add(portNumber);
+            return false;
         });
-        return ports.toArray();
+        return ports.stream().mapToInt(Integer::intValue).toArray();
     }
 
     public boolean iterate(PortCallback callback) throws NumberFormatException {
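
For reference, a self-contained sketch of the same pattern in isolation, assuming a single-method callback like the PortCallback implied by the removed anonymous class; the interface and range handling here are simplified stand-ins, not the actual PortsRange code.

import java.util.ArrayList;
import java.util.List;

class PortsSketch {
    // Simplified stand-in for the single-method callback shown in the diff;
    // being a single abstract method, it can be implemented with a lambda.
    interface PortCallback {
        boolean onPortNumber(int portNumber);
    }

    // Calls the callback for each port in an inclusive range; stops early if it returns true.
    static boolean iterate(int from, int to, PortCallback callback) {
        for (int port = from; port <= to; port++) {
            if (callback.onPortNumber(port)) {
                return true;
            }
        }
        return false;
    }

    static int[] ports(int from, int to) {
        // Collect into a boxed List<Integer>, as in the patched ports() method ...
        final List<Integer> ports = new ArrayList<>();
        iterate(from, to, portNumber -> {
            ports.add(portNumber);
            return false;
        });
        // ... then unbox into an int[] with a stream, replacing hppc's IntArrayList.toArray().
        return ports.stream().mapToInt(Integer::intValue).toArray();
    }

    public static void main(String[] args) {
        int[] result = ports(9300, 9305);
        System.out.println(result.length); // 6
    }
}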
