Upgrade for ES0.90.4 #5

Closed
wants to merge 1 commit into from
pom.xml (2 changes: 1 addition & 1 deletion)
@@ -45,7 +45,7 @@
</parent>

<properties>
<elasticsearch.version>0.90.3</elasticsearch.version>
<elasticsearch.version>0.90.4</elasticsearch.version>
</properties>

<dependencies>
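The only change in the pom is this property bump; the dependency entries themselves are collapsed in this diff and are otherwise unaffected. For orientation, a hypothetical entry that would pick up the new version through property interpolation (the coordinates are an assumption, not copied from this pom):

    <dependency>
        <groupId>org.elasticsearch</groupId>
        <artifactId>elasticsearch</artifactId>
        <version>${elasticsearch.version}</version>
    </dependency>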
IndexUpdateByQueryRequest.java
@@ -19,29 +19,18 @@

package org.elasticsearch.action.updatebyquery;

import org.elasticsearch.common.collect.Sets;
import org.elasticsearch.ElasticSearchGenerationException;
import static org.elasticsearch.action.ValidateActions.addValidationError;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.support.replication.IndexReplicationOperationRequest;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.Set;

import static org.elasticsearch.common.collect.Maps.newHashMap;
import static org.elasticsearch.action.ValidateActions.addValidationError;

/**
* Represents an update by query request targeted for a specific index.
@@ -51,7 +40,7 @@ public class IndexUpdateByQueryRequest extends IndexReplicationOperationRequest
private String[] types = new String[0];
private BulkResponseOption bulkResponseOption;
private String[] filteringAliases = new String[0];
private Set<String> routing = Sets.newHashSet();
private Set<String> routing = new HashSet<String>();

private BytesReference source;
private boolean sourceUnsafe;
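This field is the first instance of the pattern that recurs throughout the PR: the repackaged collection helpers in org.elasticsearch.common.collect (Sets, Maps, Lists), which are presumably no longer usable against 0.90.4, are swapped for plain JDK collections. A minimal sketch of the equivalences, written with explicit type arguments so the replacements stay warning-free on Java 6:

    Set<String> routing = new HashSet<String>();                      // was Sets.newHashSet()
    Map<String, Object> params = new HashMap<String, Object>();       // was Maps.newHashMap()
    List<ShardRouting> primaryShards = new ArrayList<ShardRouting>(); // was Lists.newArrayList()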
@@ -119,10 +108,10 @@ public ActionRequestValidationException validate() {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
types = in.readStringArray();
bulkResponseOption = BulkResponseOption.fromId(in.readByte());
filteringAliases = in.readStringArray();
routing = Sets.newHashSet(in.readStringArray());
routing = new HashSet<String>(Arrays.asList(in.readStringArray()));
source = in.readBytesReference();
sourceUnsafe = false;
}
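Note that readFrom must consume the stream in exactly the order the matching writeTo produced it: the types, the bulk response option byte, the filtering aliases, the routing values, then the source. Collapsing the three readStringArray() calls into a single shared read would leave every later field deserializing from the wrong position. The writeTo side is not shown in this diff; a sketch of what it presumably looks like, for orientation only (the id() accessor on BulkResponseOption is an assumption):

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeStringArray(types);
        out.writeByte(bulkResponseOption.id());   // assumed accessor; mirrored by fromId(in.readByte())
        out.writeStringArray(filteringAliases);
        out.writeStringArray(routing.toArray(new String[routing.size()]));
        out.writeBytesReference(source);
    }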
IndexUpdateByQueryResponse.java
@@ -19,15 +19,15 @@

package org.elasticsearch.action.updatebyquery;

import org.elasticsearch.common.collect.Maps;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.Map;

/**
* Encapsulates the result of an update by query request by bundling all bulk item responses.
* Each bulk item response holds the result of an individual update.
@@ -38,8 +38,8 @@ public class IndexUpdateByQueryResponse extends ActionResponse {
private String index;
private long totalHits;
private long updated;
private Map<Integer, BulkItemResponse[]> responsesByShard = Maps.newHashMap();
private Map<Integer, String> failuresByShard = Maps.newHashMap();
private Map<Integer, BulkItemResponse[]> responsesByShard = new HashMap<Integer, BulkItemResponse[]>();
private Map<Integer, String> failuresByShard = new HashMap<Integer, String>();

IndexUpdateByQueryResponse() {
}
@@ -19,20 +19,17 @@

package org.elasticsearch.action.updatebyquery;

import static org.elasticsearch.action.ValidateActions.addValidationError;

import java.io.IOException;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.replication.ShardReplicationOperationRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.common.collect.Maps.newHashMap;
import static org.elasticsearch.action.ValidateActions.addValidationError;

/**
* Represents a shard update by query request that will be performed on the targeted shard.
*/
@@ -19,15 +19,20 @@

package org.elasticsearch.action.updatebyquery;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.collect.Maps;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.FixedBitSet;
@@ -36,7 +41,13 @@
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.bulk.*;
import org.elasticsearch.action.bulk.BulkItemRequest;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.bulk.BulkShardResponse;
import org.elasticsearch.action.bulk.PublicBulkShardRequest;
import org.elasticsearch.action.bulk.PublicBulkShardResponse;
import org.elasticsearch.action.bulk.TransportShardBulkAction;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.TransportAction;
@@ -59,7 +70,12 @@
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.ShardId;
@@ -74,9 +90,6 @@
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.*;

/**
* Transport action that translates the shard update by query request into a bulk request. All actions are performed
* locally and the bulk requests are then forwarded to the replica shards (this logic is done inside
@@ -152,7 +165,7 @@ private void doExecuteInternal(ShardUpdateByQueryRequest request, ActionListener
SearchContext searchContext = new SearchContext(
0,
shardSearchRequest,
null, indexShard.searcher(), indexService, indexShard,
null, indexShard.acquireSearcher(), indexService, indexShard,
scriptService,
cacheRecycler
);
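0.90.4 replaces IndexShard.searcher() with acquireSearcher(), which hands out a reference-counted searcher that the caller is responsible for releasing; here the SearchContext receives it and presumably releases it when the context itself is cleaned up. A minimal sketch of the acquire/release contract when no SearchContext is involved (the reader() and release() calls are assumptions about the 0.90.x Engine.Searcher API):

    Engine.Searcher searcher = indexShard.acquireSearcher();
    try {
        IndexReader reader = searcher.reader();  // point-in-time view of the shard
        // run the update-by-query search against this reader
    } finally {
        searcher.release();                      // hand the reference back so segments can be freed
    }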
@@ -191,7 +204,7 @@ private UpdateByQueryContext parseRequestSource(IndexService indexService, Shard
ParsedQuery parsedQuery = null;
String script = null;
String scriptLang = null;
Map<String, Object> params = Maps.newHashMap();
Map<String, Object> params = new HashMap<String, Object>();
try {
XContentParser parser = XContentHelper.createParser(request.source());
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
@@ -19,7 +19,13 @@

package org.elasticsearch.action.updatebyquery;

import org.elasticsearch.common.collect.Lists;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.UnavailableShardsException;
@@ -43,13 +49,11 @@
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;
import org.elasticsearch.transport.BaseTransportRequestHandler;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportService;

/**
* Delegates a {@link IndexUpdateByQueryRequest} to the primary shards of the index this request is targeted to.
@@ -131,8 +135,8 @@ public void onFailure(Throwable e) {
private void finishHim() {
long tookInMillis = System.currentTimeMillis() - startTime;
UpdateByQueryResponse response = new UpdateByQueryResponse(tookInMillis);
List<IndexUpdateByQueryResponse> indexResponses = Lists.newArrayList();
List<String> indexFailures = Lists.newArrayList();
List<IndexUpdateByQueryResponse> indexResponses = new ArrayList<IndexUpdateByQueryResponse>();
List<String> indexFailures = new ArrayList<String>();
for (int i = 0; i < expectedNumberOfResponses; i++) {
IndexUpdateByQueryResponse indexResponse = successFullIndexResponses.get(i);
if (indexResponse != null) {
@@ -215,7 +219,7 @@ boolean startExecution(boolean fromClusterEvent) {
return false;
}

List<ShardRouting> primaryShards = Lists.newArrayList();
List<ShardRouting> primaryShards = new ArrayList<ShardRouting>();
GroupShardsIterator groupShardsIterator =
clusterService.operationRouting().deleteByQueryShards(state, request.index(), request.routing());
for (ShardIterator shardIt : groupShardsIterator) {
UpdateByQuerySourceBuilder.java
@@ -19,6 +19,10 @@

package org.elasticsearch.action.updatebyquery;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -27,11 +31,6 @@
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.common.collect.Maps.newHashMap;

/**
* Source builder of the script, lang, params and query for an update by query request.
*/
@@ -41,7 +40,7 @@ public class UpdateByQuerySourceBuilder implements ToXContent {
private BytesReference queryBinary;
private String script;
private String scriptLang;
private Map<String, Object> scriptParams = newHashMap();
private Map<String, Object> scriptParams = new HashMap<String, Object>();

public UpdateByQuerySourceBuilder query(QueryBuilder query) {
this.queryBuilder = query;
RestUpdateByQueryAction.java
@@ -19,32 +19,40 @@

package org.elasticsearch.rest.action.updatebyquery;

import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.OK;

import java.io.IOException;
import java.util.Map;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.action.updatebyquery.*;
import org.elasticsearch.action.updatebyquery.BulkResponseOption;
import org.elasticsearch.action.updatebyquery.IndexUpdateByQueryResponse;
import org.elasticsearch.action.updatebyquery.UpdateByQueryRequest;
import org.elasticsearch.action.updatebyquery.UpdateByQueryResponse;
import org.elasticsearch.action.updatebyquery.UpdateByQuerySourceBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.UpdateByQueryClient;
import org.elasticsearch.client.UpdateByQueryClientWrapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.XContentRestResponse;
import org.elasticsearch.rest.XContentThrowableRestResponse;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestXContentBuilder;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.OK;
import static org.elasticsearch.rest.action.support.RestActions.splitIndices;
import static org.elasticsearch.rest.action.support.RestActions.splitTypes;

/**
* Rest handler for update by query requests.
*/
@@ -62,8 +70,8 @@ public RestUpdateByQueryAction(Settings settings, Client client, RestController

public void handleRequest(final RestRequest request, final RestChannel channel) {
UpdateByQueryRequest udqRequest = new UpdateByQueryRequest(
splitIndices(request.param("index")),
splitTypes(request.param("type"))
Strings.splitStringByCommaToArray(request.param("index")),
Strings.splitStringByCommaToArray(request.param("type"))
);
udqRequest.listenerThreaded(false);
String replicationType = request.param("replication");
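RestActions.splitIndices and splitTypes are presumably gone in 0.90.4, and Strings.splitStringByCommaToArray is used as the drop-in replacement for turning the comma-separated index and type parameters into arrays. A small usage sketch (the behaviour for a missing parameter is an assumption about the helper, not something this diff shows):

    String[] indices = Strings.splitStringByCommaToArray("logs-2013,metrics-2013");
    // expected: {"logs-2013", "metrics-2013"}
    String[] types = Strings.splitStringByCommaToArray(request.param("type"));
    // presumably an empty array when the "type" parameter is absent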