Fix AnalyzeAction response serialization #44284

Merged: 3 commits, Jul 12, 2019
==== AnalyzeResponseTests.java (high-level REST client) ====
@@ -22,13 +22,10 @@
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.RandomObjects;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AnalyzeResponseTests extends AbstractResponseTestCase<AnalyzeAction.Response, AnalyzeResponse> {

@@ -37,7 +34,7 @@ protected AnalyzeAction.Response createServerTestInstance() {
int tokenCount = randomIntBetween(1, 30);
AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount];
for (int i = 0; i < tokenCount; i++) {
-                tokens[i] = randomToken();
+                tokens[i] = RandomObjects.randomToken(random());
}
if (randomBoolean()) {
AnalyzeAction.CharFilteredText[] charfilters = null;
@@ -62,45 +59,6 @@ protected AnalyzeAction.Response createServerTestInstance() {
return new AnalyzeAction.Response(Arrays.asList(tokens), null);
}

private AnalyzeAction.AnalyzeToken randomToken() {
String token = randomAlphaOfLengthBetween(1, 20);
int position = randomIntBetween(0, 1000);
int startOffset = randomIntBetween(0, 1000);
int endOffset = randomIntBetween(0, 1000);
int posLength = randomIntBetween(1, 5);
String type = randomAlphaOfLengthBetween(1, 20);
Map<String, Object> extras = new HashMap<>();
if (randomBoolean()) {
int entryCount = randomInt(6);
for (int i = 0; i < entryCount; i++) {
switch (randomInt(6)) {
case 0:
case 1:
case 2:
case 3:
String key = randomAlphaOfLength(5);
String value = randomAlphaOfLength(10);
extras.put(key, value);
break;
case 4:
String objkey = randomAlphaOfLength(5);
Map<String, String> obj = new HashMap<>();
obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
extras.put(objkey, obj);
break;
case 5:
String listkey = randomAlphaOfLength(5);
List<String> list = new ArrayList<>();
list.add(randomAlphaOfLength(4));
list.add(randomAlphaOfLength(6));
extras.put(listkey, list);
break;
}
}
}
return new AnalyzeAction.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
}

@Override
protected AnalyzeResponse doParseToClientInstance(XContentParser parser) throws IOException {
return AnalyzeResponse.fromXContent(parser);
==== AnalyzeAction.java (org.elasticsearch.action.admin.indices.analyze) ====
@@ -19,9 +19,10 @@

package org.elasticsearch.action.admin.indices.analyze;

-import org.elasticsearch.action.ActionType;
+import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@@ -292,22 +293,29 @@ public static class Response extends ActionResponse implements ToXContentObject
private final List<AnalyzeToken> tokens;

public Response(List<AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
if (tokens == null && detail == null) {
throw new IllegalArgumentException("Neither token nor detail set on AnalysisAction.Response");
}
this.tokens = tokens;
this.detail = detail;
}

public Response(StreamInput in) throws IOException {
super.readFrom(in);
-            int size = in.readVInt();
-            if (size > 0) {
-                tokens = new ArrayList<>(size);
-                for (int i = 0; i < size; i++) {
-                    tokens.add(new AnalyzeToken(in));
-                }
-            }
-            else {
-                tokens = null;
-            }
+            if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
Review comment from the PR author (Member):
I'd like to backport this to 7_3_0 at least, if possible. Not sure if it's feasible for 7_2_1; it might complicate the whole backport story a bit.
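For reference, the new wire format is gated on the stream version, so a backport would mostly amount to lowering the gate. Below is a minimal sketch of the read side under that assumption, using Version.V_7_3_0 as a hypothetical backport gate (the actual constant depends on which branch the change lands in); it mirrors the code in this diff rather than anything merged:

// Sketch only: assumes a 7.3 backport, so the wire-compatibility check compares
// against Version.V_7_3_0 (hypothetical here) instead of Version.V_8_0_0.
public Response(StreamInput in) throws IOException {
    super.readFrom(in);
    if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
        // New encoding: an optional array, so a null token list survives the round trip.
        AnalyzeToken[] tokenArray = in.readOptionalArray(AnalyzeToken::new, AnalyzeToken[]::new);
        tokens = tokenArray != null ? Arrays.asList(tokenArray) : null;
    } else {
        // Legacy encoding: a VInt size prefix, where 0 conflates a null list with an empty one.
        int size = in.readVInt();
        if (size > 0) {
            tokens = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                tokens.add(new AnalyzeToken(in));
            }
        } else {
            tokens = null;
        }
    }
    detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
}

The write side would need the same gate so that nodes in a mixed-version cluster keep reading and writing symmetric formats.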

+                AnalyzeToken[] tokenArray = in.readOptionalArray(AnalyzeToken::new, AnalyzeToken[]::new);
+                tokens = tokenArray != null ? Arrays.asList(tokenArray) : null;
+            } else {
+                int size = in.readVInt();
+                if (size > 0) {
+                    tokens = new ArrayList<>(size);
+                    for (int i = 0; i < size; i++) {
+                        tokens.add(new AnalyzeToken(in));
+                    }
+                } else {
+                    tokens = null;
+                }
+            }
detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
}

@@ -346,21 +354,33 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws

@Override
public void writeTo(StreamOutput out) throws IOException {
-            if (tokens != null) {
-                out.writeVInt(tokens.size());
-                for (AnalyzeToken token : tokens) {
-                    token.writeTo(out);
-                }
-            } else {
-                out.writeVInt(0);
-            }
+            if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+                AnalyzeToken[] tokenArray = null;
+                if (tokens != null) {
+                    tokenArray = tokens.toArray(new AnalyzeToken[0]);
+                }
+                out.writeOptionalArray(tokenArray);
+            } else {
+                if (tokens != null) {
+                    out.writeVInt(tokens.size());
+                    for (AnalyzeToken token : tokens) {
+                        token.writeTo(out);
+                    }
+                } else {
+                    out.writeVInt(0);
+                }
+            }
out.writeOptionalWriteable(detail);
}

@Override
public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
Response that = (Response) o;
return Objects.equals(detail, that.detail) &&
Objects.equals(tokens, that.tokens);
@@ -401,8 +421,12 @@ public static class AnalyzeToken implements Writeable, ToXContentObject {

@Override
public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
AnalyzeToken that = (AnalyzeToken) o;
return startOffset == that.startOffset &&
endOffset == that.endOffset &&
@@ -582,8 +606,12 @@ public AnalyzeTokenList[] tokenfilters() {

@Override
public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
return customAnalyzer == that.customAnalyzer &&
Objects.equals(analyzer, that.analyzer) &&
@@ -669,8 +697,12 @@ public static class AnalyzeTokenList implements Writeable, ToXContentObject {

@Override
public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
AnalyzeTokenList that = (AnalyzeTokenList) o;
return Objects.equals(name, that.name) &&
Arrays.equals(tokens, that.tokens);
@@ -690,16 +722,19 @@ public AnalyzeTokenList(String name, AnalyzeToken[] tokens) {

AnalyzeTokenList(StreamInput in) throws IOException {
name = in.readString();
-            int size = in.readVInt();
-            if (size > 0) {
-                tokens = new AnalyzeToken[size];
-                for (int i = 0; i < size; i++) {
-                    tokens[i] = new AnalyzeToken(in);
-                }
-            }
-            else {
-                tokens = null;
-            }
+            if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
+                tokens = in.readOptionalArray(AnalyzeToken::new, AnalyzeToken[]::new);
+            } else {
+                int size = in.readVInt();
+                if (size > 0) {
+                    tokens = new AnalyzeToken[size];
+                    for (int i = 0; i < size; i++) {
+                        tokens[i] = new AnalyzeToken(in);
+                    }
+                } else {
+                    tokens = null;
+                }
+            }
}

public String getName() {
@@ -732,13 +767,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
-            if (tokens != null) {
-                out.writeVInt(tokens.length);
-                for (AnalyzeToken token : tokens) {
-                    token.writeTo(out);
-                }
-            } else {
-                out.writeVInt(0);
-            }
+            if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+                out.writeOptionalArray(tokens);
+            } else {
+                if (tokens != null) {
+                    out.writeVInt(tokens.length);
+                    for (AnalyzeToken token : tokens) {
+                        token.writeTo(out);
+                    }
+                } else {
+                    out.writeVInt(0);
+                }
+            }
}
@@ -789,8 +828,12 @@ public void writeTo(StreamOutput out) throws IOException {

@Override
public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
CharFilteredText that = (CharFilteredText) o;
return Objects.equals(name, that.name) &&
Arrays.equals(texts, that.texts);
==== AnalyzeResponseTests.java (org.elasticsearch.action.admin.indices.analyze) ====
@@ -19,20 +19,25 @@

package org.elasticsearch.action.admin.indices.analyze;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.AnalyzeToken;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.test.RandomObjects;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;

-public class AnalyzeResponseTests extends ESTestCase {
+public class AnalyzeResponseTests extends AbstractWireSerializingTestCase<AnalyzeAction.Response> {
Review comment (Contributor): 👍


@SuppressWarnings("unchecked")
public void testNullResponseToXContent() throws IOException {
@@ -59,6 +64,64 @@ public void testNullResponseToXContent() throws IOException {
assertThat(nullTokens.size(), equalTo(0));
assertThat(name, equalTo(nameValue));
}
}

public void testConstructorArgs() {
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new AnalyzeAction.Response(null, null));
assertEquals("Neither token nor detail set on AnalysisAction.Response", ex.getMessage());
}

@Override
protected AnalyzeAction.Response createTestInstance() {
int tokenCount = randomIntBetween(0, 30);
AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount];
for (int i = 0; i < tokenCount; i++) {
tokens[i] = RandomObjects.randomToken(random());
}
if (randomBoolean()) {
AnalyzeAction.CharFilteredText[] charfilters = null;
AnalyzeAction.AnalyzeTokenList[] tokenfilters = null;
if (randomBoolean()) {
charfilters = new AnalyzeAction.CharFilteredText[]{
new AnalyzeAction.CharFilteredText("my_charfilter", new String[]{"one two"})
};
}
if (randomBoolean()) {
tokenfilters = new AnalyzeAction.AnalyzeTokenList[]{
new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_1", tokens),
new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_2", tokens)
};
}
AnalyzeAction.DetailAnalyzeResponse dar = new AnalyzeAction.DetailAnalyzeResponse(
charfilters,
new AnalyzeAction.AnalyzeTokenList("my_tokenizer", tokens),
tokenfilters);
return new AnalyzeAction.Response(null, dar);
}
return new AnalyzeAction.Response(Arrays.asList(tokens), null);
}

/**
* Either add a token to the token list or change the detail's token list name
*/
@Override
protected AnalyzeAction.Response mutateInstance(AnalyzeAction.Response instance) throws IOException {
if (instance.getTokens() != null) {
List<AnalyzeToken> extendedList = new ArrayList<>(instance.getTokens());
extendedList.add(RandomObjects.randomToken(random()));
return new AnalyzeAction.Response(extendedList, null);
} else {
AnalyzeToken[] tokens = instance.detail().tokenizer().getTokens();
return new AnalyzeAction.Response(null, new AnalyzeAction.DetailAnalyzeResponse(
instance.detail().charfilters(),
new AnalyzeAction.AnalyzeTokenList("my_other_tokenizer", tokens),
instance.detail().tokenfilters()));
}
}

@Override
protected Reader<AnalyzeAction.Response> instanceReader() {
return AnalyzeAction.Response::new;
}

}
==== AnalyzeActionIT.java ====
@@ -24,6 +24,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.MockKeywordPlugin;
import org.hamcrest.core.IsNull;

@@ -41,6 +42,7 @@
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;

@ClusterScope(minNumDataNodes = 2)
public class AnalyzeActionIT extends ESIntegTestCase {

@Override
@@ -387,5 +389,16 @@ public void testAnalyzeNormalizedKeywordField() throws IOException {
assertThat(token.getPositionLength(), equalTo(1));
}

/**
* Input text that doesn't produce tokens should return an empty token list
*/
public void testZeroTokenAnalysis() throws IOException {
assertAcked(prepareCreate("test"));
ensureGreen("test");

AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("test", ".").get();
assertNotNull(analyzeResponse.getTokens());
assertThat(analyzeResponse.getTokens().size(), equalTo(0));
}

}