Remove readFrom from org.elasticsearch.search
Replace it with a constructor that takes a StreamInput, or with a static method. In one case (ValuesSourceType) we no longer need to serialize the data at all!

Relates to #17085
parent dd2184ab25
commit 476d57150a
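Before the diff itself, here is a minimal sketch of the pattern this change applies throughout org.elasticsearch.search: the old prototype-style readFrom(StreamInput) methods are replaced by a constructor that takes the stream (for classes) or by a static readFromStream factory (for enums). The names below (ShardTargetSketch, CollectionModeSketch) are illustrative only, and plain java.io.DataInput/DataOutput stand in for Elasticsearch's StreamInput/StreamOutput.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

// Class case: a constructor reads the object's state directly from the stream,
// so no prototype instance is needed to call readFrom on.
final class ShardTargetSketch {

    private final String nodeId;
    private final String index;

    ShardTargetSketch(String nodeId, String index) {
        this.nodeId = nodeId;
        this.index = index;
    }

    /** Read from a stream (replaces the old prototype.readFrom(in) style). */
    ShardTargetSketch(DataInput in) throws IOException {
        this.nodeId = in.readUTF();
        this.index = in.readUTF();
    }

    void writeTo(DataOutput out) throws IOException {
        out.writeUTF(nodeId);
        out.writeUTF(index);
    }
}

// Enum case: a static factory replaces the old instance-level readFrom(in).
enum CollectionModeSketch {
    DEPTH_FIRST,
    BREADTH_FIRST;

    static CollectionModeSketch readFromStream(DataInput in) throws IOException {
        int ordinal = in.readInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown CollectionModeSketch ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    void writeTo(DataOutput out) throws IOException {
        out.writeInt(ordinal());
    }
}

Deserialization at the call site then becomes new ShardTargetSketch(in) or CollectionModeSketch.readFromStream(in), with no prototype constant such as BREADTH_FIRST or TDIGEST needed just to read from the wire.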
@@ -776,7 +776,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafDocLookup.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafFieldsLookup.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]profile[/\\]ProfileResult.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]QueryPhase.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescorer.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]RescoreParseElement.java" checks="LineLength" />
@@ -100,11 +100,6 @@ public class SearchShardTarget implements Writeable<SearchShardTarget>, Comparab
         return i;
     }

-    @Override
-    public SearchShardTarget readFrom(StreamInput in) throws IOException {
-        return new SearchShardTarget(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         if (nodeId == null) {
@@ -139,8 +139,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
                 throw new ElasticsearchParseException("no [{}] found for value [{}]", KEY.getPreferredName(), value);
         }

-        @Override
-        public SubAggCollectionMode readFrom(StreamInput in) throws IOException {
+        public static SubAggCollectionMode readFromStream(StreamInput in) throws IOException {
            int ordinal = in.readVInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown SubAggCollectionMode ordinal [" + ordinal + "]");
@@ -49,19 +49,13 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
      */
     public DateHistogramAggregatorBuilder(StreamInput in) throws IOException {
         super(in, InternalDateHistogram.HISTOGRAM_FACTORY);
-        if (in.readBoolean()) {
-            dateHistogramInterval = DateHistogramInterval.readFromStream(in);
-        }
+        dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
     }

     @Override
     protected void innerWriteTo(StreamOutput out) throws IOException {
         super.innerWriteTo(out);
-        boolean hasDateInterval = dateHistogramInterval != null;
-        out.writeBoolean(hasDateInterval);
-        if (hasDateInterval) {
-            dateHistogramInterval.writeTo(out);
-        }
+        out.writeOptionalWriteable(dateHistogramInterval);
     }

     /**
@@ -40,10 +40,6 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
     public static final DateHistogramInterval QUARTER = new DateHistogramInterval("1q");
     public static final DateHistogramInterval YEAR = new DateHistogramInterval("1y");

-    public static final DateHistogramInterval readFromStream(StreamInput in) throws IOException {
-        return SECOND.readFrom(in);
-    }
-
     public static DateHistogramInterval seconds(int sec) {
         return new DateHistogramInterval(sec + "s");
     }
@@ -70,6 +66,19 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
         this.expression = expression;
     }

+    /**
+     * Read from a stream.
+     */
+    public DateHistogramInterval(StreamInput in) throws IOException {
+        expression = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(expression);
+    }
+
     @Override
     public String toString() {
         return expression;
@@ -91,14 +100,4 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
         DateHistogramInterval other = (DateHistogramInterval) obj;
         return Objects.equals(expression, other.expression);
     }
-
-    @Override
-    public DateHistogramInterval readFrom(StreamInput in) throws IOException {
-        return new DateHistogramInterval(in.readString());
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(expression);
-    }
 }
@@ -71,7 +71,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     public TermsAggregatorBuilder(StreamInput in) throws IOException {
         super(in, StringTerms.TYPE, ValuesSourceType.ANY);
         bucketCountThresholds = new BucketCountThresholds(in);
-        collectMode = SubAggCollectionMode.BREADTH_FIRST.readFrom(in);
+        collectMode = SubAggCollectionMode.readFromStream(in);
         executionHint = in.readOptionalString();
         includeExclude = in.readOptionalWriteable(IncludeExclude::new);
         order = InternalOrder.Streams.readOrder(in);
@@ -64,7 +64,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
         keyed = in.readBoolean();
         numberOfSignificantValueDigits = in.readVInt();
         compression = in.readDouble();
-        method = PercentilesMethod.TDIGEST.readFrom(in);
+        method = PercentilesMethod.readFromStream(in);
     }

     @Override
@@ -64,7 +64,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
         keyed = in.readBoolean();
         numberOfSignificantValueDigits = in.readVInt();
         compression = in.readDouble();
-        method = PercentilesMethod.TDIGEST.readFrom(in);
+        method = PercentilesMethod.readFromStream(in);
     }

     @Override
@@ -51,8 +51,7 @@ public enum PercentilesMethod implements Writeable<PercentilesMethod> {
         return name;
     }

-    @Override
-    public PercentilesMethod readFrom(StreamInput in) throws IOException {
+    public static PercentilesMethod readFromStream(StreamInput in) throws IOException {
         int ordinal = in.readVInt();
         if (ordinal < 0 || ordinal >= values().length) {
             throw new IOException("Unknown PercentilesMethod ordinal [" + ordinal + "]");
@@ -19,34 +19,9 @@

 package org.elasticsearch.search.aggregations.support;

-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Writeable;
-
-import java.io.IOException;
-
-/*
- * The ordinal values for this class are tested in ValuesSourceTypeTests to
- * ensure that the ordinal for each value does not change and break bwc
- */
-public enum ValuesSourceType implements Writeable<ValuesSourceType> {
-
+public enum ValuesSourceType {
     ANY,
     NUMERIC,
     BYTES,
     GEOPOINT;
-
-    @Override
-    public ValuesSourceType readFrom(StreamInput in) throws IOException {
-        int ordinal = in.readVInt();
-        if (ordinal < 0 || ordinal >= values().length) {
-            throw new IOException("Unknown ValuesSourceType ordinal [" + ordinal + "]");
-        }
-        return values()[ordinal];
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(ordinal());
-    }
 }
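The ValuesSourceType change above is the case the commit message highlights: once readFrom is gone, the enum no longer needs to implement Writeable at all, because each concrete builder already knows its values-source type and hands the constant straight to its parent constructor (as the TermsAggregatorBuilder hunk above shows with ValuesSourceType.ANY). A rough sketch of that idea follows, with hypothetical names and java.io.DataInput standing in for StreamInput; it is not the actual Elasticsearch code.

import java.io.DataInput;
import java.io.IOException;

// The type is a compile-time property of the concrete builder, so nothing is
// written to or read from the wire for it.
enum ValuesSourceTypeSketch { ANY, NUMERIC, BYTES, GEOPOINT }

abstract class ValuesSourceBuilderSketch {
    protected final ValuesSourceTypeSketch valuesSourceType;

    ValuesSourceBuilderSketch(DataInput in, ValuesSourceTypeSketch valuesSourceType) throws IOException {
        this.valuesSourceType = valuesSourceType; // supplied by the subclass, not deserialized
    }
}

final class TermsBuilderSketch extends ValuesSourceBuilderSketch {
    TermsBuilderSketch(DataInput in) throws IOException {
        super(in, ValuesSourceTypeSketch.ANY); // constant known statically
    }
}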
@@ -80,6 +80,9 @@ public class CollectorResult implements ToXContent, Writeable {
         this.children = children;
     }

+    /**
+     * Read from a stream.
+     */
     public CollectorResult(StreamInput in) throws IOException {
         this.collectorName = in.readString();
         this.reason = in.readString();
@@ -92,6 +95,17 @@ public class CollectorResult implements ToXContent, Writeable {
         }
     }

+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(collectorName);
+        out.writeString(reason);
+        out.writeLong(time);
+        out.writeVInt(children.size());
+        for (CollectorResult child : children) {
+            child.writeTo(out);
+        }
+    }
+
     /**
      * @return the profiled time for this collector (inclusive of children)
      */
@@ -137,20 +151,4 @@ public class CollectorResult implements ToXContent, Writeable {
         builder = builder.endObject();
         return builder;
     }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(collectorName);
-        out.writeString(reason);
-        out.writeLong(time);
-        out.writeVInt(children.size());
-        for (CollectorResult child : children) {
-            child.writeTo(out);
-        }
-    }
-
-    @Override
-    public Object readFrom(StreamInput in) throws IOException {
-        return new CollectorResult(in);
-    }
 }
@@ -75,11 +75,6 @@ public final class InternalProfileShardResults implements Writeable<InternalProf
         return this.shardResults;
     }

-    @Override
-    public InternalProfileShardResults readFrom(StreamInput in) throws IOException {
-        return new InternalProfileShardResults(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeInt(shardResults.size());
@@ -57,7 +57,8 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
     private final long nodeTime;
     private final List<ProfileResult> children;

-    public ProfileResult(String queryType, String luceneDescription, Map<String, Long> timings, List<ProfileResult> children, long nodeTime) {
+    public ProfileResult(String queryType, String luceneDescription, Map<String, Long> timings, List<ProfileResult> children,
+            long nodeTime) {
         this.queryType = queryType;
         this.luceneDescription = luceneDescription;
         this.timings = timings;
@@ -65,6 +66,9 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
         this.nodeTime = nodeTime;
     }

+    /**
+     * Read from a stream.
+     */
     public ProfileResult(StreamInput in) throws IOException{
         this.queryType = in.readString();
         this.luceneDescription = in.readString();
@@ -84,6 +88,22 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
         }
     }

+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(queryType);
+        out.writeString(luceneDescription);
+        out.writeLong(nodeTime); // not Vlong because can be negative
+        out.writeVInt(timings.size());
+        for (Map.Entry<String, Long> entry : timings.entrySet()) {
+            out.writeString(entry.getKey());
+            out.writeLong(entry.getValue());
+        }
+        out.writeVInt(children.size());
+        for (ProfileResult child : children) {
+            child.writeTo(out);
+        }
+    }
+
     /**
      * Retrieve the lucene description of this query (e.g. the "explain" text)
      */
@@ -121,27 +141,6 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
         return Collections.unmodifiableList(children);
     }
-
-    @Override
-    public ProfileResult readFrom(StreamInput in) throws IOException {
-        return new ProfileResult(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(queryType);
-        out.writeString(luceneDescription);
-        out.writeLong(nodeTime); // not Vlong because can be negative
-        out.writeVInt(timings.size());
-        for (Map.Entry<String, Long> entry : timings.entrySet()) {
-            out.writeString(entry.getKey());
-            out.writeLong(entry.getValue());
-        }
-        out.writeVInt(children.size());
-        for (ProfileResult child : children) {
-            child.writeTo(out);
-        }
-    }

     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder = builder.startObject()
@@ -50,6 +50,9 @@ public final class ProfileShardResult implements Writeable<ProfileShardResult>,
         this.rewriteTime = rewriteTime;
     }

+    /**
+     * Read from a stream.
+     */
     public ProfileShardResult(StreamInput in) throws IOException {
         int profileSize = in.readVInt();
         profileResults = new ArrayList<>(profileSize);
@@ -61,6 +64,17 @@ public final class ProfileShardResult implements Writeable<ProfileShardResult>,
         rewriteTime = in.readLong();
     }

+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVInt(profileResults.size());
+        for (ProfileResult p : profileResults) {
+            p.writeTo(out);
+        }
+        profileCollector.writeTo(out);
+        out.writeLong(rewriteTime);
+    }
+
     public List<ProfileResult> getQueryResults() {
         return Collections.unmodifiableList(profileResults);
     }
@@ -86,20 +100,4 @@ public final class ProfileShardResult implements Writeable<ProfileShardResult>,
         builder.endArray();
         return builder;
     }
-
-    @Override
-    public ProfileShardResult readFrom(StreamInput in) throws IOException {
-        return new ProfileShardResult(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(profileResults.size());
-        for (ProfileResult p : profileResults) {
-            p.writeTo(out);
-        }
-        profileCollector.writeTo(out);
-        out.writeLong(rewriteTime);
-    }
-
 }
@@ -56,13 +56,13 @@ public class SubAggCollectionModeTests extends ESTestCase {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(0);
             try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(SubAggCollectionMode.BREADTH_FIRST.readFrom(in), equalTo(SubAggCollectionMode.DEPTH_FIRST));
+                assertThat(SubAggCollectionMode.readFromStream(in), equalTo(SubAggCollectionMode.DEPTH_FIRST));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(1);
             try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(SubAggCollectionMode.BREADTH_FIRST.readFrom(in), equalTo(SubAggCollectionMode.BREADTH_FIRST));
+                assertThat(SubAggCollectionMode.readFromStream(in), equalTo(SubAggCollectionMode.BREADTH_FIRST));
             }
         }
     }
@@ -71,7 +71,7 @@ public class SubAggCollectionModeTests extends ESTestCase {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE));
             try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                SubAggCollectionMode.BREADTH_FIRST.readFrom(in);
+                SubAggCollectionMode.readFromStream(in);
                 fail("Expected IOException");
             } catch(IOException e) {
                 assertThat(e.getMessage(), containsString("Unknown SubAggCollectionMode ordinal ["));
@@ -55,13 +55,13 @@ public class PercentilesMethodTests extends ESTestCase {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(0);
             try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(PercentilesMethod.TDIGEST.readFrom(in), equalTo(PercentilesMethod.TDIGEST));
+                assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.TDIGEST));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(1);
             try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(PercentilesMethod.TDIGEST.readFrom(in), equalTo(PercentilesMethod.HDR));
+                assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.HDR));
             }
         }
     }
@@ -70,7 +70,7 @@ public class PercentilesMethodTests extends ESTestCase {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE));
             try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                PercentilesMethod.TDIGEST.readFrom(in);
+                PercentilesMethod.readFromStream(in);
                 fail("Expected IOException");
             } catch(IOException e) {
                 assertThat(e.getMessage(), containsString("Unknown PercentilesMethod ordinal ["));
@@ -1,109 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.support;
-
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-
-public class ValuesSourceTypeTests extends ESTestCase {
-
-    public void testValidOrdinals() {
-        assertThat(ValuesSourceType.ANY.ordinal(), equalTo(0));
-        assertThat(ValuesSourceType.NUMERIC.ordinal(), equalTo(1));
-        assertThat(ValuesSourceType.BYTES.ordinal(), equalTo(2));
-        assertThat(ValuesSourceType.GEOPOINT.ordinal(), equalTo(3));
-    }
-
-    public void testwriteTo() throws Exception {
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            ValuesSourceType.ANY.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(in.readVInt(), equalTo(0));
-            }
-        }
-
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            ValuesSourceType.NUMERIC.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(in.readVInt(), equalTo(1));
-            }
-        }
-
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            ValuesSourceType.BYTES.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(in.readVInt(), equalTo(2));
-            }
-        }
-
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            ValuesSourceType.GEOPOINT.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(in.readVInt(), equalTo(3));
-            }
-        }
-    }
-
-    public void testReadFrom() throws Exception {
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.writeVInt(0);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(ValuesSourceType.ANY.readFrom(in), equalTo(ValuesSourceType.ANY));
-            }
-        }
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.writeVInt(1);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(ValuesSourceType.ANY.readFrom(in), equalTo(ValuesSourceType.NUMERIC));
-            }
-        }
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.writeVInt(2);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(ValuesSourceType.ANY.readFrom(in), equalTo(ValuesSourceType.BYTES));
-            }
-        }
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.writeVInt(3);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                assertThat(ValuesSourceType.ANY.readFrom(in), equalTo(ValuesSourceType.GEOPOINT));
-            }
-        }
-    }
-
-    public void testInvalidReadFrom() throws Exception {
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.writeVInt(randomIntBetween(4, Integer.MAX_VALUE));
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
-                ValuesSourceType.ANY.readFrom(in);
-                fail("Expected IOException");
-            } catch(IOException e) {
-                assertThat(e.getMessage(), containsString("Unknown ValuesSourceType ordinal ["));
-            }
-
-        }
-    }
-}