Merge branch 'master' into needsScore

This commit is contained in:
Robert Muir 2016-05-10 11:23:50 -04:00
commit 7a47d33a9b
25 changed files with 486 additions and 267 deletions

View File

@ -51,6 +51,8 @@ class ListPluginsCommand extends Command {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
for (Path plugin : stream) {
terminal.println(plugin.getFileName().toString());
PluginInfo info = PluginInfo.readFromProperties(env.pluginsFile().resolve(plugin.toAbsolutePath()));
terminal.println(Terminal.Verbosity.VERBOSE, info.toString());
}
}
}

View File

@ -19,8 +19,7 @@
package org.elasticsearch.rest.action.cat;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.ObjectLongMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
@ -36,11 +35,6 @@ import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.support.RestResponseListener;
import org.elasticsearch.rest.action.support.RestTable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.rest.RestRequest.Method.GET;
/**
@ -57,7 +51,6 @@ public class RestFielddataAction extends AbstractCatAction {
@Override
protected void doRequest(final RestRequest request, final RestChannel channel, final Client client) {
final NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("data:true");
nodesStatsRequest.clear();
nodesStatsRequest.indices(true);
@ -86,56 +79,30 @@ public class RestFielddataAction extends AbstractCatAction {
.addCell("host", "alias:h;desc:host name")
.addCell("ip", "desc:ip address")
.addCell("node", "alias:n;desc:node name")
.addCell("total", "text-align:right;desc:total field data usage")
.addCell("field", "alias:f;desc:field name")
.addCell("size", "text-align:right;alias:s;desc:field data usage")
.endHeaders();
return table;
}
private Table buildTable(final RestRequest request, final NodesStatsResponse nodeStatses) {
Set<String> fieldNames = new HashSet<>();
Map<NodeStats, ObjectLongMap<String>> nodesFields = new HashMap<>();
Table table = getTableWithHeader(request);
// Collect all the field names so a new table can be built
for (NodeStats ns : nodeStatses.getNodes()) {
ObjectLongHashMap<String> fields = ns.getIndices().getFieldData().getFields();
nodesFields.put(ns, fields);
if (fields != null) {
for (String key : fields.keys().toArray(String.class)) {
fieldNames.add(key);
for (NodeStats nodeStats: nodeStatses.getNodes()) {
if (nodeStats.getIndices().getFieldData().getFields() != null) {
for (ObjectLongCursor<String> cursor : nodeStats.getIndices().getFieldData().getFields()) {
table.startRow();
table.addCell(nodeStats.getNode().getId());
table.addCell(nodeStats.getNode().getHostName());
table.addCell(nodeStats.getNode().getHostAddress());
table.addCell(nodeStats.getNode().getName());
table.addCell(cursor.key);
table.addCell(new ByteSizeValue(cursor.value));
table.endRow();
}
}
}
// The table must be rebuilt because it has dynamic headers based on the fields
Table table = new Table();
table.startHeaders()
.addCell("id", "desc:node id")
.addCell("host", "alias:h;desc:host name")
.addCell("ip", "desc:ip address")
.addCell("node", "alias:n;desc:node name")
.addCell("total", "text-align:right;desc:total field data usage");
// The table columns must be built dynamically since the number of fields is unknown
for (String fieldName : fieldNames) {
table.addCell(fieldName, "text-align:right;desc:" + fieldName + " field");
}
table.endHeaders();
for (Map.Entry<NodeStats, ObjectLongMap<String>> statsEntry : nodesFields.entrySet()) {
table.startRow();
// add the node info and field data total before each individual field
NodeStats ns = statsEntry.getKey();
table.addCell(ns.getNode().getId());
table.addCell(ns.getNode().getHostName());
table.addCell(ns.getNode().getHostAddress());
table.addCell(ns.getNode().getName());
table.addCell(ns.getIndices().getFieldData().getMemorySize());
ObjectLongMap<String> fields = statsEntry.getValue();
for (String fieldName : fieldNames) {
table.addCell(new ByteSizeValue(fields == null ? 0L : fields.getOrDefault(fieldName, 0L)));
}
table.endRow();
}
return table;
}
}

View File

@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -53,7 +52,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
String[] bucketsPaths = null;
String format = null;
GapPolicy gapPolicy = null;
Map<String, Object> leftover = new HashMap<>(5);
Map<String, Object> params = new HashMap<>(5);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -66,7 +65,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
} else if (context.getParseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else {
leftover.put(currentFieldName, parser.text());
parseToken(pipelineAggregatorName, parser, context, currentFieldName, token, params);
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
@ -77,10 +76,10 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
}
bucketsPaths = paths.toArray(new String[paths.size()]);
} else {
leftover.put(currentFieldName, parser.list());
parseToken(pipelineAggregatorName, parser, context, currentFieldName, token, params);
}
} else {
leftover.put(currentFieldName, parser.objectText());
parseToken(pipelineAggregatorName, parser, context, currentFieldName, token, params);
}
}
@ -89,30 +88,32 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
"Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for aggregation [" + pipelineAggregatorName + "]");
}
BucketMetricsPipelineAggregatorBuilder<?> factory = null;
try {
factory = buildFactory(pipelineAggregatorName, bucketsPaths[0], leftover);
if (format != null) {
factory.format(format);
}
if (gapPolicy != null) {
factory.gapPolicy(gapPolicy);
}
} catch (ParseException exception) {
throw new ParsingException(parser.getTokenLocation(),
"Could not parse settings for aggregation [" + pipelineAggregatorName + "].", exception);
BucketMetricsPipelineAggregatorBuilder<?> factory = buildFactory(pipelineAggregatorName, bucketsPaths[0], params);
if (format != null) {
factory.format(format);
}
if (gapPolicy != null) {
factory.gapPolicy(gapPolicy);
}
if (leftover.size() > 0) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected tokens " + leftover.keySet() + " in [" + pipelineAggregatorName + "].");
}
assert(factory != null);
return factory;
}
protected abstract BucketMetricsPipelineAggregatorBuilder<?> buildFactory(String pipelineAggregatorName, String bucketsPaths,
Map<String, Object> unparsedParams) throws ParseException;
Map<String, Object> params);
protected boolean token(XContentParser parser, QueryParseContext context, String field,
XContentParser.Token token, Map<String, Object> params) throws IOException {
return false;
}
private void parseToken(String aggregationName, XContentParser parser, QueryParseContext context, String currentFieldName,
XContentParser.Token currentToken, Map<String, Object> params) throws IOException {
if (token(parser, context, currentFieldName, currentToken, params) == false) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + currentToken + " [" + currentFieldName + "] in [" + aggregationName + "]");
}
}
}

View File

@ -75,7 +75,7 @@ public class AvgBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() {
@Override
protected AvgBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) {
String bucketsPath, Map<String, Object> params) {
return new AvgBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
}
};
@ -94,4 +94,4 @@ public class AvgBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public String getWriteableName() {
return NAME;
}
}
}

View File

@ -75,7 +75,7 @@ public class MaxBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() {
@Override
protected MaxBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) {
String bucketsPath, Map<String, Object> params) {
return new MaxBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
}
};
@ -94,4 +94,4 @@ public class MaxBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public String getWriteableName() {
return NAME;
}
}
}

View File

@ -75,7 +75,7 @@ public class MinBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() {
@Override
protected MinBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) {
String bucketsPath, Map<String, Object> params) {
return new MinBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
}
};
@ -94,4 +94,4 @@ public class MinBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public String getWriteableName() {
return NAME;
}
}
}

View File

@ -19,10 +19,13 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import com.carrotsearch.hppc.DoubleArrayList;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
@ -117,41 +120,36 @@ public class PercentilesBucketPipelineAggregatorBuilder
}
public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() {
@Override
protected PercentilesBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) throws ParseException {
double[] percents = null;
int counter = 0;
Object percentParam = unparsedParams.get(PERCENTS_FIELD.getPreferredName());
if (percentParam != null) {
if (percentParam instanceof List) {
percents = new double[((List<?>) percentParam).size()];
for (Object p : (List<?>) percentParam) {
if (p instanceof Double) {
percents[counter] = (Double) p;
counter += 1;
} else {
throw new ParseException(
"Parameter [" + PERCENTS_FIELD.getPreferredName() + "] must be an array of doubles, type `"
+ percentParam.getClass().getSimpleName() + "` provided instead",
0);
}
}
unparsedParams.remove(PERCENTS_FIELD.getPreferredName());
} else {
throw new ParseException("Parameter [" + PERCENTS_FIELD.getPreferredName() + "] must be an array of doubles, type `"
+ percentParam.getClass().getSimpleName() + "` provided instead", 0);
}
}
String bucketsPath, Map<String, Object> params) {
PercentilesBucketPipelineAggregatorBuilder factory = new
PercentilesBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
PercentilesBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
double[] percents = (double[]) params.get(PERCENTS_FIELD.getPreferredName());
if (percents != null) {
factory.percents(percents);
}
return factory;
}
@Override
protected boolean token(XContentParser parser, QueryParseContext context, String field,
XContentParser.Token token, Map<String, Object> params) throws IOException {
if (context.getParseFieldMatcher().match(field, PERCENTS_FIELD) && token == XContentParser.Token.START_ARRAY) {
DoubleArrayList percents = new DoubleArrayList(10);
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
percents.add(parser.doubleValue());
}
params.put(PERCENTS_FIELD.getPreferredName(), percents.toArray());
return true;
}
return false;
}
};
@Override
@ -169,4 +167,4 @@ public class PercentilesBucketPipelineAggregatorBuilder
public String getWriteableName() {
return NAME;
}
}
}

View File

@ -77,7 +77,7 @@ public class StatsBucketPipelineAggregatorBuilder extends BucketMetricsPipelineA
public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() {
@Override
protected StatsBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) {
String bucketsPath, Map<String, Object> params) {
return new StatsBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
}
};
@ -96,4 +96,4 @@ public class StatsBucketPipelineAggregatorBuilder extends BucketMetricsPipelineA
public String getWriteableName() {
return NAME;
}
}
}

View File

@ -20,9 +20,11 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
import java.text.ParseException;
import java.io.IOException;
import java.util.Map;
public class ExtendedStatsBucketParser extends BucketMetricsParser {
@ -30,25 +32,24 @@ public class ExtendedStatsBucketParser extends BucketMetricsParser {
@Override
protected ExtendedStatsBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) throws ParseException {
Double sigma = null;
Object param = unparsedParams.get(SIGMA.getPreferredName());
if (param != null) {
if (param instanceof Double) {
sigma = (Double) param;
unparsedParams.remove(SIGMA.getPreferredName());
} else {
throw new ParseException("Parameter [" + SIGMA.getPreferredName() + "] must be a Double, type `"
+ param.getClass().getSimpleName() + "` provided instead", 0);
}
}
String bucketsPath, Map<String, Object> params) {
ExtendedStatsBucketPipelineAggregatorBuilder factory =
new ExtendedStatsBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
new ExtendedStatsBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
Double sigma = (Double) params.get(SIGMA.getPreferredName());
if (sigma != null) {
factory.sigma(sigma);
}
return factory;
}
@Override
protected boolean token(XContentParser parser, QueryParseContext context, String field,
XContentParser.Token token, Map<String, Object> params) throws IOException {
if (context.getParseFieldMatcher().match(field, SIGMA) && token == XContentParser.Token.VALUE_NUMBER) {
params.put(SIGMA.getPreferredName(), parser.doubleValue());
return true;
}
return false;
}
}

View File

@ -75,7 +75,7 @@ public class SumBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() {
@Override
protected SumBucketPipelineAggregatorBuilder buildFactory(String pipelineAggregatorName,
String bucketsPath, Map<String, Object> unparsedParams) {
String bucketsPath, Map<String, Object> params) {
return new SumBucketPipelineAggregatorBuilder(pipelineAggregatorName, bucketsPath);
}
};
@ -94,4 +94,4 @@ public class SumBucketPipelineAggregatorBuilder extends BucketMetricsPipelineAgg
public String getWriteableName() {
return NAME;
}
}
}

View File

@ -62,8 +62,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.startsWith;
/**
* Integration tests for InternalCircuitBreakerService
@ -84,6 +86,8 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {
.put(HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(),
HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null))
.put(HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0)
.put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(),
HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null))
.build();
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings));
}
@ -210,7 +214,6 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {
* Test that a breaker correctly redistributes to a different breaker, in
* this case, the fielddata breaker borrows space from the request breaker
*/
@AwaitsFix(bugUrl = "way too unstable request size. Needs a proper and more stable fix.")
public void testParentChecking() throws Exception {
if (noopBreakerUsed()) {
logger.info("--> noop breakers used, skipping test");
@ -228,10 +231,6 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {
}
indexRandom(true, reqs);
// We need the request limit beforehand, just from a single node because the limit should always be the same
long beforeReqLimit = client.admin().cluster().prepareNodesStats().setBreaker(true).get()
.getNodes().get(0).getBreaker().getStats(CircuitBreaker.REQUEST).getLimit();
Settings resetSettings = Settings.builder()
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b")
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0)
@ -252,11 +251,11 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {
RestStatus.INTERNAL_SERVER_ERROR,
containsString("Data too large, data for [test] would be larger than limit of [10/10b]"));
reset();
// Adjust settings so the parent breaker will fail, but neither the fielddata breaker nor the node request breaker will fail
// There is no "one size fits all" breaker size as internal request size will vary based on doc count.
int parentBreakerSize = docCount * 3;
resetSettings = Settings.builder()
.put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), parentBreakerSize + "b")
.put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "500b")
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "90%")
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0)
.build();
@ -267,9 +266,16 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {
client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get();
fail("should have thrown an exception");
} catch (Exception e) {
String errMsg = "[parent] Data too large, data for [test] would be larger than limit of [" + parentBreakerSize;
assertThat("Exception: [" + e.toString() + "] should contain a CircuitBreakingException",
e.toString(), containsString(errMsg));
final Throwable cause = ExceptionsHelper.unwrap(e, CircuitBreakingException.class);
assertNotNull("CircuitBreakingException is not the cause of " + e, cause);
String errMsg = "would be larger than limit of [500/500b]]";
assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException",
cause.toString(), startsWith("CircuitBreakingException[[parent] Data too large"));
assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException",
cause.toString(), endsWith(errMsg));
} finally {
// reset before teardown as it requires properly set up breakers
reset();
}
}

View File

@ -19,8 +19,14 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregatorBuilder;
import static org.hamcrest.Matchers.equalTo;
public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase<ExtendedStatsBucketPipelineAggregatorBuilder> {
@Override
@ -32,5 +38,22 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase<Exte
return factory;
}
public void testSigmaFromInt() throws Exception {
String content = XContentFactory.jsonBuilder()
.startObject()
.field("sigma", 5)
.field("buckets_path", "test")
.endObject()
.string();
XContentParser parser = XContentFactory.xContent(content).createParser(content);
QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
parser.nextToken(); // skip object start
ExtendedStatsBucketPipelineAggregatorBuilder builder = (ExtendedStatsBucketPipelineAggregatorBuilder) aggParsers
.pipelineParser(ExtendedStatsBucketPipelineAggregator.TYPE.name(), parseFieldMatcher)
.parse("test", parseContext);
assertThat(builder.sigma(), equalTo(5.0));
}
}

View File

@ -19,8 +19,14 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregatorBuilder;
import static org.hamcrest.Matchers.equalTo;
public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<PercentilesBucketPipelineAggregatorBuilder> {
@Override
@ -37,5 +43,22 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<Percen
return factory;
}
public void testPercentsFromMixedArray() throws Exception {
String content = XContentFactory.jsonBuilder()
.startObject()
.field("buckets_path", "test")
.array("percents", 0, 20.0, 50, 75.99)
.endObject()
.string();
XContentParser parser = XContentFactory.xContent(content).createParser(content);
QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
parser.nextToken(); // skip object start
PercentilesBucketPipelineAggregatorBuilder builder = (PercentilesBucketPipelineAggregatorBuilder) aggParsers
.pipelineParser(PercentilesBucketPipelineAggregator.TYPE.name(), parseFieldMatcher)
.parse("test", parseContext);
assertThat(builder.percents(), equalTo(new double[]{0.0, 20.0, 50.0, 75.99}));
}
}

View File

@ -370,4 +370,31 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
}
public void testEmptySimpleQueryStringWithAnalysis() throws Exception {
// https://github.com/elastic/elasticsearch/issues/18202
String mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("properties")
.startObject("body")
.field("type", "string")
.field("analyzer", "stop")
.endObject()
.endObject()
.endObject()
.endObject().string();
CreateIndexRequestBuilder mappingRequest = client().admin().indices()
.prepareCreate("test1")
.addMapping("type1", mapping);
mappingRequest.execute().actionGet();
indexRandom(true, client().prepareIndex("test1", "type1", "1").setSource("body", "Some Text"));
refresh();
SearchResponse searchResponse = client().prepareSearch()
.setQuery(simpleQueryStringQuery("the*").analyzeWildcard(true).field("body")).get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, 0l);
}
}

View File

@ -77,20 +77,10 @@ if [ ! -x "$JAVA" ]; then
exit 1
fi
# check if properties already has a config file or config dir
if [ -e "$CONF_DIR" ]; then
case "$properties" in
*-Des.default.path.conf=*|*-Des.path.conf=*)
;;
*)
properties="$properties -Des.default.path.conf=\"$CONF_DIR\""
;;
esac
fi
# full hostname passed through cut for portability on systems that do not support hostname -s
# export on separate line for shells that do not support combining definition and export
HOSTNAME=`hostname | cut -d. -f1`
export HOSTNAME
eval "\"$JAVA\"" "$ES_JAVA_OPTS" -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginCli "$@"
exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -Des.default.path.conf="$CONF_DIR" \
-cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "$@"

View File

@ -7,10 +7,13 @@ on every data node in the cluster.
[source,sh]
--------------------------------------------------
% curl '192.168.56.10:9200/_cat/fielddata?v'
id host ip node total body text
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones 385.6kb 159.8kb 225.7kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary 435.2kb 159.8kb 275.3kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip 284.6kb 109.2kb 175.3kb
id host ip node field size
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones body 159.8kb
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones text 225.7kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary body 159.8kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary text 275.3kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip body 109.2kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip text 175.3kb
--------------------------------------------------
Fields can be specified either as a query parameter, or in the URL path:
@ -18,17 +21,19 @@ Fields can be specified either as a query parameter, or in the URL path:
[source,sh]
--------------------------------------------------
% curl '192.168.56.10:9200/_cat/fielddata?v&fields=body'
id host ip node total body
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones 385.6kb 159.8kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary 435.2kb 159.8kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip 284.6kb 109.2kb
id host ip node field size
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones body 159.8kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary body 159.8kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip body 109.2kb
% curl '192.168.56.10:9200/_cat/fielddata/body,text?v'
id host ip node total body text
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones 385.6kb 159.8kb 225.7kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary 435.2kb 159.8kb 275.3kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip 284.6kb 109.2kb 175.3kb
id host ip node field size
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones body 159.8kb
c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones text 225.7kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary body 159.8kb
waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary text 275.3kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip body 109.2kb
yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip text 175.3kb
--------------------------------------------------
The output shows the total fielddata and then the individual fielddata for the
`body` and `text` fields.
The output shows the individual fielddata for the `body` and `text` fields, one row per field per node.

View File

@ -38,3 +38,10 @@ and `i` for ingest. A node with no explicit roles will be a coordinating
only node and marked with `-`. A node can have multiple roles. The
master column has been adapted to return only whether a node is the
current master (`*`) or not (`-`).
==== Changes to cat field data API
The cat field data endpoint now returns one row per field instead of one column per field.
The `total` column has been removed from the field data API. Total field data usage per node
can be retrieved using the cat nodes API.

View File

@ -26,9 +26,12 @@ import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.reflect.Array;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Support for dynamic type (def).
@ -36,18 +39,13 @@ import java.util.Map;
* Dynamic types can invoke methods, load/store fields, and be passed as parameters to operators without
* compile-time type information.
* <p>
* Dynamic methods, loads, and stores involve locating the appropriate field or method depending
* on the receiver's class. For these, we emit an {@code invokedynamic} instruction that, for each new
* type encountered will query a corresponding {@code lookupXXX} method to retrieve the appropriate method.
* In most cases, the {@code lookupXXX} methods here will only be called once for a given call site, because
* Dynamic methods, loads, stores, and array/list/map load/stores involve locating the appropriate field
* or method depending on the receiver's class. For these, we emit an {@code invokedynamic} instruction that,
* for each new type encountered will query a corresponding {@code lookupXXX} method to retrieve the appropriate
* method. In most cases, the {@code lookupXXX} methods here will only be called once for a given call site, because
* caching ({@link DynamicCallSite}) generally works: usually all objects at any call site will be consistently
* the same type (or just a few types). In extreme cases, if there is type explosion, they may be called every
* single time, but simplicity is still more valuable than performance in this code.
* <p>
* Dynamic array loads and stores and operator functions (e.g. {@code +}) are called directly
* with {@code invokestatic}. Because these features cannot be overloaded in painless, they are hardcoded
* decision trees based on the only types that are possible. This keeps overhead low, and seems to be as fast
* on average as the more adaptive methodhandle caching.
*/
public class Def {
@ -96,8 +94,6 @@ public class Def {
"for class [" + receiverClass.getCanonicalName() + "].");
}
/** pointer to Array.getLength(Object) */
private static final MethodHandle ARRAY_LENGTH;
/** pointer to Map.get(Object) */
private static final MethodHandle MAP_GET;
/** pointer to Map.put(Object,Object) */
@ -109,9 +105,6 @@ public class Def {
static {
Lookup lookup = MethodHandles.publicLookup();
try {
// TODO: maybe specialize handles for different array types. this may be slower, but simple :)
ARRAY_LENGTH = lookup.findStatic(Array.class, "getLength",
MethodType.methodType(int.class, Object.class));
MAP_GET = lookup.findVirtual(Map.class, "get",
MethodType.methodType(Object.class, Object.class));
MAP_PUT = lookup.findVirtual(Map.class, "put",
@ -125,6 +118,54 @@ public class Def {
}
}
// TODO: Once Java has a factory for those in java.lang.invoke.MethodHandles, use it:
/** Helper class for isolating MethodHandles and methods to get the length of arrays
 * (to emulate an "arraylength" bytecode using MethodHandles).
 * This should really be a method in {@link MethodHandles} class!
*/
private static final class ArrayLengthHelper {
private ArrayLengthHelper() {}
private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();
private static final Map<Class<?>,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap(
Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class,
char[].class, float[].class, double[].class, Object[].class)
.collect(Collectors.toMap(Function.identity(), type -> {
try {
return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "getArrayLength", MethodType.methodType(int.class, type));
} catch (ReflectiveOperationException e) {
throw new AssertionError(e);
}
}))
);
private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class);
static int getArrayLength(boolean[] array) { return array.length; }
static int getArrayLength(byte[] array) { return array.length; }
static int getArrayLength(short[] array) { return array.length; }
static int getArrayLength(int[] array) { return array.length; }
static int getArrayLength(long[] array) { return array.length; }
static int getArrayLength(char[] array) { return array.length; }
static int getArrayLength(float[] array) { return array.length; }
static int getArrayLength(double[] array) { return array.length; }
static int getArrayLength(Object[] array) { return array.length; }
public static MethodHandle arrayLengthGetter(Class<?> arrayType) {
if (!arrayType.isArray()) {
throw new IllegalArgumentException("type must be an array");
}
return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?
ARRAY_TYPE_MH_MAPPING.get(arrayType) :
OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType));
}
}
/** Returns an array length getter MethodHandle for the given array type */
public static MethodHandle arrayLengthGetter(Class<?> arrayType) {
return ArrayLengthHelper.arrayLengthGetter(arrayType);
}
/**
* Looks up handle for a dynamic field getter (field load)
* <p>
@ -177,7 +218,7 @@ public class Def {
// special case: arrays, maps, and lists
if (receiverClass.isArray() && "length".equals(name)) {
// arrays expose .length as a read-only getter
return ARRAY_LENGTH;
return arrayLengthGetter(receiverClass);
} else if (Map.class.isAssignableFrom(receiverClass)) {
// maps allow access like mymap.key
// wire 'key' as a parameter, its a constant in painless
@ -266,56 +307,45 @@ public class Def {
"for class [" + receiverClass.getCanonicalName() + "].");
}
// NOTE: below methods are not cached, instead invoked directly because they are performant.
/**
* Returns a method handle to do an array store.
* @param receiverClass Class of the array to store the value in
* @return a MethodHandle that accepts the receiver as first argument, the index as second argument,
* and the value to set as 3rd argument. Return value is undefined and should be ignored.
*/
static MethodHandle lookupArrayStore(Class<?> receiverClass) {
if (receiverClass.isArray()) {
return MethodHandles.arrayElementSetter(receiverClass);
} else if (Map.class.isAssignableFrom(receiverClass)) {
// maps allow access like mymap[key]
return MAP_PUT;
} else if (List.class.isAssignableFrom(receiverClass)) {
return LIST_SET;
}
throw new IllegalArgumentException("Attempting to address a non-array type " +
"[" + receiverClass.getCanonicalName() + "] as an array.");
}
/**
* Returns a method handle to do an array load.
* @param receiverClass Class of the array to load the value from
* @return a MethodHandle that accepts the receiver as first argument, the index as second argument.
* It returns the loaded value.
*/
static MethodHandle lookupArrayLoad(Class<?> receiverClass) {
if (receiverClass.isArray()) {
return MethodHandles.arrayElementGetter(receiverClass);
} else if (Map.class.isAssignableFrom(receiverClass)) {
// maps allow access like mymap[key]
return MAP_GET;
} else if (List.class.isAssignableFrom(receiverClass)) {
return LIST_GET;
}
throw new IllegalArgumentException("Attempting to address a non-array type " +
"[" + receiverClass.getCanonicalName() + "] as an array.");
}
/**
* Performs an actual array store.
* @param array array object
* @param index map key, array index (integer), or list index (integer)
* @param value value to store in the array.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public static void arrayStore(final Object array, Object index, Object value) {
if (array instanceof Map) {
((Map)array).put(index, value);
} else if (array.getClass().isArray()) {
try {
Array.set(array, (int)index, value);
} catch (final Throwable throwable) {
throw new IllegalArgumentException("Error storing value [" + value + "] " +
"in array class [" + array.getClass().getCanonicalName() + "].", throwable);
}
} else if (array instanceof List) {
((List)array).set((int)index, value);
} else {
throw new IllegalArgumentException("Attempting to address a non-array type " +
"[" + array.getClass().getCanonicalName() + "] as an array.");
}
}
/**
* Performs an actual array load.
* @param array array object
* @param index map key, array index (integer), or list index (integer)
*/
@SuppressWarnings("rawtypes")
public static Object arrayLoad(final Object array, Object index) {
if (array instanceof Map) {
return ((Map)array).get(index);
} else if (array.getClass().isArray()) {
try {
return Array.get(array, (int)index);
} catch (final Throwable throwable) {
throw new IllegalArgumentException("Error loading value from " +
"array class [" + array.getClass().getCanonicalName() + "].", throwable);
}
} else if (array instanceof List) {
return ((List)array).get((int)index);
} else {
throw new IllegalArgumentException("Attempting to address a non-array type " +
"[" + array.getClass().getCanonicalName() + "] as an array.");
}
}
// NOTE: below methods are not cached, instead invoked directly because they are performant.
public static Object not(final Object unary) {
if (unary instanceof Double || unary instanceof Float || unary instanceof Long) {

View File

@ -48,6 +48,10 @@ public final class DynamicCallSite {
static final int LOAD = 1;
/** static bootstrap parameter indicating a dynamic store (setter), e.g. foo.bar = baz */
static final int STORE = 2;
/** static bootstrap parameter indicating a dynamic array load, e.g. baz = foo[bar] */
static final int ARRAY_LOAD = 3;
/** static bootstrap parameter indicating a dynamic array store, e.g. foo[bar] = baz */
static final int ARRAY_STORE = 4;
static class InliningCacheCallSite extends MutableCallSite {
/** maximum number of types before we go megamorphic */
@ -104,6 +108,10 @@ public final class DynamicCallSite {
return Def.lookupGetter(clazz, name, Definition.INSTANCE);
case STORE:
return Def.lookupSetter(clazz, name, Definition.INSTANCE);
case ARRAY_LOAD:
return Def.lookupArrayLoad(clazz);
case ARRAY_STORE:
return Def.lookupArrayStore(clazz);
default: throw new AssertionError();
}
}

View File

@ -60,10 +60,14 @@ class WriterConstants {
final static Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DynamicCallSite.class),
"bootstrap", WriterConstants.DEF_BOOTSTRAP_TYPE.toMethodDescriptorString());
final static Method DEF_ARRAY_STORE = getAsmMethod(
void.class, "arrayStore", Object.class, Object.class, Object.class);
final static Method DEF_ARRAY_LOAD = getAsmMethod(
Object.class, "arrayLoad", Object.class, Object.class);
final static String DEF_DYNAMIC_LOAD_FIELD_DESC = MethodType.methodType(Object.class, Object.class)
.toMethodDescriptorString();
final static String DEF_DYNAMIC_STORE_FIELD_DESC = MethodType.methodType(void.class, Object.class, Object.class)
.toMethodDescriptorString();
final static String DEF_DYNAMIC_ARRAY_LOAD_DESC = MethodType.methodType(Object.class, Object.class, Object.class)
.toMethodDescriptorString();
final static String DEF_DYNAMIC_ARRAY_STORE_DESC = MethodType.methodType(void.class, Object.class, Object.class, Object.class)
.toMethodDescriptorString();
final static Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class);
final static Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class);

View File

@ -49,8 +49,6 @@ import static org.elasticsearch.painless.PainlessParser.DIV;
import static org.elasticsearch.painless.PainlessParser.MUL;
import static org.elasticsearch.painless.PainlessParser.REM;
import static org.elasticsearch.painless.PainlessParser.SUB;
import static org.elasticsearch.painless.WriterConstants.DEF_ARRAY_LOAD;
import static org.elasticsearch.painless.WriterConstants.DEF_ARRAY_STORE;
import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_DOUBLE;
@ -468,10 +466,10 @@ class WriterExternal {
private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) {
if (store) {
execute.visitInvokeDynamicInsn(name, "(Ljava/lang/Object;Ljava/lang/Object;)V",
execute.visitInvokeDynamicInsn(name, WriterConstants.DEF_DYNAMIC_STORE_FIELD_DESC,
WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.STORE });
} else {
execute.visitInvokeDynamicInsn(name, "(Ljava/lang/Object;)Ljava/lang/Object;",
execute.visitInvokeDynamicInsn(name, WriterConstants.DEF_DYNAMIC_LOAD_FIELD_DESC,
WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.LOAD });
}
}
@ -483,9 +481,11 @@ class WriterExternal {
if (type.sort == Sort.DEF) {
if (store) {
execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_STORE);
execute.visitInvokeDynamicInsn("arrayStore", WriterConstants.DEF_DYNAMIC_ARRAY_STORE_DESC,
WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.ARRAY_STORE });
} else {
execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_LOAD);
execute.visitInvokeDynamicInsn("arrayLoad", WriterConstants.DEF_DYNAMIC_ARRAY_LOAD_DESC,
WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.ARRAY_LOAD });
}
} else {
if (store) {

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
/** Tests for array load/store and length operations across all types */
public class ArrayTests extends ScriptTestCase {

    /** Exercises Def.arrayLengthGetter for every primitive array type plus object arrays. */
    public void testArrayLengthHelper() throws Throwable {
        assertArrayLength(2, new int[2]);
        assertArrayLength(3, new long[3]);
        assertArrayLength(4, new byte[4]);
        assertArrayLength(5, new float[5]);
        assertArrayLength(6, new double[6]);
        assertArrayLength(7, new char[7]);
        assertArrayLength(8, new short[8]);
        assertArrayLength(9, new Object[9]);
        assertArrayLength(10, new Integer[10]);
        assertArrayLength(11, new String[11][2]);
    }

    // Invokes the MethodHandle returned by Def.arrayLengthGetter and checks its result.
    private void assertArrayLength(int length, Object array) throws Throwable {
        assertEquals(length, (int) Def.arrayLengthGetter(array.getClass()).invoke(array));
    }

    /** def-typed int array: .length read plus subscript store/load. */
    public void testArrayLoadStoreInt() {
        assertEquals(5, exec("def x = new int[5]; return x.length"));
        assertEquals(5, exec("def x = new int[4]; x[0] = 5; return x[0];"));
    }

    /** def-typed String array: .length read plus subscript store/load. */
    public void testArrayLoadStoreString() {
        assertEquals(5, exec("def x = new String[5]; return x.length"));
        assertEquals("foobar", exec("def x = new String[4]; x[0] = 'foobar'; return x[0];"));
    }

    /** def[] array: .length read plus subscript store/load. */
    public void testArrayLoadStoreDef() {
        assertEquals(5, exec("def x = new def[5]; return x.length"));
        assertEquals(5, exec("def x = new def[4]; x[0] = 5; return x[0];"));
    }

    // Sums 0..999 through a def array to exercise loads/stores inside loops; expected 999*1000/2.
    public void testForLoop() {
        assertEquals(999*1000/2, exec("def a = new int[1000]; for (int x = 0; x < a.length; x++) { a[x] = x; } "+
                "int total = 0; for (int x = 0; x < a.length; x++) { total += a[x]; } return total;"));
    }
}

View File

@ -62,12 +62,6 @@ public class BasicAPITests extends ScriptTestCase {
assertEquals(5, exec("def x = new ArrayList(); x.add(3); x[0] = 5; return x[0];"));
}
/** Test loads and stores with a list */
public void testArrayLoadStore() {
assertEquals(5, exec("def x = new int[5]; return x.length"));
assertEquals(5, exec("def x = new int[4]; x[0] = 5; return x[0];"));
}
/** Test shortcut for getters with isXXXX */
public void testListEmpty() {
assertEquals(true, exec("def x = new ArrayList(); return x.empty;"));

View File

@ -19,9 +19,13 @@
package org.elasticsearch.plugins;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.stream.Collectors;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.cli.ExitCodes;
@ -29,6 +33,7 @@ import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.Version;
@LuceneTestCase.SuppressFileSystems("*")
public class ListPluginsCommandTests extends ESTestCase {
@ -43,12 +48,30 @@ public class ListPluginsCommandTests extends ESTestCase {
}
    /** Runs the list-plugins command with no extra CLI arguments. */
    static MockTerminal listPlugins(Environment env) throws Exception {
        return listPlugins(env, new String[0]);
    }
static MockTerminal listPlugins(Environment env, String[] args) throws Exception {
MockTerminal terminal = new MockTerminal();
String[] args = {};
int status = new ListPluginsCommand(env).main(args, terminal);
assertEquals(ExitCodes.OK, status);
return terminal;
}
static String buildMultiline(String... args){
return Arrays.asList(args).stream().collect(Collectors.joining("\n", "", "\n"));
}
    /** Writes a minimal, valid plugin descriptor properties file for a plugin named {@code name}. */
    static void buildFakePlugin(Environment env, String description, String name, String classname) throws IOException {
        PluginTestUtil.writeProperties(env.pluginsFile().resolve(name),
            "description", description,
            "name", name,
            "version", "1.0",
            "elasticsearch.version", Version.CURRENT.toString(),
            "java.version", System.getProperty("java.specification.version"),
            "classname", classname);
    }
public void testPluginsDirMissing() throws Exception {
Environment env = createEnv();
@ -56,7 +79,7 @@ public class ListPluginsCommandTests extends ESTestCase {
IOException e = expectThrows(IOException.class, () -> {
listPlugins(env);
});
assertTrue(e.getMessage(), e.getMessage().contains("Plugins directory missing"));
assertEquals(e.getMessage(), "Plugins directory missing: " + env.pluginsFile());
}
public void testNoPlugins() throws Exception {
@ -66,18 +89,63 @@ public class ListPluginsCommandTests extends ESTestCase {
public void testOnePlugin() throws Exception {
Environment env = createEnv();
Files.createDirectory(env.pluginsFile().resolve("fake"));
buildFakePlugin(env, "fake desc", "fake", "org.fake");
MockTerminal terminal = listPlugins(env);
assertTrue(terminal.getOutput(), terminal.getOutput().contains("fake"));
assertEquals(terminal.getOutput(), buildMultiline("fake"));
}
public void testTwoPlugins() throws Exception {
Environment env = createEnv();
Files.createDirectory(env.pluginsFile().resolve("fake1"));
Files.createDirectory(env.pluginsFile().resolve("fake2"));
buildFakePlugin(env, "fake desc", "fake1", "org.fake");
buildFakePlugin(env, "fake desc 2", "fake2", "org.fake");
MockTerminal terminal = listPlugins(env);
String output = terminal.getOutput();
assertTrue(output, output.contains("fake1"));
assertTrue(output, output.contains("fake2"));
assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2"));
}
    /** With -v, output includes the plugins-directory header followed by the plugin's descriptor details. */
    public void testPluginWithVerbose() throws Exception {
        Environment env = createEnv();
        buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake");
        String[] params = { "-v" };
        MockTerminal terminal = listPlugins(env, params);
        assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin",
            "- Plugin information:", "Name: fake_plugin", "Description: fake desc", "Version: 1.0", " * Classname: org.fake"));
    }
    /** With -v and two plugins, the directory header appears once and each plugin's details follow in order. */
    public void testPluginWithVerboseMultiplePlugins() throws Exception {
        Environment env = createEnv();
        buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake");
        buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2");
        String[] params = { "-v" };
        MockTerminal terminal = listPlugins(env, params);
        assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(),
            "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", "Description: fake desc 1", "Version: 1.0",
            " * Classname: org.fake", "fake_plugin2", "- Plugin information:", "Name: fake_plugin2",
            "Description: fake desc 2", "Version: 1.0", " * Classname: org.fake2"));
    }
    /** Without -v, only the plugin names are printed, one per line. */
    public void testPluginWithoutVerboseMultiplePlugins() throws Exception {
        Environment env = createEnv();
        buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake");
        buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2");
        MockTerminal terminal = listPlugins(env, new String[0]);
        String output = terminal.getOutput();
        assertEquals(output, buildMultiline("fake_plugin1", "fake_plugin2"));
    }
    /** A plugin directory with no descriptor file fails with NoSuchFileException naming that file. */
    public void testPluginWithoutDescriptorFile() throws Exception{
        Environment env = createEnv();
        Files.createDirectories(env.pluginsFile().resolve("fake1"));
        NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(env));
        assertEquals(e.getFile(), env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString());
    }
    /** A descriptor missing a required property (here: name) fails with IllegalArgumentException. */
    public void testPluginWithWrongDescriptorFile() throws Exception{
        Environment env = createEnv();
        PluginTestUtil.writeProperties(env.pluginsFile().resolve("fake1"),
            "description", "fake desc");
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(env));
        assertEquals(e.getMessage(), "Property [name] is missing in [" +
            env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString() + "]");
    }
}

View File

@ -10,7 +10,8 @@
host .+ \n
ip .+ \n
node .+ \n
total .+ \n
field .+ \n
size .+ \n
$/
---
@ -38,39 +39,41 @@
type: type
body: { foo: bar }
refresh: true
- do:
search:
index: index
body:
query: { match_all: {} }
sort: foo
- do:
cat.fielddata:
h: total
h: field,size
v: true
- match:
$body: |
/^ total \n
(\s*\d+(\.\d+)?[gmk]?b \n)+ $/
/^ field \s+ size \n
foo \s+ (\d+(\.\d+)?[gmk]?b \n)+ $/
- do:
cat.fielddata:
h: total,foo
v: true
- match:
$body: |
/^ total \s+ foo \n
(\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/
- do:
cat.fielddata:
h: total,foo
h: field,size
fields: notfoo,foo
v: true
- match:
$body: |
/^ total \s+ foo \n
(\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/
/^ field \s+ size \n
foo \s+ (\d+(\.\d+)?[gmk]?b \n)+ $/
- do:
cat.fielddata:
h: field,size
fields: notfoo
v: true
- match:
$body: |
/^ field \s+ size \n $/