Merge branch 'master' into simplify_license_service_scheduling

Original commit: elastic/x-pack-elasticsearch@91258f1a3c
This commit is contained in:
Areek Zillur 2016-07-07 17:54:46 -04:00
commit 005304afa9
57 changed files with 1045 additions and 240 deletions

View File

@ -58,7 +58,7 @@ public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest>
validationException);
}
if (Strings.hasLength(doc.getMonitoringVersion()) == false) {
validationException = addValidationError("monitored system version is missing for monitoring document [" + i + "]",
validationException = addValidationError("monitored system API version is missing for monitoring document [" + i + "]",
validationException);
}
if (Strings.hasLength(doc.getType()) == false) {
@ -84,7 +84,7 @@ public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest>
/**
* Parses a monitoring bulk request and builds the list of documents to be indexed.
*/
public MonitoringBulkRequest add(BytesReference content, String defaultMonitoringId, String defaultMonitoringVersion,
public MonitoringBulkRequest add(BytesReference content, String defaultMonitoringId, String defaultMonitoringApiVersion,
String defaultType) throws Exception {
// MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest:
// instead of duplicating the parsing logic here we use a new BulkRequest instance to parse the content.
@ -97,7 +97,7 @@ public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest>
// builds a new monitoring document based on the index request
MonitoringBulkDoc doc =
new MonitoringBulkDoc(defaultMonitoringId,
defaultMonitoringVersion,
defaultMonitoringApiVersion,
MonitoringIndex.from(indexRequest.index()),
indexRequest.type(),
indexRequest.id(),

View File

@ -21,9 +21,9 @@ public class MonitoringBulkRequestBuilder
return this;
}
public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultVersion, String defaultType)
public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultApiVersion, String defaultType)
throws Exception {
request.add(content, defaultId, defaultVersion, defaultType);
request.add(content, defaultId, defaultApiVersion, defaultType);
return this;
}
}

View File

@ -89,7 +89,7 @@ public class MonitoringBulkResponse extends ActionResponse {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeThrowable(getCause());
out.writeException(getCause());
}
/**

View File

@ -16,13 +16,13 @@ public final class MonitoringTemplateUtils {
private static final String TEMPLATE_VERSION_PROPERTY = Pattern.quote("${monitoring.template.version}");
/** Current version of es and data templates **/
public static final Integer TEMPLATE_VERSION = 2;
public static final String TEMPLATE_VERSION = "2";
private MonitoringTemplateUtils() {
}
public static String loadTemplate(String id) {
String resource = String.format(Locale.ROOT, TEMPLATE_FILE, id);
return TemplateUtils.loadTemplate(resource, String.valueOf(TEMPLATE_VERSION), TEMPLATE_VERSION_PROPERTY);
return TemplateUtils.loadTemplate(resource, TEMPLATE_VERSION, TEMPLATE_VERSION_PROPERTY);
}
}

View File

@ -137,8 +137,8 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
}
// Used in tests
protected Data(Integer version) {
this.index = String.join(DELIMITER, PREFIX, DATA, String.valueOf(version));
protected Data(String version) {
this.index = String.join(DELIMITER, PREFIX, DATA, version);
}
@Override
@ -153,7 +153,7 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
@Override
public String templateName() {
return String.format(Locale.ROOT, "%s-%s-%d", PREFIX, DATA, MonitoringTemplateUtils.TEMPLATE_VERSION);
return String.format(Locale.ROOT, "%s-%s-%s", PREFIX, DATA, MonitoringTemplateUtils.TEMPLATE_VERSION);
}
@Override
@ -180,9 +180,9 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
}
// Used in tests
protected Timestamped(MonitoredSystem system, Settings settings, Integer version) {
protected Timestamped(MonitoredSystem system, Settings settings, String version) {
this.system = system;
this.index = String.join(DELIMITER, PREFIX, system.getSystem(), String.valueOf(version));
this.index = String.join(DELIMITER, PREFIX, system.getSystem(), version);
String format = INDEX_NAME_TIME_FORMAT_SETTING.get(settings);
try {
this.formatter = DateTimeFormat.forPattern(format).withZoneUTC();
@ -209,7 +209,7 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
@Override
public String templateName() {
return String.format(Locale.ROOT, "%s-%s-%d", PREFIX, getId(), MonitoringTemplateUtils.TEMPLATE_VERSION);
return String.format(Locale.ROOT, "%s-%s-%s", PREFIX, getId(), MonitoringTemplateUtils.TEMPLATE_VERSION);
}
@Override

View File

@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.monitoring.agent.resolver;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
@ -21,6 +20,7 @@ import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndicesStatsMo
import org.elasticsearch.xpack.monitoring.agent.collector.node.NodeStatsMonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.collector.shards.ShardMonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkDataResolver;
import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkTimestampedResolver;
import org.elasticsearch.xpack.monitoring.agent.resolver.cluster.ClusterInfoResolver;
@ -74,8 +74,13 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
private void registerMonitoredSystem(MonitoredSystem id, Settings settings) {
final MonitoringBulkDataResolver dataResolver = new MonitoringBulkDataResolver();
final MonitoringBulkTimestampedResolver timestampedResolver = new MonitoringBulkTimestampedResolver(id, settings);
registrations.add(resolveByClassSystemVersion(id, dataResolver, MonitoringIndex.DATA, Version.CURRENT));
registrations.add(resolveByClassSystemVersion(id, timestampedResolver, MonitoringIndex.TIMESTAMPED, Version.CURRENT));
final String currentApiVersion = MonitoringTemplateUtils.TEMPLATE_VERSION;
// Note: We resolve requests by the API version that is supplied; this allows us to translate and up-convert any older
// requests that come through the _xpack/monitoring/_bulk endpoint
registrations.add(resolveByClassSystemVersion(id, dataResolver, MonitoringIndex.DATA, currentApiVersion));
registrations.add(resolveByClassSystemVersion(id, timestampedResolver, MonitoringIndex.TIMESTAMPED, currentApiVersion));
}
/**
@ -100,7 +105,7 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
}
static Registration resolveByClassSystemVersion(MonitoredSystem system, MonitoringIndexNameResolver resolver, MonitoringIndex index,
Version version) {
String apiVersion) {
return new Registration(resolver, doc -> {
try {
if (doc instanceof MonitoringBulkDoc == false || index != ((MonitoringBulkDoc)doc).getIndex()) {
@ -109,7 +114,7 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
if (system != MonitoredSystem.fromSystem(doc.getMonitoringId())) {
return false;
}
return version == Version.fromString(doc.getMonitoringVersion());
return apiVersion.equals(doc.getMonitoringVersion());
} catch (Exception e) {
return false;
}

View File

@ -29,7 +29,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestMonitoringBulkAction extends MonitoringRestHandler {
public static final String MONITORING_ID = "system_id";
public static final String MONITORING_VERSION = "system_version";
public static final String MONITORING_VERSION = "system_api_version";
@Inject
public RestMonitoringBulkAction(Settings settings, RestController controller) {

View File

@ -36,13 +36,13 @@ public class MonitoringBulkRequestTests extends ESTestCase {
MonitoringBulkDoc doc = new MonitoringBulkDoc(null, null);
assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("monitored system id is missing for monitoring document [0]",
"monitored system version is missing for monitoring document [0]",
"monitored system API version is missing for monitoring document [0]",
"type is missing for monitoring document [0]",
"source is missing for monitoring document [0]"));
doc = new MonitoringBulkDoc("id", null);
assertValidationErrors(new MonitoringBulkRequest().add(doc),
hasItems("monitored system version is missing for monitoring document [0]",
hasItems("monitored system API version is missing for monitoring document [0]",
"type is missing for monitoring document [0]",
"source is missing for monitoring document [0]"));
@ -92,7 +92,7 @@ public class MonitoringBulkRequestTests extends ESTestCase {
assertValidationErrors(request, hasItems("type is missing for monitoring document [1]",
"source is missing for monitoring document [2]",
"monitored system version is missing for monitoring document [3]",
"monitored system API version is missing for monitoring document [3]",
"monitored system id is missing for monitoring document [4]"));
}

View File

@ -5,13 +5,12 @@
*/
package org.elasticsearch.xpack.monitoring.action;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkTimestampedResolver;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;
@ -29,7 +28,6 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@TestLogging("_root:DEBUG")
public class MonitoringBulkTests extends MonitoringIntegTestCase {
@Override
@ -43,7 +41,7 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
int numDocs = scaledRandomIntBetween(100, 5000);
for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", numDocs).endObject().bytes());
requestBuilder.add(doc);
@ -95,7 +93,8 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
int numDocs = scaledRandomIntBetween(10, 50);
for (int k = 0; k < numDocs; k++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
MonitoringBulkDoc doc =
new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
doc.setType("concurrent");
doc.setSource(jsonBuilder().startObject().field("num", k).endObject().bytes());
requestBuilder.add(doc);
@ -133,10 +132,10 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
for (int i = 0; i < totalDocs; i++) {
MonitoringBulkDoc doc;
if (randomBoolean()) {
doc = new MonitoringBulkDoc("unknown", Version.CURRENT.toString());
doc = new MonitoringBulkDoc("unknown", MonitoringTemplateUtils.TEMPLATE_VERSION);
unsupportedDocs++;
} else {
doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString());
doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
}
doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes());

View File

@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
import org.elasticsearch.xpack.monitoring.action.MonitoringIndex;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.MonitoringIndexNameResolverTestCase;
import static java.util.Collections.emptyMap;
@ -29,7 +30,7 @@ public class MonitoringBulkDataResolverTests extends MonitoringIndexNameResolver
@Override
protected MonitoringBulkDoc newMonitoringDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString(),
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringIndex.DATA, "kibana", id,
new BytesArray("{\"field1\" : \"value1\"}"));

View File

@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
import org.elasticsearch.xpack.monitoring.action.MonitoringIndex;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.MonitoringIndexNameResolverTestCase;
import static java.util.Collections.emptyMap;
@ -28,7 +29,7 @@ public class MonitoringBulkTimestampedResolverTests
@Override
protected MonitoringBulkDoc newMonitoringDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString(),
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringIndex.TIMESTAMPED, "kibana_stats", null,
new BytesArray("{\"field1\" : \"value1\"}"));

View File

@ -102,7 +102,7 @@ public class MultiNodesStatsTests extends MonitoringIntegTestCase {
assertThat(((StringTerms) aggregation).getBuckets().size(), equalTo(nbNodes));
for (String nodeName : internalCluster().getNodeNames()) {
StringTerms.Bucket bucket = (StringTerms.Bucket) ((StringTerms) aggregation)
StringTerms.Bucket bucket = ((StringTerms) aggregation)
.getBucketByKey(internalCluster().clusterService(nodeName).localNode().getId());
// At least 1 doc must exist per node, but it can be more than 1
// because the first node may have already collected many node stats documents

View File

@ -77,8 +77,8 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
// Won't be deleted
createIndex(MonitoringSettings.LEGACY_DATA_INDEX_NAME, now().minusYears(1));
createDataIndex(now().minusDays(10), 0);
createDataIndex(now().minusDays(10), 1);
createDataIndex(now().minusDays(10), "0");
createDataIndex(now().minusDays(10), "1");
assertIndicesCount(4);
CleanerService.Listener listener = getListener();
@ -108,9 +108,9 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
createTimestampedIndex(now().minusDays(10));
// Won't be deleted
createTimestampedIndex(now().minusDays(10), 0);
createTimestampedIndex(now().minusDays(10), 1);
createTimestampedIndex(now().minusDays(10), Integer.MAX_VALUE);
createTimestampedIndex(now().minusDays(10), "0");
createTimestampedIndex(now().minusDays(10), "1");
createTimestampedIndex(now().minusDays(10), String.valueOf(Integer.MAX_VALUE));
assertIndicesCount(4);
CleanerService.Listener listener = getListener();
@ -198,7 +198,7 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
/**
* Creates a monitoring data index in a given version.
*/
protected void createDataIndex(DateTime creationDate, int version) {
protected void createDataIndex(DateTime creationDate, String version) {
createIndex(new MockDataIndexNameResolver(version).index(randomMonitoringDoc()), creationDate);
}
@ -212,7 +212,7 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
/**
* Creates a monitoring timestamped index using a given template version.
*/
protected void createTimestampedIndex(DateTime creationDate, int version) {
protected void createTimestampedIndex(DateTime creationDate, String version) {
MonitoringDoc monitoringDoc = randomMonitoringDoc();
monitoringDoc.setTimestamp(creationDate.getMillis());

View File

@ -440,7 +440,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase {
public class MockDataIndexNameResolver extends MonitoringIndexNameResolver.Data<MonitoringDoc> {
public MockDataIndexNameResolver(Integer version) {
public MockDataIndexNameResolver(String version) {
super(version);
}
@ -462,7 +462,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase {
protected class MockTimestampedIndexNameResolver extends MonitoringIndexNameResolver.Timestamped<MonitoringDoc> {
public MockTimestampedIndexNameResolver(MonitoredSystem system, Settings settings, Integer version) {
public MockTimestampedIndexNameResolver(MonitoredSystem system, Settings settings, String version) {
super(system, settings, version);
}

View File

@ -16,9 +16,9 @@
"type": "string",
"description" : "Identifier of the monitored system"
},
"system_version" : {
"system_api_version" : {
"type" : "string",
"description" : "Version of the monitored system"
"description" : "API Version of the monitored system"
}
}
},

View File

@ -13,14 +13,11 @@ setup:
---
"Bulk indexing of monitoring data":
# Get the current version
- do: {info: {}}
- set: {version.number: version}
- do:
xpack.monitoring.bulk:
system_id: "kibana"
system_version: $version
system_api_version: "2"
body:
- index:
_type: test_type
@ -54,7 +51,7 @@ setup:
- do:
xpack.monitoring.bulk:
system_id: "kibana"
system_version: $version
system_api_version: "2"
type: "default_type"
body:
- '{"index": {}}'

View File

@ -143,7 +143,7 @@ public class SecurityFeatureSet implements XPackFeatureSet {
static boolean systemKeyUsage(CryptoService cryptoService) {
// we can piggy back on the encryption enabled method as it is only enabled if there is a system key
return cryptoService.encryptionEnabled();
return cryptoService != null && cryptoService.encryptionEnabled();
}
static class Usage extends XPackFeatureSet.Usage {

View File

@ -0,0 +1,91 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.DocValuesNumbersQuery;
import org.apache.lucene.search.DocValuesRangeQuery;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointInSetQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.spans.SpanTermQuery;
import java.util.Set;
/**
* Extracts fields from a query, or throws UnsupportedOperationException.
* <p>
* Lucene queries have {@link Weight#extractTerms}, but this is really geared at things
* such as highlighting, not security. For example terms in a Boolean {@code MUST_NOT} clause
* are not included, TermsQuery doesn't implement the method as it could be terribly slow, etc.
*/
class FieldExtractor {
    /**
     * Populates {@code fields} with the set of fields used by the query, or throws
     * UnsupportedOperationException if it doesn't know how to do this.
     */
    static void extractFields(Query query, Set<String> fields) throws UnsupportedOperationException {
        // NOTE: we expect a rewritten query, so we only need logic for "atomic" queries here:
        if (query instanceof BooleanQuery) {
            // compound query: recurse into every clause (including MUST_NOT ones)
            for (BooleanClause clause : ((BooleanQuery) query).clauses()) {
                extractFields(clause.getQuery(), fields);
            }
        } else if (query instanceof DisjunctionMaxQuery) {
            // compound query: recurse into each disjunct
            for (Query disjunct : ((DisjunctionMaxQuery) query).getDisjuncts()) {
                extractFields(disjunct, fields);
            }
        } else if (query instanceof SpanTermQuery) {
            // we just do SpanTerm, other spans are trickier, they could contain
            // the evil FieldMaskingSpanQuery: so SpanQuery.getField cannot be trusted.
            fields.add(((SpanTermQuery) query).getField());
        } else if (query instanceof TermQuery) {
            fields.add(((TermQuery) query).getTerm().field());
        } else if (query instanceof SynonymQuery) {
            // every term of a synonym query has the same field, so the first one suffices
            fields.add(((SynonymQuery) query).getTerms().get(0).field());
        } else if (query instanceof PhraseQuery) {
            // every term of a phrase has the same field, so the first one suffices
            fields.add(((PhraseQuery) query).getTerms()[0].field());
        } else if (query instanceof MultiPhraseQuery) {
            // every position of a multi-phrase has the same field, so the first term suffices
            fields.add(((MultiPhraseQuery) query).getTermArrays()[0][0].field());
        } else if (query instanceof PointRangeQuery) {
            fields.add(((PointRangeQuery) query).getField());
        } else if (query instanceof PointInSetQuery) {
            fields.add(((PointInSetQuery) query).getField());
        } else if (query instanceof FieldValueQuery) {
            fields.add(((FieldValueQuery) query).getField());
        } else if (query instanceof DocValuesNumbersQuery) {
            fields.add(((DocValuesNumbersQuery) query).getField());
        } else if (query instanceof DocValuesRangeQuery) {
            fields.add(((DocValuesRangeQuery) query).getField());
        } else if (query instanceof MatchAllDocsQuery || query instanceof MatchNoDocsQuery) {
            // these match (or reject) documents without touching any field
        } else {
            throw new UnsupportedOperationException(); // we don't know how to get the fields from it
        }
    }
}

View File

@ -56,30 +56,33 @@ public final class FieldSubsetReader extends FilterLeafReader {
* and so on.
* @param in reader to filter
* @param fieldNames fields to filter.
* @param negate {@code true} if this should be a negative set, meaning set of field names that is denied.
*/
public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames);
public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames, boolean negate) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames, negate);
}
// wraps subreaders with fieldsubsetreaders.
static class FieldSubsetDirectoryReader extends FilterDirectoryReader {
private final Set<String> fieldNames;
private final boolean negate;
FieldSubsetDirectoryReader(DirectoryReader in, final Set<String> fieldNames) throws IOException {
FieldSubsetDirectoryReader(DirectoryReader in, Set<String> fieldNames, boolean negate) throws IOException {
super(in, new FilterDirectoryReader.SubReaderWrapper() {
@Override
public LeafReader wrap(LeafReader reader) {
return new FieldSubsetReader(reader, fieldNames);
return new FieldSubsetReader(reader, fieldNames, negate);
}
});
this.fieldNames = fieldNames;
this.negate = negate;
verifyNoOtherFieldSubsetDirectoryReaderIsWrapped(in);
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames);
return new FieldSubsetDirectoryReader(in, fieldNames, negate);
}
public Set<String> getFieldNames() {
@ -111,17 +114,23 @@ public final class FieldSubsetReader extends FilterLeafReader {
/**
* Wrap a single segment, exposing a subset of its fields.
* @param fields set of field names that should be allowed
* @param negate {@code true} if this should be a negative set, meaning set of field names that is denied.
*/
FieldSubsetReader(LeafReader in, Set<String> fieldNames) {
FieldSubsetReader(LeafReader in, Set<String> fields, boolean negate) {
super(in);
// look at what fields the reader has, and preprocess a subset of them that are allowed
ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
for (FieldInfo fi : in.getFieldInfos()) {
if (fieldNames.contains(fi.name)) {
if (fields.contains(fi.name) ^ negate) {
filteredInfos.add(fi);
}
}
fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
this.fieldNames = fieldNames.toArray(new String[fieldNames.size()]);
fieldNames = new String[filteredInfos.size()];
for (int i = 0; i < fieldNames.length; i++) {
fieldNames[i] = filteredInfos.get(i).name;
}
}
/** returns true if this field is allowed. */

View File

@ -16,8 +16,12 @@ import org.elasticsearch.indices.IndicesQueryCache;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.xpack.security.authz.InternalAuthorizationService;
import java.util.HashSet;
import java.util.Set;
/**
* Opts out of the query cache if field level security is active for the current request.
* Opts out of the query cache if field level security is active for the current request,
* and its unsafe to cache.
*/
public final class OptOutQueryCache extends AbstractIndexComponent implements QueryCache {
@ -64,13 +68,41 @@ public final class OptOutQueryCache extends AbstractIndexComponent implements Qu
IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(indexName);
if (indexAccessControl != null && indexAccessControl.getFields() != null) {
logger.debug("opting out of the query cache. request for index [{}] has field level security enabled", indexName);
// If in the future there is a Query#extractFields() then we can be smart on when to skip the query cache.
// (only cache if all fields in the query also are defined in the role)
return weight;
if (cachingIsSafe(weight, indexAccessControl)) {
logger.trace("not opting out of the query cache. request for index [{}] is safe to cache", indexName);
return indicesQueryCache.doCache(weight, policy);
} else {
logger.trace("opting out of the query cache. request for index [{}] is unsafe to cache", indexName);
return weight;
}
} else {
logger.trace("not opting out of the query cache. request for index [{}] has field level security disabled", indexName);
return indicesQueryCache.doCache(weight, policy);
}
}
/**
* Returns true if its safe to use the query cache for this query.
*/
static boolean cachingIsSafe(Weight weight, IndicesAccessControl.IndexAccessControl permissions) {
// support caching for common queries, by inspecting the field
// TODO: If in the future there is a Query#extractFields() then we can do a better job
Set<String> fields = new HashSet<>();
try {
FieldExtractor.extractFields(weight.getQuery(), fields);
} catch (UnsupportedOperationException ok) {
// we don't know how to safely extract the fields of this query, don't cache.
return false;
}
// we successfully extracted the set of fields: check each one
for (String field : fields) {
// don't cache any internal fields (e.g. _field_names), these are complicated.
if (field.startsWith("_") || permissions.getFields().contains(field) == false) {
return false;
}
}
// we can cache, all fields are ok
return true;
}
}

View File

@ -144,7 +144,8 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
allowedFields.addAll(mapperService.simpleMatchToIndexNames(field));
}
resolveParentChildJoinFields(allowedFields);
reader = FieldSubsetReader.wrap(reader, allowedFields);
// TODO: support 'denied' fields (pass true as the 3rd parameter in this case)
reader = FieldSubsetReader.wrap(reader, allowedFields, false);
}
return reader;

View File

@ -35,6 +35,12 @@ public class RestAuthenticateAction extends BaseRestHandler {
super(settings);
this.securityContext = securityContext;
controller.registerHandler(GET, "/_xpack/security/_authenticate", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(GET, "/_shield/authenticate", this,
"[GET /_shield/authenticate] is deprecated! Use " +
"[GET /_xpack/security/_authenticate] instead.",
deprecationLogger);
}
@Override

View File

@ -23,8 +23,17 @@ public class RestClearRealmCacheAction extends BaseRestHandler {
@Inject
public RestClearRealmCacheAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(POST, "/_xpack/security/realm/{realms}/_cache/clear", this); // deprecated
controller.registerHandler(POST, "/_xpack/security/realm/{realms}/_clear_cache", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/realm/{realms}/_cache/clear", this,
"[POST /_shield/realm/{realms}/_cache/clear] is deprecated! Use " +
"[POST /_xpack/security/realm/{realms}/_clear_cache] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(POST, "/_shield/realm/{realms}/_clear_cache", this,
"[POST /_shield/realm/{realms}/_clear_cache] is deprecated! Use " +
"[POST /_xpack/security/realm/{realms}/_clear_cache] instead.",
deprecationLogger);
}
@Override

View File

@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.security.rest.action.role;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -28,6 +27,12 @@ public class RestClearRolesCacheAction extends BaseRestHandler {
public RestClearRolesCacheAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(POST, "/_xpack/security/role/{name}/_clear_cache", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/role/{name}/_clear_cache", this,
"[POST /_shield/role/{name}/_clear_cache] is deprecated! Use " +
"[POST /_xpack/security/role/{name}/_clear_cache] instead.",
deprecationLogger);
}
@Override

View File

@ -21,6 +21,8 @@ import org.elasticsearch.xpack.security.action.role.DeleteRoleRequestBuilder;
import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
/**
* Rest endpoint to delete a Role from the security index
*/
@ -29,7 +31,13 @@ public class RestDeleteRoleAction extends BaseRestHandler {
@Inject
public RestDeleteRoleAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE, "/_xpack/security/role/{name}", this);
controller.registerHandler(DELETE, "/_xpack/security/role/{name}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(DELETE, "/_shield/role/{name}", this,
"[DELETE /_shield/role/{name}] is deprecated! Use " +
"[DELETE /_xpack/security/role/{name}] instead.",
deprecationLogger);
}
@Override

View File

@ -22,6 +22,8 @@ import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import static org.elasticsearch.rest.RestRequest.Method.GET;
/**
* Rest endpoint to retrieve a Role from the security index
*/
@ -30,8 +32,18 @@ public class RestGetRolesAction extends BaseRestHandler {
@Inject
public RestGetRolesAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/role/", this);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/role/{name}", this);
controller.registerHandler(GET, "/_xpack/security/role/", this);
controller.registerHandler(GET, "/_xpack/security/role/{name}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(GET, "/_shield/role", this,
"[GET /_shield/role] is deprecated! Use " +
"[GET /_xpack/security/role] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(GET, "/_shield/role/{name}", this,
"[GET /_shield/role/{name}] is deprecated! Use " +
"[GET /_xpack/security/role/{name}] instead.",
deprecationLogger);
}
@Override

View File

@ -21,6 +21,9 @@ import org.elasticsearch.xpack.security.action.role.PutRoleRequestBuilder;
import org.elasticsearch.xpack.security.action.role.PutRoleResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/**
* Rest endpoint to add a Role to the security index
*/
@ -29,8 +32,18 @@ public class RestPutRoleAction extends BaseRestHandler {
@Inject
public RestPutRoleAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/role/{name}", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/role/{name}", this);
controller.registerHandler(POST, "/_xpack/security/role/{name}", this);
controller.registerHandler(PUT, "/_xpack/security/role/{name}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/role/{name}", this,
"[POST /_shield/role/{name}] is deprecated! Use " +
"[POST /_xpack/security/role/{name}] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(PUT, "/_shield/role/{name}", this,
"[PUT /_shield/role/{name}] is deprecated! Use " +
"[PUT /_xpack/security/role/{name}] instead.",
deprecationLogger);
}
@Override

View File

@ -22,6 +22,9 @@ import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.action.user.ChangePasswordResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/**
*/
public class RestChangePasswordAction extends BaseRestHandler {
@ -32,10 +35,10 @@ public class RestChangePasswordAction extends BaseRestHandler {
public RestChangePasswordAction(Settings settings, RestController controller, SecurityContext securityContext) {
super(settings);
this.securityContext = securityContext;
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/user/{username}/_password", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/user/{username}/_password", this);
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/user/_password", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/user/_password", this);
controller.registerHandler(POST, "/_xpack/security/user/{username}/_password", this);
controller.registerHandler(PUT, "/_xpack/security/user/{username}/_password", this);
controller.registerHandler(POST, "/_xpack/security/user/_password", this);
controller.registerHandler(PUT, "/_xpack/security/user/_password", this);
}
@Override

View File

@ -17,11 +17,12 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.support.RestBuilderListener;
import org.elasticsearch.xpack.security.action.user.DeleteUserRequest;
import org.elasticsearch.xpack.security.action.user.DeleteUserRequestBuilder;
import org.elasticsearch.xpack.security.action.user.DeleteUserResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
/**
* Rest action to delete a user from the security index
*/
@ -30,7 +31,13 @@ public class RestDeleteUserAction extends BaseRestHandler {
@Inject
public RestDeleteUserAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE, "/_xpack/security/user/{username}", this);
controller.registerHandler(DELETE, "/_xpack/security/user/{username}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(DELETE, "/_shield/user/{username}", this,
"[DELETE /_shield/user/{username}] is deprecated! Use " +
"[DELETE /_xpack/security/user/{username}] instead.",
deprecationLogger);
}
@Override

View File

@ -9,7 +9,6 @@ import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
@ -23,6 +22,8 @@ import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.action.user.GetUsersResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.GET;
/**
* Rest action to retrieve a user from the security index
*/
@ -31,8 +32,18 @@ public class RestGetUsersAction extends BaseRestHandler {
@Inject
public RestGetUsersAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/user/", this);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/user/{username}", this);
controller.registerHandler(GET, "/_xpack/security/user/", this);
controller.registerHandler(GET, "/_xpack/security/user/{username}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(GET, "/_shield/user", this,
"[GET /_shield/user] is deprecated! Use " +
"[GET /_xpack/security/user] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(GET, "/_shield/user/{username}", this,
"[GET /_shield/user/{username}] is deprecated! Use " +
"[GET /_xpack/security/user/{username}] instead.",
deprecationLogger);
}
@Override

View File

@ -21,6 +21,9 @@ import org.elasticsearch.xpack.security.action.user.PutUserRequestBuilder;
import org.elasticsearch.xpack.security.action.user.PutUserResponse;
import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/**
* Rest endpoint to add a User to the security index
*/
@ -29,8 +32,18 @@ public class RestPutUserAction extends BaseRestHandler {
@Inject
public RestPutUserAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/user/{username}", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/user/{username}", this);
controller.registerHandler(POST, "/_xpack/security/user/{username}", this);
controller.registerHandler(PUT, "/_xpack/security/user/{username}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/user/{username}", this,
"[POST /_shield/user/{username}] is deprecated! Use " +
"[POST /_xpack/security/user/{username}] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(PUT, "/_shield/user/{username}", this,
"[PUT /_shield/user/{username}] is deprecated! Use " +
"[PUT /_xpack/security/user/{username}] instead.",
deprecationLogger);
}
@Override

View File

@ -93,6 +93,18 @@ public class SecurityFeatureSetTests extends ESTestCase {
assertThat(featureSet.enabled(), is(true));
}
public void testSystemKeyUsageEnabledByCryptoService() {
final boolean enabled = randomBoolean();
when(cryptoService.encryptionEnabled()).thenReturn(enabled);
assertThat(SecurityFeatureSet.systemKeyUsage(cryptoService), is(enabled));
}
public void testSystemKeyUsageNotEnabledIfNull() {
assertThat(SecurityFeatureSet.systemKeyUsage(null), is(false));
}
public void testUsage() throws Exception {
boolean authcAuthzAvailable = randomBoolean();

View File

@ -97,7 +97,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs));
assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
global = sortedSetDVOrdinalsIndexFieldData.loadGlobal(ir);
atomic = global.load(ir.leaves().get(0));
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -110,7 +110,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), greaterThanOrEqualTo(1L));
}
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
for (LeafReaderContext context : ir.leaves()) {
AtomicOrdinalsFieldData atomic = sortedSetDVOrdinalsIndexFieldData.load(context);
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -126,7 +126,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs));
assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
global = pagedBytesIndexFieldData.loadGlobal(ir);
atomic = global.load(ir.leaves().get(0));
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -141,7 +141,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
}
assertThat(indexFieldDataCache.leafLevelBuilds, equalTo(ir.leaves().size()));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
for (LeafReaderContext context : ir.leaves()) {
AtomicOrdinalsFieldData atomic = pagedBytesIndexFieldData.load(context);
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));

View File

@ -0,0 +1,138 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.AssertingQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.DocValuesNumbersQuery;
import org.apache.lucene.search.DocValuesRangeQuery;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/** Unit tests verifying which field names {@link FieldExtractor} pulls out of each Lucene query type. */
public class FieldExtractorTests extends ESTestCase {

    public void testBoolean() {
        BooleanQuery.Builder bq = new BooleanQuery.Builder();
        bq.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.MUST);
        bq.add(new TermQuery(new Term("no", "baz")), BooleanClause.Occur.MUST_NOT);
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(bq.build(), actual);
        // both required and prohibited clauses contribute their fields
        assertEquals(asSet("foo", "no"), actual);
    }

    public void testDisjunctionMax() {
        DisjunctionMaxQuery dismax = new DisjunctionMaxQuery(Arrays.asList(
                new TermQuery(new Term("one", "bar")),
                new TermQuery(new Term("two", "baz"))
        ), 1.0F);
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(dismax, actual);
        assertEquals(asSet("one", "two"), actual);
    }

    public void testSpanTerm() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(new SpanTermQuery(new Term("foo", "bar")), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testTerm() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(new TermQuery(new Term("foo", "bar")), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testSynonym() {
        SynonymQuery synonym = new SynonymQuery(new Term("foo", "bar"), new Term("foo", "baz"));
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(synonym, actual);
        // synonym terms share a single field
        assertEquals(asSet("foo"), actual);
    }

    public void testPhrase() {
        PhraseQuery.Builder phrase = new PhraseQuery.Builder();
        phrase.add(new Term("foo", "bar"));
        phrase.add(new Term("foo", "baz"));
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(phrase.build(), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testMultiPhrase() {
        MultiPhraseQuery.Builder multiPhrase = new MultiPhraseQuery.Builder();
        multiPhrase.add(new Term("foo", "bar"));
        multiPhrase.add(new Term[] { new Term("foo", "baz"), new Term("foo", "baz2") });
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(multiPhrase.build(), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testPointRange() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(IntPoint.newRangeQuery("foo", 3, 4), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testPointSet() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(IntPoint.newSetQuery("foo", 3, 4, 5), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testFieldValue() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(new FieldValueQuery("foo"), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testDocValuesNumbers() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(new DocValuesNumbersQuery("foo", 5L), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testDocValuesRange() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(DocValuesRangeQuery.newLongRange("foo", 1L, 2L, true, true), actual);
        assertEquals(asSet("foo"), actual);
    }

    public void testMatchAllDocs() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(new MatchAllDocsQuery(), actual);
        // match-all touches no particular field
        assertEquals(Collections.emptySet(), actual);
    }

    public void testMatchNoDocs() {
        Set<String> actual = new HashSet<>();
        FieldExtractor.extractFields(new MatchNoDocsQuery(), actual);
        assertEquals(Collections.emptySet(), actual);
    }

    public void testUnsupported() {
        // an unrecognized query type must be rejected, not silently ignored
        Set<String> actual = new HashSet<>();
        expectThrows(UnsupportedOperationException.class, () -> {
            FieldExtractor.extractFields(new AssertingQuery(random(), new MatchAllDocsQuery()), actual);
        });
    }
}

View File

@ -70,7 +70,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -102,7 +102,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -189,7 +189,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -216,7 +216,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -243,7 +243,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -270,7 +270,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -297,7 +297,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -324,7 +324,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -353,7 +353,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Fields vectors = ir.getTermVectors(0);
@ -383,7 +383,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -410,7 +410,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -442,7 +442,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -474,7 +474,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -506,7 +506,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -542,7 +542,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -577,7 +577,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -610,7 +610,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(SourceFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
Document d2 = ir.document(0);
@ -641,7 +641,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -690,7 +690,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
fields.add("fieldA");
fields.add("fieldC");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only two fields
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -738,7 +738,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
fields.add("fieldA");
fields.add("fieldC");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -774,7 +774,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -803,7 +803,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("id");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
assertEquals(2, ir.numDocs());
assertEquals(1, ir.leaves().size());
@ -838,7 +838,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldB");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// sees no fields
assertNull(ir.getTermVectors(0));
@ -858,7 +858,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
// see no fields
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -888,9 +888,9 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.close();
DirectoryReader directoryReader = DirectoryReader.open(dir);
directoryReader = FieldSubsetReader.wrap(directoryReader, Collections.emptySet());
directoryReader = FieldSubsetReader.wrap(directoryReader, Collections.emptySet(), false);
try {
FieldSubsetReader.wrap(directoryReader, Collections.emptySet());
FieldSubsetReader.wrap(directoryReader, Collections.emptySet(), false);
fail("shouldn't be able to wrap FieldSubsetDirectoryReader twice");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Can't wrap [class org.elasticsearch.xpack.security.authz.accesscontrol" +
@ -899,4 +899,36 @@ public class FieldSubsetReaderTests extends ESTestCase {
directoryReader.close();
dir.close();
}
/**
 * test filtering two string fields, with negated set
 */
public void testNegative() throws Exception {
    Directory dir = newDirectory();
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));

    // index one document carrying both fields
    Document doc = new Document();
    doc.add(new StringField("fieldA", "test", Field.Store.NO));
    doc.add(new StringField("fieldB", "test", Field.Store.NO));
    iw.addDocument(doc);

    // wrap with negate=true: the given set is EXCLUDED, so only fieldA survives
    DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), Collections.singleton("fieldB"), true);

    LeafReader segmentReader = ir.leaves().get(0).reader();
    Set<String> visible = new HashSet<>();
    for (String name : segmentReader.fields()) {
        visible.add(name);
    }
    assertEquals(Collections.singleton("fieldA"), visible);
    assertNotNull(segmentReader.terms("fieldA"));
    assertNull(segmentReader.terms("fieldB"));

    TestUtil.checkReader(ir);
    IOUtils.close(ir, iw, dir);
}
}

View File

@ -7,8 +7,11 @@ package org.elasticsearch.xpack.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
@ -21,22 +24,38 @@ import java.util.stream.Collectors;
/**
*/
public class TransportXPackUsageAction extends HandledTransportAction<XPackUsageRequest, XPackUsageResponse> {
public class TransportXPackUsageAction extends TransportMasterNodeAction<XPackUsageRequest, XPackUsageResponse> {
private final Set<XPackFeatureSet> featureSets;
@Inject
public TransportXPackUsageAction(Settings settings, ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
Set<XPackFeatureSet> featureSets) {
super(settings, XPackUsageAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
ClusterService clusterService, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, Set<XPackFeatureSet> featureSets) {
super(settings, XPackUsageAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
XPackUsageRequest::new);
this.featureSets = featureSets;
}
@Override
protected void doExecute(XPackUsageRequest request, ActionListener<XPackUsageResponse> listener) {
protected String executor() {
return ThreadPool.Names.MANAGEMENT;
}
@Override
protected XPackUsageResponse newResponse() {
return new XPackUsageResponse();
}
@Override
protected void masterOperation(XPackUsageRequest request, ClusterState state, ActionListener<XPackUsageResponse> listener)
throws Exception {
List<XPackFeatureSet.Usage> usages = featureSets.stream().map(XPackFeatureSet::usage).collect(Collectors.toList());
listener.onResponse(new XPackUsageResponse(usages));
}
@Override
protected ClusterBlockException checkBlock(XPackUsageRequest request, ClusterState state) {
return null;
}
}

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.action;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeRequest;
/**
*
*/
public class XPackUsageRequest extends ActionRequest<XPackUsageRequest> {
public class XPackUsageRequest extends MasterNodeRequest<XPackUsageRequest> {
@Override
public ActionRequestValidationException validate() {

View File

@ -5,12 +5,11 @@
*/
package org.elasticsearch.xpack.action;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
/**
*/
public class XPackUsageRequestBuilder extends ActionRequestBuilder<XPackUsageRequest, XPackUsageResponse, XPackUsageRequestBuilder> {
public class XPackUsageRequestBuilder
extends MasterNodeOperationRequestBuilder<XPackUsageRequest, XPackUsageResponse, XPackUsageRequestBuilder> {
public XPackUsageRequestBuilder(ElasticsearchClient client) {
this(client, XPackUsageAction.INSTANCE);

View File

@ -0,0 +1,87 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.common.stats;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
import java.util.HashMap;
import java.util.Map;
/**
 * Helper class to create simple usage stat counters based on longs.
 * Internally this is a map from String to a long, which is the counter.
 * <p>
 * Calling {@link #toMap()} will create a nested map, where each dot of the key
 * name nests one map deeper, so producers do not need to care how the result
 * map is structured.
 * <p>
 * NOTE(review): not thread-safe as written — the backing HPPC map is unsynchronized;
 * confirm callers only mutate from a single thread.
 */
public class Counters {

    private ObjectLongHashMap<String> counters = new ObjectLongHashMap<>();

    /**
     * Creates the counters, pre-registering each given name at zero so it is
     * present in {@link #toMap()} even if never incremented.
     *
     * @param names names of counters to pre-register
     */
    public Counters(String ... names) {
        for (String name : names) {
            set(name);
        }
    }

    /**
     * Sets a counter to zero. This ensures that the counter is there, even though it is never incremented.
     * @param name Name of the counter
     */
    public void set(String name) {
        counters.put(name, 0);
    }

    /**
     * Increment the counter by one
     * @param name Name of the counter
     */
    public void inc(String name) {
        inc(name, 1);
    }

    /**
     * Increment the counter by configured number
     * @param name The name of the counter
     * @param count Incremental value
     */
    public void inc(String name, long count) {
        counters.addTo(name, count);
    }

    /**
     * Convert the counters to a nested map, using the "." as a splitter to create deeper maps.
     *
     * NOTE(review): counter names where one key is a prefix of another (e.g. "foo" and
     * "foo.bar") would collide on the "foo" slot and cause a ClassCastException here —
     * assumed not to occur in practice; confirm with producers.
     *
     * @return A nested map with all the current configured counters
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> toMap() {
        Map<String, Object> map = new HashMap<>();
        for (ObjectLongCursor<String> counter : counters) {
            if (counter.key.contains(".")) {
                String[] parts = counter.key.split("\\.");
                // walk/create the intermediate maps, then store the value at the leaf
                Map<String, Object> curr = map;
                for (int i = 0; i < parts.length - 1; i++) {
                    curr = (Map<String, Object>) curr.computeIfAbsent(parts[i], k -> new HashMap<String, Object>());
                }
                curr.put(parts[parts.length - 1], counter.value);
            } else {
                map.put(counter.key, counter.value);
            }
        }
        return map;
    }
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.rest.action;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -33,7 +34,9 @@ public class RestXPackUsageAction extends XPackRestHandler {
@Override
public void handleRequest(RestRequest request, RestChannel restChannel, XPackClient client) throws Exception {
new XPackUsageRequestBuilder(client.es()).execute(new RestBuilderListener<XPackUsageResponse>(restChannel) {
new XPackUsageRequestBuilder(client.es())
.setMasterNodeTimeout(request.paramAsTime("master_timeout", MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT))
.execute(new RestBuilderListener<XPackUsageResponse>(restChannel) {
@Override
public RestResponse buildResponse(XPackUsageResponse response, XContentBuilder builder) throws Exception {
builder.startObject();

View File

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.common.stats;
import org.elasticsearch.test.ESTestCase;
import java.util.Map;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
public class CountersTests extends ESTestCase {

    /**
     * Checks that flat counter names stay as top level entries and that dotted
     * names are nested into sub maps by {@link Counters#toMap()}.
     */
    @SuppressWarnings("unchecked")
    public void testCounters() {
        Counters counters = new Counters();
        counters.inc("f", 200);
        counters.inc("foo.bar");
        counters.inc("foo.baz");
        counters.inc("foo.baz");
        Map<String, Object> map = counters.toMap();
        // flat key is kept at the top level
        assertThat(map, hasEntry("f", 200L));
        // dotted keys are nested one map level per dot
        assertThat(map, hasKey("foo"));
        assertThat(map.get("foo"), instanceOf(Map.class));
        Map<String, Object> fooMap = (Map<String, Object>) map.get("foo");
        assertThat(fooMap, hasEntry("bar", 1L));
        assertThat(fooMap, hasEntry("baz", 2L));
    }
}

View File

@ -7,6 +7,10 @@
"paths": [ "/_xpack/usage" ],
"parts": {},
"params": {
"master_timeout": {
"type": "duration",
"description": "Explicit operation timeout for connection to master node"
}
}
},
"body": null

View File

@ -84,7 +84,7 @@
- is_true: tagline
- do:
xpack.usage: {}
xpack.usage: {}
- is_true: watcher.enabled
- is_true: watcher.available
- is_true: security.enabled
@ -93,19 +93,21 @@
- is_true: graph.available
- is_true: monitoring.enabled
- is_true: monitoring.available
- gte: { watcher.count.total: 0 }
- gte: { watcher.count.active: 0 }
- do:
xpack.info:
categories: "_none"
xpack.info:
categories: "_none"
- is_false: build
- is_false: features
- is_false: license
- match: { tagline: "You know, for X" }
- do:
xpack.info:
categories: "_none"
human: false
xpack.info:
categories: "_none"
human: false
- is_false: build
- is_false: features
- is_false: license

View File

@ -19,6 +19,7 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.rest.RestHandler;
@ -29,6 +30,7 @@ import org.elasticsearch.xpack.watcher.actions.WatcherActionModule;
import org.elasticsearch.xpack.watcher.client.WatcherClientModule;
import org.elasticsearch.xpack.watcher.condition.ConditionModule;
import org.elasticsearch.xpack.watcher.execution.ExecutionModule;
import org.elasticsearch.xpack.watcher.execution.ExecutionService;
import org.elasticsearch.xpack.watcher.execution.InternalWatchExecutor;
import org.elasticsearch.xpack.watcher.history.HistoryModule;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
@ -44,7 +46,6 @@ import org.elasticsearch.xpack.watcher.rest.action.RestWatchServiceAction;
import org.elasticsearch.xpack.watcher.rest.action.RestWatcherStatsAction;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry.TemplateConfig;
import org.elasticsearch.xpack.watcher.support.validation.WatcherSettingsValidation;
import org.elasticsearch.xpack.watcher.transform.TransformModule;
import org.elasticsearch.xpack.watcher.transport.actions.ack.AckWatchAction;
import org.elasticsearch.xpack.watcher.transport.actions.ack.TransportAckWatchAction;
@ -87,6 +88,8 @@ public class Watcher implements ActionPlugin {
new Setting<>("index.xpack.watcher.template.version", "", Function.identity(), Setting.Property.IndexScope);
public static final Setting<Boolean> ENCRYPT_SENSITIVE_DATA_SETTING =
Setting.boolSetting("xpack.watcher.encrypt_sensitive_data", false, Setting.Property.NodeScope);
public static final Setting<TimeValue> MAX_STOP_TIMEOUT_SETTING =
Setting.timeSetting("xpack.watcher.stop.timeout", TimeValue.timeValueSeconds(30), Setting.Property.NodeScope);
private static final ESLogger logger = Loggers.getLogger(XPackPlugin.class);
@ -127,9 +130,7 @@ public class Watcher implements ActionPlugin {
if (enabled == false|| transportClient) {
return Collections.emptyList();
}
return Arrays.<Class<? extends LifecycleComponent>>asList(
WatcherLicensee.class,
WatcherSettingsValidation.class);
return Collections.singletonList(WatcherLicensee.class);
}
public Settings additionalSettings() {
@ -144,6 +145,8 @@ public class Watcher implements ActionPlugin {
}
settings.add(INDEX_WATCHER_VERSION_SETTING);
settings.add(INDEX_WATCHER_TEMPLATE_VERSION_SETTING);
settings.add(MAX_STOP_TIMEOUT_SETTING);
settings.add(ExecutionService.DEFAULT_THROTTLE_PERIOD_SETTING);
settings.add(Setting.intSetting("xpack.watcher.execution.scroll.size", 0, Setting.Property.NodeScope));
settings.add(Setting.intSetting("xpack.watcher.watch.scroll.size", 0, Setting.Property.NodeScope));
settings.add(Setting.boolSetting(XPackPlugin.featureEnabledSetting(Watcher.NAME), true, Setting.Property.NodeScope));
@ -152,7 +155,6 @@ public class Watcher implements ActionPlugin {
settings.add(Setting.simpleString("xpack.watcher.internal.ops.search.default_timeout", Setting.Property.NodeScope));
settings.add(Setting.simpleString("xpack.watcher.internal.ops.bulk.default_timeout", Setting.Property.NodeScope));
settings.add(Setting.simpleString("xpack.watcher.internal.ops.index.default_timeout", Setting.Property.NodeScope));
settings.add(Setting.simpleString("xpack.watcher.execution.default_throttle_period", Setting.Property.NodeScope));
settings.add(Setting.simpleString("xpack.watcher.actions.index.default_timeout", Setting.Property.NodeScope));
settings.add(Setting.simpleString("xpack.watcher.index.rest.direct_access", Setting.Property.NodeScope));
settings.add(Setting.simpleString("xpack.watcher.trigger.schedule.engine", Setting.Property.NodeScope));

View File

@ -9,10 +9,14 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.XPackFeatureSet;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
/**
*
@ -21,9 +25,12 @@ public class WatcherFeatureSet implements XPackFeatureSet {
private final boolean enabled;
private final WatcherLicensee licensee;
private final WatcherService watcherService;
@Inject
public WatcherFeatureSet(Settings settings, @Nullable WatcherLicensee licensee, NamedWriteableRegistry namedWriteableRegistry) {
public WatcherFeatureSet(Settings settings, @Nullable WatcherLicensee licensee, NamedWriteableRegistry namedWriteableRegistry,
@Nullable WatcherService watcherService) {
this.watcherService = watcherService;
this.enabled = Watcher.enabled(settings);
this.licensee = licensee;
namedWriteableRegistry.register(Usage.class, Usage.writeableName(Watcher.NAME), Usage::new);
@ -51,18 +58,41 @@ public class WatcherFeatureSet implements XPackFeatureSet {
@Override
public XPackFeatureSet.Usage usage() {
return new Usage(available(), enabled());
return new Usage(available(), enabled(), watcherService != null ? watcherService.usageStats() : Collections.emptyMap());
}
static class Usage extends XPackFeatureSet.Usage {
public Usage(StreamInput input) throws IOException {
super(input);
private final Map<String, Object> stats;
public Usage(StreamInput in) throws IOException {
super(in);
stats = in.readMap();
}
public Usage(boolean available, boolean enabled) {
public Usage(boolean available, boolean enabled, Map<String, Object> stats) {
super(Watcher.NAME, available, enabled);
this.stats = stats;
}
public Map<String, Object> stats() {
return stats;
}
@Override
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
super.innerXContent(builder, params);
if (enabled) {
for (Map.Entry<String, Object> entry : stats.entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeMap(stats);
}
}
}

View File

@ -10,7 +10,6 @@ import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry;
import org.elasticsearch.xpack.watcher.support.validation.WatcherSettingsValidation;
public class WatcherModule extends AbstractModule {
@ -25,16 +24,21 @@ public class WatcherModule extends AbstractModule {
@Override
protected void configure() {
XPackPlugin.bindFeatureSet(binder(), WatcherFeatureSet.class);
if (enabled == false || transportClientMode) {
if (transportClientMode) {
bind(WatcherLicensee.class).toProvider(Providers.of(null));
return;
}
bind(WatcherLicensee.class).asEagerSingleton();
bind(WatcherLifeCycleService.class).asEagerSingleton();
bind(WatcherSettingsValidation.class).asEagerSingleton();
bind(WatcherIndexTemplateRegistry.class).asEagerSingleton();
if (enabled == false) {
bind(WatcherLicensee.class).toProvider(Providers.of(null));
// watcher service must be null, so that the watcher feature set can be instantiated even if watcher is not enabled
bind(WatcherService.class).toProvider(Providers.of(null));
} else {
bind(WatcherLicensee.class).asEagerSingleton();
bind(WatcherLifeCycleService.class).asEagerSingleton();
bind(WatcherIndexTemplateRegistry.class).asEagerSingleton();
}
XPackPlugin.bindFeatureSet(binder(), WatcherFeatureSet.class);
}
}

View File

@ -15,6 +15,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.xpack.common.stats.Counters;
import org.elasticsearch.xpack.watcher.execution.ExecutionService;
import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry;
import org.elasticsearch.xpack.support.clock.Clock;
@ -28,6 +29,8 @@ import org.joda.time.DateTimeZone;
import org.joda.time.PeriodType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument;
@ -285,4 +288,9 @@ public class WatcherService extends AbstractComponent {
}
}
public Map<String, Object> usageStats() {
Map<String, Object> innerMap = executionService.usageStats();
innerMap.putAll(watchStore.usageStats());
return innerMap;
}
}

View File

@ -10,16 +10,20 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.Watcher;
import org.elasticsearch.xpack.watcher.WatcherFeatureSet;
import org.elasticsearch.xpack.common.stats.Counters;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.watcher.condition.Condition;
import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.input.Input;
import org.elasticsearch.xpack.watcher.support.validation.WatcherSettingsValidation;
import org.elasticsearch.xpack.watcher.transform.Transform;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
import org.elasticsearch.xpack.watcher.watch.Watch;
@ -33,17 +37,20 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
*/
public class ExecutionService extends AbstractComponent {
private static final TimeValue DEFAULT_MAX_STOP_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private static final String DEFAULT_MAX_STOP_TIMEOUT_SETTING = "xpack.watcher.stop.timeout";
public static final Setting<TimeValue> DEFAULT_THROTTLE_PERIOD_SETTING =
Setting.positiveTimeSetting("xpack.watcher.execution.default_throttle_period",
TimeValue.timeValueSeconds(5), Setting.Property.NodeScope);
private final MeanMetric totalExecutionsTime = new MeanMetric();
private final Map<String, MeanMetric> actionByTypeExecutionTime = new HashMap<>();
private final HistoryStore historyStore;
private final TriggeredWatchStore triggeredWatchStore;
@ -59,8 +66,7 @@ public class ExecutionService extends AbstractComponent {
@Inject
public ExecutionService(Settings settings, HistoryStore historyStore, TriggeredWatchStore triggeredWatchStore, WatchExecutor executor,
WatchStore watchStore, WatchLockService watchLockService, Clock clock,
WatcherSettingsValidation settingsValidation) {
WatchStore watchStore, WatchLockService watchLockService, Clock clock) {
super(settings);
this.historyStore = historyStore;
this.triggeredWatchStore = triggeredWatchStore;
@ -68,11 +74,8 @@ public class ExecutionService extends AbstractComponent {
this.watchStore = watchStore;
this.watchLockService = watchLockService;
this.clock = clock;
this.defaultThrottlePeriod = settings.getAsTime("xpack.watcher.execution.default_throttle_period", TimeValue.timeValueSeconds(5));
maxStopTimeout = settings.getAsTime(DEFAULT_MAX_STOP_TIMEOUT_SETTING, DEFAULT_MAX_STOP_TIMEOUT);
if (ExecutionService.this.defaultThrottlePeriod.millis() < 0) {
settingsValidation.addError("xpack.watcher.execution.default_throttle_period", "time value cannot be negative");
}
this.defaultThrottlePeriod = DEFAULT_THROTTLE_PERIOD_SETTING.get(settings);
this.maxStopTimeout = Watcher.MAX_STOP_TIMEOUT_SETTING.get(settings);
}
public void start(ClusterState state) throws Exception {
@ -385,12 +388,19 @@ public class ExecutionService extends AbstractComponent {
// actions
ctx.beforeActions();
for (ActionWrapper action : watch.actions()) {
long now = System.currentTimeMillis();
ActionWrapper.Result actionResult = action.execute(ctx);
long executionTime = System.currentTimeMillis() - now;
String type = action.action().type();
actionByTypeExecutionTime.putIfAbsent(type, new MeanMetric());
actionByTypeExecutionTime.get(type).inc(executionTime);
ctx.onActionResult(actionResult);
}
}
return ctx.finish();
WatchRecord record = ctx.finish();
totalExecutionsTime.inc(record.result().executionDurationMs());
return record;
}
void executeTriggeredWatches(Collection<TriggeredWatch> triggeredWatches) throws Exception {
@ -415,6 +425,19 @@ public class ExecutionService extends AbstractComponent {
logger.debug("executed [{}] watches from the watch history", counter);
}
public Map<String, Object> usageStats() {
Counters counters = new Counters();
counters.inc("execution.actions._all.total", totalExecutionsTime.count());
counters.inc("execution.actions._all.total_time_in_ms", totalExecutionsTime.sum());
for (Map.Entry<String, MeanMetric> entry : actionByTypeExecutionTime.entrySet()) {
counters.inc("execution.actions." + entry.getKey() + ".total", entry.getValue().count());
counters.inc("execution.actions." + entry.getKey() + ".total_time_in_ms", entry.getValue().sum());
}
return counters.toMap();
}
private static final class StartupExecutionContext extends TriggeredExecutionContext {
public StartupExecutionContext(Watch watch, DateTime executionTime, TriggerEvent triggerEvent, TimeValue defaultThrottlePeriod) {

View File

@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.watcher.rest.action;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestChannel;
@ -15,7 +14,6 @@ import org.elasticsearch.rest.action.support.AcknowledgedRestListener;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler;
import org.elasticsearch.xpack.watcher.transport.actions.service.WatcherServiceRequest;
import org.elasticsearch.xpack.watcher.transport.actions.service.WatcherServiceResponse;
/**
*/
@ -31,7 +29,7 @@ public class RestWatchServiceAction extends WatcherRestHandler {
@Override
public void handleRequest(RestRequest request, RestChannel channel, WatcherClient client) throws Exception {
client.watcherService(new WatcherServiceRequest().restart(), new AcknowledgedRestListener<WatcherServiceResponse>(channel));
client.watcherService(new WatcherServiceRequest().restart(), new AcknowledgedRestListener<>(channel));
}
static class StartRestHandler extends WatcherRestHandler {
@ -42,7 +40,7 @@ public class RestWatchServiceAction extends WatcherRestHandler {
@Override
public void handleRequest(RestRequest request, RestChannel channel, WatcherClient client) throws Exception {
client.watcherService(new WatcherServiceRequest().start(), new AcknowledgedRestListener<WatcherServiceResponse>(channel));
client.watcherService(new WatcherServiceRequest().start(), new AcknowledgedRestListener<>(channel));
}
}
@ -54,7 +52,7 @@ public class RestWatchServiceAction extends WatcherRestHandler {
@Override
public void handleRequest(RestRequest request, RestChannel channel, WatcherClient client) throws Exception {
client.watcherService(new WatcherServiceRequest().stop(), new AcknowledgedRestListener<WatcherServiceResponse>(channel));
client.watcherService(new WatcherServiceRequest().stop(), new AcknowledgedRestListener<>(channel));
}
}
}

View File

@ -1,57 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.support.validation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.watcher.support.Exceptions;
import java.util.ArrayList;
import java.util.List;
/**
 * Lifecycle component that collects watcher setting validation errors while other
 * components are being constructed, and fails startup with a single aggregated
 * exception once this component is started.
 */
public class WatcherSettingsValidation extends AbstractLifecycleComponent {

    // accumulated human-readable validation errors; all reported together on start
    private List<String> errors = new ArrayList<>();

    @Inject
    public WatcherSettingsValidation(Settings settings) {
        super(settings);
    }

    @Override
    protected void doStart() throws ElasticsearchException {
        // fail fast at node startup if any setting was reported as invalid
        validate();
    }

    @Override
    protected void doStop() throws ElasticsearchException {
    }

    @Override
    protected void doClose() throws ElasticsearchException {
    }

    /**
     * Registers an invalid setting.
     *
     * @param setting name of the offending setting; also used to look up its configured value
     * @param reason  explanation of why the value is invalid
     */
    public void addError(String setting, String reason) {
        errors.add(LoggerMessageFormat.format("", "invalid [{}] setting value [{}]. {}", setting, settings.get(setting), reason));
    }

    /**
     * Throws an exception listing every registered error; no-op when there are none.
     */
    private void validate() throws ElasticsearchException {
        if (errors.isEmpty()) {
            return;
        }
        StringBuilder sb = new StringBuilder("encountered invalid watcher settings:\n");
        for (String error : errors) {
            sb.append("- ").append(error).append("\n");
        }
        throw Exceptions.invalidSettings(sb.toString());
    }
}

View File

@ -19,6 +19,9 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.common.secret.Secret;
import org.elasticsearch.xpack.common.secret.SecretService;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.support.clock.HaltedClock;
import org.elasticsearch.xpack.watcher.Watcher;
import org.elasticsearch.xpack.watcher.actions.ActionRegistry;
import org.elasticsearch.xpack.watcher.actions.ActionStatus;
@ -31,9 +34,6 @@ import org.elasticsearch.xpack.watcher.input.ExecutableInput;
import org.elasticsearch.xpack.watcher.input.InputRegistry;
import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput;
import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.support.clock.HaltedClock;
import org.elasticsearch.xpack.common.secret.SecretService;
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams;
import org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;

View File

@ -32,11 +32,17 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.xpack.security.InternalClient;
import org.elasticsearch.xpack.watcher.WatcherFeatureSet;
import org.elasticsearch.xpack.common.stats.Counters;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
@ -204,8 +210,66 @@ public class WatchStore extends AbstractComponent {
return watches;
}
public int watchCount() {
return watches.size();
public Map<String, Object> usageStats() {
Counters counters = new Counters("count.total", "count.active");
for (Watch watch : watches.values()) {
boolean isActive = watch.status().state().isActive();
addToCounters("count", isActive, counters);
// schedule
if (watch.trigger() != null) {
addToCounters("watch.trigger._all", isActive, counters);
if ("schedule".equals(watch.trigger().type())) {
Schedule schedule = ((ScheduleTrigger) watch.trigger()).getSchedule();
addToCounters("watch.trigger.schedule._all", isActive, counters);
addToCounters("watch.trigger.schedule." + schedule.type(), isActive, counters);
}
}
// input
if (watch.input() != null) {
String type = watch.input().type();
addToCounters("watch.input._all", isActive, counters);
addToCounters("watch.input." + type, isActive, counters);
}
// condition
if (watch.condition() != null) {
String type = watch.condition().type();
addToCounters("watch.condition._all", isActive, counters);
addToCounters("watch.condition." + type, isActive, counters);
}
// actions
for (ActionWrapper actionWrapper : watch.actions()) {
String type = actionWrapper.action().type();
addToCounters("watch.action." + type, isActive, counters);
if (actionWrapper.transform() != null) {
String transformType = actionWrapper.transform().type();
addToCounters("watch.transform." + transformType, isActive, counters);
}
}
// transform
if (watch.transform() != null) {
String type = watch.transform().type();
addToCounters("watch.transform." + type, isActive, counters);
}
// metadata
if (watch.metadata() != null && watch.metadata().size() > 0) {
addToCounters("watch.metadata", isActive, counters);
}
}
return counters.toMap();
}
private void addToCounters(String name, boolean isActive, Counters counters) {
counters.inc(name + ".total");
if (isActive) {
counters.inc(name + ".active");
}
}
IndexRequest createIndexRequest(String id, BytesReference source, long version) {

View File

@ -8,7 +8,9 @@ package org.elasticsearch.script;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.common.text.DefaultTextTemplateEngine;
import java.util.Collections;
import java.util.Map;
import java.util.function.Function;
/**
* A mock script engine that registers itself under the 'mustache' name so that
@ -19,11 +21,16 @@ public class MockMustacheScriptEngine extends MockScriptEngine {
public static final String NAME = "mustache";
public static class TestPlugin extends MockScriptEngine.TestPlugin {
public static class TestPlugin extends MockScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new MockMustacheScriptEngine();
}
@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.emptyMap();
}
}
@Override

View File

@ -7,9 +7,18 @@ package org.elasticsearch.xpack.watcher;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.junit.Before;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
@ -24,20 +33,22 @@ public class WatcherFeatureSetTests extends ESTestCase {
private WatcherLicensee licensee;
private NamedWriteableRegistry namedWriteableRegistry;
private WatcherService watcherService;
@Before
public void init() throws Exception {
licensee = mock(WatcherLicensee.class);
namedWriteableRegistry = mock(NamedWriteableRegistry.class);
watcherService = mock(WatcherService.class);
}
public void testWritableRegistration() throws Exception {
new WatcherFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry);
new WatcherFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry, watcherService);
verify(namedWriteableRegistry).register(eq(WatcherFeatureSet.Usage.class), eq("xpack.usage.watcher"), anyObject());
}
public void testAvailable() throws Exception {
WatcherFeatureSet featureSet = new WatcherFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry);
WatcherFeatureSet featureSet = new WatcherFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry, watcherService);
boolean available = randomBoolean();
when(licensee.isAvailable()).thenReturn(available);
assertThat(featureSet.available(), is(available));
@ -53,8 +64,24 @@ public class WatcherFeatureSetTests extends ESTestCase {
} else {
settings.put("xpack.watcher.enabled", enabled);
}
WatcherFeatureSet featureSet = new WatcherFeatureSet(settings.build(), licensee, namedWriteableRegistry);
WatcherFeatureSet featureSet = new WatcherFeatureSet(settings.build(), licensee, namedWriteableRegistry, watcherService);
assertThat(featureSet.enabled(), is(enabled));
}
public void testUsageStats() throws Exception {
Map<String, Object> statsMap = new HashMap<>();
statsMap.put("foo", "bar");
when(watcherService.usageStats()).thenReturn(statsMap);
WatcherFeatureSet featureSet = new WatcherFeatureSet(Settings.EMPTY, licensee, namedWriteableRegistry, watcherService);
XContentBuilder builder = jsonBuilder();
featureSet.usage().toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentSource source = new XContentSource(builder);
assertThat(source.getValue("foo"), is("bar"));
assertThat(featureSet.usage(), instanceOf(WatcherFeatureSet.Usage.class));
WatcherFeatureSet.Usage usage = (WatcherFeatureSet.Usage) featureSet.usage();
assertThat(usage.stats(), hasEntry("foo", "bar"));
}
}

View File

@ -56,13 +56,14 @@ public class WatcherServiceTests extends ESTestCase {
private WatcherService watcherService;
private WatchLockService watchLockService;
private ClockMock clock;
private ExecutionService executionService;
@Before
public void init() throws Exception {
triggerService = mock(TriggerService.class);
watchStore = mock(WatchStore.class);
watchParser = mock(Watch.Parser.class);
ExecutionService executionService = mock(ExecutionService.class);
executionService = mock(ExecutionService.class);
watchLockService = mock(WatchLockService.class);
clock = new ClockMock();
WatcherIndexTemplateRegistry watcherIndexTemplateRegistry = mock(WatcherIndexTemplateRegistry.class);

View File

@ -8,6 +8,8 @@ package org.elasticsearch.xpack.watcher.execution;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.support.clock.ClockMock;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ActionStatus;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
@ -23,9 +25,7 @@ import org.elasticsearch.xpack.watcher.history.HistoryStore;
import org.elasticsearch.xpack.watcher.history.WatchRecord;
import org.elasticsearch.xpack.watcher.input.ExecutableInput;
import org.elasticsearch.xpack.watcher.input.Input;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.support.clock.ClockMock;
import org.elasticsearch.xpack.watcher.support.validation.WatcherSettingsValidation;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;
import org.elasticsearch.xpack.watcher.transform.Transform;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent;
@ -44,6 +44,7 @@ import java.util.concurrent.ArrayBlockingQueue;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@ -89,10 +90,9 @@ public class ExecutionServiceTests extends ESTestCase {
when(executor.queue()).thenReturn(new ArrayBlockingQueue<Runnable>(1));
watchLockService = mock(WatchLockService.class);
WatcherSettingsValidation settingsValidator = mock(WatcherSettingsValidation.class);
clock = new ClockMock();
executionService = new ExecutionService(Settings.EMPTY, historyStore, triggeredWatchStore, executor, watchStore,
watchLockService, clock, settingsValidator);
watchLockService, clock);
ClusterState clusterState = mock(ClusterState.class);
when(triggeredWatchStore.loadTriggeredWatches(clusterState)).thenReturn(new ArrayList<TriggeredWatch>());
@ -138,6 +138,7 @@ public class ExecutionServiceTests extends ESTestCase {
when(actionResult.type()).thenReturn("_action_type");
when(actionResult.status()).thenReturn(Action.Result.Status.SUCCESS);
ExecutableAction action = mock(ExecutableAction.class);
when(action.type()).thenReturn("MY_AWESOME_TYPE");
when(action.execute("_action", context, payload)).thenReturn(actionResult);
ActionWrapper actionWrapper = new ActionWrapper("_action", throttler, actionTransform, action);
@ -165,6 +166,13 @@ public class ExecutionServiceTests extends ESTestCase {
verify(condition, times(1)).execute(context);
verify(watchTransform, times(1)).execute(context, payload);
verify(action, times(1)).execute("_action", context, payload);
// test stats
XContentSource source = new XContentSource(jsonBuilder().map(executionService.usageStats()));
assertThat(source.getValue("execution.actions._all.total_time_in_ms"), is(notNullValue()));
assertThat(source.getValue("execution.actions._all.total"), is(1));
assertThat(source.getValue("execution.actions.MY_AWESOME_TYPE.total_time_in_ms"), is(notNullValue()));
assertThat(source.getValue("execution.actions.MY_AWESOME_TYPE.total"), is(1));
}
public void testExecuteFailedInput() throws Exception {

View File

@ -32,11 +32,29 @@ import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.actions.ExecutableActions;
import org.elasticsearch.xpack.watcher.condition.always.ExecutableAlwaysCondition;
import org.elasticsearch.xpack.watcher.condition.never.ExecutableNeverCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;
import org.elasticsearch.xpack.watcher.transform.Transform;
import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule;
import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.mockito.Matchers.any;
@ -280,6 +298,127 @@ public class WatchStoreTests extends ESTestCase {
verify(clientProxy, times(1)).clearScroll(anyString());
}
public void testUsageStats() throws Exception {
ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
MetaData.Builder metaDateBuilder = MetaData.builder();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
Settings settings = settings(Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.build();
metaDateBuilder.put(IndexMetaData.builder(WatchStore.INDEX).settings(settings).numberOfShards(1).numberOfReplicas(1));
final Index index = metaDateBuilder.get(WatchStore.INDEX).getIndex();
IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(index, 0))
.addShard(TestShardRouting.newShardRouting(WatchStore.INDEX, 0, "_node_id", null, true, ShardRoutingState.STARTED))
.build());
indexRoutingTableBuilder.addReplica();
routingTableBuilder.add(indexRoutingTableBuilder.build());
csBuilder.metaData(metaDateBuilder);
csBuilder.routingTable(routingTableBuilder.build());
RefreshResponse refreshResponse = mockRefreshResponse(1, 1);
when(clientProxy.refresh(any(RefreshRequest.class))).thenReturn(refreshResponse);
BytesReference source = new BytesArray("{}");
int hitCount = randomIntBetween(50, 100);
int activeHitCount = 0;
List<InternalSearchHit> hits = new ArrayList<>();
for (int i = 0; i < hitCount; i++) {
InternalSearchHit hit = new InternalSearchHit(0, "_id" + i, new Text("type"), Collections.<String, SearchHitField>emptyMap());
hits.add(hit.sourceRef(source));
Watch watch = mock(Watch.class);
WatchStatus status = mock(WatchStatus.class);
when(watch.status()).thenReturn(status);
boolean isActive = usually();
WatchStatus.State state = mock(WatchStatus.State.class);
when(state.isActive()).thenReturn(isActive);
when(status.state()).thenReturn(state);
if (isActive) {
activeHitCount++;
}
// random schedule
ScheduleTrigger mockTricker = mock(ScheduleTrigger.class);
when(watch.trigger()).thenReturn(mockTricker);
when(mockTricker.type()).thenReturn("schedule");
String scheduleType = randomFrom("a", "b", "c");
Schedule mockSchedule = mock(Schedule.class);
when(mockSchedule.type()).thenReturn(scheduleType);
when(mockTricker.getSchedule()).thenReturn(mockSchedule);
// either a none input, or null
when(watch.input()).thenReturn(randomFrom(new ExecutableNoneInput(logger), null));
// random conditions
when(watch.condition()).thenReturn(randomFrom(new ExecutableAlwaysCondition(logger), null,
new ExecutableNeverCondition(logger)));
// random actions
ActionWrapper actionWrapper = mock(ActionWrapper.class);
ExecutableAction action = mock(ExecutableAction.class);
when(actionWrapper.action()).thenReturn(action);
when(action.type()).thenReturn(randomFrom("a", "b", "c"));
when(watch.actions()).thenReturn(new ExecutableActions(Arrays.asList(actionWrapper)));
// random transform, not always set
Transform mockTransform = mock(Transform.class);
when(mockTransform.type()).thenReturn("TYPE");
@SuppressWarnings("unchecked")
ExecutableTransform testTransform = new ExecutableTransform(mockTransform, logger) {
@Override
public Transform.Result execute(WatchExecutionContext ctx, Payload payload) {
return null;
}
};
when(watch.transform()).thenReturn(randomFrom(testTransform, null));
when(parser.parse("_id" + i, true, source)).thenReturn(watch);
}
SearchResponse searchResponse = mockSearchResponse(1, 1, hitCount, hits.toArray(new InternalSearchHit[] {}));
when(clientProxy.search(any(SearchRequest.class), any(TimeValue.class))).thenReturn(searchResponse);
SearchResponse noHitsResponse = mockSearchResponse(1, 1, 2);
when(clientProxy.searchScroll(anyString(), any(TimeValue.class))).thenReturn(noHitsResponse);
when(clientProxy.clearScroll(anyString())).thenReturn(new ClearScrollResponse(true, 0));
ClusterState cs = csBuilder.build();
watchStore.start(cs);
XContentSource stats = new XContentSource(jsonBuilder().map(watchStore.usageStats()));
assertThat(stats.getValue("count.total"), is(hitCount));
assertThat(stats.getValue("count.active"), is(activeHitCount));
// schedule count
int scheduleCountA = stats.getValue("watch.trigger.schedule.a.active");
int scheduleCountB = stats.getValue("watch.trigger.schedule.b.active");
int scheduleCountC = stats.getValue("watch.trigger.schedule.c.active");
assertThat(scheduleCountA + scheduleCountB + scheduleCountC, is(activeHitCount));
// input count
assertThat(stats.getValue("watch.input.none.active"), is(greaterThan(0)));
assertThat(stats.getValue("watch.input.none.total"), is(greaterThan(0)));
assertThat(stats.getValue("watch.input.none.total"), is(lessThan(activeHitCount)));
// condition count
assertThat(stats.getValue("watch.condition.never.active"), is(greaterThan(0)));
assertThat(stats.getValue("watch.condition.always.active"), is(greaterThan(0)));
// action count
int actionCountA = stats.getValue("watch.action.a.active");
int actionCountB = stats.getValue("watch.action.b.active");
int actionCountC = stats.getValue("watch.action.c.active");
assertThat(actionCountA + actionCountB + actionCountC, is(activeHitCount));
// transform count
assertThat(stats.getValue("watch.transform.TYPE.active"), is(greaterThan(0)));
}
private RefreshResponse mockRefreshResponse(int total, int successful) {
RefreshResponse refreshResponse = mock(RefreshResponse.class);
when(refreshResponse.getTotalShards()).thenReturn(total);