diff --git a/README.asciidoc b/README.asciidoc index 46d72b8db07..c733d3fd277 100644 --- a/README.asciidoc +++ b/README.asciidoc @@ -51,7 +51,7 @@ gradle clean assemble gradle clean install ----- -- If you don't work on the UI side of x-plugins, you can force gradle to skip building kibana by adding - `xpack.kibana.build=false` to your `~/.gradle/gradle.properties`. Alternatively you add `-Pxpack.kibana.build=false` +- If you don't work on the UI/Logstash side of x-plugins, you can force gradle to skip building kibana and/or Logstash by adding + `xpack.kibana.build=false`/`xpack.logstash.build=false` to your `~/.gradle/gradle.properties`. Alternatively you add `-Pxpack.kibana.build=false` or `-Pxpack.logstash.build=false` on the command line if you only want to do this on individual builds (or `-Pxpack.kibana.build=true` if you need to override having added this to your `gradle.properties`). diff --git a/build.gradle b/build.gradle index f7292c5fc61..145e289418c 100644 --- a/build.gradle +++ b/build.gradle @@ -32,10 +32,13 @@ subprojects { task bundlePack(type: Zip) { onlyIf { project('kibana').bundlePlugin.enabled } + onlyIf { project('logstash').bundlePlugin.enabled } dependsOn 'elasticsearch:bundlePlugin' dependsOn 'kibana:bundlePlugin' + dependsOn 'logstash:bundlePlugin' from { zipTree(project('elasticsearch').bundlePlugin.outputs.files.singleFile) } from { zipTree(project('kibana').bundlePlugin.outputs.files.singleFile) } + from { zipTree(project('logstash').bundlePlugin.outputs.files.singleFile) } destinationDir file('build/distributions') baseName = 'x-pack' version = VersionProperties.elasticsearch diff --git a/elasticsearch/build.gradle b/elasticsearch/build.gradle index ae0f33260ab..1565469d379 100644 --- a/elasticsearch/build.gradle +++ b/elasticsearch/build.gradle @@ -52,6 +52,7 @@ dependencies { // common test deps testCompile 'org.elasticsearch:securemock:1.2' + testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" testCompile 
'org.slf4j:slf4j-log4j12:1.6.2' testCompile 'org.slf4j:slf4j-api:1.6.2' } @@ -239,3 +240,9 @@ thirdPartyAudit.excludes = [ 'javax.activation.UnsupportedDataTypeException' ] +run { + setting 'xpack.graph.enabled', 'true' + setting 'xpack.security.enabled', 'true' + setting 'xpack.monitoring.enabled', 'true' + setting 'xpack.watcher.enabled', 'true' +} diff --git a/elasticsearch/src/main/java/org/elasticsearch/license/License.java b/elasticsearch/src/main/java/org/elasticsearch/license/License.java index 2026260d72a..7eae2c3088d 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/license/License.java +++ b/elasticsearch/src/main/java/org/elasticsearch/license/License.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -29,7 +30,7 @@ import java.util.Locale; * Data structure for license. Use {@link Builder} to build a license. 
* Provides serialization/deserialization & validation methods for license object */ -public class License implements ToXContent { +public class License implements ToXContentObject { public static final int VERSION_START = 1; public static final int VERSION_NO_FEATURE_TYPE = 2; public static final int VERSION_START_DATE = 3; diff --git a/elasticsearch/src/main/java/org/elasticsearch/license/LicensesMetaData.java b/elasticsearch/src/main/java/org/elasticsearch/license/LicensesMetaData.java index 41dcc07bb69..8bacbf2593d 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/license/LicensesMetaData.java +++ b/elasticsearch/src/main/java/org/elasticsearch/license/LicensesMetaData.java @@ -6,6 +6,8 @@ package org.elasticsearch.license; import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.cluster.AbstractNamedDiffable; +import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,7 +22,7 @@ import java.util.EnumSet; /** * Contains metadata about registered licenses */ -class LicensesMetaData extends AbstractDiffable implements MetaData.Custom, +class LicensesMetaData extends AbstractNamedDiffable implements MetaData.Custom, TribeService.MergableCustomMetaData { public static final String TYPE = "licenses"; @@ -45,8 +47,6 @@ class LicensesMetaData extends AbstractDiffable implements Meta .expiryDate(0) .build(); - public static final LicensesMetaData PROTO = new LicensesMetaData(null); - private License license; public LicensesMetaData(License license) { @@ -79,7 +79,7 @@ class LicensesMetaData extends AbstractDiffable implements Meta } @Override - public String type() { + public String getWriteableName() { return TYPE; } @@ -88,8 +88,7 @@ class LicensesMetaData extends AbstractDiffable implements Meta return EnumSet.of(MetaData.XContentContext.GATEWAY); } - @Override - public LicensesMetaData 
fromXContent(XContentParser parser) throws IOException { + public static LicensesMetaData fromXContent(XContentParser parser) throws IOException { License license = LICENSE_TOMBSTONE; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -132,13 +131,16 @@ class LicensesMetaData extends AbstractDiffable implements Meta } } - @Override - public LicensesMetaData readFrom(StreamInput streamInput) throws IOException { - License license = LICENSE_TOMBSTONE; + public LicensesMetaData(StreamInput streamInput) throws IOException { if (streamInput.readBoolean()) { license = License.readLicense(streamInput); + } else { + license = LICENSE_TOMBSTONE; } - return new LicensesMetaData(license); + } + + public static NamedDiff readDiffFrom(StreamInput streamInput) throws IOException { + return readDiffFrom(MetaData.Custom.class, TYPE, streamInput); } @Override diff --git a/elasticsearch/src/main/java/org/elasticsearch/license/Licensing.java b/elasticsearch/src/main/java/org/elasticsearch/license/Licensing.java index ddd33e197c6..a93d965d7f5 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/license/Licensing.java +++ b/elasticsearch/src/main/java/org/elasticsearch/license/Licensing.java @@ -7,12 +7,17 @@ package org.elasticsearch.license; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.rest.RestHandler; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -28,16 +33,20 @@ public 
class Licensing implements ActionPlugin { protected final boolean isTransportClient; private final boolean isTribeNode; - static { - // we have to make sure we don't override the prototype, if we already - // registered. This causes class cast exceptions while casting license - // meta data on tribe node, as the registration happens for every tribe - // client nodes and the tribe node itself - if (MetaData.lookupPrototype(LicensesMetaData.TYPE) == null) { - MetaData.registerPrototype(LicensesMetaData.TYPE, LicensesMetaData.PROTO); - } + public List getNamedWriteables() { + List entries = new ArrayList<>(); + entries.add(new NamedWriteableRegistry.Entry(MetaData.Custom.class, LicensesMetaData.TYPE, LicensesMetaData::new)); + entries.add(new NamedWriteableRegistry.Entry(NamedDiff.class, LicensesMetaData.TYPE, LicensesMetaData::readDiffFrom)); + return entries; } + public List getNamedXContent() { + List entries = new ArrayList<>(); + // Metadata + entries.add(new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(LicensesMetaData.TYPE), + LicensesMetaData::fromXContent)); + return entries; + } public Licensing(Settings settings) { this.settings = settings; isTransportClient = transportClientMode(settings); diff --git a/elasticsearch/src/main/java/org/elasticsearch/license/XPackInfoResponse.java b/elasticsearch/src/main/java/org/elasticsearch/license/XPackInfoResponse.java index 2ab7feec18d..1945def2437 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/license/XPackInfoResponse.java +++ b/elasticsearch/src/main/java/org/elasticsearch/license/XPackInfoResponse.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.XPackBuild; @@ -140,7 +141,7 @@ public class XPackInfoResponse extends ActionResponse { } } - public static class BuildInfo implements ToXContent, Writeable { + public static class BuildInfo implements ToXContentObject, Writeable { private final String hash; private final String timestamp; @@ -180,7 +181,7 @@ public class XPackInfoResponse extends ActionResponse { } } - public static class FeatureSetsInfo implements ToXContent, Writeable { + public static class FeatureSetsInfo implements ToXContentObject, Writeable { private final Map featureSets; diff --git a/elasticsearch/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/elasticsearch/src/main/java/org/elasticsearch/license/XPackLicenseState.java index ebfba1b19ad..a578d3949b3 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/elasticsearch/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -7,7 +7,10 @@ package org.elasticsearch.license; import java.util.Collections; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiFunction; import org.elasticsearch.common.Strings; @@ -176,10 +179,17 @@ public class XPackLicenseState { } } private volatile Status status = new Status(OperationMode.TRIAL, true); + private final List listeners = new CopyOnWriteArrayList<>(); /** Updates the current state of the license, which will change what features are available. */ void update(OperationMode mode, boolean active) { status = new Status(mode, active); + listeners.forEach(Runnable::run); + } + + /** Add a listener to be notified on license change */ + public void addListener(Runnable runnable) { + listeners.add(Objects.requireNonNull(runnable)); } /** Return the current license type. 
*/ diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java index 6e9b17b3782..6995bf66d61 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -25,7 +25,7 @@ public interface XPackFeatureSet { Usage usage(); - abstract class Usage implements ToXContent, NamedWriteable { + abstract class Usage implements ToXContentObject, NamedWriteable { private static final String AVAILABLE_XFIELD = "available"; private static final String ENABLED_XFIELD = "enabled"; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackPlugin.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackPlugin.java index a273042083d..8ebb0158c50 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackPlugin.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/XPackPlugin.java @@ -408,12 +408,23 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I @Override public List getNamedWriteables() { - return Arrays.asList( - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, SECURITY, SecurityFeatureSet.Usage::new), - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, WATCHER, WatcherFeatureSet.Usage::new), - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, MONITORING, MonitoringFeatureSet.Usage::new), - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, GRAPH, 
GraphFeatureSet.Usage::new) - ); + List entries = new ArrayList<>(); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, SECURITY, SecurityFeatureSet.Usage::new)); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, WATCHER, WatcherFeatureSet.Usage::new)); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, MONITORING, MonitoringFeatureSet.Usage::new)); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, GRAPH, GraphFeatureSet.Usage::new)); + entries.addAll(watcher.getNamedWriteables()); + entries.addAll(licensing.getNamedWriteables()); + return entries; + } + + @Override + public List getNamedXContent() { + List entries = new ArrayList<>(); + entries.addAll(watcher.getNamedXContent()); + entries.addAll(licensing.getNamedXContent()); + return entries; + } public void onIndexModule(IndexModule module) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpClient.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpClient.java index 69289d01ce6..020ff7a6493 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpClient.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpClient.java @@ -5,8 +5,31 @@ */ package org.elasticsearch.xpack.common.http; -import org.elasticsearch.ElasticsearchTimeoutException; -import org.elasticsearch.SpecialPermission; +import org.apache.http.Header; +import org.apache.http.HttpHeaders; +import org.apache.http.HttpHost; +import org.apache.http.NameValuePair; +import org.apache.http.auth.AuthScope; +import org.apache.http.client.AuthCache; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; +import org.apache.http.client.methods.HttpHead; +import 
org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.http.client.utils.URIUtils; +import org.apache.http.client.utils.URLEncodedUtils; +import org.apache.http.conn.ssl.DefaultHostnameVerifier; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.auth.BasicScheme; +import org.apache.http.impl.client.BasicAuthCache; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.Streams; @@ -17,34 +40,27 @@ import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry; import org.elasticsearch.xpack.ssl.SSLService; import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.HttpsURLConnection; -import javax.net.ssl.SSLSession; -import javax.net.ssl.SSLSocketFactory; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.SocketTimeoutException; -import java.net.URL; -import java.net.URLEncoder; +import java.net.URI; +import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -/** - * Client class to wrap http connections - */ public class HttpClient extends AbstractComponent { + private static final String SETTINGS_SSL_PREFIX = "xpack.http.ssl."; + private final HttpAuthRegistry httpAuthRegistry; + private final CloseableHttpClient client; + private final Integer proxyPort; + 
private final String proxyHost; private final TimeValue defaultConnectionTimeout; private final TimeValue defaultReadTimeout; - private final boolean isHostnameVerificationEnabled; - private final SSLSocketFactory sslSocketFactory; - private final HttpProxy proxy; public HttpClient(Settings settings, HttpAuthRegistry httpAuthRegistry, SSLService sslService) { super(settings); @@ -52,148 +68,158 @@ public class HttpClient extends AbstractComponent { this.defaultConnectionTimeout = HttpSettings.CONNECTION_TIMEOUT.get(settings); this.defaultReadTimeout = HttpSettings.READ_TIMEOUT.get(settings); - final Integer proxyPort; - if (HttpSettings.PROXY_HOST.exists(settings)) { - proxyPort = HttpSettings.PROXY_PORT.get(settings); - } else { - proxyPort = null; - } - final String proxyHost = HttpSettings.PROXY_HOST.get(settings); - if (proxyPort != null && Strings.hasText(proxyHost)) { - this.proxy = new HttpProxy(proxyHost, proxyPort); + // proxy setup + this.proxyHost = HttpSettings.PROXY_HOST.get(settings); + this.proxyPort = HttpSettings.PROXY_PORT.get(settings); + if (proxyPort != 0 && Strings.hasText(proxyHost)) { logger.info("Using default proxy for http input and slack/hipchat/pagerduty/webhook actions [{}:{}]", proxyHost, proxyPort); - } else if (proxyPort == null && Strings.hasText(proxyHost) == false) { - this.proxy = HttpProxy.NO_PROXY; - } else { - throw new IllegalArgumentException("HTTP Proxy requires both settings: [" + HttpSettings.PROXY_HOST_KEY + "] and [" + - HttpSettings.PROXY_PORT_KEY + "]"); + } else if (proxyPort != 0 ^ Strings.hasText(proxyHost)) { + throw new IllegalArgumentException("HTTP proxy requires both settings: [" + HttpSettings.PROXY_HOST.getKey() + "] and [" + + HttpSettings.PROXY_PORT.getKey() + "]"); } - Settings sslSettings = settings.getByPrefix(HttpSettings.SSL_KEY_PREFIX); - this.sslSocketFactory = sslService.sslSocketFactory(settings.getByPrefix(HttpSettings.SSL_KEY_PREFIX)); - this.isHostnameVerificationEnabled = 
sslService.getVerificationMode(sslSettings, Settings.EMPTY).isHostnameVerificationEnabled(); + + HttpClientBuilder clientBuilder = HttpClientBuilder.create(); + + // ssl setup + Settings sslSettings = settings.getByPrefix(SETTINGS_SSL_PREFIX); + boolean isHostnameVerificationEnabled = sslService.getVerificationMode(sslSettings, Settings.EMPTY).isHostnameVerificationEnabled(); + HostnameVerifier verifier = isHostnameVerificationEnabled ? new DefaultHostnameVerifier() : NoopHostnameVerifier.INSTANCE; + SSLConnectionSocketFactory factory = new SSLConnectionSocketFactory(sslService.sslSocketFactory(sslSettings), verifier); + clientBuilder.setSSLSocketFactory(factory); + + client = clientBuilder.build(); } public HttpResponse execute(HttpRequest request) throws IOException { - try { - return doExecute(request); - } catch (SocketTimeoutException ste) { - throw new ElasticsearchTimeoutException("failed to execute http request. timeout expired", ste); - } - } + URI uri = createURI(request); - public HttpResponse doExecute(HttpRequest request) throws IOException { - String queryString = null; - if (request.params() != null && !request.params().isEmpty()) { - StringBuilder builder = new StringBuilder(); - for (Map.Entry entry : request.params().entrySet()) { - if (builder.length() != 0) { - builder.append('&'); - } - builder.append(URLEncoder.encode(entry.getKey(), "UTF-8")) - .append('=') - .append(URLEncoder.encode(entry.getValue(), "UTF-8")); + HttpRequestBase internalRequest; + if (request.method == HttpMethod.HEAD) { + internalRequest = new HttpHead(uri); + } else { + HttpMethodWithEntity methodWithEntity = new HttpMethodWithEntity(uri, request.method.name()); + if (request.body != null) { + methodWithEntity.setEntity(new StringEntity(request.body)); } - queryString = builder.toString(); + internalRequest = methodWithEntity; } + internalRequest.setHeader(HttpHeaders.ACCEPT_CHARSET, StandardCharsets.UTF_8.name()); - String path = Strings.hasLength(request.path) ? 
request.path : ""; - if (Strings.hasLength(queryString)) { - path += "?" + queryString; - } - URL url = new URL(request.scheme.scheme(), request.host, request.port, path); + RequestConfig.Builder config = RequestConfig.custom(); - logger.debug("making [{}] request to [{}]", request.method().method(), url); - logger.trace("sending [{}] as body of request", request.body()); - - // proxy configured in the request always wins! - HttpProxy proxyToUse = request.proxy != null ? request.proxy : proxy; - - HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection(proxyToUse.proxy()); - if (urlConnection instanceof HttpsURLConnection) { - final HttpsURLConnection httpsConn = (HttpsURLConnection) urlConnection; - final SSLSocketFactory factory = sslSocketFactory; - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Void run() { - httpsConn.setSSLSocketFactory(factory); - if (isHostnameVerificationEnabled == false) { - httpsConn.setHostnameVerifier(NoopHostnameVerifier.INSTANCE); - } - return null; - } - }); - } - - urlConnection.setRequestMethod(request.method().method()); - if (request.headers() != null) { - for (Map.Entry entry : request.headers().entrySet()) { - urlConnection.setRequestProperty(entry.getKey(), entry.getValue()); + // headers + if (request.headers().isEmpty() == false) { + for (Map.Entry entry : request.headers.entrySet()) { + internalRequest.setHeader(entry.getKey(), entry.getValue()); } } + + // proxy + if (request.proxy != null && request.proxy.equals(HttpProxy.NO_PROXY) == false) { + HttpHost proxy = new HttpHost(request.proxy.getHost(), request.proxy.getPort(), request.scheme.scheme()); + config.setProxy(proxy); + } else if (proxyPort != null && Strings.hasText(proxyHost)) { + HttpHost proxy = new HttpHost(proxyHost, proxyPort, request.scheme.scheme()); + config.setProxy(proxy); + } + + 
HttpClientContext localContext = HttpClientContext.create(); + // auth if (request.auth() != null) { - logger.trace("applying auth headers"); ApplicableHttpAuth applicableAuth = httpAuthRegistry.createApplicable(request.auth); - applicableAuth.apply(urlConnection); - } - urlConnection.setUseCaches(false); - urlConnection.setRequestProperty("Accept-Charset", StandardCharsets.UTF_8.name()); - if (request.body() != null) { - urlConnection.setDoOutput(true); - byte[] bytes = request.body().getBytes(StandardCharsets.UTF_8.name()); - urlConnection.setRequestProperty("Content-Length", String.valueOf(bytes.length)); - urlConnection.getOutputStream().write(bytes); - urlConnection.getOutputStream().close(); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + applicableAuth.apply(credentialsProvider, new AuthScope(request.host, request.port)); + localContext.setCredentialsProvider(credentialsProvider); + + // preemptive auth, no need to wait for a 401 first + AuthCache authCache = new BasicAuthCache(); + BasicScheme basicAuth = new BasicScheme(); + authCache.put(new HttpHost(request.host, request.port, request.scheme.scheme()), basicAuth); + localContext.setAuthCache(authCache); } - TimeValue connectionTimeout = request.connectionTimeout != null ? request.connectionTimeout : defaultConnectionTimeout; - urlConnection.setConnectTimeout((int) connectionTimeout.millis()); + // timeouts + if (request.connectionTimeout() != null) { - TimeValue readTimeout = request.readTimeout != null ? 
request.readTimeout : defaultReadTimeout; - urlConnection.setReadTimeout((int) readTimeout.millis()); - - urlConnection.connect(); - - final int statusCode = urlConnection.getResponseCode(); - // no status code, not considered a valid HTTP response then - if (statusCode == -1) { - throw new IOException("Not a valid HTTP response, no status code in response"); + config.setConnectTimeout(Math.toIntExact(request.connectionTimeout.millis())); + } else { + config.setConnectTimeout(Math.toIntExact(defaultConnectionTimeout.millis())); } - Map responseHeaders = new HashMap<>(urlConnection.getHeaderFields().size()); - for (Map.Entry> header : urlConnection.getHeaderFields().entrySet()) { - // HttpURLConnection#getHeaderFields returns the first status line as a header - // with a `null` key (facepalm)... so we have to skip that one. - if (header.getKey() != null) { - responseHeaders.put(header.getKey(), header.getValue().toArray(new String[header.getValue().size()])); + + if (request.readTimeout() != null) { + config.setSocketTimeout(Math.toIntExact(request.readTimeout.millis())); + config.setConnectionRequestTimeout(Math.toIntExact(request.readTimeout.millis())); + } else { + config.setSocketTimeout(Math.toIntExact(defaultReadTimeout.millis())); + config.setConnectionRequestTimeout(Math.toIntExact(defaultReadTimeout.millis())); + } + + internalRequest.setConfig(config.build()); + + try (CloseableHttpResponse response = client.execute(internalRequest, localContext)) { + // headers + Header[] headers = response.getAllHeaders(); + Map responseHeaders = new HashMap<>(headers.length); + for (Header header : headers) { + if (responseHeaders.containsKey(header.getName())) { + String[] old = responseHeaders.get(header.getName()); + String[] values = new String[old.length + 1]; + + System.arraycopy(old, 0, values, 0, old.length); + values[values.length-1] = header.getValue(); + + responseHeaders.put(header.getName(), values); + } else { + responseHeaders.put(header.getName(), new 
String[]{header.getValue()}); + } } - } - logger.debug("http status code [{}]", statusCode); - final byte[] body; - try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { - try (InputStream is = urlConnection.getInputStream()) { - Streams.copy(is, outputStream); - } catch (Exception e) { - if (urlConnection.getErrorStream() != null) { - try (InputStream is = urlConnection.getErrorStream()) { + + final byte[] body; + // not every response has a content, i.e. 204 + if (response.getEntity() == null) { + body = new byte[0]; + } else { + try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + try (InputStream is = response.getEntity().getContent()) { Streams.copy(is, outputStream); } + body = outputStream.toByteArray(); } } - body = outputStream.toByteArray(); + return new HttpResponse(response.getStatusLine().getStatusCode(), body, responseHeaders); } - return new HttpResponse(statusCode, body, responseHeaders); } - private static final class NoopHostnameVerifier implements HostnameVerifier { + private URI createURI(HttpRequest request) { + // this could be really simple, as the apache http client has a UriBuilder class, however this class is always doing + // url path escaping, and we have done this already, so this would result in double escaping + try { + List qparams = new ArrayList<>(request.params.size()); + request.params.forEach((k, v)-> qparams.add(new BasicNameValuePair(k, v))); + URI uri = URIUtils.createURI(request.scheme.scheme(), request.host, request.port, request.path, + URLEncodedUtils.format(qparams, "UTF-8"), null); - private static final HostnameVerifier INSTANCE = new NoopHostnameVerifier(); + return uri; + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + + /** + * Helper class to have all HTTP methods except HEAD allow for a body, including GET + */ + final class HttpMethodWithEntity extends HttpEntityEnclosingRequestBase { + + private final String methodName; + + 
HttpMethodWithEntity(final URI uri, String methodName) { + this.methodName = methodName; + setURI(uri); + } @Override - public boolean verify(String s, SSLSession sslSession) { - return true; + public String getMethod() { + return methodName; } } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpProxy.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpProxy.java index b347f1a1539..7c72158d678 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpProxy.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpProxy.java @@ -96,9 +96,9 @@ public class HttpProxy implements ToXContent, Streamable { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.HOST)) { + } else if (Field.HOST.match(currentFieldName)) { host = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PORT)) { + } else if (Field.PORT.match(currentFieldName)) { port = parser.intValue(); if (port <= 0 || port >= 65535) { throw new ElasticsearchParseException("Proxy port must be between 1 and 65534, but was " + port); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java index 5febc46b2ac..0993ff127a4 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequest.java @@ -8,10 +8,10 @@ package org.elasticsearch.xpack.common.http; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -35,7 +35,7 @@ import java.util.Objects; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -public class HttpRequest implements ToXContent { +public class HttpRequest implements ToXContentObject { final String host; final int port; @@ -256,17 +256,17 @@ public class HttpRequest implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PROXY)) { + } else if (Field.PROXY.match(currentFieldName)) { try { builder.proxy(HttpProxy.parse(parser)); } catch (Exception e) { throw new ElasticsearchParseException("could not parse http request. 
could not parse [{}] field", currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.AUTH)) { + } else if (Field.AUTH.match(currentFieldName)) { builder.auth(httpAuthRegistry.parse(parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT)) { + } else if (HttpRequest.Field.CONNECTION_TIMEOUT.match(currentFieldName)) { builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN)) { + } else if (HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.match(currentFieldName)) { // Users and 2.x specify the timeout this way try { builder.connectionTimeout(WatcherDateTimeUtils.parseTimeValue(parser, @@ -275,9 +275,9 @@ public class HttpRequest implements ToXContent { throw new ElasticsearchParseException("could not parse http request template. invalid time value for [{}] field", pe, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT)) { + } else if (HttpRequest.Field.READ_TIMEOUT.match(currentFieldName)) { builder.readTimeout(TimeValue.timeValueMillis(parser.longValue())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT_HUMAN)) { + } else if (HttpRequest.Field.READ_TIMEOUT_HUMAN.match(currentFieldName)) { // Users and 2.x specify the timeout this way try { builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.READ_TIMEOUT.toString())); @@ -286,35 +286,35 @@ public class HttpRequest implements ToXContent { pe, currentFieldName); } } else if (token == XContentParser.Token.START_OBJECT) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.HEADERS)) { + if (Field.HEADERS.match(currentFieldName)) { builder.setHeaders((Map) WatcherUtils.flattenModel(parser.map())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PARAMS)) { + } 
else if (Field.PARAMS.match(currentFieldName)) { builder.setParams((Map) WatcherUtils.flattenModel(parser.map())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.BODY)) { + } else if (Field.BODY.match(currentFieldName)) { builder.body(parser.text()); } else { throw new ElasticsearchParseException("could not parse http request. unexpected object field [{}]", currentFieldName); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.SCHEME)) { + if (Field.SCHEME.match(currentFieldName)) { builder.scheme(Scheme.parse(parser.text())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.METHOD)) { + } else if (Field.METHOD.match(currentFieldName)) { builder.method(HttpMethod.parse(parser.text())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.HOST)) { + } else if (Field.HOST.match(currentFieldName)) { builder.host = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PATH)) { + } else if (Field.PATH.match(currentFieldName)) { builder.path(parser.text()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.BODY)) { + } else if (Field.BODY.match(currentFieldName)) { builder.body(parser.text()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.URL)) { + } else if (Field.URL.match(currentFieldName)) { builder.fromUrl(parser.text()); } else { throw new ElasticsearchParseException("could not parse http request. unexpected string field [{}]", currentFieldName); } } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PORT)) { + if (Field.PORT.match(currentFieldName)) { builder.port = parser.intValue(); } else { throw new ElasticsearchParseException("could not parse http request. 
unexpected numeric field [{}]", @@ -460,8 +460,8 @@ public class HttpRequest implements ToXContent { scheme = Scheme.parse(uri.getScheme()); port = uri.getPort() > 0 ? uri.getPort() : scheme.defaultPort(); host = uri.getHost(); - if (Strings.hasLength(uri.getPath())) { - path = uri.getPath(); + if (Strings.hasLength(uri.getRawPath())) { + path = uri.getRawPath(); } String rawQuery = uri.getRawQuery(); if (Strings.hasLength(rawQuery)) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java index 5af33e93003..4ff248faeb2 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpRequestTemplate.java @@ -8,11 +8,11 @@ package org.elasticsearch.xpack.common.http; import io.netty.handler.codec.http.HttpHeaders; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestUtils; @@ -33,7 +33,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; -public class HttpRequestTemplate implements ToXContent { +public class HttpRequestTemplate implements ToXContentObject { private final Scheme scheme; private final String host; @@ -277,21 +277,21 @@ public class HttpRequestTemplate implements ToXContent { while ((token = parser.nextToken()) != 
XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.PROXY)) { + } else if (HttpRequest.Field.PROXY.match(currentFieldName)) { builder.proxy(HttpProxy.parse(parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.PATH)) { + } else if (HttpRequest.Field.PATH.match(currentFieldName)) { builder.path(parseFieldTemplate(currentFieldName, parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.HEADERS)) { + } else if (HttpRequest.Field.HEADERS.match(currentFieldName)) { builder.putHeaders(parseFieldTemplates(currentFieldName, parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.PARAMS)) { + } else if (HttpRequest.Field.PARAMS.match(currentFieldName)) { builder.putParams(parseFieldTemplates(currentFieldName, parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.BODY)) { + } else if (HttpRequest.Field.BODY.match(currentFieldName)) { builder.body(parseFieldTemplate(currentFieldName, parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.URL)) { + } else if (HttpRequest.Field.URL.match(currentFieldName)) { builder.fromUrl(parser.text()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT)) { + } else if (HttpRequest.Field.CONNECTION_TIMEOUT.match(currentFieldName)) { builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN)) { + } else if (HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.match(currentFieldName)) { // Users and 2.x specify the timeout this way try { builder.connectionTimeout(WatcherDateTimeUtils.parseTimeValue(parser, @@ -300,9 +300,9 @@ public class HttpRequestTemplate 
implements ToXContent { throw new ElasticsearchParseException("could not parse http request template. invalid time value for [{}] field", pe, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT)) { + } else if (HttpRequest.Field.READ_TIMEOUT.match(currentFieldName)) { builder.readTimeout(TimeValue.timeValueMillis(parser.longValue())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.READ_TIMEOUT_HUMAN)) { + } else if (HttpRequest.Field.READ_TIMEOUT_HUMAN.match(currentFieldName)) { // Users and 2.x specify the timeout this way try { builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.READ_TIMEOUT.toString())); @@ -311,25 +311,25 @@ public class HttpRequestTemplate implements ToXContent { pe, currentFieldName); } } else if (token == XContentParser.Token.START_OBJECT) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.AUTH)) { + if (HttpRequest.Field.AUTH.match(currentFieldName)) { builder.auth(httpAuthRegistry.parse(parser)); } else { throw new ElasticsearchParseException("could not parse http request template. unexpected object field [{}]", currentFieldName); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.SCHEME)) { + if (HttpRequest.Field.SCHEME.match(currentFieldName)) { builder.scheme(Scheme.parse(parser.text())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.METHOD)) { + } else if (HttpRequest.Field.METHOD.match(currentFieldName)) { builder.method(HttpMethod.parse(parser.text())); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.HOST)) { + } else if (HttpRequest.Field.HOST.match(currentFieldName)) { builder.host = parser.text(); } else { throw new ElasticsearchParseException("could not parse http request template. 
unexpected string field [{}]", currentFieldName); } } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, HttpRequest.Field.PORT)) { + if (HttpRequest.Field.PORT.match(currentFieldName)) { builder.port = parser.intValue(); } else { throw new ElasticsearchParseException("could not parse http request template. unexpected numeric field [{}]", diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpResponse.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpResponse.java index 4c0016a3343..8c6ea5804ef 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpResponse.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/HttpResponse.java @@ -9,11 +9,10 @@ import io.netty.handler.codec.http.HttpHeaders; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -29,7 +28,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -public class HttpResponse implements ToXContent { +public class HttpResponse implements ToXContentObject { private final int status; private final Map headers; @@ -189,13 +188,13 @@ public class HttpResponse implements ToXContent { } else if (currentFieldName == null) { throw new ElasticsearchParseException("could not parse http response. 
expected a field name but found [{}] instead", token); } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.STATUS)) { + if (Field.STATUS.match(currentFieldName)) { status = parser.intValue(); } else { throw new ElasticsearchParseException("could not parse http response. unknown numeric field [{}]", currentFieldName); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.BODY)) { + if (Field.BODY.match(currentFieldName)) { body = parser.text(); } else { throw new ElasticsearchParseException("could not parse http response. unknown string field [{}]", currentFieldName); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/ApplicableHttpAuth.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/ApplicableHttpAuth.java index d4139b98834..7da8a9d8fa8 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/ApplicableHttpAuth.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/ApplicableHttpAuth.java @@ -5,15 +5,17 @@ */ package org.elasticsearch.xpack.common.http.auth; -import org.elasticsearch.common.xcontent.ToXContent; +import org.apache.http.auth.AuthScope; +import org.apache.http.client.CredentialsProvider; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.net.HttpURLConnection; -public abstract class ApplicableHttpAuth implements ToXContent { +public abstract class ApplicableHttpAuth implements ToXContentObject { - private final Auth auth; + protected final Auth auth; public ApplicableHttpAuth(Auth auth) { this.auth = auth; @@ -25,6 +27,8 @@ public abstract class ApplicableHttpAuth implements ToXCo public abstract void apply(HttpURLConnection connection); + public abstract void apply(CredentialsProvider credsProvider, AuthScope 
authScope); + @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return auth.toXContent(builder, params); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/HttpAuth.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/HttpAuth.java index 2049424bc77..2f2cdd4f168 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/HttpAuth.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/HttpAuth.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.common.http.auth; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; -public interface HttpAuth extends ToXContent { +public interface HttpAuth extends ToXContentObject { String type(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/basic/ApplicableBasicAuth.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/basic/ApplicableBasicAuth.java index 0d7108671a0..dcd7f339796 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/basic/ApplicableBasicAuth.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/http/auth/basic/ApplicableBasicAuth.java @@ -5,20 +5,25 @@ */ package org.elasticsearch.xpack.common.http.auth.basic; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.elasticsearch.xpack.common.http.auth.ApplicableHttpAuth; +import org.elasticsearch.xpack.security.crypto.CryptoService; + import java.net.HttpURLConnection; import java.nio.charset.StandardCharsets; import java.util.Base64; -import org.elasticsearch.xpack.common.http.auth.ApplicableHttpAuth; -import org.elasticsearch.xpack.security.crypto.CryptoService; - public class ApplicableBasicAuth extends ApplicableHttpAuth { private 
final String basicAuth; + private final CryptoService cryptoService; public ApplicableBasicAuth(BasicAuth auth, CryptoService service) { super(auth); basicAuth = headerValue(auth.username, auth.password.text(service)); + this.cryptoService = service; } public static String headerValue(String username, char[] password) { @@ -29,4 +34,10 @@ public class ApplicableBasicAuth extends ApplicableHttpAuth { connection.setRequestProperty("Authorization", basicAuth); } + @Override + public void apply(CredentialsProvider credsProvider, AuthScope authScope) { + credsProvider.setCredentials(authScope, + new UsernamePasswordCredentials(auth.username, new String(auth.password.text(cryptoService)))); + } + } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java index 0230685f3a2..0ad8f2dcf80 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.common.text; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -113,7 +112,7 @@ public class TextTemplate implements ToXContent { if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { return new TextTemplate(parser.text()); } else { - return new TextTemplate(Script.parse(parser, ParseFieldMatcher.STRICT, Script.DEFAULT_TEMPLATE_LANG)); + return new TextTemplate(Script.parse(parser, Script.DEFAULT_TEMPLATE_LANG)); } } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java index 
2869871a67d..7dc630da404 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java @@ -6,14 +6,13 @@ package org.elasticsearch.xpack.graph.action; import com.carrotsearch.hppc.ObjectIntHashMap; - import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.graph.action.Connection.ConnectionId; import org.elasticsearch.xpack.graph.action.Vertex.VertexId; @@ -31,7 +30,7 @@ import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearch * * @see GraphExploreRequest */ -public class GraphExploreResponse extends ActionResponse implements ToXContent { +public class GraphExploreResponse extends ActionResponse implements ToXContentObject { private long tookInMillis; private boolean timedOut = false; @@ -159,6 +158,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); builder.field(Fields.TOOK, tookInMillis); builder.field(Fields.TIMED_OUT, timedOut); @@ -194,7 +194,7 @@ public class GraphExploreResponse extends ActionResponse implements ToXContent { builder.endObject(); } builder.endArray(); - + builder.endObject(); return builder; } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 9caee9bacfb..b2b594eadd6 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -38,8 +37,6 @@ import static org.elasticsearch.xpack.graph.action.GraphExploreAction.INSTANCE; * @see GraphExploreRequest */ public class RestGraphAction extends XPackRestHandler { - - private IndicesQueriesRegistry indicesQueriesRegistry; public static final ParseField TIMEOUT_FIELD = new ParseField("timeout"); public static final ParseField SIGNIFICANCE_FIELD = new ParseField("use_significance"); public static final ParseField RETURN_DETAILED_INFO = new ParseField("return_detailed_stats"); @@ -60,11 +57,9 @@ public class RestGraphAction extends XPackRestHandler { public static final ParseField TERM_FIELD = new ParseField("term"); @Inject - public RestGraphAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) { + public RestGraphAction(Settings settings, RestController controller) { super(settings); - this.indicesQueriesRegistry = indicesQueriesRegistry; - // @deprecated Remove in 6.0 // NOTE: Old versions did not end with "/_explore"; they were just "/explore" controller.registerWithDeprecatedHandler(GET, "/{index}" + URI_BASE + "/_graph/_explore", this, @@ -92,7 +87,7 @@ public class RestGraphAction extends XPackRestHandler { Hop currentHop = 
graphRequest.createNextHop(null); try (XContentParser parser = request.contentOrSourceParamParser()) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, parseFieldMatcher); + QueryParseContext context = new QueryParseContext(parser, parseFieldMatcher); XContentParser.Token token = parser.nextToken(); @@ -119,15 +114,15 @@ public class RestGraphAction extends XPackRestHandler { } if (token == XContentParser.Token.START_ARRAY) { - if (context.getParseFieldMatcher().match(fieldName, VERTICES_FIELD)) { + if (VERTICES_FIELD.match(fieldName)) { parseVertices(parser, context, currentHop, graphRequest); } } else if (token == XContentParser.Token.START_OBJECT) { - if (context.getParseFieldMatcher().match(fieldName, QUERY_FIELD)) { + if (QUERY_FIELD.match(fieldName)) { currentHop.guidingQuery(context.parseInnerQueryBuilder()); - } else if (context.getParseFieldMatcher().match(fieldName, CONNECTIONS_FIELD)) { + } else if (CONNECTIONS_FIELD.match(fieldName)) { parseHop(parser, context, graphRequest.createNextHop(null), graphRequest); - } else if (context.getParseFieldMatcher().match(fieldName, CONTROLS_FIELD)) { + } else if (CONTROLS_FIELD.match(fieldName)) { if (currentHop.getParentHop() != null) { throw new ElasticsearchParseException( "Controls are a global setting that can only be set in the root " + fieldName, token.name()); @@ -165,7 +160,7 @@ public class RestGraphAction extends XPackRestHandler { token = parser.nextToken(); } if (token == XContentParser.Token.START_ARRAY) { - if (context.getParseFieldMatcher().match(fieldName, INCLUDE_FIELD)) { + if (INCLUDE_FIELD.match(fieldName)) { if (excludes != null) { throw new ElasticsearchParseException( "Graph vertices definition cannot contain both "+INCLUDE_FIELD.getPreferredName()+" and " @@ -181,7 +176,7 @@ public class RestGraphAction extends XPackRestHandler { fieldName = parser.currentName(); } else { if (token == XContentParser.Token.VALUE_STRING) { - if 
(context.getParseFieldMatcher().match(fieldName, TERM_FIELD)) { + if (TERM_FIELD.match(fieldName)) { includeTerm = parser.text(); } else { throw new ElasticsearchParseException( @@ -189,7 +184,7 @@ public class RestGraphAction extends XPackRestHandler { " clause has invalid property:" + fieldName); } } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (context.getParseFieldMatcher().match(fieldName, BOOST_FIELD)) { + if (BOOST_FIELD.match(fieldName)) { boost = parser.floatValue(); } else { throw new ElasticsearchParseException( @@ -220,7 +215,7 @@ public class RestGraphAction extends XPackRestHandler { + token.name()); } } - } else if (context.getParseFieldMatcher().match(fieldName, EXCLUDE_FIELD)) { + } else if (EXCLUDE_FIELD.match(fieldName)) { if (includes != null) { throw new ElasticsearchParseException( "Graph vertices definition cannot contain both "+ INCLUDE_FIELD.getPreferredName()+ @@ -236,18 +231,18 @@ public class RestGraphAction extends XPackRestHandler { } } if (token == XContentParser.Token.VALUE_STRING) { - if (context.getParseFieldMatcher().match(fieldName, FIELD_NAME_FIELD)) { + if (FIELD_NAME_FIELD.match(fieldName)) { field = parser.text(); } else { throw new ElasticsearchParseException("Unknown string property: [" + fieldName + "]"); } } if (token == XContentParser.Token.VALUE_NUMBER) { - if (context.getParseFieldMatcher().match(fieldName, SIZE_FIELD)) { + if (SIZE_FIELD.match(fieldName)) { size = parser.intValue(); - } else if (context.getParseFieldMatcher().match(fieldName, MIN_DOC_COUNT_FIELD)) { + } else if (MIN_DOC_COUNT_FIELD.match(fieldName)) { minDocCount = parser.intValue(); - } else if (context.getParseFieldMatcher().match(fieldName, SHARD_MIN_DOC_COUNT_FIELD)) { + } else if (SHARD_MIN_DOC_COUNT_FIELD.match(fieldName)) { shardMinDocCount = parser.intValue(); } else { throw new ElasticsearchParseException("Unknown numeric property: [" + fieldName + "]"); @@ -287,37 +282,37 @@ public class RestGraphAction extends 
XPackRestHandler { if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (context.getParseFieldMatcher().match(fieldName, SAMPLE_SIZE_FIELD)) { + if (SAMPLE_SIZE_FIELD.match(fieldName)) { graphRequest.sampleSize(parser.intValue()); - } else if (context.getParseFieldMatcher().match(fieldName, TIMEOUT_FIELD)) { + } else if (TIMEOUT_FIELD.match(fieldName)) { graphRequest.timeout(TimeValue.timeValueMillis(parser.longValue())); } else { throw new ElasticsearchParseException("Unknown numeric property: [" + fieldName + "]"); } } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - if (context.getParseFieldMatcher().match(fieldName, SIGNIFICANCE_FIELD)) { + if (SIGNIFICANCE_FIELD.match(fieldName)) { graphRequest.useSignificance(parser.booleanValue()); - } else if (context.getParseFieldMatcher().match(fieldName, RETURN_DETAILED_INFO)) { + } else if (RETURN_DETAILED_INFO.match(fieldName)) { graphRequest.returnDetailedInfo(parser.booleanValue()); } else{ throw new ElasticsearchParseException("Unknown boolean property: [" + fieldName + "]"); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (context.getParseFieldMatcher().match(fieldName, TIMEOUT_FIELD)) { + if (TIMEOUT_FIELD.match(fieldName)) { graphRequest.timeout(TimeValue.parseTimeValue(parser.text(), null, "timeout")); } else { throw new ElasticsearchParseException("Unknown numeric property: [" + fieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - if (context.getParseFieldMatcher().match(fieldName, SAMPLE_DIVERSITY_FIELD)) { + if (SAMPLE_DIVERSITY_FIELD.match(fieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); token = parser.nextToken(); } - if (context.getParseFieldMatcher().match(fieldName, FIELD_NAME_FIELD)) { + if (FIELD_NAME_FIELD.match(fieldName)) { 
graphRequest.sampleDiversityField(parser.text()); - } else if (context.getParseFieldMatcher().match(fieldName, MAX_DOCS_PER_VALUE_FIELD)) { + } else if (MAX_DOCS_PER_VALUE_FIELD.match(fieldName)) { graphRequest.maxDocsPerDiversityValue(parser.intValue()); } else { throw new ElasticsearchParseException("Unknown property: [" + fieldName + "]"); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/AgentService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/AgentService.java deleted file mode 100644 index f8ff2a06a27..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/AgentService.java +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.monitoring; - -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ReleasableLock; -import org.elasticsearch.xpack.monitoring.collector.Collector; -import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector; -import org.elasticsearch.xpack.monitoring.exporter.ExportException; -import org.elasticsearch.xpack.monitoring.exporter.Exporter; -import org.elasticsearch.xpack.monitoring.exporter.Exporters; 
-import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Locale; -import java.util.Set; -import java.util.concurrent.locks.ReentrantLock; - -/** - * The {@code AgentService} is a service that does the work of publishing the details to the monitoring cluster. - *

- * If this service is stopped, then the attached, monitored node is not going to publish its details to the monitoring cluster. Given - * service life cycles, the intended way to temporarily stop the publishing is using the start and stop collection methods. - * - * @see #stopCollection() - * @see #startCollection() - */ -public class AgentService extends AbstractLifecycleComponent { - - private volatile ExportingWorker exportingWorker; - - private volatile Thread workerThread; - private volatile long samplingIntervalMillis; - private final Collection collectors; - private final String[] settingsCollectors; - private final Exporters exporters; - - public AgentService(Settings settings, ClusterSettings clusterSettings, Set collectors, Exporters exporters) { - super(settings); - this.samplingIntervalMillis = MonitoringSettings.INTERVAL.get(settings).millis(); - this.settingsCollectors = MonitoringSettings.COLLECTORS.get(settings).toArray(new String[0]); - this.collectors = Collections.unmodifiableSet(filterCollectors(collectors, settingsCollectors)); - this.exporters = exporters; - - clusterSettings.addSettingsUpdateConsumer(MonitoringSettings.INTERVAL, this::setInterval); - } - - private void setInterval(TimeValue interval) { - this.samplingIntervalMillis = interval.millis(); - applyIntervalSettings(); - } - - protected Set filterCollectors(Set collectors, String[] filters) { - if (CollectionUtils.isEmpty(filters)) { - return collectors; - } - - Set list = new HashSet<>(); - for (Collector collector : collectors) { - if (Regex.simpleMatch(filters, collector.name().toLowerCase(Locale.ROOT))) { - list.add(collector); - } else if (collector instanceof ClusterStatsCollector) { - list.add(collector); - } - } - return list; - } - - protected void applyIntervalSettings() { - if (samplingIntervalMillis <= 0) { - logger.info("data sampling is disabled due to interval settings [{}]", samplingIntervalMillis); - if (workerThread != null) { - - // notify worker to stop on its 
leisure, not to disturb an exporting operation - exportingWorker.closed = true; - - exportingWorker = null; - workerThread = null; - } - } else if (workerThread == null || !workerThread.isAlive()) { - - exportingWorker = new ExportingWorker(); - workerThread = new Thread(exportingWorker, EsExecutors.threadName(settings, "monitoring.exporters")); - workerThread.setDaemon(true); - workerThread.start(); - } - } - - /** stop collection and exporting. this method blocks until all background activity is guaranteed to be stopped */ - public void stopCollection() { - final ExportingWorker worker = this.exportingWorker; - if (worker != null) { - worker.stopCollecting(); - } - } - - public void startCollection() { - final ExportingWorker worker = this.exportingWorker; - if (worker != null) { - worker.collecting = true; - } - } - - @Override - protected void doStart() { - logger.debug("monitoring service started"); - exporters.start(); - applyIntervalSettings(); - } - - @Override - protected void doStop() { - if (workerThread != null && workerThread.isAlive()) { - exportingWorker.closed = true; - workerThread.interrupt(); - try { - workerThread.join(60000); - } catch (InterruptedException e) { - // we don't care... 
- } - } - - exporters.stop(); - } - - @Override - protected void doClose() { - for (Exporter exporter : exporters) { - try { - exporter.close(); - } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e); - } - } - } - - public TimeValue getSamplingInterval() { - return TimeValue.timeValueMillis(samplingIntervalMillis); - } - - public String[] collectors() { - return settingsCollectors; - } - - class ExportingWorker implements Runnable { - - volatile boolean closed = false; - volatile boolean collecting = true; - - final ReleasableLock collectionLock = new ReleasableLock(new ReentrantLock(false)); - - @Override - public void run() { - while (!closed) { - // sleep first to allow node to complete initialization before collecting the first start - try { - Thread.sleep(samplingIntervalMillis); - - if (closed) { - continue; - } - - try (Releasable ignore = collectionLock.acquire()) { - - Collection docs = collect(); - - if ((docs.isEmpty() == false) && (closed == false)) { - exporters.export(docs); - } - } - - } catch (ExportException e) { - logger.error("exception when exporting documents", e); - } catch (InterruptedException e) { - logger.trace("interrupted"); - Thread.currentThread().interrupt(); - } catch (Exception e) { - logger.error("background thread had an uncaught exception", e); - } - } - logger.debug("worker shutdown"); - } - - /** stop collection and exporting. 
this method will be block until background collection is actually stopped */ - public void stopCollecting() { - collecting = false; - collectionLock.acquire().close(); - } - - private Collection collect() { - if (logger.isTraceEnabled()) { - logger.trace("collecting data - collectors [{}]", Strings.collectionToCommaDelimitedString(collectors)); - } - - Collection docs = new ArrayList<>(); - for (Collector collector : collectors) { - if (collecting) { - Collection result = collector.collect(); - if (result != null) { - logger.trace("adding [{}] collected docs from [{}] collector", result.size(), collector.name()); - docs.addAll(result); - } else { - logger.trace("skipping collected docs from [{}] collector", collector.name()); - } - } - if (closed) { - // Stop collecting if the worker is marked as closed - break; - } - } - return docs; - } - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java index 227c63d84b7..47180c7970f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java @@ -10,7 +10,8 @@ import java.util.Locale; public enum MonitoredSystem { ES("es"), - KIBANA("kibana"); + KIBANA("kibana"), + LOGSTASH("logstash"); private final String system; @@ -28,6 +29,8 @@ public enum MonitoredSystem { return ES; case "kibana": return KIBANA; + case "logstash": + return LOGSTASH; default: throw new IllegalArgumentException("Unknown monitoring system [" + system + "]"); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index ff29cd672ed..1b3fd93ae09 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -38,6 +38,7 @@ import org.elasticsearch.rest.RestHandler; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.ssl.SSLService; +import java.time.Clock; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -119,9 +120,10 @@ public class Monitoring implements ActionPlugin { collectors.add(new ShardsCollector(settings, clusterService, monitoringSettings, licenseState)); collectors.add(new NodeStatsCollector(settings, clusterService, monitoringSettings, licenseState, client)); collectors.add(new IndexRecoveryCollector(settings, clusterService, monitoringSettings, licenseState, client)); - final AgentService agentService = new AgentService(settings, clusterSettings, collectors, exporters); + final MonitoringService monitoringService = + new MonitoringService(settings, clusterSettings, threadPool, collectors, exporters); - return Arrays.asList(agentService, monitoringSettings, exporters, cleanerService); + return Arrays.asList(monitoringService, monitoringSettings, exporters, cleanerService); } @Override diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java new file mode 100644 index 00000000000..838a534c48f --- /dev/null +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java @@ -0,0 +1,236 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.monitoring.collector.Collector; +import org.elasticsearch.xpack.monitoring.exporter.Exporter; +import org.elasticsearch.xpack.monitoring.exporter.Exporters; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * The {@code MonitoringService} is a service that does the work of publishing the details to the monitoring cluster. + *

+ * If this service is stopped, then the attached, monitored node is not going to publish its details to the monitoring cluster. Given + * service life cycles, the intended way to temporarily stop the publishing is using the start and stop methods. + */ +public class MonitoringService extends AbstractLifecycleComponent { + + /** State of the monitoring service, either started or stopped **/ + private final AtomicBoolean started = new AtomicBoolean(false); + + /** Task in charge of collecting and exporting monitoring data **/ + private final MonitoringExecution monitor = new MonitoringExecution(); + + private final ThreadPool threadPool; + private final Set collectors; + private final Exporters exporters; + + private volatile TimeValue interval; + private volatile ThreadPool.Cancellable scheduler; + + MonitoringService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, + Set collectors, Exporters exporters) { + super(settings); + this.threadPool = Objects.requireNonNull(threadPool); + this.collectors = Objects.requireNonNull(collectors); + this.exporters = Objects.requireNonNull(exporters); + this.interval = MonitoringSettings.INTERVAL.get(settings); + clusterSettings.addSettingsUpdateConsumer(MonitoringSettings.INTERVAL, this::setInterval); + } + + void setInterval(TimeValue interval) { + this.interval = interval; + scheduleExecution(); + } + + public TimeValue getInterval() { + return interval; + } + + boolean isMonitoringActive() { + return isStarted() + && interval != null + && interval.millis() >= MonitoringSettings.MIN_INTERVAL.millis(); + } + + private String threadPoolName() { + return ThreadPool.Names.GENERIC; + } + + boolean isStarted() { + return started.get(); + } + + @Override + protected void doStart() { + if (started.compareAndSet(false, true)) { + try { + logger.debug("monitoring service is starting"); + scheduleExecution(); + logger.debug("monitoring service started"); + } catch (Exception e) { + logger.error((Supplier) () -> 
new ParameterizedMessage("failed to start monitoring service"), e); + started.set(false); + throw e; + } + } + } + + @Override + protected void doStop() { + if (started.getAndSet(false)) { + logger.debug("monitoring service is stopping"); + cancelExecution(); + logger.debug("monitoring service stopped"); + } + } + + @Override + protected void doClose() { + logger.debug("monitoring service is closing"); + closeExecution(); + + for (Exporter exporter : exporters) { + try { + exporter.close(); + } catch (Exception e) { + logger.error((Supplier) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e); + } + } + logger.debug("monitoring service closed"); + } + + void scheduleExecution() { + if (scheduler != null) { + cancelExecution(); + } + if (isMonitoringActive()) { + scheduler = threadPool.scheduleWithFixedDelay(monitor, interval, threadPoolName()); + } + } + + void cancelExecution() { + if (scheduler != null) { + try { + scheduler.cancel(); + } finally { + scheduler = null; + } + } + } + + void closeExecution() { + try { + monitor.close(); + } catch (IOException e) { + logger.error((Supplier) () -> new ParameterizedMessage("failed to close monitoring execution"), e); + } + } + + /** + * {@link MonitoringExecution} is a scheduled {@link Runnable} that periodically checks if monitoring + * data can be collected and exported. It runs at a given interval corresponding to the monitoring + * sampling interval. It first checks if monitoring is still enabled (because it might have changed + * since the last time the task was scheduled: interval set to -1 or the monitoring service is stopped). + * Since collecting and exporting data can take time, it uses a semaphore to track the current execution. + */ + class MonitoringExecution extends AbstractRunnable implements Closeable { + + /** + * Binary semaphore used to wait for monitoring execution to terminate before closing or stopping + * the monitoring service. 
A semaphore is preferred over a ReentrantLock because the lock is + * obtained by a thread and released by another thread. + **/ + private final Semaphore semaphore = new Semaphore(1); + + @Override + public void doRun() { + if (isMonitoringActive() == false) { + logger.debug("monitoring execution is skipped"); + return; + } + + if (semaphore.tryAcquire() == false) { + logger.debug("monitoring execution is skipped until previous execution terminated"); + return; + } + + threadPool.executor(threadPoolName()).submit(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + Collection results = new ArrayList<>(); + for (Collector collector : collectors) { + if (isStarted() == false) { + // Do not collect more data if the the monitoring service is stopping + // otherwise some collectors might just fail. + return; + } + + try { + Collection result = collector.collect(); + if (result != null) { + results.addAll(result); + } + } catch (Exception e) { + logger.warn((Supplier) () -> + new ParameterizedMessage("monitoring collector [{}] failed to collect data", collector.name()), e); + } + } + if (isMonitoringActive()) { + exporters.export(results); + } + } + + @Override + public void onFailure(Exception e) { + logger.warn("monitoring execution failed", e); + } + + @Override + public void onRejection(Exception e) { + logger.warn("monitoring execution has been rejected", e); + } + + @Override + public void onAfter() { + semaphore.release(); + } + }); + } + + @Override + public void onFailure(Exception e) { + logger.warn("monitoring execution failed", e); + } + + @Override + public void close() throws IOException { + try { + // Block until the lock can be acquired + semaphore.acquire(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + } +} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringSettings.java 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringSettings.java index ddf147b3e50..13e4c4b327c 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringSettings.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringSettings.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.monitoring; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -34,11 +33,23 @@ public class MonitoringSettings extends AbstractComponent { */ public static final TimeValue HISTORY_DURATION_MINIMUM = TimeValue.timeValueHours(24); + /** + * Minimum value for sampling interval (1 second) + */ + static final TimeValue MIN_INTERVAL = TimeValue.timeValueSeconds(1L); + /** * Sampling interval between two collections (default to 10s) */ - public static final Setting INTERVAL = - timeSetting(collectionKey("interval"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); + public static final Setting INTERVAL = new Setting<>(collectionKey("interval"), "10s", + (s) -> { + TimeValue value = TimeValue.parseTimeValue(s, null, collectionKey("interval")); + if (TimeValue.MINUS_ONE.equals(value) || value.millis() >= MIN_INTERVAL.millis()) { + return value; + } + throw new IllegalArgumentException("Failed to parse monitoring interval [" + s + "], value must be >= " + MIN_INTERVAL); + }, + Property.Dynamic, Property.NodeScope); /** * Timeout value when collecting index statistics (default to 10m) diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java index cbb8abb3514..afe194279f4 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java +++ 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java @@ -44,10 +44,6 @@ public abstract class Exporter implements AutoCloseable { return config; } - public boolean masterOnly() { - return false; - } - /** Returns true if only one instance of this exporter should be allowed. */ public boolean isSingleton() { return false; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java index 5624b776bf4..135addf1c5b 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java @@ -31,19 +31,15 @@ import static java.util.Collections.emptyMap; public class Exporters extends AbstractLifecycleComponent implements Iterable { private final Map factories; - private final ClusterService clusterService; - private final AtomicReference> exporters; - public Exporters(Settings settings, Map factories, - ClusterService clusterService) { - + public Exporters(Settings settings, Map factories, ClusterService clusterService) { super(settings); + this.factories = factories; - this.clusterService = clusterService; this.exporters = new AtomicReference<>(emptyMap()); - clusterService.getClusterSettings().addSettingsUpdateConsumer(MonitoringSettings.EXPORTERS_SETTINGS, - this::setExportersSetting); + + clusterService.getClusterSettings().addSettingsUpdateConsumer(MonitoringSettings.EXPORTERS_SETTINGS, this::setExportersSetting); } private void setExportersSetting(Settings exportersSetting) { @@ -92,15 +88,10 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable bulks = new ArrayList<>(); for (Exporter exporter : this) { - if (exporter.masterOnly() && clusterService.state().nodes().isLocalNodeElectedMaster() == false) { - // the exporter is supposed to only run on the master 
node, but we're not - // the master node... so skipping - continue; - } try { ExportBulk bulk = exporter.openBulk(); if (bulk == null) { - logger.info("skipping exporter [{}] as it isn't ready yet", exporter.name()); + logger.info("skipping exporter [{}] as it is not ready yet", exporter.name()); } else { bulks.add(bulk); } @@ -168,15 +159,14 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable 0) { - ExportBulk bulk = openBulk(); - if (bulk == null) { - throw new ExportException("exporters are either not ready or faulty"); - } + final ExportBulk bulk = openBulk(); - try { - bulk.add(docs); - } finally { - bulk.close(lifecycleState() == Lifecycle.State.STARTED); + if (bulk != null) { + try { + bulk.add(docs); + } finally { + bulk.close(lifecycleState() == Lifecycle.State.STARTED); + } } } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java index 2fd29c5b8fc..ea39b1c6b20 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java @@ -17,6 +17,14 @@ public final class MonitoringTemplateUtils { /** Current version of es and data templates **/ public static final String TEMPLATE_VERSION = "2"; + /** + * The name of the non-timestamped data index. + */ + public static final String DATA_INDEX = ".monitoring-data-" + TEMPLATE_VERSION; + /** + * Data types that should be supported by the {@linkplain #DATA_INDEX data index} that were not by the initial release. 
+ */ + public static final String[] NEW_DATA_TYPES = { "kibana", "logstash" }; private MonitoringTemplateUtils() { } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/DataTypeMappingHttpResource.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/DataTypeMappingHttpResource.java new file mode 100644 index 00000000000..7a9701479c2 --- /dev/null +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/DataTypeMappingHttpResource.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.TimeValue; + +import java.util.Collections; +import java.util.Objects; + +import static org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils.DATA_INDEX; + +/** + * {@linkplain DataTypeMappingHttpResource}s allow the checking and adding of index mapping's for new types that did not exist in previous + * versions. + *

+ * This allows the use of Monitoring's REST endpoint to publish Kibana data to the data index even if the "kibana" type did not + * exist in their existing index mapping (e.g., they started with an early alpha release). Additionally, this also enables future types to + * be added without issue. + *

+ * The root need for this is because the index mapping started with an index setting: "index.mapper.dynamic" set to false. This prevents + * new types from being dynamically added, which is obviously needed as new components (e.g., Kibana and Logstash) are monitored. + * Unfortunately, this setting cannot be flipped without also closing and reopening the index, so the fix is to manually add any new types. + */ +public class DataTypeMappingHttpResource extends PublishableHttpResource { + + private static final Logger logger = Loggers.getLogger(DataTypeMappingHttpResource.class); + + /** + * The name of the type that is created in the mappings on the remote cluster. + */ + private final String typeName; + + /** + * Create a new {@link DataTypeMappingHttpResource}. + * + * @param resourceOwnerName The user-recognizable name + * @param masterTimeout Master timeout to use with any request. + * @param typeName The name of the mapping type (e.g., "kibana"). + */ + public DataTypeMappingHttpResource(final String resourceOwnerName, @Nullable final TimeValue masterTimeout, + final String typeName) { + // we need to inspect the mappings, so we don't use filter_path to get rid of them + super(resourceOwnerName, masterTimeout, Collections.emptyMap()); + + this.typeName = Objects.requireNonNull(typeName); + } + + /** + * Determine if the current {@linkplain #typeName type} exists. 
+ */ + @Override + protected CheckResponse doCheck(final RestClient client) { + final Tuple resource = + checkForResource(client, logger, + "/" + DATA_INDEX + "/_mapping", typeName, "monitoring mapping type", + resourceOwnerName, "monitoring cluster"); + + // depending on the content, we need to flip the actual response + CheckResponse checkResponse = resource.v1(); + + if (checkResponse == CheckResponse.EXISTS && resource.v2().getEntity().getContentLength() <= 2) { + // it "exists" if the index exists at all; it doesn't guarantee that the mapping exists + // the content will be "{}" if no mapping exists + checkResponse = CheckResponse.DOES_NOT_EXIST; + } else if (checkResponse == CheckResponse.DOES_NOT_EXIST) { + // DNE indicates that the entire index is missing, which means the template will create it; we only add types! + checkResponse = CheckResponse.EXISTS; + } + + return checkResponse; + } + + /** + * Add the current {@linkplain #typeName type} to the index's mappings. + */ + @Override + protected boolean doPublish(final RestClient client) { + // this could be a class-level constant, but it does not need to live the entire duration of ES; only the few times it is used + final HttpEntity disabledEntity = new StringEntity("{\"enabled\":false}", ContentType.APPLICATION_JSON); + + return putResource(client, logger, + "/" + DATA_INDEX + "/_mapping", typeName, () -> disabledEntity, "monitoring mapping type", + resourceOwnerName, "monitoring cluster"); + } + +} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java index 899098991bb..5a71e61bffe 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporter.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.unit.TimeValue; 
import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.monitoring.exporter.Exporter; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.resolver.MonitoringIndexNameResolver; import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; import org.elasticsearch.xpack.ssl.SSLService; @@ -520,6 +521,13 @@ public class HttpExporter extends Exporter { final TimeValue templateTimeout = config.settings().getAsTime(TEMPLATE_CHECK_TIMEOUT_SETTING, null); final Set templateNames = new HashSet<>(); + // add a resource to check the index mappings of the .monitoring-data-# index + // We ensure (and add if it's not) that the kibana type is there for the index for those few customers that upgraded from alphas; + // this step makes it very easy to add logstash in 5.2+ (and eventually beats) + for (final String type : MonitoringTemplateUtils.NEW_DATA_TYPES) { + resources.add(new DataTypeMappingHttpResource(resourceOwnerName, templateTimeout, type)); + } + for (final MonitoringIndexNameResolver resolver : resolvers) { final String templateName = resolver.templateName(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java index dde2dad6bd1..ec142e47aa7 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PipelineHttpResource.java @@ -57,9 +57,9 @@ public class PipelineHttpResource extends PublishableHttpResource { */ @Override protected CheckResponse doCheck(final RestClient client) { - return checkForResource(client, logger, - "/_ingest/pipeline", pipelineName, "monitoring pipeline", - resourceOwnerName, "monitoring 
cluster"); + return simpleCheckForResource(client, logger, + "/_ingest/pipeline", pipelineName, "monitoring pipeline", + resourceOwnerName, "monitoring cluster"); } /** diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java index 1479e9fcdb1..eab21da08ee 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestStatus; @@ -144,7 +145,8 @@ public abstract class PublishableHttpResource extends HttpResource { /** * Determine if the current {@code resourceName} exists at the {@code resourceBasePath} endpoint. *

- * This provides the base-level check for any resource that does not need to inspect its actual contents. + * This provides the base-level check for any resource that does not need to care about its response beyond existence (and likely does + * not need to inspect its contents). * * @param client The REST client to make the request(s). * @param logger The logger to use for status messages. @@ -155,10 +157,33 @@ public abstract class PublishableHttpResource extends HttpResource { * @param resourceOwnerType The type of resource owner being dealt with (e.g., "monitoring cluster"). * @return Never {@code null}. */ - protected CheckResponse checkForResource(final RestClient client, final Logger logger, - final String resourceBasePath, - final String resourceName, final String resourceType, - final String resourceOwnerName, final String resourceOwnerType) { + protected CheckResponse simpleCheckForResource(final RestClient client, final Logger logger, + final String resourceBasePath, + final String resourceName, final String resourceType, + final String resourceOwnerName, final String resourceOwnerType) { + return checkForResource(client, logger, resourceBasePath, resourceName, resourceType, resourceOwnerName, resourceOwnerType).v1(); + } + + /** + * Determine if the current {@code resourceName} exists at the {@code resourceBasePath} endpoint. + *

+ * This provides the base-level check for any resource that cares about existence and also its contents. + * + * @param client The REST client to make the request(s). + * @param logger The logger to use for status messages. + * @param resourceBasePath The base path/endpoint to check for the resource (e.g., "/_template"). + * @param resourceName The name of the resource (e.g., "template123"). + * @param resourceType The type of resource (e.g., "monitoring template"). + * @param resourceOwnerName The user-recognizeable resource owner. + * @param resourceOwnerType The type of resource owner being dealt with (e.g., "monitoring cluster"). + * @return Never {@code null} pair containing the checked response and the returned response. + * The response will only ever be {@code null} if none was returned. + * @see #simpleCheckForResource(RestClient, Logger, String, String, String, String, String) + */ + protected Tuple checkForResource(final RestClient client, final Logger logger, + final String resourceBasePath, + final String resourceName, final String resourceType, + final String resourceOwnerName, final String resourceOwnerType) { logger.trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); try { @@ -169,18 +194,19 @@ public abstract class PublishableHttpResource extends HttpResource { if (response.getStatusLine().getStatusCode() == RestStatus.OK.getStatus()) { logger.debug("{} [{}] found on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); - return CheckResponse.EXISTS; + return new Tuple<>(CheckResponse.EXISTS, response); } else { throw new ResponseException(response); } } catch (final ResponseException e) { - final int statusCode = e.getResponse().getStatusLine().getStatusCode(); + final Response response = e.getResponse(); + final int statusCode = response.getStatusLine().getStatusCode(); // 404 if (statusCode == RestStatus.NOT_FOUND.getStatus()) { logger.debug("{} [{}] 
does not exist on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType); - return CheckResponse.DOES_NOT_EXIST; + return new Tuple<>(CheckResponse.DOES_NOT_EXIST, response); } else { logger.error((Supplier) () -> new ParameterizedMessage("failed to verify {} [{}] on the [{}] {} with status code [{}]", @@ -188,7 +214,7 @@ public abstract class PublishableHttpResource extends HttpResource { e); // weirder failure than below; block responses just like other unexpected failures - return CheckResponse.ERROR; + return new Tuple<>(CheckResponse.ERROR, response); } } catch (IOException | RuntimeException e) { logger.error((Supplier) () -> @@ -197,7 +223,7 @@ public abstract class PublishableHttpResource extends HttpResource { e); // do not attempt to publish the resource because we're in a broken state - return CheckResponse.ERROR; + return new Tuple<>(CheckResponse.ERROR, null); } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java index db0c72754b2..f506e71b6f1 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/TemplateHttpResource.java @@ -58,9 +58,9 @@ public class TemplateHttpResource extends PublishableHttpResource { */ @Override protected CheckResponse doCheck(final RestClient client) { - return checkForResource(client, logger, - "/_template", templateName, "monitoring template", - resourceOwnerName, "monitoring cluster"); + return simpleCheckForResource(client, logger, + "/_template", templateName, "monitoring template", + resourceOwnerName, "monitoring cluster"); } /** diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 23af2d56797..1fcb7752b88 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -17,6 +17,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasA import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; @@ -41,6 +43,7 @@ import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.exporter.ExportBulk; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.resolver.MonitoringIndexNameResolver; import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; import org.elasticsearch.xpack.security.InternalClient; @@ -137,126 +140,16 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle } // List of distinct templates - Map templates = StreamSupport.stream(new ResolversRegistry(Settings.EMPTY).spliterator(), false) + final Map templates = StreamSupport.stream(new ResolversRegistry(Settings.EMPTY).spliterator(), false) 
.collect(Collectors.toMap(MonitoringIndexNameResolver::templateName, MonitoringIndexNameResolver::template, (a, b) -> a)); - - // if this is not the master, we'll just look to see if the monitoring templates are installed. - // If they all are, we'll be able to start this exporter. Otherwise, we'll just wait for a new cluster state. - if (clusterService.state().nodes().isLocalNodeElectedMaster() == false) { - for (String template : templates.keySet()) { - if (hasTemplate(template, clusterState) == false) { - // the required template is not yet installed in the given cluster state, we'll wait. - logger.debug("monitoring index template [{}] does not exist, so service cannot start", template); - return null; - } - } - - // if we don't have the ingest pipeline, then it's going to fail anyway - if (hasIngestPipelines(clusterState) == false) { - logger.debug("monitoring ingest pipeline [{}] does not exist, so service cannot start", EXPORT_PIPELINE_NAME); + // if this is not the master, we just need to make sure the master has set things up + if (clusterService.state().nodes().isLocalNodeElectedMaster()) { + if (setupIfElectedMaster(clusterState, templates) == false) { return null; } - - if (null != prepareAddAliasesTo2xIndices(clusterState)) { - logger.debug("old monitoring indexes exist without aliases, waiting for them to get new aliases"); - return null; - } - - logger.trace("monitoring index templates and pipelines are installed, service can start"); - - } else { - // we are on the elected master - // Check that there is nothing that could block metadata updates - if (clusterState.blocks().hasGlobalBlock(ClusterBlockLevel.METADATA_WRITE)) { - logger.debug("waiting until metadata writes are unblocked"); - return null; - } - - if (installingSomething.get() == true) { - logger.trace("already installing something, waiting for install to complete"); - return null; - } - - // build a list of runnables for everything that is missing, but do not start execution - final List 
asyncActions = new ArrayList<>(); - final AtomicInteger pendingResponses = new AtomicInteger(0); - - // Check that each required template exist, installing it if needed - final List> missingTemplates = templates.entrySet() - .stream() - .filter((e) -> hasTemplate(e.getKey(), clusterState) == false) - .collect(Collectors.toList()); - - if (missingTemplates.isEmpty() == false) { - logger.debug((Supplier) () -> new ParameterizedMessage("template {} not found", - missingTemplates.stream().map(Map.Entry::getKey).collect(Collectors.toList()))); - for (Entry template : missingTemplates) { - asyncActions.add(() -> putTemplate(template.getKey(), template.getValue(), - new ResponseActionListener<>("template", template.getKey(), pendingResponses))); - } - } - - // if we don't have the ingest pipeline, then install it - if (hasIngestPipelines(clusterState) == false) { - logger.debug("pipeline [{}] not found", EXPORT_PIPELINE_NAME); - asyncActions.add(() -> putIngestPipeline(new ResponseActionListener<>("pipeline", EXPORT_PIPELINE_NAME, pendingResponses))); - } else { - logger.trace("pipeline [{}] found", EXPORT_PIPELINE_NAME); - } - - IndicesAliasesRequest addAliasesTo2xIndices = prepareAddAliasesTo2xIndices(clusterState); - if (addAliasesTo2xIndices == null) { - logger.trace("there are no 2.x monitoring indices or they have all the aliases they need"); - } else { - final List monitoringIndices2x = addAliasesTo2xIndices.getAliasActions().stream() - .flatMap((a) -> Arrays.stream(a.indices())) - .collect(Collectors.toList()); - logger.debug("there are 2.x monitoring indices {} and they are missing some aliases to make them compatible with 5.x", - monitoringIndices2x); - asyncActions.add(() -> client.execute(IndicesAliasesAction.INSTANCE, addAliasesTo2xIndices, - new ActionListener() { - @Override - public void onResponse(IndicesAliasesResponse response) { - responseReceived(); - if (response.isAcknowledged()) { - logger.info("Added modern aliases to 2.x monitoring indices {}", 
monitoringIndices2x); - } else { - logger.info("Unable to add modern aliases to 2.x monitoring indices {}, response not acknowledged.", - monitoringIndices2x); - } - } - - @Override - public void onFailure(Exception e) { - responseReceived(); - logger.error((Supplier) - () -> new ParameterizedMessage("Unable to add modern aliases to 2.x monitoring indices {}", - monitoringIndices2x), e); - } - - private void responseReceived() { - if (pendingResponses.decrementAndGet() <= 0) { - logger.trace("all installation requests returned a response"); - if (installingSomething.compareAndSet(true, false) == false) { - throw new IllegalStateException("could not reset installing flag to false"); - } - } - } - })); - } - - if (asyncActions.size() > 0) { - if (installingSomething.compareAndSet(false, true)) { - pendingResponses.set(asyncActions.size()); - asyncActions.forEach(Runnable::run); - } else { - // let the cluster catch up since requested installations may be ongoing - return null; - } - } else { - logger.debug("monitoring index templates and pipelines are installed on master node, service can start"); - } + } else if (setupIfNotElectedMaster(clusterState, templates.keySet()) == false) { + return null; } if (state.compareAndSet(State.INITIALIZED, State.RUNNING)) { @@ -266,6 +159,200 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle return new LocalBulk(name(), logger, client, resolvers, config.settings().getAsBoolean(USE_INGEST_PIPELINE_SETTING, true)); } + /** + * When not on the elected master, we require all resources (mapping types, templates, and pipelines) to be available before we + * attempt to run the exporter. If those resources do not exist, then it means the elected master's exporter has not yet run, so the + * monitoring cluster (this one, as the local exporter) is not setup yet. + * + * @param clusterState The current cluster state. + * @param templates All template names that should exist. 
+ * @return {@code true} indicates that all resources are available and the exporter can be used. {@code false} to stop and wait. + */ + private boolean setupIfNotElectedMaster(final ClusterState clusterState, final Set templates) { + for (final String type : MonitoringTemplateUtils.NEW_DATA_TYPES) { + if (hasMappingType(type, clusterState) == false) { + // the required type is not yet there in the given cluster state, we'll wait. + logger.debug("monitoring index mapping [{}] does not exist in [{}], so service cannot start", + type, MonitoringTemplateUtils.DATA_INDEX); + return false; + } + } + + for (final String template : templates) { + if (hasTemplate(template, clusterState) == false) { + // the required template is not yet installed in the given cluster state, we'll wait. + logger.debug("monitoring index template [{}] does not exist, so service cannot start", template); + return false; + } + } + + // if we don't have the ingest pipeline, then it's going to fail anyway + if (hasIngestPipelines(clusterState) == false) { + logger.debug("monitoring ingest pipeline [{}] does not exist, so service cannot start", EXPORT_PIPELINE_NAME); + return false; + } + + if (null != prepareAddAliasesTo2xIndices(clusterState)) { + logger.debug("old monitoring indexes exist without aliases, waiting for them to get new aliases"); + return false; + } + + logger.trace("monitoring index templates and pipelines are installed, service can start"); + + // everything is setup + return true; + } + + /** + * When on the elected master, we setup all resources (mapping types, templates, and pipelines) before we attempt to run the exporter. + * If those resources do not exist, then we will create them. + * + * @param clusterState The current cluster state. + * @param templates All template names that should exist. + * @return {@code true} indicates that all resources are "ready" and the exporter can be used. {@code false} to stop and wait. 
+ */ + private boolean setupIfElectedMaster(final ClusterState clusterState, final Map templates) { + // we are on the elected master + // Check that there is nothing that could block metadata updates + if (clusterState.blocks().hasGlobalBlock(ClusterBlockLevel.METADATA_WRITE)) { + logger.debug("waiting until metadata writes are unblocked"); + return false; + } + + if (installingSomething.get() == true) { + logger.trace("already installing something, waiting for install to complete"); + return false; + } + + // build a list of runnables for everything that is missing, but do not start execution + final List asyncActions = new ArrayList<>(); + final AtomicInteger pendingResponses = new AtomicInteger(0); + + // Check that all necessary types exist for _xpack/monitoring/_bulk usage + final List missingMappingTypes = Arrays.stream(MonitoringTemplateUtils.NEW_DATA_TYPES) + .filter((type) -> hasMappingType(type, clusterState) == false) + .collect(Collectors.toList()); + + // Check that each required template exist, installing it if needed + final List> missingTemplates = templates.entrySet() + .stream() + .filter((e) -> hasTemplate(e.getKey(), clusterState) == false) + .collect(Collectors.toList()); + + if (missingMappingTypes.isEmpty() == false) { + logger.debug((Supplier) () -> new ParameterizedMessage("type {} not found", + missingMappingTypes.stream().collect(Collectors.toList()))); + for (final String type : missingMappingTypes) { + asyncActions.add(() -> putMappingType(type, new ResponseActionListener<>("type", type, pendingResponses))); + } + } + + if (missingTemplates.isEmpty() == false) { + logger.debug((Supplier) () -> new ParameterizedMessage("template {} not found", + missingTemplates.stream().map(Map.Entry::getKey).collect(Collectors.toList()))); + for (Entry template : missingTemplates) { + asyncActions.add(() -> putTemplate(template.getKey(), template.getValue(), + new ResponseActionListener<>("template", template.getKey(), pendingResponses))); + } + } + + 
// if we don't have the ingest pipeline, then install it + if (hasIngestPipelines(clusterState) == false) { + logger.debug("pipeline [{}] not found", EXPORT_PIPELINE_NAME); + asyncActions.add(() -> putIngestPipeline(new ResponseActionListener<>("pipeline", EXPORT_PIPELINE_NAME, pendingResponses))); + } else { + logger.trace("pipeline [{}] found", EXPORT_PIPELINE_NAME); + } + + IndicesAliasesRequest addAliasesTo2xIndices = prepareAddAliasesTo2xIndices(clusterState); + if (addAliasesTo2xIndices == null) { + logger.trace("there are no 2.x monitoring indices or they have all the aliases they need"); + } else { + final List monitoringIndices2x = addAliasesTo2xIndices.getAliasActions().stream() + .flatMap((a) -> Arrays.stream(a.indices())) + .collect(Collectors.toList()); + logger.debug("there are 2.x monitoring indices {} and they are missing some aliases to make them compatible with 5.x", + monitoringIndices2x); + asyncActions.add(() -> client.execute(IndicesAliasesAction.INSTANCE, addAliasesTo2xIndices, + new ActionListener() { + @Override + public void onResponse(IndicesAliasesResponse response) { + responseReceived(); + if (response.isAcknowledged()) { + logger.info("Added modern aliases to 2.x monitoring indices {}", monitoringIndices2x); + } else { + logger.info("Unable to add modern aliases to 2.x monitoring indices {}, response not acknowledged.", + monitoringIndices2x); + } + } + + @Override + public void onFailure(Exception e) { + responseReceived(); + logger.error((Supplier) + () -> new ParameterizedMessage("Unable to add modern aliases to 2.x monitoring indices {}", + monitoringIndices2x), e); + } + + private void responseReceived() { + if (pendingResponses.decrementAndGet() <= 0) { + logger.trace("all installation requests returned a response"); + if (installingSomething.compareAndSet(true, false) == false) { + throw new IllegalStateException("could not reset installing flag to false"); + } + } + } + })); + } + + if (asyncActions.size() > 0) { + if 
(installingSomething.compareAndSet(false, true)) { + pendingResponses.set(asyncActions.size()); + asyncActions.forEach(Runnable::run); + } else { + // let the cluster catch up since requested installations may be ongoing + return false; + } + } else { + logger.debug("monitoring index templates and pipelines are installed on master node, service can start"); + } + + // everything is setup (or running) + return true; + } + + /** + * Determine if the mapping {@code type} exists in the {@linkplain MonitoringTemplateUtils#DATA_INDEX data index}. + * + * @param type The data type to check (e.g., "kibana") + * @param clusterState The current cluster state + * @return {@code false} if the type mapping needs to be added. + */ + private boolean hasMappingType(final String type, final ClusterState clusterState) { + final IndexMetaData dataIndex = clusterState.getMetaData().getIndices().get(MonitoringTemplateUtils.DATA_INDEX); + + // if the index does not exist, then the template will add it and the type; if the index does exist, then we need the type + return dataIndex == null || dataIndex.getMappings().containsKey(type); + } + + /** + * Add the mapping {@code type} to the {@linkplain MonitoringTemplateUtils#DATA_INDEX data index}. + * + * @param type The data type to check (e.g., "kibana") + * @param listener The listener to use for handling the response + */ + private void putMappingType(final String type, final ActionListener listener) { + logger.debug("adding mapping type [{}] to [{}]", type, MonitoringTemplateUtils.DATA_INDEX); + + final PutMappingRequest putMapping = new PutMappingRequest(MonitoringTemplateUtils.DATA_INDEX); + + putMapping.type(type); + // avoid mapping at all; we use this index as a data cache rather than for search + putMapping.source("{\"enabled\":false}"); + + client.admin().indices().putMapping(putMapping, listener); + } + /** * Determine if the ingest pipeline for {@link #EXPORT_PIPELINE_NAME} exists in the cluster or not. 
* diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/ResolversRegistry.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/ResolversRegistry.java index faedc8985ee..99b79f146cb 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/ResolversRegistry.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/ResolversRegistry.java @@ -50,6 +50,7 @@ public class ResolversRegistry implements Iterable // register resolvers for monitored systems registerMonitoredSystem(MonitoredSystem.KIBANA, settings); + registerMonitoredSystem(MonitoredSystem.LOGSTASH, settings); } /** diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolver.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolver.java index 47ab958d989..4da2334382f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolver.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolver.java @@ -66,6 +66,8 @@ public class NodeStatsResolver extends MonitoringIndexNameResolver.Timestamped templates = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { @@ -353,7 +352,7 @@ public class EmailTemplate implements ToXContent { } else { builder.replyTo(TextTemplate.parse(parser)); } - } else if (ParseFieldMatcher.STRICT.match(fieldName, Email.Field.TO)) { + } else if (Email.Field.TO.match(fieldName)) { if (parser.currentToken() == XContentParser.Token.START_ARRAY) { List templates = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { @@ -363,7 +362,7 @@ public class EmailTemplate implements ToXContent { } else { builder.to(TextTemplate.parse(parser)); } - } else if (ParseFieldMatcher.STRICT.match(fieldName, Email.Field.CC)) { + } else if (Email.Field.CC.match(fieldName)) 
{ if (parser.currentToken() == XContentParser.Token.START_ARRAY) { List templates = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { @@ -373,7 +372,7 @@ public class EmailTemplate implements ToXContent { } else { builder.cc(TextTemplate.parse(parser)); } - } else if (ParseFieldMatcher.STRICT.match(fieldName, Email.Field.BCC)) { + } else if (Email.Field.BCC.match(fieldName)) { if (parser.currentToken() == XContentParser.Token.START_ARRAY) { List templates = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { @@ -383,11 +382,11 @@ public class EmailTemplate implements ToXContent { } else { builder.bcc(TextTemplate.parse(parser)); } - } else if (ParseFieldMatcher.STRICT.match(fieldName, Email.Field.PRIORITY)) { + } else if (Email.Field.PRIORITY.match(fieldName)) { builder.priority(TextTemplate.parse(parser)); - } else if (ParseFieldMatcher.STRICT.match(fieldName, Email.Field.SUBJECT)) { + } else if (Email.Field.SUBJECT.match(fieldName)) { builder.subject(TextTemplate.parse(parser)); - } else if (ParseFieldMatcher.STRICT.match(fieldName, Email.Field.BODY)) { + } else if (Email.Field.BODY.match(fieldName)) { if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { builder.textBody(TextTemplate.parse(parser)); } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { @@ -398,9 +397,9 @@ public class EmailTemplate implements ToXContent { currentFieldName = parser.currentName(); } else if (currentFieldName == null) { throw new ElasticsearchParseException("could not parse email template. 
empty [{}] field", fieldName); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Email.Field.BODY_TEXT)) { + } else if (Email.Field.BODY_TEXT.match(currentFieldName)) { builder.textBody(TextTemplate.parse(parser)); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Email.Field.BODY_HTML)) { + } else if (Email.Field.BODY_HTML.match(currentFieldName)) { builder.htmlBody(TextTemplate.parse(parser)); } else { throw new ElasticsearchParseException("could not parse email template. unknown field [{}.{}] field", diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/email/attachment/DataAttachmentParser.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/email/attachment/DataAttachmentParser.java index 8e5eedbef51..80c69d9ec17 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/email/attachment/DataAttachmentParser.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/email/attachment/DataAttachmentParser.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.notification.email.attachment; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.notification.email.Attachment; @@ -43,7 +42,7 @@ public class DataAttachmentParser implements EmailAttachmentParser templates = new ArrayList<>(); if (token == XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { @@ -271,7 +271,7 @@ public class HipChatMessage implements ToXContent { } } rooms = templates.toArray(new TextTemplate[templates.size()]); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.USER)) { + } else if (Field.USER.match(currentFieldName)) { List templates = new ArrayList<>(); if (token == 
XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { @@ -291,28 +291,28 @@ public class HipChatMessage implements ToXContent { } } users = templates.toArray(new TextTemplate[templates.size()]); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.COLOR)) { + } else if (Field.COLOR.match(currentFieldName)) { try { color = TextTemplate.parse(parser); } catch (ElasticsearchParseException | IllegalArgumentException e) { throw new ElasticsearchParseException("failed to parse hipchat message. failed to parse [{}] field", e, Field.COLOR.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.NOTIFY)) { + } else if (Field.NOTIFY.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_BOOLEAN) { notify = parser.booleanValue(); } else { throw new ElasticsearchParseException("failed to parse hipchat message. failed to parse [{}] field, expected a " + "boolean value but found [{}]", Field.NOTIFY.getPreferredName(), token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.BODY)) { + } else if (Field.BODY.match(currentFieldName)) { try { body = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("failed to parse hipchat message. 
failed to parse [{}] field", pe, Field.BODY.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.FORMAT)) { + } else if (Field.FORMAT.match(currentFieldName)) { try { messageFormat = HipChatMessage.Format.parse(parser); } catch (IllegalArgumentException ilae) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/hipchat/SentMessages.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/hipchat/SentMessages.java index d94fba86aa8..be56fe4b4a9 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/hipchat/SentMessages.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/hipchat/SentMessages.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.notification.hipchat; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.common.http.HttpRequest; import org.elasticsearch.xpack.common.http.HttpResponse; @@ -17,7 +18,7 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; -public class SentMessages implements ToXContent, Iterable { +public class SentMessages implements ToXContentObject, Iterable { private String accountName; private List messages; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/jira/JiraIssue.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/jira/JiraIssue.java index 845ecd58c6e..0b21d9717da 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/jira/JiraIssue.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/jira/JiraIssue.java @@ -9,9 +9,8 @@ import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; 
-import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -25,7 +24,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; -public class JiraIssue implements ToXContent { +public class JiraIssue implements ToXContentObject { @Nullable final String account; private final Map fields; @@ -161,12 +160,12 @@ public class JiraIssue implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ERRORS)) { + } else if (Field.ERRORS.match(currentFieldName)) { Map fieldErrors = parser.mapOrdered(); for (Map.Entry entry : fieldErrors.entrySet()) { errors.add("Field [" + entry.getKey() + "] has error [" + String.valueOf(entry.getValue()) + "]"); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ERROR_MESSAGES)) { + } else if (Field.ERROR_MESSAGES.match(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { errors.add(parser.text()); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java index 1e2d0c11208..1a11b1a56e8 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEvent.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.notification.pagerduty; import 
org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.http.HttpMethod; @@ -36,7 +36,7 @@ import java.util.Objects; * https://developer.pagerduty.com/documentation/integration/events/acknowledge * https://developer.pagerduty.com/documentation/integration/events/resolve */ -public class IncidentEvent implements ToXContent { +public class IncidentEvent implements ToXContentObject { static final String HOST = "events.pagerduty.com"; static final String PATH = "/generic/2010-04-15/create_event.json"; @@ -290,58 +290,58 @@ public class IncidentEvent implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.INCIDENT_KEY)) { + } else if (Fields.INCIDENT_KEY.match(currentFieldName)) { try { incidentKey = TextTemplate.parse(parser); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", Fields.INCIDENT_KEY.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.DESCRIPTION)) { + } else if (Fields.DESCRIPTION.match(currentFieldName)) { try { description = TextTemplate.parse(parser); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse pager duty event template. 
failed to parse field [{}]", Fields.DESCRIPTION.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.CLIENT)) { + } else if (Fields.CLIENT.match(currentFieldName)) { try { client = TextTemplate.parse(parser); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", Fields.CLIENT.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.CLIENT_URL)) { + } else if (Fields.CLIENT_URL.match(currentFieldName)) { try { clientUrl = TextTemplate.parse(parser); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", Fields.CLIENT_URL.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.ACCOUNT)) { + } else if (Fields.ACCOUNT.match(currentFieldName)) { try { account = parser.text(); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}]", Fields.CLIENT_URL.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.PROXY)) { + } else if (Fields.PROXY.match(currentFieldName)) { proxy = HttpProxy.parse(parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.EVENT_TYPE)) { + } else if (Fields.EVENT_TYPE.match(currentFieldName)) { try { eventType = TextTemplate.parse(parser); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse pager duty event template. 
failed to parse field [{}]", Fields.EVENT_TYPE.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.ATTACH_PAYLOAD)) { + } else if (Fields.ATTACH_PAYLOAD.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_BOOLEAN) { attachPayload = parser.booleanValue(); } else { throw new ElasticsearchParseException("could not parse pager duty event template. failed to parse field [{}], " + "expected a boolean value but found [{}] instead", Fields.ATTACH_PAYLOAD.getPreferredName(), token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.CONTEXT)) { + } else if (Fields.CONTEXT.match(currentFieldName)) { if (token == XContentParser.Token.START_ARRAY) { List list = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java index 472e3216690..614b996d717 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/IncidentEventContext.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.notification.pagerduty; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -195,7 +194,7 @@ public class IncidentEventContext implements ToXContent { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (Strings.hasLength(currentFieldName)) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TYPE)) { + 
if (XField.TYPE.match(currentFieldName)) { try { type = Type.valueOf(parser.text().toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException e) { @@ -211,13 +210,13 @@ public class IncidentEventContext implements ToXContent { throw new ElasticsearchParseException(msg, e, currentFieldName); } - if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.HREF)) { + if (XField.HREF.match(currentFieldName)) { href = parsedTemplate; - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TEXT)) { + } else if (XField.TEXT.match(currentFieldName)) { text = parsedTemplate; - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.SRC)) { + } else if (XField.SRC.match(currentFieldName)) { src = parsedTemplate; - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ALT)) { + } else if (XField.ALT.match(currentFieldName)) { alt = parsedTemplate; } else { String msg = "could not parse trigger incident event context. unknown field [{}]"; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/SentEvent.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/SentEvent.java index 88d77fbdad7..b65e938e7c9 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/SentEvent.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/pagerduty/SentEvent.java @@ -8,22 +8,21 @@ package org.elasticsearch.xpack.notification.pagerduty; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.watcher.actions.pagerduty.PagerDutyAction; import org.elasticsearch.xpack.common.http.HttpRequest; import org.elasticsearch.xpack.common.http.HttpResponse; +import org.elasticsearch.xpack.watcher.actions.pagerduty.PagerDutyAction; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -public class SentEvent implements ToXContent { +public class SentEvent implements ToXContentObject { final IncidentEvent event; @Nullable final HttpRequest request; @@ -115,11 +114,11 @@ public class SentEvent implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.MESSAGE)) { + } else if (XField.MESSAGE.match(currentFieldName)) { message = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.CODE)) { + } else if (XField.CODE.match(currentFieldName)) { // we don't use this code.. 
so just consume the token - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ERRORS)) { + } else if (XField.ERRORS.match(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { errors.add(parser.text()); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SentMessages.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SentMessages.java index 387d2de087c..540f3e376c0 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SentMessages.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SentMessages.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.notification.slack; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.common.http.HttpRequest; import org.elasticsearch.xpack.common.http.HttpResponse; @@ -17,7 +18,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; -public class SentMessages implements ToXContent, Iterable { +public class SentMessages implements ToXContentObject, Iterable { private String accountName; private List messages; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SlackAccount.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SlackAccount.java index 950796e2106..a57aac448b7 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SlackAccount.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/SlackAccount.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.common.http.HttpClient; +import org.elasticsearch.xpack.common.http.HttpMethod; import org.elasticsearch.xpack.common.http.HttpProxy; import org.elasticsearch.xpack.common.http.HttpRequest; import org.elasticsearch.xpack.common.http.HttpResponse; @@ -68,6 +69,7 @@ public class SlackAccount { public SentMessages.SentMessage send(final String to, final SlackMessage message, final HttpProxy proxy) { HttpRequest request = HttpRequest.builder(url.getHost(), url.getPort()) .path(url.getPath()) + .method(HttpMethod.POST) .proxy(proxy) .scheme(Scheme.parse(url.getScheme())) .jsonBody(new ToXContent() { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java index a5b0eedeac6..e0ad1fbfe03 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Attachment.java @@ -7,8 +7,7 @@ package org.elasticsearch.xpack.notification.slack.message; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.text.TextTemplate; @@ -143,7 +142,7 @@ public class Attachment implements MessageElement { return builder.endObject(); } - static class Template implements ToXContent { + static class Template implements ToXContentObject { final TextTemplate fallback; final TextTemplate color; @@ -307,70 +306,70 @@ public class Attachment implements MessageElement { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token 
== XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.FALLBACK)) { + } else if (XField.FALLBACK.match(currentFieldName)) { try { fallback = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.FALLBACK); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.COLOR)) { + } else if (XField.COLOR.match(currentFieldName)) { try { color = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.COLOR); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.PRETEXT)) { + } else if (XField.PRETEXT.match(currentFieldName)) { try { pretext = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.PRETEXT); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.AUTHOR_NAME)) { + } else if (XField.AUTHOR_NAME.match(currentFieldName)) { try { authorName = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.AUTHOR_NAME); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.AUTHOR_LINK)) { + } else if (XField.AUTHOR_LINK.match(currentFieldName)) { try { authorLink = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. 
failed to parse [{}] field", pe, XField.AUTHOR_LINK); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.AUTHOR_ICON)) { + } else if (XField.AUTHOR_ICON.match(currentFieldName)) { try { authorIcon = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.AUTHOR_ICON); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TITLE)) { + } else if (XField.TITLE.match(currentFieldName)) { try { title = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.TITLE); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TITLE_LINK)) { + } else if (XField.TITLE_LINK.match(currentFieldName)) { try { titleLink = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.TITLE_LINK); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TEXT)) { + } else if (XField.TEXT.match(currentFieldName)) { try { text = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. 
failed to parse [{}] field", pe, XField.TEXT); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.FIELDS)) { + } else if (XField.FIELDS.match(currentFieldName)) { if (token == XContentParser.Token.START_ARRAY) { List list = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { @@ -390,14 +389,14 @@ public class Attachment implements MessageElement { XField.FIELDS); } } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.IMAGE_URL)) { + } else if (XField.IMAGE_URL.match(currentFieldName)) { try { imageUrl = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment. failed to parse [{}] field", pe, XField.IMAGE_URL); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.THUMB_URL)) { + } else if (XField.THUMB_URL.match(currentFieldName)) { try { thumbUrl = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/DynamicAttachments.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/DynamicAttachments.java index 1c39fbc6e91..3b2bad7272a 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/DynamicAttachments.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/DynamicAttachments.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.notification.slack.message; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.text.TextTemplateEngine; @@ -63,14 +62,14 @@ public class DynamicAttachments implements MessageElement { while ((token = 
parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.LIST_PATH)) { + } else if (XField.LIST_PATH.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { listPath = parser.text(); } else { throw new ElasticsearchParseException("could not parse dynamic attachments. expected a string value for [{}] field, " + "but found [{}]", XField.LIST_PATH.getPreferredName(), token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TEMPLATE)) { + } else if (XField.TEMPLATE.match(currentFieldName)) { try { template = Attachment.Template.parse(parser); } catch (ElasticsearchParseException pe) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java index 58e473e7971..0bf6ebd68d1 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/Field.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.notification.slack.message; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -118,21 +117,21 @@ class Field implements MessageElement { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TITLE)) { + } else if (XField.TITLE.match(currentFieldName)) { try { title = 
TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment field. failed to parse [{}] field", pe, XField.TITLE); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.VALUE)) { + } else if (XField.VALUE.match(currentFieldName)) { try { value = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse message attachment field. failed to parse [{}] field", pe, XField.VALUE); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.SHORT)) { + } else if (XField.SHORT.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_BOOLEAN) { isShort = parser.booleanValue(); } else { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/MessageElement.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/MessageElement.java index 149c4d7374a..26ad5752a6d 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/MessageElement.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/MessageElement.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.notification.slack.message; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; -public interface MessageElement extends ToXContent { +public interface MessageElement extends ToXContentObject { interface XField { ParseField TITLE = new ParseField("title"); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java index 4515d528e35..9002cf804c5 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java 
+++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/notification/slack/message/SlackMessage.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.notification.slack.message; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -254,14 +253,14 @@ public class SlackMessage implements MessageElement { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.FROM)) { + } else if (XField.FROM.match(currentFieldName)) { try { builder.setFrom(TextTemplate.parse(parser)); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse slack message. failed to parse [{}] field", pe, XField.FROM.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TO)) { + } else if (XField.TO.match(currentFieldName)) { if (token == XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { try { @@ -279,21 +278,21 @@ public class SlackMessage implements MessageElement { XField.TO.getPreferredName()); } } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.TEXT)) { + } else if (XField.TEXT.match(currentFieldName)) { try { builder.setText(TextTemplate.parse(parser)); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse slack message. 
failed to parse [{}] field", pe, XField.TEXT.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ICON)) { + } else if (XField.ICON.match(currentFieldName)) { try { builder.setIcon(TextTemplate.parse(parser)); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse slack message. failed to parse [{}] field.", pe, XField.ICON.getPreferredName()); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.ATTACHMENTS)) { + } else if (XField.ATTACHMENTS.match(currentFieldName)) { if (token == XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { try { @@ -311,7 +310,7 @@ public class SlackMessage implements MessageElement { XField.ATTACHMENTS.getPreferredName()); } } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, XField.DYNAMIC_ATTACHMENTS)) { + } else if (XField.DYNAMIC_ATTACHMENTS.match(currentFieldName)) { try { builder.setDynamicAttachments(DynamicAttachments.parse(parser)); } catch (ElasticsearchParseException pe) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/Security.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/Security.java index 10b7f28f6ae..4a096f4e1ec 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -96,6 +96,7 @@ import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; import org.elasticsearch.xpack.security.authz.accesscontrol.SecurityIndexSearcherWrapper; import org.elasticsearch.xpack.security.authz.accesscontrol.SetSecurityUserProcessor; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import 
org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; @@ -240,7 +241,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { } List components = new ArrayList<>(); - final SecurityContext securityContext = new SecurityContext(settings, threadPool, cryptoService); + final SecurityContext securityContext = new SecurityContext(settings, threadPool.getThreadContext(), cryptoService); components.add(securityContext); // realms construction @@ -312,16 +313,19 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { cryptoService, failureHandler, threadPool, anonymousUser)); components.add(authcService.get()); - final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService); - final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client); - final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(securityContext); - final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore); + final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, licenseState); + final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, licenseState); + final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(); + final CompositeRolesStore allRolesStore = + new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, licenseState); + // to keep things simple, just invalidate all cached entries on license change. 
this happens so rarely that the impact should be + // minimal + licenseState.addListener(allRolesStore::invalidateAll); final AuthorizationService authzService = new AuthorizationService(settings, allRolesStore, clusterService, auditTrailService, failureHandler, threadPool, anonymousUser); - components.add(fileRolesStore); // has lifecycle components.add(nativeRolesStore); // used by roles actions components.add(reservedRolesStore); // used by roles actions - components.add(allRolesStore); // for SecurityFeatureSet + components.add(allRolesStore); // for SecurityFeatureSet and clear roles cache components.add(authzService); components.add(new SecurityLifecycleService(settings, clusterService, threadPool, indexAuditTrail, @@ -404,6 +408,8 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { NativeRolesStore.addSettings(settingsList); AuthenticationService.addSettings(settingsList); AuthorizationService.addSettings(settingsList); + settingsList.add(CompositeRolesStore.CACHE_SIZE_SETTING); + settingsList.add(FieldPermissionsCache.CACHE_SIZE_SETTING); // encryption settings CryptoService.addSettings(settingsList); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityContext.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityContext.java index d47af3d63d9..dc06f0fcd3e 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityContext.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityContext.java @@ -37,9 +37,9 @@ public class SecurityContext { * If cryptoService is null, security is disabled and {@link #getUser()} * and {@link #getAuthentication()} will always return null. 
*/ - public SecurityContext(Settings settings, ThreadPool threadPool, CryptoService cryptoService) { + public SecurityContext(Settings settings, ThreadContext threadContext, CryptoService cryptoService) { this.logger = Loggers.getLogger(getClass(), settings); - this.threadContext = threadPool.getThreadContext(); + this.threadContext = threadContext; this.cryptoService = cryptoService; this.signUserHeader = AuthenticationService.SIGN_USER_HEADER.get(settings); this.nodeName = Node.NODE_NAME_SETTING.get(settings); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java index 03c0b4881ce..d75243185c3 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; +import org.elasticsearch.xpack.security.authc.esnative.NativeRealmMigrator; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; @@ -52,7 +53,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust clusterService.addListener(this); clusterService.addListener(nativeUserStore); clusterService.addListener(nativeRolesStore); - clusterService.addListener(new SecurityTemplateService(settings, client)); + clusterService.addListener(new SecurityTemplateService(settings, client, new NativeRealmMigrator(settings, nativeUserStore))); clusterService.addLifecycleListener(new LifecycleListener() { @Override diff --git 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityTemplateService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityTemplateService.java index 20cb48bdfda..082042f4bf9 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityTemplateService.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/SecurityTemplateService.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; @@ -32,11 +33,15 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.xpack.security.authc.esnative.NativeRealmMigrator; import org.elasticsearch.xpack.template.TemplateUtils; import java.io.IOException; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import java.util.regex.Pattern; @@ -52,13 +57,20 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste static final String SECURITY_INDEX_TEMPLATE_VERSION_PATTERN = Pattern.quote("${security.template.version}"); static final Version MIN_READ_VERSION = Version.V_5_0_0; + enum UpgradeState { + NOT_STARTED, IN_PROGRESS, COMPLETE, FAILED + } + private final InternalClient client; final AtomicBoolean templateCreationPending = new AtomicBoolean(false); final 
AtomicBoolean updateMappingPending = new AtomicBoolean(false); + final AtomicReference upgradeDataState = new AtomicReference<>(UpgradeState.NOT_STARTED); + private final NativeRealmMigrator nativeRealmMigrator; - public SecurityTemplateService(Settings settings, InternalClient client) { + public SecurityTemplateService(Settings settings, InternalClient client, NativeRealmMigrator nativeRealmMigrator) { super(settings); this.client = client; + this.nativeRealmMigrator = nativeRealmMigrator; } @Override @@ -79,11 +91,25 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste // make sure mapping is up to date if (state.metaData().getIndices() != null) { if (securityIndexMappingUpToDate(state, logger) == false) { - updateSecurityMapping(); + if (securityIndexAvailable(state, logger)) { + upgradeSecurityData(state, this::updateSecurityMapping); + } } } } + private boolean securityIndexAvailable(ClusterState state, Logger logger) { + final IndexRoutingTable routingTable = getSecurityIndexRoutingTable(state); + if (routingTable == null) { + throw new IllegalStateException("Security index does not exist"); + } + if (routingTable.allPrimaryShardsActive() == false) { + logger.debug("Security index is not yet active"); + return false; + } + return true; + } + private void updateSecurityTemplate() { // only put the template if this is not already in progress if (templateCreationPending.compareAndSet(false, true)) { @@ -91,6 +117,33 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste } } + private boolean upgradeSecurityData(ClusterState state, Runnable andThen) { + // only update the data if this is not already in progress + if (upgradeDataState.compareAndSet(UpgradeState.NOT_STARTED, UpgradeState.IN_PROGRESS) ) { + final Version previousVersion = oldestSecurityIndexMappingVersion(state, logger); + nativeRealmMigrator.performUpgrade(previousVersion, new ActionListener() { + + @Override + public void 
onResponse(Boolean upgraded) { + upgradeDataState.set(UpgradeState.COMPLETE); + andThen.run(); + } + + @Override + public void onFailure(Exception e) { + upgradeDataState.set(UpgradeState.FAILED); + logger.error((Supplier) () -> new ParameterizedMessage("failed to upgrade security data from version [{}] ", + previousVersion), e); + } + }); + return true; + } else { + andThen.run(); + return false; + } + } + + private void updateSecurityMapping() { // only update the mapping if this is not already in progress if (updateMappingPending.compareAndSet(false, true) ) { @@ -182,6 +235,16 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste } static boolean securityIndexMappingVersionMatches(ClusterState clusterState, Logger logger, Predicate predicate) { + return securityIndexMappingVersions(clusterState, logger).stream().allMatch(predicate); + } + + public static Version oldestSecurityIndexMappingVersion(ClusterState clusterState, Logger logger) { + final Set versions = securityIndexMappingVersions(clusterState, logger); + return versions.stream().min(Version::compareTo).orElse(null); + } + + private static Set securityIndexMappingVersions(ClusterState clusterState, Logger logger) { + Set versions = new HashSet<>(); IndexMetaData indexMetaData = clusterState.metaData().getIndices().get(SECURITY_INDEX_NAME); if (indexMetaData != null) { for (Object object : indexMetaData.getMappings().values().toArray()) { @@ -189,19 +252,23 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste if (mappingMetaData.type().equals(MapperService.DEFAULT_MAPPING)) { continue; } - try { - if (containsCorrectVersion(mappingMetaData.sourceAsMap(), predicate) == false) { - return false; - } - } catch (IOException e) { - logger.error("Cannot parse the mapping for security index.", e); - throw new ElasticsearchException("Cannot parse the mapping for security index.", e); - } + versions.add(readMappingVersion(mappingMetaData, logger)); } - 
return true; - } else { - // index does not exist so when we create it it will be up to date - return true; + } + return versions; + } + + private static Version readMappingVersion(MappingMetaData mappingMetaData, Logger logger) { + try { + Map meta = (Map) mappingMetaData.sourceAsMap().get("_meta"); + if (meta == null) { + // something pre-5.0, but we don't know what. Use 2.3.0 as a placeholder for "old" + return Version.V_2_3_0; + } + return Version.fromString((String) meta.get(SECURITY_VERSION_STRING)); + } catch (IOException e) { + logger.error("Cannot parse the mapping for security index.", e); + throw new ElasticsearchException("Cannot parse the mapping for security index.", e); } } @@ -249,6 +316,18 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste return predicate.test(Version.fromString((String) meta.get(SECURITY_VERSION_STRING))); } + /** + * Returns the routing-table for the security index, or null if the security index does not exist. + */ + public static IndexRoutingTable getSecurityIndexRoutingTable(ClusterState clusterState) { + IndexMetaData metaData = clusterState.metaData().index(SECURITY_INDEX_NAME); + if (metaData == null) { + return null; + } else { + return clusterState.routingTable().index(SECURITY_INDEX_NAME); + } + } + public static boolean securityIndexMappingAndTemplateUpToDate(ClusterState clusterState, Logger logger) { if (securityTemplateExistsAndIsUpToDate(clusterState, logger) == false) { logger.debug("security template [{}] does not exist or is not up to date, so service cannot start", diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index b7534717c02..10933d127cd 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -37,9 +37,9 @@ import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; -import org.elasticsearch.xpack.security.authz.privilege.GeneralPrivilege; import org.elasticsearch.xpack.security.authz.privilege.HealthAndStatsPrivilege; import org.elasticsearch.xpack.security.crypto.CryptoService; +import org.elasticsearch.xpack.security.support.Automatons; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; @@ -55,8 +55,7 @@ import static org.elasticsearch.xpack.security.support.Exceptions.authorizationE public class SecurityActionFilter extends AbstractComponent implements ActionFilter { private static final Predicate LICENSE_EXPIRATION_ACTION_MATCHER = HealthAndStatsPrivilege.INSTANCE.predicate(); - private static final Predicate SECURITY_ACTION_MATCHER = - new GeneralPrivilege("_security_matcher", "cluster:admin/xpack/security*").predicate(); + private static final Predicate SECURITY_ACTION_MATCHER = Automatons.predicate("cluster:admin/xpack/security*"); private final AuthenticationService authcService; private final AuthorizationService authzService; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java index 0072d163f2d..0269d6cbf80 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.security.action.role; import 
org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -16,7 +16,7 @@ import java.io.IOException; /** * Response for a role being deleted from the security index */ -public class DeleteRoleResponse extends ActionResponse implements ToXContent { +public class DeleteRoleResponse extends ActionResponse implements ToXContentObject { private boolean found = false; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java index 6eac954d190..f7d9445ac61 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.support.MetadataUtils; -import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; import java.io.IOException; import java.util.ArrayList; @@ -65,12 +64,13 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest { - private final NativeRolesStore rolesStore; + private final CompositeRolesStore rolesStore; @Inject public TransportClearRolesCacheAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - NativeRolesStore rolesStore, IndexNameExpressionResolver indexNameExpressionResolver) { + CompositeRolesStore rolesStore, 
IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, ClearRolesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, ClearRolesCacheRequest::new, ClearRolesCacheRequest.Node::new, ThreadPool.Names.MANAGEMENT, ClearRolesCacheResponse.Node.class); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java index 507f75ea0d5..29097391896 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.security.action.role; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -16,15 +14,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.permission.KibanaRole; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.common.Strings.arrayToDelimitedString; - public class TransportGetRolesAction extends HandledTransportAction { private final NativeRolesStore nativeRolesStore; @@ -55,9 +50,8 @@ public class TransportGetRolesAction extends HandledTransportAction roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, 
env.settings()); + Map roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY); Set existingRoles; try { existingRoles = getRolesThatExist(terminal, env.settings(), env, options); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmMigrator.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmMigrator.java new file mode 100644 index 00000000000..4b404e50e4a --- /dev/null +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmMigrator.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.esnative; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.security.SecurityTemplateService; +import org.elasticsearch.xpack.security.user.LogstashSystemUser; + +/** + * Performs migration steps for the {@link NativeRealm} and {@link ReservedRealm}. + * When upgrading an Elasticsearch/X-Pack installation from a previous version, this class is responsible for ensuring that user/role + * data stored in the security index is converted to a format that is appropriate for the newly installed version. 
+ * @see SecurityTemplateService + */ +public class NativeRealmMigrator { + + private final NativeUsersStore nativeUsersStore; + private final Logger logger; + + public NativeRealmMigrator(Settings settings, NativeUsersStore nativeUsersStore) { + this.nativeUsersStore = nativeUsersStore; + this.logger = Loggers.getLogger(getClass(), settings); + } + + /** + * Special care must be taken because this upgrade happens before the security-mapping is updated. + * We do it in that order because the version of the security-mapping controls the behaviour of the + * reserved and native realm + * + * @param listener A listener for the results of the upgrade. Calls {@link ActionListener#onFailure(Exception)} if a problem occurs, + * {@link ActionListener#onResponse(Object) onResponse(true)} if an upgrade is performed, or + * {@link ActionListener#onResponse(Object) onResponse(false)} if no upgrade was required. + * @see SecurityTemplateService#securityIndexMappingAndTemplateSufficientToRead(ClusterState, Logger) + * @see NativeUsersStore#canWrite + * @see NativeUsersStore#mappingVersion + */ + public void performUpgrade(@Nullable Version previousVersion, ActionListener listener) { + try { + if (shouldDisableLogstashUser(previousVersion)) { + logger.info("Upgrading security from version [{}] - new reserved user [{}] will default to disabled", + previousVersion, LogstashSystemUser.NAME); + nativeUsersStore.ensureReservedUserIsDisabled(LogstashSystemUser.NAME, new ActionListener() { + @Override + public void onResponse(Void aVoid) { + listener.onResponse(true); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } else { + listener.onResponse(false); + } + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * If we're upgrading from a security version where the {@link LogstashSystemUser} did not exist, then we mark the user as disabled. 
+ * Otherwise the user will exist with a default password, which is desirable for an out-of-the-box experience in fresh installs + * but problematic for already-locked-down upgrades. + */ + private boolean shouldDisableLogstashUser(@Nullable Version previousVersion) { + return previousVersion != null && previousVersion.before(LogstashSystemUser.DEFINED_SINCE); + } + +} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 6f97bb47c66..52e96858fcb 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteResponse.Result; @@ -26,7 +27,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; @@ -62,7 +63,9 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import java.util.function.Predicate; +import static 
org.elasticsearch.xpack.security.SecurityTemplateService.oldestSecurityIndexMappingVersion; import static org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingAndTemplateSufficientToRead; import static org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingAndTemplateUpToDate; @@ -94,6 +97,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL private volatile boolean securityIndexExists = false; private volatile boolean canWrite = false; + private volatile Version mappingVersion = null; public NativeUsersStore(Settings settings, InternalClient client) { super(settings); @@ -404,7 +408,6 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL .execute(new ActionListener() { @Override public void onResponse(UpdateResponse updateResponse) { - assert updateResponse.getResult() == Result.UPDATED; clearRealmCache(username, listener, null); } @@ -428,6 +431,16 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL } } + void ensureReservedUserIsDisabled(final String username, final ActionListener listener) { + getReservedUserInfo(username, ActionListener.wrap(userInfo -> { + if (userInfo == null || userInfo.enabled) { + setReservedUserEnabled(username, false, RefreshPolicy.IMMEDIATE, listener); + } else { + listener.onResponse(null); + } + }, listener::onFailure)); + } + private void setReservedUserEnabled(final String username, final boolean enabled, final RefreshPolicy refreshPolicy, final ActionListener listener) { try { @@ -439,7 +452,6 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL .execute(new ActionListener() { @Override public void onResponse(UpdateResponse updateResponse) { - assert updateResponse.getResult() == Result.UPDATED || updateResponse.getResult() == Result.CREATED; clearRealmCache(username, listener, null); } @@ -509,19 +521,19 @@ public class NativeUsersStore extends 
AbstractComponent implements ClusterStateL if (securityIndexMappingAndTemplateUpToDate(clusterState, logger)) { canWrite = true; } else if (securityIndexMappingAndTemplateSufficientToRead(clusterState, logger)) { + mappingVersion = oldestSecurityIndexMappingVersion(clusterState, logger); canWrite = false; } else { canWrite = false; return false; } - IndexMetaData metaData = clusterState.metaData().index(SecurityTemplateService.SECURITY_INDEX_NAME); - if (metaData == null) { + final IndexRoutingTable routingTable = SecurityTemplateService.getSecurityIndexRoutingTable(clusterState); + if (routingTable == null) { logger.debug("security index [{}] does not exist, so service can start", SecurityTemplateService.SECURITY_INDEX_NAME); return true; } - - if (clusterState.routingTable().index(SecurityTemplateService.SECURITY_INDEX_NAME).allPrimaryShardsActive()) { + if (routingTable.allPrimaryShardsActive()) { logger.debug("security index [{}] all primary shards started, so service can start", SecurityTemplateService.SECURITY_INDEX_NAME); securityIndexExists = true; @@ -578,9 +590,21 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL return securityIndexExists; } - void getReservedUserInfo(String username, ActionListener listener) { - assert started(); + /** + * Test whether the effective (active) version of the security mapping meets the requiredVersion. + * + * @return true if the effective version passes the predicate, or the security mapping does not exist (null + * version). Otherwise, false. 
+ */ + public boolean checkMappingVersion(Predicate requiredVersion) { + return this.mappingVersion == null || requiredVersion.test(this.mappingVersion); + } + void getReservedUserInfo(String username, ActionListener listener) { + if (!started() && !securityIndexExists()) { + listener.onFailure(new IllegalStateException("Attempt to get reserved user info - started=" + started() + + " index-exists=" + securityIndexExists())); + } client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, RESERVED_USER_DOC_TYPE, username) .execute(new ActionListener() { @Override @@ -699,6 +723,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL public void clusterChanged(ClusterChangedEvent event) { securityIndexExists = event.state().metaData().indices().get(SecurityTemplateService.SECURITY_INDEX_NAME) != null; canWrite = securityIndexMappingAndTemplateUpToDate(event.state(), logger); + mappingVersion = oldestSecurityIndexMappingVersion(event.state(), logger); } public State state() { @@ -713,6 +738,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL } this.securityIndexExists = false; this.canWrite = false; + this.mappingVersion = null; this.state.set(State.INITIALIZED); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 985e494b480..b736bf55944 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.security.support.Exceptions; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.KibanaUser; +import org.elasticsearch.xpack.security.user.LogstashSystemUser; import org.elasticsearch.xpack.security.user.User; import java.util.ArrayList; @@ -28,6 +30,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.function.Predicate; /** * A realm for predefined users. These users can only be modified in terms of changing their passwords; no other modifications are allowed. @@ -38,6 +41,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { public static final String TYPE = "reserved"; static final char[] DEFAULT_PASSWORD_HASH = Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray())); private static final ReservedUserInfo DEFAULT_USER_INFO = new ReservedUserInfo(DEFAULT_PASSWORD_HASH, true); + private static final ReservedUserInfo DISABLED_USER_INFO = new ReservedUserInfo(DEFAULT_PASSWORD_HASH, false); private final NativeUsersStore nativeUsersStore; private final AnonymousUser anonymousUser; @@ -113,6 +117,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { switch (username) { case ElasticUser.NAME: case KibanaUser.NAME: + case LogstashSystemUser.NAME: return XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); default: return AnonymousUser.isAnonymousUsername(username, settings); @@ -126,6 +131,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { return new ElasticUser(userInfo.enabled); case KibanaUser.NAME: return new KibanaUser(userInfo.enabled); + case LogstashSystemUser.NAME: + return new LogstashSystemUser(userInfo.enabled); default: if (anonymousEnabled && 
anonymousUser.principal().equals(username)) { return anonymousUser; @@ -140,14 +147,21 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { listener.onResponse(anonymousEnabled ? Collections.singletonList(anonymousUser) : Collections.emptyList()); } else { nativeUsersStore.getAllReservedUserInfo(ActionListener.wrap((reservedUserInfos) -> { - List users = new ArrayList<>(3); + List users = new ArrayList<>(4); + ReservedUserInfo userInfo = reservedUserInfos.get(ElasticUser.NAME); users.add(new ElasticUser(userInfo == null || userInfo.enabled)); + userInfo = reservedUserInfos.get(KibanaUser.NAME); users.add(new KibanaUser(userInfo == null || userInfo.enabled)); + + userInfo = reservedUserInfos.get(LogstashSystemUser.NAME); + users.add(new LogstashSystemUser(userInfo == null || userInfo.enabled)); + if (anonymousEnabled) { users.add(anonymousUser); } + listener.onResponse(users); }, (e) -> { logger.error("failed to retrieve reserved users", e); @@ -160,6 +174,9 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { if (nativeUsersStore.started() == false) { // we need to be able to check for the user store being started... 
listener.onResponse(null); + } else if (userIsDefinedForCurrentSecurityMapping(username) == false) { + logger.debug("Marking user [{}] as disabled because the security mapping is not at the required version", username); + listener.onResponse(DISABLED_USER_INFO); } else if (nativeUsersStore.securityIndexExists() == false) { listener.onResponse(DEFAULT_USER_INFO); } else { @@ -176,4 +193,18 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { })); } } + + private boolean userIsDefinedForCurrentSecurityMapping(String username) { + final Version requiredVersion = getDefinedVersion(username); + return nativeUsersStore.checkMappingVersion(requiredVersion::onOrBefore); + } + + private Version getDefinedVersion(String username) { + switch (username) { + case LogstashSystemUser.NAME: + return LogstashSystemUser.DEFINED_SINCE; + default: + return Version.V_5_0_0; + } + } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java index 62617f73a08..8e49363bf56 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java @@ -69,7 +69,7 @@ class ActiveDirectorySessionFactory extends SessionFactory { String domainDN = buildDnFromDomain(domainName); GroupsResolver groupResolver = new ActiveDirectoryGroupsResolver(settings.getAsSettings("group_search"), domainDN); defaultADAuthenticator = new DefaultADAuthenticator(settings, timeout, logger, groupResolver, domainDN); - downLevelADAuthenticator = new DownLevelADAuthenticator(settings, timeout, logger, groupResolver, domainDN); + downLevelADAuthenticator = new DownLevelADAuthenticator(config, timeout, logger, groupResolver, domainDN, sslService); upnADAuthenticator = new 
UpnADAuthenticator(settings, timeout, logger, groupResolver, domainDN); } @@ -227,11 +227,16 @@ class ActiveDirectorySessionFactory extends SessionFactory { final String domainDN; final Settings settings; + final SSLService sslService; + final RealmConfig config; - DownLevelADAuthenticator(Settings settings, TimeValue timeout, Logger logger, GroupsResolver groupsResolver, String domainDN) { - super(settings, timeout, logger, groupsResolver, domainDN); + DownLevelADAuthenticator(RealmConfig config, TimeValue timeout, Logger logger, GroupsResolver groupsResolver, String domainDN, + SSLService sslService) { + super(config.settings(), timeout, logger, groupsResolver, domainDN); this.domainDN = domainDN; - this.settings = settings; + this.settings = config.settings(); + this.sslService = sslService; + this.config = config; } @Override @@ -271,7 +276,7 @@ class ActiveDirectorySessionFactory extends SessionFactory { // the global catalog does not replicate the necessary information to map a netbios dns name to a DN so we need to instead // connect to the normal ports. 
This code uses the standard ports to avoid adding even more settings and is probably ok as // most AD users do not use non-standard ports - final LDAPConnectionOptions options = connectionOptions(settings); + final LDAPConnectionOptions options = connectionOptions(config, sslService, logger); boolean startedSearching = false; LDAPConnection searchConnection = null; try { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java index c3cf962b3dc..ae8d9562356 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java @@ -263,6 +263,7 @@ public final class LdapUtils { // either no referrals to follow or we have explicitly disabled referral following on the connection so we just create // a new search result that has the values we've collected. 
The search result passed to this method will not have of the // entries as we are using a result listener and the results are not being collected by the LDAP library + LOGGER.trace("LDAP Search {} => {} ({})", searchRequest, searchResult, entryList); SearchResult resultWithValues = new SearchResult(searchResult.getMessageID(), searchResult.getResultCode(), searchResult .getDiagnosticMessage(), searchResult.getMatchedDN(), referralUrls, entryList, referenceList, entryList.size(), referenceList.size(), searchResult.getResponseControls()); @@ -270,11 +271,15 @@ public final class LdapUtils { } else if (depth >= ldapConnection.getConnectionOptions().getReferralHopLimit()) { // we've gone through too many levels of referrals so we terminate with the values collected so far and the proper result // code to indicate the search was terminated early + LOGGER.trace("Referral limit exceeded {} => {} ({})", searchRequest, searchResult, entryList); SearchResult resultWithValues = new SearchResult(searchResult.getMessageID(), ResultCode.REFERRAL_LIMIT_EXCEEDED, searchResult.getDiagnosticMessage(), searchResult.getMatchedDN(), referralUrls, entryList, referenceList, entryList.size(), referenceList.size(), searchResult.getResponseControls()); consumer.accept(requestID, resultWithValues); } else { + if (LOGGER.isTraceEnabled()) { + LOGGER.trace("LDAP referred elsewhere {} => {}", searchRequest, Arrays.toString(referralUrls)); + } // there are referrals to follow, so we start the process to follow the referrals final CountDown countDown = new CountDown(referralUrls.length); final List referralUrlsList = new ArrayList<>(Arrays.asList(referralUrls)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java index 557c2d2c7f2..4360063499d 100644 --- 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java @@ -13,13 +13,16 @@ import com.unboundid.util.ssl.HostNameSSLSocketVerifier; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.elasticsearch.xpack.security.authc.RealmSettings; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import javax.net.SocketFactory; import java.util.Arrays; @@ -73,7 +76,7 @@ public abstract class SessionFactory { this.timeout = searchTimeout; this.sslService = sslService; LDAPServers ldapServers = ldapServers(config.settings()); - this.serverSet = serverSet(config.settings(), sslService, ldapServers); + this.serverSet = serverSet(config, sslService, ldapServers); this.sslUsed = ldapServers.ssl; } @@ -107,13 +110,33 @@ public abstract class SessionFactory { throw new UnsupportedOperationException("unauthenticated sessions are not supported"); } - protected static LDAPConnectionOptions connectionOptions(Settings settings) { + protected static LDAPConnectionOptions connectionOptions(RealmConfig config, SSLService sslService, Logger logger) { + Settings realmSettings = config.settings(); LDAPConnectionOptions options = new LDAPConnectionOptions(); - options.setConnectTimeoutMillis(Math.toIntExact(settings.getAsTime(TIMEOUT_TCP_CONNECTION_SETTING, TIMEOUT_DEFAULT).millis())); - 
options.setFollowReferrals(settings.getAsBoolean(FOLLOW_REFERRALS_SETTING, true)); - options.setResponseTimeoutMillis(settings.getAsTime(TIMEOUT_TCP_READ_SETTING, TIMEOUT_DEFAULT).millis()); + options.setConnectTimeoutMillis(Math.toIntExact(realmSettings.getAsTime(TIMEOUT_TCP_CONNECTION_SETTING, TIMEOUT_DEFAULT).millis())); + options.setFollowReferrals(realmSettings.getAsBoolean(FOLLOW_REFERRALS_SETTING, true)); + options.setResponseTimeoutMillis(realmSettings.getAsTime(TIMEOUT_TCP_READ_SETTING, TIMEOUT_DEFAULT).millis()); options.setAllowConcurrentSocketFactoryUse(true); - if (settings.getAsBoolean(HOSTNAME_VERIFICATION_SETTING, true)) { + SSLConfigurationSettings sslConfigurationSettings = SSLConfigurationSettings.withoutPrefix(); + final Settings realmSSLSettings = realmSettings.getByPrefix("ssl."); + final boolean verificationModeExists = sslConfigurationSettings.verificationMode.exists(realmSSLSettings); + final boolean hostnameVerficationExists = realmSettings.get(HOSTNAME_VERIFICATION_SETTING, null) != null; + if (verificationModeExists && hostnameVerficationExists) { + throw new IllegalArgumentException("[" + HOSTNAME_VERIFICATION_SETTING + "] and [" + + sslConfigurationSettings.verificationMode.getKey() + "] may not be used at the same time"); + } else if (verificationModeExists) { + VerificationMode verificationMode = sslService.getVerificationMode(realmSSLSettings, Settings.EMPTY); + if (verificationMode == VerificationMode.FULL) { + options.setSSLSocketVerifier(new HostNameSSLSocketVerifier(true)); + } + } else if (hostnameVerficationExists) { + new DeprecationLogger(logger).deprecated("the setting [{}] has been deprecated and will be removed in a future version. use " + + "[{}] instead", RealmSettings.getFullSettingKey(config, HOSTNAME_VERIFICATION_SETTING), + RealmSettings.getFullSettingKey(config, "ssl." 
+ sslConfigurationSettings.verificationMode.getKey())); + if (realmSettings.getAsBoolean(HOSTNAME_VERIFICATION_SETTING, true)) { + options.setSSLSocketVerifier(new HostNameSSLSocketVerifier(true)); + } + } else { options.setSSLSocketVerifier(new HostNameSSLSocketVerifier(true)); } return options; @@ -132,7 +155,8 @@ public abstract class SessionFactory { return null; } - private ServerSet serverSet(Settings settings, SSLService clientSSLService, LDAPServers ldapServers) { + private ServerSet serverSet(RealmConfig realmConfig, SSLService clientSSLService, LDAPServers ldapServers) { + Settings settings = realmConfig.settings(); SocketFactory socketFactory = null; if (ldapServers.ssl()) { socketFactory = clientSSLService.sslSocketFactory(settings.getByPrefix("ssl.")); @@ -143,7 +167,7 @@ public abstract class SessionFactory { } } return LdapLoadBalancing.serverSet(ldapServers.addresses(), ldapServers.ports(), settings, socketFactory, - connectionOptions(settings)); + connectionOptions(realmConfig, sslService, logger)); } // package private to use for testing diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index bff2c1e9369..1ab8ed59007 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -31,28 +31,30 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.common.GroupedActionListener; import org.elasticsearch.xpack.security.SecurityTemplateService; +import org.elasticsearch.xpack.security.action.user.AuthenticateAction; +import 
org.elasticsearch.xpack.security.action.user.ChangePasswordAction; +import org.elasticsearch.xpack.security.action.user.UserRequest; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.AuthenticationFailureHandler; +import org.elasticsearch.xpack.security.authc.esnative.NativeRealm; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.security.authz.permission.ClusterPermission; -import org.elasticsearch.xpack.security.authz.permission.DefaultRole; -import org.elasticsearch.xpack.security.authz.permission.GlobalPermission; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.RunAsPermission; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.support.Automatons; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.user.XPackUser; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -70,6 +72,7 @@ public class AuthorizationService extends AbstractComponent { public static final String ORIGINATING_ACTION_KEY = "_originating_action_name"; private static final Predicate MONITOR_INDEX_PREDICATE 
= IndexPrivilege.MONITOR.predicate(); + private static final Predicate SAME_USER_PRIVILEGE = Automatons.predicate(ChangePasswordAction.NAME, AuthenticateAction.NAME); private final ClusterService clusterService; private final CompositeRolesStore rolesStore; @@ -78,6 +81,7 @@ public class AuthorizationService extends AbstractComponent { private final AuthenticationFailureHandler authcFailureHandler; private final ThreadContext threadContext; private final AnonymousUser anonymousUser; + private final FieldPermissionsCache fieldPermissionsCache; private final boolean isAnonymousEnabled; private final boolean anonymousAuthzExceptionEnabled; @@ -94,6 +98,7 @@ public class AuthorizationService extends AbstractComponent { this.anonymousUser = anonymousUser; this.isAnonymousEnabled = AnonymousUser.isAnonymousEnabled(settings); this.anonymousAuthzExceptionEnabled = ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.get(settings); + this.fieldPermissionsCache = new FieldPermissionsCache(settings); } /** @@ -106,8 +111,8 @@ public class AuthorizationService extends AbstractComponent { * @param request The request * @throws ElasticsearchSecurityException If the given user is no allowed to execute the given request */ - public void authorize(Authentication authentication, String action, TransportRequest request, Collection userRoles, - Collection runAsRoles) throws ElasticsearchSecurityException { + public void authorize(Authentication authentication, String action, TransportRequest request, Role userRole, + Role runAsRole) throws ElasticsearchSecurityException { final TransportRequest originalRequest = request; if (request instanceof ConcreteShardRequest) { request = ((ConcreteShardRequest) request).getRequest(); @@ -124,38 +129,20 @@ public class AuthorizationService extends AbstractComponent { } throw denial(authentication, action, request); } - Collection roles = userRoles; - // get the roles of the authenticated user, which may be different than the effective - GlobalPermission 
permission = permission(roles); - final boolean isRunAs = authentication.isRunAs(); - // permission can be empty as it might be that the user's role is unknown - if (permission.isEmpty()) { - if (isRunAs) { - // the request is a run as request so we should call the specific audit event for a denied run as attempt - throw denyRunAs(authentication, action, request); - } else { - throw denial(authentication, action, request); - } - } + // get the roles of the authenticated user, which may be different than the effective + Role permission = userRole; + // check if the request is a run as request + final boolean isRunAs = authentication.isRunAs(); if (isRunAs) { // if we are running as a user we looked up then the authentication must contain a lookedUpBy. If it doesn't then this user // doesn't really exist but the authc service allowed it through to avoid leaking users that exist in the system if (authentication.getLookedUpBy() == null) { throw denyRunAs(authentication, action, request); - } - - // first we must authorize for the RUN_AS action - RunAsPermission runAs = permission.runAs(); - if (runAs != null && runAs.check(authentication.getRunAsUser().principal())) { + } else if (permission.runAs().check(authentication.getRunAsUser().principal())) { grantRunAs(authentication, action, request); - roles = runAsRoles; - permission = permission(roles); - // permission can be empty as it might be that the run as user's role is unknown - if (permission.isEmpty()) { - throw denial(authentication, action, request); - } + permission = runAsRole; } else { throw denyRunAs(authentication, action, request); } @@ -164,8 +151,7 @@ public class AuthorizationService extends AbstractComponent { // first, we'll check if the action is a cluster action. 
If it is, we'll only check it against the cluster permissions if (ClusterPrivilege.ACTION_MATCHER.test(action)) { ClusterPermission cluster = permission.cluster(); - // we use the effectiveUser for permission checking since we are running as a user! - if (cluster != null && cluster.check(action, request, authentication)) { + if (cluster.check(action) || checkSameUserPermissions(action, request, authentication)) { setIndicesAccessControl(IndicesAccessControl.ALLOW_ALL); grant(authentication, action, request); return; @@ -210,12 +196,12 @@ public class AuthorizationService extends AbstractComponent { throw denial(authentication, action, request); } - if (permission.indices() == null || permission.indices().isEmpty()) { + if (permission.indices().check(action) == false) { throw denial(authentication, action, request); } MetaData metaData = clusterService.state().metaData(); - AuthorizedIndices authorizedIndices = new AuthorizedIndices(authentication.getRunAsUser(), roles, action, metaData); + AuthorizedIndices authorizedIndices = new AuthorizedIndices(authentication.getRunAsUser(), permission, action, metaData); Set indexNames = resolveIndexNames(authentication, action, request, metaData, authorizedIndices); assert !indexNames.isEmpty() : "every indices request needs to have its indices set thus the resolved indices must not be empty"; @@ -227,14 +213,14 @@ public class AuthorizationService extends AbstractComponent { return; } - IndicesAccessControl indicesAccessControl = permission.authorize(action, indexNames, metaData); + IndicesAccessControl indicesAccessControl = permission.authorize(action, indexNames, metaData, fieldPermissionsCache); if (!indicesAccessControl.isGranted()) { throw denial(authentication, action, request); } else if (indicesAccessControl.getIndexPermissions(SecurityTemplateService.SECURITY_INDEX_NAME) != null && indicesAccessControl.getIndexPermissions(SecurityTemplateService.SECURITY_INDEX_NAME).isGranted() && 
XPackUser.is(authentication.getRunAsUser()) == false && MONITOR_INDEX_PREDICATE.test(action) == false - && Arrays.binarySearch(authentication.getRunAsUser().roles(), SuperuserRole.NAME) < 0) { + && Arrays.binarySearch(authentication.getRunAsUser().roles(), ReservedRolesStore.SUPERUSER_ROLE.name()) < 0) { // only the XPackUser is allowed to work with this index, but we should allow indices monitoring actions through for debugging // purposes. These monitor requests also sometimes resolve indices concretely and then requests them logger.debug("user [{}] attempted to directly perform [{}] against the security index [{}]", @@ -253,7 +239,7 @@ public class AuthorizationService extends AbstractComponent { for (Alias alias : aliases) { aliasesAndIndices.add(alias.name()); } - indicesAccessControl = permission.authorize("indices:admin/aliases", aliasesAndIndices, metaData); + indicesAccessControl = permission.authorize("indices:admin/aliases", aliasesAndIndices, metaData, fieldPermissionsCache); if (!indicesAccessControl.isGranted()) { throw denial(authentication, "indices:admin/aliases", request); } @@ -288,16 +274,7 @@ public class AuthorizationService extends AbstractComponent { } } - // pkg-private for testing - GlobalPermission permission(Collection roles) { - GlobalPermission.Compound.Builder rolesBuilder = GlobalPermission.Compound.builder(); - for (Role role : roles) { - rolesBuilder.add(role); - } - return rolesBuilder.build(); - } - - public void roles(User user, ActionListener> roleActionListener) { + public void roles(User user, ActionListener roleActionListener) { // we need to special case the internal users in this method, if we apply the anonymous roles to every user including these system // user accounts then we run into the chance of a deadlock because then we need to get a role that we may be trying to get as the // internal user. 
The SystemUser is special cased as it has special privileges to execute internal actions and should never be @@ -307,8 +284,8 @@ public class AuthorizationService extends AbstractComponent { " roles"); } if (XPackUser.is(user)) { - assert XPackUser.INSTANCE.roles().length == 1 && SuperuserRole.NAME.equals(XPackUser.INSTANCE.roles()[0]); - roleActionListener.onResponse(Collections.singleton(SuperuserRole.INSTANCE)); + assert XPackUser.INSTANCE.roles().length == 1 && ReservedRolesStore.SUPERUSER_ROLE.name().equals(XPackUser.INSTANCE.roles()[0]); + roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); return; } @@ -321,15 +298,12 @@ public class AuthorizationService extends AbstractComponent { Collections.addAll(roleNames, anonymousUser.roles()); } - final Collection defaultRoles = Collections.singletonList(DefaultRole.INSTANCE); if (roleNames.isEmpty()) { - roleActionListener.onResponse(defaultRoles); + roleActionListener.onResponse(Role.EMPTY); + } else if (roleNames.contains(ReservedRolesStore.SUPERUSER_ROLE.name())) { + roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); } else { - final GroupedActionListener listener = new GroupedActionListener<>(roleActionListener, roleNames.size(), - defaultRoles); - for (String roleName : roleNames) { - rolesStore.roles(roleName, listener); - } + rolesStore.roles(roleNames, fieldPermissionsCache, roleActionListener); } } @@ -354,6 +328,49 @@ public class AuthorizationService extends AbstractComponent { action.equals(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); } + static boolean checkSameUserPermissions(String action, TransportRequest request, Authentication authentication) { + final boolean actionAllowed = SAME_USER_PRIVILEGE.test(action); + if (actionAllowed) { + if (request instanceof UserRequest == false) { + assert false : "right now only a user request should be allowed"; + return false; + } + UserRequest userRequest = (UserRequest) request; + String[] usernames = 
userRequest.usernames(); + if (usernames == null || usernames.length != 1 || usernames[0] == null) { + assert false : "this role should only be used for actions to apply to a single user"; + return false; + } + final String username = usernames[0]; + final boolean sameUsername = authentication.getRunAsUser().principal().equals(username); + if (sameUsername && ChangePasswordAction.NAME.equals(action)) { + return checkChangePasswordAction(authentication); + } + + assert AuthenticateAction.NAME.equals(action) || sameUsername == false; + return sameUsername; + } + return false; + } + + private static boolean checkChangePasswordAction(Authentication authentication) { + // we need to verify that this user was authenticated by or looked up by a realm type that support password changes + // otherwise we open ourselves up to issues where a user in a different realm could be created with the same username + // and do malicious things + final boolean isRunAs = authentication.isRunAs(); + final String realmType; + if (isRunAs) { + realmType = authentication.getLookedUpBy().getType(); + } else { + realmType = authentication.getAuthenticatedBy().getType(); + } + + assert realmType != null; + // ensure the user was authenticated by a realm that we can change a password for. 
The native realm is an internal realm and + // right now only one can exist in the realm configuration - if this changes we should update this check + return ReservedRealm.TYPE.equals(realmType) || NativeRealm.TYPE.equals(realmType); + } + private ElasticsearchSecurityException denial(Authentication authentication, String action, TransportRequest request) { auditTrail.accessDenied(authentication.getUser(), action, request); return denialException(authentication, action); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 0cf19e2d75d..731a0b86958 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -10,18 +10,15 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.support.AutomatonPredicate; import org.elasticsearch.xpack.security.support.Automatons; import org.elasticsearch.xpack.security.user.SystemUser; -import java.util.Collection; -import java.util.Collections; import java.util.function.BiConsumer; import java.util.function.Predicate; public final class AuthorizationUtils { - private static final Predicate INTERNAL_PREDICATE = new AutomatonPredicate(Automatons.patterns("internal:*")); + private static final Predicate INTERNAL_PREDICATE = Automatons.predicate("internal:*"); private AuthorizationUtils() {} @@ -72,14 +69,13 @@ public final class AuthorizationUtils { public static class AsyncAuthorizer { private final ActionListener listener; - private final BiConsumer, Collection> consumer; + private final BiConsumer consumer; 
private final Authentication authentication; - private volatile Collection userRoles; - private volatile Collection runAsRoles; + private volatile Role userRoles; + private volatile Role runAsRoles; private CountDown countDown = new CountDown(2); // we expect only two responses!! - public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer, - Collection> consumer) { + public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) { this.consumer = consumer; this.listener = listener; this.authentication = authentication; @@ -87,25 +83,25 @@ public final class AuthorizationUtils { public void authorize(AuthorizationService service) { if (SystemUser.is(authentication.getUser())) { - setUserRoles(Collections.emptyList()); // we can inform the listener immediately - nothing to fetch for us on system user - setRunAsRoles(Collections.emptyList()); + setUserRoles(null); // we can inform the listener immediately - nothing to fetch for us on system user + setRunAsRoles(null); } else { service.roles(authentication.getUser(), ActionListener.wrap(this::setUserRoles, listener::onFailure)); if (authentication.isRunAs()) { assert authentication.getRunAsUser() != null : "runAs user is null but shouldn't"; service.roles(authentication.getRunAsUser(), ActionListener.wrap(this::setRunAsRoles, listener::onFailure)); } else { - setRunAsRoles(Collections.emptyList()); + setRunAsRoles(null); } } } - private void setUserRoles(Collection roles) { + private void setUserRoles(Role roles) { this.userRoles = roles; maybeRun(); } - private void setRunAsRoles(Collection roles) { + private void setRunAsRoles(Role roles) { this.runAsRoles = roles; maybeRun(); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java index 86a60ce2283..3472f9d7786 100644 --- 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java @@ -9,13 +9,12 @@ import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.xpack.security.SecurityTemplateService; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.user.XPackUser; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -29,10 +28,10 @@ class AuthorizedIndices { private final User user; private final String action; private final MetaData metaData; - private final Collection userRoles; + private final Role userRoles; private List authorizedIndices; - AuthorizedIndices(User user, Collection userRoles, String action, MetaData metaData) { + AuthorizedIndices(User user, Role userRoles, String action, MetaData metaData) { this.user = user; this.userRoles = userRoles; this.action = action; @@ -47,16 +46,7 @@ class AuthorizedIndices { } private List load() { - if (userRoles.isEmpty()) { - return Collections.emptyList(); - } - - List> predicates = new ArrayList<>(); - for (Role userRole : userRoles) { - predicates.add(userRole.indices().allowedIndicesMatcher(action)); - } - - Predicate predicate = predicates.stream().reduce(s -> false, Predicate::or); + Predicate predicate = userRoles.indices().allowedIndicesMatcher(action); List indicesAndAliases = new ArrayList<>(); // TODO: can this be done smarter? I think there are usually more indices/aliases in the cluster then indices defined a roles? 
@@ -67,7 +57,7 @@ class AuthorizedIndices { } } - if (XPackUser.is(user) == false && Arrays.binarySearch(user.roles(), SuperuserRole.NAME) < 0) { + if (XPackUser.is(user) == false && Arrays.binarySearch(user.roles(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()) < 0) { // we should filter out the .security index from wildcards indicesAndAliases.remove(SecurityTemplateService.SECURITY_INDEX_NAME); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java index aa57915bc15..d3e2f2fc514 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.security.authz; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; @@ -18,12 +18,12 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.xpack.common.xcontent.XContentUtils; -import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.security.support.MetadataUtils; 
import org.elasticsearch.xpack.security.support.Validation; @@ -38,13 +38,14 @@ import java.util.Map; * A holder for a Role that contains user-readable information about the Role * without containing the actual Role object. */ -public class RoleDescriptor implements ToXContent { +public class RoleDescriptor implements ToXContentObject { private final String name; private final String[] clusterPrivileges; private final IndicesPrivileges[] indicesPrivileges; private final String[] runAs; private final Map metadata; + private final Map transientMetadata; public RoleDescriptor(String name, @Nullable String[] clusterPrivileges, @@ -58,12 +59,23 @@ public class RoleDescriptor implements ToXContent { @Nullable IndicesPrivileges[] indicesPrivileges, @Nullable String[] runAs, @Nullable Map metadata) { + this(name, clusterPrivileges, indicesPrivileges, runAs, metadata, null); + } + + public RoleDescriptor(String name, + @Nullable String[] clusterPrivileges, + @Nullable IndicesPrivileges[] indicesPrivileges, + @Nullable String[] runAs, + @Nullable Map metadata, + @Nullable Map transientMetadata) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; this.indicesPrivileges = indicesPrivileges != null ? indicesPrivileges : IndicesPrivileges.NONE; this.runAs = runAs != null ? runAs : Strings.EMPTY_ARRAY; this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap(); + this.transientMetadata = transientMetadata != null ? 
Collections.unmodifiableMap(transientMetadata) : + Collections.singletonMap("enabled", true); } public String getName() { @@ -86,6 +98,14 @@ public class RoleDescriptor implements ToXContent { return metadata; } + public Map getTransientMetadata() { + return transientMetadata; + } + + public boolean isUsingDocumentOrFieldLevelSecurity() { + return Arrays.stream(indicesPrivileges).anyMatch(ip -> ip.isUsingDocumentLevelSecurity() || ip.isUsingFieldLevelSecurity()); + } + @Override public String toString() { StringBuilder sb = new StringBuilder("Role["); @@ -126,7 +146,12 @@ public class RoleDescriptor implements ToXContent { return result; } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return toXContent(builder, params, true); + } + + public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean includeTransient) throws IOException { builder.startObject(); builder.array(Fields.CLUSTER.getPreferredName(), clusterPrivileges); builder.array(Fields.INDICES.getPreferredName(), (Object[]) indicesPrivileges); @@ -134,6 +159,9 @@ public class RoleDescriptor implements ToXContent { builder.array(Fields.RUN_AS.getPreferredName(), runAs); } builder.field(Fields.METADATA.getPreferredName(), metadata); + if (includeTransient) { + builder.field(Fields.TRANSIENT_METADATA.getPreferredName(), transientMetadata); + } return builder.endObject(); } @@ -147,7 +175,14 @@ public class RoleDescriptor implements ToXContent { } String[] runAs = in.readStringArray(); Map metadata = in.readMap(); - return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, runAs, metadata); + + final Map transientMetadata; + if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) { + transientMetadata = in.readMap(); + } else { + transientMetadata = Collections.emptyMap(); + } + return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, runAs, metadata, transientMetadata); } public static void 
writeTo(RoleDescriptor descriptor, StreamOutput out) throws IOException { @@ -159,6 +194,9 @@ public class RoleDescriptor implements ToXContent { } out.writeStringArray(descriptor.runAs); out.writeMap(descriptor.metadata); + if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) { + out.writeMap(descriptor.transientMetadata); + } } public static RoleDescriptor parse(String name, BytesReference source, boolean allow2xFormat) throws IOException { @@ -191,18 +229,26 @@ public class RoleDescriptor implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.INDICES)) { + } else if (Fields.INDICES.match(currentFieldName)) { indicesPrivileges = parseIndices(name, parser, allow2xFormat); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.RUN_AS)) { + } else if (Fields.RUN_AS.match(currentFieldName)) { runAsUsers = readStringArray(name, parser, true); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.CLUSTER)) { + } else if (Fields.CLUSTER.match(currentFieldName)) { clusterPrivileges = readStringArray(name, parser, true); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.METADATA)) { + } else if (Fields.METADATA.match(currentFieldName)) { if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException( "expected field [{}] to be of type object, but found [{}] instead", currentFieldName, token); } metadata = parser.map(); + } else if (Fields.TRANSIENT_METADATA.match(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + // consume object but just drop + parser.map(); + } else { + throw new ElasticsearchParseException("expected field [{}] to be an object, but found [{}] instead", + currentFieldName, token); + } } else { throw new ElasticsearchParseException("failed to parse role 
[{}]. unexpected field [{}]", name, currentFieldName); } @@ -241,14 +287,14 @@ public class RoleDescriptor implements ToXContent { } String currentFieldName = null; String[] names = null; - String query = null; + BytesReference query = null; String[] privileges = null; String[] grantedFields = null; String[] deniedFields = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.NAMES)) { + } else if (Fields.NAMES.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { names = new String[] { parser.text() }; } else if (token == XContentParser.Token.START_ARRAY) { @@ -261,34 +307,35 @@ public class RoleDescriptor implements ToXContent { throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. expected field [{}] " + "value to be a string or an array of strings, but found [{}] instead", roleName, currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.QUERY)) { + } else if (Fields.QUERY.match(currentFieldName)) { if (token == XContentParser.Token.START_OBJECT) { XContentBuilder builder = JsonXContent.contentBuilder(); XContentHelper.copyCurrentStructure(builder.generator(), parser); - query = builder.string(); - } else if (token == XContentParser.Token.VALUE_STRING){ + query = builder.bytes(); + } else if (token == XContentParser.Token.VALUE_STRING) { final String text = parser.text(); if (text.isEmpty() == false) { - query = text; + query = new BytesArray(text); } } else if (token != XContentParser.Token.VALUE_NULL) { throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. 
expected field [{}] " + - "value to be null, a string, or an object, but found [{}] instead", roleName, currentFieldName, token); + "value to be null, a string, an array, or an object, but found [{}] instead", roleName, currentFieldName, + token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.FIELD_PERMISSIONS)) { + } else if (Fields.FIELD_PERMISSIONS.match(currentFieldName)) { if (token == XContentParser.Token.START_OBJECT) { token = parser.nextToken(); do { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.GRANT_FIELDS)) { + if (Fields.GRANT_FIELDS.match(currentFieldName)) { parser.nextToken(); grantedFields = readStringArray(roleName, parser, true); if (grantedFields == null) { throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. {} must not " + "be null.", roleName, Fields.GRANT_FIELDS); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.EXCEPT_FIELDS)) { + } else if (Fields.EXCEPT_FIELDS.match(currentFieldName)) { parser.nextToken(); deniedFields = readStringArray(roleName, parser, true); if (deniedFields == null) { @@ -317,9 +364,9 @@ public class RoleDescriptor implements ToXContent { " in \"{}\".", roleName, XContentParser.Token.START_OBJECT, XContentParser.Token.START_ARRAY, token, Fields.FIELD_PERMISSIONS); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.PRIVILEGES)) { + } else if (Fields.PRIVILEGES.match(currentFieldName)) { privileges = readStringArray(roleName, parser, true); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.FIELD_PERMISSIONS_2X)) { + } else if (Fields.FIELD_PERMISSIONS_2X.match(currentFieldName)) { if (allow2xFormat) { grantedFields = readStringArray(roleName, parser, true); } else { @@ -327,6 +374,15 @@ public class RoleDescriptor implements ToXContent { " permissions in role [{}], use [\"{}\": {\"{}\":[...]," + 
"\"{}\":[...]}] instead", roleName, Fields.FIELD_PERMISSIONS, Fields.GRANT_FIELDS, Fields.EXCEPT_FIELDS, roleName); } + } else if (Fields.TRANSIENT_METADATA.match(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + // it is transient metadata, skip it + } + } else { + throw new ElasticsearchParseException("failed to parse transient metadata for role [{}]. expected {} but got {}" + + " in \"{}\".", roleName, XContentParser.Token.START_OBJECT, token, Fields.TRANSIENT_METADATA); + } } else { throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. unexpected field [{}]", roleName, currentFieldName); @@ -347,7 +403,8 @@ public class RoleDescriptor implements ToXContent { return RoleDescriptor.IndicesPrivileges.builder() .indices(names) .privileges(privileges) - .fieldPermissions(new FieldPermissions(grantedFields, deniedFields)) + .grantedFields(grantedFields) + .deniedFields(deniedFields) .query(query) .build(); } @@ -362,7 +419,8 @@ public class RoleDescriptor implements ToXContent { private String[] indices; private String[] privileges; - private FieldPermissions fieldPermissions = new FieldPermissions(); + private String[] grantedFields = null; + private String[] deniedFields = null; private BytesReference query; private IndicesPrivileges() { @@ -380,8 +438,14 @@ public class RoleDescriptor implements ToXContent { return this.privileges; } - public FieldPermissions getFieldPermissions() { - return fieldPermissions; + @Nullable + public String[] getGrantedFields() { + return this.grantedFields; + } + + @Nullable + public String[] getDeniedFields() { + return this.deniedFields; } @Nullable @@ -389,15 +453,57 @@ public class RoleDescriptor implements ToXContent { return this.query; } + public boolean isUsingDocumentLevelSecurity() { + return query != null; + } + + public boolean isUsingFieldLevelSecurity() { + return hasDeniedFields() || 
hasGrantedFields(); + } + + private boolean hasDeniedFields() { + return deniedFields != null && deniedFields.length > 0; + } + + private boolean hasGrantedFields() { + if (grantedFields != null && grantedFields.length >= 0) { + // we treat just '*' as no FLS since that's what the UI defaults to + if (grantedFields.length == 1 && "*".equals(grantedFields[0])) { + return false; + } else { + return true; + } + } + return false; + } + @Override public String toString() { StringBuilder sb = new StringBuilder("IndicesPrivileges["); sb.append("indices=[").append(Strings.arrayToCommaDelimitedString(indices)); sb.append("], privileges=[").append(Strings.arrayToCommaDelimitedString(privileges)); sb.append("], "); - sb.append(fieldPermissions.toString()); + if (grantedFields != null || deniedFields != null) { + sb.append(RoleDescriptor.Fields.FIELD_PERMISSIONS).append("=["); + if (grantedFields == null) { + sb.append(RoleDescriptor.Fields.GRANT_FIELDS).append("=null"); + } else { + sb.append(RoleDescriptor.Fields.GRANT_FIELDS).append("=[") + .append(Strings.arrayToCommaDelimitedString(grantedFields)); + sb.append("]"); + } + if (deniedFields == null) { + sb.append(", ").append(RoleDescriptor.Fields.EXCEPT_FIELDS).append("=null"); + } else { + sb.append(", ").append(RoleDescriptor.Fields.EXCEPT_FIELDS).append("=[") + .append(Strings.arrayToCommaDelimitedString(deniedFields)); + sb.append("]"); + } + sb.append("]"); + } if (query != null) { - sb.append(", query=").append(query.utf8ToString()); + sb.append(", query="); + sb.append(query.utf8ToString()); } sb.append("]"); return sb.toString(); @@ -412,7 +518,8 @@ public class RoleDescriptor implements ToXContent { if (!Arrays.equals(indices, that.indices)) return false; if (!Arrays.equals(privileges, that.privileges)) return false; - if (fieldPermissions.equals(that.fieldPermissions) == false) return false; + if (!Arrays.equals(grantedFields, that.grantedFields)) return false; + if (!Arrays.equals(deniedFields, 
that.deniedFields)) return false; return !(query != null ? !query.equals(that.query) : that.query != null); } @@ -420,7 +527,8 @@ public class RoleDescriptor implements ToXContent { public int hashCode() { int result = Arrays.hashCode(indices); result = 31 * result + Arrays.hashCode(privileges); - result = 31 * result + fieldPermissions.hashCode(); + result = 31 * result + Arrays.hashCode(grantedFields); + result = 31 * result + Arrays.hashCode(deniedFields); result = 31 * result + (query != null ? query.hashCode() : 0); return result; } @@ -430,7 +538,16 @@ public class RoleDescriptor implements ToXContent { builder.startObject(); builder.array("names", indices); builder.array("privileges", privileges); - builder = fieldPermissions.toXContent(builder, params); + if (grantedFields != null || deniedFields != null) { + builder.startObject(RoleDescriptor.Fields.FIELD_PERMISSIONS.getPreferredName()); + if (grantedFields != null) { + builder.array(RoleDescriptor.Fields.GRANT_FIELDS.getPreferredName(), grantedFields); + } + if (deniedFields != null) { + builder.array(RoleDescriptor.Fields.EXCEPT_FIELDS.getPreferredName(), deniedFields); + } + builder.endObject(); + } if (query != null) { builder.field("query", query.utf8ToString()); } @@ -446,7 +563,8 @@ public class RoleDescriptor implements ToXContent { @Override public void readFrom(StreamInput in) throws IOException { this.indices = in.readStringArray(); - this.fieldPermissions = new FieldPermissions(in); + this.grantedFields = in.readOptionalStringArray(); + this.deniedFields = in.readOptionalStringArray(); this.privileges = in.readStringArray(); this.query = in.readOptionalBytesReference(); } @@ -454,7 +572,8 @@ public class RoleDescriptor implements ToXContent { @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(indices); - fieldPermissions.writeTo(out); + out.writeOptionalStringArray(grantedFields); + out.writeOptionalStringArray(deniedFields); 
out.writeStringArray(privileges); out.writeOptionalBytesReference(query); } @@ -476,8 +595,13 @@ public class RoleDescriptor implements ToXContent { return this; } - public Builder fieldPermissions(FieldPermissions fieldPermissions) { - indicesPrivileges.fieldPermissions = fieldPermissions; + public Builder grantedFields(String... grantedFields) { + indicesPrivileges.grantedFields = grantedFields; + return this; + } + + public Builder deniedFields(String... deniedFields) { + indicesPrivileges.deniedFields = deniedFields; return this; } @@ -486,7 +610,11 @@ public class RoleDescriptor implements ToXContent { } public Builder query(@Nullable BytesReference query) { - indicesPrivileges.query = query; + if (query == null) { + indicesPrivileges.query = null; + } else { + indicesPrivileges.query = query; + } return this; } @@ -514,5 +642,6 @@ public class RoleDescriptor implements ToXContent { ParseField GRANT_FIELDS = new ParseField("grant"); ParseField EXCEPT_FIELDS = new ParseField("except"); ParseField METADATA = new ParseField("metadata"); + ParseField TRANSIENT_METADATA = new ParseField("transient_metadata"); } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControl.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControl.java index c9ec7a9c20d..2688823acff 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControl.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControl.java @@ -88,36 +88,5 @@ public class IndicesAccessControl { public Set getQueries() { return queries; } - - public IndexAccessControl merge(IndexAccessControl other) { - if (other.isGranted() == false) { - // nothing to merge - return this; - } - - final boolean granted = this.granted; - if (granted == false) { - // we do not support negatives, so if the current isn't granted - 
just return other - assert other.isGranted(); - return other; - } - - FieldPermissions newPermissions = FieldPermissions.merge(this.fieldPermissions, other.fieldPermissions); - - Set queries = null; - if (this.queries != null && other.getQueries() != null) { - queries = new HashSet<>(); - if (this.queries != null) { - queries.addAll(this.queries); - } - if (other.getQueries() != null) { - queries.addAll(other.getQueries()); - } - queries = unmodifiableSet(queries); - } - return new IndexAccessControl(granted, newPermissions, queries); - } - - } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java index d1ec8ce6e9d..edd4fad37e0 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java @@ -32,7 +32,6 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.logging.Loggers; @@ -303,7 +302,7 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper { if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("Unexpected token [" + token + "]"); } - Script script = Script.parse(parser, ParseFieldMatcher.EMPTY); + Script script = Script.parse(parser); // Add the user details to the params Map params = new HashMap<>(); if (script.getParams() != null) { @@ -403,7 +402,7 @@ public class SecurityIndexSearcherWrapper extends 
IndexSearcherWrapper { } }; QueryRewriteContext copy = new QueryRewriteContext(original.getIndexSettings(), original.getMapperService(), scriptService, - original.getXContentRegistry(), null, client, original.getIndexReader(), original::nowInMillis); + original.getXContentRegistry(), client, original.getIndexReader(), original::nowInMillis); queryBuilder.rewrite(copy); } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/ClusterPermission.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/ClusterPermission.java index 9f1fc321209..99677028d94 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/ClusterPermission.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/ClusterPermission.java @@ -5,93 +5,30 @@ */ package org.elasticsearch.xpack.security.authz.permission; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import java.util.List; -import java.util.Objects; import java.util.function.Predicate; /** * A permission that is based on privileges for cluster wide actions */ -public interface ClusterPermission extends Permission { +public final class ClusterPermission { - boolean check(String action, TransportRequest request, Authentication authentication); + public static final ClusterPermission NONE = new ClusterPermission(ClusterPrivilege.NONE); - class Core implements ClusterPermission { + private final ClusterPrivilege privilege; + private final Predicate predicate; - public static final Core NONE = new Core(ClusterPrivilege.NONE) { - @Override - public boolean check(String action, TransportRequest request, Authentication authentication) { - return false; - } - - @Override - public boolean isEmpty() { - return true; - } - }; - - private final ClusterPrivilege privilege; - 
private final Predicate predicate; - - Core(ClusterPrivilege privilege) { - this.privilege = privilege; - this.predicate = privilege.predicate(); - } - - public ClusterPrivilege privilege() { - return privilege; - } - - @Override - public boolean check(String action, TransportRequest request, Authentication authentication) { - return predicate.test(action); - } - - @Override - public boolean isEmpty() { - return false; - } + ClusterPermission(ClusterPrivilege privilege) { + this.privilege = privilege; + this.predicate = privilege.predicate(); } - class Globals implements ClusterPermission { - - private final List globals; - - Globals(List globals) { - this.globals = globals; - } - - @Override - public boolean check(String action, TransportRequest request, Authentication authentication) { - if (globals == null) { - return false; - } - for (GlobalPermission global : globals) { - Objects.requireNonNull(global, "global must not be null"); - Objects.requireNonNull(global.indices(), "global.indices() must not be null"); - if (global.cluster().check(action, request, authentication)) { - return true; - } - } - return false; - } - - @Override - public boolean isEmpty() { - if (globals == null || globals.isEmpty()) { - return true; - } - for (GlobalPermission global : globals) { - if (!global.isEmpty()) { - return false; - } - } - return true; - } + public ClusterPrivilege privilege() { + return privilege; } + public boolean check(String action) { + return predicate.test(action); + } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/DefaultRole.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/DefaultRole.java deleted file mode 100644 index 8ed29b0999c..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/DefaultRole.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.xpack.security.authc.esnative.NativeRealm; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.action.user.AuthenticateAction; -import org.elasticsearch.xpack.security.action.user.ChangePasswordAction; -import org.elasticsearch.xpack.security.action.user.UserRequest; -import org.elasticsearch.xpack.security.authz.permission.RunAsPermission.Core; -import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.Privilege.Name; -import org.elasticsearch.transport.TransportRequest; - -/** - * A default role that will be applied to all users other than the internal {@link org.elasticsearch.xpack.security.user.SystemUser}. 
This - * role grants access to actions that every user should be able to execute such as the ability to change their password and execute the - * authenticate endpoint to get information about themselves - */ -public class DefaultRole extends Role { - - private static final ClusterPermission.Core CLUSTER_PERMISSION = - new SameUserClusterPermission(ClusterPrivilege.get(new Name(ChangePasswordAction.NAME, AuthenticateAction.NAME))); - private static final IndicesPermission.Core INDICES_PERMISSION = IndicesPermission.Core.NONE; - private static final RunAsPermission.Core RUN_AS_PERMISSION = Core.NONE; - - public static final String NAME = "__default_role"; - public static final DefaultRole INSTANCE = new DefaultRole(); - - private DefaultRole() { - super(NAME, CLUSTER_PERMISSION, INDICES_PERMISSION, RUN_AS_PERMISSION); - } - - private static class SameUserClusterPermission extends ClusterPermission.Core { - - private SameUserClusterPermission(ClusterPrivilege privilege) { - super(privilege); - } - - @Override - public boolean check(String action, TransportRequest request, Authentication authentication) { - final boolean actionAllowed = super.check(action, request, authentication); - if (actionAllowed) { - if (request instanceof UserRequest == false) { - assert false : "right now only a user request should be allowed"; - return false; - } - UserRequest userRequest = (UserRequest) request; - String[] usernames = userRequest.usernames(); - if (usernames == null || usernames.length != 1 || usernames[0] == null) { - assert false : "this role should only be used for actions to apply to a single user"; - return false; - } - final String username = usernames[0]; - final boolean sameUsername = authentication.getRunAsUser().principal().equals(username); - if (sameUsername && ChangePasswordAction.NAME.equals(action)) { - return checkChangePasswordAction(authentication); - } - - assert AuthenticateAction.NAME.equals(action) || sameUsername == false; - return sameUsername; - } - 
return false; - } - } - - static boolean checkChangePasswordAction(Authentication authentication) { - // we need to verify that this user was authenticated by or looked up by a realm type that support password changes - // otherwise we open ourselves up to issues where a user in a different realm could be created with the same username - // and do malicious things - final boolean isRunAs = authentication.isRunAs(); - final String realmType; - if (isRunAs) { - realmType = authentication.getLookedUpBy().getType(); - } else { - realmType = authentication.getAuthenticatedBy().getType(); - } - - assert realmType != null; - // ensure the user was authenticated by a realm that we can change a password for. The native realm is an internal realm and right - // now only one can exist in the realm configuration - if this changes we should update this check - return ReservedRealm.TYPE.equals(realmType) || NativeRealm.TYPE.equals(realmType); - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java index 25ae2980d6b..13749fa0f66 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java @@ -13,8 +13,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.AllFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xpack.security.authz.RoleDescriptor; @@ -26,14 +24,10 @@ import java.util.Collection; import java.util.HashSet; 
import java.util.Set; -import static org.apache.lucene.util.automaton.MinimizationOperations.minimize; -import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES; import static org.apache.lucene.util.automaton.Operations.isTotal; import static org.apache.lucene.util.automaton.Operations.run; -import static org.apache.lucene.util.automaton.Operations.sameLanguage; import static org.apache.lucene.util.automaton.Operations.subsetOf; -import static org.apache.lucene.util.automaton.Operations.union; -import static org.elasticsearch.xpack.security.support.Automatons.minusAndDeterminize; +import static org.elasticsearch.xpack.security.support.Automatons.minusAndMinimize; /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is @@ -43,30 +37,42 @@ import static org.elasticsearch.xpack.security.support.Automatons.minusAndDeterm * 1. It has to match the patterns in grantedFieldsArray * 2. it must not match the patterns in deniedFieldsArray */ -public class FieldPermissions implements Writeable, ToXContent { +public final class FieldPermissions implements Writeable { + + public static final FieldPermissions DEFAULT = new FieldPermissions(); // the patterns for fields which we allow access to. if gratedFieldsArray is null we assume that all fields are grated access to - String[] grantedFieldsArray; + private final String[] grantedFieldsArray; // the patterns for fields which we deny access to. if this is an empty list or null we assume that we do not deny access to any // field explicitly - String[] deniedFieldsArray; + private final String[] deniedFieldsArray; // an automaton that matches all strings that match the patterns in permittedFieldsArray but does not match those that also match a // pattern in deniedFieldsArray. If permittedFieldsAutomaton is null we assume that all fields are granted access to. 
- Automaton permittedFieldsAutomaton; + private final Automaton permittedFieldsAutomaton; // we cannot easily determine if all fields are allowed and we can therefore also allow access to the _all field hence we deny access // to _all unless this was explicitly configured. - boolean allFieldIsAllowed = false; + private final boolean allFieldIsAllowed; + + public FieldPermissions() { + this(null, null); + } public FieldPermissions(StreamInput in) throws IOException { this(in.readOptionalStringArray(), in.readOptionalStringArray()); } public FieldPermissions(@Nullable String[] grantedFieldsArray, @Nullable String[] deniedFieldsArray) { + this(grantedFieldsArray, deniedFieldsArray, initializePermittedFieldsAutomaton(grantedFieldsArray, deniedFieldsArray), + checkAllFieldIsAllowed(grantedFieldsArray, deniedFieldsArray)); + } + + FieldPermissions(@Nullable String[] grantedFieldsArray, @Nullable String[] deniedFieldsArray, + Automaton permittedFieldsAutomaton, boolean allFieldIsAllowed) { this.grantedFieldsArray = grantedFieldsArray; this.deniedFieldsArray = deniedFieldsArray; - permittedFieldsAutomaton = initializePermittedFieldsAutomaton(grantedFieldsArray, deniedFieldsArray); - allFieldIsAllowed = checkAllFieldIsAllowed(grantedFieldsArray, deniedFieldsArray); + this.permittedFieldsAutomaton = permittedFieldsAutomaton; + this.allFieldIsAllowed = allFieldIsAllowed; } private static boolean checkAllFieldIsAllowed(String[] grantedFieldsArray, String[] deniedFieldsArray) { @@ -87,8 +93,7 @@ public class FieldPermissions implements Writeable, ToXContent { return false; } - private static Automaton initializePermittedFieldsAutomaton(final String[] grantedFieldsArray, - final String[] deniedFieldsArray) { + private static Automaton initializePermittedFieldsAutomaton(final String[] grantedFieldsArray, final String[] deniedFieldsArray) { Automaton grantedFieldsAutomaton; if (grantedFieldsArray == null || containsWildcard(grantedFieldsArray)) { grantedFieldsAutomaton = 
Automatons.MATCH_ALL; @@ -107,7 +112,7 @@ public class FieldPermissions implements Writeable, ToXContent { Arrays.toString(grantedFieldsArray)); } - grantedFieldsAutomaton = minusAndDeterminize(grantedFieldsAutomaton, deniedFieldsAutomaton); + grantedFieldsAutomaton = minusAndMinimize(grantedFieldsAutomaton, deniedFieldsAutomaton); return grantedFieldsAutomaton; } @@ -120,26 +125,12 @@ public class FieldPermissions implements Writeable, ToXContent { return false; } - public FieldPermissions() { - this(null, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalStringArray(grantedFieldsArray); out.writeOptionalStringArray(deniedFieldsArray); } - @Nullable - String[] getGrantedFieldsArray() { - return grantedFieldsArray; - } - - @Nullable - String[] getDeniedFieldsArray() { - return deniedFieldsArray; - } - @Override public String toString() { StringBuilder sb = new StringBuilder(); @@ -164,21 +155,6 @@ public class FieldPermissions implements Writeable, ToXContent { return sb.toString(); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (grantedFieldsArray != null || deniedFieldsArray != null) { - builder.startObject(RoleDescriptor.Fields.FIELD_PERMISSIONS.getPreferredName()); - if (grantedFieldsArray != null) { - builder.array(RoleDescriptor.Fields.GRANT_FIELDS.getPreferredName(), grantedFieldsArray); - } - if (deniedFieldsArray != null) { - builder.array(RoleDescriptor.Fields.EXCEPT_FIELDS.getPreferredName(), deniedFieldsArray); - } - builder.endObject(); - } - return builder; - } - /** * Returns true if this field permission policy allows access to the field and false if not. * fieldName can be a wildcard. 
@@ -187,22 +163,28 @@ public class FieldPermissions implements Writeable, ToXContent { return isTotal(permittedFieldsAutomaton) || run(permittedFieldsAutomaton, fieldName); } - // Also, if one grants no access to fields and the other grants all access, merging should result in all access... - public static FieldPermissions merge(FieldPermissions p1, FieldPermissions p2) { - Automaton mergedPermittedFieldsAutomaton; - // we only allow the union of the two automatons - mergedPermittedFieldsAutomaton = union(p1.permittedFieldsAutomaton, p2.permittedFieldsAutomaton); - // need to minimize otherwise isTotal() might return false even if one of the merged ones returned true before - mergedPermittedFieldsAutomaton = minimize(mergedPermittedFieldsAutomaton, DEFAULT_MAX_DETERMINIZED_STATES); - // if one of them allows access to _all we allow it for the merged too - boolean allFieldIsAllowedInMerged = p1.allFieldIsAllowed || p2.allFieldIsAllowed; - return new MergedFieldPermissions(mergedPermittedFieldsAutomaton, allFieldIsAllowedInMerged); + Automaton getPermittedFieldsAutomaton() { + return permittedFieldsAutomaton; + } + + @Nullable + String[] getGrantedFieldsArray() { + return grantedFieldsArray; + } + + @Nullable + String[] getDeniedFieldsArray() { + return deniedFieldsArray; } public boolean hasFieldLevelSecurity() { return isTotal(permittedFieldsAutomaton) == false; } + boolean isAllFieldIsAllowed() { + return allFieldIsAllowed; + } + public Set resolveAllowedFields(Set allowedMetaFields, MapperService mapperService) { HashSet finalAllowedFields = new HashSet<>(); // we always add the allowed meta fields because we must make sure access is not denied accidentally @@ -232,59 +214,14 @@ public class FieldPermissions implements Writeable, ToXContent { // Probably incorrect - comparing Object[] arrays with Arrays.equals if (!Arrays.equals(grantedFieldsArray, that.grantedFieldsArray)) return false; // Probably incorrect - comparing Object[] arrays with Arrays.equals - if 
(!Arrays.equals(deniedFieldsArray, that.deniedFieldsArray)) return false; - return sameLanguage(permittedFieldsAutomaton, that.permittedFieldsAutomaton); - + return Arrays.equals(deniedFieldsArray, that.deniedFieldsArray); } @Override public int hashCode() { int result = Arrays.hashCode(grantedFieldsArray); result = 31 * result + Arrays.hashCode(deniedFieldsArray); - result = 31 * result + permittedFieldsAutomaton.hashCode(); result = 31 * result + (allFieldIsAllowed ? 1 : 0); return result; } - - /** - * When we merge field permissions we need to union all the allowed fields. We do this by a union of the automatons - * that define which fields are granted access too. However, that means that after merging we cannot know anymore - * which strings defined the automatons. Hence we make a new class that only has an automaton for the fields that - * we grant access to and that throws an exception whenever we try to access the original patterns that lead to - * the automaton. - */ - public static class MergedFieldPermissions extends FieldPermissions { - public MergedFieldPermissions(Automaton grantedFields, boolean allFieldIsAllowed) { - assert grantedFields != null; - this.permittedFieldsAutomaton = grantedFields; - this.grantedFieldsArray = null; - this.deniedFieldsArray = null; - this.allFieldIsAllowed = allFieldIsAllowed; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new UnsupportedOperationException("Cannot build xcontent for merged field permissions"); - } - - @Override - public String toString() { - throw new UnsupportedOperationException("Cannot build string for merged field permissions"); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("Cannot stream for merged field permissions"); - } - - @Nullable - public String[] getGrantedFieldsArray() { - throw new UnsupportedOperationException("Merged field permissions does 
not maintain sets"); - } - - @Nullable - public String[] getDeniedFieldsArray() { - throw new UnsupportedOperationException("Merged field permissions does not maintain sets"); - } - } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java new file mode 100644 index 00000000000..121ab4eee0f --- /dev/null +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java @@ -0,0 +1,181 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authz.permission; + +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.security.Security.setting; + +/** + * A service for managing the caching of {@link FieldPermissions} as these may often need to be combined or created and internally they + * use an {@link org.apache.lucene.util.automaton.Automaton}, which can be costly to create once you account for minimization + */ +public final class 
FieldPermissionsCache { + + public static final Setting CACHE_SIZE_SETTING = Setting.longSetting( + setting("authz.store.roles.field_permissions.cache.max_size_in_bytes"), 100 * 1024 * 1024, -1L, Property.NodeScope); + private final Cache cache; + + public FieldPermissionsCache(Settings settings) { + this.cache = CacheBuilder.builder() + .setMaximumWeight(CACHE_SIZE_SETTING.get(settings)) + // this is not completely accurate but in most cases the automaton should be the most expensive aspect + .weigher((key, fieldPermissions) -> fieldPermissions.getPermittedFieldsAutomaton().ramBytesUsed()) + .build(); + } + + /** + * Gets a {@link FieldPermissions} instance that corresponds to the granted and denied parameters. The instance may come from the cache + * or if it gets created, the instance will be cached + */ + FieldPermissions getFieldPermissions(String[] granted, String[] denied) { + final Set grantedSet; + if (granted != null) { + grantedSet = new HashSet<>(granted.length); + Collections.addAll(grantedSet, granted); + } else { + grantedSet = null; + } + + final Set deniedSet; + if (denied != null) { + deniedSet = new HashSet<>(denied.length); + Collections.addAll(deniedSet, denied); + } else { + deniedSet = null; + } + + return getFieldPermissions(grantedSet, deniedSet); + } + + /** + * Gets a {@link FieldPermissions} instance that corresponds to the granted and denied parameters. The instance may come from the cache + * or if it gets created, the instance will be cached + */ + public FieldPermissions getFieldPermissions(Set granted, Set denied) { + Key fpKey = new Key(granted == null ? null : Collections.unmodifiableSet(granted), + denied == null ? null : Collections.unmodifiableSet(denied)); + try { + return cache.computeIfAbsent(fpKey, + (key) -> new FieldPermissions(key.grantedFields == null ? null : key.grantedFields.toArray(Strings.EMPTY_ARRAY), + key.deniedFields == null ? 
null : key.deniedFields.toArray(Strings.EMPTY_ARRAY))); + } catch (ExecutionException e) { + throw new ElasticsearchException("unable to compute field permissions", e); + } + } + + /** + * Returns a field permissions object that corresponds to the merging of the given field permissions and caches the instance if one was + * not found in the cache. + */ + FieldPermissions getFieldPermissions(Collection fieldPermissionsCollection) { + Optional allowAllFieldPermissions = fieldPermissionsCollection.stream() + .filter((fp) -> Operations.isTotal(fp.getPermittedFieldsAutomaton())) + .findFirst(); + return allowAllFieldPermissions.orElseGet(() -> { + final Set allowedFields; + Optional nullAllowedFields = fieldPermissionsCollection.stream() + .filter((fieldPermissions) -> fieldPermissions.getGrantedFieldsArray() == null) + .findFirst(); + if (nullAllowedFields.isPresent()) { + allowedFields = null; + } else { + allowedFields = fieldPermissionsCollection.stream() + .flatMap(fieldPermissions -> Arrays.stream(fieldPermissions.getGrantedFieldsArray())) + .collect(Collectors.toSet()); + } + + final Set deniedFields = fieldPermissionsCollection.stream() + .filter(fieldPermissions -> fieldPermissions.getDeniedFieldsArray() != null) + .flatMap(fieldPermissions -> Arrays.stream(fieldPermissions.getDeniedFieldsArray())) + .collect(Collectors.toSet()); + try { + return cache.computeIfAbsent(new Key(allowedFields, deniedFields), + (key) -> { + final String[] actualDeniedFields = key.deniedFields == null ? null : + computeDeniedFieldsForPermissions(fieldPermissionsCollection, key.deniedFields); + return new FieldPermissions(key.grantedFields == null ? 
null : key.grantedFields.toArray(Strings.EMPTY_ARRAY), + actualDeniedFields); + }); + } catch (ExecutionException e) { + throw new ElasticsearchException("unable to compute field permissions", e); + } + }); + } + + private static String[] computeDeniedFieldsForPermissions(Collection fieldPermissionsCollection, + Set allDeniedFields) { + Set allowedDeniedFields = new HashSet<>(); + fieldPermissionsCollection + .stream() + .filter(fieldPermissions -> fieldPermissions.getDeniedFieldsArray() != null) + .forEach((fieldPermissions) -> { + String[] deniedFieldsForPermission = fieldPermissions.getDeniedFieldsArray(); + fieldPermissionsCollection.forEach((fp) -> { + if (fp != fieldPermissions) { + Arrays.stream(deniedFieldsForPermission).forEach((field) -> { + if (fp.grantsAccessTo(field)) { + allowedDeniedFields.add(field); + } + }); + } + }); + }); + + Set difference = Sets.difference(allDeniedFields, allowedDeniedFields); + if (difference.isEmpty()) { + return null; + } else { + return difference.toArray(Strings.EMPTY_ARRAY); + } + } + + private static class Key { + + private final Set grantedFields; + private final Set deniedFields; + + Key(Set grantedFields, Set deniedFields) { + this.grantedFields = grantedFields == null ? null : Collections.unmodifiableSet(grantedFields); + this.deniedFields = deniedFields == null ? null : Collections.unmodifiableSet(deniedFields); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof Key)) return false; + + Key key = (Key) o; + + if (grantedFields != null ? !grantedFields.equals(key.grantedFields) : key.grantedFields != null) return false; + return deniedFields != null ? deniedFields.equals(key.deniedFields) : key.deniedFields == null; + } + + @Override + public int hashCode() { + int result = grantedFields != null ? grantedFields.hashCode() : 0; + result = 31 * result + (deniedFields != null ? 
deniedFields.hashCode() : 0); + return result; + } + } +} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/GlobalPermission.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/GlobalPermission.java deleted file mode 100644 index 5c0add3f370..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/GlobalPermission.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * A composite permission that combines {@code cluster}, {@code indices} and {@code run_as} permissions - */ -public class GlobalPermission implements Permission { - - public static final GlobalPermission NONE = new GlobalPermission(ClusterPermission.Core.NONE, IndicesPermission.Core.NONE, - RunAsPermission.Core.NONE); - - private final ClusterPermission cluster; - private final IndicesPermission indices; - private final RunAsPermission runAs; - - GlobalPermission(ClusterPermission cluster, IndicesPermission indices, RunAsPermission runAs) { - this.cluster = cluster; - this.indices = indices; - this.runAs = runAs; - } - - public ClusterPermission cluster() { - return cluster; - } - - public IndicesPermission indices() { - return indices; - } - - public RunAsPermission runAs() { - return runAs; - } - - @Override - public boolean isEmpty() { - return (cluster == null || cluster.isEmpty()) && (indices == null || indices.isEmpty()) && 
(runAs == null || runAs.isEmpty()); - } - - /** - * Returns whether at least one group encapsulated by this indices permissions is authorized to execute the - * specified action with the requested indices/aliases. At the same time if field and/or document level security - * is configured for any group also the allowed fields and role queries are resolved. - */ - public IndicesAccessControl authorize(String action, Set requestedIndicesOrAliases, MetaData metaData) { - Map indexPermissions = indices.authorize( - action, requestedIndicesOrAliases, metaData - ); - - // At least one role / indices permission set need to match with all the requested indices/aliases: - boolean granted = true; - for (Map.Entry entry : indexPermissions.entrySet()) { - if (!entry.getValue().isGranted()) { - granted = false; - break; - } - } - return new IndicesAccessControl(granted, indexPermissions); - } - - public static class Compound extends GlobalPermission { - - Compound(List globals) { - super(new ClusterPermission.Globals(globals), new IndicesPermission.Globals(globals), new RunAsPermission.Globals(globals)); - } - - public static Compound.Builder builder() { - return new Compound.Builder(); - } - - public static class Builder { - - private List globals = new ArrayList<>(); - - private Builder() { - } - - public Compound.Builder add(GlobalPermission global) { - globals.add(global); - return this; - } - - public Compound build() { - return new Compound(Collections.unmodifiableList(globals)); - } - } - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/IndicesPermission.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/IndicesPermission.java index daa3dd7b964..6338f290e9c 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/IndicesPermission.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/IndicesPermission.java @@ -12,12 +12,10 
@@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.security.support.AutomatonPredicate; import org.elasticsearch.xpack.security.support.Automatons; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -30,7 +28,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.function.Predicate; -import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static java.util.Collections.unmodifiableSet; @@ -38,12 +35,45 @@ import static java.util.Collections.unmodifiableSet; * A permission that is based on privileges for index related actions executed * on specific indices */ -public interface IndicesPermission extends Permission, Iterable { +public final class IndicesPermission implements Iterable { + + public static final IndicesPermission NONE = new IndicesPermission(); + + private final Function> loadingFunction; + + private final ConcurrentHashMap> allowedIndicesMatchersForAction = new ConcurrentHashMap<>(); + + private final Group[] groups; + + public IndicesPermission(Group... 
groups) { + this.groups = groups; + loadingFunction = (action) -> { + List indices = new ArrayList<>(); + for (Group group : groups) { + if (group.actionMatcher.test(action)) { + indices.addAll(Arrays.asList(group.indices)); + } + } + return Automatons.predicate(indices); + }; + } + + @Override + public Iterator iterator() { + return Arrays.asList(groups).iterator(); + } + + public Group[] groups() { + return groups; + } /** - * Authorizes the provided action against the provided indices, given the current cluster metadata + * @return A predicate that will match all the indices that this permission + * has the privilege for executing the given action on. */ - Map authorize(String action, Set requestedIndicesOrAliases, MetaData metaData); + public Predicate allowedIndicesMatcher(String action) { + return allowedIndicesMatchersForAction.computeIfAbsent(action, loadingFunction); + } /** * Checks if the permission matches the provided action, without looking at indices. @@ -51,277 +81,83 @@ public interface IndicesPermission extends Permission, Iterable iterator() { - return Collections.emptyIterator(); - } - - @Override - public boolean isEmpty() { + public boolean check(String action) { + for (Group group : groups) { + if (group.check(action)) { return true; } - }; - - private final Function> loadingFunction; - - private final ConcurrentHashMap> allowedIndicesMatchersForAction = new ConcurrentHashMap<>(); - - private final Group[] groups; - - public Core(List groups) { - this(groups.toArray(new Group[groups.size()])); } + return false; + } - public Core(Group... 
groups) { - this.groups = groups; - loadingFunction = (action) -> { - List indices = new ArrayList<>(); - for (Group group : groups) { - if (group.actionMatcher.test(action)) { - indices.addAll(Arrays.asList(group.indices)); - } + /** + * Authorizes the provided action against the provided indices, given the current cluster metadata + */ + public Map authorize(String action, Set requestedIndicesOrAliases, + MetaData metaData, FieldPermissionsCache fieldPermissionsCache) { + // now... every index that is associated with the request, must be granted + // by at least one indices permission group + + SortedMap allAliasesAndIndices = metaData.getAliasAndIndexLookup(); + Map> fieldPermissionsByIndex = new HashMap<>(); + Map> roleQueriesByIndex = new HashMap<>(); + Map grantedBuilder = new HashMap<>(); + + for (String indexOrAlias : requestedIndicesOrAliases) { + boolean granted = false; + Set concreteIndices = new HashSet<>(); + AliasOrIndex aliasOrIndex = allAliasesAndIndices.get(indexOrAlias); + if (aliasOrIndex != null) { + for (IndexMetaData indexMetaData : aliasOrIndex.getIndices()) { + concreteIndices.add(indexMetaData.getIndex().getName()); } - return new AutomatonPredicate(Automatons.patterns(Collections.unmodifiableList(indices))); - }; - } + } - @Override - public Iterator iterator() { - return Arrays.asList(groups).iterator(); - } - - public Group[] groups() { - return groups; - } - - @Override - public boolean isEmpty() { - return groups == null || groups.length == 0; - } - - /** - * @return A predicate that will match all the indices that this permission - * has the privilege for executing the given action on. 
- */ - public Predicate allowedIndicesMatcher(String action) { - return allowedIndicesMatchersForAction.computeIfAbsent(action, loadingFunction); - } - - @Override - public boolean check(String action) { for (Group group : groups) { - if (group.check(action)) { - return true; - } - } - return false; - } - - @Override - public Map authorize(String action, Set requestedIndicesOrAliases, - MetaData metaData) { - // now... every index that is associated with the request, must be granted - // by at least one indices permission group - - SortedMap allAliasesAndIndices = metaData.getAliasAndIndexLookup(); - Map> fieldPermissionsByIndex = new HashMap<>(); - Map> roleQueriesByIndex = new HashMap<>(); - Map grantedBuilder = new HashMap<>(); - - for (String indexOrAlias : requestedIndicesOrAliases) { - boolean granted = false; - Set concreteIndices = new HashSet<>(); - AliasOrIndex aliasOrIndex = allAliasesAndIndices.get(indexOrAlias); - if (aliasOrIndex != null) { - for (IndexMetaData indexMetaData : aliasOrIndex.getIndices()) { - concreteIndices.add(indexMetaData.getIndex().getName()); - } - } - - for (Group group : groups) { - if (group.check(action, indexOrAlias)) { - granted = true; - for (String index : concreteIndices) { - if (fieldPermissionsByIndex.get(index) == null) { - fieldPermissionsByIndex.put(index, new HashSet<>()); - } - fieldPermissionsByIndex.get(index).add(group.getFieldPermissions()); - if (group.hasQuery()) { - Set roleQueries = roleQueriesByIndex.get(index); - if (roleQueries == null) { - roleQueries = new HashSet<>(); - roleQueriesByIndex.put(index, roleQueries); - } - roleQueries.add(group.getQuery()); - } - } - } - } - - if (concreteIndices.isEmpty()) { - grantedBuilder.put(indexOrAlias, granted); - } else { - for (String concreteIndex : concreteIndices) { - grantedBuilder.put(concreteIndex, granted); - } - } - } - - Map indexPermissions = new HashMap<>(); - for (Map.Entry entry : grantedBuilder.entrySet()) { - String index = entry.getKey(); - Set 
roleQueries = roleQueriesByIndex.get(index); - if (roleQueries != null) { - roleQueries = unmodifiableSet(roleQueries); - } - - FieldPermissions fieldPermissions = new FieldPermissions(); - Set indexFieldPermissions = fieldPermissionsByIndex.get(index); - if (indexFieldPermissions != null) { - // get the first field permission entry because we do not want the merge to overwrite granted fields with null - fieldPermissions = indexFieldPermissions.iterator().next(); - for (FieldPermissions fp : indexFieldPermissions) { - fieldPermissions = FieldPermissions.merge(fieldPermissions, fp); - } - } - indexPermissions.put(index, new IndicesAccessControl.IndexAccessControl(entry.getValue(), fieldPermissions, roleQueries)); - } - return unmodifiableMap(indexPermissions); - } - - } - - class Globals implements IndicesPermission { - - private final List globals; - - public Globals(List globals) { - this.globals = globals; - } - - @Override - public Iterator iterator() { - return globals == null || globals.isEmpty() ? 
- Collections.emptyIterator() : - new Globals.Iter(globals); - } - - @Override - public boolean isEmpty() { - if (globals == null || globals.isEmpty()) { - return true; - } - for (GlobalPermission global : globals) { - if (!global.indices().isEmpty()) { - return false; - } - } - return true; - } - - @Override - public boolean check(String action) { - if (globals == null) { - return false; - } - for (GlobalPermission global : globals) { - Objects.requireNonNull(global, "global must not be null"); - Objects.requireNonNull(global.indices(), "global.indices() must not be null"); - if (global.indices().check(action)) { - return true; - } - } - return false; - } - - @Override - public Map authorize(String action, Set requestedIndicesOrAliases, - MetaData metaData) { - if (isEmpty()) { - return emptyMap(); - } - - // What this code does is just merge `IndexAccessControl` instances from the permissions this class holds: - Map indicesAccessControl = null; - for (GlobalPermission permission : globals) { - Map temp = permission.indices().authorize(action, - requestedIndicesOrAliases, metaData); - if (indicesAccessControl == null) { - indicesAccessControl = new HashMap<>(temp); - } else { - for (Map.Entry entry : temp.entrySet()) { - IndicesAccessControl.IndexAccessControl existing = indicesAccessControl.get(entry.getKey()); - if (existing != null) { - indicesAccessControl.put(entry.getKey(), existing.merge(entry.getValue())); - } else { - indicesAccessControl.put(entry.getKey(), entry.getValue()); + if (group.check(action, indexOrAlias)) { + granted = true; + for (String index : concreteIndices) { + Set fieldPermissions = fieldPermissionsByIndex.computeIfAbsent(index, (k) -> new HashSet<>()); + fieldPermissions.add(group.getFieldPermissions()); + if (group.hasQuery()) { + Set roleQueries = roleQueriesByIndex.computeIfAbsent(index, (k) -> new HashSet<>()); + roleQueries.addAll(group.getQuery()); } } } } - if (indicesAccessControl == null) { - return emptyMap(); + + if 
(concreteIndices.isEmpty()) { + grantedBuilder.put(indexOrAlias, granted); } else { - return unmodifiableMap(indicesAccessControl); + for (String concreteIndex : concreteIndices) { + grantedBuilder.put(concreteIndex, granted); + } } } - static class Iter implements Iterator { - - private final Iterator globals; - private Iterator current; - - Iter(List globals) { - this.globals = globals.iterator(); - advance(); + Map indexPermissions = new HashMap<>(); + for (Map.Entry entry : grantedBuilder.entrySet()) { + String index = entry.getKey(); + Set roleQueries = roleQueriesByIndex.get(index); + if (roleQueries != null) { + roleQueries = unmodifiableSet(roleQueries); } - @Override - public boolean hasNext() { - return current != null && current.hasNext(); - } - - @Override - public Group next() { - Group group = current.next(); - advance(); - return group; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - private void advance() { - if (current != null && current.hasNext()) { - return; - } - if (!globals.hasNext()) { - // we've reached the end of the globals array - current = null; - return; - } - - while (globals.hasNext()) { - IndicesPermission indices = globals.next().indices(); - if (!indices.isEmpty()) { - current = indices.iterator(); - return; - } - } - - current = null; + final FieldPermissions fieldPermissions; + final Set indexFieldPermissions = fieldPermissionsByIndex.get(index); + if (indexFieldPermissions != null && indexFieldPermissions.isEmpty() == false) { + fieldPermissions = indexFieldPermissions.size() == 1 ? 
indexFieldPermissions.iterator().next() : + fieldPermissionsCache.getFieldPermissions(indexFieldPermissions); + } else { + fieldPermissions = FieldPermissions.DEFAULT; } + indexPermissions.put(index, new IndicesAccessControl.IndexAccessControl(entry.getValue(), fieldPermissions, roleQueries)); } + return unmodifiableMap(indexPermissions); } - class Group { + public static class Group { private final IndexPrivilege privilege; private final Predicate actionMatcher; private final String[] indices; @@ -332,14 +168,14 @@ public interface IndicesPermission extends Permission, Iterable query; - public Group(IndexPrivilege privilege, FieldPermissions fieldPermissions, @Nullable BytesReference query, String... indices) { + public Group(IndexPrivilege privilege, FieldPermissions fieldPermissions, @Nullable Set query, String... indices) { assert indices.length != 0; this.privilege = privilege; this.actionMatcher = privilege.predicate(); this.indices = indices; - this.indexNameMatcher = new AutomatonPredicate(Automatons.patterns(indices)); + this.indexNameMatcher = Automatons.predicate(indices); this.fieldPermissions = Objects.requireNonNull(fieldPermissions); this.query = query; } @@ -353,7 +189,7 @@ public interface IndicesPermission extends Permission, Iterable getQuery() { return query; } @@ -366,7 +202,7 @@ public interface IndicesPermission extends Permission, Iterable requestedIndicesOrAliases, MetaData metaData, + FieldPermissionsCache fieldPermissionsCache) { + Map indexPermissions = indices.authorize( + action, requestedIndicesOrAliases, metaData, fieldPermissionsCache + ); + + // At least one role / indices permission set need to match with all the requested indices/aliases: + boolean granted = true; + for (Map.Entry entry : indexPermissions.entrySet()) { + if (!entry.getValue().isGranted()) { + granted = false; + break; + } + } + return new IndicesAccessControl(granted, indexPermissions); } public static class Builder { private final String name; - private 
ClusterPermission.Core cluster = ClusterPermission.Core.NONE; - private RunAsPermission.Core runAs = RunAsPermission.Core.NONE; + private ClusterPermission cluster = ClusterPermission.NONE; + private RunAsPermission runAs = RunAsPermission.NONE; private List groups = new ArrayList<>(); + private FieldPermissionsCache fieldPermissionsCache = null; - private Builder(String name) { + private Builder(String name, FieldPermissionsCache fieldPermissionsCache) { this.name = name; + this.fieldPermissionsCache = fieldPermissionsCache; } - private Builder(RoleDescriptor rd) { + private Builder(RoleDescriptor rd, @Nullable FieldPermissionsCache fieldPermissionsCache) { this.name = rd.getName(); + this.fieldPermissionsCache = fieldPermissionsCache; if (rd.getClusterPrivileges().length == 0) { - cluster = ClusterPermission.Core.NONE; + cluster = ClusterPermission.NONE; } else { - this.cluster(ClusterPrivilege.get((new Privilege.Name(rd.getClusterPrivileges())))); + this.cluster(ClusterPrivilege.get(Sets.newHashSet(rd.getClusterPrivileges()))); } - groups.addAll(convertFromIndicesPrivileges(rd.getIndicesPrivileges())); + groups.addAll(convertFromIndicesPrivileges(rd.getIndicesPrivileges(), fieldPermissionsCache)); String[] rdRunAs = rd.getRunAs(); if (rdRunAs != null && rdRunAs.length > 0) { - this.runAs(new GeneralPrivilege(new Privilege.Name(rdRunAs), rdRunAs)); + this.runAs(new Privilege(Sets.newHashSet(rdRunAs), rdRunAs)); } } - // FIXME we should throw an exception if we have already set cluster or runAs... public Builder cluster(ClusterPrivilege privilege) { - cluster = new ClusterPermission.Core(privilege); + cluster = new ClusterPermission(privilege); return this; } - public Builder runAs(GeneralPrivilege privilege) { - runAs = new RunAsPermission.Core(privilege); + public Builder runAs(Privilege privilege) { + runAs = new RunAsPermission(privilege); return this; } public Builder add(IndexPrivilege privilege, String... 
indices) { - groups.add(new IndicesPermission.Group(privilege, new FieldPermissions(), null, indices)); + groups.add(new IndicesPermission.Group(privilege, FieldPermissions.DEFAULT, null, indices)); return this; } - public Builder add(FieldPermissions fieldPermissions, BytesReference query, IndexPrivilege privilege, String... indices) { + public Builder add(FieldPermissions fieldPermissions, Set query, IndexPrivilege privilege, String... indices) { groups.add(new IndicesPermission.Group(privilege, fieldPermissions, query, indices)); return this; } public Role build() { - IndicesPermission.Core indices = groups.isEmpty() ? IndicesPermission.Core.NONE : - new IndicesPermission.Core(groups.toArray(new IndicesPermission.Group[groups.size()])); + IndicesPermission indices = groups.isEmpty() ? IndicesPermission.NONE : + new IndicesPermission(groups.toArray(new IndicesPermission.Group[groups.size()])); return new Role(name, cluster, indices, runAs); } - static List convertFromIndicesPrivileges(RoleDescriptor.IndicesPrivileges[] indicesPrivileges) { + static List convertFromIndicesPrivileges(RoleDescriptor.IndicesPrivileges[] indicesPrivileges, + @Nullable FieldPermissionsCache fieldPermissionsCache) { List list = new ArrayList<>(indicesPrivileges.length); for (RoleDescriptor.IndicesPrivileges privilege : indicesPrivileges) { - list.add(new IndicesPermission.Group(IndexPrivilege.get(new Privilege.Name(privilege.getPrivileges())), - privilege.getFieldPermissions(), - privilege.getQuery(), + final FieldPermissions fieldPermissions = fieldPermissionsCache != null ? + fieldPermissionsCache.getFieldPermissions(privilege.getGrantedFields(), privilege.getDeniedFields()) : + new FieldPermissions(privilege.getGrantedFields(), privilege.getDeniedFields()); + final Set query = privilege.getQuery() == null ? 
null : Collections.singleton(privilege.getQuery()); + list.add(new IndicesPermission.Group(IndexPrivilege.get(Sets.newHashSet(privilege.getPrivileges())), + fieldPermissions, + query, privilege.getIndices())); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/RunAsPermission.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/RunAsPermission.java index b517e46d666..e864ccb4e9e 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/RunAsPermission.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/RunAsPermission.java @@ -5,76 +5,28 @@ */ package org.elasticsearch.xpack.security.authz.permission; -import org.elasticsearch.xpack.security.authz.privilege.GeneralPrivilege; +import org.elasticsearch.xpack.security.authz.privilege.Privilege; -import java.util.List; import java.util.function.Predicate; /** * A permissions that is based on a general privilege that contains patterns of users that this * user can execute a request as */ -public interface RunAsPermission extends Permission { +public final class RunAsPermission { + + public static final RunAsPermission NONE = new RunAsPermission(Privilege.NONE); + + private final Predicate predicate; + + RunAsPermission(Privilege privilege) { + this.predicate = privilege.predicate(); + } /** * Checks if this permission grants run as to the specified user */ - boolean check(String username); - - class Core implements RunAsPermission { - - public static final Core NONE = new Core(GeneralPrivilege.NONE); - - private final GeneralPrivilege privilege; - private final Predicate predicate; - - public Core(GeneralPrivilege privilege) { - this.privilege = privilege; - this.predicate = privilege.predicate(); - } - - @Override - public boolean check(String username) { - return predicate.test(username); - } - - @Override - public boolean isEmpty() { - return this == NONE; - } - } - - class 
Globals implements RunAsPermission { - private final List globals; - - public Globals(List globals) { - this.globals = globals; - } - - @Override - public boolean check(String username) { - if (globals == null) { - return false; - } - for (GlobalPermission global : globals) { - if (global.runAs().check(username)) { - return true; - } - } - return false; - } - - @Override - public boolean isEmpty() { - if (globals == null || globals.isEmpty()) { - return true; - } - for (GlobalPermission global : globals) { - if (!global.isEmpty()) { - return false; - } - } - return true; - } + public boolean check(String username) { + return predicate.test(username); } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/SuperuserRole.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/SuperuserRole.java deleted file mode 100644 index 181902bf02a..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/SuperuserRole.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.GeneralPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.Privilege.Name; -import org.elasticsearch.xpack.security.support.MetadataUtils; - -public class SuperuserRole extends Role { - - public static final String NAME = "superuser"; - public static final RoleDescriptor DESCRIPTOR = new RoleDescriptor(NAME, new String[] { "all" }, - new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build()}, - new String[] { "*" }, - MetadataUtils.DEFAULT_RESERVED_METADATA); - public static final SuperuserRole INSTANCE = new SuperuserRole(); - - private SuperuserRole() { - super(DESCRIPTOR.getName(), - new ClusterPermission.Core(ClusterPrivilege.get(new Name(DESCRIPTOR.getClusterPrivileges()))), - new IndicesPermission.Core(Role.Builder.convertFromIndicesPrivileges(DESCRIPTOR.getIndicesPrivileges())), - new RunAsPermission.Core(GeneralPrivilege.ALL)); - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/TransportClientRole.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/TransportClientRole.java deleted file mode 100644 index 30c9a9cd7de..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/permission/TransportClientRole.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.Privilege.Name; -import org.elasticsearch.xpack.security.support.MetadataUtils; - -/** - * Reserved role for the transport client - */ -public class TransportClientRole extends Role { - - public static final String NAME = "transport_client"; - private static final String[] CLUSTER_PRIVILEGES = new String[] { "transport_client" }; - - public static final RoleDescriptor DESCRIPTOR = - new RoleDescriptor(NAME, CLUSTER_PRIVILEGES, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA); - public static final TransportClientRole INSTANCE = new TransportClientRole(); - - private TransportClientRole() { - super(DESCRIPTOR.getName(), - new ClusterPermission.Core(ClusterPrivilege.get(new Name(DESCRIPTOR.getClusterPrivileges()))), - IndicesPermission.Core.NONE, RunAsPermission.Core.NONE); - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/AbstractAutomatonPrivilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/AbstractAutomatonPrivilege.java deleted file mode 100644 index 9ac88d28c41..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/AbstractAutomatonPrivilege.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.privilege; - -import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.xpack.security.support.AutomatonPredicate; -import org.elasticsearch.xpack.security.support.Automatons; - -import java.util.function.Predicate; - -import static org.apache.lucene.util.automaton.Operations.subsetOf; -import static org.elasticsearch.xpack.security.support.Automatons.patterns; - -@SuppressWarnings("unchecked") -abstract class AbstractAutomatonPrivilege

> extends Privilege

{ - - protected final Automaton automaton; - - AbstractAutomatonPrivilege(String name, String... patterns) { - super(new Name(name)); - this.automaton = patterns(patterns); - } - - AbstractAutomatonPrivilege(Name name, String... patterns) { - super(name); - this.automaton = patterns(patterns); - } - - AbstractAutomatonPrivilege(Name name, Automaton automaton) { - super(name); - this.automaton = automaton; - } - - @Override - public Predicate predicate() { - return new AutomatonPredicate(automaton); - } - - protected P plus(P other) { - if (other.implies((P) this)) { - return other; - } - if (this.implies(other)) { - return (P) this; - } - return create(name.add(other.name), Automatons.unionAndDeterminize(automaton, other.automaton)); - } - - @Override - public boolean implies(P other) { - return subsetOf(other.automaton, automaton); - } - - @Override - public String toString() { - return name.toString(); - } - - protected abstract P create(Name name, Automaton automaton); - - protected abstract P none(); - - -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/ClusterPrivilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/ClusterPrivilege.java index 19f2b41037d..a22d2e73ef5 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/ClusterPrivilege.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/ClusterPrivilege.java @@ -7,30 +7,33 @@ package org.elasticsearch.xpack.security.authz.privilege; import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.xpack.security.support.Automatons; +import java.util.Collections; +import java.util.HashSet; import java.util.Locale; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArraySet; 
import java.util.function.Predicate; -import static org.elasticsearch.xpack.security.support.Automatons.minusAndDeterminize; +import static org.elasticsearch.xpack.security.support.Automatons.minusAndMinimize; import static org.elasticsearch.xpack.security.support.Automatons.patterns; -public class ClusterPrivilege extends AbstractAutomatonPrivilege { +public final class ClusterPrivilege extends Privilege { // shared automatons private static final Automaton MANAGE_SECURITY_AUTOMATON = patterns("cluster:admin/xpack/security/*"); private static final Automaton MONITOR_AUTOMATON = patterns("cluster:monitor/*"); private static final Automaton ALL_CLUSTER_AUTOMATON = patterns("cluster:*", "indices:admin/template/*"); - private static final Automaton MANAGE_AUTOMATON = minusAndDeterminize(ALL_CLUSTER_AUTOMATON, MANAGE_SECURITY_AUTOMATON); + private static final Automaton MANAGE_AUTOMATON = minusAndMinimize(ALL_CLUSTER_AUTOMATON, MANAGE_SECURITY_AUTOMATON); private static final Automaton TRANSPORT_CLIENT_AUTOMATON = patterns("cluster:monitor/nodes/liveness", "cluster:monitor/state"); private static final Automaton MANAGE_IDX_TEMPLATE_AUTOMATON = patterns("indices:admin/template/*"); private static final Automaton MANAGE_INGEST_PIPELINE_AUTOMATON = patterns("cluster:admin/ingest/pipeline/*"); - public static final ClusterPrivilege NONE = new ClusterPrivilege(Name.NONE, Automatons.EMPTY); - public static final ClusterPrivilege ALL = new ClusterPrivilege(Name.ALL, ALL_CLUSTER_AUTOMATON); + public static final ClusterPrivilege NONE = new ClusterPrivilege("none", Automatons.EMPTY); + public static final ClusterPrivilege ALL = new ClusterPrivilege("all", ALL_CLUSTER_AUTOMATON); public static final ClusterPrivilege MONITOR = new ClusterPrivilege("monitor", MONITOR_AUTOMATON); public static final ClusterPrivilege MANAGE = new ClusterPrivilege("manage", MANAGE_AUTOMATON); public static final ClusterPrivilege MANAGE_IDX_TEMPLATES = @@ -43,89 +46,69 @@ public class ClusterPrivilege 
extends AbstractAutomatonPrivilege ACTION_MATCHER = ClusterPrivilege.ALL.predicate(); - private static final Set values = new CopyOnWriteArraySet<>(); + private static final Map VALUES = MapBuilder.newMapBuilder() + .put("none", NONE) + .put("all", ALL) + .put("monitor", MONITOR) + .put("manage", MANAGE) + .put("manage_index_templates", MANAGE_IDX_TEMPLATES) + .put("manage_ingest_pipelines", MANAGE_INGEST_PIPELINES) + .put("transport_client", TRANSPORT_CLIENT) + .put("manage_security", MANAGE_SECURITY) + .put("manage_pipeline", MANAGE_PIPELINE) + .immutableMap(); - static { - values.add(NONE); - values.add(ALL); - values.add(MONITOR); - values.add(MANAGE); - values.add(MANAGE_IDX_TEMPLATES); - values.add(MANAGE_INGEST_PIPELINES); - values.add(TRANSPORT_CLIENT); - values.add(MANAGE_SECURITY); - values.add(MANAGE_PIPELINE); - } - - static Set values() { - return values; - } - - private static final ConcurrentHashMap cache = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap, ClusterPrivilege> CACHE = new ConcurrentHashMap<>(); private ClusterPrivilege(String name, String... patterns) { super(name, patterns); } private ClusterPrivilege(String name, Automaton automaton) { - super(new Name(name), automaton); + super(Collections.singleton(name), automaton); } - private ClusterPrivilege(Name name, Automaton automaton) { + private ClusterPrivilege(Set name, Automaton automaton) { super(name, automaton); } - public static void addCustom(String name, String... actionPatterns) { - for (String pattern : actionPatterns) { - if (!ClusterPrivilege.ACTION_MATCHER.test(pattern)) { - throw new IllegalArgumentException("cannot register custom cluster privilege [" + name + "]. 
" + - "cluster action must follow the 'cluster:*' format"); + public static ClusterPrivilege get(Set name) { + if (name == null || name.isEmpty()) { + return NONE; + } + return CACHE.computeIfAbsent(name, ClusterPrivilege::resolve); + } + + private static ClusterPrivilege resolve(Set name) { + final int size = name.size(); + if (size == 0) { + throw new IllegalArgumentException("empty set should not be used"); + } + + Set actions = new HashSet<>(); + Set automata = new HashSet<>(); + for (String part : name) { + part = part.toLowerCase(Locale.ROOT); + if (ACTION_MATCHER.test(part)) { + actions.add(actionToPattern(part)); + } else { + ClusterPrivilege privilege = VALUES.get(part); + if (privilege != null && size == 1) { + return privilege; + } else if (privilege != null) { + automata.add(privilege.automaton); + } else { + throw new IllegalArgumentException("unknown cluster privilege [" + name + "]. a privilege must be either " + + "one of the predefined fixed cluster privileges [" + + Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available " + + "cluster actions"); + } } } - ClusterPrivilege custom = new ClusterPrivilege(name, actionPatterns); - if (values.contains(custom)) { - throw new IllegalArgumentException("cannot register custom cluster privilege [" + name + "] as it already exists."); + + if (actions.isEmpty() == false) { + automata.add(patterns(actions)); } - values.add(custom); - } - - @Override - protected ClusterPrivilege create(Name name, Automaton automaton) { - return new ClusterPrivilege(name, automaton); - } - - @Override - protected ClusterPrivilege none() { - return NONE; - } - - public static ClusterPrivilege action(String action) { - String pattern = actionToPattern(action); - return new ClusterPrivilege(action, pattern); - } - - public static ClusterPrivilege get(Name name) { - return cache.computeIfAbsent(name, (theName) -> { - ClusterPrivilege cluster = NONE; - for (String part : theName.parts) 
{ - cluster = cluster == NONE ? resolve(part) : cluster.plus(resolve(part)); - } - return cluster; - }); - } - - private static ClusterPrivilege resolve(String name) { - name = name.toLowerCase(Locale.ROOT); - if (ACTION_MATCHER.test(name)) { - return action(name); - } - for (ClusterPrivilege cluster : values) { - if (name.equals(cluster.name.toString())) { - return cluster; - } - } - throw new IllegalArgumentException("unknown cluster privilege [" + name + "]. a privilege must be either " + - "one of the predefined fixed cluster privileges [" + Strings.collectionToCommaDelimitedString(values) + - "] or a pattern over one of the available cluster actions"); + return new ClusterPrivilege(name, Automatons.unionAndMinimize(automata)); } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/GeneralPrivilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/GeneralPrivilege.java deleted file mode 100644 index 2b4864c4540..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/GeneralPrivilege.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authz.privilege; - -import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.xpack.security.support.Automatons; - -public class GeneralPrivilege extends AbstractAutomatonPrivilege { - - public static final GeneralPrivilege NONE = new GeneralPrivilege(Name.NONE, Automatons.EMPTY); - public static final GeneralPrivilege ALL = new GeneralPrivilege(Name.ALL, Automatons.MATCH_ALL); - - public GeneralPrivilege(String name, String... patterns) { - super(name, patterns); - } - - public GeneralPrivilege(Name name, String... 
patterns) { - super(name, patterns); - } - - public GeneralPrivilege(Name name, Automaton automaton) { - super(name, automaton); - } - - @Override - protected GeneralPrivilege create(Name name, Automaton automaton) { - return new GeneralPrivilege(name, automaton); - } - - @Override - protected GeneralPrivilege none() { - return NONE; - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/HealthAndStatsPrivilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/HealthAndStatsPrivilege.java index 654925c2503..1130469b901 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/HealthAndStatsPrivilege.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/HealthAndStatsPrivilege.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.security.authz.privilege; -public class HealthAndStatsPrivilege extends GeneralPrivilege { +public final class HealthAndStatsPrivilege extends Privilege { public static final HealthAndStatsPrivilege INSTANCE = new HealthAndStatsPrivilege(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/IndexPrivilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/IndexPrivilege.java index 6bf2a392ab7..06c15419fc5 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/IndexPrivilege.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/IndexPrivilege.java @@ -20,18 +20,22 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.xpack.security.support.Automatons; +import 
java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.Locale; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArraySet; import java.util.function.Predicate; import static org.elasticsearch.xpack.security.support.Automatons.patterns; -import static org.elasticsearch.xpack.security.support.Automatons.unionAndDeterminize; +import static org.elasticsearch.xpack.security.support.Automatons.unionAndMinimize; -public class IndexPrivilege extends AbstractAutomatonPrivilege { +public final class IndexPrivilege extends Privilege { private static final Automaton ALL_AUTOMATON = patterns("indices:*"); private static final Automaton READ_AUTOMATON = patterns("indices:data/read/*"); @@ -41,15 +45,16 @@ public class IndexPrivilege extends AbstractAutomatonPrivilege { private static final Automaton DELETE_AUTOMATON = patterns("indices:data/write/delete*"); private static final Automaton WRITE_AUTOMATON = patterns("indices:data/write/*", PutMappingAction.NAME); private static final Automaton MONITOR_AUTOMATON = patterns("indices:monitor/*"); - private static final Automaton MANAGE_AUTOMATON = unionAndDeterminize(MONITOR_AUTOMATON, patterns("indices:admin/*")); + private static final Automaton MANAGE_AUTOMATON = + unionAndMinimize(Arrays.asList(MONITOR_AUTOMATON, patterns("indices:admin/*"))); private static final Automaton CREATE_INDEX_AUTOMATON = patterns(CreateIndexAction.NAME); private static final Automaton DELETE_INDEX_AUTOMATON = patterns(DeleteIndexAction.NAME); private static final Automaton VIEW_METADATA_AUTOMATON = patterns(GetAliasesAction.NAME, AliasesExistAction.NAME, GetIndexAction.NAME, IndicesExistsAction.NAME, GetFieldMappingsAction.NAME + "*", GetMappingsAction.NAME, ClusterSearchShardsAction.NAME, TypesExistsAction.NAME, ValidateQueryAction.NAME + "*", GetSettingsAction.NAME); - public static final IndexPrivilege NONE = new 
IndexPrivilege(Name.NONE, Automatons.EMPTY); - public static final IndexPrivilege ALL = new IndexPrivilege(Name.ALL, ALL_AUTOMATON); + public static final IndexPrivilege NONE = new IndexPrivilege("none", Automatons.EMPTY); + public static final IndexPrivilege ALL = new IndexPrivilege("all", ALL_AUTOMATON); public static final IndexPrivilege READ = new IndexPrivilege("read", READ_AUTOMATON); public static final IndexPrivilege CREATE = new IndexPrivilege("create", CREATE_AUTOMATON); public static final IndexPrivilege INDEX = new IndexPrivilege("index", INDEX_AUTOMATON); @@ -61,106 +66,78 @@ public class IndexPrivilege extends AbstractAutomatonPrivilege { public static final IndexPrivilege CREATE_INDEX = new IndexPrivilege("create_index", CREATE_INDEX_AUTOMATON); public static final IndexPrivilege VIEW_METADATA = new IndexPrivilege("view_index_metadata", VIEW_METADATA_AUTOMATON); - private static final Set values = new CopyOnWriteArraySet<>(); - - static { - values.add(NONE); - values.add(ALL); - values.add(MANAGE); - values.add(CREATE_INDEX); - values.add(MONITOR); - values.add(READ); - values.add(INDEX); - values.add(DELETE); - values.add(WRITE); - values.add(CREATE); - values.add(DELETE_INDEX); - values.add(VIEW_METADATA); - } + private static final Map VALUES = MapBuilder.newMapBuilder() + .put("none", NONE) + .put("all", ALL) + .put("manage", MANAGE) + .put("create_index", CREATE_INDEX) + .put("monitor", MONITOR) + .put("read", READ) + .put("index", INDEX) + .put("delete", DELETE) + .put("write", WRITE) + .put("create", CREATE) + .put("delete_index", DELETE_INDEX) + .put("view_index_metadata", VIEW_METADATA) + .immutableMap(); public static final Predicate ACTION_MATCHER = ALL.predicate(); public static final Predicate CREATE_INDEX_MATCHER = CREATE_INDEX.predicate(); - static Set values() { - return values; - } - - private static final ConcurrentHashMap cache = new ConcurrentHashMap<>(); - - private IndexPrivilege(String name, String... 
patterns) { - super(name, patterns); - } + private static final ConcurrentHashMap, IndexPrivilege> CACHE = new ConcurrentHashMap<>(); private IndexPrivilege(String name, Automaton automaton) { - super(new Name(name), automaton); + super(Collections.singleton(name), automaton); } - private IndexPrivilege(Name name, Automaton automaton) { + private IndexPrivilege(Set name, Automaton automaton) { super(name, automaton); } - public static void addCustom(String name, String... actionPatterns) { - for (String pattern : actionPatterns) { - if (!IndexPrivilege.ACTION_MATCHER.test(pattern)) { - throw new IllegalArgumentException("cannot register custom index privilege [" + name + "]." + - " index action must follow the 'indices:*' format"); + public static IndexPrivilege get(Set name) { + return CACHE.computeIfAbsent(name, (theName) -> { + if (theName.isEmpty()) { + return NONE; + } else { + return resolve(theName); } - } - IndexPrivilege custom = new IndexPrivilege(name, actionPatterns); - if (values.contains(custom)) { - throw new IllegalArgumentException("cannot register custom index privilege [" + name + "] as it already exists."); - } - values.add(custom); - } - - @Override - protected IndexPrivilege create(Name name, Automaton automaton) { - if (name == Name.NONE) { - return NONE; - } - return new IndexPrivilege(name, automaton); - } - - @Override - protected IndexPrivilege none() { - return NONE; - } - - public static IndexPrivilege action(String action) { - return new IndexPrivilege(action, actionToPattern(action)); - } - - public static IndexPrivilege get(Name name) { - return cache.computeIfAbsent(name, (theName) -> { - IndexPrivilege index = NONE; - for (String part : theName.parts) { - index = index == NONE ? resolve(part) : index.plus(resolve(part)); - } - return index; }); } - public static IndexPrivilege union(IndexPrivilege... 
indices) { - IndexPrivilege result = NONE; - for (IndexPrivilege index : indices) { - result = result.plus(index); + private static IndexPrivilege resolve(Set name) { + final int size = name.size(); + if (size == 0) { + throw new IllegalArgumentException("empty set should not be used"); } - return result; - } - private static IndexPrivilege resolve(String name) { - name = name.toLowerCase(Locale.ROOT); - if (ACTION_MATCHER.test(name)) { - return action(name); - } - for (IndexPrivilege index : values) { - if (name.toLowerCase(Locale.ROOT).equals(index.name.toString())) { - return index; + Set actions = new HashSet<>(); + Set automata = new HashSet<>(); + for (String part : name) { + part = part.toLowerCase(Locale.ROOT); + if (ACTION_MATCHER.test(part)) { + actions.add(actionToPattern(part)); + } else { + IndexPrivilege indexPrivilege = VALUES.get(part); + if (indexPrivilege != null && size == 1) { + return indexPrivilege; + } else if (indexPrivilege != null) { + automata.add(indexPrivilege.automaton); + } else { + throw new IllegalArgumentException("unknown index privilege [" + name + "]. a privilege must be either " + + "one of the predefined fixed indices privileges [" + + Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available index" + + " actions"); + } } } - throw new IllegalArgumentException("unknown index privilege [" + name + "]. 
a privilege must be either " + - "one of the predefined fixed indices privileges [" + Strings.collectionToCommaDelimitedString(values) + - "] or a pattern over one of the available index actions"); + + if (actions.isEmpty() == false) { + automata.add(patterns(actions)); + } + return new IndexPrivilege(name, Automatons.unionAndMinimize(automata)); } + static Map values() { + return VALUES; + } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/Privilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/Privilege.java index 14a2f4283a2..382bea5fe2b 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/Privilege.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/Privilege.java @@ -5,36 +5,44 @@ */ package org.elasticsearch.xpack.security.authz.privilege; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.set.Sets; +import org.apache.lucene.util.automaton.Automaton; +import org.elasticsearch.xpack.security.support.Automatons; -import java.util.HashSet; +import java.util.Collections; import java.util.Set; import java.util.function.Predicate; -import static java.util.Collections.singleton; -import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.elasticsearch.xpack.security.support.Automatons.patterns; -public abstract class Privilege

> { +public class Privilege { - protected final Name name; + public static final Privilege NONE = new Privilege(Collections.singleton("none"), Automatons.EMPTY); + public static final Privilege ALL = new Privilege(Collections.singleton("all"), Automatons.MATCH_ALL); - Privilege(Name name) { - this.name = name; + protected final Set name; + protected final Automaton automaton; + protected final Predicate predicate; + + public Privilege(String name, String... patterns) { + this(Collections.singleton(name), patterns); } - public Name name() { + public Privilege(Set name, String... patterns) { + this(name, patterns(patterns)); + } + + public Privilege(Set name, Automaton automaton) { + this.name = name; + this.automaton = automaton; + this.predicate = Automatons.predicate(automaton); + } + + public Set name() { return name; } - public abstract Predicate predicate(); - - public abstract boolean implies(P other); - - @SuppressWarnings("unchecked") - public boolean isAlias(P other) { - return this.implies(other) && other.implies((P) this); + public Predicate predicate() { + return predicate; } @Override @@ -58,54 +66,12 @@ public abstract class Privilege

> { return text + "*"; } - public static class Name { + @Override + public String toString() { + return name.toString(); + } - public static final Name NONE = new Name("none"); - public static final Name ALL = new Name("all"); - - final Set parts; - - public Name(String name) { - assert name != null && !name.contains(","); - parts = singleton(name); - } - - public Name(Set parts) { - assert !parts.isEmpty(); - this.parts = unmodifiableSet(new HashSet<>(parts)); - } - - public Name(String... parts) { - this(unmodifiableSet(newHashSet(parts))); - } - - @Override - public String toString() { - return Strings.collectionToCommaDelimitedString(parts); - } - - public Name add(Name other) { - return new Name(Sets.union(parts, other.parts)); - } - - public Name remove(Name other) { - Set parts = Sets.difference(this.parts, other.parts); - return parts.isEmpty() ? NONE : new Name(parts); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Name name = (Name) o; - - return parts.equals(name.parts); - } - - @Override - public int hashCode() { - return parts.hashCode(); - } + public Automaton getAutomaton() { + return automaton; } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java index 2b4e2d93b1c..779792b8ae3 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java @@ -5,35 +5,29 @@ */ package org.elasticsearch.xpack.security.authz.privilege; -import org.elasticsearch.xpack.security.support.AutomatonPredicate; +import org.elasticsearch.xpack.security.support.Automatons; +import java.util.Collections; import java.util.function.Predicate; -import static 
org.elasticsearch.xpack.security.support.Automatons.patterns; - -public class SystemPrivilege extends Privilege { +public final class SystemPrivilege extends Privilege { public static SystemPrivilege INSTANCE = new SystemPrivilege(); - protected static final Predicate PREDICATE = new AutomatonPredicate(patterns( + private static final Predicate PREDICATE = Automatons.predicate( "internal:*", "indices:monitor/*", // added for monitoring "cluster:monitor/*", // added for monitoring "cluster:admin/reroute", // added for DiskThresholdDecider.DiskListener "indices:admin/mapping/put" // needed for recovery and shrink api - )); + ); - SystemPrivilege() { - super(new Name("internal")); + private SystemPrivilege() { + super(Collections.singleton("internal")); } @Override public Predicate predicate() { return PREDICATE; } - - @Override - public boolean implies(SystemPrivilege other) { - return true; - } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index 2cc09de0722..bbdf5bc44b7 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -6,12 +6,41 @@ package org.elasticsearch.xpack.security.authz.store; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ReleasableLock; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.security.authz.privilege.Privilege; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.security.Security.setting; /** * A composite roles store that combines built in roles, file-based roles, and index-based roles. Checks the built in roles first, then the @@ -19,44 +48,199 @@ import java.util.Map; */ public class CompositeRolesStore extends AbstractComponent { + // the lock is used in an odd manner; when iterating over the cache we cannot have modifiers other than deletes using + // the iterator but when not iterating we can modify the cache without external locking. 
When making normal modifications to the cache + // the read lock is obtained so that we can allow concurrent modifications; however when we need to iterate over the keys or values of + // the cache the write lock must obtained to prevent any modifications + private final ReleasableLock readLock; + private final ReleasableLock writeLock; + + { + final ReadWriteLock iterationLock = new ReentrantReadWriteLock(); + readLock = new ReleasableLock(iterationLock.readLock()); + writeLock = new ReleasableLock(iterationLock.writeLock()); + } + + public static final Setting CACHE_SIZE_SETTING = + Setting.intSetting(setting("authz.store.roles.cache.max_size"), 10000, Property.NodeScope); + private final FileRolesStore fileRolesStore; private final NativeRolesStore nativeRolesStore; private final ReservedRolesStore reservedRolesStore; + private final XPackLicenseState licenseState; + private final Cache, Role> roleCache; + private final Set negativeLookupCache; + private final AtomicLong numInvalidation = new AtomicLong(); public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, NativeRolesStore nativeRolesStore, - ReservedRolesStore reservedRolesStore) { + ReservedRolesStore reservedRolesStore, XPackLicenseState licenseState) { super(settings); this.fileRolesStore = fileRolesStore; + // invalidating all on a file based role update is heavy handed to say the least, but in general this should be infrequent so the + // impact isn't really worth the added complexity of only clearing the changed values + fileRolesStore.addListener(this::invalidateAll); this.nativeRolesStore = nativeRolesStore; this.reservedRolesStore = reservedRolesStore; - } - - private Role getBuildInRole(String role) { - // builtins first - Role builtIn = reservedRolesStore.role(role); - if (builtIn != null) { - logger.trace("loaded role [{}] from reserved roles store", role); - return builtIn; + this.licenseState = licenseState; + CacheBuilder, Role> builder = CacheBuilder.builder(); + final 
int cacheSize = CACHE_SIZE_SETTING.get(settings); + if (cacheSize >= 0) { + builder.setMaximumWeight(cacheSize); } - - // Try the file next, then the index if it isn't there - Role fileRole = fileRolesStore.role(role); - if (fileRole != null) { - logger.trace("loaded role [{}] from file roles store", role); - return fileRole; - } - return null; + this.roleCache = builder.build(); + this.negativeLookupCache = ConcurrentCollections.newConcurrentSet(); } - public void roles(String role, ActionListener roleActionListener) { - Role storedRole = getBuildInRole(role); - if (storedRole == null) { - nativeRolesStore.role(role, roleActionListener); + public void roles(Set roleNames, FieldPermissionsCache fieldPermissionsCache, ActionListener roleActionListener) { + Role existing = roleCache.get(roleNames); + if (existing != null) { + roleActionListener.onResponse(existing); } else { - roleActionListener.onResponse(storedRole); + final long invalidationCounter = numInvalidation.get(); + roleDescriptors(roleNames, ActionListener.wrap( + (descriptors) -> { + final Role role; + if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) { + role = buildRoleFromDescriptors(descriptors, fieldPermissionsCache); + } else { + final Set filtered = descriptors.stream() + .filter((rd) -> rd.isUsingDocumentOrFieldLevelSecurity() == false) + .collect(Collectors.toSet()); + role = buildRoleFromDescriptors(filtered, fieldPermissionsCache); + } + + if (role != null) { + try (ReleasableLock ignored = readLock.acquire()) { + /* this is kinda spooky. We use a read/write lock to ensure we don't modify the cache if we hold the write + * lock (fetching stats for instance - which is kinda overkill?) but since we are fetching stuff in an async + * fashion we need to make sure that if the cache got invalidated since we started the request we don't + * put a potentially stale result in the cache, hence the numInvalidation.get() comparison to the number of + * invalidation when we started.
we just try to be on the safe side and don't cache potentially stale + * results*/ + if (invalidationCounter == numInvalidation.get()) { + roleCache.computeIfAbsent(roleNames, (s) -> role); + } + } + } + roleActionListener.onResponse(role); + }, + roleActionListener::onFailure)); } } + private void roleDescriptors(Set roleNames, ActionListener> roleDescriptorActionListener) { + final Set filteredRoleNames = + roleNames.stream().filter((s) -> negativeLookupCache.contains(s) == false).collect(Collectors.toSet()); + final Set builtInRoleDescriptors = getBuiltInRoleDescriptors(filteredRoleNames); + Set remainingRoleNames = difference(filteredRoleNames, builtInRoleDescriptors); + if (remainingRoleNames.isEmpty()) { + roleDescriptorActionListener.onResponse(Collections.unmodifiableSet(builtInRoleDescriptors)); + } else { + nativeRolesStore.getRoleDescriptors(remainingRoleNames.toArray(Strings.EMPTY_ARRAY), ActionListener.wrap((descriptors) -> { + builtInRoleDescriptors.addAll(descriptors); + if (builtInRoleDescriptors.size() != filteredRoleNames.size()) { + final Set missing = difference(filteredRoleNames, builtInRoleDescriptors); + assert missing.isEmpty() == false : "the missing set should not be empty if the sizes didn't match"; + negativeLookupCache.addAll(missing); + } + roleDescriptorActionListener.onResponse(Collections.unmodifiableSet(builtInRoleDescriptors)); + }, roleDescriptorActionListener::onFailure)); + } + } + + private Set getBuiltInRoleDescriptors(Set roleNames) { + final Set descriptors = reservedRolesStore.roleDescriptors().stream() + .filter((rd) -> roleNames.contains(rd.getName())) + .collect(Collectors.toCollection(HashSet::new)); + + final Set difference = difference(roleNames, descriptors); + if (difference.isEmpty() == false) { + descriptors.addAll(fileRolesStore.roleDescriptors(difference)); + } + + return descriptors; + } + + private Set difference(Set roleNames, Set descriptors) { + Set foundNames = 
descriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toSet()); + return Sets.difference(roleNames, foundNames); + } + + public static Role buildRoleFromDescriptors(Set roleDescriptors, FieldPermissionsCache fieldPermissionsCache) { + if (roleDescriptors.isEmpty()) { + return Role.EMPTY; + } + StringBuilder nameBuilder = new StringBuilder(); + Set clusterPrivileges = new HashSet<>(); + Set runAs = new HashSet<>(); + Map, MergeableIndicesPrivilege> indicesPrivilegesMap = new HashMap<>(); + for (RoleDescriptor descriptor : roleDescriptors) { + nameBuilder.append(descriptor.getName()); + nameBuilder.append('_'); + if (descriptor.getClusterPrivileges() != null) { + clusterPrivileges.addAll(Arrays.asList(descriptor.getClusterPrivileges())); + } + if (descriptor.getRunAs() != null) { + runAs.addAll(Arrays.asList(descriptor.getRunAs())); + } + IndicesPrivileges[] indicesPrivileges = descriptor.getIndicesPrivileges(); + for (IndicesPrivileges indicesPrivilege : indicesPrivileges) { + Set key = Sets.newHashSet(indicesPrivilege.getIndices()); + // if an index privilege is an explicit denial, then we treat it as non-existent since we skipped these in the past when + // merging + final boolean isExplicitDenial = + indicesPrivileges.length == 1 && "none".equalsIgnoreCase(indicesPrivilege.getPrivileges()[0]); + if (isExplicitDenial == false) { + indicesPrivilegesMap.compute(key, (k, value) -> { + if (value == null) { + return new MergeableIndicesPrivilege(indicesPrivilege.getIndices(), indicesPrivilege.getPrivileges(), + indicesPrivilege.getGrantedFields(), indicesPrivilege.getDeniedFields(), indicesPrivilege.getQuery()); + } else { + value.merge(new MergeableIndicesPrivilege(indicesPrivilege.getIndices(), indicesPrivilege.getPrivileges(), + indicesPrivilege.getGrantedFields(), indicesPrivilege.getDeniedFields(), indicesPrivilege.getQuery())); + return value; + } + }); + } + } + } + + final Set clusterPrivs = clusterPrivileges.isEmpty() ?
null : clusterPrivileges; + final Privilege runAsPrivilege = runAs.isEmpty() ? Privilege.NONE : new Privilege(runAs, runAs.toArray(Strings.EMPTY_ARRAY)); + Role.Builder builder = Role.builder(nameBuilder.toString(), fieldPermissionsCache) + .cluster(ClusterPrivilege.get(clusterPrivs)) + .runAs(runAsPrivilege); + indicesPrivilegesMap.entrySet().forEach((entry) -> { + MergeableIndicesPrivilege privilege = entry.getValue(); + builder.add(fieldPermissionsCache.getFieldPermissions(privilege.grantedFields, privilege.deniedFields), privilege.query, + IndexPrivilege.get(privilege.privileges), privilege.indices.toArray(Strings.EMPTY_ARRAY)); + }); + return builder.build(); + } + + public void invalidateAll() { + numInvalidation.incrementAndGet(); + negativeLookupCache.clear(); + try (ReleasableLock ignored = readLock.acquire()) { + roleCache.invalidateAll(); + } + } + + public void invalidate(String role) { + numInvalidation.incrementAndGet(); + + // the cache cannot be modified while doing this operation per the terms of the cache iterator + try (ReleasableLock ignored = writeLock.acquire()) { + Iterator> keyIter = roleCache.keys().iterator(); + while (keyIter.hasNext()) { + Set key = keyIter.next(); + if (key.contains(role)) { + keyIter.remove(); + } + } + } + negativeLookupCache.remove(role); + } public Map usageStats() { Map usage = new HashMap<>(2); @@ -64,4 +248,49 @@ public class CompositeRolesStore extends AbstractComponent { usage.put("native", nativeRolesStore.usageStats()); return usage; } + + /** + * A mutable class that can be used to represent the combination of one or more {@link IndicesPrivileges} + */ + private static class MergeableIndicesPrivilege { + private Set indices; + private Set privileges; + private Set grantedFields = null; + private Set deniedFields = null; + private Set query = null; + + MergeableIndicesPrivilege(String[] indices, String[] privileges, @Nullable String[] grantedFields, @Nullable String[] deniedFields, + @Nullable BytesReference 
query) { + this.indices = Sets.newHashSet(Objects.requireNonNull(indices)); + this.privileges = Sets.newHashSet(Objects.requireNonNull(privileges)); + this.grantedFields = grantedFields == null ? null : Sets.newHashSet(grantedFields); + this.deniedFields = deniedFields == null ? null : Sets.newHashSet(deniedFields); + if (query != null) { + this.query = Sets.newHashSet(query); + } + } + + void merge(MergeableIndicesPrivilege other) { + assert indices.equals(other.indices) : "index names must be equivalent in order to merge"; + this.grantedFields = combineFieldSets(this.grantedFields, other.grantedFields); + this.deniedFields = combineFieldSets(this.deniedFields, other.deniedFields); + this.privileges.addAll(other.privileges); + + if (this.query == null || other.query == null) { + this.query = null; + } else { + this.query.addAll(other.query); + } + } + + private static Set combineFieldSets(Set set, Set other) { + if (set == null || other == null) { + // null = grant all so it trumps others + return null; + } else { + set.addAll(other); + return set; + } + } + } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index c2effeb7d66..8001c8b3736 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -8,23 +8,22 @@ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.component.AbstractLifecycleComponent; +import 
org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.env.Environment; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.permission.IndicesPermission.Group; -import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.security.support.NoOpLogger; import org.elasticsearch.xpack.security.support.Validation; @@ -34,60 +33,55 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.regex.Pattern; import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; import static java.util.Collections.unmodifiableMap; -public class FileRolesStore extends AbstractLifecycleComponent { +public class FileRolesStore extends AbstractComponent { private static final Pattern IN_SEGMENT_LINE = Pattern.compile("^\\s+.+"); private static final Pattern SKIP_LINE = Pattern.compile("(^#.*|^\\s*)"); private final Path file; - private final Runnable listener; - private final ResourceWatcherService watcherService; + private final XPackLicenseState licenseState; + private final List listeners = new ArrayList<>(); - private volatile Map permissions; + private 
volatile Map permissions; - public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService) { - this(settings, env, watcherService, () -> {}); + public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState) + throws IOException { + this(settings, env, watcherService, () -> {}, licenseState); } - public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Runnable listener) { + FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Runnable listener, + XPackLicenseState licenseState) throws IOException { super(settings); this.file = resolveFile(env); - this.listener = listener; - this.watcherService = watcherService; - permissions = emptyMap(); - } - - @Override - protected void doStart() throws ElasticsearchException { + if (listener != null) { + listeners.add(listener); + } + this.licenseState = licenseState; FileWatcher watcher = new FileWatcher(file.getParent()); watcher.addListener(new FileListener()); - try { - watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH); - } catch (IOException e) { - throw new ElasticsearchException("failed to setup roles file watcher", e); - } - permissions = parseFile(file, logger, settings); + watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH); + permissions = parseFile(file, logger, settings, licenseState); } - @Override - protected void doStop() throws ElasticsearchException { - } - - @Override - protected void doClose() throws ElasticsearchException { - } - - public Role role(String role) { - return permissions.get(role); + Set roleDescriptors(Set roleNames) { + Set descriptors = new HashSet<>(); + roleNames.forEach((name) -> { + RoleDescriptor descriptor = permissions.get(name); + if (descriptor != null) { + descriptors.add(descriptor); + } + }); + return descriptors; } public Map usageStats() { @@ -96,10 +90,10 @@ public class 
FileRolesStore extends AbstractLifecycleComponent { boolean dls = false; boolean fls = false; - for (Role role : permissions.values()) { - for (Group group : role.indices()) { - fls = fls || group.getFieldPermissions().hasFieldLevelSecurity(); - dls = dls || group.hasQuery(); + for (RoleDescriptor descriptor : permissions.values()) { + for (IndicesPrivileges indicesPrivileges : descriptor.getIndicesPrivileges()) { + fls = fls || indicesPrivileges.getGrantedFields() != null || indicesPrivileges.getDeniedFields() != null; + dls = dls || indicesPrivileges.getQuery() != null; } if (fls && dls) { break; @@ -111,44 +105,53 @@ public class FileRolesStore extends AbstractLifecycleComponent { return usageStats; } + void addListener(Runnable runnable) { + Objects.requireNonNull(runnable); + synchronized (this) { + listeners.add(runnable); + } + } + public static Path resolveFile(Environment env) { return XPackPlugin.resolveConfigFile(env, "roles.yml"); } public static Set parseFileForRoleNames(Path path, Logger logger) { - Map roleMap = parseFile(path, logger, false, Settings.EMPTY); - if (roleMap == null) { - return emptySet(); - } - return roleMap.keySet(); + return parseRoleDescriptors(path, logger, false, Settings.EMPTY).keySet(); } - public static Map parseFile(Path path, Logger logger, Settings settings) { - return parseFile(path, logger, true, settings); + public static Map parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState) { + return parseFile(path, logger, true, settings, licenseState); } - public static Map parseFile(Path path, Logger logger, boolean resolvePermission, Settings settings) { + public static Map parseFile(Path path, Logger logger, boolean resolvePermission, + Settings settings, XPackLicenseState licenseState) { if (logger == null) { logger = NoOpLogger.INSTANCE; } - Map roles = new HashMap<>(); + Map roles = new HashMap<>(); logger.debug("attempting to read roles file located at [{}]", path.toAbsolutePath()); if 
(Files.exists(path)) { try { List roleSegments = roleSegments(path); + final boolean flsDlsLicensed = licenseState.isDocumentAndFieldLevelSecurityAllowed(); for (String segment : roleSegments) { - Role role = parseRole(segment, path, logger, resolvePermission, settings); - if (role != null) { - if (ReservedRolesStore.isReserved(role.name())) { + RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings); + if (descriptor != null) { + if (ReservedRolesStore.isReserved(descriptor.getName())) { logger.warn("role [{}] is reserved. the relevant role definition in the mapping file will be ignored", - role.name()); + descriptor.getName()); + } else if (flsDlsLicensed == false && descriptor.isUsingDocumentOrFieldLevelSecurity()) { + logger.warn("role [{}] uses document and/or field level security, which is not enabled by the current license" + + ". this role will be ignored", descriptor.getName()); + // we still put the role in the map to avoid unnecessary negative lookups + roles.put(descriptor.getName(), descriptor); } else { - roles.put(role.name(), role); + roles.put(descriptor.getName(), descriptor); } } } - } catch (IOException ioe) { logger.error( (Supplier) () -> new ParameterizedMessage( @@ -166,8 +169,7 @@ public class FileRolesStore extends AbstractLifecycleComponent { return unmodifiableMap(roles); } - public static Map parseRoleDescriptors(Path path, Logger logger, - boolean resolvePermission, Settings settings) { + public static Map parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings) { if (logger == null) { logger = NoOpLogger.INSTANCE; } @@ -196,31 +198,7 @@ public class FileRolesStore extends AbstractLifecycleComponent { } @Nullable - private static Role parseRole(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings) { - RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermissions, settings); - - if (descriptor != 
null) { - String roleName = descriptor.getName(); - // first check if FLS/DLS is enabled on the role... - for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) { - - if ((privilege.getQuery() != null || privilege.getFieldPermissions().hasFieldLevelSecurity()) - && XPackSettings.DLS_FLS_ENABLED.get(settings) == false) { - logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " + - "enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path - .toAbsolutePath(), XPackSettings.DLS_FLS_ENABLED.getKey()); - return null; - } - } - return Role.builder(descriptor).build(); - } else { - return null; - } - } - - @Nullable - static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, - boolean resolvePermissions, Settings settings) { + static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings) { String roleName = null; try { // EMPTY is safe here because we never use namedObject @@ -246,7 +224,7 @@ public class FileRolesStore extends AbstractLifecycleComponent { // we pass true as last parameter because we do not want to reject files if field permissions // are given in 2.x syntax RoleDescriptor descriptor = RoleDescriptor.parse(roleName, parser, true); - return descriptor; + return checkDescriptor(descriptor, path, logger, settings); } else { logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath()); return null; @@ -282,6 +260,22 @@ public class FileRolesStore extends AbstractLifecycleComponent { return null; } + @Nullable + private static RoleDescriptor checkDescriptor(RoleDescriptor descriptor, Path path, Logger logger, Settings settings) { + String roleName = descriptor.getName(); + // first check if FLS/DLS is enabled on the role... 
+ for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) { + if ((privilege.getQuery() != null || privilege.getGrantedFields() != null || privilege.getDeniedFields() != null) + && XPackSettings.DLS_FLS_ENABLED.get(settings) == false) { + logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " + + "enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path + .toAbsolutePath(), XPackSettings.DLS_FLS_ENABLED.getKey()); + return null; + } + } + return descriptor; + } + private static List roleSegments(Path path) throws IOException { List segments = new ArrayList<>(); StringBuilder builder = null; @@ -321,7 +315,7 @@ public class FileRolesStore extends AbstractLifecycleComponent { public void onFileChanged(Path file) { if (file.equals(FileRolesStore.this.file)) { try { - permissions = parseFile(file, logger, settings); + permissions = parseFile(file, logger, settings, licenseState); logger.info("updated roles (roles file [{}] changed)", file.toAbsolutePath()); } catch (Exception e) { logger.error( @@ -329,7 +323,10 @@ public class FileRolesStore extends AbstractLifecycleComponent { "could not reload roles file [{}]. 
Current roles remain unmodified", file.toAbsolutePath()), e); return; } - listener.run(); + + synchronized (FileRolesStore.this) { + listeners.forEach(Runnable::run); + } } } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 839609a5371..537cee1259a 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.search.MultiSearchRequestBuilder; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -28,21 +27,19 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.cache.Cache; -import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.get.GetResult; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.security.SecurityTemplateService; import org.elasticsearch.xpack.security.action.role.ClearRolesCacheRequest; @@ -50,10 +47,10 @@ import org.elasticsearch.xpack.security.action.role.ClearRolesCacheResponse; import org.elasticsearch.xpack.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.permission.IndicesPermission.Group; -import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.security.client.SecurityClient; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -61,11 +58,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; @@ -92,45 +85,30 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL FAILED } + // these are no longer used, but leave them around for users upgrading private static final Setting CACHE_SIZE_SETTING = - Setting.intSetting(setting("authz.store.roles.index.cache.max_size"), 10000, Property.NodeScope); 
- private static final Setting CACHE_TTL_SETTING = - Setting.timeSetting(setting("authz.store.roles.index.cache.ttl"), TimeValue.timeValueMinutes(20), Property.NodeScope); + Setting.intSetting(setting("authz.store.roles.index.cache.max_size"), 10000, Property.NodeScope, Property.Deprecated); + private static final Setting CACHE_TTL_SETTING = Setting.timeSetting(setting("authz.store.roles.index.cache.ttl"), + TimeValue.timeValueMinutes(20), Property.NodeScope, Property.Deprecated); private static final String ROLE_DOC_TYPE = "role"; private final InternalClient client; + private final XPackLicenseState licenseState; private final AtomicReference state = new AtomicReference<>(State.INITIALIZED); private final boolean isTribeNode; - private final Cache roleCache; - // the lock is used in an odd manner; when iterating over the cache we cannot have modifiers other than deletes using - // the iterator but when not iterating we can modify the cache without external locking. When making normal modifications to the cache - // the read lock is obtained so that we can allow concurrent modifications; however when we need to iterate over the keys or values of - // the cache the write lock must obtained to prevent any modifications - private final ReleasableLock readLock; - private final ReleasableLock writeLock; - - { - final ReadWriteLock iterationLock = new ReentrantReadWriteLock(); - readLock = new ReleasableLock(iterationLock.readLock()); - writeLock = new ReleasableLock(iterationLock.writeLock()); - } private SecurityClient securityClient; - // incremented each time the cache is invalidated - private final AtomicLong numInvalidation = new AtomicLong(0); private volatile boolean securityIndexExists = false; private volatile boolean canWrite = false; - public NativeRolesStore(Settings settings, InternalClient client) { + public NativeRolesStore(Settings settings, InternalClient client, XPackLicenseState licenseState) { super(settings); this.client = client; - this.roleCache 
= CacheBuilder.builder() - .setMaximumWeight(CACHE_SIZE_SETTING.get(settings)) - .setExpireAfterWrite(CACHE_TTL_SETTING.get(settings)) - .build(); this.isTribeNode = settings.getGroups("tribe", true).isEmpty() == false; + this.securityClient = new SecurityClient(client); + this.licenseState = licenseState; } public boolean canStart(ClusterState clusterState, boolean master) { @@ -177,7 +155,6 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL public void start() { try { if (state.compareAndSet(State.INITIALIZED, State.STARTING)) { - this.securityClient = new SecurityClient(client); state.set(State.STARTED); } } catch (Exception e) { @@ -198,13 +175,13 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL public void getRoleDescriptors(String[] names, final ActionListener> listener) { if (state() != State.STARTED) { logger.trace("attempted to get roles before service was started"); - listener.onFailure(new IllegalStateException("roles cannot be retrieved as native role service has not been started")); + listener.onResponse(Collections.emptySet()); return; } if (names != null && names.length == 1) { - getRoleAndVersion(Objects.requireNonNull(names[0]), ActionListener.wrap(roleAndVersion -> - listener.onResponse(roleAndVersion == null || roleAndVersion.getRoleDescriptor() == null ? Collections.emptyList() - : Collections.singletonList(roleAndVersion.getRoleDescriptor())), listener::onFailure)); + getRoleDescriptor(Objects.requireNonNull(names[0]), ActionListener.wrap(roleDescriptor -> + listener.onResponse(roleDescriptor == null ? 
Collections.emptyList() : Collections.singletonList(roleDescriptor)), + listener::onFailure)); } else { try { QueryBuilder query; @@ -221,7 +198,8 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL .setFetchSource(true) .request(); request.indicesOptions().ignoreUnavailable(); - InternalClient.fetchAllByEntity(client, request, listener, (hit) -> transformRole(hit.getId(), hit.getSourceRef(), logger)); + InternalClient.fetchAllByEntity(client, request, listener, + (hit) -> transformRole(hit.getId(), hit.getSourceRef(), logger, licenseState)); } catch (Exception e) { logger.error((Supplier) () -> new ParameterizedMessage("unable to retrieve roles {}", Arrays.toString(names)), e); listener.onFailure(e); @@ -273,16 +251,23 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL listener.onResponse(false); } else if (isTribeNode) { listener.onFailure(new UnsupportedOperationException("roles may not be created or modified using a tribe node")); - return; - } else if (canWrite == false) { + } else if (canWrite == false) { listener.onFailure(new IllegalStateException("role cannot be created or modified as service cannot write until template and " + "mappings are up to date")); - return; + } else if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) { + innerPutRole(request, role, listener); + } else if (role.isUsingDocumentOrFieldLevelSecurity()) { + listener.onFailure(LicenseUtils.newComplianceException("field and document level security")); + } else { + innerPutRole(request, role, listener); } + } + // pkg-private for testing + void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { try { client.prepareIndex(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, role.getName()) - .setSource(role.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .setSource(role.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, false)) 
.setRefreshPolicy(request.getRefreshPolicy()) .execute(new ActionListener() { @Override @@ -303,25 +288,6 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL } } - public void role(String roleName, ActionListener listener) { - if (state() != State.STARTED) { - listener.onResponse(null); - } else { - getRoleAndVersion(roleName, new ActionListener() { - @Override - public void onResponse(RoleAndVersion roleAndVersion) { - listener.onResponse(roleAndVersion == null ? null : roleAndVersion.getRole()); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - - } - }); - } - } - public Map usageStats() { if (state() != State.STARTED) { return Collections.emptyMap(); @@ -337,23 +303,9 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL return usageStats; } + // FIXME this needs to be async long count = 0L; - try (final ReleasableLock ignored = writeLock.acquire()) { - for (RoleAndVersion rv : roleCache.values()) { - if (rv == RoleAndVersion.NON_EXISTENT) { - continue; - } - - count++; - Role role = rv.getRole(); - for (Group group : role.indices()) { - fls = fls || group.getFieldPermissions().hasFieldLevelSecurity(); - dls = dls || group.hasQuery(); - } - } - } - - // slow path - query for necessary information + // query for necessary information if (fls == false || dls == false) { MultiSearchRequestBuilder builder = client.prepareMultiSearch() .add(client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME) @@ -407,67 +359,37 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL return usageStats; } - private void getRoleAndVersion(final String roleId, ActionListener roleActionListener) { + private void getRoleDescriptor(final String roleId, ActionListener roleActionListener) { if (securityIndexExists == false) { roleActionListener.onResponse(null); } else { - RoleAndVersion cachedRoleAndVersion = roleCache.get(roleId); - if 
(cachedRoleAndVersion == null) { - final long invalidationCounter = numInvalidation.get(); - executeGetRoleRequest(roleId, new ActionListener() { - @Override - public void onResponse(GetResponse response) { - final RoleAndVersion roleAndVersion; - RoleDescriptor descriptor = transformRole(response); - if (descriptor != null) { - logger.debug("loaded role [{}] from index with version [{}]", roleId, response.getVersion()); - roleAndVersion = new RoleAndVersion(descriptor, response.getVersion()); - } else { - roleAndVersion = RoleAndVersion.NON_EXISTENT; - } + executeGetRoleRequest(roleId, new ActionListener() { + @Override + public void onResponse(GetResponse response) { + final RoleDescriptor descriptor = transformRole(response); + roleActionListener.onResponse(descriptor); + } - /* this is kinda spooky. We use a read/write lock to ensure we don't modify the cache if we hold the write - * lock (fetching stats for instance - which is kinda overkill?) but since we fetching stuff in an async - * fashion we need to make sure that if the cacht got invalidated since we started the request we don't - * put a potential stale result in the cache, hence the numInvalidation.get() comparison to the number of - * invalidation when we started. 
we just try to be on the safe side and don't cache potentially stale - * results*/ - try (final ReleasableLock ignored = readLock.acquire()) { - if (invalidationCounter == numInvalidation.get()) { - roleCache.computeIfAbsent(roleId, (k) -> roleAndVersion); - } - } catch (ExecutionException e) { - throw new AssertionError("failed to load constant non-null value", e); - } - roleActionListener.onResponse(roleAndVersion); + @Override + public void onFailure(Exception e) { + // if the index or the shard is not there / available we just claim the role is not there + if (TransportActions.isShardNotAvailableException(e)) { + logger.warn((Supplier) () -> new ParameterizedMessage("failed to load role [{}] index not available", + roleId), e); + roleActionListener.onResponse(null); + } else { + logger.error((Supplier) () -> new ParameterizedMessage("failed to load role [{}]", roleId), e); + roleActionListener.onFailure(e); } - - @Override - public void onFailure(Exception e) { - // if the index or the shard is not there / available we just claim the role is not there - if (TransportActions.isShardNotAvailableException(e)) { - logger.warn((Supplier) () -> new ParameterizedMessage("failed to load role [{}] index not available", - roleId), e); - roleActionListener.onResponse(RoleAndVersion.NON_EXISTENT); - } else { - logger.error((Supplier) () -> new ParameterizedMessage("failed to load role [{}]", roleId), e); - roleActionListener.onFailure(e); - } - } - }); - } else { - roleActionListener.onResponse(cachedRoleAndVersion); - } + } + }); } } - // pkg-private for testing - void executeGetRoleRequest(String role, ActionListener listener) { + private void executeGetRoleRequest(String role, ActionListener listener) { try { GetRequest request = client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, role).request(); - // TODO we use a threaded listener here to make sure we don't execute on a transport thread. 
This can be removed once - // all blocking operations are removed from this and NativeUserStore - client.get(request, new ThreadedActionListener<>(logger, client.threadPool(), ThreadPool.Names.LISTENER, listener, true)); + client.get(request, listener); } catch (IndexNotFoundException e) { logger.trace( (Supplier) () -> new ParameterizedMessage( @@ -487,28 +409,11 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL if (state != State.STOPPED && state != State.FAILED) { throw new IllegalStateException("can only reset if stopped!!!"); } - invalidateAll(); this.securityIndexExists = false; this.canWrite = false; this.state.set(State.INITIALIZED); } - public void invalidateAll() { - logger.debug("invalidating all roles in cache"); - numInvalidation.incrementAndGet(); - try (final ReleasableLock ignored = readLock.acquire()) { - roleCache.invalidateAll(); - } - } - - public void invalidate(String role) { - logger.debug("invalidating role [{}] in cache", role); - numInvalidation.incrementAndGet(); - try (final ReleasableLock ignored = readLock.acquire()) { - roleCache.invalidate(role); - } - } - private void clearRoleCache(final String role, ActionListener listener, Response response) { ClearRolesCacheRequest request = new ClearRolesCacheRequest().names(role); securityClient.clearRolesCache(request, new ActionListener() { @@ -543,54 +448,47 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL if (response.isExists() == false) { return null; } - return transformRole(response.getId(), response.getSourceAsBytesRef(), logger); + + return transformRole(response.getId(), response.getSourceAsBytesRef(), logger, licenseState); } @Nullable - static RoleDescriptor transformRole(String name, BytesReference sourceBytes, Logger logger) { + static RoleDescriptor transformRole(String name, BytesReference sourceBytes, Logger logger, XPackLicenseState licenseState) { try { // we pass true as last parameter because we do not 
want to reject permissions if the field permissions // are given in 2.x syntax - return RoleDescriptor.parse(name, sourceBytes, true); + RoleDescriptor roleDescriptor = RoleDescriptor.parse(name, sourceBytes, true); + if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) { + return roleDescriptor; + } else { + final boolean dlsEnabled = + Arrays.stream(roleDescriptor.getIndicesPrivileges()).anyMatch(IndicesPrivileges::isUsingDocumentLevelSecurity); + final boolean flsEnabled = + Arrays.stream(roleDescriptor.getIndicesPrivileges()).anyMatch(IndicesPrivileges::isUsingFieldLevelSecurity); + if (dlsEnabled || flsEnabled) { + List unlicensedFeatures = new ArrayList<>(2); + if (flsEnabled) { + unlicensedFeatures.add("fls"); + } + if (dlsEnabled) { + unlicensedFeatures.add("dls"); + } + Map transientMap = new HashMap<>(2); + transientMap.put("unlicensed_features", unlicensedFeatures); + transientMap.put("enabled", false); + return new RoleDescriptor(roleDescriptor.getName(), roleDescriptor.getClusterPrivileges(), + roleDescriptor.getIndicesPrivileges(), roleDescriptor.getRunAs(), roleDescriptor.getMetadata(), transientMap); + } else { + return roleDescriptor; + } + + } } catch (Exception e) { logger.error((Supplier) () -> new ParameterizedMessage("error in the format of data for role [{}]", name), e); return null; } } - private static class RoleAndVersion { - - private static final RoleAndVersion NON_EXISTENT = new RoleAndVersion(); - - private final RoleDescriptor roleDescriptor; - private final Role role; - private final long version; - - private RoleAndVersion() { - roleDescriptor = null; - role = null; - version = Long.MIN_VALUE; - } - - RoleAndVersion(RoleDescriptor roleDescriptor, long version) { - this.roleDescriptor = roleDescriptor; - this.role = Role.builder(roleDescriptor).build(); - this.version = version; - } - - RoleDescriptor getRoleDescriptor() { - return roleDescriptor; - } - - Role getRole() { - return role; - } - - long getVersion() { - return 
version; - } - } - public static void addSettings(List> settings) { settings.add(CACHE_SIZE_SETTING); settings.add(CACHE_TTL_SETTING); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java index b4079f189dc..62920e32fa4 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java @@ -5,65 +5,62 @@ */ package org.elasticsearch.xpack.security.authz.store; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Set; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.xpack.security.SecurityContext; +import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.permission.IngestAdminRole; -import org.elasticsearch.xpack.security.authz.permission.KibanaRole; -import org.elasticsearch.xpack.security.authz.permission.KibanaUserRole; -import org.elasticsearch.xpack.security.authz.permission.MonitoringUserRole; -import org.elasticsearch.xpack.security.authz.permission.RemoteMonitoringAgentRole; -import org.elasticsearch.xpack.security.authz.permission.ReportingUserRole; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; -import org.elasticsearch.xpack.security.authz.permission.TransportClientRole; +import org.elasticsearch.xpack.security.support.MetadataUtils; + import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; -import org.elasticsearch.xpack.security.user.User; public 
class ReservedRolesStore { - private static final User DEFAULT_ENABLED_KIBANA_USER = new KibanaUser(true); - private final SecurityContext securityContext; + public static final RoleDescriptor SUPERUSER_ROLE_DESCRIPTOR = new RoleDescriptor("superuser", new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build()}, + new String[] { "*" }, + MetadataUtils.DEFAULT_RESERVED_METADATA); + public static final Role SUPERUSER_ROLE = Role.builder(SUPERUSER_ROLE_DESCRIPTOR, null).build(); + private static final Map RESERVED_ROLES = initializeReservedRoles(); - public ReservedRolesStore(SecurityContext securityContext) { - this.securityContext = securityContext; - } - - public Role role(String role) { - switch (role) { - case SuperuserRole.NAME: - return SuperuserRole.INSTANCE; - case TransportClientRole.NAME: - return TransportClientRole.INSTANCE; - case KibanaUserRole.NAME: - return KibanaUserRole.INSTANCE; - case MonitoringUserRole.NAME: - return MonitoringUserRole.INSTANCE; - case RemoteMonitoringAgentRole.NAME: - return RemoteMonitoringAgentRole.INSTANCE; - case IngestAdminRole.NAME: - return IngestAdminRole.INSTANCE; - case ReportingUserRole.NAME: - return ReportingUserRole.INSTANCE; - case KibanaRole.NAME: - // The only user that should know about this role is the kibana user itself (who has this role). The reason we want to hide - // this role is that it was created specifically for kibana, with all the permissions that the kibana user needs. - // We don't want it to be assigned to other users. 
The Kibana user here must always be enabled if it is in the - // security context - if (DEFAULT_ENABLED_KIBANA_USER.equals(securityContext.getUser())) { - return KibanaRole.INSTANCE; - } - return null; - default: - return null; - } + private static Map initializeReservedRoles() { + return MapBuilder.newMapBuilder() + .put("superuser", new RoleDescriptor("superuser", new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build()}, + new String[] { "*" }, + MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("transport_client", new RoleDescriptor("transport_client", new String[] { "transport_client" }, null, null, + MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("kibana_user", new RoleDescriptor("kibana_user", new String[] { "monitor" }, new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*").privileges("manage", "read", "index", "delete") + .build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("monitoring_user", new RoleDescriptor("monitoring_user", null, new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices(".marvel-es-*", ".monitoring-*").privileges("read").build() }, + null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("remote_monitoring_agent", new RoleDescriptor("remote_monitoring_agent", + new String[] { "manage_index_templates", "manage_ingest_pipelines", "monitor" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices(".marvel-es-*", ".monitoring-*").privileges("all").build() }, + null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("ingest_admin", new RoleDescriptor("ingest_admin", new String[] { "manage_index_templates", "manage_pipeline" }, + null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("reporting_user", new RoleDescriptor("reporting_user", null, new RoleDescriptor.IndicesPrivileges[] { + 
RoleDescriptor.IndicesPrivileges.builder().indices(".reporting-*").privileges("read", "write").build() }, + null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put(KibanaUser.ROLE_NAME, new RoleDescriptor(KibanaUser.ROLE_NAME, new String[] { "monitor", MonitoringBulkAction.NAME}, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*", ".reporting-*").privileges("all").build() }, + null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("logstash_system", new RoleDescriptor("logstash_system", new String[] { "monitor", MonitoringBulkAction.NAME}, + null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .immutableMap(); } public Map usageStats() { @@ -71,64 +68,19 @@ public class ReservedRolesStore { } public RoleDescriptor roleDescriptor(String role) { - switch (role) { - case SuperuserRole.NAME: - return SuperuserRole.DESCRIPTOR; - case TransportClientRole.NAME: - return TransportClientRole.DESCRIPTOR; - case KibanaUserRole.NAME: - return KibanaUserRole.DESCRIPTOR; - case MonitoringUserRole.NAME: - return MonitoringUserRole.DESCRIPTOR; - case RemoteMonitoringAgentRole.NAME: - return RemoteMonitoringAgentRole.DESCRIPTOR; - case IngestAdminRole.NAME: - return IngestAdminRole.DESCRIPTOR; - case ReportingUserRole.NAME: - return ReportingUserRole.DESCRIPTOR; - case KibanaRole.NAME: - // The only user that should know about this role is the kibana user itself (who has this role). The reason we want to hide - // this role is that it was created specifically for kibana, with all the permissions that the kibana user needs. - // We don't want it to be assigned to other users. 
- if (DEFAULT_ENABLED_KIBANA_USER.equals(securityContext.getUser())) { - return KibanaRole.DESCRIPTOR; - } - return null; - default: - return null; - } + return RESERVED_ROLES.get(role); } public Collection roleDescriptors() { - if (DEFAULT_ENABLED_KIBANA_USER.equals(securityContext.getUser())) { - return Arrays.asList(SuperuserRole.DESCRIPTOR, TransportClientRole.DESCRIPTOR, KibanaUserRole.DESCRIPTOR, - KibanaRole.DESCRIPTOR, MonitoringUserRole.DESCRIPTOR, RemoteMonitoringAgentRole.DESCRIPTOR, - IngestAdminRole.DESCRIPTOR, ReportingUserRole.DESCRIPTOR); - } - return Arrays.asList(SuperuserRole.DESCRIPTOR, TransportClientRole.DESCRIPTOR, KibanaUserRole.DESCRIPTOR, - MonitoringUserRole.DESCRIPTOR, RemoteMonitoringAgentRole.DESCRIPTOR, IngestAdminRole.DESCRIPTOR, - ReportingUserRole.DESCRIPTOR); + return RESERVED_ROLES.values(); } public static Set names() { - return Sets.newHashSet(SuperuserRole.NAME, KibanaRole.NAME, TransportClientRole.NAME, KibanaUserRole.NAME, - MonitoringUserRole.NAME, RemoteMonitoringAgentRole.NAME, IngestAdminRole.NAME, ReportingUserRole.NAME); + return RESERVED_ROLES.keySet(); } public static boolean isReserved(String role) { - switch (role) { - case SuperuserRole.NAME: - case KibanaRole.NAME: - case KibanaUserRole.NAME: - case TransportClientRole.NAME: - case MonitoringUserRole.NAME: - case RemoteMonitoringAgentRole.NAME: - case SystemUser.ROLE_NAME: - case IngestAdminRole.NAME: - case ReportingUserRole.NAME: - return true; - default: - return false; - } + return RESERVED_ROLES.containsKey(role) || SystemUser.ROLE_NAME.equals(role); } + } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java index a2c7eee7a8b..94f6124f21e 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java +++ 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java @@ -60,7 +60,7 @@ public class RestChangePasswordAction extends BaseRestHandler { ChangePasswordResponse changePasswordResponse, XContentBuilder builder) throws Exception { - return new BytesRestResponse(RestStatus.OK, channel.newBuilder().startObject().endObject()); + return new BytesRestResponse(RestStatus.OK, builder.startObject().endObject()); } }); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java index 4979da3bb19..5e009077018 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java @@ -48,7 +48,7 @@ public class RestSetEnabledAction extends BaseRestHandler { .execute(new RestBuilderListener(channel) { @Override public RestResponse buildResponse(SetEnabledResponse setEnabledResponse, XContentBuilder builder) throws Exception { - return new BytesRestResponse(RestStatus.OK, channel.newBuilder().startObject().endObject()); + return new BytesRestResponse(RestStatus.OK, builder.startObject().endObject()); } }); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/AutomatonPredicate.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/AutomatonPredicate.java deleted file mode 100644 index 166474b5de7..00000000000 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/AutomatonPredicate.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.support; - -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; - -import java.util.function.Predicate; - -import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES; - -public class AutomatonPredicate implements Predicate { - - private final CharacterRunAutomaton automaton; - - public AutomatonPredicate(Automaton automaton) { - this.automaton = new CharacterRunAutomaton(automaton, DEFAULT_MAX_DETERMINIZED_STATES); - } - - @Override - public boolean test(String input) { - return automaton.run(input); - } -} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java index 6952822d94f..51c8ecb41f6 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java @@ -7,17 +7,18 @@ package org.elasticsearch.xpack.security.support; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.RegExp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.function.Predicate; import static org.apache.lucene.util.automaton.MinimizationOperations.minimize; import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES; import static org.apache.lucene.util.automaton.Operations.concatenate; -import static org.apache.lucene.util.automaton.Operations.determinize; import static org.apache.lucene.util.automaton.Operations.minus; import static 
org.apache.lucene.util.automaton.Operations.union; @@ -106,13 +107,26 @@ public final class Automatons { return concatenate(automata); } - public static Automaton unionAndDeterminize(Automaton a1, Automaton a2) { - Automaton res = union(a1, a2); - return determinize(res, DEFAULT_MAX_DETERMINIZED_STATES); + public static Automaton unionAndMinimize(Collection automata) { + Automaton res = union(automata); + return minimize(res, DEFAULT_MAX_DETERMINIZED_STATES); } - public static Automaton minusAndDeterminize(Automaton a1, Automaton a2) { + public static Automaton minusAndMinimize(Automaton a1, Automaton a2) { Automaton res = minus(a1, a2, DEFAULT_MAX_DETERMINIZED_STATES); - return determinize(res, DEFAULT_MAX_DETERMINIZED_STATES); + return minimize(res, DEFAULT_MAX_DETERMINIZED_STATES); + } + + public static Predicate predicate(String... patterns) { + return predicate(Arrays.asList(patterns)); + } + + public static Predicate predicate(Collection patterns) { + return predicate(patterns(patterns)); + } + + public static Predicate predicate(Automaton automaton) { + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_MAX_DETERMINIZED_STATES); + return runAutomaton::run; } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index ed67bbfdb29..9b85e385a31 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.transport; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.common.CheckedConsumer; @@ 
-24,13 +25,16 @@ import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; +import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; import org.elasticsearch.xpack.security.authz.accesscontrol.RequestContext; import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4Transport; +import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; +import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.ssl.SSLService; import java.io.IOException; @@ -56,6 +60,7 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor private final ThreadPool threadPool; private final Settings settings; private final SecurityContext securityContext; + private final boolean reservedRealmEnabled; public SecurityServerTransportInterceptor(Settings settings, ThreadPool threadPool, @@ -73,6 +78,7 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor this.sslService = sslService; this.securityContext = securityContext; this.profileFilters = initializeProfileFilters(destructiveOperations); + this.reservedRealmEnabled = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); } @Override @@ -87,6 +93,13 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor if (AuthorizationUtils.shouldReplaceUserWithSystem(threadPool.getThreadContext(), action)) { securityContext.executeAsUser(SystemUser.INSTANCE, (original) -> sendWithUser(connection, action, request, options, new 
ContextRestoreResponseHandler<>(threadPool.getThreadContext(), original, handler), sender)); + } else if (reservedRealmEnabled && connection.getVersion().before(Version.V_5_2_0_UNRELEASED) && + KibanaUser.NAME.equals(securityContext.getUser().principal())) { + final User kibanaUser = securityContext.getUser(); + final User bwcKibanaUser = new User(kibanaUser.principal(), new String[] { "kibana" }, kibanaUser.fullName(), + kibanaUser.email(), kibanaUser.metadata(), kibanaUser.enabled()); + securityContext.executeAsUser(bwcKibanaUser, (original) -> sendWithUser(connection, action, request, options, + new ContextRestoreResponseHandler<>(threadPool.getThreadContext(), original, handler), sender)); } else { sendWithUser(connection, action, request, options, handler, sender); } @@ -134,11 +147,13 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor switch (type) { case "client": profileFilters.put(entry.getKey(), new ServerTransportFilter.ClientProfile(authcService, authzService, - threadPool.getThreadContext(), extractClientCert, destructiveOperations)); + threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled, + securityContext)); break; default: profileFilters.put(entry.getKey(), new ServerTransportFilter.NodeProfile(authcService, authzService, - threadPool.getThreadContext(), extractClientCert, destructiveOperations)); + threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled, + securityContext)); } } @@ -147,7 +162,7 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor final boolean clientAuth = sslService.isSSLClientAuthEnabled(transportSSLSettings); final boolean extractClientCert = profileSsl && clientAuth; profileFilters.put(TransportSettings.DEFAULT_PROFILE, new ServerTransportFilter.NodeProfile(authcService, authzService, - threadPool.getThreadContext(), extractClientCert, destructiveOperations)); + 
threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled, securityContext)); } return Collections.unmodifiableMap(profileFilters); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index 25d36a2a82d..ee5307cbb16 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -10,6 +10,7 @@ import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; @@ -22,11 +23,16 @@ import org.elasticsearch.transport.DelegatingTransportChannel; import org.elasticsearch.transport.TcpTransportChannel; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.action.SecurityActionMapper; +import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.pki.PkiRealm; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; +import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor.ContextRestoreResponseHandler; +import org.elasticsearch.xpack.security.user.KibanaUser; +import org.elasticsearch.xpack.security.user.User; import javax.net.ssl.SSLEngine; import 
javax.net.ssl.SSLPeerUnverifiedException; @@ -65,14 +71,19 @@ public interface ServerTransportFilter { private final ThreadContext threadContext; private final boolean extractClientCert; private final DestructiveOperations destructiveOperations; + private final boolean reservedRealmEnabled; + private final SecurityContext securityContext; - public NodeProfile(AuthenticationService authcService, AuthorizationService authzService, - ThreadContext threadContext, boolean extractClientCert, DestructiveOperations destructiveOperations) { + NodeProfile(AuthenticationService authcService, AuthorizationService authzService, + ThreadContext threadContext, boolean extractClientCert, DestructiveOperations destructiveOperations, + boolean reservedRealmEnabled, SecurityContext securityContext) { this.authcService = authcService; this.authzService = authzService; this.threadContext = threadContext; this.extractClientCert = extractClientCert; this.destructiveOperations = destructiveOperations; + this.reservedRealmEnabled = reservedRealmEnabled; + this.securityContext = securityContext; } @Override @@ -112,12 +123,31 @@ public interface ServerTransportFilter { } authcService.authenticate(securityAction, request, null, ActionListener.wrap((authentication) -> { - final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = - new AuthorizationUtils.AsyncAuthorizer(authentication, listener, (userRoles, runAsRoles) -> { - authzService.authorize(authentication, securityAction, request, userRoles, runAsRoles); - listener.onResponse(null); + if (reservedRealmEnabled && authentication.getVersion().before(Version.V_5_2_0_UNRELEASED) + && KibanaUser.NAME.equals(authentication.getUser().principal())) { + // the authentication came from an older node - so let's replace the user with our version + final User kibanaUser = new KibanaUser(authentication.getUser().enabled()); + if (kibanaUser.enabled()) { + securityContext.executeAsUser(kibanaUser, (original) -> { + final Authentication 
replacedUserAuth = Authentication.getAuthentication(threadContext); + final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = + new AuthorizationUtils.AsyncAuthorizer(replacedUserAuth, listener, (userRoles, runAsRoles) -> { + authzService.authorize(replacedUserAuth, securityAction, request, userRoles, runAsRoles); + listener.onResponse(null); + }); + asyncAuthorizer.authorize(authzService); }); - asyncAuthorizer.authorize(authzService); + } else { + throw new IllegalStateException("a disabled user should never be sent. " + kibanaUser); + } + } else { + final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = + new AuthorizationUtils.AsyncAuthorizer(authentication, listener, (userRoles, runAsRoles) -> { + authzService.authorize(authentication, securityAction, request, userRoles, runAsRoles); + listener.onResponse(null); + }); + asyncAuthorizer.authorize(authzService); + } }, listener::onFailure)); } } @@ -151,9 +181,11 @@ public interface ServerTransportFilter { */ class ClientProfile extends NodeProfile { - public ClientProfile(AuthenticationService authcService, AuthorizationService authzService, - ThreadContext threadContext, boolean extractClientCert, DestructiveOperations destructiveOperations) { - super(authcService, authzService, threadContext, extractClientCert, destructiveOperations); + ClientProfile(AuthenticationService authcService, AuthorizationService authzService, + ThreadContext threadContext, boolean extractClientCert, DestructiveOperations destructiveOperations, + boolean reservedRealmEnabled, SecurityContext securityContext) { + super(authcService, authzService, threadContext, extractClientCert, destructiveOperations, reservedRealmEnabled, + securityContext); } @Override diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java index 0ff4aad2119..85a921e7fbe 100644 --- 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java @@ -10,7 +10,6 @@ import io.netty.handler.ipfilter.IpFilterRuleType; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -22,7 +21,6 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xpack.security.audit.AuditTrailService; -import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java index ff740d44ef8..6d8382fb3b0 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.user; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; import org.elasticsearch.xpack.security.support.MetadataUtils; /** @@ -15,7 +14,7 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; public class ElasticUser extends User { public static final String NAME = "elastic"; - public static final String ROLE_NAME = SuperuserRole.NAME; + private static final String ROLE_NAME = "superuser"; public ElasticUser(boolean enabled) { super(NAME, new String[] { ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); diff --git 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java index 614a0d0abd6..8edf134b891 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.user; -import org.elasticsearch.xpack.security.authz.permission.KibanaRole; import org.elasticsearch.xpack.security.support.MetadataUtils; /** @@ -14,7 +13,7 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; public class KibanaUser extends User { public static final String NAME = "kibana"; - public static final String ROLE_NAME = KibanaRole.NAME; + public static final String ROLE_NAME = "kibana_system"; public KibanaUser(boolean enabled) { super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java new file mode 100644 index 00000000000..8b0fb06b68f --- /dev/null +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.user; + +import org.elasticsearch.Version; +import org.elasticsearch.xpack.security.support.MetadataUtils; + +/** + * Built in user for logstash internals. Currently used for Logstash monitoring. 
+ */ +public class LogstashSystemUser extends User { + + public static final String NAME = "logstash_system"; + private static final String ROLE_NAME = "logstash_system"; + public static final Version DEFINED_SINCE = Version.V_5_2_0_UNRELEASED; + + public LogstashSystemUser(boolean enabled) { + super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); + } +} diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/User.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/User.java index e65e5e11b9d..10e2e7d4c31 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/User.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/User.java @@ -11,11 +11,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.security.support.MetadataUtils; - import java.io.IOException; import java.util.Arrays; import java.util.Collections; @@ -24,7 +23,7 @@ import java.util.Map; /** * An authenticated user */ -public class User implements ToXContent { +public class User implements ToXContentObject { private final String username; private final String[] roles; @@ -170,7 +169,7 @@ public class User implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Fields.USERNAME.getPreferredName(), principal()); builder.array(Fields.ROLES.getPreferredName(), roles()); diff --git 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java index c96bd9b3c4d..2f95cd9998d 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java @@ -5,15 +5,13 @@ */ package org.elasticsearch.xpack.security.user; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; - /** * XPack internal user that manages xpack. Has all cluster/indices permissions for x-pack to operate. */ public class XPackUser extends User { public static final String NAME = "_xpack"; - public static final String ROLE_NAME = SuperuserRole.NAME; + private static final String ROLE_NAME = "superuser"; public static final XPackUser INSTANCE = new XPackUser(); private XPackUser() { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index c34326f1b1a..65d5c564eaf 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -9,12 +9,15 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.util.Providers; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.LoggerMessageFormat; import 
org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; @@ -187,8 +190,19 @@ public class Watcher implements ActionPlugin, ScriptPlugin { private static final Logger logger = Loggers.getLogger(XPackPlugin.class); - static { - MetaData.registerPrototype(WatcherMetaData.TYPE, WatcherMetaData.PROTO); + public List getNamedWriteables() { + List entries = new ArrayList<>(); + entries.add(new NamedWriteableRegistry.Entry(MetaData.Custom.class, WatcherMetaData.TYPE, WatcherMetaData::new)); + entries.add(new NamedWriteableRegistry.Entry(NamedDiff.class, WatcherMetaData.TYPE, WatcherMetaData::readDiffFrom)); + return entries; + } + + public List getNamedXContent() { + List entries = new ArrayList<>(); + // Metadata + entries.add(new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(WatcherMetaData.TYPE), + WatcherMetaData::fromXContent)); + return entries; } protected final Settings settings; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java index c16ab695bd8..61468af0370 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java @@ -5,10 +5,10 @@ */ package org.elasticsearch.xpack.watcher; -import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.cluster.AbstractNamedDiffable; +import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -17,10 +17,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.EnumSet; 
-public class WatcherMetaData extends AbstractDiffable implements MetaData.Custom { +public class WatcherMetaData extends AbstractNamedDiffable implements MetaData.Custom { public static final String TYPE = "watcher"; - public static final WatcherMetaData PROTO = new WatcherMetaData(false); private final boolean manuallyStopped; @@ -33,7 +32,7 @@ public class WatcherMetaData extends AbstractDiffable implement } @Override - public String type() { + public String getWriteableName() { return TYPE; } @@ -42,9 +41,12 @@ public class WatcherMetaData extends AbstractDiffable implement return EnumSet.of(MetaData.XContentContext.GATEWAY); } - @Override - public MetaData.Custom readFrom(StreamInput streamInput) throws IOException { - return new WatcherMetaData(streamInput.readBoolean()); + public WatcherMetaData(StreamInput streamInput) throws IOException { + this(streamInput.readBoolean()); + } + + public static NamedDiff readDiffFrom(StreamInput streamInput) throws IOException { + return readDiffFrom(MetaData.Custom.class, TYPE, streamInput); } @Override @@ -52,8 +54,7 @@ public class WatcherMetaData extends AbstractDiffable implement streamOutput.writeBoolean(manuallyStopped); } - @Override - public MetaData.Custom fromXContent(XContentParser parser) throws IOException { + public static MetaData.Custom fromXContent(XContentParser parser) throws IOException { XContentParser.Token token; Boolean manuallyStopped = null; String currentFieldName = null; @@ -63,7 +64,7 @@ public class WatcherMetaData extends AbstractDiffable implement currentFieldName = parser.currentName(); break; case VALUE_BOOLEAN: - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.MANUALLY_STOPPED)) { + if (Field.MANUALLY_STOPPED.match(currentFieldName)) { manuallyStopped = parser.booleanValue(); } break; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java index 
7dd90765c48..4bdc5b9279f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java @@ -8,12 +8,13 @@ package org.elasticsearch.xpack.watcher.actions; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.Locale; -public interface Action extends ToXContent { +public interface Action extends ToXContentObject { String type(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java index ec37f5e6d5b..542d622ed7b 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java @@ -8,10 +8,10 @@ package org.elasticsearch.xpack.watcher.actions; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.joda.time.DateTime; @@ -24,7 +24,7 @@ import java.util.Objects; import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument; import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.dateTimeFormatter; -public class ActionStatus 
implements ToXContent { +public class ActionStatus implements ToXContentObject { private AckStatus ackStatus; @Nullable private Execution lastExecution; @@ -167,13 +167,13 @@ public class ActionStatus implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACK_STATUS)) { + } else if (Field.ACK_STATUS.match(currentFieldName)) { ackStatus = AckStatus.parse(watchId, actionId, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LAST_EXECUTION)) { + } else if (Field.LAST_EXECUTION.match(currentFieldName)) { lastExecution = Execution.parse(watchId, actionId, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LAST_SUCCESSFUL_EXECUTION)) { + } else if (Field.LAST_SUCCESSFUL_EXECUTION.match(currentFieldName)) { lastSuccessfulExecution = Execution.parse(watchId, actionId, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LAST_THROTTLE)) { + } else if (Field.LAST_THROTTLE.match(currentFieldName)) { lastThrottle = Throttle.parse(watchId, actionId, parser); } else { throw new ElasticsearchParseException("could not parse action status for [{}/{}]. 
unexpected field [{}]", watchId, @@ -259,9 +259,9 @@ public class ActionStatus implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMESTAMP)) { + } else if (Field.TIMESTAMP.match(currentFieldName)) { timestamp = dateTimeFormatter.parser().parseDateTime(parser.text()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACK_STATUS_STATE)) { + } else if (Field.ACK_STATUS_STATE.match(currentFieldName)) { state = State.valueOf(parser.text().toUpperCase(Locale.ROOT)); } else { throw new ElasticsearchParseException("could not parse action status for [{}/{}]. unexpected field [{}.{}]", watchId, @@ -365,11 +365,11 @@ public class ActionStatus implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMESTAMP)) { + } else if (Field.TIMESTAMP.match(currentFieldName)) { timestamp = dateTimeFormatter.parser().parseDateTime(parser.text()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.EXECUTION_SUCCESSFUL)) { + } else if (Field.EXECUTION_SUCCESSFUL.match(currentFieldName)) { successful = parser.booleanValue(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.REASON)) { + } else if (Field.REASON.match(currentFieldName)) { reason = parser.text(); } else { throw new ElasticsearchParseException("could not parse action status for [{}/{}]. 
unexpected field [{}.{}]", watchId, @@ -465,9 +465,9 @@ public class ActionStatus implements ToXContent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMESTAMP)) { + } else if (Field.TIMESTAMP.match(currentFieldName)) { timestamp = dateTimeFormatter.parser().parseDateTime(parser.text()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.REASON)) { + } else if (Field.REASON.match(currentFieldName)) { reason = parser.text(); } else { throw new ElasticsearchParseException("could not parse action status for [{}/{}]. unexpected field [{}.{}]", watchId, diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java index a7c23ba0d65..db3d8e016ef 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java @@ -11,9 +11,9 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.license.XPackLicenseState; @@ -32,7 +32,7 @@ import java.time.Clock; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; -public class ActionWrapper implements ToXContent { +public class ActionWrapper implements ToXContentObject { private String id; 
@Nullable @@ -209,13 +209,13 @@ public class ActionWrapper implements ToXContent { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Watch.Field.CONDITION)) { + if (Watch.Field.CONDITION.match(currentFieldName)) { condition = actionRegistry.getConditionRegistry().parseExecutable(watchId, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Transform.Field.TRANSFORM)) { + } else if (Transform.Field.TRANSFORM.match(currentFieldName)) { transform = actionRegistry.getTransformRegistry().parse(watchId, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Throttler.Field.THROTTLE_PERIOD)) { + } else if (Throttler.Field.THROTTLE_PERIOD.match(currentFieldName)) { throttlePeriod = timeValueMillis(parser.longValue()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Throttler.Field.THROTTLE_PERIOD_HUMAN)) { + } else if (Throttler.Field.THROTTLE_PERIOD_HUMAN.match(currentFieldName)) { try { throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Throttler.Field.THROTTLE_PERIOD_HUMAN.toString()); } catch (ElasticsearchParseException pe) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java index 0cb0ad5ca43..a610f65e044 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java @@ -6,14 +6,14 @@ package org.elasticsearch.xpack.watcher.actions; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import 
org.elasticsearch.xpack.watcher.watch.Payload; import java.io.IOException; -public abstract class ExecutableAction implements ToXContent { +public abstract class ExecutableAction implements ToXContentObject { protected final A action; protected final Logger logger; @@ -59,7 +59,7 @@ public abstract class ExecutableAction implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return action.toXContent(builder, params); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailAction.java index 36df738f784..d21bf95373c 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/email/EmailAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.actions.email; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.secret.Secret; @@ -137,24 +136,24 @@ public class EmailAction implements Action { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ATTACH_DATA)) { + } else if (Field.ATTACH_DATA.match(currentFieldName)) { try { dataAttachment = DataAttachment.parse(parser); } catch (IOException ioe) { throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. 
failed to parse data attachment field " + "[{}]", ioe, TYPE, watchId, actionId, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ATTACHMENTS)) { + } else if (Field.ATTACHMENTS.match(currentFieldName)) { attachments = emailAttachmentsParser.parse(parser); } else if (!emailParser.handle(currentFieldName, parser)) { if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACCOUNT)) { + if (Field.ACCOUNT.match(currentFieldName)) { account = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.USER)) { + } else if (Field.USER.match(currentFieldName)) { user = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PASSWORD)) { + } else if (Field.PASSWORD.match(currentFieldName)) { password = WatcherXContentParser.secretOrNull(parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PROFILE)) { + } else if (Field.PROFILE.match(currentFieldName)) { try { profile = Profile.resolve(parser.text()); } catch (IllegalArgumentException iae) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java index 45b13b7adeb..a6f9d7f7258 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/hipchat/HipChatAction.java @@ -10,7 +10,6 @@ import com.google.common.base.Objects; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.http.HttpProxy; @@ -80,16 +79,16 @@ 
public class HipChatAction implements Action { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACCOUNT)) { + } else if (Field.ACCOUNT.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { account = parser.text(); } else { throw new ElasticsearchParseException("failed to parse [{}] action [{}/{}]. expected [{}] to be of type string, but " + "found [{}] instead", TYPE, watchId, actionId, Field.ACCOUNT.getPreferredName(), token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PROXY)) { + } else if (Field.PROXY.match(currentFieldName)) { proxy = HttpProxy.parse(parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.MESSAGE)) { + } else if (Field.MESSAGE.match(currentFieldName)) { try { message = HipChatMessage.Template.parse(parser); } catch (Exception e) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java index 599eae08d40..16a964bbcb3 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.actions.index; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -109,7 +108,7 @@ public class IndexAction implements Action { while ((token = parser.nextToken()) != 
XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.INDEX)) { + } else if (Field.INDEX.match(currentFieldName)) { try { index = parser.text(); } catch (ElasticsearchParseException pe) { @@ -117,21 +116,21 @@ public class IndexAction implements Action { "field [{}]", pe, TYPE, watchId, actionId, currentFieldName); } } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { + if (Field.TIMEOUT.match(currentFieldName)) { timeout = timeValueMillis(parser.longValue()); } else { throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. unexpected number field [{}]", TYPE, watchId, actionId, currentFieldName); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DOC_TYPE)) { + if (Field.DOC_TYPE.match(currentFieldName)) { docType = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.EXECUTION_TIME_FIELD)) { + } else if (Field.EXECUTION_TIME_FIELD.match(currentFieldName)) { executionTimeField = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT_HUMAN)) { + } else if (Field.TIMEOUT_HUMAN.match(currentFieldName)) { // Parser for human specified timeouts and 2.x compatibility timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DYNAMIC_NAME_TIMEZONE)) { + } else if (Field.DYNAMIC_NAME_TIMEZONE.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { dynamicNameTimeZone = DateTimeZone.forID(parser.text()); } else { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraAction.java 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraAction.java index a6d201591ff..90f0c610bb5 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/jira/JiraAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.watcher.actions.jira; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.http.HttpProxy; @@ -83,16 +82,16 @@ public class JiraAction implements Action { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACCOUNT)) { + } else if (Field.ACCOUNT.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { account = parser.text(); } else { throw new ElasticsearchParseException("failed to parse [{}] action [{}/{}]. 
expected [{}] to be of type string, but " + "found [{}] instead", TYPE, watchId, actionId, Field.ACCOUNT.getPreferredName(), token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PROXY)) { + } else if (Field.PROXY.match(currentFieldName)) { proxy = HttpProxy.parse(parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.FIELDS)) { + } else if (Field.FIELDS.match(currentFieldName)) { try { fields = parser.map(); } catch (Exception e) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java index 0f93717072a..5851365c44f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.actions.logging; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.actions.Action; @@ -77,7 +76,7 @@ public class LoggingAction implements Action { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TEXT)) { + } else if (Field.TEXT.match(currentFieldName)) { try { text = TextTemplate.parse(parser); } catch (ElasticsearchParseException pe) { @@ -85,9 +84,9 @@ public class LoggingAction implements Action { watchId, actionId, Field.TEXT.getPreferredName()); } } else if (token == XContentParser.Token.VALUE_STRING) { - if 
(ParseFieldMatcher.STRICT.match(currentFieldName, Field.CATEGORY)) { + if (Field.CATEGORY.match(currentFieldName)) { category = parser.text(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LEVEL)) { + } else if (Field.LEVEL.match(currentFieldName)) { try { level = LoggingLevel.valueOf(parser.text().toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException iae) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackAction.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackAction.java index 22757b11a90..1f2cf39feb0 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackAction.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.watcher.actions.slack; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.http.HttpProxy; @@ -79,16 +78,16 @@ public class SlackAction implements Action { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACCOUNT)) { + } else if (Field.ACCOUNT.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { account = parser.text(); } else { throw new ElasticsearchParseException("failed to parse [{}] action [{}/{}]. 
expected [{}] to be of type string, but " + "found [{}] instead", TYPE, watchId, actionId, Field.ACCOUNT.getPreferredName(), token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.PROXY)) { + } else if (Field.PROXY.match(currentFieldName)) { proxy = HttpProxy.parse(parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.MESSAGE)) { + } else if (Field.MESSAGE.match(currentFieldName)) { try { message = SlackMessage.Template.parse(parser); } catch (Exception e) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java index b68ebf8e59f..9e4483dbf65 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java @@ -5,18 +5,16 @@ */ package org.elasticsearch.xpack.watcher.client; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.throttler.Throttler; -import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.input.Input; import org.elasticsearch.xpack.watcher.input.none.NoneInput; import 
org.elasticsearch.xpack.watcher.support.Exceptions; @@ -31,7 +29,7 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -public class WatchSourceBuilder implements ToXContent { +public class WatchSourceBuilder extends ToXContentToBytes implements ToXContent { private Trigger trigger; private Input input = NoneInput.INSTANCE; @@ -165,18 +163,7 @@ public class WatchSourceBuilder implements ToXContent { return builder.endObject(); } - public BytesReference buildAsBytes(XContentType contentType) { - try { - XContentBuilder builder = XContentFactory.contentBuilder(contentType); - toXContent(builder, ToXContent.EMPTY_PARAMS); - return builder.bytes(); - } catch (IOException ioe) { - // todo think of a better std exception for this - throw new ElasticsearchException("failed to render watch source as bytes", ioe); - } - } - - static class TransformedAction implements ToXContent { + static class TransformedAction implements ToXContentObject { private final String id; private final Action action; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java index f463705155e..a8b4ce9e1b3 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; @@ -40,11 +39,6 @@ public final class AlwaysCondition extends Condition { return RESULT_INSTANCE; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - 
return builder.startObject().endObject(); - } - @Override public boolean equals(Object obj) { return obj instanceof AlwaysCondition; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java index edf9f9cb993..a70699fe618 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; @@ -14,7 +15,7 @@ import java.io.IOException; import java.util.Locale; import java.util.Map; -public abstract class Condition implements ToXContent { +public abstract class Condition implements ToXContentObject { protected final String type; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java index 351c36a83c5..21425b08dbb 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.CompiledScript; @@ -55,7 +54,7 @@ public final class ScriptCondition extends Condition { public static ScriptCondition 
parse(ScriptService scriptService, String watchId, XContentParser parser) throws IOException { try { - Script script = Script.parse(parser, ParseFieldMatcher.STRICT); + Script script = Script.parse(parser); return new ScriptCondition(script, scriptService); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. failed to parse script", pe, TYPE, diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 9caf3a5aa91..637f1db4f44 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.common.stats.Counters; import org.elasticsearch.xpack.watcher.Watcher; @@ -262,10 +263,13 @@ public final class ExecutionService extends AbstractComponent { logger.trace("not executing watch [{}] because it is already queued", ctx.watch().id()); record = ctx.abortBeforeExecution(ExecutionState.NOT_EXECUTED_ALREADY_QUEUED, "Watch is already queued in thread pool"); } else { - final AtomicBoolean watchExists = new AtomicBoolean(true); - client.getWatch(ctx.watch().id(), ActionListener.wrap((r) -> watchExists.set(r.isExists()), (e) -> watchExists.set(false))); + boolean watchExists = false; + try { + GetResponse response = client.getWatch(ctx.watch().id()); + watchExists = response.isExists(); + } catch (IndexNotFoundException e) {} - if (ctx.knownWatch() 
&& watchExists.get() == false) { + if (ctx.knownWatch() && watchExists == false) { // fail fast if we are trying to execute a deleted watch String message = "unable to find watch for record [" + ctx.id() + "], perhaps it has been deleted, ignoring..."; record = ctx.abortBeforeExecution(ExecutionState.NOT_EXECUTED_WATCH_MISSING, message); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java index 0ba75a2457d..c13f1c1deec 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; @@ -80,7 +79,7 @@ public class TriggeredWatch implements ToXContent { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TRIGGER_EVENT)) { + if (Field.TRIGGER_EVENT.match(currentFieldName)) { triggerEvent = triggerService.parseTriggerEvent(wid.watchId(), id, parser); } else { parser.skipChildren(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 41f38ef587b..9669c170bcd 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ 
b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -92,50 +92,12 @@ public class TriggeredWatchStore extends AbstractComponent { } } - public void put(TriggeredWatch triggeredWatch) throws Exception { - ensureStarted(); - accessLock.lock(); - try { - IndexRequest request = new IndexRequest(INDEX_NAME, DOC_TYPE, triggeredWatch.id().value()) - .source(XContentFactory.jsonBuilder().value(triggeredWatch)) - .opType(IndexRequest.OpType.CREATE); - client.index(request, (TimeValue) null); - } catch (IOException e) { - throw ioException("failed to persist triggered watch [{}]", e, triggeredWatch); - } finally { - accessLock.unlock(); - } - } - - public void put(final TriggeredWatch triggeredWatch, final ActionListener listener) { - ensureStarted(); - try { - IndexRequest request = new IndexRequest(INDEX_NAME, DOC_TYPE, triggeredWatch.id().value()) - .source(XContentFactory.jsonBuilder().value(triggeredWatch)) - .opType(IndexRequest.OpType.CREATE); - client.index(request, ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure)); - } catch (IOException e) { - logger.warn((Supplier) () -> new ParameterizedMessage("could not index triggered watch [{}], ignoring it...", - triggeredWatch.id()), e); - } - } - public void putAll(final List triggeredWatches, final ActionListener listener) { - if (triggeredWatches.isEmpty()) { listener.onResponse(new BitSet(0)); return; } - if (triggeredWatches.size() == 1) { - put(triggeredWatches.get(0), ActionListener.wrap(success -> { - BitSet bitSet = new BitSet(1); - bitSet.set(0); - listener.onResponse(bitSet); - }, listener::onFailure)); - return; - } - ensureStarted(); BulkRequest request = new BulkRequest(); for (TriggeredWatch triggeredWatch : triggeredWatches) { @@ -163,6 +125,10 @@ public class TriggeredWatchStore extends AbstractComponent { }, listener::onFailure)); } + public void put(TriggeredWatch triggeredWatch) throws Exception { + 
putAll(Collections.singletonList(triggeredWatch)); + } + public BitSet putAll(final List triggeredWatches) throws Exception { ensureStarted(); try { @@ -268,5 +234,4 @@ public class TriggeredWatchStore extends AbstractComponent { throw illegalState("unable to persist triggered watches, the store is not ready"); } } - } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java index 12456ce873d..275f963dadd 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.watcher.condition.Condition; @@ -19,7 +19,7 @@ import org.joda.time.DateTime; import java.io.IOException; import java.util.Map; -public class WatchExecutionResult implements ToXContent { +public class WatchExecutionResult implements ToXContentObject { private final DateTime executionTime; private final long executionDurationMs; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java index f5912775121..0562062ba82 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java @@ -8,7 +8,7 @@ package 
org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; import org.joda.time.DateTime; @@ -17,7 +17,7 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.Map; -public class WatchExecutionSnapshot implements Streamable, ToXContent { +public class WatchExecutionSnapshot implements Streamable, ToXContentObject { private String watchId; private String watchRecordId; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java index f3c1c7d6d85..21a21f5256f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; @@ -30,7 +30,7 @@ import java.util.Collections; import java.util.Map; import java.util.Objects; -public abstract class WatchRecord implements ToXContent { +public abstract class WatchRecord implements ToXContentObject { protected final Wid id; protected final TriggerEvent triggerEvent; @@ -117,7 +117,7 @@ public abstract 
class WatchRecord implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Field.WATCH_ID.getPreferredName(), id.watchId()); builder.field(Field.STATE.getPreferredName(), state.id()); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java index e1d702c9170..bc5b815a661 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java @@ -7,14 +7,14 @@ package org.elasticsearch.xpack.watcher.input; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.watch.Payload; import java.io.IOException; -public abstract class ExecutableInput implements ToXContent { +public abstract class ExecutableInput implements ToXContentObject { protected final I input; protected final Logger logger; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java index 7c96adc08f8..9ec5ecbe746 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java @@ -7,18 +7,18 @@ package org.elasticsearch.xpack.watcher.input; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.ParseField; 
-import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.watch.Payload; import java.io.IOException; import java.util.Locale; -public interface Input extends ToXContent { +public interface Input extends ToXContentObject { String type(); - abstract class Result implements ToXContent { + abstract class Result implements ToXContentObject { public enum Status { SUCCESS, FAILURE @@ -85,9 +85,7 @@ public interface Input extends ToXContent { } interface Builder { - I build(); - } interface Field { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInput.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInput.java index 15d733bcb45..33ef650bb85 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInput.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/http/HttpInput.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.input.http; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.input.Input; @@ -81,7 +80,7 @@ public class HttpInput implements Input { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.REQUEST)) { + } else if (Field.REQUEST.match(currentFieldName)) { try { request = requestParser.parse(parser); } catch (ElasticsearchParseException pe) { @@ -104,7 +103,7 @@ public class HttpInput implements Input { 
watchId, currentFieldName); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.RESPONSE_CONTENT_TYPE)) { + if (Field.RESPONSE_CONTENT_TYPE.match(currentFieldName)) { expectedResponseBodyType = HttpContentType.resolve(parser.text()); if (expectedResponseBodyType == null) { throw new ElasticsearchParseException("could not parse [{}] input for watch [{}]. unknown content type [{}]", diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 55e01d5fd85..4adcfec196a 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -12,9 +12,9 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.watcher.watch.Payload; import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.watcher.input.search.SearchInput.TYPE; /** @@ -80,9 +79,9 @@ public class ExecutableSearchInput extends ExecutableInput filteredKeys = XContentFilterKeysUtils.filterMapOrdered(input.getExtractKeys(), parser); payload = new Payload.Simple(filteredKeys); } else { 
diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java index bf4febd6b35..8f00846b91b 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java @@ -5,15 +5,12 @@ */ package org.elasticsearch.xpack.watcher.input.search; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.xpack.watcher.input.Input; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; @@ -108,9 +105,7 @@ public class SearchInput implements Input { return builder; } - public static SearchInput parse(Logger inputLogger, String watchId, XContentParser parser, - ParseFieldMatcher parseFieldMatcher, - SearchRequestParsers searchRequestParsers) throws IOException { + public static SearchInput parse(String watchId, XContentParser parser) throws IOException { WatcherSearchTemplateRequest request = null; Set extract = null; TimeValue timeout = null; @@ -121,16 +116,15 @@ public class SearchInput implements Input { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.REQUEST)) { + } else if (Field.REQUEST.match(currentFieldName)) { try { - request 
= WatcherSearchTemplateRequest.fromXContent(inputLogger, parser, ExecutableSearchInput.DEFAULT_SEARCH_TYPE, - parseFieldMatcher, searchRequestParsers); + request = WatcherSearchTemplateRequest.fromXContent(parser, ExecutableSearchInput.DEFAULT_SEARCH_TYPE); } catch (ElasticsearchParseException srpe) { throw new ElasticsearchParseException("could not parse [{}] input for watch [{}]. failed to parse [{}]", srpe, TYPE, watchId, currentFieldName); } } else if (token == XContentParser.Token.START_ARRAY) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.EXTRACT)) { + if (Field.EXTRACT.match(currentFieldName)) { extract = new HashSet<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { @@ -144,12 +138,12 @@ public class SearchInput implements Input { throw new ElasticsearchParseException("could not parse [{}] input for watch [{}]. unexpected array field [{}]", TYPE, watchId, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { + } else if (Field.TIMEOUT.match(currentFieldName)) { timeout = timeValueMillis(parser.longValue()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT_HUMAN)) { + } else if (Field.TIMEOUT_HUMAN.match(currentFieldName)) { // Parser for human specified timeouts and 2.x compatibility timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DYNAMIC_NAME_TIMEZONE)) { + } else if (Field.DYNAMIC_NAME_TIMEZONE.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { dynamicNameTimeZone = DateTimeZone.forID(parser.text()); } else { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java index 31aa5354a24..a2941e6a9a8 100644 --- 
a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.input.search; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -24,8 +23,6 @@ import java.io.IOException; public class SearchInputFactory extends InputFactory { private final WatcherClientProxy client; private final TimeValue defaultTimeout; - private final SearchRequestParsers searchRequestParsers; - private final ParseFieldMatcher parseFieldMatcher; private final WatcherSearchTemplateService searchTemplateService; public SearchInputFactory(Settings settings, InternalClient client, SearchRequestParsers searchRequestParsers, @@ -36,9 +33,7 @@ public class SearchInputFactory extends InputFactory responseToData(ToXContent response) throws IOException { - XContentBuilder builder = jsonBuilder().startObject().value(response).endObject(); - return XContentHelper.convertToMap(builder.bytes(), false).v2(); + public static Map responseToData(ToXContentObject response) throws IOException { + return XContentHelper.convertToMap(XContentHelper.toXContent(response, XContentType.JSON), false).v2(); } public static Map flattenModel(Map map) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java index 5a6f5b666c1..3d78e6d3b22 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java @@ -5,25 +5,18 @@ */ package 
org.elasticsearch.xpack.watcher.support.search; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.SearchRequestParsers; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.common.text.TextTemplate; import java.io.IOException; import java.util.ArrayList; @@ -37,7 +30,7 @@ import java.util.Objects; * A {@link WatcherSearchTemplateRequest} contains the search request and the eventual template that will * be rendered as a script by {@link WatcherSearchTemplateService} before being executed. */ -public class WatcherSearchTemplateRequest implements ToXContent { +public class WatcherSearchTemplateRequest implements ToXContentObject { private final String[] indices; private final String[] types; @@ -165,10 +158,7 @@ public class WatcherSearchTemplateRequest implements ToXContent { /** * Reads a new watcher search request instance for the specified parser. 
*/ - public static WatcherSearchTemplateRequest fromXContent(Logger logger, XContentParser parser, - SearchType searchType, - ParseFieldMatcher parseFieldMatcher, - SearchRequestParsers searchRequestParsers) throws IOException { + public static WatcherSearchTemplateRequest fromXContent(XContentParser parser, SearchType searchType) throws IOException { List indices = new ArrayList<>(); List types = new ArrayList<>(); IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; @@ -181,7 +171,7 @@ public class WatcherSearchTemplateRequest implements ToXContent { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, INDICES_FIELD)) { + if (INDICES_FIELD.match(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { indices.add(parser.textOrNull()); @@ -190,7 +180,7 @@ public class WatcherSearchTemplateRequest implements ToXContent { currentFieldName + "] field, but instead found [" + token + "]"); } } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, TYPES_FIELD)) { + } else if (TYPES_FIELD.match(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { types.add(parser.textOrNull()); @@ -204,12 +194,12 @@ public class WatcherSearchTemplateRequest implements ToXContent { currentFieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, BODY_FIELD)) { + if (BODY_FIELD.match(currentFieldName)) { try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { builder.copyCurrentStructure(parser); searchSource = builder.bytes(); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, INDICES_OPTIONS_FIELD)) { + } else if 
(INDICES_OPTIONS_FIELD.match(currentFieldName)) { boolean expandOpen = DEFAULT_INDICES_OPTIONS.expandWildcardsOpen(); boolean expandClosed = DEFAULT_INDICES_OPTIONS.expandWildcardsClosed(); boolean allowNoIndices = DEFAULT_INDICES_OPTIONS.allowNoIndices(); @@ -218,7 +208,7 @@ public class WatcherSearchTemplateRequest implements ToXContent { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, EXPAND_WILDCARDS_FIELD)) { + if (EXPAND_WILDCARDS_FIELD.match(currentFieldName)) { switch (parser.text()) { case "all": expandOpen = true; @@ -240,9 +230,9 @@ public class WatcherSearchTemplateRequest implements ToXContent { throw new ElasticsearchParseException("could not read search request. unknown value [" + parser.text() + "] for [" + currentFieldName + "] field "); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, IGNORE_UNAVAILABLE_FIELD)) { + } else if (IGNORE_UNAVAILABLE_FIELD.match(currentFieldName)) { ignoreUnavailable = parser.booleanValue(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, ALLOW_NO_INDICES_FIELD)) { + } else if (ALLOW_NO_INDICES_FIELD.match(currentFieldName)) { allowNoIndices = parser.booleanValue(); } else { throw new ElasticsearchParseException("could not read search request. unexpected index option [" + @@ -255,21 +245,21 @@ public class WatcherSearchTemplateRequest implements ToXContent { } indicesOptions = IndicesOptions.fromOptions(ignoreUnavailable, allowNoIndices, expandOpen, expandClosed, DEFAULT_INDICES_OPTIONS); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, TEMPLATE_FIELD)) { - template = Script.parse(parser, ParseFieldMatcher.STRICT, Script.DEFAULT_TEMPLATE_LANG); + } else if (TEMPLATE_FIELD.match(currentFieldName)) { + template = Script.parse(parser, Script.DEFAULT_TEMPLATE_LANG); } else { throw new ElasticsearchParseException("could not read search request. 
unexpected object field [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.VALUE_STRING) { - if (ParseFieldMatcher.STRICT.match(currentFieldName, INDICES_FIELD)) { + if (INDICES_FIELD.match(currentFieldName)) { String indicesStr = parser.text(); indices.addAll(Arrays.asList(Strings.delimitedListToStringArray(indicesStr, ",", " \t"))); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, TYPES_FIELD)) { + } else if (TYPES_FIELD.match(currentFieldName)) { String typesStr = parser.text(); types.addAll(Arrays.asList(Strings.delimitedListToStringArray(typesStr, ",", " \t"))); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, SEARCH_TYPE_FIELD)) { - searchType = SearchType.fromString(parser.text().toLowerCase(Locale.ROOT), ParseFieldMatcher.EMPTY); + } else if (SEARCH_TYPE_FIELD.match(currentFieldName)) { + searchType = SearchType.fromString(parser.text().toLowerCase(Locale.ROOT)); } else { throw new ElasticsearchParseException("could not read search request. 
unexpected string field [" + currentFieldName + "]"); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java index eee670aa7ef..b4adbc71bcf 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java @@ -74,8 +74,7 @@ public class WatcherSearchTemplateService extends AbstractComponent { BytesReference source = request.getSearchSource(); if (source != null && source.length() > 0) { try (XContentParser parser = XContentFactory.xContent(source).createParser(xContentRegistry, source)) { - sourceBuilder.parseXContent(new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers); + sourceBuilder.parseXContent(new QueryParseContext(parser, parseFieldMatcher)); searchRequest.source(sourceBuilder); } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java index b8c7cae702b..8c3081123e6 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java @@ -272,11 +272,6 @@ public class WatcherXContentParser implements XContentParser { return parser.getTokenLocation(); } - @Override - public boolean isClosed() { - return parser.isClosed(); - } - @Override public T namedObject(Class categoryClass, String name, Object context) throws IOException { return 
parser.namedObject(categoryClass, name, context); @@ -287,6 +282,11 @@ public class WatcherXContentParser implements XContentParser { return parser.getXContentRegistry(); } + @Override + public boolean isClosed() { + return parser.isClosed(); + } + @Override public void close() throws ElasticsearchException { parser.close(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java index 5efbc778fb6..853cd52e278 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java @@ -9,6 +9,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.watch.Payload; @@ -19,7 +20,7 @@ public interface Transform extends ToXContent { String type(); - abstract class Result implements ToXContent { + abstract class Result implements ToXContentObject { public enum Status { SUCCESS, FAILURE @@ -67,7 +68,7 @@ public interface Transform extends ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Field.TYPE.getPreferredName(), type); builder.field(Field.STATUS.getPreferredName(), status.name().toLowerCase(Locale.ROOT)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransform.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransform.java index 
1ad253b1248..bbcb7de73ed 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransform.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransform.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.transform.script; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; @@ -56,7 +55,7 @@ public class ScriptTransform implements Transform { public static ScriptTransform parse(String watchId, XContentParser parser) throws IOException { try { - Script script = Script.parse(parser, ParseFieldMatcher.STRICT); + Script script = Script.parse(parser); return new ScriptTransform(script); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse [{}] transform for watch [{}]. 
failed to parse script", pe, TYPE, diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java index c5f9749ead7..d8c87b42c99 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransform.java @@ -5,15 +5,12 @@ */ package org.elasticsearch.xpack.watcher.transform.search; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.transform.Transform; @@ -91,10 +88,7 @@ public class SearchTransform implements Transform { return builder; } - public static SearchTransform parse(Logger transformLogger, String watchId, - XContentParser parser, - ParseFieldMatcher parseFieldMatcher, - SearchRequestParsers searchRequestParsers) throws IOException { + public static SearchTransform parse(String watchId, XContentParser parser) throws IOException { WatcherSearchTemplateRequest request = null; TimeValue timeout = null; DateTimeZone dynamicNameTimeZone = null; @@ -104,20 +98,19 @@ public class SearchTransform implements Transform { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if 
(ParseFieldMatcher.STRICT.match(currentFieldName, Field.REQUEST)) { + } else if (Field.REQUEST.match(currentFieldName)) { try { - request = WatcherSearchTemplateRequest.fromXContent(transformLogger, parser, - ExecutableSearchTransform.DEFAULT_SEARCH_TYPE, parseFieldMatcher, searchRequestParsers); + request = WatcherSearchTemplateRequest.fromXContent(parser, ExecutableSearchTransform.DEFAULT_SEARCH_TYPE); } catch (ElasticsearchParseException srpe) { throw new ElasticsearchParseException("could not parse [{}] transform for watch [{}]. failed to parse [{}]", srpe, TYPE, watchId, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT)) { + } else if (Field.TIMEOUT.match(currentFieldName)) { timeout = timeValueMillis(parser.longValue()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMEOUT_HUMAN)) { + } else if (Field.TIMEOUT_HUMAN.match(currentFieldName)) { // Parser for human specified timeouts and 2.x compatibility timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.DYNAMIC_NAME_TIMEZONE)) { + } else if (Field.DYNAMIC_NAME_TIMEZONE.match(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { dynamicNameTimeZone = DateTimeZone.forID(parser.text()); } else { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java index a8b330b2367..e4ef8ffc035 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.transform.search; -import org.elasticsearch.common.ParseFieldMatcher; import 
org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -23,8 +22,6 @@ import java.io.IOException; public class SearchTransformFactory extends TransformFactory { protected final WatcherClientProxy client; private final TimeValue defaultTimeout; - private final SearchRequestParsers searchRequestParsers; - private final ParseFieldMatcher parseFieldMatcher; private final WatcherSearchTemplateService searchTemplateService; public SearchTransformFactory(Settings settings, InternalClient client, SearchRequestParsers searchRequestParsers, @@ -36,8 +33,6 @@ public class SearchTransformFactory extends TransformFactory { - if (getResponse.isExists() == false) { + if (getResponse.isExists()) { + try (XContentBuilder builder = jsonBuilder()) { + // When we return the watch via the Get Watch REST API, we want to return the watch as was specified in the put api, + // we don't include the status in the watch source itself, but as a separate top level field, so that + // it indicates the the status is managed by watcher itself. 
+ DateTime now = new DateTime(clock.millis(), UTC); + Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now); + watch.toXContent(builder, WatcherParams.builder() + .hideSecrets(true) + .put(Watch.INCLUDE_STATUS_KEY, false) + .build()); + watch.version(getResponse.getVersion()); + watch.status().version(getResponse.getVersion()); + listener.onResponse(new GetWatchResponse(watch.id(), watch.status(), builder.bytes(), XContentType.JSON)); + } + } else { listener.onResponse(new GetWatchResponse(request.getId())); } - - try (XContentBuilder builder = jsonBuilder()) { - // When we return the watch via the Get Watch REST API, we want to return the watch as was specified in the put api, - // we don't include the status in the watch source itself, but as a separate top level field, so that - // it indicates the the status is managed by watcher itself. - DateTime now = new DateTime(clock.millis(), UTC); - Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now); - watch.toXContent(builder, WatcherParams.builder() - .hideSecrets(true) - .put(Watch.INCLUDE_STATUS_KEY, false) - .build()); - watch.version(getResponse.getVersion()); - watch.status().version(getResponse.getVersion()); - listener.onResponse(new GetWatchResponse(watch.id(), watch.status(), builder.bytes(), XContentType.JSON)); - } }, listener::onFailure)); } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java index e4f175e07ab..17187acfc1f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionResponse; import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.WatcherMetaData; import org.elasticsearch.xpack.watcher.WatcherState; @@ -21,7 +21,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -public class WatcherStatsResponse extends ActionResponse implements ToXContent { +public class WatcherStatsResponse extends ActionResponse implements ToXContentObject { private long watchesCount; private WatcherState watcherState; @@ -127,7 +127,7 @@ public class WatcherStatsResponse extends ActionResponse implements ToXContent { queuedWatches.add(new QueuedWatch(in)); } } - watcherMetaData = (WatcherMetaData) WatcherMetaData.PROTO.readFrom(in); + watcherMetaData = new WatcherMetaData(in); } @Override diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java index cbe98b87bde..9090aa072ae 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.watcher.trigger; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -public interface Trigger extends ToXContent { +public interface Trigger extends ToXContentObject { String type(); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java index 
c00b4dfa561..ecb83d1f4a7 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.watcher.trigger; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; import org.joda.time.DateTime; @@ -15,7 +15,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -public abstract class TriggerEvent implements ToXContent { +public abstract class TriggerEvent implements ToXContentObject { private final String jobName; protected final DateTime triggeredTime; diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/DailySchedule.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/DailySchedule.java index 3c5ee6c8371..5624afec877 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/DailySchedule.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/DailySchedule.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.trigger.schedule.support.DayTimes; @@ -89,7 +88,7 @@ public class DailySchedule extends CronnableSchedule { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if 
(ParseFieldMatcher.STRICT.match(currentFieldName, AT_FIELD)) { + } else if (AT_FIELD.match(currentFieldName)) { if (token != XContentParser.Token.START_ARRAY) { try { times.add(DayTimes.parse(parser, token)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/HourlySchedule.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/HourlySchedule.java index 88d6f13e1ee..50310cb6d4f 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/HourlySchedule.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/HourlySchedule.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -101,7 +100,7 @@ public class HourlySchedule extends CronnableSchedule { currentFieldName = parser.currentName(); } else if (currentFieldName == null) { throw new ElasticsearchParseException("could not parse [{}] schedule. 
unexpected token [{}]", TYPE, token); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, MINUTE_FIELD)) { + } else if (MINUTE_FIELD.match(currentFieldName)) { if (token.isValue()) { try { minutes.add(DayTimes.parseMinuteValue(parser, token)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEvent.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEvent.java index 3eb1ae94b96..1a8c4985a5e 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEvent.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEvent.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; @@ -65,7 +64,7 @@ public class ScheduleTriggerEvent extends TriggerEvent { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TRIGGERED_TIME)) { + } else if (Field.TRIGGERED_TIME.match(currentFieldName)) { try { triggeredTime = WatcherDateTimeUtils.parseDateMath(currentFieldName, parser, DateTimeZone.UTC, clock); } catch (ElasticsearchParseException pe) { @@ -73,7 +72,7 @@ public class ScheduleTriggerEvent extends TriggerEvent { throw new ElasticsearchParseException("could not parse [{}] trigger event for [{}] for watch [{}]. 
failed to parse " + "date field [{}]", pe, ScheduleTriggerEngine.TYPE, context, watchId, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.SCHEDULED_TIME)) { + } else if (Field.SCHEDULED_TIME.match(currentFieldName)) { try { scheduledTime = WatcherDateTimeUtils.parseDateMath(currentFieldName, parser, DateTimeZone.UTC, clock); } catch (ElasticsearchParseException pe) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java index da416896429..63efae3a910 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/DayTimes.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.support; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -197,7 +196,7 @@ public class DayTimes implements Times { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, HOUR_FIELD)) { + } else if (HOUR_FIELD.match(currentFieldName)) { if (token.isValue()) { hours.add(parseHourValue(parser, token)); } else if (token == XContentParser.Token.START_ARRAY) { @@ -208,7 +207,7 @@ public class DayTimes implements Times { throw new ElasticsearchParseException("invalid time hour value. 
expected string/number value or an array of " + "string/number values, but found [{}]", token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, MINUTE_FIELD)) { + } else if (MINUTE_FIELD.match(currentFieldName)) { if (token.isValue()) { minutes.add(parseMinuteValue(parser, token)); } else if (token == XContentParser.Token.START_ARRAY) { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java index 2bf28ac3f83..7954930fe85 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/MonthTimes.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.support; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -155,7 +154,7 @@ public class MonthTimes implements Times { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, DAY_FIELD)) { + } else if (DAY_FIELD.match(currentFieldName)) { if (token.isValue()) { daysSet.add(parseDayValue(parser, token)); } else if (token == XContentParser.Token.START_ARRAY) { @@ -166,7 +165,7 @@ public class MonthTimes implements Times { throw new ElasticsearchParseException("invalid month day value for [{}] field. 
expected string/number value or an " + "array of string/number values, but found [{}]", currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, TIME_FIELD)) { + } else if (TIME_FIELD.match(currentFieldName)) { if (token != XContentParser.Token.START_ARRAY) { try { timesSet.add(DayTimes.parse(parser, token)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/WeekTimes.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/WeekTimes.java index 3c9224036e8..89c57d08460 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/WeekTimes.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/WeekTimes.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.support; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -112,7 +111,7 @@ public class WeekTimes implements Times { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, DAY_FIELD)) { + } else if (DAY_FIELD.match(currentFieldName)) { if (token.isValue()) { daysSet.add(parseDayValue(parser, token)); } else if (token == XContentParser.Token.START_ARRAY) { @@ -123,7 +122,7 @@ public class WeekTimes implements Times { throw new ElasticsearchParseException("invalid week day value for [{}] field. 
expected string/number value or an " + "array of string/number values, but found [{}]", currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, TIME_FIELD)) { + } else if (TIME_FIELD.match(currentFieldName)) { if (token != XContentParser.Token.START_ARRAY) { try { timesSet.add(DayTimes.parse(parser, token)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java index 1a82764c1ba..195497f7c34 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/YearTimes.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.support; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -145,7 +144,7 @@ public class YearTimes implements Times { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, MONTH_FIELD)) { + } else if (MONTH_FIELD.match(currentFieldName)) { if (token.isValue()) { monthsSet.add(parseMonthValue(parser, token)); } else if (token == XContentParser.Token.START_ARRAY) { @@ -156,7 +155,7 @@ public class YearTimes implements Times { throw new ElasticsearchParseException("invalid year month value for [{}] field. 
expected string/number value or an " + "array of string/number values, but found [{}]", currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, DAY_FIELD)) { + } else if (DAY_FIELD.match(currentFieldName)) { if (token.isValue()) { daysSet.add(MonthTimes.parseDayValue(parser, token)); } else if (token == XContentParser.Token.START_ARRAY) { @@ -167,7 +166,7 @@ public class YearTimes implements Times { throw new ElasticsearchParseException("invalid year day value for [{}] field. expected string/number value or an " + "array of string/number values, but found [{}]", currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, TIME_FIELD)) { + } else if (TIME_FIELD.match(currentFieldName)) { if (token != XContentParser.Token.START_ARRAY) { try { timesSet.add(DayTimes.parse(parser, token)); diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java index 9e97016d7b7..d53bb24bae6 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.watch; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -19,7 +20,7 @@ import static org.elasticsearch.xpack.watcher.support.WatcherUtils.responseToDat public interface Payload extends ToXContent { - Simple EMPTY = new Simple(Collections.emptyMap()); + Simple EMPTY = new Simple(Collections.emptyMap()); Map data(); @@ -28,7 +29,7 @@ public interface Payload extends ToXContent { private final Map data; public Simple() { - this(new HashMap()); + this(new HashMap<>()); } public Simple(String key, 
Object value) { @@ -73,10 +74,8 @@ public interface Payload extends ToXContent { } class XContent extends Simple { - - public XContent(ToXContent response) throws IOException { + public XContent(ToXContentObject response) throws IOException { super(responseToData(response)); } - } } diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java index 0dae7d04f1d..d8a61ca040c 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.watch; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; @@ -16,7 +15,7 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.common.secret.Secret; @@ -32,7 +31,6 @@ import org.elasticsearch.xpack.watcher.input.ExecutableInput; import org.elasticsearch.xpack.watcher.input.InputRegistry; import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; -import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import 
org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser; import org.elasticsearch.xpack.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.watcher.trigger.Trigger; @@ -46,17 +44,15 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentHelper.createParser; import static org.elasticsearch.xpack.watcher.support.Exceptions.ioException; import static org.joda.time.DateTimeZone.UTC; -public class Watch implements TriggerEngine.Job, ToXContent { +public class Watch implements TriggerEngine.Job, ToXContentObject { public static final String ALL_ACTIONS_ID = "_all"; public static final String INCLUDE_STATUS_KEY = "include_status"; @@ -198,12 +194,6 @@ public class Watch implements TriggerEngine.Job, ToXContent { return builder; } - public BytesReference getAsBytes() throws IOException { - // we don't want to cache this and instead rebuild it every time on demand. 
The watch is in - // memory and we don't need this redundancy - return toXContent(jsonBuilder(), WatcherParams.builder().put(Watch.INCLUDE_STATUS_KEY, true).build()).bytes(); - } - public static class Parser extends AbstractComponent { private final TriggerService triggerService; @@ -291,17 +281,17 @@ public class Watch implements TriggerEngine.Job, ToXContent { currentFieldName = parser.currentName(); } else if (token == null || currentFieldName == null) { throw new ElasticsearchParseException("could not parse watch [{}], unexpected token [{}]", id, token); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TRIGGER)) { + } else if (Field.TRIGGER.match(currentFieldName)) { trigger = triggerService.parseTrigger(id, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.INPUT)) { + } else if (Field.INPUT.match(currentFieldName)) { input = inputRegistry.parse(id, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.CONDITION)) { + } else if (Field.CONDITION.match(currentFieldName)) { condition = actionRegistry.getConditionRegistry().parseExecutable(id, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TRANSFORM)) { + } else if (Field.TRANSFORM.match(currentFieldName)) { transform = actionRegistry.getTransformRegistry().parse(id, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.THROTTLE_PERIOD)) { + } else if (Field.THROTTLE_PERIOD.match(currentFieldName)) { throttlePeriod = timeValueMillis(parser.longValue()); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.THROTTLE_PERIOD_HUMAN)) { + } else if (Field.THROTTLE_PERIOD_HUMAN.match(currentFieldName)) { // Parser for human specified and 2.x backwards compatible throttle period try { throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Field.THROTTLE_PERIOD_HUMAN.toString()); @@ -309,11 +299,11 @@ public class Watch implements TriggerEngine.Job, ToXContent { throw new 
ElasticsearchParseException("could not parse watch [{}]. failed to parse time value for field [{}]", pe, id, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACTIONS)) { + } else if (Field.ACTIONS.match(currentFieldName)) { actions = actionRegistry.parseActions(id, parser); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.METADATA)) { + } else if (Field.METADATA.match(currentFieldName)) { metatdata = parser.map(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.STATUS)) { + } else if (Field.STATUS.match(currentFieldName)) { if (includeStatus) { status = WatchStatus.parse(id, parser, clock); } else { diff --git a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java index b5ed38af7ac..c084078bab3 100644 --- a/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java +++ b/elasticsearch/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java @@ -8,11 +8,11 @@ package org.elasticsearch.xpack.watcher.watch; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.actions.Action; @@ -36,7 +36,7 @@ import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.write import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.writeOptionalDate; import static 
org.joda.time.DateTimeZone.UTC; -public class WatchStatus implements ToXContent, Streamable { +public class WatchStatus implements ToXContentObject, Streamable { private State state; @@ -235,35 +235,35 @@ public class WatchStatus implements ToXContent, Streamable { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.STATE)) { + } else if (Field.STATE.match(currentFieldName)) { try { state = State.parse(parser, clock); } catch (ElasticsearchParseException e) { throw new ElasticsearchParseException("could not parse watch status for [{}]. failed to parse field [{}]", e, watchId, currentFieldName); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.VERSION)) { + } else if (Field.VERSION.match(currentFieldName)) { if (token.isValue()) { version = parser.longValue(); } else { throw new ElasticsearchParseException("could not parse watch status for [{}]. expecting field [{}] to hold a long " + "value, found [{}] instead", watchId, currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LAST_CHECKED)) { + } else if (Field.LAST_CHECKED.match(currentFieldName)) { if (token.isValue()) { lastChecked = parseDate(currentFieldName, parser, UTC); } else { throw new ElasticsearchParseException("could not parse watch status for [{}]. expecting field [{}] to hold a date " + "value, found [{}] instead", watchId, currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.LAST_MET_CONDITION)) { + } else if (Field.LAST_MET_CONDITION.match(currentFieldName)) { if (token.isValue()) { lastMetCondition = parseDate(currentFieldName, parser, UTC); } else { throw new ElasticsearchParseException("could not parse watch status for [{}]. 
expecting field [{}] to hold a date " + "value, found [{}] instead", watchId, currentFieldName, token); } - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACTIONS)) { + } else if (Field.ACTIONS.match(currentFieldName)) { actions = new HashMap<>(); if (token == XContentParser.Token.START_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -329,9 +329,9 @@ public class WatchStatus implements ToXContent, Streamable { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.ACTIVE)) { + } else if (Field.ACTIVE.match(currentFieldName)) { active = parser.booleanValue(); - } else if (ParseFieldMatcher.STRICT.match(currentFieldName, Field.TIMESTAMP)) { + } else if (Field.TIMESTAMP.match(currentFieldName)) { timestamp = parseDate(currentFieldName, parser, UTC); } } diff --git a/elasticsearch/src/main/plugin-metadata/plugin-security.policy b/elasticsearch/src/main/plugin-metadata/plugin-security.policy index 25f2ea78531..a0441496ed4 100644 --- a/elasticsearch/src/main/plugin-metadata/plugin-security.policy +++ b/elasticsearch/src/main/plugin-metadata/plugin-security.policy @@ -2,9 +2,8 @@ grant { // needed because of problems in unbound LDAP library permission java.util.PropertyPermission "*", "read,write"; - // needed to set expert SSL options, etc + // required to configure the custom mailcap for watcher permission java.lang.RuntimePermission "setFactory"; - permission javax.net.ssl.SSLPermission "setHostnameVerifier"; // needed when sending emails for javax.activation // otherwise a classnotfound exception is thrown due to trying diff --git a/elasticsearch/src/main/resources/monitoring-data.json b/elasticsearch/src/main/resources/monitoring-data.json index a694499c8ea..84f1ebcea81 100644 --- a/elasticsearch/src/main/resources/monitoring-data.json 
+++ b/elasticsearch/src/main/resources/monitoring-data.json @@ -3,10 +3,12 @@ "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 1, - "index.codec": "best_compression", - "index.mapper.dynamic": false + "index.codec": "best_compression" }, "mappings": { + "_default_": { + "enabled": false + }, "cluster_info": { "enabled": false, "_meta": { @@ -18,6 +20,9 @@ }, "node": { "enabled": false + }, + "logstash": { + "enabled": false } } } diff --git a/elasticsearch/src/main/resources/monitoring-es.json b/elasticsearch/src/main/resources/monitoring-es.json index 3c434831ad6..22b334e4d62 100644 --- a/elasticsearch/src/main/resources/monitoring-es.json +++ b/elasticsearch/src/main/resources/monitoring-es.json @@ -581,7 +581,59 @@ } }, "os": { - "type": "object" + "properties": { + "cgroup": { + "properties": { + "cpuacct": { + "properties": { + "control_group": { + "type": "keyword" + }, + "usage_nanos": { + "type": "long" + } + } + }, + "cpu": { + "properties": { + "control_group": { + "type": "keyword" + }, + "stat": { + "properties": { + "number_of_elapsed_periods": { + "type": "long" + }, + "number_of_times_throttled": { + "type": "long" + }, + "time_throttled_nanos": { + "type": "long" + } + } + } + } + } + } + }, + "cpu": { + "properties": { + "load_average": { + "properties": { + "1m": { + "type": "half_float" + }, + "5m": { + "type": "half_float" + }, + "15m": { + "type": "half_float" + } + } + } + } + } + } }, "process": { "type": "object" diff --git a/elasticsearch/src/main/resources/monitoring-logstash.json b/elasticsearch/src/main/resources/monitoring-logstash.json new file mode 100644 index 00000000000..02124ccc3a4 --- /dev/null +++ b/elasticsearch/src/main/resources/monitoring-logstash.json @@ -0,0 +1,211 @@ +{ + "template": ".monitoring-logstash-${monitoring.template.version}-*", + "settings": { + "index.number_of_shards": 1, + "index.number_of_replicas": 1, + "index.codec": "best_compression" + }, + "mappings": { + "_default_": { + "_all": 
{ + "enabled": false + }, + "properties": { + "cluster_uuid": { + "type": "keyword" + }, + "timestamp": { + "type": "date", + "format": "date_time" + }, + "source_node": { + "properties": { + "uuid": { + "type": "keyword" + }, + "host": { + "type": "keyword" + }, + "transport_address": { + "type": "keyword" + }, + "ip": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "attributes": { + "dynamic": true, + "properties": { + "data": { + "type": "boolean" + }, + "master": { + "type": "boolean" + }, + "client": { + "type": "boolean" + } + } + } + } + } + } + }, + "logstash_stats": { + "properties": { + "logstash_stats": { + "properties": { + "logstash": { + "properties": { + "uuid": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "host": { + "type": "keyword" + }, + "http_address": { + "type": "keyword" + }, + "version": { + "type": "keyword" + }, + "snapshot": { + "type": "boolean" + }, + "status": { + "type": "keyword" + }, + "pipeline": { + "properties": { + "workers": { + "type": "short" + }, + "batch_size": { + "type": "long" + } + } + } + } + }, + "events": { + "properties": { + "filtered": { + "type": "long" + }, + "in": { + "type": "long" + }, + "out": { + "type": "long" + }, + "duration_in_millis": { + "type": "long" + } + } + }, + "timestamp": { + "type": "date" + }, + "jvm": { + "properties": { + "uptime_in_millis": { + "type": "long" + }, + "gc": { + "properties": { + "collectors": { + "properties": { + "old": { + "properties": { + "collection_count": { + "type": "long" + }, + "collection_time_in_millis": { + "type": "long" + } + } + }, + "young": { + "properties": { + "collection_count": { + "type": "long" + }, + "collection_time_in_millis": { + "type": "long" + } + } + } + } + } + } + }, + "mem": { + "properties": { + "heap_max_in_bytes": { + "type": "long" + }, + "heap_used_in_bytes": { + "type": "long" + }, + "heap_used_percent": { + "type": "long" + } + } + } + } + }, + "os": { + "properties": { + "load": { + 
"properties": { + "1m": { + "type": "half_float" + }, + "5m": { + "type": "half_float" + }, + "15m": { + "type": "half_float" + } + } + } + } + }, + "process": { + "properties": { + "cpu": { + "properties": { + "percent": { + "type": "long" + } + } + }, + "max_file_descriptors": { + "type": "long" + }, + "open_file_descriptors": { + "type": "long" + } + } + }, + "reloads": { + "properties": { + "failures": { + "type": "long" + }, + "successes": { + "type": "long" + } + } + } + } + } + } + } + } +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityTests.java b/elasticsearch/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityTests.java index ee66d9c1a01..33ab9020d45 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/OldSecurityIndexBackwardsCompatibilityTests.java @@ -109,19 +109,19 @@ public class OldSecurityIndexBackwardsCompatibilityTests extends AbstractOldXPac RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0]; assertThat(indicesPrivileges.getIndices(), arrayWithSize(2)); assertArrayEquals(new String[] { "index1", "index2" }, indicesPrivileges.getIndices()); - assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("title")); - assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("body")); + final FieldPermissions fieldPermissions = + new FieldPermissions(indicesPrivileges.getGrantedFields(), indicesPrivileges.getDeniedFields()); + assertTrue(fieldPermissions.grantsAccessTo("title")); + assertTrue(fieldPermissions.grantsAccessTo("body")); assertArrayEquals(new String[] { "all" }, indicesPrivileges.getPrivileges()); - assertEquals("{\"match\": {\"title\": \"foo\"}}", indicesPrivileges.getQuery().utf8ToString()); + assertEquals("{\"match\": {\"title\": \"foo\"}}", indicesPrivileges.getQuery().iterator().next().utf8ToString()); 
assertArrayEquals(new String[] { "all" }, role.getClusterPrivileges()); assertArrayEquals(new String[] { "other_user" }, role.getRunAs()); assertEquals("bwc_test_role", role.getName()); // check x-content is rendered in new format although it comes from an old index XContentBuilder builder = jsonBuilder(); - builder.startObject(); - indicesPrivileges.getFieldPermissions().toXContent(builder, null); - builder.endObject(); - assertThat(builder.string(), equalTo("{\"field_security\":{\"grant\":[\"title\",\"body\"]}}")); + indicesPrivileges.toXContent(builder, null); + assertThat(builder.string(), containsString("\"field_security\":{\"grant\":[\"title\",\"body\"]}")); logger.info("Getting users..."); assertBusy(() -> { @@ -167,7 +167,8 @@ public class OldSecurityIndexBackwardsCompatibilityTests extends AbstractOldXPac PutRoleResponse roleResponse = securityClient.preparePutRole("test_role").addIndices( new String[] { "index3" }, new String[] { "all" }, - new FieldPermissions(new String[]{"title", "body"}, null), + new String[] { "title", "body" }, + null, new BytesArray("{\"term\": {\"title\":\"not\"}}")).cluster("all") .get(); assertTrue(roleResponse.isCreated()); diff --git a/elasticsearch/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java b/elasticsearch/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java index a37109afd39..3428515fa98 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java @@ -14,14 +14,14 @@ import org.elasticsearch.xpack.security.SecurityTemplateService; import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.security.action.role.GetRolesResponse; import org.elasticsearch.xpack.security.action.role.PutRoleResponse; -import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; -import 
org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.client.SecurityClient; import org.junit.Before; import org.junit.BeforeClass; import java.util.Arrays; +import java.util.Collection; import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -52,7 +52,7 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase { for (String role : roles) { c.preparePutRole(role) .cluster("none") - .addIndices(new String[] { "*" }, new String[] { "ALL" }, new FieldPermissions(), null) + .addIndices(new String[] { "*" }, new String[] { "ALL" }, null, null, null) .get(); logger.debug("--> created role [{}]", role); } @@ -61,11 +61,9 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase { // warm up the caches on every node for (NativeRolesStore rolesStore : internalCluster().getInstances(NativeRolesStore.class)) { - for (String role : roles) { - PlainActionFuture future = new PlainActionFuture<>(); - rolesStore.role(role, future); - assertThat(future.actionGet(), notNullValue()); - } + PlainActionFuture> future = new PlainActionFuture<>(); + rolesStore.getRoleDescriptors(roles, future); + assertThat(future.actionGet(), notNullValue()); } } @@ -87,7 +85,7 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase { for (String role : toModify) { PutRoleResponse response = securityClient.preparePutRole(role) .cluster("none") - .addIndices(new String[] { "*" }, new String[] { "ALL" }, new FieldPermissions(), null) + .addIndices(new String[] { "*" }, new String[] { "ALL" }, null, null, null) .runAs(role) .setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE) .get(); diff --git a/elasticsearch/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/elasticsearch/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index c36177995bc..2ed5755c3be 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -136,7 +136,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { QueryBuilder combined = QueryBuilders.boolQuery() .should(QueryBuilders.termQuery("field2", "value2")) .should(QueryBuilders.termQuery("field1", "value1")) - .minimumNumberShouldMatch(1); + .minimumShouldMatch(1); response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareSearch("test") .setQuery(randomBoolean() ? combined : QueryBuilders.matchAllQuery()) diff --git a/elasticsearch/src/test/java/org/elasticsearch/license/LicenseTribeTests.java b/elasticsearch/src/test/java/org/elasticsearch/license/LicenseTribeTests.java index 812214d1cc6..2ad39e68dad 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/license/LicenseTribeTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/license/LicenseTribeTests.java @@ -35,8 +35,9 @@ public class LicenseTribeTests extends TribeTransportTestCase { } @Override - protected void verifyActionOnTribeNode(Client tribeClient) { - failAction(tribeClient, GetLicenseAction.INSTANCE); + protected void verifyActionOnTribeNode(Client tribeClient) throws Exception { + // The get licence action should work, but everything else should fail + tribeClient.execute(GetLicenseAction.INSTANCE, new GetLicenseRequest()).get(); failAction(tribeClient, PutLicenseAction.INSTANCE); failAction(tribeClient, DeleteLicenseAction.INSTANCE); } diff --git 
a/elasticsearch/src/test/java/org/elasticsearch/license/LicensesMetaDataSerializationTests.java b/elasticsearch/src/test/java/org/elasticsearch/license/LicensesMetaDataSerializationTests.java index f4a9e3c5a62..0beb6ae9d7d 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/license/LicensesMetaDataSerializationTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/license/LicensesMetaDataSerializationTests.java @@ -5,11 +5,13 @@ */ package org.elasticsearch.license; +import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -19,6 +21,8 @@ import org.elasticsearch.test.ESTestCase; import java.util.Collections; import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -46,11 +50,11 @@ public class LicensesMetaDataSerializationTests extends ESTestCase { RepositoriesMetaData repositoriesMetaData = new RepositoriesMetaData(repositoryMetaData); final MetaData.Builder metaDataBuilder = MetaData.builder(); if (randomBoolean()) { // random order of insertion - metaDataBuilder.putCustom(licensesMetaData.type(), licensesMetaData); - metaDataBuilder.putCustom(repositoriesMetaData.type(), repositoriesMetaData); + metaDataBuilder.putCustom(licensesMetaData.getWriteableName(), licensesMetaData); + metaDataBuilder.putCustom(repositoriesMetaData.getWriteableName(), repositoriesMetaData); } else { - 
metaDataBuilder.putCustom(repositoriesMetaData.type(), repositoriesMetaData); - metaDataBuilder.putCustom(licensesMetaData.type(), licensesMetaData); + metaDataBuilder.putCustom(repositoriesMetaData.getWriteableName(), repositoriesMetaData); + metaDataBuilder.putCustom(licensesMetaData.getWriteableName(), licensesMetaData); } // serialize metadata XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -61,8 +65,8 @@ public class LicensesMetaDataSerializationTests extends ESTestCase { // deserialize metadata again MetaData metaData = MetaData.Builder.fromXContent(createParser(builder)); // check that custom metadata still present - assertThat(metaData.custom(licensesMetaData.type()), notNullValue()); - assertThat(metaData.custom(repositoriesMetaData.type()), notNullValue()); + assertThat(metaData.custom(licensesMetaData.getWriteableName()), notNullValue()); + assertThat(metaData.custom(repositoriesMetaData.getWriteableName()), notNullValue()); } public void testXContentSerializationOneTrial() throws Exception { @@ -99,9 +103,17 @@ public class LicensesMetaDataSerializationTests extends ESTestCase { private static LicensesMetaData getLicensesMetaDataFromXContent(XContentParser parser) throws Exception { parser.nextToken(); // consume null parser.nextToken(); // consume "licenses" - LicensesMetaData licensesMetaDataFromXContent = LicensesMetaData.PROTO.fromXContent(parser); + LicensesMetaData licensesMetaDataFromXContent = LicensesMetaData.fromXContent(parser); parser.nextToken(); // consume endObject assertThat(parser.nextToken(), nullValue()); return licensesMetaDataFromXContent; } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(Stream.concat( + new Licensing(Settings.EMPTY).getNamedXContent().stream(), + ClusterModule.getNamedXWriteables().stream() + ).collect(Collectors.toList())); + } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java 
b/elasticsearch/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java index 1267a1bad73..48fadc77499 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java +++ b/elasticsearch/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java @@ -136,11 +136,15 @@ public abstract class TribeTransportTestCase extends ESIntegTestCase { .put(tribe2Defaults.build()) .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(internalCluster().getDefaultSettings()) + .put(XPackSettings.SECURITY_ENABLED.getKey(), false) // otherwise it conflicts with mock transport + .put("tribe.t1." + XPackSettings.SECURITY_ENABLED.getKey(), false) + .put("tribe.t2." + XPackSettings.SECURITY_ENABLED.getKey(), false) .put("node.name", "tribe_node") // make sure we can identify threads from this node .put("transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME) .build(); - final List> mockPlugins = Arrays.asList(TestZenDiscovery.TestPlugin.class, MockTcpTransportPlugin.class); + final List> mockPlugins = Arrays.asList(TestZenDiscovery.TestPlugin.class, MockTcpTransportPlugin.class, + XPackPlugin.class); final Node tribeNode = new MockNode(merged, mockPlugins).start(); Client tribeClient = tribeNode.client(); @@ -189,7 +193,7 @@ public abstract class TribeTransportTestCase extends ESIntegTestCase { /** * Verify transport action behaviour on tribe node */ - protected abstract void verifyActionOnTribeNode(Client tribeClient); + protected abstract void verifyActionOnTribeNode(Client tribeClient) throws Exception; protected void failAction(Client client, Action action) { try { diff --git a/elasticsearch/src/test/java/org/elasticsearch/test/http/Headers.java b/elasticsearch/src/test/java/org/elasticsearch/test/http/Headers.java index 2a5f6f44587..a5d735be766 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/test/http/Headers.java +++ b/elasticsearch/src/test/java/org/elasticsearch/test/http/Headers.java @@ -7,6 +7,7 @@ package 
org.elasticsearch.test.http; import org.elasticsearch.common.SuppressForbidden; +import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -48,7 +49,16 @@ public class Headers { * @param value Value of the header */ void add(String name, String value) { - this.headers.put(name, Collections.singletonList(value)); + this.headers.compute(name, (k, v) -> { + if (v == null) { + return Collections.singletonList(value); + } else { + List list = new ArrayList<>(); + list.addAll(v); + list.add(value); + return list; + } + }); } /** diff --git a/elasticsearch/src/test/java/org/elasticsearch/test/http/MockWebServer.java b/elasticsearch/src/test/java/org/elasticsearch/test/http/MockWebServer.java index ce6239061e3..f6d0b244144 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/test/http/MockWebServer.java +++ b/elasticsearch/src/test/java/org/elasticsearch/test/http/MockWebServer.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.test.http; -import com.google.common.base.Charsets; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpServer; import com.sun.net.httpserver.HttpsConfigurator; @@ -20,6 +19,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.mocksocket.MockHttpServer; import javax.net.ssl.SSLContext; import java.io.Closeable; @@ -28,6 +28,7 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Queue; @@ -83,11 +84,11 @@ public class MockWebServer implements Closeable { public void start() throws IOException { InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0); if (sslContext != 
null) { - HttpsServer httpsServer = HttpsServer.create(address, 0); + HttpsServer httpsServer = MockHttpServer.createHttps(address, 0); httpsServer.setHttpsConfigurator(new CustomHttpsConfigurator(sslContext, needClientAuth)); server = httpsServer; } else { - server = HttpServer.create(address, 0); + server = MockHttpServer.createHttp(address, 0); } server.start(); @@ -109,7 +110,7 @@ public class MockWebServer implements Closeable { if (Strings.isEmpty(response.getBody())) { s.sendResponseHeaders(response.getStatusCode(), 0); } else { - byte[] responseAsBytes = response.getBody().getBytes(Charsets.UTF_8); + byte[] responseAsBytes = response.getBody().getBytes(StandardCharsets.UTF_8); s.sendResponseHeaders(response.getStatusCode(), responseAsBytes.length); sleepIfNeeded(response.getBodyDelay()); try (OutputStream responseBody = s.getResponseBody()) { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpClientTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpClientTests.java index 5bc266a61ac..82359947bd2 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpClientTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpClientTests.java @@ -6,8 +6,10 @@ package org.elasticsearch.xpack.common.http; import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.apache.http.client.ClientProtocolException; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; @@ -15,7 +17,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.test.junit.annotations.Network; -import 
org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry; import org.elasticsearch.xpack.common.http.auth.basic.BasicAuth; import org.elasticsearch.xpack.common.http.auth.basic.BasicAuthFactory; @@ -32,15 +33,19 @@ import java.io.InputStreamReader; import java.net.InetAddress; import java.net.ServerSocket; import java.net.Socket; +import java.net.SocketTimeoutException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.util.Locale; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -89,7 +94,6 @@ public class HttpClientTests extends ESTestCase { HttpRequest request = requestBuilder.build(); HttpResponse response = httpClient.execute(request); - assertThat(response.status(), equalTo(responseCode)); assertThat(response.body().utf8ToString(), equalTo(body)); assertThat(webServer.requests(), hasSize(1)); @@ -98,7 +102,6 @@ public class HttpClientTests extends ESTestCase { assertThat(webServer.requests().get(0).getHeader(headerKey), equalTo(headerValue)); } - @TestLogging("org.elasticsearch.http.test:TRACE") public void testNoQueryString() throws Exception { webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()) @@ -114,7 +117,7 @@ public class HttpClientTests extends ESTestCase { assertThat(webServer.requests().get(0).getBody(), is(nullValue())); } - public void testUrlEncodingWithQueryStrings() throws 
Exception{ + public void testUrlEncodingWithQueryStrings() throws Exception { webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()) .method(HttpMethod.GET) @@ -216,9 +219,9 @@ public class HttpClientTests extends ESTestCase { public void testHttpsClientAuth() throws Exception { Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", resource.toString()) - .put("xpack.ssl.keystore.password", "testnode") - .build(); + .put("xpack.ssl.keystore.path", resource.toString()) + .put("xpack.ssl.keystore.password", "testnode") + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); httpClient = new HttpClient(settings, authRegistry, sslService); @@ -329,11 +332,31 @@ public class HttpClientTests extends ESTestCase { } } + public void testThatProxyConfigurationRequiresHostAndPort() { + Settings.Builder settings = Settings.builder(); + if (randomBoolean()) { + settings.put(HttpSettings.PROXY_HOST.getKey(), "localhost"); + } else { + settings.put(HttpSettings.PROXY_PORT.getKey(), 8080); + } + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new HttpClient(settings.build(), authRegistry, new SSLService(settings.build(), environment))); + assertThat(e.getMessage(), + containsString("HTTP proxy requires both settings: [xpack.http.proxy.host] and [xpack.http.proxy.port]")); + } + public void testThatUrlPathIsNotEncoded() throws Exception { // %2F is a slash that needs to be encoded to not be misinterpreted as a path String path = "/%3Clogstash-%7Bnow%2Fd%7D%3E/_search"; webServer.enqueue(new MockResponse().setResponseCode(200).setBody("foo")); - HttpRequest request = HttpRequest.builder("localhost", webServer.getPort()).path(path).build(); + HttpRequest request; + if (randomBoolean()) { + request = 
HttpRequest.builder("localhost", webServer.getPort()).path(path).build(); + } else { + // ensure that fromUrl acts the same way than the above builder + request = HttpRequest.builder().fromUrl(String.format(Locale.ROOT, "http://localhost:%s%s", webServer.getPort(), path)).build(); + } httpClient.execute(request); assertThat(webServer.requests(), hasSize(1)); @@ -344,6 +367,36 @@ public class HttpClientTests extends ESTestCase { assertThat(webServer.requests().get(0).getUri().getPath(), is("//_search")); } + public void testThatDuplicateHeaderKeysAreReturned() throws Exception { + MockResponse mockResponse = new MockResponse().setResponseCode(200).setBody("foo") + .addHeader("foo", "bar") + .addHeader("foo", "baz") + .addHeader("Content-Length", "3"); + webServer.enqueue(mockResponse); + + HttpRequest request = HttpRequest.builder("localhost", webServer.getPort()).path("/").build(); + HttpResponse httpResponse = httpClient.execute(request); + + assertThat(webServer.requests(), hasSize(1)); + + assertThat(httpResponse.headers(), hasKey("foo")); + assertThat(httpResponse.headers().get("foo"), containsInAnyOrder("bar", "baz")); + } + + // finally fixing https://github.com/elastic/x-plugins/issues/1141 - yay! Fixed due to switching to apache http client internally! 
+ public void testThatClientTakesTimeoutsIntoAccountAfterHeadersAreSent() throws Exception { + webServer.enqueue(new MockResponse().setResponseCode(200).setBody("foo").setBodyDelay(TimeValue.timeValueSeconds(2))); + + HttpRequest request = HttpRequest.builder("localhost", webServer.getPort()).path("/foo") + .method(HttpMethod.POST) + .body("foo") + .connectionTimeout(TimeValue.timeValueMillis(500)) + .readTimeout(TimeValue.timeValueMillis(500)) + .build(); + SocketTimeoutException e = expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); + assertThat(e.getMessage(), is("Read timed out")); + } + public void testThatHttpClientFailsOnNonHttpResponse() throws Exception { ExecutorService executor = Executors.newSingleThreadExecutor(); AtomicReference hasExceptionHappened = new AtomicReference(); @@ -360,11 +413,19 @@ public class HttpClientTests extends ESTestCase { } }); HttpRequest request = HttpRequest.builder("localhost", serverSocket.getLocalPort()).path("/").build(); - IOException e = expectThrows(IOException.class, () -> httpClient.execute(request)); - assertThat(e.getMessage(), is("Not a valid HTTP response, no status code in response")); + expectThrows(ClientProtocolException.class, () -> httpClient.execute(request)); assertThat("A server side exception occured, but shouldnt", hasExceptionHappened.get(), is(nullValue())); } finally { terminate(executor); } } + + public void testNoContentResponse() throws Exception { + int noContentStatusCode = 204; + webServer.enqueue(new MockResponse().setResponseCode(noContentStatusCode)); + HttpRequest request = HttpRequest.builder("localhost", webServer.getPort()).path("/foo").build(); + HttpResponse response = httpClient.execute(request); + assertThat(response.status(), is(noContentStatusCode)); + assertThat(response.body(), is(nullValue())); + } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpConnectionTimeoutTests.java 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpConnectionTimeoutTests.java index 3f7de0265c8..6a473df76b4 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpConnectionTimeoutTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpConnectionTimeoutTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.common.http; +import org.apache.http.conn.ConnectTimeoutException; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -37,7 +38,7 @@ public class HttpConnectionTimeoutTests extends ESTestCase { try { httpClient.execute(request); fail("expected timeout exception"); - } catch (ElasticsearchTimeoutException ete) { + } catch (ConnectTimeoutException ete) { TimeValue timeout = TimeValue.timeValueNanos(System.nanoTime() - start); logger.info("http connection timed out after {}", timeout.format()); // it's supposed to be 10, but we'll give it an error margin of 2 seconds @@ -63,7 +64,7 @@ public class HttpConnectionTimeoutTests extends ESTestCase { try { httpClient.execute(request); fail("expected timeout exception"); - } catch (ElasticsearchTimeoutException ete) { + } catch (ConnectTimeoutException ete) { TimeValue timeout = TimeValue.timeValueNanos(System.nanoTime() - start); logger.info("http connection timed out after {}", timeout.format()); // it's supposed to be 7, but we'll give it an error margin of 2 seconds @@ -90,7 +91,7 @@ public class HttpConnectionTimeoutTests extends ESTestCase { try { httpClient.execute(request); fail("expected timeout exception"); - } catch (ElasticsearchTimeoutException ete) { + } catch (ConnectTimeoutException ete) { TimeValue timeout = TimeValue.timeValueNanos(System.nanoTime() - start); logger.info("http connection timed out after {}", timeout.format()); // it's supposed to be 7, but we'll give it an error margin of 2 seconds diff --git 
a/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpReadTimeoutTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpReadTimeoutTests.java index eba15f63104..d42cb15a237 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpReadTimeoutTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/common/http/HttpReadTimeoutTests.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.common.http; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.ssl.SSLService; import org.junit.After; import org.junit.Before; +import java.net.SocketTimeoutException; + import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; import static org.mockito.Mockito.mock; @@ -47,7 +49,7 @@ public class HttpReadTimeoutTests extends ESTestCase { .build(); long start = System.nanoTime(); - expectThrows(ElasticsearchTimeoutException.class, () -> httpClient.execute(request)); + expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); TimeValue timeout = TimeValue.timeValueNanos(System.nanoTime() - start); logger.info("http connection timed out after {}", timeout.format()); @@ -69,7 +71,7 @@ public class HttpReadTimeoutTests extends ESTestCase { .build(); long start = System.nanoTime(); - expectThrows(ElasticsearchTimeoutException.class, () -> httpClient.execute(request)); + expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); TimeValue timeout = TimeValue.timeValueNanos(System.nanoTime() - start); logger.info("http connection timed out after {}", timeout.format()); @@ -86,18 +88,18 @@ public class HttpReadTimeoutTests extends ESTestCase { , mock(HttpAuthRegistry.class), new 
SSLService(environment.settings(), environment)); HttpRequest request = HttpRequest.builder("localhost", webServer.getPort()) - .readTimeout(TimeValue.timeValueSeconds(5)) + .readTimeout(TimeValue.timeValueSeconds(3)) .method(HttpMethod.POST) .path("/") .build(); long start = System.nanoTime(); - expectThrows(ElasticsearchTimeoutException.class, () -> httpClient.execute(request)); + expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); TimeValue timeout = TimeValue.timeValueNanos(System.nanoTime() - start); logger.info("http connection timed out after {}", timeout.format()); - // it's supposed to be 5, but we'll give it an error margin of 2 seconds - assertThat(timeout.seconds(), greaterThan(3L)); - assertThat(timeout.seconds(), lessThan(7L)); + // it's supposed to be 3, but we'll give it an error margin of 2 seconds + assertThat(timeout.seconds(), greaterThan(1L)); + assertThat(timeout.seconds(), lessThan(5L)); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoredSystemTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoredSystemTests.java new file mode 100644 index 00000000000..ba808291350 --- /dev/null +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoredSystemTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; + +/** + * Tests {@link MonitoredSystem}. + */ +public class MonitoredSystemTests extends ESTestCase { + + public void testGetSystem() { + // everything is just lowercased... 
+ for (final MonitoredSystem system : MonitoredSystem.values()) { + assertEquals(system.name().toLowerCase(Locale.ROOT), system.getSystem()); + } + } + + public void testFromSystem() { + for (final MonitoredSystem system : MonitoredSystem.values()) { + final String lowercased = system.name().toLowerCase(Locale.ROOT); + + assertSame(system, MonitoredSystem.fromSystem(system.name())); + assertSame(system, MonitoredSystem.fromSystem(lowercased)); + } + } + + public void testFromUnknownSystem() { + final String unknownSystem = randomAsciiOfLengthBetween(3, 4); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + MonitoredSystem.fromSystem(unknownSystem); + }); + + assertThat(e.getMessage(), containsString(unknownSystem)); + } + +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringPluginTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringPluginTests.java index cc00210f73f..d94777dcda6 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringPluginTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringPluginTests.java @@ -20,14 +20,13 @@ import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope = TEST, transportClientRatio = 0, numClientNodes = 0, numDataNodes = 0) public class MonitoringPluginTests extends MonitoringIntegTestCase { - @Override - protected void startCollection() { + protected void startMonitoringService() { // do nothing as monitoring is sometime unbound } @Override - protected void stopCollection() { + protected void stopMonitoringService() { // do nothing as monitoring is sometime unbound } @@ -44,7 +43,7 @@ public class MonitoringPluginTests extends MonitoringIntegTestCase { .put(XPackSettings.MONITORING_ENABLED.getKey(), true) .build()); assertPluginIsLoaded(); - assertServiceIsBound(AgentService.class); + assertServiceIsBound(MonitoringService.class); } public void 
testMonitoringDisabled() { @@ -52,7 +51,7 @@ public class MonitoringPluginTests extends MonitoringIntegTestCase { .put(XPackSettings.MONITORING_ENABLED.getKey(), false) .build()); assertPluginIsLoaded(); - assertServiceIsNotBound(AgentService.class); + assertServiceIsNotBound(MonitoringService.class); } public void testMonitoringEnabledOnTribeNode() { @@ -61,13 +60,13 @@ public class MonitoringPluginTests extends MonitoringIntegTestCase { .put("tribe.name", "t1") .build()); assertPluginIsLoaded(); - assertServiceIsBound(AgentService.class); + assertServiceIsBound(MonitoringService.class); } public void testMonitoringDisabledOnTribeNode() { internalCluster().startNode(Settings.builder().put("tribe.name", "t1").build()); assertPluginIsLoaded(); - assertServiceIsNotBound(AgentService.class); + assertServiceIsNotBound(MonitoringService.class); } private void assertPluginIsLoaded() { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java new file mode 100644 index 00000000000..c0680c95877 --- /dev/null +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.xpack.monitoring.exporter.ExportException; +import org.elasticsearch.xpack.monitoring.exporter.Exporters; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; +import org.junit.After; +import org.junit.Before; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; + +import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MonitoringServiceTests extends ESTestCase { + + TestThreadPool threadPool; + MonitoringService monitoringService; + ClusterService clusterService; + ClusterSettings clusterSettings; + + @Before + public void setUp() throws Exception { + super.setUp(); + threadPool = new TestThreadPool(getTestName()); + clusterService = mock(ClusterService.class); + clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(MonitoringSettings.getSettings())); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + } + + @After + public void terminate() throws Exception { + if (monitoringService != null) { + monitoringService.close(); + } + terminate(threadPool); + } + + public void testIsMonitoringActive() throws Exception { + monitoringService = new MonitoringService(Settings.EMPTY, clusterSettings, threadPool, emptySet(), new CountingExporter()); + + monitoringService.start(); + assertBusy(() -> 
assertTrue(monitoringService.isStarted())); + assertTrue(monitoringService.isMonitoringActive()); + + monitoringService.stop(); + assertBusy(() -> assertFalse(monitoringService.isStarted())); + assertFalse(monitoringService.isMonitoringActive()); + + monitoringService.start(); + assertBusy(() -> assertTrue(monitoringService.isStarted())); + assertTrue(monitoringService.isMonitoringActive()); + + monitoringService.close(); + assertBusy(() -> assertFalse(monitoringService.isStarted())); + assertFalse(monitoringService.isMonitoringActive()); + } + + public void testInterval() throws Exception { + Settings settings = Settings.builder().put(MonitoringSettings.INTERVAL.getKey(), TimeValue.MINUS_ONE).build(); + + CountingExporter exporter = new CountingExporter(); + monitoringService = new MonitoringService(settings, clusterSettings, threadPool, emptySet(), exporter); + + monitoringService.start(); + assertBusy(() -> assertTrue(monitoringService.isStarted())); + assertFalse("interval -1 does not start the monitoring execution", monitoringService.isMonitoringActive()); + assertEquals(0, exporter.getExportsCount()); + + monitoringService.setInterval(TimeValue.timeValueSeconds(1)); + assertTrue(monitoringService.isMonitoringActive()); + assertBusy(() -> assertThat(exporter.getExportsCount(), greaterThan(0))); + + monitoringService.setInterval(TimeValue.timeValueMillis(100)); + assertFalse(monitoringService.isMonitoringActive()); + + monitoringService.setInterval(TimeValue.MINUS_ONE); + assertFalse(monitoringService.isMonitoringActive()); + } + + public void testSkipExecution() throws Exception { + final CountDownLatch latch = new CountDownLatch(1); + final BlockingExporter exporter = new BlockingExporter(latch); + + Settings settings = Settings.builder().put(MonitoringSettings.INTERVAL.getKey(), MonitoringSettings.MIN_INTERVAL).build(); + monitoringService = new MonitoringService(settings, clusterSettings, threadPool, emptySet(), exporter); + + logger.debug("start the 
monitoring service"); + monitoringService.start(); + assertBusy(() -> assertTrue(monitoringService.isStarted())); + + logger.debug("wait for the monitoring execution to be started"); + assertBusy(() -> assertThat(exporter.getExportsCount(), equalTo(1))); + + logger.debug("cancel current execution to avoid further execution once the latch is unblocked"); + monitoringService.cancelExecution(); + + logger.debug("unblock the exporter"); + latch.countDown(); + + logger.debug("verify that it hasn't been called more than one"); + assertThat(exporter.getExportsCount(), equalTo(1)); + } + + class CountingExporter extends Exporters { + + private final AtomicInteger exports = new AtomicInteger(0); + + public CountingExporter() { + super(Settings.EMPTY, Collections.emptyMap(), clusterService); + } + + @Override + public void export(Collection docs) throws ExportException { + exports.incrementAndGet(); + } + + int getExportsCount() { + return exports.get(); + } + + @Override + protected void doStart() { + } + + @Override + protected void doStop() { + } + + @Override + protected void doClose() { + } + } + + class BlockingExporter extends CountingExporter { + + private final CountDownLatch latch; + + BlockingExporter(CountDownLatch latch) { + super(); + this.latch = latch; + } + + @Override + public void export(Collection docs) throws ExportException { + super.export(docs); + try { + latch.await(); + } catch (InterruptedException e) { + throw new ExportException("BlockingExporter failed", e); + } + } + + @Override + protected void doStart() { + } + + @Override + protected void doStop() { + } + + @Override + protected void doClose() { + } + } +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringSettingsIntegTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringSettingsIntegTests.java index 484b6dedd82..941f62f1dee 100644 --- 
a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringSettingsIntegTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringSettingsIntegTests.java @@ -78,9 +78,8 @@ public class MonitoringSettingsIntegTests extends MonitoringIntegTestCase { assertThat(monitoringSettings.recoveryActiveOnly(), equalTo(recoveryActiveOnly)); } - for (final AgentService service : internalCluster().getInstances(AgentService.class)) { - assertThat(service.getSamplingInterval().millis(), equalTo(interval.millis())); - assertArrayEquals(service.collectors(), collectors); + for (final MonitoringService service : internalCluster().getInstances(MonitoringService.class)) { + assertThat(service.getInterval().millis(), equalTo(interval.millis())); } @@ -124,8 +123,8 @@ public class MonitoringSettingsIntegTests extends MonitoringIntegTestCase { continue; } if (setting == MonitoringSettings.INTERVAL) { - for (final AgentService service : internalCluster().getInstances(AgentService.class)) { - assertEquals(service.getSamplingInterval(), setting.get(updatedSettings)); + for (final MonitoringService service : internalCluster().getInstances(MonitoringService.class)) { + assertEquals(service.getInterval(), setting.get(updatedSettings)); } } else { for (final MonitoringSettings monitoringSettings1 : internalCluster().getInstances(MonitoringSettings.class)) { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/OldMonitoringIndicesBackwardsCompatibilityTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/OldMonitoringIndicesBackwardsCompatibilityTests.java index 633633c5684..f224150849f 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/OldMonitoringIndicesBackwardsCompatibilityTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/OldMonitoringIndicesBackwardsCompatibilityTests.java @@ -180,8 +180,7 @@ public class 
OldMonitoringIndicesBackwardsCompatibilityTests extends AbstractOld } finally { /* Now we stop monitoring and disable the HTTP exporter. We also delete all data and checks multiple times if they have not been re created by some in flight monitoring bulk request */ - internalCluster().getInstances(AgentService.class).forEach(AgentService::stopCollection); - internalCluster().getInstances(AgentService.class).forEach(AgentService::stop); + internalCluster().getInstances(MonitoringService.class).forEach(MonitoringService::stop); Settings.Builder settings = Settings.builder().put(MonitoringSettings.INTERVAL.getKey(), "-1"); if (httpExporter) { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java index 49d51b1b35b..9faaf342cc0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkActionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.LocalClusterUpdateTask; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -110,6 +111,8 @@ public class TransportMonitoringBulkActionTests extends ESTestCase { } }); clusterService.setClusterStatePublisher((event, ackListener) -> {}); + clusterService.setDiscoverySettings(new DiscoverySettings(Settings.EMPTY, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); clusterService.start(); transportService = new 
TransportService(clusterService.getSettings(), transport, threadPool, @@ -142,16 +145,11 @@ public class TransportMonitoringBulkActionTests extends ESTestCase { final ClusterBlocks.Builder block = ClusterBlocks.builder().addGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ALL); final CountDownLatch latch = new CountDownLatch(1); - clusterService.submitStateUpdateTask("add blocks to cluster state", new ClusterStateUpdateTask() { + clusterService.submitStateUpdateTask("add blocks to cluster state", new LocalClusterUpdateTask() { @Override - public ClusterState execute(ClusterState currentState) throws Exception { + public ClusterTasksResult execute(ClusterState currentState) throws Exception { // make sure we increment versions as listener may depend on it for change - return ClusterState.builder(currentState).blocks(block).version(currentState.version() + 1).build(); - } - - @Override - public boolean runOnlyOnMaster() { - return false; + return newState(ClusterState.builder(currentState).blocks(block).version(currentState.version() + 1).build()); } @Override diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/AbstractExporterTemplateTestCase.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/AbstractExporterTemplateTestCase.java deleted file mode 100644 index a6d292f8a8e..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/AbstractExporterTemplateTestCase.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.monitoring.exporter; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.LicenseService; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.xpack.monitoring.MonitoredSystem; -import org.elasticsearch.xpack.monitoring.MonitoringSettings; -import org.elasticsearch.xpack.monitoring.collector.Collector; -import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector; -import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; -import org.elasticsearch.xpack.security.InternalClient; - -import java.io.IOException; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; -import static org.hamcrest.Matchers.notNullValue; - -@ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0) -public abstract class AbstractExporterTemplateTestCase extends MonitoringIntegTestCase { - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - Settings.Builder settings = Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(MonitoringSettings.INTERVAL.getKey(), "-1"); - - for (Map.Entry setting : exporterSettings().getAsMap().entrySet()) { - settings.put("xpack.monitoring.exporters._exporter." 
+ setting.getKey(), setting.getValue()); - } - return settings.build(); - } - - protected abstract Settings exporterSettings(); - - protected abstract void deleteTemplates() throws Exception; - - protected abstract void deletePipeline() throws Exception; - - protected abstract void putTemplate(String name) throws Exception; - - protected abstract void putPipeline(String name) throws Exception; - - protected abstract void assertTemplateExists(String name) throws Exception; - - protected abstract void assertPipelineExists(String name) throws Exception; - - protected abstract void assertTemplateNotUpdated(String name) throws Exception; - - protected abstract void assertPipelineNotUpdated(String name) throws Exception; - - public void testCreateWhenNoExistingTemplates() throws Exception { - internalCluster().startNode(); - - deleteTemplates(); - deletePipeline(); - doExporting(); - - logger.debug("--> templates does not exist: it should have been created in the current version"); - for (String template : monitoringTemplateNames()) { - assertTemplateExists(template); - } - assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); - - doExporting(); - - logger.debug("--> indices should have been created"); - awaitIndexExists(currentDataIndexName()); - awaitIndexExists(currentTimestampedIndexName()); - } - - public void testCreateWhenExistingTemplatesAreOld() throws Exception { - internalCluster().startNode(); - - putTemplate(indexTemplateName()); - putTemplate(dataTemplateName()); - putPipeline(Exporter.EXPORT_PIPELINE_NAME); - - doExporting(); - - logger.debug("--> existing templates are old"); - assertTemplateExists(dataTemplateName()); - assertTemplateExists(indexTemplateName()); - - logger.debug("--> existing templates are old: new templates should be created"); - for (String template : monitoringTemplateNames()) { - assertTemplateExists(template); - } - assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); - - doExporting(); - - logger.debug("--> indices should have been 
created"); - awaitIndexExists(currentDataIndexName()); - awaitIndexExists(currentTimestampedIndexName()); - } - - public void testCreateWhenExistingTemplateAreUpToDate() throws Exception { - internalCluster().startNode(); - - putTemplate(indexTemplateName()); - putTemplate(dataTemplateName()); - putPipeline(Exporter.EXPORT_PIPELINE_NAME); - - doExporting(); - - logger.debug("--> existing templates are up to date"); - for (String template : monitoringTemplateNames()) { - assertTemplateExists(template); - } - assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); - - logger.debug("--> existing templates has the same version: they should not be changed"); - assertTemplateNotUpdated(indexTemplateName()); - assertTemplateNotUpdated(dataTemplateName()); - assertPipelineNotUpdated(Exporter.EXPORT_PIPELINE_NAME); - - doExporting(); - - logger.debug("--> indices should have been created"); - awaitIndexExists(currentDataIndexName()); - awaitIndexExists(currentTimestampedIndexName()); - } - - protected void doExporting() throws Exception { - // TODO: these should be unit tests, not using guice - ClusterService clusterService = internalCluster().getInstance(ClusterService.class); - XPackLicenseState licenseState = internalCluster().getInstance(XPackLicenseState.class); - LicenseService licenseService = internalCluster().getInstance(LicenseService.class); - InternalClient client = internalCluster().getInstance(InternalClient.class); - Collector collector = new ClusterStatsCollector(clusterService.getSettings(), clusterService, - new MonitoringSettings(clusterService.getSettings(), clusterService.getClusterSettings()), - licenseState, client, licenseService); - - Exporters exporters = internalCluster().getInstance(Exporters.class); - assertNotNull(exporters); - - // Wait for exporting bulks to be ready to export - Runnable busy = () -> assertThat(exporters.openBulk(), notNullValue()); - assertBusy(busy); - exporters.export(collector.collect()); - } - - private String 
dataTemplateName() { - MockDataIndexNameResolver resolver = new MockDataIndexNameResolver(MonitoringTemplateUtils.TEMPLATE_VERSION); - return resolver.templateName(); - } - - private String indexTemplateName() { - MockTimestampedIndexNameResolver resolver = - new MockTimestampedIndexNameResolver(MonitoredSystem.ES, exporterSettings(), MonitoringTemplateUtils.TEMPLATE_VERSION); - return resolver.templateName(); - } - - private String currentDataIndexName() { - MockDataIndexNameResolver resolver = new MockDataIndexNameResolver(MonitoringTemplateUtils.TEMPLATE_VERSION); - return resolver.index(null); - } - - private String currentTimestampedIndexName() { - MonitoringDoc doc = new MonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString()); - doc.setTimestamp(System.currentTimeMillis()); - - MockTimestampedIndexNameResolver resolver = - new MockTimestampedIndexNameResolver(MonitoredSystem.ES, exporterSettings(), MonitoringTemplateUtils.TEMPLATE_VERSION); - return resolver.index(doc); - } - - /** Generates a basic template **/ - protected static BytesReference generateTemplateSource(String name) throws IOException { - return jsonBuilder().startObject() - .field("template", name) - .startObject("settings") - .field("index.number_of_shards", 1) - .field("index.number_of_replicas", 1) - .endObject() - .startObject("mappings") - .startObject("_default_") - .startObject("_all") - .field("enabled", false) - .endObject() - .field("date_detection", false) - .startObject("properties") - .startObject("cluster_uuid") - .field("type", "keyword") - .endObject() - .startObject("timestamp") - .field("type", "date") - .field("format", "date_time") - .endObject() - .endObject() - .endObject() - .startObject("cluster_info") - .field("enabled", false) - .endObject() - .startObject("cluster_stats") - .startObject("properties") - .startObject("cluster_stats") - .field("type", "object") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().bytes(); - } -} 
diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java index 81a1174d68f..d9c8e25076f 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java @@ -6,9 +6,6 @@ package org.elasticsearch.xpack.monitoring.exporter; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -46,9 +43,6 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class ExportersTests extends ESTestCase { @@ -200,57 +194,6 @@ public class ExportersTests extends ESTestCase { assertThat(settings, hasEntry("_name1.foo", "bar")); } - public void testOpenBulkOnMaster() throws Exception { - Exporter.Factory factory = new MockFactory(false); - Exporter.Factory masterOnlyFactory = new MockFactory(true); - factories.put("mock", factory); - factories.put("mock_master_only", masterOnlyFactory); - Exporters exporters = new Exporters(Settings.builder() - .put("xpack.monitoring.exporters._name0.type", "mock") - .put("xpack.monitoring.exporters._name1.type", "mock_master_only") - .build(), factories, clusterService); - exporters.start(); - - DiscoveryNodes nodes = mock(DiscoveryNodes.class); - 
when(nodes.isLocalNodeElectedMaster()).thenReturn(true); - when(clusterService.state()).thenReturn(ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .nodes(nodes).build()); - - ExportBulk bulk = exporters.openBulk(); - assertThat(bulk, notNullValue()); - - verify(exporters.getExporter("_name0"), times(1)).masterOnly(); - verify(exporters.getExporter("_name0"), times(1)).openBulk(); - verify(exporters.getExporter("_name1"), times(1)).masterOnly(); - verify(exporters.getExporter("_name1"), times(1)).openBulk(); - } - - public void testExportNotOnMaster() throws Exception { - Exporter.Factory factory = new MockFactory(false); - Exporter.Factory masterOnlyFactory = new MockFactory(true); - factories.put("mock", factory); - factories.put("mock_master_only", masterOnlyFactory); - Exporters exporters = new Exporters(Settings.builder() - .put("xpack.monitoring.exporters._name0.type", "mock") - .put("xpack.monitoring.exporters._name1.type", "mock_master_only") - .build(), factories, clusterService); - exporters.start(); - - DiscoveryNodes nodes = mock(DiscoveryNodes.class); - when(nodes.isLocalNodeElectedMaster()).thenReturn(false); - when(clusterService.state()).thenReturn(ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .nodes(nodes).build()); - - ExportBulk bulk = exporters.openBulk(); - assertThat(bulk, notNullValue()); - - verify(exporters.getExporter("_name0"), times(1)).masterOnly(); - verify(exporters.getExporter("_name0"), times(1)).openBulk(); - verify(exporters.getExporter("_name1"), times(1)).masterOnly(); - verify(exporters.getExporter("_name1"), times(1)).isSingleton(); - verifyNoMoreInteractions(exporters.getExporter("_name1")); - } - public void testEmptyPipeline() throws IOException { String json = Exporter.emptyPipeline(XContentType.JSON).string(); @@ -360,20 +303,15 @@ public class ExportersTests extends ESTestCase { static class MockFactory implements Exporter.Factory { - private final 
boolean masterOnly; - - public MockFactory(boolean masterOnly) { - this.masterOnly = masterOnly; - } @Override public Exporter create(Exporter.Config config) { Exporter exporter = mock(Exporter.class); when(exporter.name()).thenReturn(config.name()); - when(exporter.masterOnly()).thenReturn(masterOnly); when(exporter.openBulk()).thenReturn(mock(ExportBulk.class)); return exporter; } + } static class CountingExporter extends Exporter { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java index 11144671ac3..04fad9dfbb0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java @@ -144,13 +144,19 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase assertThat(parameters.get("filter_path"), is("$NONE")); } - private void doCheckWithStatusCode(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName, - final RestStatus status, - final CheckResponse expected) + protected void doCheckWithStatusCode(final PublishableHttpResource resource, final String resourceBasePath, final String resourceName, + final RestStatus status, + final CheckResponse expected) throws IOException { final String endpoint = concatenateEndpoint(resourceBasePath, resourceName); final Response response = response("GET", endpoint, status); + doCheckWithStatusCode(resource, endpoint, expected, response); + } + + protected void doCheckWithStatusCode(final PublishableHttpResource resource, final String endpoint, final CheckResponse expected, + final Response response) + throws IOException { when(client.performRequest("GET", endpoint, 
resource.getParameters())).thenReturn(response); assertThat(resource.doCheck(client), is(expected)); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/DataTypeMappingHttpResourceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/DataTypeMappingHttpResourceTests.java new file mode 100644 index 00000000000..7ee6b9a1b7b --- /dev/null +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/DataTypeMappingHttpResourceTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.exporter.http; + +import org.apache.http.HttpEntity; +import org.elasticsearch.client.Response; +import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse; +import org.apache.http.entity.StringEntity; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils.DATA_INDEX; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests {@link DataTypeMappingHttpResource}. 
+ */ +public class DataTypeMappingHttpResourceTests extends AbstractPublishableHttpResourceTestCase { + + private final String typeName = "my_type"; + + private final DataTypeMappingHttpResource resource = new DataTypeMappingHttpResource(owner, masterTimeout, typeName); + + public void testDoCheckTrueFor404() throws IOException { + // if the index is not there, then we don't need to manually add the type + doCheckWithStatusCode(resource, "/" + DATA_INDEX + "/_mapping", typeName, notFoundCheckStatus(), CheckResponse.EXISTS); + } + + public void testDoCheckTrue() throws IOException { + final String endpoint = "/" + DATA_INDEX + "/_mapping/" + typeName; + // success does't mean it exists unless the mapping exists! it returns {} if the index exists, but the type does not + final Response response = response("GET", endpoint, successfulCheckStatus()); + final HttpEntity responseEntity = mock(HttpEntity.class); + final long validMapping = randomIntBetween(3, Integer.MAX_VALUE); + + when(response.getEntity()).thenReturn(responseEntity); + when(responseEntity.getContentLength()).thenReturn(validMapping); + + doCheckWithStatusCode(resource, endpoint, CheckResponse.EXISTS, response); + + verify(responseEntity).getContentLength(); + } + + public void testDoCheckFalse() throws IOException { + final String endpoint = "/" + DATA_INDEX + "/_mapping/" + typeName; + // success does't mean it exists unless the mapping exists! 
it returns {} if the index exists, but the type does not + final Response response = response("GET", endpoint, successfulCheckStatus()); + final HttpEntity responseEntity = mock(HttpEntity.class); + final long invalidMapping = randomIntBetween(Integer.MIN_VALUE, 2); + + when(response.getEntity()).thenReturn(responseEntity); + when(responseEntity.getContentLength()).thenReturn(invalidMapping); + + doCheckWithStatusCode(resource, endpoint, CheckResponse.DOES_NOT_EXIST, response); + + verify(responseEntity).getContentLength(); + } + + public void testDoCheckNullWithException() throws IOException { + assertCheckWithException(resource, "/" + DATA_INDEX + "/_mapping", typeName); + } + + public void testDoPublishTrue() throws IOException { + assertPublishSucceeds(resource, "/" + DATA_INDEX + "/_mapping", typeName, StringEntity.class); + } + + public void testDoPublishFalse() throws IOException { + assertPublishFails(resource, "/" + DATA_INDEX + "/_mapping", typeName, StringEntity.class); + } + + public void testDoPublishFalseWithException() throws IOException { + assertPublishWithException(resource, "/" + DATA_INDEX + "/_mapping", typeName, StringEntity.class); + } + + public void testParameters() { + final Map parameters = resource.getParameters(); + + if (masterTimeout != null) { + assertThat(parameters.get("master_timeout"), is(masterTimeout.toString())); + } + + assertThat(parameters.size(), is(masterTimeout == null ? 
0 : 1)); + } + +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java index 216c073ffdc..ef00ef15620 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterIT.java @@ -46,6 +46,7 @@ import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -56,6 +57,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils.DATA_INDEX; import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.FILTER_PATH_NONE; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -64,6 +66,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0) @@ -87,13 +90,15 @@ public class HttpExporterIT extends MonitoringIntegTestCase { } public void testExport() throws Exception { + final boolean typeMappingsExistAlready = randomBoolean(); final boolean templatesExistsAlready = randomBoolean(); final boolean pipelineExistsAlready = 
randomBoolean(); final boolean bwcIndexesExist = randomBoolean(); final boolean bwcAliasesExist = randomBoolean(); enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueSetupResponses(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + enqueueSetupResponses(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); final Settings.Builder builder = Settings.builder().put(MonitoringSettings.INTERVAL.getKey(), "-1") @@ -105,11 +110,14 @@ public class HttpExporterIT extends MonitoringIntegTestCase { final int nbDocs = randomIntBetween(1, 25); export(newRandomMonitoringDocs(nbDocs)); - assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + assertMonitorResources(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, + bwcIndexesExist, bwcAliasesExist); assertBulk(webServer, nbDocs); } public void testExportWithHeaders() throws Exception { + final boolean typeMappingsExistAlready = randomBoolean(); final boolean templatesExistsAlready = randomBoolean(); final boolean pipelineExistsAlready = randomBoolean(); final boolean bwcIndexesExist = randomBoolean(); @@ -125,7 +133,8 @@ public class HttpExporterIT extends MonitoringIntegTestCase { headers.put("Array-Check", array); enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueSetupResponses(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + enqueueSetupResponses(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); Settings.Builder builder = Settings.builder().put(MonitoringSettings.INTERVAL.getKey(), "-1") @@ -140,12 +149,16 @@ public class 
HttpExporterIT extends MonitoringIntegTestCase { final int nbDocs = randomIntBetween(1, 25); export(newRandomMonitoringDocs(nbDocs)); - assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist, headers, null); + assertMonitorResources(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, + bwcIndexesExist, bwcAliasesExist, + headers, null); assertBulk(webServer, nbDocs, headers, null); } public void testExportWithBasePath() throws Exception { final boolean useHeaders = randomBoolean(); + final boolean typeMappingsExistAlready = randomBoolean(); final boolean templatesExistsAlready = randomBoolean(); final boolean pipelineExistsAlready = randomBoolean(); final boolean bwcIndexesExist = randomBoolean(); @@ -163,7 +176,8 @@ public class HttpExporterIT extends MonitoringIntegTestCase { } enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueSetupResponses(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + enqueueSetupResponses(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); enqueueResponse(200, "{\"errors\": false}"); String basePath = "path/to"; @@ -196,12 +210,15 @@ public class HttpExporterIT extends MonitoringIntegTestCase { final int nbDocs = randomIntBetween(1, 25); export(newRandomMonitoringDocs(nbDocs)); - assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist, headers, - basePath); + assertMonitorResources(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, + bwcIndexesExist, bwcAliasesExist, + headers, basePath); assertBulk(webServer, nbDocs, headers, basePath); } public void testHostChangeReChecksTemplate() throws Exception { + final boolean typeMappingsExistAlready = randomBoolean(); final boolean templatesExistsAlready = randomBoolean(); final boolean 
pipelineExistsAlready = randomBoolean(); final boolean bwcIndexesExist = randomBoolean(); @@ -212,14 +229,16 @@ public class HttpExporterIT extends MonitoringIntegTestCase { .put("xpack.monitoring.exporters._http.host", getFormattedAddress(webServer)); enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueSetupResponses(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + enqueueSetupResponses(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); enqueueResponse(200, "{\"errors\": false}"); internalCluster().startNode(builder); export(Collections.singletonList(newRandomMonitoringDoc())); - assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + assertMonitorResources(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); assertBulk(webServer); try (MockWebServer secondWebServer = createMockWebServer()) { @@ -227,6 +246,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase { Settings.builder().putArray("xpack.monitoring.exporters._http.host", getFormattedAddress(secondWebServer)))); enqueueGetClusterVersionResponse(secondWebServer, Version.CURRENT); + enqueueMappingTypeResponses(secondWebServer, !typeMappingsExistAlready); // pretend that one of the templates is missing for (Tuple template : monitoringTemplates()) { if (template.v1().contains(MonitoringBulkTimestampedResolver.Data.DATA)) { @@ -245,6 +265,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase { export(Collections.singletonList(newRandomMonitoringDoc())); assertMonitorVersion(secondWebServer); + assertMonitorMappingTypes(secondWebServer, !typeMappingsExistAlready, null, null); for (Tuple template : monitoringTemplates()) { MockRequest recordedRequest = secondWebServer.takeRequest(); @@ -286,6 +307,7 @@ public class HttpExporterIT extends 
MonitoringIntegTestCase { } public void testDynamicIndexFormatChange() throws Exception { + final boolean typeMappingsExistAlready = randomBoolean(); final boolean templatesExistsAlready = randomBoolean(); final boolean pipelineExistsAlready = randomBoolean(); final boolean bwcIndexesExist = randomBoolean(); @@ -298,13 +320,17 @@ public class HttpExporterIT extends MonitoringIntegTestCase { internalCluster().startNode(builder); enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueSetupResponses(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + enqueueSetupResponses(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, + bwcIndexesExist, bwcAliasesExist); enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); MonitoringDoc doc = newRandomMonitoringDoc(); export(Collections.singletonList(doc)); - assertMonitorResources(webServer, templatesExistsAlready, pipelineExistsAlready, bwcIndexesExist, bwcAliasesExist); + assertMonitorResources(webServer, + typeMappingsExistAlready, templatesExistsAlready, pipelineExistsAlready, + bwcIndexesExist, bwcAliasesExist); MockRequest recordedRequest = assertBulk(webServer); @SuppressWarnings("unchecked") @@ -321,7 +347,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase { .setTransientSettings(Settings.builder().put("xpack.monitoring.exporters._http.index.name.time_format", newTimeFormat))); enqueueGetClusterVersionResponse(Version.CURRENT); - enqueueSetupResponses(webServer, true, true, false, false); + enqueueSetupResponses(webServer, true, true, true, false, false); enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}"); doc = newRandomMonitoringDoc(); @@ -330,7 +356,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase { String expectedMonitoringIndex = ".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-" + 
DateTimeFormat.forPattern(newTimeFormat).withZoneUTC().print(doc.getTimestamp()); - assertMonitorResources(webServer, true, true, false, false); + assertMonitorResources(webServer, true, true, true, false, false); recordedRequest = assertBulk(webServer); bytes = recordedRequest.getBody().getBytes(StandardCharsets.UTF_8); @@ -357,22 +383,58 @@ public class HttpExporterIT extends MonitoringIntegTestCase { assertHeaders(request, customHeaders); } - private void assertMonitorResources(final MockWebServer webServer, final boolean templateAlreadyExists, - final boolean pipelineAlreadyExists, boolean bwcIndexesExist, boolean bwcAliasesExist) throws Exception { - assertMonitorResources(webServer, templateAlreadyExists, pipelineAlreadyExists, bwcIndexesExist, bwcAliasesExist, null, null); + private void assertMonitorResources(final MockWebServer webServer, + final boolean typeMappingsExistAlready, + final boolean templateAlreadyExists, final boolean pipelineAlreadyExists, + final boolean bwcIndexesExist, final boolean bwcAliasesExist) throws Exception { + assertMonitorResources(webServer, typeMappingsExistAlready, templateAlreadyExists, pipelineAlreadyExists, + bwcIndexesExist, bwcAliasesExist, null, null); } - private void assertMonitorResources(final MockWebServer webServer, final boolean templateAlreadyExists, - final boolean pipelineAlreadyExists, boolean bwcIndexesExist, boolean bwcAliasesExist, - @Nullable final Map customHeaders, @Nullable final String basePath) throws Exception { + private void assertMonitorResources(final MockWebServer webServer, + final boolean typeMappingsExistAlready, + final boolean templateAlreadyExists, final boolean pipelineAlreadyExists, + boolean bwcIndexesExist, boolean bwcAliasesExist, + @Nullable final Map customHeaders, + @Nullable final String basePath) throws Exception { assertMonitorVersion(webServer, customHeaders, basePath); + assertMonitorMappingTypes(webServer, typeMappingsExistAlready, customHeaders, basePath); 
assertMonitorTemplates(webServer, templateAlreadyExists, customHeaders, basePath); assertMonitorPipelines(webServer, pipelineAlreadyExists, customHeaders, basePath); assertMonitorBackwardsCompatibilityAliases(webServer, bwcIndexesExist && false == bwcAliasesExist, customHeaders, basePath); } - private void assertMonitorTemplates(final MockWebServer webServer, final boolean alreadyExists, - @Nullable final Map customHeaders, @Nullable final String basePath) throws Exception { + private void assertMonitorMappingTypes(final MockWebServer webServer, + final boolean alreadyExists, + @Nullable final Map customHeaders, + @Nullable final String basePath) throws Exception { + final String pathPrefix = basePathToAssertablePrefix(basePath); + MockRequest request; + + for (final String type : MonitoringTemplateUtils.NEW_DATA_TYPES) { + request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("GET")); + assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/" + DATA_INDEX + "/_mapping/" + type)); + assertThat(request.getUri().getQuery(), nullValue()); + assertHeaders(request, customHeaders); + + if (alreadyExists == false) { + request = webServer.takeRequest(); + + assertThat(request.getMethod(), equalTo("PUT")); + assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/" + DATA_INDEX + "/_mapping/" + type)); + assertThat(request.getUri().getQuery(), nullValue()); + assertThat(request.getBody(), equalTo("{\"enabled\":false}")); + assertHeaders(request, customHeaders); + } + } + } + + private void assertMonitorTemplates(final MockWebServer webServer, + final boolean alreadyExists, + @Nullable final Map customHeaders, + @Nullable final String basePath) throws Exception { final String pathPrefix = basePathToAssertablePrefix(basePath); MockRequest request; @@ -510,7 +572,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase { doc.setClusterUUID(internalCluster().getClusterName()); doc.setTimestamp(System.currentTimeMillis()); 
doc.setSourceNode(new DiscoveryNode("id", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT)); - doc.setClusterState(ClusterState.PROTO); + doc.setClusterState(ClusterState.EMPTY_STATE); doc.setStatus(ClusterHealthStatus.GREEN); return doc; } @@ -550,13 +612,41 @@ public class HttpExporterIT extends MonitoringIntegTestCase { .field("number", v.toString()).endObject().endObject().bytes().utf8ToString())); } - private void enqueueSetupResponses(MockWebServer webServer, boolean templatesAlreadyExists, boolean pipelineAlreadyExists, - boolean bwcIndexesExist, boolean bwcAliasesExist) throws IOException { + private void enqueueSetupResponses(MockWebServer webServer, + boolean typeMappingsAlreadyExist, + boolean templatesAlreadyExists, boolean pipelineAlreadyExists, + boolean bwcIndexesExist, boolean bwcAliasesExist) throws IOException { + enqueueMappingTypeResponses(webServer, typeMappingsAlreadyExist); enqueueTemplateResponses(webServer, templatesAlreadyExists); enqueuePipelineResponses(webServer, pipelineAlreadyExists); enqueueBackwardsCompatibilityAliasResponse(webServer, bwcIndexesExist, bwcAliasesExist); } + private void enqueueMappingTypeResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException { + if (alreadyExists) { + enqueueMappingTypeResponsesExistsAlreadyOrWillBeCreated(webServer); + } else { + enqueueMappingTypeResponsesDoesNotExistYet(webServer); + } + } + + private void enqueueMappingTypeResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException { + for (final String type : MonitoringTemplateUtils.NEW_DATA_TYPES) { + enqueueResponse(webServer, 200, "{}"); + enqueueResponse(webServer, 200, "type [" + type + "] created"); + } + } + + private void enqueueMappingTypeResponsesExistsAlreadyOrWillBeCreated(final MockWebServer webServer) throws IOException { + for (final String type : MonitoringTemplateUtils.NEW_DATA_TYPES) { + if (randomBoolean()) { + enqueueResponse(webServer, 200, 
"{\".monitoring-data-2\":{\"" + type + "\":{\"enabled\":false}}}"); + } else { + enqueueResponse(webServer, 404, "index does not exist; template will create it"); + } + } + } + private void enqueueTemplateResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException { if (alreadyExists) { enqueueTemplateResponsesExistsAlready(webServer); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java index 974d2fc6292..68032d1455a 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java @@ -16,12 +16,14 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.monitoring.exporter.Exporter; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils.DATA_INDEX; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyMapOf; import static org.mockito.Matchers.eq; @@ -37,7 +39,11 @@ import static org.mockito.Mockito.when; */ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTestCase { - private final int EXPECTED_TEMPLATES = 3; + /** + * kibana, logstash, beats + */ + private final int EXPECTED_TYPES = MonitoringTemplateUtils.NEW_DATA_TYPES.length; + private final int EXPECTED_TEMPLATES = 4; private final RestClient client = mock(RestClient.class); private final Response versionResponse = 
mock(Response.class); @@ -60,7 +66,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe verifyNoMoreInteractions(client); } - public void testTemplateCheckBlocksAfterSuccessfulVersion() throws IOException { + public void testTypeMappingCheckBlocksAfterSuccessfulVersion() throws IOException { final Exception exception = failureGetException(); final boolean firstSucceeds = randomBoolean(); int expectedGets = 1; @@ -68,6 +74,109 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe whenValidVersionResponse(); + // failure in the middle of various templates being checked/published; suggests a node dropped + if (firstSucceeds) { + final boolean successfulFirst = randomBoolean(); + // -2 from one success + a necessary failure after it! + final int extraPasses = Math.max(randomIntBetween(0, EXPECTED_TYPES - 2), 0); + final int successful = randomIntBetween(0, extraPasses); + final int unsuccessful = extraPasses - successful; + + final Response first = successfulFirst ? successfulGetTypeMappingResponse() : unsuccessfulGetTypeMappingResponse(); + + final List otherResponses = getTypeMappingResponses(successful, unsuccessful); + + // last check fails implies that N - 2 publishes succeeded! + when(client.performRequest(eq("GET"), startsWith("/" + DATA_INDEX + "/_mapping/"), anyMapOf(String.class, String.class))) + .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()])) + .thenThrow(exception); + whenSuccessfulPutTypeMappings(otherResponses.size() + 1); + + expectedGets += 1 + successful + unsuccessful; + expectedPuts = (successfulFirst ? 
0 : 1) + unsuccessful; + } else { + when(client.performRequest(eq("GET"), startsWith("/" + DATA_INDEX + "/_mapping/"), anyMapOf(String.class, String.class))) + .thenThrow(exception); + } + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become not-dirty + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTypeMappings(expectedGets); + verifyPutTypeMappings(expectedPuts); + verifyNoMoreInteractions(client); + } + + public void testTypeMappingPublishBlocksAfterSuccessfulVersion() throws IOException { + final Exception exception = failurePutException(); + final boolean firstSucceeds = randomBoolean(); + int expectedGets = 1; + int expectedPuts = 1; + + whenValidVersionResponse(); + + // failure in the middle of various templates being checked/published; suggests a node dropped + if (firstSucceeds) { + final Response firstSuccess = successfulPutResponse(); + // -2 from one success + a necessary failure after it! 
+ final int extraPasses = randomIntBetween(0, EXPECTED_TYPES - 2); + final int successful = randomIntBetween(0, extraPasses); + final int unsuccessful = extraPasses - successful; + + final List otherResponses = successfulPutResponses(unsuccessful); + + // first one passes for sure, so we need an extra "unsuccessful" GET + whenGetTypeMappingResponse(successful, unsuccessful + 2); + + // previous publishes must have succeeded + when(client.performRequest(eq("PUT"), + startsWith("/" + DATA_INDEX + "/_mapping/"), + anyMapOf(String.class, String.class), + any(HttpEntity.class))) + .thenReturn(firstSuccess, otherResponses.toArray(new Response[otherResponses.size()])) + .thenThrow(exception); + + // GETs required for each PUT attempt (first is guaranteed "unsuccessful") + expectedGets += successful + unsuccessful + 1; + // unsuccessful are PUT attempts + the guaranteed successful PUT (first) + expectedPuts += unsuccessful + 1; + } else { + // fail the check so that it has to attempt the PUT + whenGetTypeMappingResponse(0, 1); + + when(client.performRequest(eq("PUT"), + startsWith("/" + DATA_INDEX + "/_mapping/"), + anyMapOf(String.class, String.class), + any(HttpEntity.class))) + .thenThrow(exception); + } + + assertTrue(resources.isDirty()); + assertFalse(resources.checkAndPublish(client)); + // ensure it didn't magically become not-dirty + assertTrue(resources.isDirty()); + + verifyVersionCheck(); + verifyGetTypeMappings(expectedGets); + verifyPutTypeMappings(expectedPuts); + verifyNoMoreInteractions(client); + } + + public void testTemplateCheckBlocksAfterSuccessfulTypeMapping() throws IOException { + final int successfulGetTypeMappings = randomIntBetween(0, EXPECTED_TYPES); + final int unsuccessfulGetTypeMappings = EXPECTED_TYPES - successfulGetTypeMappings; + final Exception exception = failureGetException(); + final boolean firstSucceeds = randomBoolean(); + int expectedGets = 1; + int expectedPuts = 0; + + whenValidVersionResponse(); + 
whenGetTypeMappingResponse(successfulGetTypeMappings, unsuccessfulGetTypeMappings); + whenSuccessfulPutTypeMappings(EXPECTED_TYPES); + // failure in the middle of various templates being checked/published; suggests a node dropped if (firstSucceeds) { final boolean successfulFirst = randomBoolean(); @@ -95,22 +204,28 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe assertTrue(resources.isDirty()); assertFalse(resources.checkAndPublish(client)); - // ensure it didn't magically become + // ensure it didn't magically become not-dirty assertTrue(resources.isDirty()); verifyVersionCheck(); + verifyGetTypeMappings(EXPECTED_TYPES); + verifyPutTypeMappings(unsuccessfulGetTypeMappings); verifyGetTemplates(expectedGets); verifyPutTemplates(expectedPuts); verifyNoMoreInteractions(client); } - public void testTemplatePublishBlocksAfterSuccessfulVersion() throws IOException { + public void testTemplatePublishBlocksAfterSuccessfulTypeMapping() throws IOException { + final int successfulGetTypeMappings = randomIntBetween(0, EXPECTED_TYPES); + final int unsuccessfulGetTypeMappings = EXPECTED_TYPES - successfulGetTypeMappings; final Exception exception = failurePutException(); final boolean firstSucceeds = randomBoolean(); int expectedGets = 1; int expectedPuts = 1; whenValidVersionResponse(); + whenGetTypeMappingResponse(successfulGetTypeMappings, unsuccessfulGetTypeMappings); + whenSuccessfulPutTypeMappings(EXPECTED_TYPES); // failure in the middle of various templates being checked/published; suggests a node dropped if (firstSucceeds) { @@ -144,21 +259,27 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe assertTrue(resources.isDirty()); assertFalse(resources.checkAndPublish(client)); - // ensure it didn't magically become + // ensure it didn't magically become not-dirty assertTrue(resources.isDirty()); verifyVersionCheck(); + verifyGetTypeMappings(EXPECTED_TYPES); + 
verifyPutTypeMappings(unsuccessfulGetTypeMappings); verifyGetTemplates(expectedGets); verifyPutTemplates(expectedPuts); verifyNoMoreInteractions(client); } public void testPipelineCheckBlocksAfterSuccessfulTemplates() throws IOException { + final int successfulGetTypeMappings = randomIntBetween(0, EXPECTED_TYPES); + final int unsuccessfulGetTypeMappings = EXPECTED_TYPES - successfulGetTypeMappings; final int successfulGetTemplates = randomIntBetween(0, EXPECTED_TEMPLATES); final int unsuccessfulGetTemplates = EXPECTED_TEMPLATES - successfulGetTemplates; final Exception exception = failureGetException(); whenValidVersionResponse(); + whenGetTypeMappingResponse(successfulGetTypeMappings, unsuccessfulGetTypeMappings); + whenSuccessfulPutTypeMappings(EXPECTED_TYPES); whenGetTemplates(successfulGetTemplates, unsuccessfulGetTemplates); whenSuccessfulPutTemplates(EXPECTED_TEMPLATES); @@ -168,10 +289,12 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe assertTrue(resources.isDirty()); assertFalse(resources.checkAndPublish(client)); - // ensure it didn't magically become + // ensure it didn't magically become not-dirty assertTrue(resources.isDirty()); verifyVersionCheck(); + verifyGetTypeMappings(EXPECTED_TYPES); + verifyPutTypeMappings(unsuccessfulGetTypeMappings); verifyGetTemplates(EXPECTED_TEMPLATES); verifyPutTemplates(unsuccessfulGetTemplates); verifyGetPipelines(1); @@ -180,11 +303,15 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe } public void testPipelinePublishBlocksAfterSuccessfulTemplates() throws IOException { + final int successfulGetTypeMappings = randomIntBetween(0, EXPECTED_TYPES); + final int unsuccessfulGetTypeMappings = EXPECTED_TYPES - successfulGetTypeMappings; final int successfulGetTemplates = randomIntBetween(0, EXPECTED_TEMPLATES); final int unsuccessfulGetTemplates = EXPECTED_TEMPLATES - successfulGetTemplates; final Exception exception = failurePutException(); 
whenValidVersionResponse(); + whenGetTypeMappingResponse(successfulGetTypeMappings, unsuccessfulGetTypeMappings); + whenSuccessfulPutTypeMappings(EXPECTED_TYPES); whenGetTemplates(successfulGetTemplates, unsuccessfulGetTemplates); whenSuccessfulPutTemplates(EXPECTED_TEMPLATES); // pipeline can't be there @@ -199,10 +326,12 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe assertTrue(resources.isDirty()); assertFalse(resources.checkAndPublish(client)); - // ensure it didn't magically become + // ensure it didn't magically become not-dirty assertTrue(resources.isDirty()); verifyVersionCheck(); + verifyGetTypeMappings(EXPECTED_TYPES); + verifyPutTypeMappings(unsuccessfulGetTypeMappings); verifyGetTemplates(EXPECTED_TEMPLATES); verifyPutTemplates(unsuccessfulGetTemplates); verifyGetPipelines(1); @@ -211,12 +340,16 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe } public void testSuccessfulChecks() throws IOException { + final int successfulGetTypeMappings = randomIntBetween(0, EXPECTED_TYPES); + final int unsuccessfulGetTypeMappings = EXPECTED_TYPES - successfulGetTypeMappings; final int successfulGetTemplates = randomIntBetween(0, EXPECTED_TEMPLATES); final int unsuccessfulGetTemplates = EXPECTED_TEMPLATES - successfulGetTemplates; final int successfulGetPipelines = randomIntBetween(0, 1); final int unsuccessfulGetPipelines = 1 - successfulGetPipelines; whenValidVersionResponse(); + whenGetTypeMappingResponse(successfulGetTypeMappings, unsuccessfulGetTypeMappings); + whenSuccessfulPutTypeMappings(EXPECTED_TYPES); whenGetTemplates(successfulGetTemplates, unsuccessfulGetTemplates); whenSuccessfulPutTemplates(unsuccessfulGetTemplates); whenGetPipelines(successfulGetPipelines, unsuccessfulGetPipelines); @@ -230,6 +363,8 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe assertFalse(resources.isDirty()); verifyVersionCheck(); + verifyGetTypeMappings(EXPECTED_TYPES); + 
verifyPutTypeMappings(unsuccessfulGetTypeMappings); verifyGetTemplates(EXPECTED_TEMPLATES); verifyPutTemplates(unsuccessfulGetTemplates); verifyGetPipelines(1); @@ -258,8 +393,50 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe return response("GET", "/_get_something", notFoundCheckStatus()); } + private Response successfulGetTypeMappingResponse() { + final Response response; + + if (randomBoolean()) { + // it returned 200, but we also need it to contain _something_ in the JSON {...} + final HttpEntity entity = new StringEntity("{\"" + DATA_INDEX + "\":{}}", ContentType.APPLICATION_JSON); + + response = successfulGetResponse(); + + when(response.getEntity()).thenReturn(entity); + } else { + // simulates the index does not exist + response = unsuccessfulGetResponse(); + } + + return response; + } + + private Response unsuccessfulGetTypeMappingResponse() { + // "unsuccessful" for type mappings is a response code 200, but the response is literally "{}" + final Response response = successfulGetResponse(); + final HttpEntity entity = new StringEntity("{}", ContentType.APPLICATION_JSON); + + when(response.getEntity()).thenReturn(entity); + + return response; + } + + private List getTypeMappingResponses(final int successful, final int unsuccessful) { + final List responses = new ArrayList<>(successful + unsuccessful); + + for (int i = 0; i < successful; ++i) { + responses.add(successfulGetTypeMappingResponse()); + } + + for (int i = 0; i < unsuccessful; ++i) { + responses.add(unsuccessfulGetTypeMappingResponse()); + } + + return responses; + } + private List getResponses(final int successful, final int unsuccessful) { - final List responses = new ArrayList<>(successful); + final List responses = new ArrayList<>(successful + unsuccessful); for (int i = 0; i < successful; ++i) { responses.add(successfulGetResponse()); @@ -299,6 +476,35 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe 
when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse); } + private void whenGetTypeMappingResponse(final int successful, final int unsuccessful) throws IOException { + final List gets = getTypeMappingResponses(successful, unsuccessful); + + if (gets.size() == 1) { + when(client.performRequest(eq("GET"), startsWith("/" + DATA_INDEX + "/_mapping"), anyMapOf(String.class, String.class))) + .thenReturn(gets.get(0)); + } else { + when(client.performRequest(eq("GET"), startsWith("/" + DATA_INDEX + "/_mapping"), anyMapOf(String.class, String.class))) + .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1])); + } + } + + private void whenSuccessfulPutTypeMappings(final int successful) throws IOException { + final List successfulPuts = successfulPutResponses(successful); + + // empty is possible if they all exist + if (successful == 1) { + when(client.performRequest(eq("PUT"), + startsWith("/" + DATA_INDEX + "/_mapping"), + anyMapOf(String.class, String.class), any(HttpEntity.class))) + .thenReturn(successfulPuts.get(0)); + } else if (successful > 1) { + when(client.performRequest(eq("PUT"), + startsWith("/" + DATA_INDEX + "/_mapping"), + anyMapOf(String.class, String.class), any(HttpEntity.class))) + .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1])); + } + } + private void whenGetTemplates(final int successful, final int unsuccessful) throws IOException { final List gets = getResponses(successful, unsuccessful); @@ -375,6 +581,18 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe verify(client).performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class)); } + private void verifyGetTypeMappings(final int called) throws IOException { + verify(client, times(called)) + .performRequest(eq("GET"), startsWith("/" + DATA_INDEX + "/_mapping"), anyMapOf(String.class, 
String.class)); + } + + private void verifyPutTypeMappings(final int called) throws IOException { + verify(client, times(called)).performRequest(eq("PUT"), // method + startsWith("/" + DATA_INDEX + "/_mapping"), // endpoint + anyMapOf(String.class, String.class), // parameters (e.g., timeout) + any(HttpEntity.class)); // raw template + } + private void verifyGetTemplates(final int called) throws IOException { verify(client, times(called)).performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java index 18d9f79fec4..69cebebcf5e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.Exporter.Config; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; import org.elasticsearch.xpack.ssl.SSLService; @@ -284,6 +285,10 @@ public class HttpExporterTests extends ESTestCase { final List resources = multiResource.getResources(); final int version = (int)resources.stream().filter((resource) -> resource instanceof VersionHttpResource).count(); + final List typeMappings = + resources.stream().filter((resource) -> resource instanceof DataTypeMappingHttpResource) + .map(DataTypeMappingHttpResource.class::cast) + .collect(Collectors.toList()); final List templates = resources.stream().filter((resource) -> resource instanceof TemplateHttpResource) 
.map(TemplateHttpResource.class::cast) @@ -298,9 +303,11 @@ public class HttpExporterTests extends ESTestCase { .collect(Collectors.toList()); // expected number of resources - assertThat(multiResource.getResources().size(), equalTo(version + templates.size() + pipelines.size() + bwc.size())); + assertThat(multiResource.getResources().size(), + equalTo(version + typeMappings.size() + templates.size() + pipelines.size() + bwc.size())); assertThat(version, equalTo(1)); - assertThat(templates, hasSize(3)); + assertThat(typeMappings, hasSize(MonitoringTemplateUtils.NEW_DATA_TYPES.length)); + assertThat(templates, hasSize(4)); assertThat(pipelines, hasSize(useIngest ? 1 : 0)); assertThat(bwc, hasSize(1)); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java index 8f273fa096a..feef43f27a0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java @@ -9,7 +9,9 @@ import org.apache.http.HttpEntity; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.SuppressLoggerChecks; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse; @@ -58,8 +60,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc when(client.performRequest("GET", endpoint, resource.getParameters())).thenReturn(response); - assertThat(resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), 
- is(CheckResponse.ERROR)); + sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, CheckResponse.ERROR, response); verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType); verify(client).performRequest("GET", endpoint, resource.getParameters()); @@ -73,11 +74,11 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc final RestStatus failedStatus = failedCheckStatus(); final ResponseException responseException = responseException("GET", endpoint, failedStatus); final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException); + final Response response = e == responseException ? responseException.getResponse() : null; when(client.performRequest("GET", endpoint, resource.getParameters())).thenThrow(e); - assertThat(resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), - is(CheckResponse.ERROR)); + sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, CheckResponse.ERROR, response); verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType); verify(client).performRequest("GET", endpoint, resource.getParameters()); @@ -139,8 +140,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc when(client.performRequest("GET", endpoint, resource.getParameters())).thenReturn(response); - assertThat(resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), - is(expected)); + sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, expected, response); verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType); verify(client).performRequest("GET", endpoint, resource.getParameters()); 
@@ -188,4 +188,21 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc verifyNoMoreInteractions(client, response, logger, entity); } + private void sometimesAssertSimpleCheckForResource(final RestClient client, final Logger logger, + final String resourceBasePath, + final String resourceName, final String resourceType, + final CheckResponse expected, final Response response) { + // sometimes use the simple check + if (randomBoolean()) { + assertThat(resource.simpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), + is(expected)); + } else { + final Tuple responseTuple = + resource.checkForResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType); + + assertThat(responseTuple.v1(), is(expected)); + assertThat(responseTuple.v2(), is(response)); + } + } + } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTemplateTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTemplateTests.java index 27147d62cb9..96be1c1e5c8 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTemplateTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTemplateTests.java @@ -5,58 +5,230 @@ */ package org.elasticsearch.xpack.monitoring.exporter.local; -import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.PipelineConfiguration; -import org.elasticsearch.xpack.monitoring.exporter.AbstractExporterTemplateTestCase; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.monitoring.MonitoringSettings; +import org.elasticsearch.xpack.monitoring.collector.Collector; +import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector; import org.elasticsearch.xpack.monitoring.exporter.Exporter; +import org.elasticsearch.xpack.monitoring.exporter.Exporters; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; +import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; +import org.elasticsearch.xpack.security.InternalClient; -import java.util.Collections; +import java.io.IOException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.notNullValue; -public class LocalExporterTemplateTests extends AbstractExporterTemplateTestCase { +@ESIntegTestCase.ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0) +public class LocalExporterTemplateTests extends MonitoringIntegTestCase { + + private final Settings localExporter = Settings.builder().put("type", LocalExporter.TYPE).build(); @Override - protected Settings exporterSettings() { - return Settings.builder().put("type", LocalExporter.TYPE).build(); + protected Settings nodeSettings(int nodeOrdinal) { + 
Settings.Builder settings = Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(MonitoringSettings.INTERVAL.getKey(), "-1") + .put("xpack.monitoring.exporters._exporter.type", LocalExporter.TYPE); + return settings.build(); } - @Override - protected void deleteTemplates() throws Exception { - waitNoPendingTasksOnAll(); - cluster().wipeAllTemplates(Collections.emptySet()); + public void testCreateWhenExistingTemplatesAreOld() throws Exception { + internalCluster().startNode(); + + // put an old variant of the monitoring-data-# index so that types need to be added + final CreateIndexRequest request = new CreateIndexRequest(MonitoringTemplateUtils.DATA_INDEX); + + request.settings(Settings.builder().put("index.mapper.dynamic", false).build()); + // notably absent are: kibana, logstash, and beats + request.mapping("cluster_info", "{\"enabled\": false}"); + request.mapping("node", "{\"enabled\": false}"); + request.mapping("fake", "{\"enabled\": false}"); + + client().admin().indices().create(request).actionGet(); + + putTemplate(indexTemplateName()); + putTemplate(dataTemplateName()); + putPipeline(Exporter.EXPORT_PIPELINE_NAME); + + doExporting(); + + logger.debug("--> existing templates are old"); + assertTemplateExists(dataTemplateName()); + assertTemplateExists(indexTemplateName()); + + logger.debug("--> existing templates are old: new templates should be created"); + for (String template : monitoringTemplateNames()) { + assertTemplateExists(template); + } + assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); + + doExporting(); + + logger.debug("--> indices should have been created"); + awaitIndexExists(currentDataIndexName()); + assertIndicesExists(currentTimestampedIndexName()); + + // ensure that it added mapping types to monitoring-data-2, without throwing away the index + assertMapping(MonitoringTemplateUtils.DATA_INDEX, "fake"); + for (final String type : MonitoringTemplateUtils.NEW_DATA_TYPES) { + 
assertMapping(MonitoringTemplateUtils.DATA_INDEX, type); + } } - @Override - protected void deletePipeline() throws Exception { - waitNoPendingTasksOnAll(); - cluster().client().admin().cluster().deletePipeline(new DeletePipelineRequest(Exporter.EXPORT_PIPELINE_NAME)); + private void assertMapping(final String index, final String type) throws Exception { + GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).setTypes(type).get(); + ImmutableOpenMap mappings = response.getMappings().get(index); + assertThat(mappings, notNullValue()); + MappingMetaData mappingMetaData = mappings.get(type); + assertThat(mappingMetaData, notNullValue()); } - @Override - protected void putTemplate(String name) throws Exception { + public void testCreateWhenExistingTemplateAreUpToDate() throws Exception { + internalCluster().startNode(); + + putTemplate(indexTemplateName()); + putTemplate(dataTemplateName()); + putPipeline(Exporter.EXPORT_PIPELINE_NAME); + + doExporting(); + + logger.debug("--> existing templates are up to date"); + for (String template : monitoringTemplateNames()) { + assertTemplateExists(template); + } + assertPipelineExists(Exporter.EXPORT_PIPELINE_NAME); + + logger.debug("--> existing templates has the same version: they should not be changed"); + assertTemplateNotUpdated(indexTemplateName()); + assertTemplateNotUpdated(dataTemplateName()); + assertPipelineNotUpdated(Exporter.EXPORT_PIPELINE_NAME); + + doExporting(); + + logger.debug("--> indices should have been created"); + awaitIndexExists(currentDataIndexName()); + awaitIndexExists(currentTimestampedIndexName()); + } + + protected void doExporting() throws Exception { + // TODO: these should be unit tests, not using guice (copied from now-deleted AbstractExporterTemplateTestCase) + ClusterService clusterService = internalCluster().getInstance(ClusterService.class); + XPackLicenseState licenseState = internalCluster().getInstance(XPackLicenseState.class); + LicenseService 
licenseService = internalCluster().getInstance(LicenseService.class); + InternalClient client = internalCluster().getInstance(InternalClient.class); + Collector collector = new ClusterStatsCollector(clusterService.getSettings(), clusterService, + new MonitoringSettings(clusterService.getSettings(), clusterService.getClusterSettings()), + licenseState, client, licenseService); + + Exporters exporters = internalCluster().getInstance(Exporters.class); + assertNotNull(exporters); + + Exporter exporter = exporters.getExporter("_exporter"); + + // Wait for exporting bulks to be ready to export + Runnable busy = () -> assertThat(exporter.openBulk(), notNullValue()); + assertBusy(busy); + exporters.export(collector.collect()); + } + + private String dataTemplateName() { + MockDataIndexNameResolver resolver = new MockDataIndexNameResolver(MonitoringTemplateUtils.TEMPLATE_VERSION); + return resolver.templateName(); + } + + private String indexTemplateName() { + MockTimestampedIndexNameResolver resolver = + new MockTimestampedIndexNameResolver(MonitoredSystem.ES, localExporter, MonitoringTemplateUtils.TEMPLATE_VERSION); + return resolver.templateName(); + } + + private String currentDataIndexName() { + return MonitoringTemplateUtils.DATA_INDEX; + } + + private String currentTimestampedIndexName() { + MonitoringDoc doc = new MonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString()); + doc.setTimestamp(System.currentTimeMillis()); + + MockTimestampedIndexNameResolver resolver = + new MockTimestampedIndexNameResolver(MonitoredSystem.ES, localExporter, MonitoringTemplateUtils.TEMPLATE_VERSION); + return resolver.index(doc); + } + + /** Generates a basic template **/ + private BytesReference generateTemplateSource(String name) throws IOException { + return jsonBuilder().startObject() + .field("template", name) + .startObject("settings") + .field("index.number_of_shards", 1) + .field("index.number_of_replicas", 1) + .endObject() + .startObject("mappings") + 
.startObject("_default_") + .startObject("_all") + .field("enabled", false) + .endObject() + .field("date_detection", false) + .startObject("properties") + .startObject("cluster_uuid") + .field("type", "keyword") + .endObject() + .startObject("timestamp") + .field("type", "date") + .field("format", "date_time") + .endObject() + .endObject() + .endObject() + .startObject("cluster_info") + .field("enabled", false) + .endObject() + .startObject("cluster_stats") + .startObject("properties") + .startObject("cluster_stats") + .field("type", "object") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject().bytes(); + } + + private void putTemplate(String name) throws Exception { waitNoPendingTasksOnAll(); assertAcked(client().admin().indices().preparePutTemplate(name).setSource(generateTemplateSource(name)).get()); } - @Override - protected void putPipeline(String name) throws Exception { + private void putPipeline(String name) throws Exception { waitNoPendingTasksOnAll(); assertAcked(client().admin().cluster().preparePutPipeline(name, Exporter.emptyPipeline(XContentType.JSON).bytes()).get()); } - @Override - protected void assertTemplateExists(String name) throws Exception { + private void assertTemplateExists(String name) throws Exception { waitNoPendingTasksOnAll(); waitForMonitoringTemplate(name); } - @Override - protected void assertPipelineExists(String name) throws Exception { + private void assertPipelineExists(String name) throws Exception { waitNoPendingTasksOnAll(); assertPipelineInstalled(name); } @@ -73,21 +245,14 @@ public class LocalExporterTemplateTests extends AbstractExporterTemplateTestCase }, 60, TimeUnit.SECONDS); } - @Override - protected void assertTemplateNotUpdated(String name) throws Exception { + private void assertTemplateNotUpdated(String name) throws Exception { waitNoPendingTasksOnAll(); assertTemplateExists(name); } - @Override - protected void assertPipelineNotUpdated(String name) throws Exception { + private void 
assertPipelineNotUpdated(String name) throws Exception { waitNoPendingTasksOnAll(); assertPipelineExists(name); } - @AwaitsFix(bugUrl = "testing locally to determine why this is a race condition on Jenkins") - @Override - public void testCreateWhenNoExistingTemplates() throws Exception { - - } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTests.java index 0a6af45da9e..806e09812d2 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterTests.java @@ -60,7 +60,7 @@ public class LocalExporterTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } @@ -210,7 +210,7 @@ public class LocalExporterTests extends MonitoringIntegTestCase { doc.setClusterUUID(internalCluster().getClusterName()); doc.setTimestamp(System.currentTimeMillis()); doc.setSourceNode(new DiscoveryNode("id", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT)); - doc.setClusterState(ClusterState.PROTO); + doc.setClusterState(ClusterState.EMPTY_STATE); doc.setStatus(ClusterHealthStatus.GREEN); return doc; } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java index 4ddcb7c519e..db69509965f 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterInfoTests.java @@ -52,7 +52,7 @@ public class ClusterInfoTests extends 
MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } @@ -148,7 +148,7 @@ public class ClusterInfoTests extends MonitoringIntegTestCase { .should(QueryBuilders.matchQuery(License.Fields.STATUS, License.Status.INVALID.label())) .should(QueryBuilders.matchQuery(License.Fields.STATUS, License.Status.EXPIRED.label())) .should(QueryBuilders.matchQuery("cluster_name", cluster().getClusterName())) - .minimumNumberShouldMatch(1) + .minimumShouldMatch(1) ).get(), 0L); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStateTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStateTests.java index d423dab6177..818ef4f498d 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStateTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStateTests.java @@ -58,7 +58,7 @@ public class ClusterStateTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java index f2c2d445fc5..214742b0af0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsResolverTests.java @@ -115,7 +115,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa buildNewFakeTransportAddress()); return new NodeInfo(Version.CURRENT, 
org.elasticsearch.Build.CURRENT, new DiscoveryNode("node_0", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), Settings.EMPTY, - DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong()), JvmInfo.jvmInfo(), + DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean(), randomNonNegativeLong()), JvmInfo.jvmInfo(), new ThreadPoolInfo(Collections.singletonList(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5))), new TransportInfo(transportAddress, Collections.emptyMap()), new HttpInfo(transportAddress, randomLong()), new PluginsAndModules(Collections.emptyList(), Collections.emptyList()), diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsTests.java index d1c9b32d123..5dd16583c1c 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/cluster/ClusterStatsTests.java @@ -38,7 +38,7 @@ public class ClusterStatsTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexRecoveryTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexRecoveryTests.java index d2a9714409d..8ec9c137013 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexRecoveryTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexRecoveryTests.java @@ -44,7 +44,7 @@ public class IndexRecoveryTests extends MonitoringIntegTestCase { @After public void cleanup() throws 
Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexStatsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexStatsTests.java index d5c97f58f6f..5463dc11ad6 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexStatsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndexStatsTests.java @@ -36,7 +36,7 @@ public class IndexStatsTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndicesStatsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndicesStatsTests.java index 3ecd98e3e90..344ce9bef85 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndicesStatsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/indices/IndicesStatsTests.java @@ -36,7 +36,7 @@ public class IndicesStatsTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/MultiNodesStatsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/MultiNodesStatsTests.java index de9f4cd26c3..b64ab207c5e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/MultiNodesStatsTests.java +++ 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/MultiNodesStatsTests.java @@ -38,7 +38,7 @@ public class MultiNodesStatsTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java index d67d26e509b..5d9133cd2be 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsResolverTests.java @@ -78,11 +78,13 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase< @Override protected void assertSourceField(String field, Map sourceFields) { // Assertions on node stats fields that are not reported on Windows platforms - if (Constants.WINDOWS && field.startsWith("node_stats.os.cpu.load_average")) { - return; + if (Constants.WINDOWS) { + if (field.startsWith("node_stats.os.cpu.load_average")) { + return; + } } - // we only report IoStats and spins on Linux + // we only report IoStats on Linux if (Constants.LINUX == false) { if (field.startsWith("node_stats.fs.io_stats")) { return; @@ -94,6 +96,16 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase< return; } + // cgroups can be null, and it's only reported on Linux + if (field.startsWith("node_stats.os.cgroup")) { + return; + } + + // load average is unavailable on macOS for 5m and 15m (but we get 1m), but it's also possible on Linux too + if ("node_stats.os.cpu.load_average.5m".equals(field) || "node_stats.os.cpu.load_average.15m".equals(field)) { + return; + } + super.assertSourceField(field, sourceFields); } diff --git 
a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsTests.java index 814247b27af..a5cbedab221 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/node/NodeStatsTests.java @@ -40,12 +40,11 @@ public class NodeStatsTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } public void testNodeStats() throws Exception { - logger.debug("--> creating some indices for future node stats"); final int numDocs = between(50, 150); for (int i = 0; i < numDocs; i++) { client().prepareIndex("test", "foo").setSource("value", randomInt()).get(); @@ -59,32 +58,34 @@ public class NodeStatsTests extends MonitoringIntegTestCase { awaitMonitoringDocsCount(greaterThan(0L), NodeStatsResolver.TYPE); - logger.debug("--> searching for monitoring documents of type [{}]", NodeStatsResolver.TYPE); SearchResponse response = client().prepareSearch().setTypes(NodeStatsResolver.TYPE).get(); assertThat(response.getHits().getTotalHits(), greaterThan(0L)); - logger.debug("--> checking that every document contains the expected fields"); - for (SearchHit searchHit : response.getHits().getHits()) { Map fields = searchHit.sourceAsMap(); for (String filter : nodeStatsFilters(watcherEnabled)) { if (Constants.WINDOWS) { // load average is unavailable on Windows - if ("node_stats.os.cpu.load_average.1m".equals(filter)) { + if (filter.startsWith("node_stats.os.cpu.load_average")) { continue; } } - if (filter.startsWith("node_stats.fs")) { - // NORELEASE what is going on here? 
+ + // fs and cgroup stats are only reported on Linux, but it's acceptable for _node/stats to report them as null if the OS is + // misconfigured or not reporting them for some reason (e.g., older kernel) + if (filter.startsWith("node_stats.fs") || filter.startsWith("node_stats.os.cgroup")) { + continue; + } + + // load average is unavailable on macOS for 5m and 15m (but we get 1m), but it's also possible on Linux too + if ("node_stats.os.cpu.load_average.5m".equals(filter) || "node_stats.os.cpu.load_average.15m".equals(filter)) { continue; } assertContains(filter, fields); } } - - logger.debug("--> node stats successfully collected"); } /** diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/shards/ShardsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/shards/ShardsTests.java index c71f0133b3d..deeabb494ca 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/shards/ShardsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/resolver/shards/ShardsTests.java @@ -52,7 +52,7 @@ public class ShardsTests extends MonitoringIntegTestCase { @After public void cleanup() throws Exception { - updateMonitoringInterval(-1, TimeUnit.SECONDS); + disableMonitoringInterval(); wipeMonitoringIndices(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java index ff2a13649fd..5aa8087ad47 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; 
+import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -30,12 +31,12 @@ import org.elasticsearch.xpack.XPackClient; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.MonitoringSettings; -import org.elasticsearch.xpack.monitoring.AgentService; +import org.elasticsearch.xpack.monitoring.client.MonitoringClient; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.resolver.MonitoringIndexNameResolver; import org.elasticsearch.xpack.monitoring.resolver.ResolversRegistry; -import org.elasticsearch.xpack.monitoring.client.MonitoringClient; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.security.authc.support.Hasher; @@ -174,7 +175,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { @Before public void setUp() throws Exception { super.setUp(); - startCollection(); + startMonitoringService(); } @After @@ -182,7 +183,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { if (watcherEnabled != null && watcherEnabled) { internalCluster().getInstance(WatcherLifeCycleService.class, internalCluster().getMasterName()).stop(); } - stopCollection(); + stopMonitoringService(); super.tearDown(); } @@ -202,36 +203,29 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { return false; } - protected void stopCollection() { - for (AgentService agent : internalCluster().getInstances(AgentService.class)) { - agent.stopCollection(); - } + protected void startMonitoringService() { + 
internalCluster().getInstances(MonitoringService.class).forEach(MonitoringService::start); } - protected void startCollection() { - for (AgentService agent : internalCluster().getInstances(AgentService.class)) { - agent.startCollection(); - } + protected void stopMonitoringService() { + internalCluster().getInstances(MonitoringService.class).forEach(MonitoringService::stop); } protected void wipeMonitoringIndices() throws Exception { CountDown retries = new CountDown(3); - assertBusy(new Runnable() { - @Override - public void run() { - try { - boolean exist = client().admin().indices().prepareExists(MONITORING_INDICES_PREFIX + "*") - .get().isExists(); - if (exist) { - deleteMonitoringIndices(); - } else { - retries.countDown(); - } - } catch (IndexNotFoundException e) { + assertBusy(() -> { + try { + boolean exist = client().admin().indices().prepareExists(MONITORING_INDICES_PREFIX + "*") + .get().isExists(); + if (exist) { + deleteMonitoringIndices(); + } else { retries.countDown(); } - assertThat(retries.isCountedDown(), is(true)); + } catch (IndexNotFoundException e) { + retries.countDown(); } + assertThat(retries.isCountedDown(), is(true)); }); } @@ -364,6 +358,10 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { return parent + "." 
+ field; } + protected void disableMonitoringInterval() { + updateMonitoringInterval(TimeValue.MINUS_ONE.millis(), TimeUnit.MILLISECONDS); + } + protected void updateMonitoringInterval(long value, TimeUnit timeUnit) { assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( Settings.builder().put(MonitoringSettings.INTERVAL.getKey(), value, timeUnit))); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java index 41a462ae2ab..81c4d810f45 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.authc.AuthenticationService; @@ -22,9 +21,6 @@ import org.junit.Before; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - public class SecurityContextTests extends ESTestCase { private boolean signHeader; @@ -42,9 +38,7 @@ public class SecurityContextTests extends ESTestCase { .build(); threadContext = new ThreadContext(settings); cryptoService = new CryptoService(settings, new Environment(settings)); - ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.getThreadContext()).thenReturn(threadContext); - securityContext = new SecurityContext(settings, threadPool, cryptoService); + securityContext = new 
SecurityContext(settings, threadContext, cryptoService); } public void testGetAuthenticationAndUserInEmptyContext() throws IOException { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityTemplateServiceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityTemplateServiceTests.java index 98971e98033..0520c45d5d1 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityTemplateServiceTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/SecurityTemplateServiceTests.java @@ -5,6 +5,11 @@ */ package org.elasticsearch.xpack.security; +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicReference; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.Action; @@ -32,20 +37,23 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.MockTransportClient; +import org.elasticsearch.xpack.security.authc.esnative.NativeRealmMigrator; +import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.elasticsearch.xpack.template.TemplateUtils; import org.junit.After; import org.junit.Before; - -import java.io.IOException; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; +import org.mockito.Mockito; import static org.elasticsearch.xpack.security.SecurityTemplateService.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.SecurityTemplateService.SECURITY_INDEX_TEMPLATE_VERSION_PATTERN; import static org.elasticsearch.xpack.security.SecurityTemplateService.SECURITY_TEMPLATE_NAME; +import static org.elasticsearch.xpack.security.SecurityTemplateService.UpgradeState; import static 
org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingVersionMatches; import static org.elasticsearch.xpack.security.SecurityTemplateService.securityTemplateExistsAndVersionMatches; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -54,6 +62,7 @@ public class SecurityTemplateServiceTests extends ESTestCase { private TransportClient transportClient; private ThreadPool threadPool; private ClusterService clusterService; + private NativeRealmMigrator nativeRealmMigrator; SecurityTemplateService securityTemplateService; private static final ClusterState EMPTY_CLUSTER_STATE = new ClusterState.Builder(new ClusterName("test-cluster")).build(); @@ -82,8 +91,16 @@ public class SecurityTemplateServiceTests extends ESTestCase { listeners.add(listener); } } + + nativeRealmMigrator = mock(NativeRealmMigrator.class); + Mockito.doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(false); + return null; + }).when(nativeRealmMigrator).performUpgrade(any(Version.class), any(ActionListener.class)); + client = new IClient(transportClient); - securityTemplateService = new SecurityTemplateService(Settings.EMPTY, client); + securityTemplateService = new SecurityTemplateService(Settings.EMPTY, client, nativeRealmMigrator); listeners = new CopyOnWriteArrayList<>(); } @@ -145,7 +162,7 @@ public class SecurityTemplateServiceTests extends ESTestCase { // now check what happens if we get back an unacknowledged response try { listeners.get(0).onResponse(new TestPutIndexTemplateResponse()); - fail("this hould have failed because request was not acknowledged"); + fail("this should have failed because request was not acknowledged"); } catch (ElasticsearchException e) { } 
assertFalse(securityTemplateService.updateMappingPending.get()); @@ -181,16 +198,38 @@ public class SecurityTemplateServiceTests extends ESTestCase { public void testOutdatedMappingIsIdentifiedAsNotUpToDate() throws IOException { String templateString = "/wrong-version-" + SECURITY_TEMPLATE_NAME + ".json"; + final Version wrongVersion = Version.fromString("4.0.0"); ClusterState.Builder clusterStateBuilder = createClusterStateWithMapping(templateString); assertFalse(SecurityTemplateService.securityIndexMappingUpToDate(clusterStateBuilder.build(), logger)); - checkMappingUpdateWorkCorrectly(clusterStateBuilder); + assertThat(SecurityTemplateService.oldestSecurityIndexMappingVersion(clusterStateBuilder.build(), logger), equalTo(wrongVersion)); + checkMappingUpdateWorkCorrectly(clusterStateBuilder, wrongVersion); } - private void checkMappingUpdateWorkCorrectly(ClusterState.Builder clusterStateBuilder) { + private void checkMappingUpdateWorkCorrectly(ClusterState.Builder clusterStateBuilder, Version expectedOldVersion) { + AtomicReference migratorVersionRef = new AtomicReference<>(null); + AtomicReference> migratorListenerRef = new AtomicReference<>(null); + Mockito.doAnswer(invocation -> { + migratorVersionRef.set((Version) invocation.getArguments()[0]); + migratorListenerRef.set((ActionListener) invocation.getArguments()[1]); + return null; + }).when(nativeRealmMigrator).performUpgrade(any(Version.class), any(ActionListener.class)); + + assertThat(securityTemplateService.upgradeDataState.get(), equalTo(UpgradeState.NOT_STARTED)); + securityTemplateService.clusterChanged(new ClusterChangedEvent("test-event", clusterStateBuilder.build() , EMPTY_CLUSTER_STATE)); + + assertThat(migratorVersionRef.get(), equalTo(expectedOldVersion)); + assertThat(migratorListenerRef.get(), notNullValue()); + assertThat(listeners.size(), equalTo(0)); // migrator has not responded yet + assertThat(securityTemplateService.updateMappingPending.get(), equalTo(false)); + 
assertThat(securityTemplateService.upgradeDataState.get(), equalTo(UpgradeState.IN_PROGRESS)); + + migratorListenerRef.get().onResponse(true); + assertThat(listeners.size(), equalTo(3)); // we have three types in the mapping assertTrue(securityTemplateService.updateMappingPending.get()); + assertThat(securityTemplateService.upgradeDataState.get(), equalTo(UpgradeState.COMPLETE)); // if we do it again this should not send an update ActionListener listener = listeners.get(0); @@ -254,7 +293,9 @@ public class SecurityTemplateServiceTests extends ESTestCase { String templateString = "/missing-version-" + SECURITY_TEMPLATE_NAME + ".json"; ClusterState.Builder clusterStateBuilder = createClusterStateWithMapping(templateString); assertFalse(SecurityTemplateService.securityIndexMappingUpToDate(clusterStateBuilder.build(), logger)); - checkMappingUpdateWorkCorrectly(clusterStateBuilder); + assertThat(SecurityTemplateService.oldestSecurityIndexMappingVersion(clusterStateBuilder.build(), logger), + equalTo(Version.V_2_3_0)); + checkMappingUpdateWorkCorrectly(clusterStateBuilder, Version.V_2_3_0); } public void testMissingIndexIsIdentifiedAsUpToDate() throws IOException { @@ -267,6 +308,7 @@ public class SecurityTemplateServiceTests extends ESTestCase { assertTrue(SecurityTemplateService.securityIndexMappingUpToDate(clusterStateBuilder.build(), logger)); securityTemplateService.clusterChanged(new ClusterChangedEvent("test-event", clusterStateBuilder.build() , EMPTY_CLUSTER_STATE)); + assertThat(SecurityTemplateService.oldestSecurityIndexMappingVersion(clusterStateBuilder.build(), logger), nullValue()); assertThat(listeners.size(), equalTo(0)); } @@ -274,13 +316,13 @@ public class SecurityTemplateServiceTests extends ESTestCase { IndexMetaData.Builder indexMetaData = createIndexMetadata(templateString); ImmutableOpenMap.Builder mapBuilder = ImmutableOpenMap.builder(); mapBuilder.put(SECURITY_INDEX_NAME, indexMetaData.build()); - MetaData.Builder metaDataBuidler = new 
MetaData.Builder(); - metaDataBuidler.indices(mapBuilder.build()); + MetaData.Builder metaDataBuilder = new MetaData.Builder(); + metaDataBuilder.indices(mapBuilder.build()); String mappingString = "/" + SECURITY_TEMPLATE_NAME + ".json"; IndexTemplateMetaData.Builder templateMeta = getIndexTemplateMetaData(mappingString); - metaDataBuidler.put(templateMeta); + metaDataBuilder.put(templateMeta); ClusterState.Builder clusterStateBuilder = ClusterState.builder(state()); - clusterStateBuilder.metaData(metaDataBuidler.build()); + clusterStateBuilder.metaData(metaDataBuilder.build()).routingTable(SecurityTestUtils.buildSecurityIndexRoutingTable()); return clusterStateBuilder; } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 03ef4b07657..8b1c095a431 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -30,12 +30,12 @@ import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; +import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.crypto.CryptoService; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.junit.Before; -import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -95,15 +95,16 @@ public class SecurityActionFilterTests extends ESTestCase { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), 
eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); + final Role empty = Role.EMPTY; doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - callback.onResponse(Collections.emptyList()); + callback.onResponse(empty); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); doReturn(request).when(spy(filter)).unsign(user, "_action", request); filter.apply(task, "_action", request, listener, chain); - verify(authzService).authorize(authentication, "_action", request, Collections.emptyList(), Collections.emptyList()); + verify(authzService).authorize(authentication, "_action", request, empty, null); verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ContextPreservingActionListener.class)); } @@ -123,10 +124,11 @@ public class SecurityActionFilterTests extends ESTestCase { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq(action), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); + final Role empty = Role.EMPTY; doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - callback.onResponse(Collections.emptyList()); + callback.onResponse(empty); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); doReturn(request).when(spy(filter)).unsign(user, action, request); @@ -135,7 +137,7 @@ public class SecurityActionFilterTests extends ESTestCase { verify(listener).onFailure(isA(IllegalArgumentException.class)); verifyNoMoreInteractions(authzService, chain); } else { - verify(authzService).authorize(authentication, action, request, Collections.emptyList(), Collections.emptyList()); + verify(authzService).authorize(authentication, action, request, empty, null); verify(chain).proceed(eq(task), eq(action), eq(request), isA(ContextPreservingActionListener.class)); } } @@ -157,11 +159,11 @@ public class SecurityActionFilterTests extends ESTestCase { doAnswer((i) -> { 
ActionListener callback = (ActionListener) i.getArguments()[1]; - callback.onResponse(Collections.emptyList()); + callback.onResponse(Role.EMPTY); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); - doThrow(exception).when(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(Collection.class), - any(Collection.class)); + doThrow(exception).when(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(Role.class), + any(Role.class)); filter.apply(task, "_action", request, listener, chain); verify(listener).onFailure(exception); verifyNoMoreInteractions(chain); @@ -182,16 +184,17 @@ public class SecurityActionFilterTests extends ESTestCase { }).when(authcService).authenticate(eq("_action"), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); when(cryptoService.isSigned("signed_scroll_id")).thenReturn(true); when(cryptoService.unsignAndVerify("signed_scroll_id")).thenReturn("scroll_id"); + final Role empty = Role.EMPTY; doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - callback.onResponse(Collections.emptyList()); + callback.onResponse(empty); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); filter.apply(task, "_action", request, listener, chain); assertThat(request.scrollId(), equalTo("scroll_id")); - verify(authzService).authorize(authentication, "_action", request, Collections.emptyList(), Collections.emptyList()); + verify(authzService).authorize(authentication, "_action", request, empty, null); verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ContextPreservingActionListener.class)); } @@ -214,7 +217,7 @@ public class SecurityActionFilterTests extends ESTestCase { doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - callback.onResponse(Collections.emptyList()); + callback.onResponse(Role.EMPTY); return Void.TYPE; 
}).when(authzService).roles(any(User.class), any(ActionListener.class)); filter.apply(task, "_action", request, listener, chain); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 2eff2902f16..7d1dd1d79e1 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -11,65 +11,45 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.permission.KibanaRole; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; -import org.elasticsearch.xpack.security.user.ElasticUser; -import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.nullValue; import static org.mockito.AdditionalMatchers.aryEq; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; public class TransportGetRolesActionTests extends ESTestCase { public void testReservedRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); - SecurityContext context = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore(context)); + mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); - final boolean isKibanaUser = randomBoolean(); - if (isKibanaUser) { - when(context.getUser()).thenReturn(new KibanaUser(true)); - } else { - when(context.getUser()).thenReturn(new ElasticUser(true)); - } final int size = randomIntBetween(1, ReservedRolesStore.names().size()); final List names = randomSubsetOf(size, ReservedRolesStore.names()); final List expectedNames = new ArrayList<>(names); - if (isKibanaUser == false) { - expectedNames.remove(KibanaRole.NAME); - } doAnswer(invocation -> { Object[] args = invocation.getArguments(); @@ -98,32 +78,19 @@ public class TransportGetRolesActionTests extends ESTestCase { assertThat(throwableRef.get(), is(nullValue())); assertThat(responseRef.get(), is(notNullValue())); - if (isKibanaUser && expectedNames.isEmpty()) { - assertThat(responseRef.get().roles(), is(emptyArray())); - verify(rolesStore, 
times(1)).getRoleDescriptors(eq(Strings.EMPTY_ARRAY), any(ActionListener.class)); - } else { - List retrievedRoleNames = - Arrays.asList(responseRef.get().roles()).stream().map(RoleDescriptor::getName).collect(Collectors.toList()); - assertThat(retrievedRoleNames, containsInAnyOrder(expectedNames.toArray(Strings.EMPTY_ARRAY))); - verifyZeroInteractions(rolesStore); - } + List retrievedRoleNames = + Arrays.asList(responseRef.get().roles()).stream().map(RoleDescriptor::getName).collect(Collectors.toList()); + assertThat(retrievedRoleNames, containsInAnyOrder(expectedNames.toArray(Strings.EMPTY_ARRAY))); + verifyZeroInteractions(rolesStore); } public void testStoreRoles() { final List storeRoleDescriptors = randomRoleDescriptors(); NativeRolesStore rolesStore = mock(NativeRolesStore.class); - SecurityContext context = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore(context)); - - final boolean isKibanaUser = randomBoolean(); - if (isKibanaUser) { - when(context.getUser()).thenReturn(new KibanaUser(true)); - } else { - when(context.getUser()).thenReturn(new ElasticUser(true)); - } + mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); @@ -172,13 +139,11 @@ public class TransportGetRolesActionTests extends ESTestCase { } NativeRolesStore rolesStore = mock(NativeRolesStore.class); - SecurityContext context = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, 
null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore(context)); + mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); - final boolean isKibanaUser = randomBoolean(); final List expectedNames = new ArrayList<>(); if (all) { expectedNames.addAll(reservedRoleNames); @@ -187,13 +152,6 @@ public class TransportGetRolesActionTests extends ESTestCase { expectedNames.addAll(requestedNames); } - if (isKibanaUser) { - when(context.getUser()).thenReturn(new KibanaUser(true)); - } else { - expectedNames.remove(KibanaRole.NAME); - when(context.getUser()).thenReturn(new ElasticUser(true)); - } - GetRolesRequest request = new GetRolesRequest(); request.names(requestedNames.toArray(Strings.EMPTY_ARRAY)); @@ -245,11 +203,10 @@ public class TransportGetRolesActionTests extends ESTestCase { final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException()); final List storeRoleDescriptors = randomRoleDescriptors(); NativeRolesStore rolesStore = mock(NativeRolesStore.class); - SecurityContext context = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore(context)); + mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); diff 
--git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 368db4f948c..8206eaca457 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.security.action.user; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -140,6 +142,8 @@ public class TransportGetUsersActionTests extends ESTestCase { public void testReservedUsersOnly() { NativeUsersStore usersStore = mock(NativeUsersStore.class); when(usersStore.started()).thenReturn(true); + when(usersStore.checkMappingVersion(any())).thenReturn(true); + ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings)); PlainActionFuture> userFuture = new PlainActionFuture<>(); @@ -167,6 +171,7 @@ public class TransportGetUsersActionTests extends ESTestCase { @Override public void onFailure(Exception e) { + logger.warn("Request failed", e); throwableRef.set(e); } }); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index a06db65a590..325f68cf91e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.authc.esnative; -import com.google.common.base.Charsets; import joptsimple.OptionParser; import joptsimple.OptionSet; import org.elasticsearch.cli.MockTerminal; @@ -16,10 +15,10 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.security.SecurityTemplateService; -import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.security.client.SecurityClient; import org.junit.BeforeClass; +import java.nio.charset.StandardCharsets; import java.util.HashSet; import java.util.Set; @@ -76,7 +75,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { MockTerminal t = new MockTerminal(); String username = nodeClientUsername(); - String password = new String(nodeClientPassword().utf8Bytes(), Charsets.UTF_8); + String password = new String(nodeClientPassword().utf8Bytes(), StandardCharsets.UTF_8); String url = getHttpURL(); ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); Settings sslSettings = @@ -112,7 +111,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": {\"match_all\": {}}}")) + new String[]{"body", "title"}, null, new BytesArray("{\"query\": {\"match_all\": {}}}")) .get(); addedRoles.add(rname); } @@ -121,7 +120,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { MockTerminal t = new MockTerminal(); String username = nodeClientUsername(); - String password = new String(nodeClientPassword().utf8Bytes(), 
Charsets.UTF_8); + String password = new String(nodeClientPassword().utf8Bytes(), StandardCharsets.UTF_8); String url = getHttpURL(); ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); Settings sslSettings = diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java index 285f93a3726..6b44c810207 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java @@ -50,7 +50,7 @@ public class ESNativeRealmMigrateToolTests extends CommandTestCase { RoleDescriptor.IndicesPrivileges ip = RoleDescriptor.IndicesPrivileges.builder() .indices(new String[]{"i1", "i2", "i3"}) .privileges(new String[]{"all"}) - .fieldPermissions(new FieldPermissions(new String[]{"body"}, null)) + .grantedFields("body") .build(); RoleDescriptor.IndicesPrivileges[] ips = new RoleDescriptor.IndicesPrivileges[1]; ips[0] = ip; diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index a31eed85976..b260821ec87 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -29,11 +29,9 @@ import org.elasticsearch.xpack.security.action.user.DeleteUserResponse; import org.elasticsearch.xpack.security.action.user.GetUsersResponse; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import 
org.elasticsearch.xpack.security.authz.permission.FieldPermissions; -import org.elasticsearch.xpack.security.authz.permission.KibanaRole; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.ElasticUser; @@ -93,7 +91,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { PutRoleResponse response = securityClient() .preparePutRole("native_anonymous") .cluster("ALL") - .addIndices(new String[]{"*"}, new String[]{"ALL"}, new FieldPermissions(), null) + .addIndices(new String[]{"*"}, new String[]{"ALL"}, null, null, null) .get(); assertTrue(response.isCreated()); } else { @@ -170,14 +168,13 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { SecurityClient c = securityClient(); final List existingRoles = Arrays.asList(c.prepareGetRoles().get().roles()); final int existing = existingRoles.size(); - final Map metadata = Collections.singletonMap("key", (Object) randomAsciiOfLengthBetween(1, 10)); + final Map metadata = Collections.singletonMap("key", randomAsciiOfLengthBetween(1, 10)); logger.error("--> creating role"); c.preparePutRole("test_role") .cluster("all", "none") .runAs("root", "nobody") - .addIndices(new String[]{"index"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": " + - "{\"match_all\": {}}}")) + .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"query\": {\"match_all\": {}}}")) .metadata(metadata) .get(); logger.error("--> waiting for .security index"); @@ -193,16 +190,14 @@ public class 
NativeRealmIntegTests extends NativeRealmIntegTestCase { c.preparePutRole("test_role2") .cluster("all", "none") .runAs("root", "nobody") - .addIndices(new String[]{"index"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": " + - "{\"match_all\": {}}}")) + .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"query\": {\"match_all\": {}}}")) .get(); c.preparePutRole("test_role3") .cluster("all", "none") .runAs("root", "nobody") - .addIndices(new String[]{"index"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": " + - "{\"match_all\": {}}}")) + .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"query\": {\"match_all\": {}}}")) .get(); logger.info("--> retrieving all roles"); @@ -228,8 +223,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { logger.error("--> creating role"); c.preparePutRole("test_role") .cluster("all") - .addIndices(new String[] { "*" }, new String[] { "read" }, - new FieldPermissions(new String[] { "body", "title" }, null), + .addIndices(new String[] { "*" }, new String[] { "read" }, new String[]{"body", "title"}, null, new BytesArray("{\"match_all\": {}}")) .get(); logger.error("--> creating user"); @@ -323,8 +317,8 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { logger.error("--> creating role"); c.preparePutRole("test_role") .cluster("all") - .addIndices(new String[]{"*"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}")) + .addIndices(new String[]{"*"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"match_all\": {}}")) .get(); logger.error("--> creating user"); c.preparePutUser("joe", "s3krit".toCharArray(), "test_role").get(); @@ -338,8 +332,8 @@ 
public class NativeRealmIntegTests extends NativeRealmIntegTestCase { assertFalse(response.isTimedOut()); c.preparePutRole("test_role") .cluster("none") - .addIndices(new String[]{"*"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}")) + .addIndices(new String[]{"*"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"match_all\": {}}")) .get(); if (anonymousEnabled && roleExists) { assertNoTimeout(client() @@ -353,18 +347,18 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { GetRolesResponse getRolesResponse = c.prepareGetRoles().names("test_role").get(); assertTrue("test_role does not exist!", getRolesResponse.hasRoles()); assertTrue("any cluster permission should be authorized", - Role.builder(getRolesResponse.roles()[0]).build().cluster().check("cluster:admin/foo", null, null)); + Role.builder(getRolesResponse.roles()[0], null).build().cluster().check("cluster:admin/foo")); c.preparePutRole("test_role") .cluster("none") - .addIndices(new String[]{"*"}, new String[]{"read"}, - new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}")) + .addIndices(new String[]{"*"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"match_all\": {}}")) .get(); getRolesResponse = c.prepareGetRoles().names("test_role").get(); assertTrue("test_role does not exist!", getRolesResponse.hasRoles()); assertFalse("no cluster permission should be authorized", - Role.builder(getRolesResponse.roles()[0]).build().cluster().check("cluster:admin/bar", null, null)); + Role.builder(getRolesResponse.roles()[0], null).build().cluster().check("cluster:admin/bar")); } } @@ -373,8 +367,8 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { logger.error("--> creating role"); c.preparePutRole("test_role") .cluster("all") - .addIndices(new String[]{"*"}, new String[]{"read"}, - new 
FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}")) + .addIndices(new String[]{"*"}, new String[]{"read"}, new String[]{"body", "title"}, null, + new BytesArray("{\"match_all\": {}}")) .get(); c.preparePutUser("joe", "s3krit".toCharArray(), "test_role").get(); logger.error("--> waiting for .security index"); @@ -400,11 +394,11 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { // create some roles client.preparePutRole("admin_role") .cluster("all") - .addIndices(new String[]{"*"}, new String[]{"all"}, new FieldPermissions(), null) + .addIndices(new String[]{"*"}, new String[]{"all"}, null, null, null) .get(); client.preparePutRole("read_role") .cluster("none") - .addIndices(new String[]{"*"}, new String[]{"read"}, new FieldPermissions(), null) + .addIndices(new String[]{"*"}, new String[]{"read"}, null, null, null) .get(); assertThat(client.prepareGetUsers("joes").get().hasUsers(), is(false)); @@ -500,7 +494,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { } else { client.preparePutRole("read_role") .cluster("none") - .addIndices(new String[]{"*"}, new String[]{"read"}, new FieldPermissions(), null) + .addIndices(new String[]{"*"}, new String[]{"read"}, null, null, null) .get(); } @@ -561,7 +555,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { } public void testOperationsOnReservedRoles() throws Exception { - final String name = randomFrom(SuperuserRole.NAME, KibanaRole.NAME); + final String name = randomFrom(ReservedRolesStore.names()); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> securityClient().preparePutRole(name).cluster("monitor").get()); assertThat(exception.getMessage(), containsString("role [" + name + "] is reserved")); @@ -572,12 +566,8 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { // get role is allowed GetRolesResponse response = 
securityClient().prepareGetRoles(name).get(); - if (KibanaRole.NAME.equals(name)) { - assertThat(response.hasRoles(), is(false)); - } else { - assertThat(response.hasRoles(), is(true)); - assertThat(response.roles()[0].getName(), is(name)); - } + assertThat(response.hasRoles(), is(true)); + assertThat(response.roles()[0].getName(), is(name)); } public void testCreateAndChangePassword() throws Exception { @@ -619,22 +609,25 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { SecurityClient client = new SecurityClient(client()); PutRoleResponse putRoleResponse = client.preparePutRole("admin_role") .cluster("all") - .addIndices(new String[]{"*"}, new String[]{"all"}, new FieldPermissions(), null) + .addIndices(new String[]{"*"}, new String[]{"all"}, null, null, null) .get(); assertThat(putRoleResponse.isCreated(), is(true)); roles++; if (fls) { PutRoleResponse roleResponse; String[] fields = new String[]{"foo"}; - FieldPermissions fieldPermissions; + final String[] grantedFields; + final String[] deniedFields; if (randomBoolean()) { - fieldPermissions = new FieldPermissions(fields, null); + grantedFields = fields; + deniedFields = null; } else { - fieldPermissions = new FieldPermissions(null, fields); + grantedFields = null; + deniedFields = fields; } roleResponse = client.preparePutRole("admin_role_fls") .cluster("all") - .addIndices(new String[]{"*"}, new String[]{"all"}, fieldPermissions, null) + .addIndices(new String[]{"*"}, new String[]{"all"}, grantedFields, deniedFields, null) .get(); assertThat(roleResponse.isCreated(), is(true)); roles++; @@ -643,7 +636,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { if (dls) { PutRoleResponse roleResponse = client.preparePutRole("admin_role_dls") .cluster("all") - .addIndices(new String[]{"*"}, new String[]{"all"}, new FieldPermissions(), new BytesArray("{ \"match_all\": {} }")) + .addIndices(new String[]{"*"}, new String[]{"all"}, null, null, new BytesArray("{ \"match_all\": 
{} }")) .get(); assertThat(roleResponse.isCreated(), is(true)); roles++; diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmMigratorTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmMigratorTests.java new file mode 100644 index 00000000000..8de4c204a89 --- /dev/null +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmMigratorTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.esnative; + +import java.util.concurrent.ExecutionException; +import java.util.function.Consumer; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.user.LogstashSystemUser; +import org.junit.Before; +import org.mockito.Mockito; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class NativeRealmMigratorTests extends ESTestCase { + + private Consumer> ensureDisabledHandler; + private NativeUsersStore nativeUsersStore; + private NativeRealmMigrator migrator; + + @Before + public void setupMocks() { + ensureDisabledHandler = listener -> listener.onResponse(null); + nativeUsersStore = Mockito.mock(NativeUsersStore.class); + Mockito.doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + ensureDisabledHandler.accept(listener); + return 
null; + }).when(nativeUsersStore).ensureReservedUserIsDisabled(any(), any()); + + final Settings settings = Settings.EMPTY; + migrator = new NativeRealmMigrator(settings, nativeUsersStore); + } + + public void testNoChangeOnFreshInstall() throws Exception { + verifyNoOpUpgrade(null); + } + + public void testNoChangeOnUpgradeOnOrAfterV5_2() throws Exception { + verifyNoOpUpgrade(randomFrom(Version.V_5_2_0_UNRELEASED, Version.V_6_0_0_alpha1_UNRELEASED)); + } + + public void testDisableLogstashOnUpgradeFromVersionPriorToV5_2() throws Exception { + verifyUpgradeDisablesLogstashSystemUser(randomFrom(Version.V_5_1_1_UNRELEASED, Version.V_5_0_2, Version.V_5_0_0)); + } + + public void testExceptionInUsersStoreIsPropagatedToListener() throws Exception { + final RuntimeException thrown = new RuntimeException("Forced failure"); + this.ensureDisabledHandler = listener -> listener.onFailure(thrown); + final PlainActionFuture future = doUpgrade(Version.V_5_0_0); + final ExecutionException caught = expectThrows(ExecutionException.class, future::get); + assertThat(caught.getCause(), is(thrown)); + } + + private void verifyNoOpUpgrade(Version fromVersion) throws ExecutionException, InterruptedException { + final PlainActionFuture future = doUpgrade(fromVersion); + verifyNoMoreInteractions(nativeUsersStore); + assertThat(future.get(), is(Boolean.FALSE)); + } + + private void verifyUpgradeDisablesLogstashSystemUser(Version fromVersion) throws ExecutionException, InterruptedException { + final PlainActionFuture future = doUpgrade(fromVersion); + verify(nativeUsersStore).ensureReservedUserIsDisabled(eq(LogstashSystemUser.NAME), any()); + verifyNoMoreInteractions(nativeUsersStore); + assertThat(future.get(), is(Boolean.TRUE)); + } + + private PlainActionFuture doUpgrade(Version fromVersion) { + final PlainActionFuture future = new PlainActionFuture<>(); + migrator.performUpgrade(fromVersion, future); + return future; + } +} \ No newline at end of file diff --git 
a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index c2a3564d59d..ef89f95535e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; @@ -18,19 +19,24 @@ import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.KibanaUser; +import org.elasticsearch.xpack.security.user.LogstashSystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Map.Entry; +import java.util.concurrent.ExecutionException; +import java.util.function.Predicate; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; @@ -55,6 +61,7 @@ public class ReservedRealmTests extends ESTestCase { public void setupMocks() { usersStore = mock(NativeUsersStore.class); 
when(usersStore.started()).thenReturn(true); + when(usersStore.checkMappingVersion(any())).thenReturn(true); mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); } @@ -62,7 +69,7 @@ public class ReservedRealmTests extends ESTestCase { when(usersStore.started()).thenReturn(false); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); - final String principal = randomFrom(ElasticUser.NAME, KibanaUser.NAME); + final String principal = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME); PlainActionFuture listener = new PlainActionFuture<>(); reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, DEFAULT_PASSWORD), listener); @@ -72,8 +79,19 @@ public class ReservedRealmTests extends ESTestCase { verifyNoMoreInteractions(usersStore); } + public void testMappingVersionFromBeforeUserExisted() throws ExecutionException, InterruptedException { + when(usersStore.checkMappingVersion(any())).thenReturn(false); + final ReservedRealm reservedRealm = + new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); + final String principal = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME); + + PlainActionFuture future = new PlainActionFuture<>(); + reservedRealm.authenticate(new UsernamePasswordToken(principal, DEFAULT_PASSWORD), future); + assertThat(future.get().enabled(), equalTo(false)); + } + public void testDefaultPasswordAuthentication() throws Throwable { - final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expected.principal(); final boolean securityIndexExists = randomBoolean(); if (securityIndexExists) { @@ -97,6 +115,9 @@ public class ReservedRealmTests extends ESTestCase { if (securityIndexExists) { 
verify(usersStore).getReservedUserInfo(eq(principal), any(ActionListener.class)); } + final ArgumentCaptor predicateCaptor = ArgumentCaptor.forClass(Predicate.class); + verify(usersStore).checkMappingVersion(predicateCaptor.capture()); + verifyVersionPredicate(principal, predicateCaptor.getValue()); verifyNoMoreInteractions(usersStore); } @@ -107,7 +128,7 @@ public class ReservedRealmTests extends ESTestCase { when(usersStore.securityIndexExists()).thenReturn(true); } final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings)); - final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expected.principal(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -117,16 +138,24 @@ public class ReservedRealmTests extends ESTestCase { verifyZeroInteractions(usersStore); } - public void testAuthenticationWithStoredPassword() throws Throwable { + public void testAuthenticationEnabledUserWithStoredPassword() throws Throwable { + verifySuccessfulAuthentication(true); + } + + public void testAuthenticationDisabledUserWithStoredPassword() throws Throwable { + verifySuccessfulAuthentication(false); + } + + private void verifySuccessfulAuthentication(boolean enabled) { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); - final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expectedUser = randomFrom(new ElasticUser(enabled), new KibanaUser(enabled), new LogstashSystemUser(enabled)); final String principal = expectedUser.principal(); final SecuredString newPassword = new SecuredString("foobar".toCharArray()); when(usersStore.securityIndexExists()).thenReturn(true); doAnswer((i) -> { ActionListener callback = 
(ActionListener) i.getArguments()[1]; - callback.onResponse(new ReservedUserInfo(Hasher.BCRYPT.hash(newPassword), true)); + callback.onResponse(new ReservedUserInfo(Hasher.BCRYPT.hash(newPassword), enabled)); return null; }).when(usersStore).getReservedUserInfo(eq(principal), any(ActionListener.class)); @@ -148,16 +177,21 @@ public class ReservedRealmTests extends ESTestCase { reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, newPassword), authListener); final User authenticated = authListener.actionGet(); assertEquals(expectedUser, authenticated); + assertThat(expectedUser.enabled(), is(enabled)); + verify(usersStore, times(2)).started(); verify(usersStore, times(2)).securityIndexExists(); verify(usersStore, times(2)).getReservedUserInfo(eq(principal), any(ActionListener.class)); + final ArgumentCaptor predicateCaptor = ArgumentCaptor.forClass(Predicate.class); + verify(usersStore, times(2)).checkMappingVersion(predicateCaptor.capture()); + verifyVersionPredicate(principal, predicateCaptor.getValue()); verifyNoMoreInteractions(usersStore); } public void testLookup() throws Exception { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); - final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -167,6 +201,10 @@ public class ReservedRealmTests extends ESTestCase { verify(usersStore).started(); verify(usersStore).securityIndexExists(); + final ArgumentCaptor predicateCaptor = ArgumentCaptor.forClass(Predicate.class); + verify(usersStore).checkMappingVersion(predicateCaptor.capture()); + verifyVersionPredicate(principal, predicateCaptor.getValue()); + PlainActionFuture future = new PlainActionFuture<>(); 
reservedRealm.doLookupUser("foobar", future); final User doesntExist = future.actionGet(); @@ -178,7 +216,7 @@ public class ReservedRealmTests extends ESTestCase { Settings settings = Settings.builder().put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings)); - final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -191,7 +229,7 @@ public class ReservedRealmTests extends ESTestCase { public void testLookupThrows() throws Exception { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); - final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); when(usersStore.securityIndexExists()).thenReturn(true); final RuntimeException e = new RuntimeException("store threw"); @@ -209,11 +247,16 @@ public class ReservedRealmTests extends ESTestCase { verify(usersStore).started(); verify(usersStore).securityIndexExists(); verify(usersStore).getReservedUserInfo(eq(principal), any(ActionListener.class)); + + final ArgumentCaptor predicateCaptor = ArgumentCaptor.forClass(Predicate.class); + verify(usersStore).checkMappingVersion(predicateCaptor.capture()); + verifyVersionPredicate(principal, predicateCaptor.getValue()); + verifyNoMoreInteractions(usersStore); } public void testIsReserved() { - final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expectedUser = 
randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); assertThat(ReservedRealm.isReserved(principal, Settings.EMPTY), is(true)); @@ -223,7 +266,7 @@ public class ReservedRealmTests extends ESTestCase { public void testIsReservedDisabled() { Settings settings = Settings.builder().put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); - final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); assertThat(ReservedRealm.isReserved(principal, settings), is(false)); @@ -236,7 +279,7 @@ public class ReservedRealmTests extends ESTestCase { new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY)); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); - assertThat(userFuture.actionGet(), containsInAnyOrder(new ElasticUser(true), new KibanaUser(true))); + assertThat(userFuture.actionGet(), containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true))); } public void testGetUsersDisabled() { @@ -289,4 +332,22 @@ public class ReservedRealmTests extends ESTestCase { }).when(usersStore).getReservedUserInfo(eq(entry.getKey()), any(ActionListener.class)); } } + + private void verifyVersionPredicate(String principal, Predicate versionPredicate) { + assertThat(versionPredicate.test(Version.V_2_4_3), is(false)); + assertThat(versionPredicate.test(Version.V_5_0_0_rc1), is(false)); + switch (principal) { + case LogstashSystemUser.NAME: + assertThat(versionPredicate.test(Version.V_5_0_0), is(false)); + assertThat(versionPredicate.test(Version.V_5_1_1_UNRELEASED), is(false)); + assertThat(versionPredicate.test(Version.V_5_2_0_UNRELEASED), is(true)); + break; + default: + 
assertThat(versionPredicate.test(Version.V_5_0_0), is(true)); + assertThat(versionPredicate.test(Version.V_5_1_1_UNRELEASED), is(true)); + assertThat(versionPredicate.test(Version.V_5_2_0_UNRELEASED), is(true)); + break; + } + assertThat(versionPredicate.test(Version.V_6_0_0_alpha1_UNRELEASED), is(true)); + } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java index 9b4a668f092..33608d1b27f 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.Before; import java.nio.file.Path; @@ -39,10 +40,19 @@ public class AbstractActiveDirectoryIntegTests extends ESTestCase { if (useGlobalSSL) { builder.put("xpack.ssl.keystore.path", keystore) .put("xpack.ssl.keystore.password", "changeit"); + + // fake realm to load config with certificate verification mode + builder.put("xpack.security.authc.realms.bar.ssl.keystore.path", keystore); + builder.put("xpack.security.authc.realms.bar.ssl.keystore.password", "changeit"); + builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } else { - // fake a realm so ssl will get loaded + // fake realms so ssl will get loaded builder.put("xpack.security.authc.realms.foo.ssl.truststore.path", keystore); builder.put("xpack.security.authc.realms.foo.ssl.truststore.password", "changeit"); + 
builder.put("xpack.security.authc.realms.foo.ssl.verification_mode", VerificationMode.FULL); + builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", keystore); + builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } globalSettings = builder.build(); Environment environment = new Environment(globalSettings); @@ -55,8 +65,12 @@ public class AbstractActiveDirectoryIntegTests extends ESTestCase { .putArray(ActiveDirectorySessionFactory.URLS_SETTING, ldapUrl) .put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, adDomainName) .put(ActiveDirectorySessionFactory.AD_USER_SEARCH_BASEDN_SETTING, userSearchDN) - .put(ActiveDirectorySessionFactory.AD_USER_SEARCH_SCOPE_SETTING, scope) - .put(ActiveDirectorySessionFactory.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); + .put(ActiveDirectorySessionFactory.AD_USER_SEARCH_SCOPE_SETTING, scope); + if (randomBoolean()) { + builder.put("ssl.verification_mode", hostnameVerification ? 
VerificationMode.FULL : VerificationMode.CERTIFICATE); + } else { + builder.put(ActiveDirectorySessionFactory.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); + } if (useGlobalSSL == false) { builder.put("ssl.truststore.path", getDataPath("../ldap/support/ldaptrust.jks")) .put("ssl.truststore.password", "changeit"); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java index 4414269700b..8e4313c09ff 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; @@ -27,6 +28,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -72,6 +75,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { private ResourceWatcherService resourceWatcherService; private ThreadPool threadPool; private Settings globalSettings; + private SSLService sslService; @BeforeClass public static void setNumberOfLdapServers() { @@ -101,6 +105,7 @@ public class ActiveDirectoryRealmTests 
extends ESTestCase { threadPool = new TestThreadPool("active directory realm tests"); resourceWatcherService = new ResourceWatcherService(Settings.EMPTY, threadPool); globalSettings = Settings.builder().put("path.home", createTempDir()).build(); + sslService = new SSLService(globalSettings, new Environment(globalSettings)); } @After @@ -112,10 +117,15 @@ public class ActiveDirectoryRealmTests extends ESTestCase { } } + @Override + public boolean enableWarningsCheck() { + return false; + } + public void testAuthenticateUserPrincipleName() throws Exception { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateUserPrincipleName", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -129,7 +139,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { public void testAuthenticateSAMAccountName() throws Exception { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateSAMAccountName", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -153,7 +163,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { public void testAuthenticateCachesSuccesfulAuthentications() throws Exception { Settings settings = settings(); RealmConfig config = 
new RealmConfig("testAuthenticateCachesSuccesfulAuthentications", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, null)); + ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, sslService)); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -171,7 +181,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { public void testAuthenticateCachingCanBeDisabled() throws Exception { Settings settings = settings(Settings.builder().put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING.getKey(), -1).build()); RealmConfig config = new RealmConfig("testAuthenticateCachingCanBeDisabled", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, null)); + ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, sslService)); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -189,7 +199,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { public void testAuthenticateCachingClearsCacheOnRoleMapperRefresh() throws Exception { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateCachingClearsCacheOnRoleMapperRefresh", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, null)); + ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, sslService)); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, 
sessionFactory, roleMapper, threadPool); @@ -220,7 +230,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { .put(ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml")) .build()); RealmConfig config = new RealmConfig("testRealmMapsGroupsToRoles", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -236,7 +246,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { .put(ROLE_MAPPING_FILE_SETTING, getDataPath("role_mapping.yml")) .build()); RealmConfig config = new RealmConfig("testRealmMapsGroupsToRoles", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -254,7 +264,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { .put("load_balance.type", loadBalanceType) .build()); RealmConfig config = new RealmConfig("testRealmUsageStats", settings, globalSettings); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, null); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService, () -> {}); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); @@ -272,12 
+282,15 @@ public class ActiveDirectoryRealmTests extends ESTestCase { } private Settings settings(Settings extraSettings) throws Exception { - return Settings.builder() + Settings.Builder builder = Settings.builder() .putArray(URLS_SETTING, ldapUrls()) .put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com") - .put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.getKey(), true) - .put(HOSTNAME_VERIFICATION_SETTING, false) - .put(extraSettings) - .build(); + .put(DnRoleMapper.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.getKey(), true); + if (randomBoolean()) { + builder.put("ssl.verification_mode", VerificationMode.CERTIFICATE); + } else { + builder.put(HOSTNAME_VERIFICATION_SETTING, false); + } + return builder.put(extraSettings).build(); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index ae0c71eb96f..fc7bcf3058e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.security.authc.support.SecuredStringTests; import org.elasticsearch.test.junit.annotations.Network; +import org.elasticsearch.xpack.ssl.VerificationMode; import java.util.List; import java.util.concurrent.ExecutionException; @@ -31,6 +32,11 @@ import static org.hamcrest.Matchers.is; @Network public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryIntegTests { + @Override + public boolean enableWarningsCheck() { + return false; + } + @SuppressWarnings("unchecked") public void 
testAdAuth() throws Exception { RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false), globalSettings); @@ -78,7 +84,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI Settings settings = Settings.builder() .put(buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false)) .put("group_search.filter", "(objectClass=*)") - .put(SessionFactory.HOSTNAME_VERIFICATION_SETTING, false) + .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .put(SessionFactory.TIMEOUT_TCP_READ_SETTING, "1ms") .build(); RealmConfig config = new RealmConfig("ad-test", settings, globalSettings); @@ -296,7 +302,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; Settings settings = Settings.builder() .put(LdapTestCase.buildLdapSettings(AD_LDAP_URL, userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put(LdapSessionFactory.HOSTNAME_VERIFICATION_SETTING, true) + .put("ssl.verification_mode", VerificationMode.FULL) .build(); RealmConfig config = new RealmConfig("ad-test", settings, globalSettings); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); @@ -313,8 +319,12 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI Settings buildAdSettings(String ldapUrl, String adDomainName, boolean hostnameVerification) { Settings.Builder builder = Settings.builder() .put(ActiveDirectorySessionFactory.URLS_SETTING, ldapUrl) - .put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, adDomainName) - .put(ActiveDirectorySessionFactory.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); + .put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, adDomainName); + if (randomBoolean()) { + builder.put("ssl.verification_mode", hostnameVerification ? 
VerificationMode.FULL : VerificationMode.CERTIFICATE); + } else { + builder.put(ActiveDirectorySessionFactory.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); + } if (useGlobalSSL == false) { builder.put("ssl.truststore.path", getDataPath("../ldap/support/ldaptrust.jks")) .put("ssl.truststore.password", "changeit"); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java index b28a2ea8c4f..8197a5f4650 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsRes import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.After; import org.junit.Before; @@ -44,10 +45,19 @@ public abstract class GroupsResolverTestCase extends ESTestCase { if (useGlobalSSL) { builder.put("xpack.ssl.keystore.path", keystore) .put("xpack.ssl.keystore.password", "changeit"); + + // fake realm to load config with certificate verification mode + builder.put("xpack.security.authc.realms.bar.ssl.keystore.path", keystore); + builder.put("xpack.security.authc.realms.bar.ssl.keystore.password", "changeit"); + builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } else { - // fake a realm so ssl will get loaded + // fake realms so ssl will get loaded builder.put("xpack.security.authc.realms.foo.ssl.keystore.path", keystore); builder.put("xpack.security.authc.realms.foo.ssl.keystore.password", "changeit"); + 
builder.put("xpack.security.authc.realms.foo.ssl.verification_mode", VerificationMode.FULL); + builder.put("xpack.security.authc.realms.bar.ssl.keystore.path", keystore); + builder.put("xpack.security.authc.realms.bar.ssl.keystore.password", "changeit"); + builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } Settings settings = builder.build(); Environment env = new Environment(settings); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java index dc0f4d3cdd9..16de13fa0b2 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.authc.ldap; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.security.authc.ldap.support.LdapTestCase; @@ -21,13 +22,14 @@ import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.After; import org.junit.Before; import java.util.Arrays; import java.util.Map; -import static org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory.HOSTNAME_VERIFICATION_SETTING; import static org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory.URLS_SETTING; import static 
org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; @@ -52,12 +54,14 @@ public class LdapRealmTests extends LdapTestCase { private ThreadPool threadPool; private ResourceWatcherService resourceWatcherService; private Settings globalSettings; + private SSLService sslService; @Before public void init() throws Exception { threadPool = new TestThreadPool("ldap realm tests"); resourceWatcherService = new ResourceWatcherService(Settings.EMPTY, threadPool); globalSettings = Settings.builder().put("path.home", createTempDir()).build(); + sslService = new SSLService(globalSettings, new Environment(globalSettings)); } @After @@ -71,7 +75,7 @@ public class LdapRealmTests extends LdapTestCase { String userTemplate = VALID_USER_TEMPLATE; Settings settings = buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE); RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); @@ -90,7 +94,7 @@ public class LdapRealmTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); @@ -109,7 +113,7 @@ public class LdapRealmTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new 
LdapSessionFactory(config, sslService); ldapFactory = spy(ldapFactory); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); @@ -132,7 +136,7 @@ public class LdapRealmTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); DnRoleMapper roleMapper = buildGroupAsRoleMapper(resourceWatcherService); ldapFactory = spy(ldapFactory); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, roleMapper, threadPool); @@ -165,7 +169,7 @@ public class LdapRealmTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); ldapFactory = spy(ldapFactory); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); @@ -188,10 +192,10 @@ public class LdapRealmTests extends LdapTestCase { .putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate) .put("group_search.base_dn", groupSearchBase) .put("group_search.scope", LdapSearchScope.SUB_TREE) - .put(HOSTNAME_VERIFICATION_SETTING, false) + .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .build(); RealmConfig config = new RealmConfig("test-ldap-realm", settings, globalSettings); - SessionFactory sessionFactory = LdapRealm.sessionFactory(config, null, LdapRealm.LDAP_TYPE); + SessionFactory sessionFactory = LdapRealm.sessionFactory(config, sslService, LdapRealm.LDAP_TYPE); assertThat(sessionFactory, is(instanceOf(LdapSessionFactory.class))); } @@ -204,10 +208,10 @@ public class LdapRealmTests extends LdapTestCase { .put("bind_password", 
PASSWORD) .put("group_search.base_dn", groupSearchBase) .put("group_search.scope", LdapSearchScope.SUB_TREE) - .put(HOSTNAME_VERIFICATION_SETTING, false) + .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .build(); RealmConfig config = new RealmConfig("test-ldap-realm-user-search", settings, globalSettings); - SessionFactory sessionFactory = LdapRealm.sessionFactory(config, null, LdapRealm.LDAP_TYPE); + SessionFactory sessionFactory = LdapRealm.sessionFactory(config, sslService, LdapRealm.LDAP_TYPE); try { assertThat(sessionFactory, is(instanceOf(LdapUserSearchSessionFactory.class))); } finally { @@ -222,7 +226,7 @@ public class LdapRealmTests extends LdapTestCase { .put("user_search.base_dn", "cn=bar") .put("group_search.base_dn", "") .put("group_search.scope", LdapSearchScope.SUB_TREE) - .put(HOSTNAME_VERIFICATION_SETTING, false) + .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .build(); RealmConfig config = new RealmConfig("test-ldap-realm-user-search", settings, globalSettings); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -238,7 +242,7 @@ public class LdapRealmTests extends LdapTestCase { .putArray(URLS_SETTING, ldapUrls()) .put("group_search.base_dn", "") .put("group_search.scope", LdapSearchScope.SUB_TREE) - .put(HOSTNAME_VERIFICATION_SETTING, false) + .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .build(); RealmConfig config = new RealmConfig("test-ldap-realm-user-search", settings, globalSettings); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -259,7 +263,7 @@ public class LdapRealmTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("test-ldap-realm-userdn", settings, globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, new 
DnRoleMapper(LdapRealm.LDAP_TYPE, config, resourceWatcherService, null), threadPool); @@ -279,7 +283,7 @@ public class LdapRealmTests extends LdapTestCase { .put("group_search.base_dn", groupSearchBase) .put("group_search.scope", LdapSearchScope.SUB_TREE) .put(LdapSessionFactory.USER_DN_TEMPLATES_SETTING.getKey(), "--") - .put(HOSTNAME_VERIFICATION_SETTING, false); + .put("ssl.verification_mode", VerificationMode.CERTIFICATE); int order = randomIntBetween(0, 10); settings.put("order", order); @@ -291,7 +295,7 @@ public class LdapRealmTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap-realm", settings.build(), globalSettings); - LdapSessionFactory ldapFactory = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); LdapRealm realm = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, new DnRoleMapper(LdapRealm.LDAP_TYPE, config, resourceWatcherService, null), threadPool); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java index 76a22c40b19..c3e6900805a 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java @@ -10,6 +10,7 @@ import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.LDAPURL; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; @@ -18,6 +19,7 @@ import 
org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.security.authc.support.SecuredStringTests; import org.elasticsearch.test.junit.annotations.Network; +import org.elasticsearch.xpack.ssl.SSLService; import org.junit.Before; import java.util.List; @@ -32,10 +34,12 @@ import static org.hamcrest.Matchers.lessThan; public class LdapSessionFactoryTests extends LdapTestCase { private Settings globalSettings; + private SSLService sslService; @Before public void setup() { globalSettings = Settings.builder().put("path.home", createTempDir()).build(); + sslService = new SSLService(globalSettings, new Environment(globalSettings)); } public void testBindWithReadTimeout() throws Exception { @@ -51,7 +55,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("ldap_realm", settings, globalSettings); - LdapSessionFactory sessionFactory = new LdapSessionFactory(config, null); + LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -81,7 +85,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { .build(); RealmConfig config = new RealmConfig("ldap_realm", settings, globalSettings); - LdapSessionFactory sessionFactory = new LdapSessionFactory(config, null); + LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -104,7 +108,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings); - LdapSessionFactory sessionFactory = new LdapSessionFactory(config, null); + LdapSessionFactory sessionFactory = new 
LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -125,7 +129,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings); - LdapSessionFactory ldapFac = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFac = new LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -143,7 +147,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE), globalSettings); - LdapSessionFactory ldapFac = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFac = new LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -160,7 +164,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL), globalSettings); - LdapSessionFactory ldapFac = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFac = new LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; try (LdapSession ldap = session(ldapFac, user, SecuredStringTests.build("pass"))) { @@ -175,7 +179,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap_realm", buildLdapSettings(ldapUrls(), userTemplate, groupSearchBase, LdapSearchScope.BASE), globalSettings); - LdapSessionFactory ldapFac = new LdapSessionFactory(config, null); + LdapSessionFactory ldapFac = new LdapSessionFactory(config, sslService); String user = "Horatio Hornblower"; 
SecuredString userPass = SecuredStringTests.build("pass"); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index 1eb07cd895b..6096762cf7e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -32,6 +32,7 @@ import java.nio.file.Path; import java.text.MessageFormat; import java.util.List; import java.util.Locale; +import java.util.Map; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -72,7 +73,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); try { assertThat(sessionFactory.supportsUnauthenticatedSession(), is(true)); } finally { @@ -93,7 +94,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "William Bush"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -129,7 +130,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + 
LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "William Bush"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -156,7 +157,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "William Bush"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -192,7 +193,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "William Bush"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -219,7 +220,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "William Bush"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -254,7 +255,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "William Bush"; SecuredString userPass = 
SecuredStringTests.build("pass"); @@ -279,7 +280,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); - LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, null); + LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String user = "wbush"; SecuredString userPass = SecuredStringTests.build("pass"); @@ -314,6 +315,13 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("user_search.attribute", "cn") .put("user_search.pool.enabled", randomBoolean()) .build(); + Settings.Builder builder = Settings.builder() + .put(globalSettings); + for (Map.Entry entry : settings.getAsMap().entrySet()) { + builder.put("xpack.security.authc.realms.ldap." + entry.getKey(), entry.getValue()); + } + Settings fullSettings = builder.build(); + sslService = new SSLService(fullSettings, new Environment(fullSettings)); RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -357,6 +365,13 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .put("bind_password", OpenLdapTests.PASSWORD) .put("user_search.pool.enabled", randomBoolean()) .build(), globalSettings); + Settings.Builder builder = Settings.builder() + .put(globalSettings); + for (Map.Entry entry : config.settings().getAsMap().entrySet()) { + builder.put("xpack.security.authc.realms.ldap." 
+ entry.getKey(), entry.getValue()); + } + Settings settings = builder.build(); + sslService = new SSLService(settings, new Environment(settings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); String[] users = new String[] { "cap", "hawkeye", "hulk", "ironman", "thor" }; @@ -443,7 +458,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { LdapUserSearchSessionFactory searchSessionFactory = null; try { - searchSessionFactory = new LdapUserSearchSessionFactory(config, null); + searchSessionFactory = new LdapUserSearchSessionFactory(config, sslService); } finally { if (searchSessionFactory != null) { searchSessionFactory.shutdown(); @@ -491,7 +506,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { RealmConfig config = new RealmConfig("ldap_realm", ldapSettings, globalSettings); LdapUserSearchSessionFactory searchSessionFactory = null; try { - searchSessionFactory = new LdapUserSearchSessionFactory(config, null); + searchSessionFactory = new LdapUserSearchSessionFactory(config, sslService); } finally { if (searchSessionFactory != null) { searchSessionFactory.shutdown(); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapTests.java index 056c44c89ca..24a49d9c01a 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.security.authc.support.SecuredStringTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.Before; import java.nio.file.Path; @@ -54,10 +55,19 @@ public class 
OpenLdapTests extends ESTestCase { if (useGlobalSSL) { builder.put("xpack.ssl.keystore.path", keystore) .put("xpack.ssl.keystore.password", "changeit"); + + // fake realm to load config with certificate verification mode + builder.put("xpack.security.authc.realms.bar.ssl.keystore.path", keystore); + builder.put("xpack.security.authc.realms.bar.ssl.keystore.password", "changeit"); + builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } else { - // fake a realm so ssl will get loaded + // fake realms so ssl will get loaded builder.put("xpack.security.authc.realms.foo.ssl.truststore.path", keystore); builder.put("xpack.security.authc.realms.foo.ssl.truststore.password", "changeit"); + builder.put("xpack.security.authc.realms.foo.ssl.verification_mode", VerificationMode.FULL); + builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", keystore); + builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } globalSettings = builder.build(); Environment environment = new Environment(globalSettings); @@ -120,7 +130,7 @@ public class OpenLdapTests extends ESTestCase { Settings settings = Settings.builder() .put(buildLdapSettings(OPEN_LDAP_URL, userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE)) .put("group_search.filter", "(objectClass=*)") - .put(SessionFactory.HOSTNAME_VERIFICATION_SETTING, false) + .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .put(SessionFactory.TIMEOUT_TCP_READ_SETTING, "1ms") //1 millisecond .build(); RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings); @@ -137,7 +147,7 @@ public class OpenLdapTests extends ESTestCase { String userTemplate = "uid={0},ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; Settings settings = Settings.builder() .put(buildLdapSettings(OPEN_LDAP_URL, userTemplate, groupSearchBase, 
LdapSearchScope.ONE_LEVEL)) - .put(LdapSessionFactory.HOSTNAME_VERIFICATION_SETTING, true) + .put("ssl.verification_mode", VerificationMode.FULL) .build(); RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java index 9e0b59f63be..56bf61d2312 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java @@ -5,11 +5,16 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.unboundid.ldap.sdk.Attribute; +import com.unboundid.ldap.sdk.SearchRequest; +import com.unboundid.ldap.sdk.SearchScope; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils; import org.elasticsearch.xpack.security.support.NoOpLogger; import org.elasticsearch.test.junit.annotations.Network; +import java.util.Collection; import java.util.List; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -34,6 +39,20 @@ public class UserAttributeGroupsResolverTests extends GroupsResolverTestCase { containsString("Philanthropists"))); } + public void testResolveFromPreloadedAttributes() throws Exception { + SearchRequest preSearch = new SearchRequest(BRUCE_BANNER_DN, SearchScope.BASE, LdapUtils.OBJECT_CLASS_PRESENCE_FILTER, "memberOf"); + final Collection attributes = ldapConnection.searchForEntry(preSearch).getAttributes(); + + UserAttributeGroupsResolver resolver = new UserAttributeGroupsResolver(Settings.EMPTY); + List groups = + resolveBlocking(resolver, ldapConnection, BRUCE_BANNER_DN, TimeValue.timeValueSeconds(20), NoOpLogger.INSTANCE, 
attributes); + assertThat(groups, containsInAnyOrder( + containsString("Avengers"), + containsString("SHIELD"), + containsString("Geniuses"), + containsString("Philanthropists"))); + } + public void testResolveCustomGroupAttribute() throws Exception { Settings settings = Settings.builder() .put("user_group_attribute", "seeAlso") @@ -68,4 +87,4 @@ public class UserAttributeGroupsResolverTests extends GroupsResolverTestCase { protected String bindPassword() { return ActiveDirectorySessionFactoryTests.PASSWORD; } -} \ No newline at end of file +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java index 30c5bfaf40d..319e3a990de 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.security.authc.support.SecuredString; +import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -91,7 +92,7 @@ public abstract class LdapTestCase extends ESTestCase { .putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate) .put("group_search.base_dn", groupSearchBase) .put("group_search.scope", scope) - .put(HOSTNAME_VERIFICATION_SETTING, false); + .put("ssl.verification_mode", VerificationMode.CERTIFICATE); if (serverSetType != null) { builder.put(LdapLoadBalancing.LOAD_BALANCE_SETTINGS + "." 
+ LdapLoadBalancing.LOAD_BALANCE_TYPE_SETTING, serverSetType.toString()); @@ -100,11 +101,15 @@ public abstract class LdapTestCase extends ESTestCase { } public static Settings buildLdapSettings(String[] ldapUrl, String userTemplate, boolean hostnameVerification) { - return Settings.builder() + Settings.Builder builder = Settings.builder() .putArray(URLS_SETTING, ldapUrl) - .putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate) - .put(HOSTNAME_VERIFICATION_SETTING, hostnameVerification) - .build(); + .putArray(USER_DN_TEMPLATES_SETTING_KEY, userTemplate); + if (randomBoolean()) { + builder.put("ssl.verification_mode", hostnameVerification ? VerificationMode.FULL : VerificationMode.CERTIFICATE); + } else { + builder.put(HOSTNAME_VERIFICATION_SETTING, hostnameVerification); + } + return builder.build(); } protected DnRoleMapper buildGroupAsRoleMapper(ResourceWatcherService resourceWatcherService) { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java index fef0466460f..c223045320e 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java @@ -9,6 +9,7 @@ import com.unboundid.ldap.listener.InMemoryDirectoryServer; import com.unboundid.ldap.sdk.LDAPConnection; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.ssl.SSLService; @@ -167,7 +168,7 @@ public class SessionFactoryLoadBalancingTests extends LdapTestCase { 
LdapSearchScope.SUB_TREE, loadBalancing); RealmConfig config = new RealmConfig("test-session-factory", settings, Settings.builder().put("path.home", createTempDir()).build()); - return new TestSessionFactory(config, null); + return new TestSessionFactory(config, new SSLService(Settings.EMPTY, new Environment(config.globalSettings()))); } static class TestSessionFactory extends SessionFactory { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryTests.java index 28781eb23bf..f74888925b8 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryTests.java @@ -11,9 +11,12 @@ import com.unboundid.util.ssl.TrustAllSSLSocketVerifier; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.VerificationMode; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -21,8 +24,12 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class SessionFactoryTests extends ESTestCase { + public void testConnectionFactoryReturnsCorrectLDAPConnectionOptionsWithDefaultSettings() { - LDAPConnectionOptions options = SessionFactory.connectionOptions(Settings.EMPTY); + final Environment environment = new Environment(Settings.builder().put("path.home", createTempDir()).build()); + RealmConfig 
realmConfig = new RealmConfig("conn settings", Settings.EMPTY, environment.settings(), environment); + LDAPConnectionOptions options = SessionFactory.connectionOptions(realmConfig, new SSLService(environment.settings(), environment), + logger); assertThat(options.followReferrals(), is(equalTo(true))); assertThat(options.allowConcurrentSocketFactoryUse(), is(equalTo(true))); assertThat(options.getConnectTimeoutMillis(), is(equalTo(5000))); @@ -37,12 +44,36 @@ public class SessionFactoryTests extends ESTestCase { .put(SessionFactory.TIMEOUT_TCP_READ_SETTING, "20ms") .put(SessionFactory.FOLLOW_REFERRALS_SETTING, "false") .build(); - LDAPConnectionOptions options = SessionFactory.connectionOptions(settings); + + final Environment environment = new Environment(Settings.builder().put("path.home", createTempDir()).build()); + RealmConfig realmConfig = new RealmConfig("conn settings", settings, environment.settings(), environment); + LDAPConnectionOptions options = SessionFactory.connectionOptions(realmConfig, new SSLService(environment.settings(), environment), + logger); assertThat(options.followReferrals(), is(equalTo(false))); assertThat(options.allowConcurrentSocketFactoryUse(), is(equalTo(true))); assertThat(options.getConnectTimeoutMillis(), is(equalTo(10))); assertThat(options.getResponseTimeoutMillis(), is(equalTo(20L))); assertThat(options.getSSLSocketVerifier(), is(instanceOf(TrustAllSSLSocketVerifier.class))); + assertWarnings("the setting [xpack.security.authc.realms.conn settings.hostname_verification] has been deprecated and will be " + + "removed in a future version. 
use [xpack.security.authc.realms.conn settings.ssl.verification_mode] instead"); + + settings = Settings.builder().put("ssl.verification_mode", VerificationMode.CERTIFICATE).build(); + realmConfig = new RealmConfig("conn settings", settings, environment.settings(), environment); + options = SessionFactory.connectionOptions(realmConfig, new SSLService(environment.settings(), environment), + logger); + assertThat(options.getSSLSocketVerifier(), is(instanceOf(TrustAllSSLSocketVerifier.class))); + + settings = Settings.builder().put("ssl.verification_mode", VerificationMode.NONE).build(); + realmConfig = new RealmConfig("conn settings", settings, environment.settings(), environment); + options = SessionFactory.connectionOptions(realmConfig, new SSLService(environment.settings(), environment), + logger); + assertThat(options.getSSLSocketVerifier(), is(instanceOf(TrustAllSSLSocketVerifier.class))); + + settings = Settings.builder().put("ssl.verification_mode", VerificationMode.FULL).build(); + realmConfig = new RealmConfig("conn settings", settings, environment.settings(), environment); + options = SessionFactory.connectionOptions(realmConfig, new SSLService(environment.settings(), environment), + logger); + assertThat(options.getSSLSocketVerifier(), is(instanceOf(HostNameSSLSocketVerifier.class))); } public void testSessionFactoryDoesNotSupportUnauthenticated() { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index d8b96b28efd..f2bb5b0ca39 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.MockIndicesRequest; import 
org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -63,6 +65,7 @@ import org.elasticsearch.action.termvectors.TermVectorsAction; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -72,26 +75,35 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.license.GetLicenseAction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.security.SecurityTemplateService; import org.elasticsearch.xpack.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.security.action.user.AuthenticateRequest; +import org.elasticsearch.xpack.security.action.user.AuthenticateRequestBuilder; +import org.elasticsearch.xpack.security.action.user.ChangePasswordAction; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequest; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; +import org.elasticsearch.xpack.security.action.user.DeleteUserAction; +import 
org.elasticsearch.xpack.security.action.user.PutUserAction; +import org.elasticsearch.xpack.security.action.user.UserRequest; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler; +import org.elasticsearch.xpack.security.authc.esnative.NativeRealm; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; +import org.elasticsearch.xpack.security.authc.file.FileRealm; +import org.elasticsearch.xpack.security.authc.ldap.LdapRealm; +import org.elasticsearch.xpack.security.authc.pki.PkiRealm; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl; -import org.elasticsearch.xpack.security.authz.permission.DefaultRole; -import org.elasticsearch.xpack.security.authz.permission.GlobalPermission; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; -import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.GeneralPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; @@ -100,25 +112,22 @@ import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; 
-import java.util.Collection; -import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationExceptionRunAs; import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -133,7 +142,7 @@ public class AuthorizationServiceTests extends ESTestCase { private AuthorizationService authorizationService; private ThreadContext threadContext; private ThreadPool threadPool; - private Map roleMap = new HashMap<>(); + private Map roleMap = new HashMap<>(); private CompositeRolesStore rolesStore; @Before @@ -144,12 +153,28 @@ public class AuthorizationServiceTests extends ESTestCase { threadContext = new ThreadContext(Settings.EMPTY); threadPool = mock(ThreadPool.class); when(threadPool.getThreadContext()).thenReturn(threadContext); + final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[1]; - callback.onResponse(roleMap.get(i.getArguments()[0])); + ActionListener callback = + (ActionListener) i.getArguments()[2]; + Set names = (Set) 
i.getArguments()[0]; + assertNotNull(names); + Set roleDescriptors = new HashSet<>(); + for (String name : names) { + RoleDescriptor descriptor = roleMap.get(name); + if (descriptor != null) { + roleDescriptors.add(descriptor); + } + } + + if (roleDescriptors.isEmpty()) { + callback.onResponse(Role.EMPTY); + } else { + callback.onResponse( + CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache)); + } return Void.TYPE; - }).when(rolesStore).roles(any(String.class), any(ActionListener.class)); + }).when(rolesStore).roles(any(Set.class), any(FieldPermissionsCache.class), any(ActionListener.class)); authorizationService = new AuthorizationService(Settings.EMPTY, rolesStore, clusterService, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(Settings.EMPTY)); } @@ -228,7 +253,8 @@ public class AuthorizationServiceTests extends ESTestCase { public void testThatNonIndicesAndNonClusterActionIsDenied() { TransportRequest request = mock(TransportRequest.class); User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_role").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_role",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },null)); assertThrowsAuthorizationException( () -> authorize(createAuthentication(user), "whatever", request), @@ -240,7 +266,7 @@ public class AuthorizationServiceTests extends ESTestCase { public void testThatRoleWithNoIndicesIsDenied() { TransportRequest request = new IndicesExistsRequest("a"); User user = new User("test user", "no_indices"); - roleMap.put("no_indices", Role.builder("no_indices").cluster(ClusterPrivilege.action("")).build()); + roleMap.put("no_indices", new RoleDescriptor("a_role",null,null,null)); mockEmptyMetaData(); assertThrowsAuthorizationException( @@ -252,7 +278,8 @@ public class AuthorizationServiceTests extends ESTestCase { public void 
testSearchAgainstEmptyCluster() { User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_role").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_role", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); mockEmptyMetaData(); { @@ -283,7 +310,8 @@ public class AuthorizationServiceTests extends ESTestCase { public void testScrollRelatedRequestsAllowed() { User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_role").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_role", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); mockEmptyMetaData(); ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); @@ -317,7 +345,8 @@ public class AuthorizationServiceTests extends ESTestCase { TransportRequest request = new GetIndexRequest().indices("b"); ClusterState state = mockEmptyMetaData(); User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_role",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },null)); assertThrowsAuthorizationException( () -> authorize(createAuthentication(user), "indices:a", request), @@ -333,7 +362,8 @@ public class AuthorizationServiceTests extends ESTestCase { request.alias(new Alias("a2")); ClusterState state = mockEmptyMetaData(); User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_role",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },null)); assertThrowsAuthorizationException( () -> authorize(createAuthentication(user), CreateIndexAction.NAME, request), @@ 
-349,7 +379,8 @@ public class AuthorizationServiceTests extends ESTestCase { request.alias(new Alias("a2")); ClusterState state = mockEmptyMetaData(); User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_all").add(IndexPrivilege.ALL, "a", "a2").build()); + roleMap.put("a_all", new RoleDescriptor("a_all",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a", "a2").privileges("all").build() },null)); authorize(createAuthentication(user), CreateIndexAction.NAME, request); @@ -367,7 +398,8 @@ public class AuthorizationServiceTests extends ESTestCase { authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); - roleMap.put("a_all", Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_all",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },null)); assertThrowsAuthorizationException( () -> authorize(createAuthentication(anonymousUser), "indices:a", request), @@ -389,7 +421,8 @@ public class AuthorizationServiceTests extends ESTestCase { authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings)); - roleMap.put("a_all", Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_all",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },null)); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> authorize(createAuthentication(anonymousUser), "indices:a", request)); @@ -405,7 +438,8 @@ public class AuthorizationServiceTests extends ESTestCase { TransportRequest request = new GetIndexRequest().indices("not-an-index-*").indicesOptions(options); 
ClusterState state = mockEmptyMetaData(); User user = new User("test user", "a_all"); - roleMap.put("a_all", Role.builder("a_all").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("a_all", new RoleDescriptor("a_all",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },null)); final IndexNotFoundException nfe = expectThrows( IndexNotFoundException.class, @@ -431,7 +465,7 @@ public class AuthorizationServiceTests extends ESTestCase { public void testRunAsRequestWithoutLookedUpBy() { AuthenticateRequest request = new AuthenticateRequest("run as me"); - roleMap.put("can run as", SuperuserRole.INSTANCE); + roleMap.put("can run as", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); User user = new User("test user", new String[] { "can run as" }, new User("run as me", Strings.EMPTY_ARRAY)); Authentication authentication = new Authentication(user, new RealmRef("foo", "bar", "baz"), null); assertThat(user.runAs(), is(notNullValue())); @@ -446,11 +480,9 @@ public class AuthorizationServiceTests extends ESTestCase { TransportRequest request = mock(TransportRequest.class); User user = new User("test user", new String[] { "can run as" }, new User("run as me", "doesn't exist")); assertThat(user.runAs(), is(notNullValue())); - roleMap.put("can run as", Role - .builder("can run as") - .runAs(new GeneralPrivilege("", "not the right user")) - .add(IndexPrivilege.ALL, "a") - .build()); + roleMap.put("can run as", new RoleDescriptor("can run as",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, + new String[] { "not the right user" })); assertThrowsAuthorizationExceptionRunAs( () -> authorize(createAuthentication(user), "indices:a", request), @@ -463,11 +495,9 @@ public class AuthorizationServiceTests extends ESTestCase { TransportRequest request = new GetIndexRequest().indices("a"); User user = new User("test user", new String[] { "can run as" }, new User("run as me", "b")); 
assertThat(user.runAs(), is(notNullValue())); - roleMap.put("can run as", Role - .builder("can run as") - .runAs(new GeneralPrivilege("", "run as me")) - .add(IndexPrivilege.ALL, "a") - .build()); + roleMap.put("can run as", new RoleDescriptor("can run as",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, + new String[] { "run as me" })); if (randomBoolean()) { ClusterState state = mock(ClusterState.class); @@ -477,10 +507,8 @@ public class AuthorizationServiceTests extends ESTestCase { .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) .numberOfShards(1).numberOfReplicas(0).build(), true) .build()); - roleMap.put("b", Role - .builder("b") - .add(IndexPrivilege.ALL, "b") - .build()); + roleMap.put("b", new RoleDescriptor("b",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("b").privileges("all").build() }, null)); } else { mockEmptyMetaData(); } @@ -497,11 +525,9 @@ public class AuthorizationServiceTests extends ESTestCase { TransportRequest request = new GetIndexRequest().indices("b"); User user = new User("test user", new String[] { "can run as" }, new User("run as me", "b")); assertThat(user.runAs(), is(notNullValue())); - roleMap.put("can run as", Role - .builder("can run as") - .runAs(new GeneralPrivilege("", "run as me")) - .add(IndexPrivilege.ALL, "a") - .build()); + roleMap.put("can run as", new RoleDescriptor("can run as",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, + new String[] { "run as me" })); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() @@ -509,10 +535,8 @@ public class AuthorizationServiceTests extends ESTestCase { .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) .numberOfShards(1).numberOfReplicas(0).build(), true) .build()); - roleMap.put("b", 
Role - .builder("b") - .add(IndexPrivilege.ALL, "b") - .build()); + roleMap.put("b", new RoleDescriptor("b",null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("b").privileges("all").build() }, null)); authorize(createAuthentication(user), "indices:a", request); verify(auditTrail).runAsGranted(user, "indices:a", request); @@ -522,10 +546,8 @@ public class AuthorizationServiceTests extends ESTestCase { public void testNonXPackUserCannotExecuteOperationAgainstSecurityIndex() { User user = new User("all_access_user", "all_access"); - roleMap.put("all_access", Role.builder("all_access") - .add(IndexPrivilege.ALL, "*") - .cluster(ClusterPrivilege.ALL) - .build()); + roleMap.put("all_access", new RoleDescriptor("all access",new String[] { "all" }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("all").build() }, null)); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() @@ -577,10 +599,8 @@ public class AuthorizationServiceTests extends ESTestCase { public void testGrantedNonXPackUserCanExecuteMonitoringOperationsAgainstSecurityIndex() { User user = new User("all_access_user", "all_access"); - roleMap.put("all_access", Role.builder("all_access") - .add(IndexPrivilege.ALL, "*") - .cluster(ClusterPrivilege.ALL) - .build()); + roleMap.put("all_access", new RoleDescriptor("all access",new String[] { "all" }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("all").build() }, null)); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() @@ -609,8 +629,8 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testXPackUserAndSuperusersCanExecuteOperationAgainstSecurityIndex() { - final User superuser = new User("custom_admin", SuperuserRole.NAME); - roleMap.put(SuperuserRole.NAME, 
Role.builder(SuperuserRole.DESCRIPTOR).build()); + final User superuser = new User("custom_admin", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()); + roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() @@ -646,8 +666,8 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testXPackUserAndSuperusersCanExecuteOperationAgainstSecurityIndexWithWildcard() { - final User superuser = new User("custom_admin", SuperuserRole.NAME); - roleMap.put(SuperuserRole.NAME, Role.builder(SuperuserRole.DESCRIPTOR).build()); + final User superuser = new User("custom_admin", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()); + roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() @@ -674,10 +694,8 @@ public class AuthorizationServiceTests extends ESTestCase { final AnonymousUser anonymousUser = new AnonymousUser(settings); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); - roleMap.put("anonymous_user_role", Role.builder("anonymous_user_role") - .cluster(ClusterPrivilege.ALL) - .add(IndexPrivilege.ALL, "a") - .build()); + roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role",new String[] { "all" }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); mockEmptyMetaData(); // sanity check the anonymous user @@ -691,44 +709,24 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testDefaultRoleUserWithoutRoles() { - 
PlainActionFuture> rolesFuture = new PlainActionFuture<>(); + PlainActionFuture rolesFuture = new PlainActionFuture<>(); authorizationService.roles(new User("no role user"), rolesFuture); - final Collection roles = rolesFuture.actionGet(); - assertEquals(1, roles.size()); - assertEquals(DefaultRole.NAME, roles.iterator().next().name()); + final Role roles = rolesFuture.actionGet(); + assertEquals(Role.EMPTY, roles); } - public void testDefaultRoleUserWithoutRolesAnonymousUserEnabled() { + public void testAnonymousUserEnabledRoleAdded() { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_user_role").build(); final AnonymousUser anonymousUser = new AnonymousUser(settings); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); - roleMap.put("anonymous_user_role", Role.builder("anonymous_user_role") - .cluster(ClusterPrivilege.ALL) - .add(IndexPrivilege.ALL, "a") - .build()); + roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role",new String[] { "all" }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); mockEmptyMetaData(); - PlainActionFuture> rolesFuture = new PlainActionFuture<>(); + PlainActionFuture rolesFuture = new PlainActionFuture<>(); authorizationService.roles(new User("no role user"), rolesFuture); - final Collection roles = rolesFuture.actionGet(); - assertEquals(2, roles.size()); - for (Role role : roles) { - assertThat(role.name(), either(equalTo(DefaultRole.NAME)).or(equalTo("anonymous_user_role"))); - } - } - - public void testDefaultRoleUserWithSomeRole() { - roleMap.put("role", Role.builder("role") - .cluster(ClusterPrivilege.ALL) - .add(IndexPrivilege.ALL, "a") - .build()); - PlainActionFuture> rolesFuture = new PlainActionFuture<>(); - authorizationService.roles(new User("user with role", "role"), rolesFuture); - 
final Collection roles = rolesFuture.actionGet(); - assertEquals(2, roles.size()); - for (Role role : roles) { - assertThat(role.name(), either(equalTo(DefaultRole.NAME)).or(equalTo("role"))); - } + final Role roles = rolesFuture.actionGet(); + assertThat(roles.name(), containsString("anonymous_user_role")); } public void testCompositeActionsAreImmediatelyRejected() { @@ -737,7 +735,7 @@ public class AuthorizationServiceTests extends ESTestCase { String action = compositeRequest.v1(); TransportRequest request = compositeRequest.v2(); User user = new User("test user", "no_indices"); - roleMap.put("no_indices", Role.builder("no_indices").cluster(ClusterPrivilege.action("")).build()); + roleMap.put("no_indices", new RoleDescriptor("no_indices", null, null, null)); assertThrowsAuthorizationException( () -> authorize(createAuthentication(user), action, request), action, "test user"); verify(auditTrail).accessDenied(user, action, request); @@ -750,8 +748,9 @@ public class AuthorizationServiceTests extends ESTestCase { String action = compositeRequest.v1(); TransportRequest request = compositeRequest.v2(); User user = new User("test user", "role"); - roleMap.put("role", Role.builder("role").add(IndexPrivilege.ALL, - randomBoolean() ? "a" : "index").build()); + roleMap.put("role", new RoleDescriptor("role", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(randomBoolean() ? "a" : "index").privileges("all").build() }, + null)); authorize(createAuthentication(user), action, request); verify(auditTrail).accessGranted(user, action, request); verifyNoMoreInteractions(auditTrail); @@ -761,8 +760,9 @@ public class AuthorizationServiceTests extends ESTestCase { String action = randomCompositeRequest().v1(); TransportRequest request = mock(TransportRequest.class); User user = new User("test user", "role"); - roleMap.put("role", Role.builder("role").add(IndexPrivilege.ALL, - randomBoolean() ? 
"a" : "index").build()); + roleMap.put("role", new RoleDescriptor("role", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(randomBoolean() ? "a" : "index").privileges("all").build() }, + null)); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> authorize(createAuthentication(user), action, request)); assertThat(illegalStateException.getMessage(), containsString("Composite actions must implement CompositeIndicesRequest")); @@ -797,15 +797,162 @@ public class AuthorizationServiceTests extends ESTestCase { TransportRequest request = new MockIndicesRequest(IndicesOptions.strictExpandOpen(), "index"); User userAllowed = new User("userAllowed", "roleAllowed"); - roleMap.put("roleAllowed", Role.builder("roleAllowed").add(IndexPrivilege.ALL, "index").build()); + roleMap.put("roleAllowed", new RoleDescriptor("roleAllowed", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("index").privileges("all").build() }, null)); User userDenied = new User("userDenied", "roleDenied"); - roleMap.put("roleDenied", Role.builder("roleDenied").add(IndexPrivilege.ALL, "a").build()); + roleMap.put("roleDenied", new RoleDescriptor("roleDenied", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); mockEmptyMetaData(); authorize(createAuthentication(userAllowed), action, request); assertThrowsAuthorizationException( () -> authorize(createAuthentication(userDenied), action, request), action, "userDenied"); } + public void testSameUserPermission() { + final User user = new User("joe"); + final boolean changePasswordRequest = randomBoolean(); + final TransportRequest request = changePasswordRequest ? + new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(user.principal()).request(); + final String action = changePasswordRequest ? 
ChangePasswordAction.NAME : AuthenticateAction.NAME; + final Authentication authentication = mock(Authentication.class); + final RealmRef authenticatedBy = mock(RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()) + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); + + assertThat(request, instanceOf(UserRequest.class)); + assertTrue(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + } + + public void testSameUserPermissionDoesNotAllowNonMatchingUsername() { + final User user = new User("joe"); + final boolean changePasswordRequest = randomBoolean(); + final String username = randomFrom("", "joe" + randomAsciiOfLengthBetween(1, 5), randomAsciiOfLengthBetween(3, 10)); + final TransportRequest request = changePasswordRequest ? + new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); + final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; + final Authentication authentication = mock(Authentication.class); + final RealmRef authenticatedBy = mock(RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()) + .thenReturn(changePasswordRequest ? 
randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); + + assertThat(request, instanceOf(UserRequest.class)); + assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + + final User user2 = new User("admin", new String[] { "bar" }, user); + when(authentication.getUser()).thenReturn(user2); + when(authentication.getRunAsUser()).thenReturn(user); + final RealmRef lookedUpBy = mock(RealmRef.class); + when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); + when(lookedUpBy.getType()) + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); + // this should still fail since the username is still different + assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + + if (request instanceof ChangePasswordRequest) { + ((ChangePasswordRequest)request).username("joe"); + } else { + ((AuthenticateRequest)request).username("joe"); + } + assertTrue(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + } + + public void testSameUserPermissionDoesNotAllowOtherActions() { + final User user = mock(User.class); + final TransportRequest request = mock(TransportRequest.class); + final String action = randomFrom(PutUserAction.NAME, DeleteUserAction.NAME, ClusterHealthAction.NAME, ClusterStateAction.NAME, + ClusterStatsAction.NAME, GetLicenseAction.NAME); + final Authentication authentication = mock(Authentication.class); + final RealmRef authenticatedBy = mock(RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(randomBoolean() ? 
user : new User("runAs")); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()) + .thenReturn(randomAsciiOfLengthBetween(4, 12)); + + assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + verifyZeroInteractions(user, request, authentication); + } + + public void testSameUserPermissionRunAsChecksAuthenticatedBy() { + final String username = "joe"; + final User runAs = new User(username); + final User user = new User("admin", new String[] { "bar" }, runAs); + final boolean changePasswordRequest = randomBoolean(); + final TransportRequest request = changePasswordRequest ? + new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); + final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; + final Authentication authentication = mock(Authentication.class); + final RealmRef authenticatedBy = mock(RealmRef.class); + final RealmRef lookedUpBy = mock(RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(runAs); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); + when(authentication.isRunAs()).thenReturn(true); + when(lookedUpBy.getType()) + .thenReturn(changePasswordRequest ? 
randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); + assertTrue(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + + when(authentication.getRunAsUser()).thenReturn(user); + assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + } + + public void testSameUserPermissionDoesNotAllowChangePasswordForOtherRealms() { + final User user = new User("joe"); + final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request(); + final String action = ChangePasswordAction.NAME; + final Authentication authentication = mock(Authentication.class); + final RealmRef authenticatedBy = mock(RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(user); + when(authentication.isRunAs()).thenReturn(false); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()).thenReturn(randomFrom(LdapRealm.LDAP_TYPE, FileRealm.TYPE, LdapRealm.AD_TYPE, PkiRealm.TYPE, + randomAsciiOfLengthBetween(4, 12))); + + assertThat(request, instanceOf(UserRequest.class)); + assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + verify(authenticatedBy).getType(); + verify(authentication).getRunAsUser(); + verify(authentication).getAuthenticatedBy(); + verify(authentication).isRunAs(); + verifyNoMoreInteractions(authenticatedBy, authentication); + } + + public void testSameUserPermissionDoesNotAllowChangePasswordForLookedUpByOtherRealms() { + final User runAs = new User("joe"); + final User user = new User("admin", new String[] { "bar" }, runAs); + final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(runAs.principal()).request(); + final String action = ChangePasswordAction.NAME; + final Authentication authentication = mock(Authentication.class); + final 
RealmRef authenticatedBy = mock(RealmRef.class); + final RealmRef lookedUpBy = mock(RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(runAs); + when(authentication.isRunAs()).thenReturn(true); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); + when(lookedUpBy.getType()).thenReturn(randomFrom(LdapRealm.LDAP_TYPE, FileRealm.TYPE, LdapRealm.AD_TYPE, PkiRealm.TYPE, + randomAsciiOfLengthBetween(4, 12))); + + assertThat(request, instanceOf(UserRequest.class)); + assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); + verify(authentication).getLookedUpBy(); + verify(authentication).getRunAsUser(); + verify(authentication).isRunAs(); + verify(lookedUpBy).getType(); + verifyNoMoreInteractions(authentication, lookedUpBy, authenticatedBy); + } + private static Tuple randomCompositeRequest() { switch(randomIntBetween(0, 7)) { case 0: @@ -833,57 +980,13 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testDoesNotUseRolesStoreForXPackUser() { - PlainActionFuture> rolesFuture = new PlainActionFuture<>(); + PlainActionFuture rolesFuture = new PlainActionFuture<>(); authorizationService.roles(XPackUser.INSTANCE, rolesFuture); - final Collection roles = rolesFuture.actionGet(); - assertThat(roles, contains(SuperuserRole.INSTANCE)); + final Role roles = rolesFuture.actionGet(); + assertThat(roles, equalTo(ReservedRolesStore.SUPERUSER_ROLE)); verifyZeroInteractions(rolesStore); } - public void testPermissionIncludesAnonymousUserPermissions() { - Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "a_all").build(); - final AnonymousUser anonymousUser = new AnonymousUser(settings); - authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(), threadPool, 
anonymousUser); - final boolean roleExists = randomBoolean(); - final Role anonymousRole = Role.builder("a_all").add(IndexPrivilege.ALL, "a").build(); - if (roleExists) { - roleMap.put("a_all", anonymousRole); - } - final MetaData metaData = MetaData.builder() - .put(new IndexMetaData.Builder("a") - .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) - .numberOfShards(1).numberOfReplicas(0).build(), true) - .build(); - - User user = new User("no_roles"); - PlainActionFuture> rolesFuture = new PlainActionFuture<>(); - authorizationService.roles(user, rolesFuture); - final Collection roles = rolesFuture.actionGet(); - GlobalPermission globalPermission = authorizationService.permission(roles); - verify(rolesStore).roles(eq("a_all"), any(ActionListener.class)); - - if (roleExists) { - assertThat(roles, containsInAnyOrder(anonymousRole, DefaultRole.INSTANCE)); - assertFalse(globalPermission.isEmpty()); - // by default all users have a DefaultRole that grants cluster actions like change password - assertFalse(globalPermission.cluster().isEmpty()); - assertFalse(globalPermission.indices().isEmpty()); - Map authzMap = - globalPermission.indices().authorize(SearchAction.NAME, Collections.singleton("a"), metaData); - assertTrue(authzMap.containsKey("a")); - assertTrue(authzMap.get("a").isGranted()); - assertFalse(authzMap.get("a").getFieldPermissions().hasFieldLevelSecurity()); - assertNull(authzMap.get("a").getQueries()); - } else { - assertThat(roles, contains(DefaultRole.INSTANCE)); - assertFalse(globalPermission.isEmpty()); - // by default all users have a DefaultRole that grants cluster actions like change password - assertFalse(globalPermission.cluster().isEmpty()); - assertTrue(globalPermission.indices().isEmpty()); - } - } - public void testGetRolesForSystemUserThrowsException() { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> authorizationService.roles(SystemUser.INSTANCE, null)); diff --git 
a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java index 3ae44471c90..2f623ad9fa4 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java @@ -11,14 +11,15 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.user.User; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.List; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -27,15 +28,18 @@ public class AuthorizedIndicesTests extends ESTestCase { public void testAuthorizedIndicesUserWithoutRoles() { User user = new User("test user"); - AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, Collections.emptyList(), "", MetaData.EMPTY_META_DATA); + AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, Role.EMPTY, "", + MetaData.EMPTY_META_DATA); List list = authorizedIndices.get(); assertTrue(list.isEmpty()); } public void testAuthorizedIndicesUserWithSomeRoles() { User user = new User("test user", "a_star", "b"); - Role aStarRole = Role.builder("a_star").add(IndexPrivilege.ALL, 
"a*").build(); - Role bRole = Role.builder("b").add(IndexPrivilege.READ, "b").build(); + RoleDescriptor aStarRole = new RoleDescriptor("a_star", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a*").privileges("all").build() }, null); + RoleDescriptor bRole = new RoleDescriptor("b", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("b").privileges("READ").build() }, null); Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); MetaData metaData = MetaData.builder() .put(new IndexMetaData.Builder("a1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) @@ -50,7 +54,8 @@ public class AuthorizedIndicesTests extends ESTestCase { .putAlias(new AliasMetaData.Builder("ba").build()) .build(), true) .build(); - Collection roles = Arrays.asList(aStarRole, bRole); + Role roles = CompositeRolesStore.buildRoleFromDescriptors(Sets.newHashSet(aStarRole, bRole), + new FieldPermissionsCache(Settings.EMPTY)); AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, roles, SearchAction.NAME, metaData); List list = authorizedIndices.get(); assertThat(list, containsInAnyOrder("a1", "a2", "aaaaaa", "b", "ab")); @@ -61,8 +66,7 @@ public class AuthorizedIndicesTests extends ESTestCase { public void testAuthorizedIndicesUserWithSomeRolesEmptyMetaData() { User user = new User("test user", "role"); Role role = Role.builder("role").add(IndexPrivilege.ALL, "*").build(); - Collection roles = Collections.singletonList(role); - AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, roles, SearchAction.NAME, MetaData.EMPTY_META_DATA); + AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, role, SearchAction.NAME, MetaData.EMPTY_META_DATA); List list = authorizedIndices.get(); assertTrue(list.isEmpty()); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index 1cc883a2e96..3b3060815b4 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.authz; +import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; @@ -105,9 +106,8 @@ public class IndexAliasesTests extends SecurityIntegTestCase { client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_1", "test_alias")::get, IndicesAliasesAction.NAME, "create_only"); - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_*", "test_alias")::get); - assertThat(indexNotFoundException.toString(), containsString("[test_*]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + .addAlias("test_*", "test_alias")::get, IndicesAliasesAction.NAME, "create_only"); } public void testCreateIndexAndAliasesCreateOnlyPermission() { @@ -130,13 +130,11 @@ public class IndexAliasesTests extends SecurityIntegTestCase { client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "alias_1")::get, IndicesAliasesAction.NAME, "create_only"); - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "alias_*")::get); - assertThat(indexNotFoundException.toString(), containsString("[alias_*")); + 
assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + .removeAlias("test_1", "alias_*")::get, IndicesAliasesAction.NAME, "create_only"); - indexNotFoundException = expectThrows(IndexNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + .removeAlias("test_1", "_all")::get, IndicesAliasesAction.NAME, "create_only"); } public void testGetAliasesCreateOnlyPermissionStrict() { @@ -147,24 +145,21 @@ public class IndexAliasesTests extends SecurityIntegTestCase { assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, client().filterWithHeader(headers) + assertThrowsAuthorizationException(client().filterWithHeader(headers) .admin().indices().prepareGetAliases("_all") - .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - indexNotFoundException = expectThrows(IndexNotFoundException.class, client().filterWithHeader(headers).admin().indices() - .prepareGetAliases().setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices() + 
.prepareGetAliases().setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, + GetAliasesAction.NAME, "create_only"); - GetAliasesResponse getAliasesResponse = client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") - .setIndices("test_*").setIndicesOptions(IndicesOptions.strictExpand()).get(); - assertEquals(0, getAliasesResponse.getAliases().size()); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") + .setIndices("test_*").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); //this throws exception no matter what the indices options are because the aliases part cannot be resolved to any alias //and there is no way to "allow_no_aliases" like we can do with indices. - indexNotFoundException = expectThrows(IndexNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareGetAliases()::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases()::get, + GetAliasesAction.NAME, "create_only"); } public void testGetAliasesCreateOnlyPermissionIgnoreUnavailable() { @@ -172,28 +167,23 @@ public class IndexAliasesTests extends SecurityIntegTestCase { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecuredString("test123".toCharArray()))); - GetAliasesResponse getAliasesResponse = client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") - .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get(); - assertEquals(0, getAliasesResponse.getAliases().size()); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") + .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, 
GetAliasesAction.NAME, "create_only"); - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, client().filterWithHeader(headers) - .admin().indices().prepareGetAliases("_all") - .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("_all") + .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); - indexNotFoundException = expectThrows(IndexNotFoundException.class, client().filterWithHeader(headers).admin().indices() - .prepareGetAliases().setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases().setIndices("test_1") + .setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); - getAliasesResponse = client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") - .setIndices("test_*").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get(); - assertEquals(0, getAliasesResponse.getAliases().size()); + assertThrowsAuthorizationException( + client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") + .setIndices("test_*").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); //this throws exception no matter what the indices options are because the aliases part cannot be resolved to any alias //and there is no way to "allow_no_aliases" like we can do with indices. 
- indexNotFoundException = expectThrows(IndexNotFoundException.class, client().filterWithHeader(headers).admin().indices() - .prepareGetAliases().setIndicesOptions(IndicesOptions.lenientExpandOpen())::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices() + .prepareGetAliases().setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); } public void testCreateIndexThenAliasesCreateAndAliasesPermission() { diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 15fefd1912e..5720116b739 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -41,19 +41,19 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.security.SecurityTemplateService; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; -import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.user.AnonymousUser; import 
org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.user.XPackUser; import org.junit.Before; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -77,7 +77,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { private AuthorizationService authzService; private IndicesAndAliasesResolver defaultIndicesResolver; private IndexNameExpressionResolver indexNameExpressionResolver; - private Map roleMap; + private Map roleMap; @Before public void setup() { @@ -113,16 +113,34 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { String[] authorizedIndices = new String[] { "bar", "bar-closed", "foofoobar", "foofoo", "missing", "foofoo-closed"}; String[] dashIndices = new String[]{"-index10", "-index11", "-index20", "-index21"}; roleMap = new HashMap<>(); - roleMap.put("role", Role.builder("role").add(IndexPrivilege.ALL, authorizedIndices).build()); - roleMap.put("dash", Role.builder("dash").add(IndexPrivilege.ALL, dashIndices).build()); - roleMap.put("test", Role.builder("test").cluster(ClusterPrivilege.MONITOR).build()); - roleMap.put(SuperuserRole.NAME, Role.builder(SuperuserRole.DESCRIPTOR).build()); + roleMap.put("role", new RoleDescriptor("role", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(authorizedIndices).privileges("all").build() }, null)); + roleMap.put("dash", new RoleDescriptor("dash", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(dashIndices).privileges("all").build() }, null)); + roleMap.put("test", new RoleDescriptor("role", new String[] { "monitor" }, null, null)); + roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); doAnswer((i) -> { ActionListener callback = - (ActionListener) 
i.getArguments()[1]; - callback.onResponse(roleMap.get(i.getArguments()[0])); + (ActionListener) i.getArguments()[2]; + Set names = (Set) i.getArguments()[0]; + assertNotNull(names); + Set roleDescriptors = new HashSet<>(); + for (String name : names) { + RoleDescriptor descriptor = roleMap.get(name); + if (descriptor != null) { + roleDescriptors.add(descriptor); + } + } + + if (roleDescriptors.isEmpty()) { + callback.onResponse(Role.EMPTY); + } else { + callback.onResponse( + CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache)); + } return Void.TYPE; - }).when(rolesStore).roles(any(String.class), any(ActionListener.class)); + }).when(rolesStore).roles(any(Set.class), any(FieldPermissionsCache.class), any(ActionListener.class)); ClusterService clusterService = mock(ClusterService.class); authzService = new AuthorizationService(settings, rolesStore, clusterService, @@ -1048,8 +1066,8 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testNonXPackUserAccessingSecurityIndex() { User allAccessUser = new User("all_access", "all_access"); - roleMap.put("all_access", - Role.builder("all_access").add(IndexPrivilege.ALL, "*").cluster(ClusterPrivilege.ALL).build()); + roleMap.put("all_access", new RoleDescriptor("all_access", new String[] { "all" }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("all").build() }, null)); { SearchRequest request = new SearchRequest(); @@ -1093,7 +1111,8 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { // make the user authorized String dateTimeIndex = indexNameExpressionResolver.resolveDateMathExpression(""); String[] authorizedIndices = new String[] { "bar", "bar-closed", "foofoobar", "foofoo", "missing", "foofoo-closed", dateTimeIndex}; - roleMap.put("role", Role.builder("role").add(IndexPrivilege.ALL, authorizedIndices).build()); + roleMap.put("role", new RoleDescriptor("role", null, + new IndicesPrivileges[] { 
IndicesPrivileges.builder().indices(authorizedIndices).privileges("all").build() }, null)); SearchRequest request = new SearchRequest(""); if (randomBoolean()) { @@ -1130,7 +1149,8 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { // make the user authorized String[] authorizedIndices = new String[] { "bar", "bar-closed", "foofoobar", "foofoo", "missing", "foofoo-closed", indexNameExpressionResolver.resolveDateMathExpression("")}; - roleMap.put("role", Role.builder("role").add(IndexPrivilege.ALL, authorizedIndices).build()); + roleMap.put("role", new RoleDescriptor("role", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(authorizedIndices).privileges("all").build() }, null)); GetAliasesRequest request = new GetAliasesRequest("").indices("foo", "foofoo"); Set indices = defaultIndicesResolver.resolve(request, metaData, buildAuthorizedIndices(user, GetAliasesAction.NAME)); //the union of all indices and aliases gets returned @@ -1144,7 +1164,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { // TODO with the removal of DeleteByQuery is there another way to test resolving a write action? 
private AuthorizedIndices buildAuthorizedIndices(User user, String action) { - PlainActionFuture> rolesListener = new PlainActionFuture<>(); + PlainActionFuture rolesListener = new PlainActionFuture<>(); authzService.roles(user, rolesListener); return new AuthorizedIndices(user, rolesListener.actionGet(), action, metaData); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java index ced68d2c634..003fa4373e0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; +import java.util.Collections; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -39,7 +40,7 @@ public class RoleDescriptorTests extends ESTestCase { RoleDescriptor.IndicesPrivileges.builder() .indices("i1", "i2") .privileges("read") - .fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, null)) + .grantedFields("body", "title") .query("{\"query\": {\"match_all\": {}}}") .build() }; @@ -54,7 +55,7 @@ public class RoleDescriptorTests extends ESTestCase { RoleDescriptor.IndicesPrivileges.builder() .indices("i1", "i2") .privileges("read") - .fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, null)) + .grantedFields("body", "title") .query("{\"query\": {\"match_all\": {}}}") .build() }; @@ -119,7 +120,7 @@ public class RoleDescriptorTests extends ESTestCase { RoleDescriptor.IndicesPrivileges.builder() .indices("i1", "i2") .privileges("read") - .fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, null)) + .grantedFields("body", 
"title") .query("{\"query\": {\"match_all\": {}}}") .build() }; @@ -143,4 +144,15 @@ public class RoleDescriptorTests extends ESTestCase { assertArrayEquals(new String[] { "m", "n" }, rd.getRunAs()); assertNull(rd.getIndicesPrivileges()[0].getQuery()); } + + public void testParseIgnoresTransientMetadata() throws Exception { + final RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all" }, null, null, + Collections.singletonMap("_unlicensed_feature", true), Collections.singletonMap("foo", "bar")); + XContentBuilder b = jsonBuilder(); + descriptor.toXContent(b, ToXContent.EMPTY_PARAMS); + RoleDescriptor parsed = RoleDescriptor.parse("test", b.bytes(), false); + assertNotNull(parsed); + assertEquals(1, parsed.getTransientMetadata().size()); + assertEquals(true, parsed.getTransientMetadata().get("enabled")); + } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java index 88964d2a0b7..5e1db413662 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java @@ -5,21 +5,9 @@ */ package org.elasticsearch.xpack.security.authz.accesscontrol; -import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl; -import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; import java.util.Collections; -import java.util.Set; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static 
org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; /** * Unit tests for {@link IndicesAccessControl} @@ -28,164 +16,7 @@ public class IndicesAccessControlTests extends ESTestCase { public void testEmptyIndicesAccessControl() { IndicesAccessControl indicesAccessControl = new IndicesAccessControl(true, Collections.emptyMap()); - assertThat(indicesAccessControl.isGranted(), is(true)); - assertThat(indicesAccessControl.getIndexPermissions(randomAsciiOfLengthBetween(3,20)), nullValue()); - } - - public void testMergeFields() { - IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(new String[]{"a", "c"}, null), null); - IndexAccessControl other = new IndexAccessControl(true,new FieldPermissions(new String[]{"b"}, null), null); - - IndexAccessControl merge1 = indexAccessControl.merge(other); - assertTrue(merge1.getFieldPermissions().grantsAccessTo("a")); - assertTrue(merge1.getFieldPermissions().grantsAccessTo("b")); - assertTrue(merge1.getFieldPermissions().grantsAccessTo("c")); - assertTrue(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge1.isGranted(), is(true)); - assertThat(merge1.getQueries(), nullValue()); - - IndexAccessControl merge2 = other.merge(indexAccessControl); - assertTrue(merge2.getFieldPermissions().grantsAccessTo("a")); - assertTrue(merge2.getFieldPermissions().grantsAccessTo("b")); - assertTrue(merge2.getFieldPermissions().grantsAccessTo("c")); - assertTrue(merge2.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge2.isGranted(), is(true)); - assertThat(merge2.getQueries(), nullValue()); - } - - public void testMergeEmptyAndNullFields() { - IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(new String[]{}, null), null); - IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), null); - - IndexAccessControl merge1 = indexAccessControl.merge(other); 
- assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge1.isGranted(), is(true)); - assertThat(merge1.getQueries(), nullValue()); - - IndexAccessControl merge2 = other.merge(indexAccessControl); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge2.isGranted(), is(true)); - assertThat(merge2.getQueries(), nullValue()); - } - - public void testMergeNullFields() { - IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(new String[]{"a", "b"}, null), null); - IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), null); - - IndexAccessControl merge1 = indexAccessControl.merge(other); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge1.isGranted(), is(true)); - assertThat(merge1.getQueries(), nullValue()); - - IndexAccessControl merge2 = other.merge(indexAccessControl); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge2.isGranted(), is(true)); - assertThat(merge2.getQueries(), nullValue()); - } - - public void testMergeQueries() { - BytesReference query1 = new BytesArray(new byte[] { 0x1 }); - BytesReference query2 = new BytesArray(new byte[] { 0x2 }); - IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(), Collections.singleton - (query1)); - IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), Collections.singleton(query2)); - - IndexAccessControl merge1 = indexAccessControl.merge(other); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge1.isGranted(), is(true)); - assertThat(merge1.getQueries(), containsInAnyOrder(query1, query2)); - - IndexAccessControl merge2 = other.merge(indexAccessControl); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge2.isGranted(), is(true)); - assertThat(merge1.getQueries(), containsInAnyOrder(query1, 
query2)); - } - - public void testMergeNullQuery() { - BytesReference query1 = new BytesArray(new byte[] { 0x1 }); - IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(), Collections.singleton - (query1)); - IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), null); - - IndexAccessControl merge1 = indexAccessControl.merge(other); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge1.isGranted(), is(true)); - assertThat(merge1.getQueries(), nullValue()); - - IndexAccessControl merge2 = other.merge(indexAccessControl); - assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity()); - assertThat(merge2.isGranted(), is(true)); - assertThat(merge1.getQueries(), nullValue()); - } - - public void testMergeNotGrantedAndGranted() { - final String[] notGrantedFields = randomFrom(new String[]{}, new String[]{"baz"}, null); - final Set notGrantedQueries = randomFrom(Collections.emptySet(), null, - Collections.singleton(new BytesArray(new byte[] { randomByte() }))); - final IndexAccessControl indexAccessControl = new IndexAccessControl(false, new FieldPermissions(notGrantedFields, null), - notGrantedQueries); - - final BytesReference query1 = new BytesArray(new byte[] { 0x1 }); - final String[] fields = - randomFrom(new String[]{"foo"}, new String[]{"foo", "bar"}, new String[]{}, null); - final Set queries = - randomFrom(Collections.singleton(query1), Collections.emptySet(), null); - final IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(fields, null), queries); - - IndexAccessControl merged = indexAccessControl.merge(other); - assertThat(merged.isGranted(), is(true)); - assertThat(merged.getQueries(), equalTo(queries)); - if (fields == null) { - assertFalse(merged.getFieldPermissions().hasFieldLevelSecurity()); - } else { - assertTrue(merged.getFieldPermissions().hasFieldLevelSecurity()); - if (notGrantedFields != null) { - for (String field : 
notGrantedFields) { - assertFalse(merged.getFieldPermissions().grantsAccessTo(field)); - } - } - for (String field : fields) { - assertTrue(merged.getFieldPermissions().grantsAccessTo(field)); - } - } - merged = other.merge(indexAccessControl); - assertThat(merged.isGranted(), is(true)); - assertThat(merged.getQueries(), equalTo(queries)); - if (fields == null) { - assertFalse(merged.getFieldPermissions().hasFieldLevelSecurity()); - } else { - assertTrue(merged.getFieldPermissions().hasFieldLevelSecurity()); - if (notGrantedFields != null) { - for (String field : notGrantedFields) { - assertFalse(merged.getFieldPermissions().grantsAccessTo(field)); - } - } - for (String field : fields) { - assertTrue(merged.getFieldPermissions().grantsAccessTo(field)); - } - } - } - - public void testMergeNotGranted() { - final String[] notGrantedFields = randomFrom(new String[]{}, new String[]{"baz"}, null); - final Set notGrantedQueries = randomFrom(Collections.emptySet(), null, - Collections.singleton(new BytesArray(new byte[] { randomByte() }))); - final IndexAccessControl indexAccessControl = new IndexAccessControl(false, new FieldPermissions(notGrantedFields, null), - notGrantedQueries); - - final BytesReference query1 = new BytesArray(new byte[] { 0x1 }); - final String[] fields = - randomFrom(new String[]{"foo"}, new String[]{"foo", "bar"}, new String[]{}, null); - final Set queries = - randomFrom(Collections.singleton(query1), Collections.emptySet(), null); - final IndexAccessControl other = new IndexAccessControl(false, new FieldPermissions(fields, null), queries); - - IndexAccessControl merged = indexAccessControl.merge(other); - assertThat(merged.isGranted(), is(false)); - assertThat(merged.getQueries(), equalTo(notGrantedQueries)); - - merged = other.merge(indexAccessControl); - assertThat(merged.isGranted(), is(false)); - assertThat(merged.getQueries(), equalTo(queries)); + assertTrue(indicesAccessControl.isGranted()); + 
assertNull(indicesAccessControl.getIndexPermissions(randomAsciiOfLengthBetween(3,20))); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java index 07b3471515f..9bf6ac6f24a 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java @@ -19,12 +19,15 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.IndicesPermission; import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; import java.io.IOException; +import java.util.Collections; import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -41,21 +44,24 @@ public class IndicesPermissionTests extends ESTestCase { ) .putAlias(AliasMetaData.builder("_alias")); MetaData md = MetaData.builder().put(imbBuilder).build(); + FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); // basics: - BytesReference query = new BytesArray("{}"); + Set query = Collections.singleton(new BytesArray("{}")); String[] fields = new String[]{"_field"}; - Role role = Role.builder("_role").add(new FieldPermissions(fields, null), query, IndexPrivilege.ALL, "_index").build(); - IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md); + Role role = 
Role.builder("_role") + .add(new FieldPermissions(fields, null), query, IndexPrivilege.ALL, "_index").build(); + IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1)); - assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query)); + assertThat(permissions.getIndexPermissions("_index").getQueries(), equalTo(query)); // no document level security: - role = Role.builder("_role").add(new FieldPermissions(fields, null), null, IndexPrivilege.ALL, "_index").build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md); + role = Role.builder("_role") + .add(new FieldPermissions(fields, null), null, IndexPrivilege.ALL, "_index").build(); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -63,48 +69,64 @@ public class IndicesPermissionTests extends ESTestCase { // no field level security: role = Role.builder("_role").add(new FieldPermissions(), query, IndexPrivilege.ALL, "_index").build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); 
assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1)); - assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query)); + assertThat(permissions.getIndexPermissions("_index").getQueries(), equalTo(query)); // index group associated with an alias: role = Role.builder("_role").add(new FieldPermissions(fields, null), query, IndexPrivilege.ALL, "_alias").build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1)); - assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query)); + assertThat(permissions.getIndexPermissions("_index").getQueries(), equalTo(query)); // match all fields String[] allFields = randomFrom(new String[]{"*"}, new String[]{"foo", "*"}, new String[]{randomAsciiOfLengthBetween(1, 10), "*"}); - role = Role.builder("_role").add(new FieldPermissions(allFields, null), query, IndexPrivilege.ALL, "_alias").build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md); + role = Role.builder("_role") + .add(new FieldPermissions(allFields, null), query, IndexPrivilege.ALL, "_alias").build(); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); 
assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1)); - assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query)); + assertThat(permissions.getIndexPermissions("_index").getQueries(), equalTo(query)); } - public void testIndicesPriviledgesStreaming() throws IOException { + public void testIndicesPrivilegesStreaming() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); String[] allowed = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; String[] denied = new String[]{allowed[0] + randomAsciiOfLength(5), allowed[1] + randomAsciiOfLength(5), allowed[2] + randomAsciiOfLength(5)}; - FieldPermissions fieldPermissions = new FieldPermissions(allowed, denied); RoleDescriptor.IndicesPrivileges.Builder indicesPrivileges = RoleDescriptor.IndicesPrivileges.builder(); - indicesPrivileges.fieldPermissions(fieldPermissions); + indicesPrivileges.grantedFields(allowed); + indicesPrivileges.deniedFields(denied); indicesPrivileges.query("{match_all:{}}"); indicesPrivileges.indices(randomAsciiOfLength(5), randomAsciiOfLength(5), randomAsciiOfLength(5)); indicesPrivileges.privileges("all", "read", "priv"); indicesPrivileges.build().writeTo(out); out.close(); StreamInput in = out.bytes().streamInput(); - RoleDescriptor.IndicesPrivileges readIndicesPriviledges = RoleDescriptor.IndicesPrivileges.createFrom(in); - assertEquals(readIndicesPriviledges, indicesPrivileges.build()); + RoleDescriptor.IndicesPrivileges readIndicesPrivileges = RoleDescriptor.IndicesPrivileges.createFrom(in); + assertEquals(readIndicesPrivileges, indicesPrivileges.build()); + + out = new BytesStreamOutput(); + out.setVersion(Version.V_5_0_0); + indicesPrivileges = RoleDescriptor.IndicesPrivileges.builder(); + indicesPrivileges.grantedFields(allowed); + 
indicesPrivileges.deniedFields(denied); + indicesPrivileges.query("{match_all:{}}"); + indicesPrivileges.indices(readIndicesPrivileges.getIndices()); + indicesPrivileges.privileges("all", "read", "priv"); + indicesPrivileges.build().writeTo(out); + out.close(); + in = out.bytes().streamInput(); + in.setVersion(Version.V_5_0_0); + RoleDescriptor.IndicesPrivileges readIndicesPrivileges2 = RoleDescriptor.IndicesPrivileges.createFrom(in); + assertEquals(readIndicesPrivileges, readIndicesPrivileges2); } // tests that field permissions are merged correctly when we authorize with several groups and don't crash when an index has no group @@ -115,12 +137,13 @@ public class IndicesPermissionTests extends ESTestCase { .put(new IndexMetaData.Builder("a2").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .build(); + FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); IndicesPermission.Group group1 = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(), null, "a1"); IndicesPermission.Group group2 = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(null, new String[]{"denied_field"}), null, "a1"); - IndicesPermission.Core core = new IndicesPermission.Core(group1, group2); + IndicesPermission core = new IndicesPermission(group1, group2); Map authzMap = - core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData); + core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData, fieldPermissionsCache); assertTrue(authzMap.get("a1").getFieldPermissions().grantsAccessTo("denied_field")); assertTrue(authzMap.get("a1").getFieldPermissions().grantsAccessTo(randomAsciiOfLength(5))); // did not define anything for ba so we allow all @@ -137,8 +160,8 @@ public class IndicesPermissionTests extends ESTestCase { , new String[]{"denied_field"}), null, "a2"); IndicesPermission.Group group4 = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(new 
String[]{"*_field2"} , new String[]{"denied_field2"}), null, "a2"); - core = new IndicesPermission.Core(group1, group2, group3, group4); - authzMap = core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "a2"), metaData); + core = new IndicesPermission(group1, group2, group3, group4); + authzMap = core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "a2"), metaData, fieldPermissionsCache); assertFalse(authzMap.get("a1").getFieldPermissions().hasFieldLevelSecurity()); assertFalse(authzMap.get("a2").getFieldPermissions().grantsAccessTo("denied_field2")); assertFalse(authzMap.get("a2").getFieldPermissions().grantsAccessTo("denied_field")); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java index 864c4a359df..e5ac91ce533 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; @@ -70,12 +69,11 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends ESTestCase { FieldPermissions(), singleton(new BytesArray("{\"match_all\" : {}}"))); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY); - IndicesQueriesRegistry indicesQueriesRegistry = 
mock(IndicesQueriesRegistry.class); Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); - final long nowInMillis = randomPositiveLong(); + final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, null, null, mapperService, null, - null, xContentRegistry(), indicesQueriesRegistry, client, null, () -> nowInMillis); + null, xContentRegistry(), client, null, () -> nowInMillis); QueryShardContext queryShardContext = spy(realQueryShardContext); QueryParseContext queryParseContext = mock(QueryParseContext.class); IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java index e8c1952a2c0..82fbd64c2d2 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java @@ -128,7 +128,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY); NamedAnalyzer namedAnalyzer = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()); IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, namedAnalyzer, namedAnalyzer, namedAnalyzer, - Collections.emptyMap()); + Collections.emptyMap(), Collections.emptyMap()); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); mapperService = new MapperService(indexSettings, indexAnalyzers, xContentRegistry(), similarityService, new 
IndicesModule(emptyList()).getMapperRegistry(), () -> null); @@ -814,8 +814,8 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase { public void testFailIfQueryUsesClient() throws Exception { Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); - final long nowInMillis = randomPositiveLong(); - QueryRewriteContext context = new QueryRewriteContext(null, mapperService, scriptService, xContentRegistry(), null, client, null, + final long nowInMillis = randomNonNegativeLong(); + QueryRewriteContext context = new QueryRewriteContext(null, mapperService, scriptService, xContentRegistry(), client, null, () -> nowInMillis); QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2"); SecurityIndexSearcherWrapper.failIfQueryUsesClient(scriptService, queryBuilder1, context); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/DefaultRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/DefaultRoleTests.java deleted file mode 100644 index 8d0d0797b81..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/DefaultRoleTests.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.license.GetLicenseAction; -import org.elasticsearch.xpack.security.action.user.AuthenticateRequestBuilder; -import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; -import org.elasticsearch.xpack.security.authc.esnative.NativeRealm; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.authc.file.FileRealm; -import org.elasticsearch.xpack.security.authc.ldap.LdapRealm; -import org.elasticsearch.xpack.security.authc.pki.PkiRealm; -import org.elasticsearch.xpack.security.user.User; -import org.elasticsearch.xpack.security.action.user.AuthenticateAction; -import org.elasticsearch.xpack.security.action.user.AuthenticateRequest; -import org.elasticsearch.xpack.security.action.user.ChangePasswordAction; -import org.elasticsearch.xpack.security.action.user.ChangePasswordRequest; -import org.elasticsearch.xpack.security.action.user.DeleteUserAction; -import org.elasticsearch.xpack.security.action.user.PutUserAction; -import org.elasticsearch.xpack.security.action.user.UserRequest; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; - -import java.util.Iterator; - -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.verifyZeroInteractions; -import static 
org.mockito.Mockito.when; - -/** - * Unit tests for the {@link DefaultRole} - */ -public class DefaultRoleTests extends ESTestCase { - - public void testDefaultRoleHasNoIndicesPrivileges() { - Iterator iter = DefaultRole.INSTANCE.indices().iterator(); - assertThat(iter.hasNext(), is(false)); - } - - public void testDefaultRoleHasNoRunAsPrivileges() { - assertThat(DefaultRole.INSTANCE.runAs().isEmpty(), is(true)); - } - - public void testDefaultRoleAllowsUser() { - final User user = new User("joe"); - final boolean changePasswordRequest = randomBoolean(); - final TransportRequest request = changePasswordRequest ? - new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request() : - new AuthenticateRequestBuilder(mock(Client.class)).username(user.principal()).request(); - final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; - final Authentication authentication = mock(Authentication.class); - final RealmRef authenticatedBy = mock(RealmRef.class); - when(authentication.getUser()).thenReturn(user); - when(authentication.getRunAsUser()).thenReturn(user); - when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); - when(authenticatedBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); - - assertThat(request, instanceOf(UserRequest.class)); - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(true)); - } - - public void testDefaultRoleDoesNotAllowNonMatchingUsername() { - final User user = new User("joe"); - final boolean changePasswordRequest = randomBoolean(); - final String username = randomFrom("", "joe" + randomAsciiOfLengthBetween(1, 5), randomAsciiOfLengthBetween(3, 10)); - final TransportRequest request = changePasswordRequest ? 
- new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : - new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); - final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; - final Authentication authentication = mock(Authentication.class); - final RealmRef authenticatedBy = mock(RealmRef.class); - when(authentication.getUser()).thenReturn(user); - when(authentication.getRunAsUser()).thenReturn(user); - when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); - when(authenticatedBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); - - assertThat(request, instanceOf(UserRequest.class)); - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(false)); - - final User user2 = new User("admin", new String[] { "bar" }, user); - when(authentication.getUser()).thenReturn(user2); - when(authentication.getRunAsUser()).thenReturn(user); - final RealmRef lookedUpBy = mock(RealmRef.class); - when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); - when(lookedUpBy.getType()) - .thenReturn(changePasswordRequest ? 
randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); - // this should still fail since the username is still different - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(false)); - - if (request instanceof ChangePasswordRequest) { - ((ChangePasswordRequest)request).username("joe"); - } else { - ((AuthenticateRequest)request).username("joe"); - } - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(true)); - } - - public void testDefaultRoleDoesNotAllowOtherActions() { - final User user = mock(User.class); - final TransportRequest request = mock(TransportRequest.class); - final String action = randomFrom(PutUserAction.NAME, DeleteUserAction.NAME, ClusterHealthAction.NAME, ClusterStateAction.NAME, - ClusterStatsAction.NAME, GetLicenseAction.NAME); - final Authentication authentication = mock(Authentication.class); - final RealmRef authenticatedBy = mock(RealmRef.class); - when(authentication.getUser()).thenReturn(user); - when(authentication.getRunAsUser()).thenReturn(randomBoolean() ? user : new User("runAs")); - when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); - when(authenticatedBy.getType()) - .thenReturn(randomAsciiOfLengthBetween(4, 12)); - - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(false)); - verifyZeroInteractions(user, request, authentication); - } - - public void testDefaultRoleWithRunAsChecksAuthenticatedBy() { - final String username = "joe"; - final User runAs = new User(username); - final User user = new User("admin", new String[] { "bar" }, runAs); - final boolean changePasswordRequest = randomBoolean(); - final TransportRequest request = changePasswordRequest ? - new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : - new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); - final String action = changePasswordRequest ? 
ChangePasswordAction.NAME : AuthenticateAction.NAME; - final Authentication authentication = mock(Authentication.class); - final RealmRef authenticatedBy = mock(RealmRef.class); - final RealmRef lookedUpBy = mock(RealmRef.class); - when(authentication.getUser()).thenReturn(user); - when(authentication.getRunAsUser()).thenReturn(runAs); - when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); - when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); - when(authentication.isRunAs()).thenReturn(true); - when(lookedUpBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealm.TYPE) : randomAsciiOfLengthBetween(4, 12)); - - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(true)); - - when(authentication.getRunAsUser()).thenReturn(user); - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(false)); - } - - public void testDefaultRoleDoesNotAllowChangePasswordForOtherRealms() { - final User user = new User("joe"); - final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request(); - final String action = ChangePasswordAction.NAME; - final Authentication authentication = mock(Authentication.class); - final RealmRef authenticatedBy = mock(RealmRef.class); - when(authentication.getUser()).thenReturn(user); - when(authentication.getRunAsUser()).thenReturn(user); - when(authentication.isRunAs()).thenReturn(false); - when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); - when(authenticatedBy.getType()).thenReturn(randomFrom(LdapRealm.LDAP_TYPE, FileRealm.TYPE, LdapRealm.AD_TYPE, PkiRealm.TYPE, - randomAsciiOfLengthBetween(4, 12))); - - assertThat(request, instanceOf(UserRequest.class)); - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(false)); - verify(authenticatedBy).getType(); - verify(authentication).getRunAsUser(); - 
verify(authentication).getAuthenticatedBy(); - verify(authentication).isRunAs(); - verifyNoMoreInteractions(authenticatedBy, authentication); - } - - public void testDefaultRoleDoesNotAllowChangePasswordForLookedUpByOtherRealms() { - final User runAs = new User("joe"); - final User user = new User("admin", new String[] { "bar" }, runAs); - final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(runAs.principal()).request(); - final String action = ChangePasswordAction.NAME; - final Authentication authentication = mock(Authentication.class); - final RealmRef authenticatedBy = mock(RealmRef.class); - final RealmRef lookedUpBy = mock(RealmRef.class); - when(authentication.getUser()).thenReturn(user); - when(authentication.getRunAsUser()).thenReturn(runAs); - when(authentication.isRunAs()).thenReturn(true); - when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); - when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); - when(lookedUpBy.getType()).thenReturn(randomFrom(LdapRealm.LDAP_TYPE, FileRealm.TYPE, LdapRealm.AD_TYPE, PkiRealm.TYPE, - randomAsciiOfLengthBetween(4, 12))); - - assertThat(request, instanceOf(UserRequest.class)); - assertThat(DefaultRole.INSTANCE.cluster().check(action, request, authentication), is(false)); - verify(authentication).getLookedUpBy(); - verify(authentication).getRunAsUser(); - verify(authentication).isRunAs(); - verify(lookedUpBy).getType(); - verifyNoMoreInteractions(authentication, lookedUpBy, authenticatedBy); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionTests.java index 2e35d8e49ce..2e5d8cc07e4 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionTests.java +++ 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionTests.java @@ -29,9 +29,8 @@ public class FieldPermissionTests extends ESTestCase { "\"except\": [\"f3\",\"f4\"]" + "}}]}"; RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), - new String[]{"f1", "f2", "f3", "f4"}); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray(), new String[]{"f3", "f4"}); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2", "f3", "f4" }); + assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] { "f3", "f4" }); q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " + "\"field_security\": {" + @@ -39,25 +38,24 @@ public class FieldPermissionTests extends ESTestCase { "\"grant\": [\"f1\", \"f2\", \"f3\", \"f4\"]" + "}}]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), - new String[]{"f1", "f2", "f3", "f4"}); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray(), new String[]{"f3", "f4"}); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2", "f3", "f4" }); + assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] { "f3", "f4" }); q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " + "\"field_security\": {" + "\"grant\": [\"f1\", \"f2\"]" + "}}]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{"f1", "f2"}); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray()); + 
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2" }); + assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " + "\"field_security\": {" + "\"grant\": []" + "}}]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{}); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray()); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {}); + assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " + "\"field_security\": {" + @@ -65,8 +63,8 @@ public class FieldPermissionTests extends ESTestCase { "\"grant\": []" + "}}]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{}); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray(), new String[]{}); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {}); + assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] {}); final String exceptWithoutGrant = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " + "\"field_security\": {" + @@ -122,10 +120,10 @@ public class FieldPermissionTests extends ESTestCase { " \"except\": [\"f2\"]}," + "\"privileges\": [\"p3\"]}]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{}); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray()); - assertArrayEquals(rd.getIndicesPrivileges()[1].getFieldPermissions().getGrantedFieldsArray(), new String[]{"*"}); - 
assertArrayEquals(rd.getIndicesPrivileges()[1].getFieldPermissions().getDeniedFieldsArray(), new String[]{"f2"}); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {}); + assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); + assertArrayEquals(rd.getIndicesPrivileges()[1].getGrantedFields(), new String[] {"*"}); + assertArrayEquals(rd.getIndicesPrivileges()[1].getDeniedFields(), new String[] {"f2"}); } // test old syntax for field permissions @@ -134,8 +132,8 @@ public class FieldPermissionTests extends ESTestCase { "\"fields\": [\"f1\", \"f2\"]" + "}]}"; RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), true); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{"f1", "f2"}); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray()); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[]{"f1", "f2"}); + assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); final String failingQuery = q; ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray @@ -147,8 +145,8 @@ public class FieldPermissionTests extends ESTestCase { "\"fields\": []" + "}]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), true); - assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{}); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray()); + assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[]{}); + assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); final String failingQuery2 = q; e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray (failingQuery2), false)); @@ -159,76 +157,14 @@ public class FieldPermissionTests extends ESTestCase { "\"fields\": null" + "}]}"; rd = 
RoleDescriptor.parse("test", new BytesArray(q), true); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray()); - assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray()); + assertNull(rd.getIndicesPrivileges()[0].getGrantedFields()); + assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); final String failingQuery3 = q; e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray(failingQuery3), false)); assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" + ", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead")); } - public void testMergeFieldPermissions() { - String allowedPrefix1 = randomAsciiOfLength(5); - String allowedPrefix2 = randomAsciiOfLength(5); - String[] allowed1 = new String[]{allowedPrefix1 + "*"}; - String[] allowed2 = new String[]{allowedPrefix2 + "*"}; - String[] denied1 = new String[]{allowedPrefix1 + "a"}; - String[] denied2 = new String[]{allowedPrefix2 + "a"}; - FieldPermissions fieldPermissions1 = new FieldPermissions(allowed1, denied1); - FieldPermissions fieldPermissions2 = new FieldPermissions(allowed2, denied2); - FieldPermissions mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2); - assertTrue(mergedFieldPermissions.grantsAccessTo(allowedPrefix1 + "b")); - assertTrue(mergedFieldPermissions.grantsAccessTo(allowedPrefix2 + "b")); - assertFalse(mergedFieldPermissions.grantsAccessTo(denied1[0])); - assertFalse(mergedFieldPermissions.grantsAccessTo(denied2[0])); - - allowed1 = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; - allowed2 = null; - denied1 = new String[]{allowed1[0] + "a", allowed1[1] + "a"}; - denied2 = null; - fieldPermissions1 = new FieldPermissions(allowed1, denied1); - fieldPermissions2 = new FieldPermissions(allowed2, denied2); - mergedFieldPermissions = 
FieldPermissions.merge(fieldPermissions1, fieldPermissions2); - assertFalse(mergedFieldPermissions.hasFieldLevelSecurity()); - - allowed1 = new String[]{}; - allowed2 = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; - denied1 = new String[]{}; - denied2 = new String[]{allowed2[0] + "a", allowed2[1] + "a"}; - fieldPermissions1 = new FieldPermissions(allowed1, denied1); - fieldPermissions2 = new FieldPermissions(allowed2, denied2); - mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2); - for (String field : allowed2) { - assertTrue(mergedFieldPermissions.grantsAccessTo(field)); - } - for (String field : denied2) { - assertFalse(mergedFieldPermissions.grantsAccessTo(field)); - } - - allowed1 = randomBoolean() ? null : new String[]{"*"}; - allowed2 = randomBoolean() ? null : new String[]{"*"}; - denied1 = new String[]{"a"}; - denied2 = new String[]{"b"}; - fieldPermissions1 = new FieldPermissions(allowed1, denied1); - fieldPermissions2 = new FieldPermissions(allowed2, denied2); - mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2); - assertTrue(mergedFieldPermissions.grantsAccessTo("a")); - assertTrue(mergedFieldPermissions.grantsAccessTo("b")); - - // test merge does not remove _all - allowed1 = new String[]{"_all"}; - allowed2 = new String[]{}; - denied1 = null; - denied2 = null; - fieldPermissions1 = new FieldPermissions(allowed1, denied1); - assertTrue(fieldPermissions1.allFieldIsAllowed); - fieldPermissions2 = new FieldPermissions(allowed2, denied2); - assertFalse(fieldPermissions2.allFieldIsAllowed); - mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2); - assertTrue(mergedFieldPermissions.grantsAccessTo("_all")); - assertTrue(mergedFieldPermissions.allFieldIsAllowed); - } - public void testFieldPermissionsStreaming() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); String[] allowed = new 
String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; @@ -245,10 +181,7 @@ public class FieldPermissionTests extends ESTestCase { public void testFieldPermissionsHashCodeThreadSafe() throws Exception { final int numThreads = scaledRandomIntBetween(4, 16); - final FieldPermissions fieldPermissions = randomBoolean() ? - new FieldPermissions(new String[] { "*" }, new String[] { "foo" }) : - FieldPermissions.merge(new FieldPermissions(new String[] { "f*" }, new String[] { "foo" }), - new FieldPermissions(new String[] { "b*" }, new String[] { "bar" })); + final FieldPermissions fieldPermissions = new FieldPermissions(new String[] { "*" }, new String[] { "foo" }); final CountDownLatch latch = new CountDownLatch(numThreads + 1); final AtomicReferenceArray hashCodes = new AtomicReferenceArray<>(numThreads); List threads = new ArrayList<>(numThreads); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCacheTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCacheTests.java new file mode 100644 index 00000000000..9b32ffe8340 --- /dev/null +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCacheTests.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authz.permission; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +public class FieldPermissionsCacheTests extends ESTestCase { + + public void testFieldPermissionsCaching() { + FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + String[] allowed = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; + String[] denied = new String[]{allowed[0] + randomAsciiOfLength(5), allowed[1] + randomAsciiOfLength(5), + allowed[2] + randomAsciiOfLength(5)}; + FieldPermissions fieldPermissions = fieldPermissionsCache.getFieldPermissions(allowed, denied); + assertNotNull(fieldPermissions); + final String[] allowed2 = randomBoolean() ? allowed : Arrays.copyOf(allowed, allowed.length); + final String[] denied2 = randomBoolean() ? denied : Arrays.copyOf(denied, denied.length); + assertSame(fieldPermissions, fieldPermissionsCache.getFieldPermissions(allowed2, denied2)); + } + + public void testMergeFieldPermissions() { + FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + String allowedPrefix1 = randomAsciiOfLength(5); + String allowedPrefix2 = randomAsciiOfLength(5); + String[] allowed1 = new String[]{allowedPrefix1 + "*"}; + String[] allowed2 = new String[]{allowedPrefix2 + "*"}; + String[] denied1 = new String[]{allowedPrefix1 + "a"}; + String[] denied2 = new String[]{allowedPrefix2 + "a"}; + FieldPermissions fieldPermissions1 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed1, denied1) : + new FieldPermissions(allowed1, denied1); + FieldPermissions fieldPermissions2 = randomBoolean() ? 
fieldPermissionsCache.getFieldPermissions(allowed2, denied2) : + new FieldPermissions(allowed2, denied2); + FieldPermissions mergedFieldPermissions = + fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2)); + assertTrue(mergedFieldPermissions.grantsAccessTo(allowedPrefix1 + "b")); + assertTrue(mergedFieldPermissions.grantsAccessTo(allowedPrefix2 + "b")); + assertFalse(mergedFieldPermissions.grantsAccessTo(denied1[0])); + assertFalse(mergedFieldPermissions.grantsAccessTo(denied2[0])); + + allowed1 = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; + allowed2 = null; + denied1 = new String[]{allowed1[0] + "a", allowed1[1] + "a"}; + denied2 = null; + fieldPermissions1 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed1, denied1) : + new FieldPermissions(allowed1, denied1); + fieldPermissions2 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed2, denied2) : + new FieldPermissions(allowed2, denied2); + mergedFieldPermissions = + fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2)); + assertFalse(mergedFieldPermissions.hasFieldLevelSecurity()); + + allowed1 = new String[]{}; + allowed2 = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"}; + denied1 = new String[]{}; + denied2 = new String[]{allowed2[0] + "a", allowed2[1] + "a"}; + fieldPermissions1 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed1, denied1) : + new FieldPermissions(allowed1, denied1); + fieldPermissions2 = randomBoolean() ? 
fieldPermissionsCache.getFieldPermissions(allowed2, denied2) : + new FieldPermissions(allowed2, denied2); + mergedFieldPermissions = + fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2)); + for (String field : allowed2) { + assertTrue(mergedFieldPermissions.grantsAccessTo(field)); + } + for (String field : denied2) { + assertFalse(mergedFieldPermissions.grantsAccessTo(field)); + } + + allowed1 = randomBoolean() ? null : new String[]{"*"}; + allowed2 = randomBoolean() ? null : new String[]{"*"}; + denied1 = new String[]{"a"}; + denied2 = new String[]{"b"}; + fieldPermissions1 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed1, denied1) : + new FieldPermissions(allowed1, denied1); + fieldPermissions2 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed2, denied2) : + new FieldPermissions(allowed2, denied2); + mergedFieldPermissions = + fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2)); + assertTrue(mergedFieldPermissions.grantsAccessTo("a")); + assertTrue(mergedFieldPermissions.grantsAccessTo("b")); + + // test merge does not remove _all + allowed1 = new String[]{"_all"}; + allowed2 = new String[]{}; + denied1 = null; + denied2 = null; + fieldPermissions1 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed1, denied1) : + new FieldPermissions(allowed1, denied1); + fieldPermissions2 = randomBoolean() ? 
fieldPermissionsCache.getFieldPermissions(allowed2, denied2) : + new FieldPermissions(allowed2, denied2); + mergedFieldPermissions = + fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2)); + assertTrue(fieldPermissions1.isAllFieldIsAllowed()); + assertFalse(fieldPermissions2.isAllFieldIsAllowed()); + assertTrue(mergedFieldPermissions.grantsAccessTo("_all")); + assertTrue(mergedFieldPermissions.isAllFieldIsAllowed()); + + allowed1 = new String[] { "a*" }; + allowed2 = new String[] { "b*" }; + denied1 = new String[] { "aa*" }; + denied2 = null; + fieldPermissions1 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed1, denied1) : + new FieldPermissions(allowed1, denied1); + fieldPermissions2 = randomBoolean() ? fieldPermissionsCache.getFieldPermissions(allowed2, denied2) : + new FieldPermissions(allowed2, denied2); + mergedFieldPermissions = + fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2)); + assertTrue(mergedFieldPermissions.grantsAccessTo("a")); + assertTrue(mergedFieldPermissions.grantsAccessTo("b")); + assertFalse(mergedFieldPermissions.grantsAccessTo("aa")); + assertFalse(mergedFieldPermissions.grantsAccessTo("aa1")); + assertTrue(mergedFieldPermissions.grantsAccessTo("a1")); + } +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/IngestAdminRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/IngestAdminRoleTests.java deleted file mode 100644 index efb4d61ae84..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/IngestAdminRoleTests.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.ingest.DeletePipelineAction; -import org.elasticsearch.action.ingest.GetPipelineAction; -import org.elasticsearch.action.ingest.PutPipelineAction; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; -import org.elasticsearch.xpack.security.authc.Authentication; - -import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -public class IngestAdminRoleTests extends ESTestCase { - - public void testClusterPermissions() { - final TransportRequest request = new TransportRequest.Empty(); - final Authentication authentication = mock(Authentication.class); - assertThat(IngestAdminRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); - assertThat(IngestAdminRole.INSTANCE.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(true)); - assertThat(IngestAdminRole.INSTANCE.cluster().check(DeleteIndexTemplateAction.NAME, request, authentication), is(true)); - assertThat(IngestAdminRole.INSTANCE.cluster().check(PutPipelineAction.NAME, request, authentication), is(true)); - assertThat(IngestAdminRole.INSTANCE.cluster().check(GetPipelineAction.NAME, request, authentication), is(true)); - 
assertThat(IngestAdminRole.INSTANCE.cluster().check(DeletePipelineAction.NAME, request, authentication), is(true)); - - - assertThat(IngestAdminRole.INSTANCE.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); - assertThat(IngestAdminRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); - assertThat(IngestAdminRole.INSTANCE.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); - } - - public void testNoIndicesPermissions() { - assertThat(IngestAdminRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); - assertThat(IngestAdminRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), - is(false)); - assertThat(IngestAdminRole.INSTANCE.indices().allowedIndicesMatcher(GetAction.NAME).test(randomAsciiOfLengthBetween(8, 24)), - is(false)); - } - - public void testHasReservedMetadata() { - assertThat(IngestAdminRole.DESCRIPTOR.getMetadata(), hasEntry("_reserved", true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/KibanaRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/KibanaRoleTests.java deleted file mode 100644 index 451257a36f6..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/KibanaRoleTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; - -import java.util.Arrays; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -/** - * Tests for the kibana role - */ -public class KibanaRoleTests extends ESTestCase { - - public void testCluster() { - final TransportRequest request = new TransportRequest.Empty(); - final Authentication authentication = mock(Authentication.class); - assertThat(KibanaRole.INSTANCE.cluster().check(ClusterHealthAction.NAME, request, authentication), is(true)); - assertThat(KibanaRole.INSTANCE.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(KibanaRole.INSTANCE.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); - assertThat(KibanaRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(false)); - assertThat(KibanaRole.INSTANCE.cluster().check(ClusterRerouteAction.NAME, 
request, authentication), is(false)); - assertThat(KibanaRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); - assertThat(KibanaRole.INSTANCE.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); - } - - public void testRunAs() { - assertThat(KibanaRole.INSTANCE.runAs().isEmpty(), is(true)); - } - - public void testUnauthorizedIndices() { - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), is(false)); - } - - public void testKibanaIndices() { - Arrays.asList(".kibana", ".kibana-devnull").forEach(this::testAllIndexAccess); - } - - public void testReportingIndices() { - testAllIndexAccess(".reporting-" + randomAsciiOfLength(randomIntBetween(0, 13))); - } - - private void testAllIndexAccess(String index) { - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher("indices:bar").test(index), is(true)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - assertThat(KibanaRole.INSTANCE.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/KibanaUserRoleTests.java 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/KibanaUserRoleTests.java deleted file mode 100644 index 36ba19047e6..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/KibanaUserRoleTests.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.search.MultiSearchAction; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; - -import java.util.Arrays; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -public class KibanaUserRoleTests extends ESTestCase { - - public void testCluster() { - final Authentication authentication = 
mock(Authentication.class); - final TransportRequest request = new TransportRequest.Empty(); - assertThat(KibanaUserRole.INSTANCE.cluster().check(ClusterHealthAction.NAME, request, authentication), is(true)); - assertThat(KibanaUserRole.INSTANCE.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(KibanaUserRole.INSTANCE.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); - assertThat(KibanaUserRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(false)); - assertThat(KibanaUserRole.INSTANCE.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); - assertThat(KibanaUserRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); - assertThat(KibanaUserRole.INSTANCE.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); - } - - public void testRunAs() { - assertThat(KibanaUserRole.INSTANCE.runAs().isEmpty(), is(true)); - } - - public void testUnauthorizedIndices() { - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo") - .test(randomAsciiOfLengthBetween(8, 24)), is(false)); - } - - public void testKibanaIndices() { - Arrays.asList(".kibana", ".kibana-devnull").forEach(this::testIndexAccess); - } - - private void testIndexAccess(String index) { - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); - - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - 
assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); - assertThat(KibanaUserRole.INSTANCE.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/MonitoringUserRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/MonitoringUserRoleTests.java deleted file mode 100644 index d721eb8678b..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/MonitoringUserRoleTests.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; -import org.elasticsearch.xpack.security.authc.Authentication; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -/** - * Tests for the builtin monitoring user - */ -public class MonitoringUserRoleTests extends ESTestCase { - - public void testCluster() { - final TransportRequest request = new TransportRequest.Empty(); - final Authentication authentication = mock(Authentication.class); - assertThat(MonitoringUserRole.INSTANCE.cluster().check(ClusterHealthAction.NAME, request, authentication), is(false)); - assertThat(MonitoringUserRole.INSTANCE.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(MonitoringUserRole.INSTANCE.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); - 
assertThat(MonitoringUserRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(false)); - assertThat(MonitoringUserRole.INSTANCE.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); - assertThat(MonitoringUserRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); - assertThat(MonitoringUserRole.INSTANCE.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); - } - - public void testRunAs() { - assertThat(MonitoringUserRole.INSTANCE.runAs().isEmpty(), is(true)); - } - - public void testUnauthorizedIndices() { - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), - is(false)); - } - - public void testMonitoringIndices() { - testReadAccess(".monitoring-" + randomAsciiOfLength(randomIntBetween(0, 13))); - testReadAccess(".marvel-es-" + randomAsciiOfLength(randomIntBetween(0, 13))); - } - - private void testReadAccess(String index) { - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(false)); - 
assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(MonitoringUserRole.INSTANCE.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/PermissionTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/PermissionTests.java index f940af3fc48..f2f9bda5e8f 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/PermissionTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/PermissionTests.java @@ -6,20 +6,14 @@ package org.elasticsearch.xpack.security.authz.permission; import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.security.authz.privilege.GeneralPrivilege; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authz.privilege.Privilege; import org.junit.Before; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; import java.util.function.Predicate; import static org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege.MONITOR; import static org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege.READ; -import static org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege.union; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -29,9 +23,9 @@ public class PermissionTests extends ESTestCase { @Before public void init() { Role.Builder builder = 
Role.builder("test"); - builder.add(union(MONITOR), "test_*", "/foo.*/"); - builder.add(union(READ), "baz_*foo", "/fool.*bar/"); - builder.add(union(MONITOR), "/bar.*/"); + builder.add(MONITOR, "test_*", "/foo.*/"); + builder.add(READ, "baz_*foo", "/fool.*bar/"); + builder.add(MONITOR, "/bar.*/"); permission = builder.build(); } @@ -45,23 +39,6 @@ public class PermissionTests extends ESTestCase { assertThat(matcher1, is(matcher2)); } - public void testIndicesGlobalsIterator() { - Role.Builder builder = Role.builder("tc_role"); - builder.cluster(ClusterPrivilege.action("cluster:monitor/nodes/info")); - Role noIndicesPermission = builder.build(); - - IndicesPermission.Globals indicesGlobals = new IndicesPermission.Globals( - Collections.unmodifiableList(Arrays.asList(noIndicesPermission, permission))); - Iterator iterator = indicesGlobals.iterator(); - assertThat(iterator.hasNext(), is(equalTo(true))); - int count = 0; - while (iterator.hasNext()) { - iterator.next(); - count++; - } - assertThat(count, is(equalTo(permission.indices().groups().length))); - } - public void testBuildEmptyRole() { Role.Builder permission = Role.builder("some_role"); Role role = permission.build(); @@ -73,7 +50,7 @@ public class PermissionTests extends ESTestCase { public void testRunAs() { Role permission = Role.builder("some_role") - .runAs(new GeneralPrivilege("name", "user1", "run*")) + .runAs(new Privilege("name", "user1", "run*")) .build(); assertThat(permission.runAs().check("user1"), is(true)); assertThat(permission.runAs().check("user"), is(false)); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/RemoteMonitoringAgentRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/RemoteMonitoringAgentRoleTests.java deleted file mode 100644 index a450e972212..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/RemoteMonitoringAgentRoleTests.java +++ /dev/null @@ 
-1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.get.GetIndexAction; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; -import org.elasticsearch.xpack.security.authc.Authentication; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -/** - * Tests for the remote monitoring agent role - */ -public class RemoteMonitoringAgentRoleTests extends ESTestCase { - - public void testCluster() { - final TransportRequest request = new TransportRequest.Empty(); - final Authentication authentication = mock(Authentication.class); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(ClusterHealthAction.NAME, request, 
authentication), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), - is(false)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); - } - - public void testRunAs() { - assertThat(RemoteMonitoringAgentRole.INSTANCE.runAs().isEmpty(), is(true)); - } - - public void testUnauthorizedIndices() { - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(false)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo") - .test(randomAsciiOfLengthBetween(8, 24)), is(false)); - } - - public void testKibanaIndices() { - testAllIndexAccess(".monitoring-" + randomAsciiOfLength(randomIntBetween(0, 13))); - testAllIndexAccess(".marvel-es-" + randomAsciiOfLength(randomIntBetween(0, 13))); - } - - private void testAllIndexAccess(String index) { - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher("indices:bar").test(index), 
is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); - assertThat(RemoteMonitoringAgentRole.INSTANCE.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/ReportingUserRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/ReportingUserRoleTests.java deleted file mode 100644 index 34544b51a45..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/ReportingUserRoleTests.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.update.UpdateAction; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; -import org.elasticsearch.xpack.security.authc.Authentication; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -/** - * Unit tests for the built in reporting user role - */ -public class ReportingUserRoleTests extends ESTestCase { - - public void testCluster() { - final TransportRequest request = new TransportRequest.Empty(); - final Authentication authentication = mock(Authentication.class); - assertThat(ReportingUserRole.INSTANCE.cluster().check(ClusterHealthAction.NAME, request, authentication), is(false)); - assertThat(ReportingUserRole.INSTANCE.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(ReportingUserRole.INSTANCE.cluster().check(ClusterStatsAction.NAME, 
request, authentication), is(false)); - assertThat(ReportingUserRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(false)); - assertThat(ReportingUserRole.INSTANCE.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); - assertThat(ReportingUserRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); - assertThat(ReportingUserRole.INSTANCE.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); - } - - public void testRunAs() { - assertThat(ReportingUserRole.INSTANCE.runAs().isEmpty(), is(true)); - } - - public void testUnauthorizedIndices() { - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), - is(false)); - } - - public void testReadWriteAccess() { - final String index = ".reporting-" + randomAsciiOfLength(randomIntBetween(0, 13)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); - 
assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(UpdateAction.NAME).test(index), is(true)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - assertThat(ReportingUserRole.INSTANCE.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/SuperuserRoleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/SuperuserRoleTests.java deleted file mode 100644 index 3545fc12d9d..00000000000 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/permission/SuperuserRoleTests.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authz.permission; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.cluster.metadata.AliasMetaData; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.xpack.security.action.role.PutRoleAction; -import org.elasticsearch.xpack.security.action.user.PutUserAction; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl; -import org.elasticsearch.xpack.security.user.User; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.TransportRequest; - -import java.util.Map; - -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Tests for the superuser role - */ -public class SuperuserRoleTests extends ESTestCase { - - public void testCluster() { - final User user = new User("joe", SuperuserRole.NAME); - final Authentication authentication = mock(Authentication.class); - when(authentication.getUser()).thenReturn(user); - final TransportRequest request = new TransportRequest.Empty(); - - assertThat(SuperuserRole.INSTANCE.cluster().check(ClusterHealthAction.NAME, request, authentication), is(true)); - 
assertThat(SuperuserRole.INSTANCE.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(true)); - assertThat(SuperuserRole.INSTANCE.cluster().check(PutUserAction.NAME, request, authentication), is(true)); - assertThat(SuperuserRole.INSTANCE.cluster().check(PutRoleAction.NAME, request, authentication), is(true)); - assertThat(SuperuserRole.INSTANCE.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); - assertThat(SuperuserRole.INSTANCE.cluster().check("internal:admin/foo", request, authentication), is(false)); - } - - public void testIndices() { - final Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); - final MetaData metaData = new MetaData.Builder() - .put(new IndexMetaData.Builder("a1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder("a2").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder("aaaaaa").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder("bbbbb").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder("b") - .settings(indexSettings) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(new AliasMetaData.Builder("ab").build()) - .putAlias(new AliasMetaData.Builder("ba").build()) - .build(), true) - .build(); - - Map authzMap = - SuperuserRole.INSTANCE.indices().authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData); - assertThat(authzMap.get("a1").isGranted(), is(true)); - assertThat(authzMap.get("b").isGranted(), is(true)); - authzMap = SuperuserRole.INSTANCE.indices().authorize(DeleteIndexAction.NAME, Sets.newHashSet("a1", "ba"), metaData); - assertThat(authzMap.get("a1").isGranted(), is(true)); - assertThat(authzMap.get("b").isGranted(), is(true)); - authzMap = 
SuperuserRole.INSTANCE.indices().authorize(IndexAction.NAME, Sets.newHashSet("a2", "ba"), metaData); - assertThat(authzMap.get("a2").isGranted(), is(true)); - assertThat(authzMap.get("b").isGranted(), is(true)); - authzMap = SuperuserRole.INSTANCE.indices().authorize(UpdateSettingsAction.NAME, Sets.newHashSet("aaaaaa", "ba"), metaData); - assertThat(authzMap.get("aaaaaa").isGranted(), is(true)); - assertThat(authzMap.get("b").isGranted(), is(true)); - assertTrue(SuperuserRole.INSTANCE.indices().check(SearchAction.NAME)); - assertFalse(SuperuserRole.INSTANCE.indices().check("unknown")); - } - - public void testRunAs() { - assertThat(SuperuserRole.INSTANCE.runAs().check(randomAsciiOfLengthBetween(1, 30)), is(true)); - } -} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/privilege/PrivilegeTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/privilege/PrivilegeTests.java index a8e48882f3b..d3bedd7d62c 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/privilege/PrivilegeTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/privilege/PrivilegeTests.java @@ -5,20 +5,16 @@ */ package org.elasticsearch.xpack.security.authz.privilege; -import org.elasticsearch.action.ingest.DeletePipelineAction; -import org.elasticsearch.action.ingest.GetPipelineAction; -import org.elasticsearch.action.ingest.PutPipelineAction; -import org.elasticsearch.action.ingest.SimulatePipelineAction; -import org.elasticsearch.xpack.security.support.AutomatonPredicate; +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.security.support.Automatons; import org.elasticsearch.test.ESTestCase; import org.junit.Rule; import org.junit.rules.ExpectedException; +import java.util.Set; import java.util.function.Predicate; -import static org.hamcrest.Matchers.containsString; -import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -26,24 +22,8 @@ public class PrivilegeTests extends ESTestCase { @Rule public ExpectedException thrown = ExpectedException.none(); - public void testName() throws Exception { - Privilege.Name name12 = new Privilege.Name("name1", "name2"); - Privilege.Name name34 = new Privilege.Name("name3", "name4"); - Privilege.Name name1234 = randomBoolean() ? name12.add(name34) : name34.add(name12); - assertThat(name1234, equalTo(new Privilege.Name("name1", "name2", "name3", "name4"))); - - Privilege.Name name1 = name12.remove(new Privilege.Name("name2")); - assertThat(name1, equalTo(new Privilege.Name("name1"))); - - Privilege.Name name = name1.remove(new Privilege.Name("name1")); - assertThat(name, is(Privilege.Name.NONE)); - - Privilege.Name none = new Privilege.Name("name1", "name2", "none").remove(name12); - assertThat(none, is(Privilege.Name.NONE)); - } - public void testSubActionPattern() throws Exception { - AutomatonPredicate predicate = new AutomatonPredicate(Automatons.patterns("foo*")); + Predicate predicate = Automatons.predicate("foo*"); assertThat(predicate.test("foo[n][nodes]"), is(true)); assertThat(predicate.test("foo[n]"), is(true)); assertThat(predicate.test("bar[n][nodes]"), is(false)); @@ -51,36 +31,36 @@ public class PrivilegeTests extends ESTestCase { } public void testCluster() throws Exception { - Privilege.Name name = new Privilege.Name("monitor"); + Set name = Sets.newHashSet("monitor"); ClusterPrivilege cluster = ClusterPrivilege.get(name); assertThat(cluster, is(ClusterPrivilege.MONITOR)); - // since "all" implies "monitor", this should collapse to All - name = new Privilege.Name("monitor", "all"); + // since "all" implies "monitor", this should be the same language as All + name = Sets.newHashSet("monitor", "all"); cluster = ClusterPrivilege.get(name); - assertThat(cluster, is(ClusterPrivilege.ALL)); + 
assertTrue(Operations.sameLanguage(ClusterPrivilege.ALL.automaton, cluster.automaton)); - name = new Privilege.Name("monitor", "none"); + name = Sets.newHashSet("monitor", "none"); cluster = ClusterPrivilege.get(name); - assertThat(cluster, is(ClusterPrivilege.MONITOR)); + assertTrue(Operations.sameLanguage(ClusterPrivilege.MONITOR.automaton, cluster.automaton)); - Privilege.Name name2 = new Privilege.Name("none", "monitor"); + Set name2 = Sets.newHashSet("none", "monitor"); ClusterPrivilege cluster2 = ClusterPrivilege.get(name2); assertThat(cluster, is(cluster2)); } public void testClusterTemplateActions() throws Exception { - Privilege.Name name = new Privilege.Name("indices:admin/template/delete"); + Set name = Sets.newHashSet("indices:admin/template/delete"); ClusterPrivilege cluster = ClusterPrivilege.get(name); assertThat(cluster, notNullValue()); assertThat(cluster.predicate().test("indices:admin/template/delete"), is(true)); - name = new Privilege.Name("indices:admin/template/get"); + name = Sets.newHashSet("indices:admin/template/get"); cluster = ClusterPrivilege.get(name); assertThat(cluster, notNullValue()); assertThat(cluster.predicate().test("indices:admin/template/get"), is(true)); - name = new Privilege.Name("indices:admin/template/put"); + name = Sets.newHashSet("indices:admin/template/put"); cluster = ClusterPrivilege.get(name); assertThat(cluster, notNullValue()); assertThat(cluster.predicate().test("indices:admin/template/put"), is(true)); @@ -88,56 +68,20 @@ public class PrivilegeTests extends ESTestCase { public void testClusterInvalidName() throws Exception { thrown.expect(IllegalArgumentException.class); - Privilege.Name actionName = new Privilege.Name("foobar"); + Set actionName = Sets.newHashSet("foobar"); ClusterPrivilege.get(actionName); } public void testClusterAction() throws Exception { - Privilege.Name actionName = new Privilege.Name("cluster:admin/snapshot/delete"); + Set actionName = Sets.newHashSet("cluster:admin/snapshot/delete"); 
ClusterPrivilege cluster = ClusterPrivilege.get(actionName); assertThat(cluster, notNullValue()); assertThat(cluster.predicate().test("cluster:admin/snapshot/delete"), is(true)); assertThat(cluster.predicate().test("cluster:admin/snapshot/dele"), is(false)); } - public void testClusterAddCustom() throws Exception { - ClusterPrivilege.addCustom("foo", "cluster:bar"); - boolean found = false; - for (ClusterPrivilege cluster : ClusterPrivilege.values()) { - if ("foo".equals(cluster.name.toString())) { - found = true; - assertThat(cluster.predicate().test("cluster:bar"), is(true)); - } - } - assertThat(found, is(true)); - ClusterPrivilege cluster = ClusterPrivilege.get(new Privilege.Name("foo")); - assertThat(cluster, notNullValue()); - assertThat(cluster.name().toString(), is("foo")); - assertThat(cluster.predicate().test("cluster:bar"), is(true)); - } - - public void testClusterAddCustomInvalidPattern() throws Exception { - try { - ClusterPrivilege.addCustom("foo", "bar"); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("cannot register custom cluster privilege [foo]")); - assertThat(e.getMessage(), containsString("must follow the 'cluster:*' format")); - } - } - - public void testClusterAddCustomAlreadyExists() throws Exception { - try { - ClusterPrivilege.addCustom("all", "bar"); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("cannot register custom cluster privilege [all]")); - assertThat(e.getMessage(), containsString("must follow the 'cluster:*' format")); - } - } - public void testIndexAction() throws Exception { - Privilege.Name actionName = new Privilege.Name("indices:admin/mapping/delete"); + Set actionName = Sets.newHashSet("indices:admin/mapping/delete"); IndexPrivilege index = IndexPrivilege.get(actionName); assertThat(index, notNullValue()); 
assertThat(index.predicate().test("indices:admin/mapping/delete"), is(true)); @@ -145,87 +89,20 @@ public class PrivilegeTests extends ESTestCase { } public void testIndexCollapse() throws Exception { - IndexPrivilege[] values = IndexPrivilege.values().toArray(new IndexPrivilege[IndexPrivilege.values().size()]); + IndexPrivilege[] values = IndexPrivilege.values().values().toArray(new IndexPrivilege[IndexPrivilege.values().size()]); IndexPrivilege first = values[randomIntBetween(0, values.length-1)]; IndexPrivilege second = values[randomIntBetween(0, values.length-1)]; - Privilege.Name name = new Privilege.Name(first.name().toString(), second.name().toString()); + Set name = Sets.newHashSet(first.name().iterator().next(), second.name().iterator().next()); IndexPrivilege index = IndexPrivilege.get(name); - if (first.implies(second)) { - assertThat(index, is(first)); - } - - if (second.implies(first)) { - assertThat(index, is(second)); - } - } - - public void testIndexImplies() throws Exception { - IndexPrivilege[] values = IndexPrivilege.values().toArray(new IndexPrivilege[IndexPrivilege.values().size()]); - IndexPrivilege first = values[randomIntBetween(0, values.length-1)]; - IndexPrivilege second = values[randomIntBetween(0, values.length-1)]; - - Privilege.Name name = new Privilege.Name(first.name().toString(), second.name().toString()); - IndexPrivilege index = IndexPrivilege.get(name); - - assertThat(index.implies(first), is(true)); - assertThat(index.implies(second), is(true)); - - if (first.implies(second)) { - assertThat(index, is(first)); - } - - if (second.implies(first)) { - if (index != second) { - IndexPrivilege idx = IndexPrivilege.get(name); - idx.name().toString(); - } - assertThat(index, is(second)); - } - - for (IndexPrivilege other : IndexPrivilege.values()) { - if (first.implies(other) || second.implies(other) || index.isAlias(other)) { - assertThat("index privilege [" + index + "] should imply [" + other + "]", index.implies(other), is(true)); - 
} else if (other.implies(first) && other.implies(second)) { - assertThat("index privilege [" + index + "] should not imply [" + other + "]", index.implies(other), is(false)); - } - } - } - - public void testIndexAddCustom() throws Exception { - IndexPrivilege.addCustom("foo", "indices:bar"); - boolean found = false; - for (IndexPrivilege index : IndexPrivilege.values()) { - if ("foo".equals(index.name.toString())) { - found = true; - assertThat(index.predicate().test("indices:bar"), is(true)); - } - } - assertThat(found, is(true)); - IndexPrivilege index = IndexPrivilege.get(new Privilege.Name("foo")); - assertThat(index, notNullValue()); - assertThat(index.name().toString(), is("foo")); - assertThat(index.predicate().test("indices:bar"), is(true)); - } - - public void testIndexAddCustomInvalidPattern() throws Exception { - try { - IndexPrivilege.addCustom("foo", "bar"); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("cannot register custom index privilege [foo]")); - assertThat(e.getMessage(), containsString("must follow the 'indices:*' format")); - } - } - - public void testIndexAddCustomAlreadyExists() throws Exception { - try { - IndexPrivilege.addCustom("all", "bar"); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("cannot register custom index privilege [all]")); - assertThat(e.getMessage(), containsString("must follow the 'indices:*' format")); + if (Operations.subsetOf(second.getAutomaton(), first.getAutomaton())) { + assertTrue(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); + } else if (Operations.subsetOf(first.getAutomaton(), second.getAutomaton())) { + assertTrue(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + } else { + assertFalse(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); + 
assertFalse(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java new file mode 100644 index 00000000000..ecb7111df93 --- /dev/null +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authz.store; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; + +import java.util.Collections; +import java.util.Set; + +import static org.elasticsearch.mock.orig.Mockito.times; +import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anySetOf; +import static org.mockito.Matchers.eq; +import static org.mockito.Matchers.isA; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static 
org.mockito.Mockito.when; + +public class CompositeRolesStoreTests extends ESTestCase { + + public void testRolesWhenDlsFlsUnlicensed() { + XPackLicenseState licenseState = mock(XPackLicenseState.class); + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); + RoleDescriptor flsRole = new RoleDescriptor("fls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .grantedFields("*") + .deniedFields("foo") + .indices("*") + .privileges("read") + .build() + }, null); + RoleDescriptor dlsRole = new RoleDescriptor("dls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() + }, null); + RoleDescriptor flsDlsRole = new RoleDescriptor("fls_dls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .grantedFields("*") + .deniedFields("foo") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() + }, null); + RoleDescriptor noFlsDlsRole = new RoleDescriptor("no_fls_dls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .build() + }, null); + FileRolesStore fileRolesStore = mock(FileRolesStore.class); + when(fileRolesStore.roleDescriptors(Collections.singleton("fls"))).thenReturn(Collections.singleton(flsRole)); + when(fileRolesStore.roleDescriptors(Collections.singleton("dls"))).thenReturn(Collections.singleton(dlsRole)); + when(fileRolesStore.roleDescriptors(Collections.singleton("fls_dls"))).thenReturn(Collections.singleton(flsDlsRole)); + when(fileRolesStore.roleDescriptors(Collections.singleton("no_fls_dls"))).thenReturn(Collections.singleton(noFlsDlsRole)); + CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, mock(NativeRolesStore.class), + mock(ReservedRolesStore.class), licenseState); + + FieldPermissionsCache fieldPermissionsCache = new 
FieldPermissionsCache(Settings.EMPTY); + PlainActionFuture roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("fls"), fieldPermissionsCache, roleFuture); + assertEquals(Role.EMPTY, roleFuture.actionGet()); + + roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("dls"), fieldPermissionsCache, roleFuture); + assertEquals(Role.EMPTY, roleFuture.actionGet()); + + roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("fls_dls"), fieldPermissionsCache, roleFuture); + assertEquals(Role.EMPTY, roleFuture.actionGet()); + + roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("no_fls_dls"), fieldPermissionsCache, roleFuture); + assertNotEquals(Role.EMPTY, roleFuture.actionGet()); + } + + public void testRolesWhenDlsFlsLicensed() { + XPackLicenseState licenseState = mock(XPackLicenseState.class); + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); + RoleDescriptor flsRole = new RoleDescriptor("fls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .grantedFields("*") + .deniedFields("foo") + .indices("*") + .privileges("read") + .build() + }, null); + RoleDescriptor dlsRole = new RoleDescriptor("dls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() + }, null); + RoleDescriptor flsDlsRole = new RoleDescriptor("fls_dls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .grantedFields("*") + .deniedFields("foo") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() + }, null); + RoleDescriptor noFlsDlsRole = new RoleDescriptor("no_fls_dls", null, new IndicesPrivileges[] { + IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .build() + }, null); + FileRolesStore fileRolesStore = 
mock(FileRolesStore.class); + when(fileRolesStore.roleDescriptors(Collections.singleton("fls"))).thenReturn(Collections.singleton(flsRole)); + when(fileRolesStore.roleDescriptors(Collections.singleton("dls"))).thenReturn(Collections.singleton(dlsRole)); + when(fileRolesStore.roleDescriptors(Collections.singleton("fls_dls"))).thenReturn(Collections.singleton(flsDlsRole)); + when(fileRolesStore.roleDescriptors(Collections.singleton("no_fls_dls"))).thenReturn(Collections.singleton(noFlsDlsRole)); + CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, mock(NativeRolesStore.class), + mock(ReservedRolesStore.class), licenseState); + + FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + PlainActionFuture roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("fls"), fieldPermissionsCache, roleFuture); + assertNotEquals(Role.EMPTY, roleFuture.actionGet()); + + roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("dls"), fieldPermissionsCache, roleFuture); + assertNotEquals(Role.EMPTY, roleFuture.actionGet()); + + roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("fls_dls"), fieldPermissionsCache, roleFuture); + assertNotEquals(Role.EMPTY, roleFuture.actionGet()); + + roleFuture = new PlainActionFuture<>(); + compositeRolesStore.roles(Collections.singleton("no_fls_dls"), fieldPermissionsCache, roleFuture); + assertNotEquals(Role.EMPTY, roleFuture.actionGet()); + } + + public void testNegativeLookupsAreCached() { + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); + final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doAnswer((invocationOnMock) -> { + ActionListener> callback = (ActionListener>) invocationOnMock.getArguments()[1]; + 
callback.onResponse(Collections.emptySet()); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(String[].class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, new XPackLicenseState()); + verify(fileRolesStore).addListener(any(Runnable.class)); // adds a listener in ctor + + final String roleName = randomAsciiOfLengthBetween(1, 10); + PlainActionFuture future = new PlainActionFuture<>(); + final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + compositeRolesStore.roles(Collections.singleton(roleName), fieldPermissionsCache, future); + final Role role = future.actionGet(); + assertEquals(Role.EMPTY, role); + verify(reservedRolesStore).roleDescriptors(); + verify(fileRolesStore).roleDescriptors(eq(Collections.singleton(roleName))); + verify(nativeRolesStore).getRoleDescriptors(isA(String[].class), any(ActionListener.class)); + + final int numberOfTimesToCall = scaledRandomIntBetween(0, 32); + final boolean getSuperuserRole = randomBoolean() && roleName.equals(ReservedRolesStore.SUPERUSER_ROLE.name()) == false; + final Set names = getSuperuserRole ? 
Sets.newHashSet(roleName, ReservedRolesStore.SUPERUSER_ROLE.name()) : + Collections.singleton(roleName); + for (int i = 0; i < numberOfTimesToCall; i++) { + future = new PlainActionFuture<>(); + compositeRolesStore.roles(names, fieldPermissionsCache, future); + future.actionGet(); + } + + if (getSuperuserRole && numberOfTimesToCall > 0) { + // the superuser role was requested so we get the role descriptors again + verify(reservedRolesStore, times(2)).roleDescriptors(); + } + verifyNoMoreInteractions(fileRolesStore, reservedRolesStore, nativeRolesStore); + } +} diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index ca7ad5e9c8d..d41066c6e77 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -7,8 +7,11 @@ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.automaton.MinimizationOperations; +import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -39,6 +42,7 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; @@ -50,18 +54,21 @@ import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class FileRolesStoreTests extends ESTestCase { public void testParseFile() throws Exception { Path path = getDataPath("roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() + Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() .put(XPackSettings.DLS_FLS_ENABLED.getKey(), true) - .build()); + .build(), new XPackLicenseState()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(9)); - Role role = roles.get("role1"); + RoleDescriptor descriptor = roles.get("role1"); + assertNotNull(descriptor); + Role role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role1")); assertThat(role.cluster(), notNullValue()); @@ -69,7 +76,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(2)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); IndicesPermission.Group group = role.indices().groups()[0]; assertThat(group.indices(), notNullValue()); @@ -84,10 +91,12 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("idx3")); assertThat(group.privilege(), notNullValue()); - assertThat(group.privilege().implies(IndexPrivilege.READ), is(true)); - assertThat(group.privilege().implies(IndexPrivilege.WRITE),is(true)); + assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); - role = roles.get("role1.ab"); + descriptor = roles.get("role1.ab"); + 
assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role1.ab")); assertThat(role.cluster(), notNullValue()); @@ -95,65 +104,77 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(0)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); - role = roles.get("role2"); + descriptor = roles.get("role2"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role2")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster().privilege(), is(ClusterPrivilege.ALL)); // MONITOR is collapsed into ALL + assertTrue(Operations.sameLanguage(role.cluster().privilege().getAutomaton(), ClusterPrivilege.ALL.getAutomaton())); assertThat(role.indices(), notNullValue()); - assertThat(role.indices(), is(IndicesPermission.Core.NONE)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.indices(), is(IndicesPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); - role = roles.get("role3"); + descriptor = roles.get("role3"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role3")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.Core.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(1)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); group = role.indices().groups()[0]; 
assertThat(group.indices(), notNullValue()); assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("/.*_.*/")); assertThat(group.privilege(), notNullValue()); - assertThat(group.privilege().isAlias(IndexPrivilege.union(IndexPrivilege.READ, IndexPrivilege.WRITE)), is(true)); + assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), + MinimizationOperations.minimize(Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), + Operations.DEFAULT_MAX_DETERMINIZED_STATES))); - role = roles.get("role4"); - assertThat(role, nullValue()); + descriptor = roles.get("role4"); + assertNull(descriptor); - role = roles.get("role_run_as"); + descriptor = roles.get("role_run_as"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role_run_as")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.Core.NONE)); - assertThat(role.indices(), is(IndicesPermission.Core.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), notNullValue()); assertThat(role.runAs().check("user1"), is(true)); assertThat(role.runAs().check("user2"), is(true)); assertThat(role.runAs().check("user" + randomIntBetween(3, 9)), is(false)); - role = roles.get("role_run_as1"); + descriptor = roles.get("role_run_as1"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role_run_as1")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.Core.NONE)); - assertThat(role.indices(), is(IndicesPermission.Core.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), notNullValue()); 
assertThat(role.runAs().check("user1"), is(true)); assertThat(role.runAs().check("user2"), is(true)); assertThat(role.runAs().check("user" + randomIntBetween(3, 9)), is(false)); - role = roles.get("role_fields"); + descriptor = roles.get("role_fields"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role_fields")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.Core.NONE)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(1)); @@ -163,17 +184,19 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("field_idx")); assertThat(group.privilege(), notNullValue()); - assertThat(group.privilege().isAlias(IndexPrivilege.READ), is(true)); + assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); - role = roles.get("role_query"); + descriptor = roles.get("role_query"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role_query")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.Core.NONE)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); 
assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(1)); @@ -183,16 +206,18 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_idx")); assertThat(group.privilege(), notNullValue()); - assertThat(group.privilege().isAlias(IndexPrivilege.READ), is(true)); + assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertFalse(group.getFieldPermissions().hasFieldLevelSecurity()); assertThat(group.getQuery(), notNullValue()); - role = roles.get("role_query_fields"); + descriptor = roles.get("role_query_fields"); + assertNotNull(descriptor); + role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role_query_fields")); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.Core.NONE)); - assertThat(role.runAs(), is(RunAsPermission.Core.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(1)); @@ -202,7 +227,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_fields_idx")); assertThat(group.privilege(), notNullValue()); - assertThat(group.privilege().isAlias(IndexPrivilege.READ), is(true)); + assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); @@ -212,9 +237,9 @@ public class FileRolesStoreTests extends ESTestCase { public void 
testParseFileWithFLSAndDLSDisabled() throws Exception { Path path = getDataPath("roles.yml"); Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR); - Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() + Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() .put(XPackSettings.DLS_FLS_ENABLED.getKey(), false) - .build()); + .build(), new XPackLicenseState()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(6)); assertThat(roles.get("role_fields"), nullValue()); @@ -235,13 +260,37 @@ public class FileRolesStoreTests extends ESTestCase { "]. document and field level security is not enabled.")); } + public void testParseFileWithFLSAndDLSUnlicensed() throws Exception { + Path path = getDataPath("roles.yml"); + Logger logger = CapturingLogger.newCapturingLogger(Level.WARN); + XPackLicenseState licenseState = mock(XPackLicenseState.class); + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState); + assertThat(roles, notNullValue()); + assertThat(roles.size(), is(9)); + assertNotNull(roles.get("role_fields")); + assertNotNull(roles.get("role_query")); + assertNotNull(roles.get("role_query_fields")); + + List events = CapturingLogger.output(logger.getName(), Level.WARN); + assertThat(events, hasSize(3)); + assertThat( + events.get(0), + startsWith("role [role_fields] uses document and/or field level security, which is not enabled by the current license")); + assertThat(events.get(1), + startsWith("role [role_query] uses document and/or field level security, which is not enabled by the current license")); + assertThat(events.get(2), + startsWith("role [role_query_fields] uses document and/or field level security, which is not enabled by the current " + + "license")); + } + /** * This test is mainly to make sure we can read the default roles.yml config */ public void testDefaultRolesFile() throws Exception { // 
TODO we should add the config dir to the resources so we don't copy this stuff around... Path path = getDataPath("default_roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(0)); } @@ -267,13 +316,14 @@ public class FileRolesStoreTests extends ESTestCase { threadPool = new TestThreadPool("test"); watcherService = new ResourceWatcherService(settings, threadPool); final CountDownLatch latch = new CountDownLatch(1); - FileRolesStore store = new FileRolesStore(settings, env, watcherService, latch::countDown); - store.start(); + FileRolesStore store = new FileRolesStore(settings, env, watcherService, latch::countDown, new XPackLicenseState()); - Role role = store.role("role1"); - assertThat(role, notNullValue()); - role = store.role("role5"); - assertThat(role, nullValue()); + Set descriptors = store.roleDescriptors(Collections.singleton("role1")); + assertThat(descriptors, notNullValue()); + assertEquals(1, descriptors.size()); + descriptors = store.roleDescriptors(Collections.singleton("role5")); + assertThat(descriptors, notNullValue()); + assertTrue(descriptors.isEmpty()); watcherService.start(); @@ -290,11 +340,14 @@ public class FileRolesStoreTests extends ESTestCase { fail("Waited too long for the updated file to be picked up"); } - role = store.role("role5"); + descriptors = store.roleDescriptors(Collections.singleton("role5")); + assertThat(descriptors, notNullValue()); + assertEquals(1, descriptors.size()); + Role role = Role.builder(descriptors.iterator().next(), null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("role5")); - assertThat(role.cluster().check("cluster:monitor/foo/bar", null, null), is(true)); - assertThat(role.cluster().check("cluster:admin/foo/bar", null, null), is(false)); + 
assertThat(role.cluster().check("cluster:monitor/foo/bar"), is(true)); + assertThat(role.cluster().check("cluster:admin/foo/bar"), is(false)); } finally { if (watcherService != null) { @@ -307,17 +360,19 @@ public class FileRolesStoreTests extends ESTestCase { public void testThatEmptyFileDoesNotResultInLoop() throws Exception { Path file = createTempFile(); Files.write(file, Collections.singletonList("#"), StandardCharsets.UTF_8); - Map roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY); + Map roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState()); assertThat(roles.keySet(), is(empty())); } public void testThatInvalidRoleDefinitions() throws Exception { Path path = getDataPath("invalid_roles.yml"); Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState()); assertThat(roles.size(), is(1)); assertThat(roles, hasKey("valid_role")); - Role role = roles.get("valid_role"); + RoleDescriptor descriptor = roles.get("valid_role"); + assertNotNull(descriptor); + Role role = Role.builder(descriptor, null).build(); assertThat(role, notNullValue()); assertThat(role.name(), equalTo("valid_role")); @@ -354,7 +409,7 @@ public class FileRolesStoreTests extends ESTestCase { Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); Path path = getDataPath("reserved_roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(1)); @@ -366,7 +421,7 @@ public class FileRolesStoreTests extends ESTestCase { // the system role will always be checked first assertThat(events.get(0), containsString("Role [_system] is reserved")); assertThat(events.get(1), containsString("Role [superuser] is 
reserved")); - assertThat(events.get(2), containsString("Role [kibana] is reserved")); + assertThat(events.get(2), containsString("Role [kibana_system] is reserved")); assertThat(events.get(3), containsString("Role [transport_client] is reserved")); } @@ -386,8 +441,7 @@ public class FileRolesStoreTests extends ESTestCase { .put(XPackSettings.DLS_FLS_ENABLED.getKey(), flsDlsEnabled) .build(); Environment env = new Environment(settings); - FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class)); - store.start(); + FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState()); Map usageStats = store.usageStats(); @@ -401,10 +455,9 @@ public class FileRolesStoreTests extends ESTestCase { Path path = getDataPath("roles2xformat.yml"); byte[] bytes = Files.readAllBytes(path); String roleString = new String(bytes, Charset.defaultCharset()); - RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, - Settings.EMPTY); + RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY); RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0]; - assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("foo")); - assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("boo")); + assertThat(indicesPrivileges.getGrantedFields(), arrayContaining("foo", "boo")); + assertNull(indicesPrivileges.getDeniedFields()); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 2a200075c15..40a8ed3c7b7 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java 
@@ -5,45 +5,56 @@ */ package org.elasticsearch.xpack.security.authz.store; +import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.UnassignedInfo.Reason; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; -import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.security.SecurityTemplateService; +import org.elasticsearch.xpack.security.action.role.PutRoleRequest; import 
org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; +import java.util.List; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; -import static org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolverTests.indexBuilder; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.hamcrest.Matchers.arrayContaining; public class NativeRolesStoreTests extends ESTestCase { @@ -52,50 +63,164 @@ public class NativeRolesStoreTests extends ESTestCase { Path path = getDataPath("roles2xformat.json"); byte[] bytes = Files.readAllBytes(path); String roleString = new String(bytes, Charset.defaultCharset()); - RoleDescriptor role = NativeRolesStore.transformRole("role1", new BytesArray(roleString), logger); + RoleDescriptor role = NativeRolesStore.transformRole("role1", new BytesArray(roleString), logger, new XPackLicenseState()); + assertNotNull(role); + assertNotNull(role.getIndicesPrivileges()); RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0]; - assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("foo")); - assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("boo")); + assertThat(indicesPrivileges.getGrantedFields(), arrayContaining("foo", "boo")); + assertNull(indicesPrivileges.getDeniedFields()); } - public void testNegativeLookupsAreCached() { + public void testRoleDescriptorWithFlsDlsLicensing() throws 
IOException { + XPackLicenseState licenseState = mock(XPackLicenseState.class); + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); + RoleDescriptor flsRole = new RoleDescriptor("fls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().privileges("READ").indices("*") + .grantedFields("*") + .deniedFields("foo") + .build() }, + null); + assertFalse(flsRole.getTransientMetadata().containsKey("unlicensed_features")); + + RoleDescriptor dlsRole = new RoleDescriptor("dls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() }, + null); + assertFalse(dlsRole.getTransientMetadata().containsKey("unlicensed_features")); + + RoleDescriptor flsDlsRole = new RoleDescriptor("fls_ dls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ") + .grantedFields("*") + .deniedFields("foo") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() }, + null); + assertFalse(flsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); + + RoleDescriptor noFlsDlsRole = new RoleDescriptor("no_fls_dls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").build() }, + null); + assertFalse(noFlsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); + + XContentBuilder builder = flsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + BytesReference bytes = builder.bytes(); + RoleDescriptor role = NativeRolesStore.transformRole("fls", bytes, logger, licenseState); + assertNotNull(role); + assertTrue(role.getTransientMetadata().containsKey("unlicensed_features")); + assertThat(role.getTransientMetadata().get("unlicensed_features"), instanceOf(List.class)); + assertThat((List) role.getTransientMetadata().get("unlicensed_features"), contains("fls")); + + builder = 
dlsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("dls", bytes, logger, licenseState); + assertNotNull(role); + assertTrue(role.getTransientMetadata().containsKey("unlicensed_features")); + assertThat(role.getTransientMetadata().get("unlicensed_features"), instanceOf(List.class)); + assertThat((List) role.getTransientMetadata().get("unlicensed_features"), contains("dls")); + + builder = flsDlsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("fls_dls", bytes, logger, licenseState); + assertNotNull(role); + assertTrue(role.getTransientMetadata().containsKey("unlicensed_features")); + assertThat(role.getTransientMetadata().get("unlicensed_features"), instanceOf(List.class)); + assertThat((List) role.getTransientMetadata().get("unlicensed_features"), contains("fls", "dls")); + + builder = noFlsDlsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("no_fls_dls", bytes, logger, licenseState); + assertNotNull(role); + assertFalse(role.getTransientMetadata().containsKey("unlicensed_features")); + + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); + builder = flsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("fls", bytes, logger, licenseState); + assertNotNull(role); + assertFalse(role.getTransientMetadata().containsKey("unlicensed_features")); + + builder = dlsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("dls", bytes, logger, licenseState); + assertNotNull(role); + 
assertFalse(role.getTransientMetadata().containsKey("unlicensed_features")); + + builder = flsDlsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("fls_dls", bytes, logger, licenseState); + assertNotNull(role); + assertFalse(role.getTransientMetadata().containsKey("unlicensed_features")); + + builder = noFlsDlsRole.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS); + bytes = builder.bytes(); + role = NativeRolesStore.transformRole("no_fls_dls", bytes, logger, licenseState); + assertNotNull(role); + assertFalse(role.getTransientMetadata().containsKey("unlicensed_features")); + } + + public void testPutOfRoleWithFlsDlsUnlicensed() { final InternalClient internalClient = mock(InternalClient.class); + final XPackLicenseState licenseState = mock(XPackLicenseState.class); final AtomicBoolean methodCalled = new AtomicBoolean(false); - final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, internalClient) { + final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, internalClient, licenseState) { @Override public State state() { return State.STARTED; } @Override - void executeGetRoleRequest(String role, ActionListener listener) { + void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { if (methodCalled.compareAndSet(false, true)) { - listener.onResponse(new GetResponse(new GetResult(SecurityTemplateService.SECURITY_INDEX_NAME, "role", - role, -1, false, BytesArray.EMPTY, Collections.emptyMap()))); + listener.onResponse(true); } else { fail("method called more than once!"); } } }; - // setup the roles store so the security index exists - rolesStore.clusterChanged(new ClusterChangedEvent("negative_lookups", getClusterStateWithSecurityIndex(), getEmptyClusterState())); + rolesStore.clusterChanged(new ClusterChangedEvent("fls_dls_license", 
getClusterStateWithSecurityIndex(), getEmptyClusterState())); - final String roleName = randomAsciiOfLengthBetween(1, 10); - PlainActionFuture future = new PlainActionFuture<>(); - rolesStore.role(roleName, future); - Role role = future.actionGet(); - assertTrue(methodCalled.get()); - assertNull(role); + PutRoleRequest putRoleRequest = new PutRoleRequest(); + RoleDescriptor flsRole = new RoleDescriptor("fls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().privileges("READ").indices("*") + .grantedFields("*") + .deniedFields("foo") + .build() }, + null); + PlainActionFuture future = new PlainActionFuture<>(); + rolesStore.putRole(putRoleRequest, flsRole, future); + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e.getMessage(), containsString("field and document level security")); - final int numberOfRetries = scaledRandomIntBetween(1, 20); - for (int i = 0; i < numberOfRetries; i++) { - future = new PlainActionFuture<>(); - rolesStore.role(roleName, future); - role = future.actionGet(); - assertTrue(methodCalled.get()); - assertNull(role); - } + RoleDescriptor dlsRole = new RoleDescriptor("dls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() }, + null); + future = new PlainActionFuture<>(); + rolesStore.putRole(putRoleRequest, dlsRole, future); + e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e.getMessage(), containsString("field and document level security")); + + RoleDescriptor flsDlsRole = new RoleDescriptor("fls_ dls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ") + .grantedFields("*") + .deniedFields("foo") + .query(QueryBuilders.matchAllQuery().buildAsBytes()) + .build() }, + null); + future = new PlainActionFuture<>(); + rolesStore.putRole(putRoleRequest, flsDlsRole, future); 
+ e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e.getMessage(), containsString("field and document level security")); + + RoleDescriptor noFlsDlsRole = new RoleDescriptor("no_fls_dls", null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").build() }, + null); + future = new PlainActionFuture<>(); + rolesStore.putRole(putRoleRequest, noFlsDlsRole, future); + assertTrue(future.actionGet()); } private ClusterState getClusterStateWithSecurityIndex() { @@ -105,7 +230,11 @@ public class NativeRolesStoreTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(SecurityTemplateService.SECURITY_INDEX_NAME).settings(settings)).build(); + .put(IndexMetaData.builder(SecurityTemplateService.SECURITY_INDEX_NAME).settings(settings)) + .put(new IndexTemplateMetaData(SecurityTemplateService.SECURITY_TEMPLATE_NAME, 0, 0, + Collections.singletonList(SecurityTemplateService.SECURITY_INDEX_NAME), Settings.EMPTY, ImmutableOpenMap.of(), + ImmutableOpenMap.of(), ImmutableOpenMap.of())) + .build(); Index index = new Index(SecurityTemplateService.SECURITY_INDEX_NAME, UUID.randomUUID().toString()); ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, EXISTING_STORE_INSTANCE, new UnassignedInfo(Reason.INDEX_CREATED, "")); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java index 418fdba6b6f..429795dbda0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java @@ -5,124 +5,338 @@ */ package org.elasticsearch.xpack.security.authz.store; -import 
org.elasticsearch.xpack.security.SecurityContext; -import org.elasticsearch.xpack.security.authz.permission.IngestAdminRole; -import org.elasticsearch.xpack.security.authz.permission.KibanaRole; -import org.elasticsearch.xpack.security.authz.permission.KibanaUserRole; -import org.elasticsearch.xpack.security.authz.permission.MonitoringUserRole; -import org.elasticsearch.xpack.security.authz.permission.RemoteMonitoringAgentRole; -import org.elasticsearch.xpack.security.authz.permission.ReportingUserRole; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; -import org.elasticsearch.xpack.security.authz.permission.TransportClientRole; -import org.elasticsearch.xpack.security.user.ElasticUser; -import org.elasticsearch.xpack.security.user.KibanaUser; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.get.GetIndexAction; +import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; +import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.delete.DeleteAction; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.index.IndexAction; +import 
org.elasticsearch.action.ingest.DeletePipelineAction; +import org.elasticsearch.action.ingest.GetPipelineAction; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.search.MultiSearchAction; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.update.UpdateAction; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; +import org.elasticsearch.xpack.security.action.role.PutRoleAction; +import org.elasticsearch.xpack.security.action.user.PutUserAction; +import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; +import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.user.SystemUser; -import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; -import org.junit.Before; -import static org.hamcrest.Matchers.contains; +import java.util.Arrays; +import java.util.Map; + +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; /** * Unit tests for the {@link ReservedRolesStore} */ public class ReservedRolesStoreTests extends ESTestCase { - private final User user = new User("joe"); - private SecurityContext securityContext; - private ReservedRolesStore reservedRolesStore; - - @Before - public void setupMocks() { - securityContext = 
mock(SecurityContext.class); - when(securityContext.getUser()).thenReturn(user); - reservedRolesStore = new ReservedRolesStore(securityContext); - } - - public void testRetrievingReservedRolesNonKibanaUser() { - if (randomBoolean()) { - when(securityContext.getUser()).thenReturn(new ElasticUser(true)); - } - - assertThat(reservedRolesStore.role(SuperuserRole.NAME), sameInstance(SuperuserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(SuperuserRole.NAME), sameInstance(SuperuserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(TransportClientRole.NAME), sameInstance(TransportClientRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(TransportClientRole.NAME), sameInstance(TransportClientRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(KibanaUserRole.NAME), sameInstance(KibanaUserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(KibanaUserRole.NAME), sameInstance(KibanaUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(IngestAdminRole.NAME), sameInstance(IngestAdminRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(IngestAdminRole.NAME), sameInstance(IngestAdminRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(MonitoringUserRole.NAME), sameInstance(MonitoringUserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(MonitoringUserRole.NAME), sameInstance(MonitoringUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(RemoteMonitoringAgentRole.NAME), sameInstance(RemoteMonitoringAgentRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(RemoteMonitoringAgentRole.NAME), sameInstance(RemoteMonitoringAgentRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(ReportingUserRole.NAME), sameInstance(ReportingUserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(ReportingUserRole.NAME), sameInstance(ReportingUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.roleDescriptors(), contains(SuperuserRole.DESCRIPTOR, 
TransportClientRole.DESCRIPTOR, - KibanaUserRole.DESCRIPTOR, MonitoringUserRole.DESCRIPTOR, RemoteMonitoringAgentRole.DESCRIPTOR, - IngestAdminRole.DESCRIPTOR, ReportingUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(KibanaRole.NAME), nullValue()); - assertThat(reservedRolesStore.roleDescriptor(KibanaRole.NAME), nullValue()); - - assertThat(reservedRolesStore.role(SystemUser.ROLE_NAME), nullValue()); - } - - public void testRetrievingReservedRoleKibanaUser() { - when(securityContext.getUser()).thenReturn(new KibanaUser(true)); - assertThat(reservedRolesStore.role(SuperuserRole.NAME), sameInstance(SuperuserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(SuperuserRole.NAME), sameInstance(SuperuserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(TransportClientRole.NAME), sameInstance(TransportClientRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(TransportClientRole.NAME), sameInstance(TransportClientRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(KibanaUserRole.NAME), sameInstance(KibanaUserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(KibanaUserRole.NAME), sameInstance(KibanaUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(IngestAdminRole.NAME), sameInstance(IngestAdminRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(IngestAdminRole.NAME), sameInstance(IngestAdminRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(KibanaRole.NAME), sameInstance(KibanaRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(KibanaRole.NAME), sameInstance(KibanaRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(MonitoringUserRole.NAME), sameInstance(MonitoringUserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(MonitoringUserRole.NAME), sameInstance(MonitoringUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(RemoteMonitoringAgentRole.NAME), sameInstance(RemoteMonitoringAgentRole.INSTANCE)); - 
assertThat(reservedRolesStore.roleDescriptor(RemoteMonitoringAgentRole.NAME), sameInstance(RemoteMonitoringAgentRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(ReportingUserRole.NAME), sameInstance(ReportingUserRole.INSTANCE)); - assertThat(reservedRolesStore.roleDescriptor(ReportingUserRole.NAME), sameInstance(ReportingUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.roleDescriptors(), contains(SuperuserRole.DESCRIPTOR, TransportClientRole.DESCRIPTOR, - KibanaUserRole.DESCRIPTOR, KibanaRole.DESCRIPTOR, MonitoringUserRole.DESCRIPTOR, RemoteMonitoringAgentRole.DESCRIPTOR, - IngestAdminRole.DESCRIPTOR, ReportingUserRole.DESCRIPTOR)); - - assertThat(reservedRolesStore.role(SystemUser.ROLE_NAME), nullValue()); - } - public void testIsReserved() { - assertThat(ReservedRolesStore.isReserved(KibanaRole.NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(SuperuserRole.NAME), is(true)); + assertThat(ReservedRolesStore.isReserved("kibana_system"), is(true)); + assertThat(ReservedRolesStore.isReserved("superuser"), is(true)); assertThat(ReservedRolesStore.isReserved("foobar"), is(false)); assertThat(ReservedRolesStore.isReserved(SystemUser.ROLE_NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(TransportClientRole.NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(KibanaUserRole.NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(IngestAdminRole.NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(RemoteMonitoringAgentRole.NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(MonitoringUserRole.NAME), is(true)); - assertThat(ReservedRolesStore.isReserved(ReportingUserRole.NAME), is(true)); + assertThat(ReservedRolesStore.isReserved("transport_client"), is(true)); + assertThat(ReservedRolesStore.isReserved("kibana_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("ingest_admin"), is(true)); + assertThat(ReservedRolesStore.isReserved("remote_monitoring_agent"), is(true)); + 
assertThat(ReservedRolesStore.isReserved("monitoring_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("reporting_user"), is(true)); + } + + public void testIngestAdminRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("ingest_admin"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role ingestAdminRole = Role.builder(roleDescriptor, null).build(); + assertThat(ingestAdminRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); + assertThat(ingestAdminRole.cluster().check(GetIndexTemplatesAction.NAME), is(true)); + assertThat(ingestAdminRole.cluster().check(DeleteIndexTemplateAction.NAME), is(true)); + assertThat(ingestAdminRole.cluster().check(PutPipelineAction.NAME), is(true)); + assertThat(ingestAdminRole.cluster().check(GetPipelineAction.NAME), is(true)); + assertThat(ingestAdminRole.cluster().check(DeletePipelineAction.NAME), is(true)); + + assertThat(ingestAdminRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(ingestAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(ingestAdminRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + assertThat(ingestAdminRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); + assertThat(ingestAdminRole.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), + is(false)); + assertThat(ingestAdminRole.indices().allowedIndicesMatcher(GetAction.NAME).test(randomAsciiOfLengthBetween(8, 24)), + is(false)); + } + + public void testKibanaRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("kibana_system"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role kibanaRole = Role.builder(roleDescriptor, null).build(); + assertThat(kibanaRole.cluster().check(ClusterHealthAction.NAME), is(true)); + 
assertThat(kibanaRole.cluster().check(ClusterStateAction.NAME), is(true)); + assertThat(kibanaRole.cluster().check(ClusterStatsAction.NAME), is(true)); + assertThat(kibanaRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(kibanaRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(kibanaRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(kibanaRole.cluster().check(MonitoringBulkAction.NAME), is(true)); + + assertThat(kibanaRole.runAs().check(randomAsciiOfLengthBetween(1, 12)), is(false)); + + assertThat(kibanaRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), is(false)); + + Arrays.asList(".kibana", ".kibana-devnull", ".reporting-" + randomAsciiOfLength(randomIntBetween(0, 13))).forEach((index) -> { + logger.info("index name [{}]", index); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); + }); + } + + public void testKibanaUserRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("kibana_user"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), 
hasEntry("_reserved", true)); + + Role kibanaUserRole = Role.builder(roleDescriptor, null).build(); + assertThat(kibanaUserRole.cluster().check(ClusterHealthAction.NAME), is(true)); + assertThat(kibanaUserRole.cluster().check(ClusterStateAction.NAME), is(true)); + assertThat(kibanaUserRole.cluster().check(ClusterStatsAction.NAME), is(true)); + assertThat(kibanaUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(kibanaUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + assertThat(kibanaUserRole.runAs().check(randomAsciiOfLengthBetween(1, 12)), is(false)); + + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher("indices:foo") + .test(randomAsciiOfLengthBetween(8, 24)), is(false)); + + Arrays.asList(".kibana", ".kibana-devnull").forEach((index) -> { + logger.info("index name [{}]", index); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); + + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + 
assertThat(kibanaUserRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); + assertThat(kibanaUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); + }); + } + + public void testMonitoringUserRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("monitoring_user"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role monitoringUserRole = Role.builder(roleDescriptor, null).build(); + assertThat(monitoringUserRole.cluster().check(ClusterHealthAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterStateAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterStatsAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + assertThat(monitoringUserRole.runAs().check(randomAsciiOfLengthBetween(1, 12)), is(false)); + + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), + is(false)); + + Arrays.asList(".monitoring-" + randomAsciiOfLength(randomIntBetween(0, 13)), + ".marvel-es-" + randomAsciiOfLength(randomIntBetween(0, 13))).forEach((index) -> { + assertThat(monitoringUserRole.indices().allowedIndicesMatcher("indices:foo").test(index), 
is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(monitoringUserRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + }); + } + + public void testRemoteMonitoringAgentRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("remote_monitoring_agent"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role remoteMonitoringAgentRole = Role.builder(roleDescriptor, null).build(); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterHealthAction.NAME), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStateAction.NAME), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStatsAction.NAME), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterUpdateSettingsAction.NAME), + is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + assertThat(remoteMonitoringAgentRole.runAs().check(randomAsciiOfLengthBetween(1, 12)), 
is(false)); + + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo") + .test(randomAsciiOfLengthBetween(8, 24)), is(false)); + + Arrays.asList(".monitoring-" + randomAsciiOfLength(randomIntBetween(0, 13)), + ".marvel-es-" + randomAsciiOfLength(randomIntBetween(0, 13))).forEach((index) -> { + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(true)); + }); + } + + public void testReportingUserRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("reporting_user"); + 
assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role reportingUserRole = Role.builder(roleDescriptor, null).build(); + assertThat(reportingUserRole.cluster().check(ClusterHealthAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterStateAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterStatsAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + assertThat(reportingUserRole.runAs().check(randomAsciiOfLengthBetween(1, 12)), is(false)); + + assertThat(reportingUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), + is(false)); + + final String index = ".reporting-" + randomAsciiOfLength(randomIntBetween(0, 13)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); + 
assertThat(reportingUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(UpdateAction.NAME).test(index), is(true)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); + } + + public void testSuperuserRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("superuser"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role superuserRole = Role.builder(roleDescriptor, null).build(); + assertThat(superuserRole.cluster().check(ClusterHealthAction.NAME), is(true)); + assertThat(superuserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(true)); + assertThat(superuserRole.cluster().check(PutUserAction.NAME), is(true)); + assertThat(superuserRole.cluster().check(PutRoleAction.NAME), is(true)); + assertThat(superuserRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); + assertThat(superuserRole.cluster().check("internal:admin/foo"), is(false)); + + final Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); + final MetaData metaData = new MetaData.Builder() + .put(new IndexMetaData.Builder("a1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) + .put(new IndexMetaData.Builder("a2").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) + .put(new IndexMetaData.Builder("aaaaaa").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) + .put(new 
IndexMetaData.Builder("bbbbb").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) + .put(new IndexMetaData.Builder("b") + .settings(indexSettings) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(new AliasMetaData.Builder("ab").build()) + .putAlias(new AliasMetaData.Builder("ba").build()) + .build(), true) + .build(); + + FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + Map authzMap = + superuserRole.indices().authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData, fieldPermissionsCache); + assertThat(authzMap.get("a1").isGranted(), is(true)); + assertThat(authzMap.get("b").isGranted(), is(true)); + authzMap = superuserRole.indices().authorize(DeleteIndexAction.NAME, Sets.newHashSet("a1", "ba"), metaData, fieldPermissionsCache); + assertThat(authzMap.get("a1").isGranted(), is(true)); + assertThat(authzMap.get("b").isGranted(), is(true)); + authzMap = superuserRole.indices().authorize(IndexAction.NAME, Sets.newHashSet("a2", "ba"), metaData, fieldPermissionsCache); + assertThat(authzMap.get("a2").isGranted(), is(true)); + assertThat(authzMap.get("b").isGranted(), is(true)); + authzMap = superuserRole.indices() + .authorize(UpdateSettingsAction.NAME, Sets.newHashSet("aaaaaa", "ba"), metaData, fieldPermissionsCache); + assertThat(authzMap.get("aaaaaa").isGranted(), is(true)); + assertThat(authzMap.get("b").isGranted(), is(true)); + assertTrue(superuserRole.indices().check(SearchAction.NAME)); + assertFalse(superuserRole.indices().check("unknown")); + + assertThat(superuserRole.runAs().check(randomAsciiOfLengthBetween(1, 30)), is(true)); + } + + public void testLogstashSystemRole() { + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("logstash_system"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role logstashSystemRole = Role.builder(roleDescriptor, null).build(); + 
assertThat(logstashSystemRole.cluster().check(ClusterHealthAction.NAME), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterStateAction.NAME), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterStatsAction.NAME), is(true)); + assertThat(logstashSystemRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(logstashSystemRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(logstashSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(logstashSystemRole.cluster().check(MonitoringBulkAction.NAME), is(true)); + + assertThat(logstashSystemRole.runAs().check(randomAsciiOfLengthBetween(1, 30)), is(false)); + + assertThat(logstashSystemRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); + assertThat(logstashSystemRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false)); + assertThat(logstashSystemRole.indices().allowedIndicesMatcher("indices:foo").test(randomAsciiOfLengthBetween(8, 24)), + is(false)); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java index 4d8d26ff548..c13ea7ed832 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java @@ -6,14 +6,33 @@ package org.elasticsearch.xpack.security.test; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import 
org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.SecurityTemplateService; +import org.elasticsearch.xpack.security.authz.store.NativeRolesStoreTests; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.util.UUID; + +import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; public class SecurityTestUtils { @@ -49,4 +68,36 @@ public class SecurityTestUtils { public static String writeFile(Path folder, String name, String content) { return writeFile(folder, name, content.getBytes(StandardCharsets.UTF_8)); } + + public static ClusterState getClusterStateWithSecurityIndex() { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder(SecurityTemplateService.SECURITY_INDEX_NAME).settings(settings)).build(); + RoutingTable routingTable = buildSecurityIndexRoutingTable(); + + return ClusterState.builder(new ClusterName(NativeRolesStoreTests.class.getName())) + .metaData(metaData) + .routingTable(routingTable) + .build(); + } + + public static RoutingTable buildSecurityIndexRoutingTable() { + Index index = new Index(SecurityTemplateService.SECURITY_INDEX_NAME, UUID.randomUUID().toString()); + ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 
0), true, EXISTING_STORE_INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); + String nodeId = ESTestCase.randomAsciiOfLength(8); + IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(new ShardId(index, 0)) + .addShard(shardRouting.initialize(nodeId, null, shardRouting.getExpectedShardSize()).moveToStarted()) + .build(); + return RoutingTable.builder() + .add(IndexRoutingTable + .builder(index) + .addIndexShard(table) + .build()) + .build(); + } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index e6496f277e1..316a0e84cb2 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.transport; +import org.elasticsearch.Version; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -21,6 +22,7 @@ import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponse.Empty; import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; @@ -28,6 +30,7 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import 
org.elasticsearch.xpack.security.crypto.CryptoService; import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor.ContextRestoreResponseHandler; +import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.ssl.SSLService; @@ -37,10 +40,12 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import static org.hamcrest.Matchers.arrayContaining; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; @@ -63,7 +68,7 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { threadContext = new ThreadContext(settings); when(threadPool.getThreadContext()).thenReturn(threadContext); cryptoService = new CryptoService(settings, new Environment(settings)); - securityContext = spy(new SecurityContext(settings, threadPool, cryptoService)); + securityContext = spy(new SecurityContext(settings, threadPool.getThreadContext(), cryptoService)); xPackLicenseState = mock(XPackLicenseState.class); when(xPackLicenseState.isAuthAllowed()).thenReturn(true); } @@ -112,7 +117,9 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { sendingUser.set(securityContext.getUser()); } }); - sender.sendRequest(null, "indices:foo", null, null, null); + Transport.Connection connection = mock(Transport.Connection.class); + when(connection.getVersion()).thenReturn(Version.CURRENT); + sender.sendRequest(connection, "indices:foo", null, null, null); assertTrue(calledWrappedSender.get()); assertEquals(user, 
sendingUser.get()); assertEquals(user, securityContext.getUser()); @@ -168,8 +175,10 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { fail("sender should not be called!"); } }); + Transport.Connection connection = mock(Transport.Connection.class); + when(connection.getVersion()).thenReturn(Version.CURRENT); IllegalStateException e = - expectThrows(IllegalStateException.class, () -> sender.sendRequest(null, "indices:foo", null, null, null)); + expectThrows(IllegalStateException.class, () -> sender.sendRequest(connection, "indices:foo", null, null, null)); assertEquals("there should always be a user when sending a message", e.getMessage()); assertNull(securityContext.getUser()); verify(xPackLicenseState).isAuthAllowed(); @@ -177,6 +186,66 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { verifyNoMoreInteractions(xPackLicenseState); } + public void testSendWithKibanaUser() throws Exception { + final User user = new KibanaUser(true); + final Authentication authentication = new Authentication(user, new RealmRef("reserved", "reserved", "node1"), null); + authentication.writeToContext(threadContext, cryptoService, AuthenticationService.SIGN_USER_HEADER.get(settings)); + threadContext.putTransient(AuthorizationService.ORIGINATING_ACTION_KEY, "indices:foo"); + + SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, + mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), + securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + + AtomicBoolean calledWrappedSender = new AtomicBoolean(false); + AtomicReference sendingUser = new AtomicReference<>(); + AsyncSender intercepted = new AsyncSender() { + @Override + public void sendRequest(Transport.Connection connection, String action, TransportRequest 
request, + TransportRequestOptions options, TransportResponseHandler handler) { + if (calledWrappedSender.compareAndSet(false, true) == false) { + fail("sender called more than once!"); + } + sendingUser.set(securityContext.getUser()); + } + }; + AsyncSender sender = interceptor.interceptSender(intercepted); + Transport.Connection connection = mock(Transport.Connection.class); + when(connection.getVersion()).thenReturn(Version.fromId(randomIntBetween(Version.V_5_0_0_ID, Version.V_5_2_0_ID_UNRELEASED - 100))); + sender.sendRequest(connection, "indices:foo[s]", null, null, null); + assertTrue(calledWrappedSender.get()); + assertNotEquals(user, sendingUser.get()); + assertEquals(KibanaUser.NAME, sendingUser.get().principal()); + assertThat(sendingUser.get().roles(), arrayContaining("kibana")); + assertEquals(user, securityContext.getUser()); + + // reset and test with version that was changed + calledWrappedSender.set(false); + sendingUser.set(null); + when(connection.getVersion()).thenReturn(Version.V_5_2_0_UNRELEASED); + sender.sendRequest(connection, "indices:foo[s]", null, null, null); + assertTrue(calledWrappedSender.get()); + assertEquals(user, sendingUser.get()); + + // reset and disable reserved realm + calledWrappedSender.set(false); + sendingUser.set(null); + when(connection.getVersion()).thenReturn(Version.V_5_0_0); + settings = Settings.builder().put(settings).put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); + interceptor = new SecurityServerTransportInterceptor(settings, threadPool, + mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), + securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + sender = interceptor.interceptSender(intercepted); + sender.sendRequest(connection, "indices:foo[s]", null, null, null); + assertTrue(calledWrappedSender.get()); + 
assertEquals(user, sendingUser.get()); + + verify(xPackLicenseState, times(3)).isAuthAllowed(); + verify(securityContext, times(1)).executeAsUser(any(User.class), any(Consumer.class)); + verifyNoMoreInteractions(xPackLicenseState); + } + public void testContextRestoreResponseHandler() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java index af7f629b4c3..f6f1c7af65c 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.transport; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.MockIndicesRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; @@ -17,27 +18,33 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportSettings; +import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.authc.AuthenticationService; import 
org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.authz.permission.SuperuserRole; +import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.crypto.CryptoService; +import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.security.user.XPackUser; import org.junit.Before; -import java.util.Collection; +import java.io.IOException; import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.mock.orig.Mockito.times; import static org.elasticsearch.xpack.security.support.Exceptions.authenticationError; import static org.elasticsearch.xpack.security.support.Exceptions.authorizationError; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; @@ -51,6 +58,7 @@ import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class ServerTransportFilterTests extends ESTestCase { + private AuthenticationService authcService; private AuthorizationService authzService; private TransportChannel channel; @@ -73,6 +81,7 @@ public class ServerTransportFilterTests extends ESTestCase { public void testInbound() throws Exception { TransportRequest request = mock(TransportRequest.class); Authentication authentication = mock(Authentication.class); + when(authentication.getVersion()).thenReturn(Version.CURRENT); when(authentication.getUser()).thenReturn(SystemUser.INSTANCE); when(authentication.getRunAsUser()).thenReturn(SystemUser.INSTANCE); doAnswer((i) -> { @@ -85,7 +94,7 @@ public class ServerTransportFilterTests extends ESTestCase { PlainActionFuture future = new 
PlainActionFuture<>(); filter.inbound("_action", request, channel, future); //future.get(); // don't block it's not called really just mocked - verify(authzService).authorize(authentication, "_action", request, Collections.emptyList(), Collections.emptyList()); + verify(authzService).authorize(authentication, "_action", request, null, null); } public void testInboundDestructiveOperations() throws Exception { @@ -94,6 +103,7 @@ public class ServerTransportFilterTests extends ESTestCase { IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()), randomFrom("*", "_all", "test*")); Authentication authentication = mock(Authentication.class); + when(authentication.getVersion()).thenReturn(Version.CURRENT); when(authentication.getUser()).thenReturn(SystemUser.INSTANCE); doAnswer((i) -> { ActionListener callback = @@ -108,7 +118,7 @@ public class ServerTransportFilterTests extends ESTestCase { verify(listener).onFailure(isA(IllegalArgumentException.class)); verifyNoMoreInteractions(authzService); } else { - verify(authzService).authorize(authentication, action, request, Collections.emptyList(), Collections.emptyList()); + verify(authzService).authorize(authentication, action, request, null, null); } } @@ -143,29 +153,29 @@ public class ServerTransportFilterTests extends ESTestCase { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), eq(request), eq(null), any(ActionListener.class)); + final Role empty = Role.EMPTY; doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - callback.onResponse(Collections.emptyList()); + callback.onResponse(empty); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); + when(authentication.getVersion()).thenReturn(Version.CURRENT); when(authentication.getUser()).thenReturn(XPackUser.INSTANCE); when(authentication.getRunAsUser()).thenReturn(XPackUser.INSTANCE); PlainActionFuture future = 
new PlainActionFuture<>(); doThrow(authorizationError("authz failed")).when(authzService).authorize(authentication, "_action", request, - Collections.emptyList(), Collections.emptyList()); - try { + empty, null); + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { filter.inbound("_action", request, channel, future); future.actionGet(); - fail("expected filter inbound to throw an authorization exception on authorization error"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.getMessage(), equalTo("authz failed")); - } + }); + assertThat(e.getMessage(), equalTo("authz failed")); } public void testClientProfileRejectsNodeActions() throws Exception { TransportRequest request = mock(TransportRequest.class); - ServerTransportFilter filter = getClientFilter(); + ServerTransportFilter filter = getClientFilter(true); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> filter.inbound("internal:foo/bar", request, channel, new PlainActionFuture<>())); assertEquals("executing internal/shard actions is considered malicious and forbidden", e.getMessage()); @@ -179,14 +189,13 @@ public class ServerTransportFilterTests extends ESTestCase { public void testNodeProfileAllowsNodeActions() throws Exception { final String internalAction = "internal:foo/bar"; final String nodeOrShardAction = "indices:action" + randomFrom("[s]", "[p]", "[r]", "[n]", "[s][p]", "[s][r]", "[f]"); - ServerTransportFilter filter = getNodeFilter(); + ServerTransportFilter filter = getNodeFilter(true); TransportRequest request = mock(TransportRequest.class); Authentication authentication = new Authentication(new User("test", "superuser"), new RealmRef("test", "test", "node1"), null); - final Collection userRoles = Collections.singletonList(SuperuserRole.INSTANCE); doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - 
callback.onResponse(authentication.getUser().equals(i.getArguments()[0]) ? userRoles : Collections.emptyList()); + callback.onResponse(authentication.getUser().equals(i.getArguments()[0]) ? ReservedRolesStore.SUPERUSER_ROLE : null); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); doAnswer((i) -> { @@ -205,26 +214,74 @@ public class ServerTransportFilterTests extends ESTestCase { filter.inbound(internalAction, request, channel, new PlainActionFuture<>()); verify(authcService).authenticate(eq(internalAction), eq(request), eq(null), any(ActionListener.class)); verify(authzService).roles(eq(authentication.getUser()), any(ActionListener.class)); - verify(authzService).authorize(authentication, internalAction, request, userRoles, Collections.emptyList()); + verify(authzService).authorize(authentication, internalAction, request, ReservedRolesStore.SUPERUSER_ROLE, null); filter.inbound(nodeOrShardAction, request, channel, new PlainActionFuture<>()); verify(authcService).authenticate(eq(nodeOrShardAction), eq(request), eq(null), any(ActionListener.class)); verify(authzService, times(2)).roles(eq(authentication.getUser()), any(ActionListener.class)); - verify(authzService).authorize(authentication, nodeOrShardAction, request, userRoles, Collections.emptyList()); + verify(authzService).authorize(authentication, nodeOrShardAction, request, ReservedRolesStore.SUPERUSER_ROLE, null); verifyNoMoreInteractions(authcService, authzService); } - private ServerTransportFilter getClientOrNodeFilter() { - return randomBoolean() ? 
getNodeFilter() : getClientFilter(); + public void testHandlesKibanaUserCompatibility() throws Exception { + TransportRequest request = mock(TransportRequest.class); + User user = new User("kibana", "kibana"); + Authentication authentication = mock(Authentication.class); + when(authentication.getVersion()) + .thenReturn(Version.fromId(randomIntBetween(Version.V_5_0_0_ID, Version.V_5_2_0_ID_UNRELEASED - 100))); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(user); + doAnswer((i) -> { + ActionListener callback = + (ActionListener) i.getArguments()[3]; + callback.onResponse(authentication); + return Void.TYPE; + }).when(authcService).authenticate(eq("_action"), eq(request), eq(null), any(ActionListener.class)); + AtomicReference rolesRef = new AtomicReference<>(); + final Role empty = Role.EMPTY; + doAnswer((i) -> { + ActionListener callback = + (ActionListener) i.getArguments()[1]; + rolesRef.set(((User) i.getArguments()[0]).roles()); + callback.onResponse(empty); + return Void.TYPE; + }).when(authzService).roles(any(User.class), any(ActionListener.class)); + ServerTransportFilter filter = getClientOrNodeFilter(); + PlainActionFuture future = new PlainActionFuture<>(); + filter.inbound("_action", request, channel, future); + assertNotNull(rolesRef.get()); + assertThat(rolesRef.get(), arrayContaining("kibana_system")); + + // test with a version that doesn't need changing + filter = getClientOrNodeFilter(); + rolesRef.set(null); + user = new KibanaUser(true); + when(authentication.getUser()).thenReturn(user); + when(authentication.getRunAsUser()).thenReturn(user); + when(authentication.getVersion()).thenReturn(Version.V_5_2_0_UNRELEASED); + future = new PlainActionFuture<>(); + filter.inbound("_action", request, channel, future); + assertNotNull(rolesRef.get()); + assertThat(rolesRef.get(), arrayContaining("kibana_system")); } - private ServerTransportFilter.ClientProfile getClientFilter() { - return new 
ServerTransportFilter.ClientProfile(authcService, authzService, new ThreadContext(Settings.EMPTY), false, - destructiveOperations); + private ServerTransportFilter getClientOrNodeFilter() throws IOException { + return randomBoolean() ? getNodeFilter(true) : getClientFilter(true); } - private ServerTransportFilter.NodeProfile getNodeFilter() { - return new ServerTransportFilter.NodeProfile(authcService, authzService, new ThreadContext(Settings.EMPTY), false, - destructiveOperations); + private ServerTransportFilter.ClientProfile getClientFilter(boolean reservedRealmEnabled) throws IOException { + Settings settings = Settings.builder().put("path.home", createTempDir()).build(); + ThreadContext threadContext = new ThreadContext(settings); + return new ServerTransportFilter.ClientProfile(authcService, authzService, threadContext, false, destructiveOperations, + reservedRealmEnabled, + new SecurityContext(settings, threadContext, new CryptoService(Settings.EMPTY, new Environment(settings)))); + } + + private ServerTransportFilter.NodeProfile getNodeFilter(boolean reservedRealmEnabled) throws IOException { + Settings settings = Settings.builder().put("path.home", createTempDir()).build(); + ThreadContext threadContext = new ThreadContext(settings); + return new ServerTransportFilter.NodeProfile(authcService, authzService, threadContext, false, destructiveOperations, + reservedRealmEnabled, + new SecurityContext(settings, threadContext, new CryptoService(Settings.EMPTY, new Environment(settings)))); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java index 9f31e03899a..4629606c746 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -29,7 +28,7 @@ public abstract class XPackRestTestCase extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/WatcherMetaDataSerializationTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/WatcherMetaDataSerializationTests.java index 188e9b2b199..0a182d36533 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/WatcherMetaDataSerializationTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/WatcherMetaDataSerializationTests.java @@ -5,10 +5,12 @@ */ package org.elasticsearch.xpack.watcher; +import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -16,6 +18,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; import java.util.Collections; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.notNullValue; @@ -43,11 +47,11 @@ public class WatcherMetaDataSerializationTests extends ESTestCase { RepositoriesMetaData repositoriesMetaData = new RepositoriesMetaData(repositoryMetaData); final MetaData.Builder metaDataBuilder = MetaData.builder(); if (randomBoolean()) { // random order of insertion - metaDataBuilder.putCustom(watcherMetaData.type(), watcherMetaData); - metaDataBuilder.putCustom(repositoriesMetaData.type(), repositoriesMetaData); + metaDataBuilder.putCustom(watcherMetaData.getWriteableName(), watcherMetaData); + metaDataBuilder.putCustom(repositoriesMetaData.getWriteableName(), repositoriesMetaData); } else { - metaDataBuilder.putCustom(repositoriesMetaData.type(), repositoriesMetaData); - metaDataBuilder.putCustom(watcherMetaData.type(), watcherMetaData); + metaDataBuilder.putCustom(repositoriesMetaData.getWriteableName(), repositoriesMetaData); + metaDataBuilder.putCustom(watcherMetaData.getWriteableName(), watcherMetaData); } // serialize metadata XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -59,17 +63,25 @@ public class WatcherMetaDataSerializationTests extends ESTestCase { // deserialize metadata again MetaData metaData = MetaData.Builder.fromXContent(createParser(builder)); // check that custom metadata still present - assertThat(metaData.custom(watcherMetaData.type()), notNullValue()); - assertThat(metaData.custom(repositoriesMetaData.type()), notNullValue()); + assertThat(metaData.custom(watcherMetaData.getWriteableName()), notNullValue()); + assertThat(metaData.custom(repositoriesMetaData.getWriteableName()), notNullValue()); } private static WatcherMetaData getWatcherMetaDataFromXContent(XContentParser parser) throws Exception { parser.nextToken(); // consume null parser.nextToken(); // consume "watcher" - WatcherMetaData watcherMetaDataFromXContent = (WatcherMetaData)WatcherMetaData.PROTO.fromXContent(parser); + WatcherMetaData watcherMetaDataFromXContent = 
(WatcherMetaData)WatcherMetaData.fromXContent(parser); parser.nextToken(); // consume endObject assertThat(parser.nextToken(), nullValue()); return watcherMetaDataFromXContent; } + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(Stream.concat( + new Watcher(Settings.EMPTY).getNamedXContent().stream(), + ClusterModule.getNamedXWriteables().stream() + ).collect(Collectors.toList())); + } + } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java index ce366b89115..950b7b59692 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java @@ -287,6 +287,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase { }, 20, TimeUnit.SECONDS); } + @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack/issues/4561") public void testFailingActionDoesGetThrottled() throws Exception { TimeValue throttlePeriod = new TimeValue(60, TimeUnit.MINUTES); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index a391e3f3b07..5cbd61ab9ba 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import 
org.elasticsearch.xpack.common.http.HttpMethod; import org.elasticsearch.xpack.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.common.http.Scheme; import org.elasticsearch.xpack.common.http.auth.basic.BasicAuth; @@ -69,7 +70,8 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webServer.getPort()) .scheme(Scheme.HTTPS) .path(new TextTemplate("/test/_id")) - .body(new TextTemplate("{key=value}")); + .body(new TextTemplate("{key=value}")) + .method(HttpMethod.POST); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() @@ -91,6 +93,7 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest SearchResponse response = searchWatchRecords(b -> b.setQuery(QueryBuilders.termQuery(WatchRecord.Field.STATE.getPreferredName(), "executed"))); + assertNoFailures(response); XContentSource source = xContentSource(response.getHits().getAt(0).sourceRef()); String body = source.getValue("result.actions.0.webhook.response.body"); @@ -108,7 +111,8 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest .scheme(Scheme.HTTPS) .auth(new BasicAuth("_username", "_password".toCharArray())) .path(new TextTemplate("/test/_id")) - .body(new TextTemplate("{key=value}")); + .body(new TextTemplate("{key=value}")) + .method(HttpMethod.POST); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java index 2027c297f6f..4509b9c2807 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java @@ -5,10 +5,15 @@ */ package 
org.elasticsearch.xpack.watcher.actions.webhook; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.common.http.HttpMethod; import org.elasticsearch.xpack.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.common.http.auth.basic.BasicAuth; import org.elasticsearch.xpack.common.text.TextTemplate; @@ -20,6 +25,8 @@ import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.junit.After; import org.junit.Before; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder; import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput; @@ -36,6 +43,16 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase private MockWebServer webServer = new MockWebServer();; + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put("http.enabled", true).build(); + } + + @Override + protected boolean enableSecurity() { + return true; + } + @Before public void startWebservice() throws Exception { webServer.start(); @@ -52,7 +69,8 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase .path(new TextTemplate("/test/_id")) .putParam("param1", new TextTemplate("value1")) .putParam("watch_id", new TextTemplate("_id")) - .body(new 
TextTemplate("_body")); + .body(new TextTemplate("_body")) + .method(HttpMethod.POST); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() @@ -68,7 +86,6 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase } assertWatchWithMinimumPerformedActionsCount("_id", 1, false); - assertThat(webServer.requests(), hasSize(1)); assertThat(webServer.requests().get(0).getUri().getQuery(), anyOf(equalTo("watch_id=_id&param1=value1"), equalTo("param1=value1&watch_id=_id"))); @@ -94,7 +111,8 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase .path(new TextTemplate("/test/_id")) .putParam("param1", new TextTemplate("value1")) .putParam("watch_id", new TextTemplate("_id")) - .body(new TextTemplate("_body")); + .body(new TextTemplate("_body")) + .method(HttpMethod.POST); watcherClient().preparePutWatch("_id") .setSource(watchBuilder() @@ -117,4 +135,31 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase assertThat(webServer.requests().get(0).getBody(), is("_body")); assertThat(webServer.requests().get(0).getHeader("Authorization"), is(("Basic X3VzZXJuYW1lOl9wYXNzd29yZA=="))); } + + public void testWebhookWithTimebasedIndex() throws Exception { + assertAcked(client().admin().indices().prepareCreate("<logstash-{now/d}>").get()); + + HttpServerTransport serverTransport = internalCluster().getDataNodeInstance(HttpServerTransport.class); + TransportAddress publishAddress = serverTransport.boundAddress().publishAddress(); + + String host = publishAddress.address().getHostString(); + HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder(host, publishAddress.getPort()) + .path(new TextTemplate("/%3Clogstash-%7Bnow%2Fd%7D%3E/log/1")) + .body(new TextTemplate("{\"foo\":\"bar\"}")) + .auth(new BasicAuth("test", "changeme".toCharArray())) + .method(HttpMethod.PUT); + + watcherClient().preparePutWatch("_id") + .setSource(watchBuilder() + .trigger(schedule(interval("5s"))) + .input(simpleInput("key",
"value")) + .condition(AlwaysCondition.INSTANCE) + .addAction("_id", ActionBuilders.webhookAction(builder))) + .get(); + + watcherClient().prepareExecuteWatch("_id").get(); + + GetResponse response = client().prepareGet("<logstash-{now/d}>", "log", "1").get(); + assertExists(response); + } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index 1523c5f2435..8ca8fd4cf4c 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.support.clock.ClockMock; @@ -798,6 +799,45 @@ public class ExecutionServiceTests extends ESTestCase { verify(ctx).abortBeforeExecution(eq(ExecutionState.NOT_EXECUTED_ALREADY_QUEUED), eq("Watch is already queued in thread pool")); } + public void testExecuteWatchNotFound() throws Exception { + Watch watch = mock(Watch.class); + when(watch.id()).thenReturn("_id"); + WatchExecutionContext ctx = mock(WatchExecutionContext.class); + when(ctx.knownWatch()).thenReturn(true); + when(ctx.watch()).thenReturn(watch); + + GetResponse getResponse = mock(GetResponse.class); + when(getResponse.isExists()).thenReturn(false); + boolean exceptionThrown = false; + if (randomBoolean()) { + when(client.getWatch("_id")).thenReturn(getResponse); + } else { + // this emulates any failure while getting the watch, while index not found is an accepted
issue + if (randomBoolean()) { + exceptionThrown = true; + ElasticsearchException e = new ElasticsearchException("something went wrong, i.e. index not found"); + when(client.getWatch("_id")).thenThrow(e); + WatchExecutionResult result = new WatchExecutionResult(ctx, randomInt(10)); + WatchRecord wr = new WatchRecord.ExceptionWatchRecord(ctx, result, e); + when(ctx.abortFailedExecution(eq(e))).thenReturn(wr); + } else { + when(client.getWatch("_id")).thenThrow(new IndexNotFoundException(".watch")); + } + } + + WatchRecord.MessageWatchRecord record = mock(WatchRecord.MessageWatchRecord.class); + when(record.state()).thenReturn(ExecutionState.NOT_EXECUTED_WATCH_MISSING); + when(ctx.abortBeforeExecution(eq(ExecutionState.NOT_EXECUTED_WATCH_MISSING), any())).thenReturn(record); + when(ctx.executionPhase()).thenReturn(ExecutionPhase.AWAITS_EXECUTION); + + WatchRecord watchRecord = executionService.execute(ctx); + if (exceptionThrown) { + assertThat(watchRecord.state(), is(ExecutionState.FAILED)); + } else { + assertThat(watchRecord.state(), is(ExecutionState.NOT_EXECUTED_WATCH_MISSING)); + } + } + private Tuple whenCondition(final WatchExecutionContext context) { Condition.Result conditionResult = mock(Condition.Result.class); when(conditionResult.met()).thenReturn(true); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 63cf9e34155..df260620a1d 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import 
org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.xpack.common.http.HttpMethod; import org.elasticsearch.xpack.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; @@ -66,6 +67,7 @@ public class HistoryTemplateHttpMappingsTests extends AbstractWatcherIntegration .condition(AlwaysCondition.INSTANCE) .addAction("_webhook", webhookAction(HttpRequestTemplate.builder("localhost", webServer.getPort()) .path("/webhook/path") + .method(HttpMethod.POST) .body("_body")))) .get(); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java index b49daba667e..b7ab542f5a0 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; @@ -23,7 +23,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.joda.time.DateTime; -import java.io.IOException; import java.time.Clock; import java.util.Arrays; import java.util.HashMap; @@ -73,15 +72,14 @@ public class WatcherUtilsTests extends ESTestCase { Map otherMap = new HashMap<>(); 
otherMap.putAll(expected); expected.put("key5", otherMap); - ToXContent content = new ToXContent() { - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - for (Map.Entry entry : expected.entrySet()) { - builder.field(entry.getKey()); - builder.value(entry.getValue()); - } - return builder; + ToXContentObject content = (builder, params) -> { + builder.startObject(); + for (Map.Entry entry : expected.entrySet()) { + builder.field(entry.getKey()); + builder.value(entry.getValue()); } + builder.endObject(); + return builder; }; Map result = WatcherUtils.responseToData(content); assertThat(result, equalTo(expected)); @@ -123,7 +121,7 @@ public class WatcherUtilsTests extends ESTestCase { request.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentParser parser = createParser(builder); assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(logger, parser, DEFAULT_SEARCH_TYPE, null, null); + WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, DEFAULT_SEARCH_TYPE); assertThat(result.getIndices(), arrayContainingInAnyOrder(expectedIndices != null ? expectedIndices : new String[0])); assertThat(result.getTypes(), arrayContainingInAnyOrder(expectedTypes != null ? 
expectedTypes : new String[0])); @@ -212,7 +210,7 @@ public class WatcherUtilsTests extends ESTestCase { XContentParser parser = createParser(builder); assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(logger, parser, DEFAULT_SEARCH_TYPE, null, null); + WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, DEFAULT_SEARCH_TYPE); assertThat(result.getIndices(), arrayContainingInAnyOrder(indices)); assertThat(result.getTypes(), arrayContainingInAnyOrder(types)); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java index 31960a5c63b..918b468ff41 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java @@ -31,9 +31,7 @@ public class WatcherSearchTemplateRequestTests extends ESTestCase { private void assertTemplate(String source, String expectedScript, String expectedLang, Map expectedParams) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { parser.nextToken(); - - WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent( - logger, parser, randomFrom(SearchType.values()), null, null); + WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())); assertNotNull(result.getTemplate()); assertThat(result.getTemplate().getIdOrCode(), equalTo(expectedScript)); assertThat(result.getTemplate().getLang(), equalTo(expectedLang)); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java 
b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index bb255f52e83..81f7d3ffdbd 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.MockMustacheScriptEngine; @@ -145,8 +144,7 @@ public class SearchInputTests extends ESIntegTestCase { XContentParser parser = createParser(builder); parser.nextToken(); - IndicesQueriesRegistry indicesQueryRegistry = internalCluster().getInstance(IndicesQueriesRegistry.class); - SearchRequestParsers searchParsers = new SearchRequestParsers(indicesQueryRegistry, null, null, null); + SearchRequestParsers searchParsers = new SearchRequestParsers(); SearchInputFactory factory = new SearchInputFactory(Settings.EMPTY, WatcherClientProxy.of(client()), searchParsers, xContentRegistry(), scriptService()); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java index e49cf6f834e..6503f0fd374 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java @@ -47,8 +47,6 @@ public class SlackServiceTests extends AbstractWatcherIntegrationTestCase { protected Settings nodeSettings(int 
nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - - // this is for the `test-watcher-integration` group level integration in HipChat .put("xpack.notification.slack.account.test_account.url", "https://hooks.slack.com/services/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") .build(); @@ -81,7 +79,7 @@ public class SlackServiceTests extends AbstractWatcherIntegrationTestCase { public void testWatchWithSlackAction() throws Exception { String account = "test_account"; SlackAction.Builder actionBuilder = slackAction(account, SlackMessage.Template.builder() - .setText("slack integration test` " + DateTime.now()) + .setText("slack integration test `testWatchWithSlackAction()` " + DateTime.now()) .addTo("#watcher-test", "#watcher-test-2")); PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("1").setSource(watchBuilder() diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java index 8dfa39838e8..94098cf7355 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java @@ -5,8 +5,7 @@ */ package org.elasticsearch.xpack.watcher.transport.action.get; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -25,7 +24,6 @@ import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule; import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval; import 
static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -62,7 +60,10 @@ public class GetWatchTests extends AbstractWatcherIntegrationTestCase { } public void testGetNotFound() throws Exception { - assertAcked(client().admin().indices().prepareCreate(Watch.INDEX)); + GetAliasesResponse aliasesResponse = client().admin().indices().prepareGetAliases(Watch.INDEX).get(); + if (aliasesResponse.getAliases().isEmpty()) { + assertAcked(client().admin().indices().prepareCreate(Watch.INDEX)); + } GetWatchResponse getResponse = watcherClient().getWatch(new GetWatchRequest("_name")).get(); assertThat(getResponse, notNullValue()); assertThat(getResponse.getId(), is("_name")); diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchTests.java index 2cb5b78ce46..ddc9c6c2220 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.transport.action.put; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -45,15 +46,12 @@ public class PutWatchTests extends AbstractWatcherIntegrationTestCase { public void testPutNoTrigger() throws Exception { ensureWatcherStarted(); - try { - watcherClient().preparePutWatch("_name").setSource(watchBuilder() - .input(simpleInput()) - .condition(AlwaysCondition.INSTANCE) - 
.addAction("_action1", loggingAction("{{ctx.watch_id}}"))) - .get(); - fail("Expected IllegalStateException"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), is("failed to build watch source. no trigger defined")); - } + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> watcherClient().preparePutWatch("_name").setSource(watchBuilder() + .input(simpleInput()) + .condition(AlwaysCondition.INSTANCE) + .addAction("_action1", loggingAction("{{ctx.watch_id}}"))) + .get()); + assertEquals("Failed to build ToXContent", exception.getMessage()); } } diff --git a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index c4284cfd9a2..415f85123e2 100644 --- a/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/elasticsearch/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -8,16 +8,18 @@ package org.elasticsearch.xpack.watcher.watch; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.ScriptQueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import 
org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -286,12 +288,7 @@ public class WatchTests extends ESTestCase { ActionRegistry actionRegistry = registry(Collections.emptyList(), conditionRegistry, transformRegistry); Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); - IndicesQueriesRegistry queryRegistry = new IndicesQueriesRegistry(); - QueryParser queryParser1 = MatchAllQueryBuilder::fromXContent; - queryRegistry.register(queryParser1, MatchAllQueryBuilder.NAME); - QueryParser queryParser2 = ScriptQueryBuilder::fromXContent; - queryRegistry.register(queryParser2, ScriptQueryBuilder.NAME); - SearchRequestParsers searchParsers = new SearchRequestParsers(queryRegistry, null, null, null); + SearchRequestParsers searchParsers = new SearchRequestParsers(); WatcherSearchTemplateService searchTemplateService = new WatcherSearchTemplateService(settings, scriptService, searchParsers, xContentRegistry()); @@ -412,12 +409,7 @@ public class WatchTests extends ESTestCase { Map parsers = new HashMap<>(); switch (inputType) { case SearchInput.TYPE: - IndicesQueriesRegistry queryRegistry = new IndicesQueriesRegistry(); - QueryParser queryParser1 = MatchAllQueryBuilder::fromXContent; - queryRegistry.register(queryParser1, MatchAllQueryBuilder.NAME); - QueryParser queryParser2 = ScriptQueryBuilder::fromXContent; - queryRegistry.register(queryParser2, ScriptQueryBuilder.NAME); - SearchRequestParsers searchParsers = new SearchRequestParsers(queryRegistry, null, null, null); + SearchRequestParsers searchParsers = new SearchRequestParsers(); parsers.put(SearchInput.TYPE, new SearchInputFactory(settings, client, searchParsers, xContentRegistry(), scriptService)); return new InputRegistry(Settings.EMPTY, parsers); default: @@ -465,10 +457,7 @@ public class WatchTests extends ESTestCase { } private TransformRegistry 
transformRegistry() { - IndicesQueriesRegistry queryRegistry = new IndicesQueriesRegistry(); - QueryParser queryParser = MatchAllQueryBuilder::fromXContent; - queryRegistry.register(queryParser, MatchAllQueryBuilder.NAME); - SearchRequestParsers searchParsers = new SearchRequestParsers(queryRegistry, null, null, null); + SearchRequestParsers searchParsers = new SearchRequestParsers(); Map factories = new HashMap<>(); factories.put(ScriptTransform.TYPE, new ScriptTransformFactory(settings, scriptService)); factories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, client, searchParsers, xContentRegistry(), scriptService)); @@ -530,6 +519,16 @@ public class WatchTests extends ESTestCase { licenseState); } + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(Arrays.asList( + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> + MatchAllQueryBuilder.fromXContent((QueryParseContext) c)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(ScriptQueryBuilder.NAME), (p, c) -> + ScriptQueryBuilder.fromXContent((QueryParseContext) c)) + )); + } + static class ParseOnlyScheduleTriggerEngine extends ScheduleTriggerEngine { public ParseOnlyScheduleTriggerEngine(Settings settings, ScheduleRegistry registry, Clock clock) { diff --git a/elasticsearch/src/test/resources/org/elasticsearch/xpack/security/authz/store/reserved_roles.yml b/elasticsearch/src/test/resources/org/elasticsearch/xpack/security/authz/store/reserved_roles.yml index 30735f7d8ec..cfc68d56260 100644 --- a/elasticsearch/src/test/resources/org/elasticsearch/xpack/security/authz/store/reserved_roles.yml +++ b/elasticsearch/src/test/resources/org/elasticsearch/xpack/security/authz/store/reserved_roles.yml @@ -23,7 +23,7 @@ superuser: run_as: - '*' -kibana: +kibana_system: cluster: - all diff --git a/qa/audit-tests/build.gradle b/qa/audit-tests/build.gradle index e0d5a6252fa..e8a926fce8a 
100644 --- a/qa/audit-tests/build.gradle +++ b/qa/audit-tests/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -32,4 +33,3 @@ integTest { } } } - diff --git a/qa/core-rest-tests-with-security/build.gradle b/qa/core-rest-tests-with-security/build.gradle index a3e54ecf7ce..41f8194076b 100644 --- a/qa/core-rest-tests-with-security/build.gradle +++ b/qa/core-rest-tests-with-security/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { diff --git a/qa/core-rest-tests-with-security/src/test/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/qa/core-rest-tests-with-security/src/test/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index 4e15dc206cb..bebd2d11bea 100644 --- a/qa/core-rest-tests-with-security/src/test/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ b/qa/core-rest-tests-with-security/src/test/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -29,7 +28,7 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/reindex-tests-with-security/build.gradle 
b/qa/reindex-tests-with-security/build.gradle index 1a16201626a..6a3272fd225 100644 --- a/qa/reindex-tests-with-security/build.gradle +++ b/qa/reindex-tests-with-security/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { diff --git a/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityClientYamlTestSuiteIT.java b/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityClientYamlTestSuiteIT.java index 3809deb4f0d..2cad5ac0de1 100644 --- a/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityClientYamlTestSuiteIT.java +++ b/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -28,7 +27,7 @@ public class ReindexWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteT } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index b153a5e0f56..e676b574988 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -52,12 +52,12 @@ task oldClusterTest(type: RestIntegTestTask) { cluster { plugin ':x-pack:elasticsearch' distribution = 'zip' - bwcVersion = '6.0.0-alpha1-SNAPSHOT' // TODO: 
either randomize, or make this settable with sysprop + bwcVersion = '5.3.0-SNAPSHOT' // TODO: either randomize, or make this settable with sysprop numBwcNodes = 2 numNodes = 2 clusterName = 'rolling-upgrade' waitCondition = waitWithAuth - systemProperty 'es.logger.org.elasticsearch.xpack.security', 'TRACE' + setting 'logger.org.elasticsearch.xpack.security', 'TRACE' } systemProperty 'tests.rest.suite', 'old_cluster' } @@ -105,7 +105,15 @@ dependencies { // copy x-pack plugin info so it is on the classpath and security manager has the right permissions String outputDir = "generated-resources/${project.name}" +task copyXPackRestSpec(type: Copy) { + dependsOn(project.configurations.restSpec, 'processTestResources') + from project(':x-pack:elasticsearch').sourceSets.test.resources + include 'rest-api-spec/api/**' + into project.sourceSets.test.output.resourcesDir +} + task copyXPackPluginProps(type: Copy) { + dependsOn(copyXPackRestSpec) from project(':x-pack:elasticsearch').file('src/main/plugin-metadata') from project(':x-pack:elasticsearch').tasks.pluginProperties into outputDir diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 543f818c73f..2a88c112baf 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -7,12 +7,12 @@ package org.elasticsearch.upgrades; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + import org.apache.lucene.util.TimeUnits; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -31,7 +31,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return createParameters(); } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_kibana_write.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_kibana_write.yml new file mode 100644 index 00000000000..d3908f3fa46 --- /dev/null +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_kibana_write.yml @@ -0,0 +1,52 @@ +--- +"Verify kibana user role works in mixed cluster": + - do: + headers: + Authorization: "Basic a2liYW5hOmNoYW5nZW1l" + cluster.health: + wait_for_status: yellow + wait_for_nodes: 2 + timeout: 25s + - match: { timed_out: false } + + - do: + headers: + Authorization: "Basic a2liYW5hOmNoYW5nZW1l" + indices.create: + index: .kibana-foo + wait_for_active_shards : all + body: + settings: + index: + number_of_replicas: 1 + + - do: + headers: + Authorization: "Basic a2liYW5hOmNoYW5nZW1l" + bulk: + refresh: true + body: + - '{"index": {"_index": ".kibana-foo", "_type": "test_type"}}' + - '{"f1": "v1_old", "f2": 0}' + - '{"index": {"_index": ".kibana-foo", "_type": "test_type"}}' + - '{"f1": "v2_old", "f2": 1}' + - '{"index": {"_index": ".kibana-foo", "_type": "test_type"}}' + - '{"f1": "v3_old", "f2": 2}' + - '{"index": {"_index": ".kibana-foo", "_type": "test_type"}}' + - '{"f1": "v4_old", "f2": 3}' + - '{"index": {"_index": ".kibana-foo", "_type": "test_type"}}' + - '{"f1": "v5_old", "f2": 4}' + + - do: + headers: + Authorization: "Basic a2liYW5hOmNoYW5nZW1l" + indices.flush: + index: .kibana-foo + + - 
do: + headers: + Authorization: "Basic a2liYW5hOmNoYW5nZW1l" + search: + index: .kibana-foo + + - match: { hits.total: 5 } diff --git a/qa/security-client-tests/build.gradle b/qa/security-client-tests/build.gradle index a8acb65e7cd..65fab67318f 100644 --- a/qa/security-client-tests/build.gradle +++ b/qa/security-client-tests/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -14,6 +15,7 @@ task copyXPackPluginProps(type: Copy) { project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) integTest { + systemProperty 'tests.security.manager', 'false' cluster { plugin ':x-pack:elasticsearch' setupCommand 'setupDummyUser', @@ -32,4 +34,3 @@ integTest { } } } - diff --git a/qa/security-example-realm/build.gradle b/qa/security-example-realm/build.gradle index d131c9548ed..f1722472016 100644 --- a/qa/security-example-realm/build.gradle +++ b/qa/security-example-realm/build.gradle @@ -44,6 +44,7 @@ task buildZip(type:Zip, dependsOn: [jar]) { } task integTest(type: org.elasticsearch.gradle.test.RestIntegTestTask, dependsOn: buildZip) { + systemProperty 'tests.security.manager', 'false' cluster { plugin ':x-pack:elasticsearch' setting 'xpack.security.authc.realms.custom.order', '0' diff --git a/qa/security-migrate-tests/build.gradle b/qa/security-migrate-tests/build.gradle index 20e6f3b72d0..a9055c35dc0 100644 --- a/qa/security-migrate-tests/build.gradle +++ b/qa/security-migrate-tests/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { diff --git a/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java b/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java index 1a697a55356..6e209f175d6 100644 --- a/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java +++ 
b/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.security.action.user.PutUserResponse; import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.user.User; import org.junit.Before; @@ -92,17 +93,19 @@ public class MigrateToolIT extends MigrateToolTestCase { RoleDescriptor.IndicesPrivileges[] ips = rd.getIndicesPrivileges(); assertEquals(ips.length, 2); for (RoleDescriptor.IndicesPrivileges ip : ips) { + final FieldPermissions fieldPermissions = new FieldPermissions(ip.getGrantedFields(), ip.getDeniedFields()); if (Arrays.equals(ip.getIndices(), new String[]{"index1", "index2"})) { assertArrayEquals(ip.getPrivileges(), new String[]{"read", "write", "create_index", "indices:admin/refresh"}); - assertTrue(ip.getFieldPermissions().hasFieldLevelSecurity()); - assertTrue(ip.getFieldPermissions().grantsAccessTo("bar")); - assertTrue(ip.getFieldPermissions().grantsAccessTo("foo")); + assertTrue(fieldPermissions.hasFieldLevelSecurity()); + assertTrue(fieldPermissions.grantsAccessTo("bar")); + assertTrue(fieldPermissions.grantsAccessTo("foo")); assertNotNull(ip.getQuery()); - assertThat(ip.getQuery().utf8ToString(), containsString("{\"bool\":{\"must_not\":{\"match\":{\"hidden\":true}}}}")); + assertThat(ip.getQuery().iterator().next().utf8ToString(), + containsString("{\"bool\":{\"must_not\":{\"match\":{\"hidden\":true}}}}")); } else { assertArrayEquals(ip.getIndices(), new String[]{"*"}); assertArrayEquals(ip.getPrivileges(), new String[]{"read"}); - assertFalse(ip.getFieldPermissions().hasFieldLevelSecurity()); + 
assertFalse(fieldPermissions.hasFieldLevelSecurity()); assertNull(ip.getQuery()); } } diff --git a/qa/smoke-test-graph-with-security/build.gradle b/qa/smoke-test-graph-with-security/build.gradle index 2e89a95aebe..86924970a3a 100644 --- a/qa/smoke-test-graph-with-security/build.gradle +++ b/qa/smoke-test-graph-with-security/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -35,4 +36,3 @@ integTest { } } } - diff --git a/qa/smoke-test-graph-with-security/src/test/java/org/elasticsearch/smoketest/GraphWithSecurityIT.java b/qa/smoke-test-graph-with-security/src/test/java/org/elasticsearch/smoketest/GraphWithSecurityIT.java index 675eed7f99e..082ba9b9b86 100644 --- a/qa/smoke-test-graph-with-security/src/test/java/org/elasticsearch/smoketest/GraphWithSecurityIT.java +++ b/qa/smoke-test-graph-with-security/src/test/java/org/elasticsearch/smoketest/GraphWithSecurityIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -30,7 +29,7 @@ public class GraphWithSecurityIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-monitoring-with-security/build.gradle b/qa/smoke-test-monitoring-with-security/build.gradle index 383d99dff2d..06c29842d20 100644 --- a/qa/smoke-test-monitoring-with-security/build.gradle +++ b/qa/smoke-test-monitoring-with-security/build.gradle @@ -1,4 
+1,5 @@ subprojects { + apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -47,4 +48,3 @@ subprojects { task integTest { dependsOn subprojects.integTest } - diff --git a/qa/smoke-test-monitoring-with-security/insufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java b/qa/smoke-test-monitoring-with-security/insufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java index ad1275b8074..ba8cd0790b5 100644 --- a/qa/smoke-test-monitoring-with-security/insufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java +++ b/qa/smoke-test-monitoring-with-security/insufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -27,7 +26,7 @@ public class SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT extends ESClie } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-monitoring-with-security/sufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java 
b/qa/smoke-test-monitoring-with-security/sufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java index 5a9c3b23efa..2024f9d1da2 100644 --- a/qa/smoke-test-monitoring-with-security/sufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java +++ b/qa/smoke-test-monitoring-with-security/sufficient-rights/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -26,7 +25,7 @@ public class SmokeTestMonitoringWithSecurityClientYamlTestSuiteIT extends ESClie } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-plugins-ssl/build.gradle b/qa/smoke-test-plugins-ssl/build.gradle index 73d4af8b98c..5ea4c29bd28 100644 --- a/qa/smoke-test-plugins-ssl/build.gradle +++ b/qa/smoke-test-plugins-ssl/build.gradle @@ -10,6 +10,7 @@ import java.nio.charset.StandardCharsets import java.security.KeyStore import java.security.SecureRandom +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -398,4 +399,3 @@ class SanEvaluator { return buf.toString(); } } - diff --git a/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java 
b/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java index ef9ed153ea3..2928478db57 100644 --- a/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java +++ b/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -38,7 +37,7 @@ public class SmokeTestPluginsSslClientYamlTestSuiteIT extends ESClientYamlSuiteT } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index b1531324ad2..3e05c670133 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.MavenFilteringHack +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -30,7 +31,7 @@ integTest { return tmpFile.exists() } } -} +} ext.expansions = [ 'expected.plugins.count': pluginsCount @@ -40,4 +41,3 @@ processTestResources { inputs.properties(expansions) MavenFilteringHack.filter(it, expansions) } - diff --git a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/XSmokeTestPluginsClientYamlTestSuiteIT.java 
b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/XSmokeTestPluginsClientYamlTestSuiteIT.java index 371fae7f414..7e71130eda9 100644 --- a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/XSmokeTestPluginsClientYamlTestSuiteIT.java +++ b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/XSmokeTestPluginsClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -29,7 +28,7 @@ public class XSmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTes } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-security-with-mustache/build.gradle b/qa/smoke-test-security-with-mustache/build.gradle index 908d2778496..75d61fdb178 100644 --- a/qa/smoke-test-security-with-mustache/build.gradle +++ b/qa/smoke-test-security-with-mustache/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { diff --git a/qa/smoke-test-security-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestSecurityWithMustacheClientYamlTestSuiteIT.java b/qa/smoke-test-security-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestSecurityWithMustacheClientYamlTestSuiteIT.java index 78ff4940035..ac353dc852d 100644 --- a/qa/smoke-test-security-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestSecurityWithMustacheClientYamlTestSuiteIT.java +++ 
b/qa/smoke-test-security-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestSecurityWithMustacheClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import java.io.IOException; @@ -28,7 +27,7 @@ public class SmokeTestSecurityWithMustacheClientYamlTestSuiteIT extends ESClient } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-watcher-with-mustache/build.gradle b/qa/smoke-test-watcher-with-mustache/build.gradle index 9fbaa5da129..c552350c462 100644 --- a/qa/smoke-test-watcher-with-mustache/build.gradle +++ b/qa/smoke-test-watcher-with-mustache/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -6,10 +7,6 @@ dependencies { } integTest { - // JIRA integration tests are ignored until a JIRA server is available for testing - // see https://github.com/elastic/infra/issues/1498 - systemProperty 'tests.rest.blacklist', 'actions/20_jira/*' - cluster { plugin ':x-pack:elasticsearch' setting 'xpack.security.enabled', 'false' @@ -19,14 +16,5 @@ integTest { setting 'script.stored', 'true' // Need to allow more compilations per minute because of the integration tests setting 'script.max_compilations_per_minute', '100' - - // JIRA integration test settings - setting 'xpack.notification.jira.account.test.url', 'http://localhost:8080' - setting 'xpack.notification.jira.account.test.allow_http', 'true' - 
setting 'xpack.notification.jira.account.test.user', 'jira_user' - setting 'xpack.notification.jira.account.test.password', 'secret' - setting 'xpack.notification.jira.account.test.issue_defaults.project.key', 'BAS' - setting 'xpack.notification.jira.account.test.issue_defaults.labels', ['integration-tests'] - } } diff --git a/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase.java b/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase.java index 1f01b4dfb4b..4ad4444670e 100644 --- a/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase.java +++ b/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.junit.After; import org.junit.Before; @@ -26,7 +25,7 @@ public abstract class SmokeTestWatchesWithMustacheClientYamlTestSuiteTestCase ex } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java b/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java index b2a8163f78c..c1e6bd0f152 100644 --- a/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java +++ 
b/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; @@ -22,7 +21,7 @@ public class WatcherWithMustacheIT extends SmokeTestWatchesWithMustacheClientYam } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/50_webhook_url_escaping.yaml b/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/50_webhook_url_escaping.yaml new file mode 100644 index 00000000000..0505cfe9f44 --- /dev/null +++ b/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/50_webhook_url_escaping.yaml @@ -0,0 +1,77 @@ +--- +"Test url escaping with url mustache function": + - do: + cluster.health: + wait_for_status: yellow + + - do: + index: + index: + type: log + id: 1 + refresh: true + body: { foo: bar } + + - do: {xpack.watcher.stats:{}} + - match: { "watcher_state": "started" } + - match: { "watch_count": 0 } + + - do: + xpack.watcher.put_watch: + id: "test_watch" + body: > + { + "metadata" : { + "index" : "" + }, + "trigger": { + "schedule": { + "interval": "1h" + } + }, + "input": { + "http" : { + "request" : { + "host" : "localhost", + "port" : 9400, + "path" : "/{{#url}}{{ctx.metadata.index}}{{/url}}/_search" + } + } + }, + "condition" : { + "compare" : { + "ctx.payload.hits.total" : { + "eq" : 1 + } + } + }, + "actions": { + "output": { + "webhook" : { + "method" : "PUT", 
+ "host" : "localhost", + "port" : 9400, + "path" : "/{{#url}}{{ctx.metadata.index}}{{/url}}/log/2", + "params" : { + "refresh" : "true" + }, + "body" : "{ \"foo\": \"bar\" }" + } + } + } + } + + - match: { _id: "test_watch" } + - match: { created: true } + + - do: + xpack.watcher.execute_watch: + id: "test_watch" + + - do: + count: + index: + type: log + + - match: {count : 2} + diff --git a/qa/smoke-test-watcher-with-painless/build.gradle b/qa/smoke-test-watcher-with-painless/build.gradle index 887017f7f80..55daf8585ab 100644 --- a/qa/smoke-test-watcher-with-painless/build.gradle +++ b/qa/smoke-test-watcher-with-painless/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { diff --git a/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherRestTestCase.java b/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherRestTestCase.java index de8f23fb65d..c3f1214cbd8 100644 --- a/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherRestTestCase.java +++ b/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherRestTestCase.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.junit.After; import org.junit.Before; @@ -26,7 +25,7 @@ public abstract class WatcherRestTestCase extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-watcher-with-security/build.gradle 
b/qa/smoke-test-watcher-with-security/build.gradle index 44b738867d2..4a76f449800 100644 --- a/qa/smoke-test-watcher-with-security/build.gradle +++ b/qa/smoke-test-watcher-with-security/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -38,4 +39,3 @@ integTest { } } } - diff --git a/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java b/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java index a8888246596..4990d57dd7d 100644 --- a/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java +++ b/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.junit.After; import org.junit.Before; @@ -34,7 +33,7 @@ public class SmokeTestWatcherWithSecurityClientYamlTestSuiteIT extends ESClientY } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-watcher/build.gradle b/qa/smoke-test-watcher/build.gradle index 524b066f72f..e40ff95d44f 100644 --- a/qa/smoke-test-watcher/build.gradle +++ b/qa/smoke-test-watcher/build.gradle @@ -1,14 +1,92 @@ +import groovy.json.JsonSlurper + 
+import javax.net.ssl.HttpsURLConnection +import java.nio.charset.StandardCharsets + +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: ':x-pack:elasticsearch', configuration: 'runtime') } +ext { + jiraUrl = 'https://elasticsearch.atlassian.net/' + jiraUser = 'xpack-user@elastic.co' + jiraPassword = 'N9M4ea9rfy' + jiraProject = 'XWT' +} + integTest { cluster { plugin ':x-pack:elasticsearch' setting 'xpack.security.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'http.port', '9400' + // Need to allow more compilations per minute because of the integration tests + setting 'script.max_compilations_per_minute', '100' + + // + // JIRA integration test settings + // + // The integration tests use a JIRA account on elasticsearch.atlassian.net. This account + // has been created by Edward Sy [edward@elastic.co]. It uses the "XPACK WATCHER TEST" + // Jira project available at https://elasticsearch.atlassian.net/projects/XWT/issues/?filter=allopenissues + // and the "xpack-user@elastic.co" username which is also an internal Google Group. 
+ setting 'xpack.notification.jira.account.test.url', jiraUrl + setting 'xpack.notification.jira.account.test.user', jiraUser + setting 'xpack.notification.jira.account.test.password', jiraPassword + setting 'xpack.notification.jira.account.test.issue_defaults.project.key', jiraProject + setting 'xpack.notification.jira.account.test.issue_defaults.labels', ['integration-tests', project.version] } } + +/** Clean up JIRA after tests: delete all created issues **/ +task cleanJira(type: DefaultTask) { + doLast { + def issues = jiraIssues(jiraProject) + assert issues instanceof List + issues.forEach { + // See https://docs.atlassian.com/jira/REST/cloud/#api/2/issue-deleteIssue + logger.debug("Deleting JIRA issue [${it}]") + jiraHttpRequest("issue/${it}", "DELETE", 204) + } + } +} +integTest.finalizedBy cleanJira + +/** List all issues associated with a given Jira project **/ +def jiraIssues(String projectKey) { + // See https://docs.atlassian.com/jira/REST/cloud/#api/2/search-search + def response = jiraHttpRequest("search?maxResults=100&fields=id,self,key&jql=project%3D${projectKey}", "GET", 200) + assert response.issues instanceof List + return response.issues.findAll {it.key.startsWith(projectKey)}.collect {it.key} +} + +/** Execute an HTTP request against the Jira server instance **/ +def jiraHttpRequest(String endpoint, String method, int successCode) { + HttpsURLConnection connection = null; + try { + byte[] credentials = "${jiraUser}:${jiraPassword}".getBytes(StandardCharsets.UTF_8); + connection = (HttpsURLConnection) new URL("${jiraUrl}/rest/api/2/${endpoint}").openConnection(); + connection.setRequestProperty("Authorization", "Basic " + Base64.getEncoder().encodeToString(credentials)); + connection.setRequestMethod(method); + connection.connect(); + + if (connection.getResponseCode() == successCode) { + String response = connection.getInputStream().getText(StandardCharsets.UTF_8.name()); + if (response != null && response.length() > 0) { + return new
JsonSlurper().parseText(response) + } + } else { + throw new GradleException("Unexpected response code for [${endpoint}]: got ${connection.getResponseCode()} but expected ${successCode}") + } + } catch (Exception e) { + logger.error("Failed to delete JIRA issues after test execution", e) + } finally { + if (connection != null) { + connection.disconnect(); + } + } + return null +} diff --git a/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/PreventFailingBuildIT.java b/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/PreventFailingBuildIT.java new file mode 100644 index 00000000000..e41b5a7dad4 --- /dev/null +++ b/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/PreventFailingBuildIT.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.smoketest; + +import org.elasticsearch.test.ESIntegTestCase; + +public class PreventFailingBuildIT extends ESIntegTestCase { + + public void testSoThatTestsDoNotFail() { + // Noop + + // This is required because SmokeTestWatcherClientYamlTestSuiteIT + // requires network access, so if network tests are not enabled no + // tests will be run in the entire project and all tests will fail.
+ } +} diff --git a/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java b/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java index 7f99118078f..2f375eb0240 100644 --- a/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java +++ b/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.junit.After; import org.junit.Before; @@ -20,6 +20,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; /** Runs rest tests against external cluster */ +@Network // Needed to access an external Jira server public class SmokeTestWatcherClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public SmokeTestWatcherClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -27,7 +28,7 @@ public class SmokeTestWatcherClientYamlTestSuiteIT extends ESClientYamlSuiteTest } @ParametersFactory - public static Iterable parameters() throws IOException, ClientYamlTestParseException { + public static Iterable parameters() throws IOException { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/actions/20_jira.yaml b/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yaml similarity index 96% rename from
qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/actions/20_jira.yaml rename to qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yaml index 1fe3118836a..2b602c934cb 100644 --- a/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/actions/20_jira.yaml +++ b/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yaml @@ -14,7 +14,7 @@ }, "trigger": { "schedule": { - "interval": "1s" + "interval": "1d" } }, "input": { @@ -30,7 +30,7 @@ "account": "test", "fields": { "summary": "{{ctx.metadata.custom_title}} {{ctx.watch_id}}", - "description": "Issue created by the REST integration test [/watcher/actions/10_jira.yaml]", + "description": "Issue created by the REST integration test [/watcher/actions/20_jira.yaml]", "issuetype" : { "name": "Bug" } @@ -85,10 +85,10 @@ - match: { hits.hits.0._source.result.actions.0.jira.account: "test" } - match: { hits.hits.0._source.result.actions.0.jira.fields.summary: "Hello from jira_watch" } - match: { hits.hits.0._source.result.actions.0.jira.fields.issuetype.name: "Bug" } - - match: { hits.hits.0._source.result.actions.0.jira.fields.project.key: "BAS" } + - match: { hits.hits.0._source.result.actions.0.jira.fields.project.key: "XWT" } - match: { hits.hits.0._source.result.actions.0.jira.fields.labels.0: "integration-tests" } - match: { hits.hits.0._source.result.actions.0.jira.result.id: /\d+/ } - - match: { hits.hits.0._source.result.actions.0.jira.result.key: /BAS-\d+/ } + - match: { hits.hits.0._source.result.actions.0.jira.result.key: /XWT-\d+/ } - match: { hits.hits.0._source.result.actions.0.jira.result.self: /http(.)*/ } - set: { hits.hits.0._id: id } @@ -100,7 +100,7 @@ body: query: match: - result.actions.jira.fields.project.key: "BAS" + result.actions.jira.fields.project.key: "XWT" - match: { hits.total: 1 } - match: { hits.hits.0._id: $id } - match: { hits.hits.0._source.result.actions.0.jira.result.self: $self } @@ -205,12 
+205,12 @@ - match: { hits.hits.0._source.result.actions.0.jira.account: "test" } - match: { hits.hits.0._source.result.actions.0.jira.fields.summary: "Hello from wrong_jira_watch" } - is_false: hits.hits.0._source.result.actions.0.jira.fields.issuetype.name - - match: { hits.hits.0._source.result.actions.0.jira.fields.project.key: "BAS" } + - match: { hits.hits.0._source.result.actions.0.jira.fields.project.key: "XWT" } - match: { hits.hits.0._source.result.actions.0.jira.fields.labels.0: "integration-tests" } - match: { hits.hits.0._source.result.actions.0.jira.reason: "Bad Request - Field [issuetype] has error [issue type is required]\n" } - match: { hits.hits.0._source.result.actions.0.jira.request.method: "post" } - match: { hits.hits.0._source.result.actions.0.jira.request.path: "/rest/api/2/issue" } - - match: { hits.hits.0._source.result.actions.0.jira.request.auth.basic.username: "jira_user" } + - match: { hits.hits.0._source.result.actions.0.jira.request.auth.basic.username: "xpack-user@elastic.co" } - is_false: hits.hits.0._source.result.actions.0.jira.request.auth.basic.password - match: { hits.hits.0._source.result.actions.0.jira.response.body: "{\"errorMessages\":[],\"errors\":{\"issuetype\":\"issue type is required\"}}" } @@ -228,7 +228,7 @@ { "trigger": { "schedule": { - "interval": "1s" + "interval": "1d" } }, "input": { @@ -244,7 +244,7 @@ "account": "test", "fields": { "summary": "Jira watch with custom field of string type", - "description": "Issue created by the REST integration test [/watcher/actions/10_jira.yaml]", + "description": "Issue created by the REST integration test [/watcher/actions/20_jira.yaml]", "issuetype" : { "name": "Bug" }, @@ -279,7 +279,7 @@ { "trigger": { "schedule": { - "interval": "1s" + "interval": "1d" } }, "input": { @@ -295,7 +295,7 @@ "account": "test", "fields": { "summary": "Jira watch with custom field of object (Jira's CascadingSelectField) type", - "description": "Issue created by the REST integration test 
[/watcher/actions/10_jira.yaml]", + "description": "Issue created by the REST integration test [/watcher/actions/20_jira.yaml]", "issuetype" : { "name": "Bug" }, diff --git a/qa/tribe-tests-with-license/build.gradle b/qa/tribe-tests-with-license/build.gradle index e1c680a2f66..f51e45539bd 100644 --- a/qa/tribe-tests-with-license/build.gradle +++ b/qa/tribe-tests-with-license/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.test.ClusterConfiguration import org.elasticsearch.gradle.test.ClusterFormationTasks import org.elasticsearch.gradle.test.NodeInfo +apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { @@ -79,4 +80,4 @@ integTest { // need to kill the standalone nodes here finalizedBy 'setupClusterOne#stop' finalizedBy 'setupClusterTwo#stop' -} \ No newline at end of file +}