Merge branch 'master' into license-checking/reporting-ux

Original commit: elastic/x-pack-elasticsearch@545fbfc478
Shaunak Kashyap 2016-07-11 16:36:48 -07:00
commit 4e7fc5b625
195 changed files with 3172 additions and 4024 deletions


@@ -7,12 +7,15 @@ package org.elasticsearch.license.core;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefIterator;
+import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.security.InvalidKeyException;
 import java.security.NoSuchAlgorithmException;
@@ -72,4 +75,16 @@ public class LicenseVerifier {
             }
         }
     }
+
+    public static boolean verifyLicense(final License license) {
+        final byte[] publicKeyBytes;
+        try (InputStream is = LicenseVerifier.class.getResourceAsStream("/public.key")) {
+            ByteArrayOutputStream out = new ByteArrayOutputStream();
+            Streams.copy(is, out);
+            publicKeyBytes = out.toByteArray();
+        } catch (IOException ex) {
+            throw new IllegalStateException(ex);
+        }
+        return verifyLicense(license, publicKeyBytes);
+    }
 }
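
The new overload above bundles the usual "load a classpath resource into a byte array" step before delegating to the existing verifyLicense(license, publicKeyBytes) method. As a rough, self-contained illustration of that resource-loading pattern using only JDK classes (the class and method names below are hypothetical and not part of this commit; Streams.copy in the diff performs the same copy loop):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class ResourceToBytesDemo {

    // Reads a classpath resource fully into memory, the same pattern verifyLicense(License) uses for /public.key.
    static byte[] readResource(Class<?> owner, String path) {
        try (InputStream is = owner.getResourceAsStream(path)) {
            if (is == null) {
                throw new IllegalStateException("resource not found: " + path);
            }
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[4096];
            for (int read = is.read(buffer); read != -1; read = is.read(buffer)) {
                out.write(buffer, 0, read);
            }
            return out.toByteArray();
        } catch (IOException ex) {
            throw new IllegalStateException(ex);   // same wrapping as the helper in the diff
        }
    }

    public static void main(String[] args) {
        // The demo reads its own compiled .class file, which is always present on the classpath.
        byte[] bytes = readResource(ResourceToBytesDemo.class, "ResourceToBytesDemo.class");
        System.out.println(bytes.length + " bytes read");
    }
}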


@@ -36,8 +36,9 @@ public class IndexAuditIT extends ESIntegTestCase {
     private static final String USER = "test_user";
     private static final String PASS = "changeme";
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/x-plugins/issues/2354")
     public void testShieldIndexAuditTrailWorking() throws Exception {
-        try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null,
+        try (Response response = getRestClient().performRequest("GET", "/",
                 new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER,
                         UsernamePasswordToken.basicAuthHeaderValue(USER, new SecuredString(PASS.toCharArray()))))) {
             assertThat(response.getStatusLine().getStatusCode(), is(200));


@@ -47,7 +47,7 @@ public class CustomRealmIT extends ESIntegTestCase {
     public void testHttpConnectionWithNoAuthentication() throws Exception {
         try {
-            getRestClient().performRequest("GET", "/", Collections.emptyMap(), null);
+            getRestClient().performRequest("GET", "/");
             fail("request should have failed");
         } catch(ResponseException e) {
             Response response = e.getResponse();
@@ -58,7 +58,7 @@ public class CustomRealmIT extends ESIntegTestCase {
     }
 
     public void testHttpAuthentication() throws Exception {
-        try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null,
+        try (Response response = getRestClient().performRequest("GET", "/",
                 new BasicHeader(CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER),
                 new BasicHeader(CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW))) {
             assertThat(response.getStatusLine().getStatusCode(), is(200));


@@ -18,7 +18,7 @@ subprojects {
   cluster {
     systemProperty 'es.logger.level', 'TRACE'
     plugin 'x-pack', project(':x-plugins:elasticsearch:x-pack')
-    setting 'xpack.monitoring.agent.interval', '3s'
+    setting 'xpack.monitoring.collection.interval', '3s'
     extraConfigFile 'x-pack/roles.yml', '../roles.yml'
     setupCommand 'setupTestAdminUser',
                  'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'changeme', '-r', 'superuser'


@@ -153,13 +153,13 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each
 integTest {
   cluster {
-    setting 'xpack.monitoring.agent.interval', '3s'
-    setting 'xpack.monitoring.agent.exporters._http.type', 'http'
-    setting 'xpack.monitoring.agent.exporters._http.enabled', 'false'
-    setting 'xpack.monitoring.agent.exporters._http.ssl.truststore.path', clientKeyStore.name
-    setting 'xpack.monitoring.agent.exporters._http.ssl.truststore.password', 'keypass'
-    setting 'xpack.monitoring.agent.exporters._http.auth.username', 'monitoring_agent'
-    setting 'xpack.monitoring.agent.exporters._http.auth.password', 'changeme'
+    setting 'xpack.monitoring.collection.interval', '3s'
+    setting 'xpack.monitoring.collection.exporters._http.type', 'http'
+    setting 'xpack.monitoring.collection.exporters._http.enabled', 'false'
+    setting 'xpack.monitoring.collection.exporters._http.ssl.truststore.path', clientKeyStore.name
+    setting 'xpack.monitoring.collection.exporters._http.ssl.truststore.password', 'keypass'
+    setting 'xpack.monitoring.collection.exporters._http.auth.username', 'monitoring_agent'
+    setting 'xpack.monitoring.collection.exporters._http.auth.password', 'changeme'
     setting 'xpack.security.transport.ssl.enabled', 'true'
     setting 'xpack.security.http.ssl.enabled', 'true'


@@ -73,8 +73,8 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
         URI uri = new URI("https", null, httpAddress.getHostString(), httpAddress.getPort(), "/", null, null);
         Settings exporterSettings = Settings.builder()
-                .put("xpack.monitoring.agent.exporters._http.enabled", true)
-                .put("xpack.monitoring.agent.exporters._http.host", uri.toString())
+                .put("xpack.monitoring.collection.exporters._http.enabled", true)
+                .put("xpack.monitoring.collection.exporters._http.host", uri.toString())
                 .build();
         assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings));
     }
@@ -82,8 +82,8 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
     @After
     public void disableExporter() {
         Settings exporterSettings = Settings.builder()
-                .putNull("xpack.monitoring.agent.exporters._http.enabled")
-                .putNull("xpack.monitoring.agent.exporters._http.host")
+                .putNull("xpack.monitoring.collection.exporters._http.enabled")
+                .putNull("xpack.monitoring.collection.exporters._http.host")
                 .build();
         assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(exporterSettings));
     }


@@ -11,9 +11,9 @@
         metric: [ settings ]
   - is_true: nodes
-  - is_true: nodes.$master.settings.xpack.monitoring.agent.exporters._http.type
-  - is_false: nodes.$master.settings.xpack.monitoring.agent.exporters._http.auth.username
-  - is_false: nodes.$master.settings.xpack.monitoring.agent.exporters._http.auth.password
-  - is_false: nodes.$master.settings.xpack.monitoring.agent.exporters._http.ssl.truststore.path
-  - is_false: nodes.$master.settings.xpack.monitoring.agent.exporters._http.ssl.truststore.password
+  - is_true: nodes.$master.settings.xpack.monitoring.collection.exporters._http.type
+  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.auth.username
+  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.auth.password
+  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.ssl.truststore.path
+  - is_false: nodes.$master.settings.xpack.monitoring.collection.exporters._http.ssl.truststore.password


@@ -133,7 +133,7 @@ integTest {
   // TODO: fix this rest test to not depend on a hardcoded port!
   systemProperty 'tests.rest.blacklist', 'getting_started/10_monitor_cluster_health/*,bulk/10_basic/*'
   cluster {
-    setting 'xpack.monitoring.agent.interval', '3s'
+    setting 'xpack.monitoring.collection.interval', '3s'
     waitCondition = { NodeInfo node, AntBuilder ant ->
       File tmpFile = new File(node.cwd, 'wait.success')
       for (int i = 0; i < 10; i++) {


@@ -42,12 +42,12 @@ public class Graph extends Plugin implements ActionPlugin {
         return XPackPlugin.featureEnabled(settings, NAME, true);
     }
 
-    public Collection<Module> nodeModules() {
+    public Collection<Module> createGuiceModules() {
         return Collections.singletonList(new GraphModule(enabled, transportClientMode));
     }
 
     @Override
-    public Collection<Class<? extends LifecycleComponent>> nodeServices() {
+    public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
         if (enabled == false|| transportClientMode) {
             return Collections.emptyList();
         }


@@ -1,10 +1,10 @@
 {
-  "graph.explore": {
+  "xpack.graph.explore": {
     "documentation": "https://www.elastic.co/guide/en/graph/current/explore.html",
     "methods": ["GET", "POST"],
     "url": {
       "path": "/{index}/_xpack/graph/_explore",
       "paths": ["/{index}/_xpack/graph/_explore", "/{index}/{type}/_xpack/graph/_explore"],
       "parts" : {
         "index": {
           "type" : "list",
@@ -23,7 +23,7 @@
       "timeout": {
         "type" : "time",
         "description" : "Explicit operation timeout"
       }
     }
   },
   "body": {


@@ -1,19 +1,21 @@
+---
+setup:
+  - do:
+      indices.create:
+        index: test_1
+        body:
+          settings:
+            index:
+              number_of_shards: 1
+              number_of_replicas: 0
+          mappings:
+            test:
+              properties:
+                keys:
+                  type : integer
 ---
 "Test basic graph explore":
-  - do:
-      indices.create:
-        index: test_1
-        body:
-          settings:
-            index:
-              number_of_replicas: 0
-              number_of_shards: 1
-          mappings:
-            test:
-              properties:
-                keys:
-                  type : "integer"
   - do:
       index:
         index: test_1
@@ -37,7 +39,7 @@
         wait_for_status: green
 
   - do:
-      graph.explore:
+      xpack.graph.explore:
         index: test_1
         type: test
         body: {"query": {"match": {"keys": 1}},"controls":{"use_significance":false},"vertices":[{"field": "keys","min_doc_count": 1}]}


@@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.license.plugin.core.LicensesStatus;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -26,6 +27,10 @@ public class PutLicenseResponse extends AcknowledgedResponse implements ToXConte
     PutLicenseResponse() {
     }
 
+    public PutLicenseResponse(boolean acknowledged, LicensesStatus status) {
+        this(acknowledged, status, null, Collections.<String, String[]>emptyMap());
+    }
+
     public PutLicenseResponse(boolean acknowledged, LicensesStatus status, String acknowledgeHeader,
                               Map<String, String[]> acknowledgeMessages) {
         super(acknowledged);


@@ -20,8 +20,6 @@ import org.elasticsearch.license.plugin.core.LicensesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
-import static org.elasticsearch.license.plugin.core.LicensesService.LicensesUpdateResponse;
-
 public class TransportPutLicenseAction extends TransportMasterNodeAction<PutLicenseRequest, PutLicenseResponse> {
 
     private final LicensesService licensesService;
@@ -53,18 +51,7 @@ public class TransportPutLicenseAction extends TransportMasterNodeAction<PutLice
     @Override
     protected void masterOperation(final PutLicenseRequest request, ClusterState state, final ActionListener<PutLicenseResponse>
             listener) throws ElasticsearchException {
-        licensesService.registerLicense(request, new ActionListener<LicensesUpdateResponse>() {
-            @Override
-            public void onResponse(LicensesUpdateResponse licensesUpdateResponse) {
-                listener.onResponse(new PutLicenseResponse(licensesUpdateResponse.isAcknowledged(), licensesUpdateResponse.status(),
-                        licensesUpdateResponse.acknowledgementHeader(), licensesUpdateResponse.acknowledgeMessages()));
-            }
-
-            @Override
-            public void onFailure(Exception e) {
-                listener.onFailure(e);
-            }
-        });
+        licensesService.registerLicense(request, listener);
     }
 
 }


@@ -0,0 +1,174 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
import java.util.UUID;
public abstract class ExpirationCallback {
static final String EXPIRATION_JOB_PREFIX = ".license_expiration_job_";
public enum Orientation {PRE, POST}
/**
* Callback that is triggered every <code>frequency</code> when
* current time is between <code>max</code> and <code>min</code>
* before license expiry.
*/
public abstract static class Pre extends ExpirationCallback {
/**
* Callback schedule prior to license expiry
*
* @param min latest relative time to execute before license expiry
* @param max earliest relative time to execute before license expiry
* @param frequency interval between execution
*/
public Pre(TimeValue min, TimeValue max, TimeValue frequency) {
super(Orientation.PRE, min, max, frequency);
}
}
/**
* Callback that is triggered every <code>frequency</code> when
* current time is between <code>min</code> and <code>max</code>
* after license expiry.
*/
public abstract static class Post extends ExpirationCallback {
/**
* Callback schedule after license expiry
*
* @param min earliest relative time to execute after license expiry
* @param max latest relative time to execute after license expiry
* @param frequency interval between execution
*/
public Post(TimeValue min, TimeValue max, TimeValue frequency) {
super(Orientation.POST, min, max, frequency);
}
}
private final String id;
private final Orientation orientation;
private final long min;
private final long max;
private final long frequency;
private ExpirationCallback(Orientation orientation, TimeValue min, TimeValue max, TimeValue frequency) {
this.orientation = orientation;
this.min = (min == null) ? 0 : min.getMillis();
this.max = (max == null) ? Long.MAX_VALUE : max.getMillis();
this.frequency = frequency.getMillis();
this.id = String.join("", EXPIRATION_JOB_PREFIX, UUID.randomUUID().toString());
}
public final String getId() {
return id;
}
public final long getFrequency() {
return frequency;
}
/**
* Calculates the delay for the next trigger time. When <code>now</code> is in a
* valid time bracket with respect to <code>expirationDate</code>, the delay is 0.
* When <code>now</code> is before the time bracket, the delay is the time until the start of
* the time bracket; when <code>now</code> is past the valid time bracket, the delay
* is <code>null</code>.
* @param expirationDate license expiry date in milliseconds
* @param now current time in milliseconds
* @return time delay
*/
final TimeValue delay(long expirationDate, long now) {
final TimeValue delay;
switch (orientation) {
case PRE:
if (expirationDate >= now) {
// license not yet expired
long preExpiryDuration = expirationDate - now;
if (preExpiryDuration > max) {
// license duration is longer than maximum duration, delay it to the first match time
delay = TimeValue.timeValueMillis(preExpiryDuration - max);
} else if (preExpiryDuration <= max && preExpiryDuration >= min) {
// no delay in valid time bracket
delay = TimeValue.timeValueMillis(0);
} else {
// passed last match time
delay = null;
}
} else {
// invalid after license expiry
delay = null;
}
break;
case POST:
if (expirationDate >= now) {
// license not yet expired, delay it to the first match time
delay = TimeValue.timeValueMillis(expirationDate - now + min);
} else {
// license has expired
long expiredDuration = now - expirationDate;
if (expiredDuration < min) {
// license expiry duration is shorter than minimum duration, delay it to the first match time
delay = TimeValue.timeValueMillis(min - expiredDuration);
} else if (expiredDuration >= min && expiredDuration <= max) {
// no delay in valid time bracket
delay = TimeValue.timeValueMillis(0);
} else {
// passed last match time
delay = null;
}
}
break;
default:
throw new IllegalStateException("orientation [" + orientation + "] unknown");
}
return delay;
}
/**
* {@link SchedulerEngine.Schedule#nextScheduledTimeAfter(long, long)} with respect to
* license expiry date
*/
public final long nextScheduledTimeForExpiry(long expiryDate, long startTime, long time) {
TimeValue delay = delay(expiryDate, time);
if (delay != null) {
long delayInMillis = delay.getMillis();
if (delayInMillis == 0L) {
if (startTime == time) {
// initial trigger and in time bracket, schedule immediately
return time;
} else {
// in time bracket, add frequency
return time + frequency;
}
} else {
// not in time bracket
return time + delayInMillis;
}
}
return -1;
}
/**
* Code to execute when the expiry callback is triggered in a valid
* time bracket
* @param license license to operate on
*/
public abstract void on(License license);
public final String toString() {
return LoggerMessageFormat.format(null, "ExpirationCallback:(orientation [{}], min [{}], max [{}], freq [{}])",
orientation.name(), TimeValue.timeValueMillis(min), TimeValue.timeValueMillis(max),
TimeValue.timeValueMillis(frequency));
}
}
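
The bracket arithmetic in delay(expirationDate, now) above is easiest to follow with concrete numbers. The following is a minimal, self-contained sketch, not part of the commit, that reproduces only the PRE-orientation branch with plain millisecond longs; -1 stands in for the null delay returned once the bracket has been passed or the license has already expired.

public class PreExpiryDelayDemo {

    // Returns the delay until the callback's valid time bracket [expiry - max, expiry - min] starts,
    // 0 when "now" is already inside the bracket, and -1 (null in the real code) once it has been passed.
    static long preDelayMillis(long expirationDate, long now, long min, long max) {
        if (expirationDate < now) {
            return -1;                             // license already expired: PRE bracket no longer applies
        }
        long preExpiryDuration = expirationDate - now;
        if (preExpiryDuration > max) {
            return preExpiryDuration - max;        // wait until the bracket opens
        } else if (preExpiryDuration >= min) {
            return 0;                              // inside the bracket: trigger now
        }
        return -1;                                 // past the bracket
    }

    public static void main(String[] args) {
        long day = 24 * 60 * 60 * 1000L;
        long expiry = 30 * day;                    // license expires at t = 30 days
        // bracket: between 7 days and 25 days before expiry
        System.out.println(preDelayMillis(expiry, 0, 7 * day, 25 * day));        // 5 days, in millis
        System.out.println(preDelayMillis(expiry, 20 * day, 7 * day, 25 * day)); // 0 (inside bracket)
        System.out.println(preDelayMillis(expiry, 29 * day, 7 * day, 25 * day)); // -1 (bracket passed)
    }
}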


@@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.license.core.License;
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
import static org.elasticsearch.license.plugin.core.LicensesService.GRACE_PERIOD_DURATION;
import static org.elasticsearch.license.plugin.core.LicensesService.getLicenseState;
public class LicenseSchedule implements SchedulerEngine.Schedule {
private final License license;
LicenseSchedule(License license) {
this.license = license;
}
@Override
public long nextScheduledTimeAfter(long startTime, long time) {
long nextScheduledTime = -1;
switch (getLicenseState(license, time)) {
case ENABLED:
nextScheduledTime = license.expiryDate();
break;
case GRACE_PERIOD:
nextScheduledTime = license.expiryDate() + GRACE_PERIOD_DURATION.getMillis();
break;
case DISABLED:
if (license.issueDate() > time) {
// when we encounter a license with a future issue date
// (which can happen with an auto-generated license),
// we want to schedule a notification on the license issue date
// so a notification is sent once the license becomes valid
// see https://github.com/elastic/x-plugins/issues/983
nextScheduledTime = license.issueDate();
}
break;
}
return nextScheduledTime;
}
}
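
Combined with getLicenseState (added to LicensesService further down), this schedule fires at the expiry date while the license is ENABLED, at the end of the grace period while it is in GRACE_PERIOD, and at the issue date for a future-dated license. Below is a standalone sketch of that logic with plain longs; the class is hypothetical, and the 7-day constant mirrors GRACE_PERIOD_DURATION = days(7) from the diff.

public class LicenseScheduleDemo {

    enum State { ENABLED, GRACE_PERIOD, DISABLED }

    static final long GRACE_PERIOD_MILLIS = 7 * 24 * 60 * 60 * 1000L;   // mirrors GRACE_PERIOD_DURATION = days(7)

    static State state(long issueDate, long expiryDate, long time) {
        if (issueDate > time) return State.DISABLED;                     // license not yet valid
        if (expiryDate > time) return State.ENABLED;
        if (expiryDate + GRACE_PERIOD_MILLIS > time) return State.GRACE_PERIOD;
        return State.DISABLED;
    }

    // Next time the scheduler should fire, or -1 when no further notification is needed.
    static long nextScheduledTimeAfter(long issueDate, long expiryDate, long time) {
        switch (state(issueDate, expiryDate, time)) {
            case ENABLED:      return expiryDate;                        // notify when the license expires
            case GRACE_PERIOD: return expiryDate + GRACE_PERIOD_MILLIS;  // notify when the grace period ends
            default:           return issueDate > time ? issueDate : -1; // future-dated license: wake up at issue date
        }
    }

    public static void main(String[] args) {
        long day = 24 * 60 * 60 * 1000L;
        long issue = 0, expiry = 30 * day;
        System.out.println(nextScheduledTimeAfter(issue, expiry, 10 * day) == expiry);                       // true
        System.out.println(nextScheduledTimeAfter(issue, expiry, 32 * day) == expiry + GRACE_PERIOD_MILLIS); // true
        System.out.println(nextScheduledTimeAfter(issue, expiry, 40 * day) == -1);                           // true
    }
}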


@@ -12,9 +12,9 @@ import java.util.List;
 public interface LicensesManagerService {
 
     /**
-     * @return the id of registered licensees currently in <code>state</code>
+     * @return current {@link LicenseState}
      */
-    List<String> licenseesWithState(LicenseState state);
+    LicenseState licenseState();
 
     /**
      * @return the currently active license, or {@code null} if no license is currently installed


@@ -20,20 +20,17 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.Singleton;
-import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
-import org.elasticsearch.common.util.concurrent.FutureUtils;
 import org.elasticsearch.gateway.GatewayService;
 import org.elasticsearch.license.core.License;
 import org.elasticsearch.license.core.LicenseVerifier;
 import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest;
 import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
+import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.EmptyTransportResponseHandler;
 import org.elasticsearch.transport.TransportChannel;
@@ -41,21 +38,17 @@ import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportRequestHandler;
 import org.elasticsearch.transport.TransportResponse;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.scheduler.SchedulerEngine;
+import org.elasticsearch.xpack.support.clock.Clock;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Queue;
 import java.util.UUID;
-import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
@@ -76,18 +69,15 @@ import java.util.concurrent.atomic.AtomicReference;
  * <p>
  * All registered listeners are notified of the current license upon registration or when a new license is installed in the cluster state.
  * When a new license is notified as enabled to the registered listener, a notification is scheduled at the time of license expiry.
- * Registered listeners are notified using {@link #notifyAndSchedule(LicensesMetaData)}
+ * Registered listeners are notified using {@link #onUpdate(LicensesMetaData)}
  */
-@Singleton
 public class LicensesService extends AbstractLifecycleComponent implements ClusterStateListener, LicensesManagerService,
-        LicenseeRegistry {
+        LicenseeRegistry, SchedulerEngine.Listener {
 
     public static final String REGISTER_TRIAL_LICENSE_ACTION_NAME = "internal:plugin/license/cluster/register_trial_license";
 
     private final ClusterService clusterService;
-    private final ThreadPool threadPool;
     private final TransportService transportService;
 
     /**
@@ -95,20 +85,12 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
      */
     private final List<InternalLicensee> registeredLicensees = new CopyOnWriteArrayList<>();
 
-    /**
-     * Currently active expiry notifications
-     */
-    private final Queue<ScheduledFuture> expiryNotifications = new ConcurrentLinkedQueue<>();
-
-    /**
-     * Currently active event notifications for every registered listener
-     */
-    private final Queue<ScheduledFuture> eventNotifications = new ConcurrentLinkedQueue<>();
-
     /**
      * Currently active license
      */
     private final AtomicReference<License> currentLicense = new AtomicReference<>();
+    private SchedulerEngine scheduler;
+    private final Clock clock;
 
     /**
      * Callbacks to notify relative to license expiry
@@ -128,7 +110,9 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
     /**
     * Duration of grace period after a license has expired
     */
-    private TimeValue gracePeriodDuration = days(7);
+    public static final TimeValue GRACE_PERIOD_DURATION = days(7);
+
+    private static final String LICENSE_JOB = "licenseJob";
 
     private static final FormatDateTimeFormatter DATE_FORMATTER = Joda.forPattern("EEEE, MMMMM dd, yyyy", Locale.ROOT);
@@ -136,16 +120,18 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
             "please read the following messages and update the license again, this time with the \"acknowledge=true\" parameter:";
 
     @Inject
-    public LicensesService(Settings settings, ClusterService clusterService, ThreadPool threadPool, TransportService transportService) {
+    public LicensesService(Settings settings, ClusterService clusterService, TransportService transportService, Clock clock) {
         super(settings);
         this.clusterService = clusterService;
-        this.threadPool = threadPool;
         this.transportService = transportService;
         if (DiscoveryNode.isMasterNode(settings)) {
             transportService.registerRequestHandler(REGISTER_TRIAL_LICENSE_ACTION_NAME, TransportRequest.Empty::new,
                     ThreadPool.Names.SAME, new RegisterTrialLicenseRequestHandler());
         }
         populateExpirationCallbacks();
+        this.clock = clock;
+        this.scheduler = new SchedulerEngine(clock);
+        this.scheduler.register(this);
     }
 
     private void populateExpirationCallbacks() {
@@ -251,18 +237,17 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
      * Registers new license in the cluster
      * Master only operation. Installs a new license on the master provided it is VALID
      */
-    public void registerLicense(final PutLicenseRequest request, final ActionListener<LicensesUpdateResponse> listener) {
+    public void registerLicense(final PutLicenseRequest request, final ActionListener<PutLicenseResponse> listener) {
         final License newLicense = request.license();
-        final long now = System.currentTimeMillis();
-        if (!verifyLicense(newLicense) || newLicense.issueDate() > now) {
-            listener.onResponse(new LicensesUpdateResponse(true, LicensesStatus.INVALID));
+        final long now = clock.millis();
+        if (!LicenseVerifier.verifyLicense(newLicense) || newLicense.issueDate() > now) {
+            listener.onResponse(new PutLicenseResponse(true, LicensesStatus.INVALID));
         } else if (newLicense.expiryDate() < now) {
-            listener.onResponse(new LicensesUpdateResponse(true, LicensesStatus.EXPIRED));
+            listener.onResponse(new PutLicenseResponse(true, LicensesStatus.EXPIRED));
         } else {
             if (!request.acknowledged()) {
-                final LicensesMetaData currentMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
-                final License currentLicense = getLicense(currentMetaData);
-                if (currentLicense != null && currentLicense != LicensesMetaData.LICENSE_TOMBSTONE) {
+                final License currentLicense = getLicense();
+                if (currentLicense != null) {
                     Map<String, String[]> acknowledgeMessages = new HashMap<>(registeredLicensees.size() + 1);
                     if (!License.isAutoGeneratedLicense(currentLicense.signature()) // current license is not auto-generated
                             && currentLicense.issueDate() > newLicense.issueDate()) { // and has a later issue date
@@ -278,72 +263,46 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
                     }
                     if (!acknowledgeMessages.isEmpty()) {
                         // needs acknowledgement
-                        listener.onResponse(new LicensesUpdateResponse(false, LicensesStatus.VALID, ACKNOWLEDGEMENT_HEADER,
+                        listener.onResponse(new PutLicenseResponse(false, LicensesStatus.VALID, ACKNOWLEDGEMENT_HEADER,
                                 acknowledgeMessages));
                         return;
                     }
                 }
             }
             clusterService.submitStateUpdateTask("register license [" + newLicense.uid() + "]", new
-                    AckedClusterStateUpdateTask<LicensesUpdateResponse>(request, listener) {
+                    AckedClusterStateUpdateTask<PutLicenseResponse>(request, listener) {
                         @Override
-                        protected LicensesUpdateResponse newResponse(boolean acknowledged) {
-                            return new LicensesUpdateResponse(acknowledged, LicensesStatus.VALID);
+                        protected PutLicenseResponse newResponse(boolean acknowledged) {
+                            return new PutLicenseResponse(acknowledged, LicensesStatus.VALID);
                         }
 
                         @Override
                         public ClusterState execute(ClusterState currentState) throws Exception {
                             MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                             mdBuilder.putCustom(LicensesMetaData.TYPE, new LicensesMetaData(newLicense));
                             return ClusterState.builder(currentState).metaData(mdBuilder).build();
                         }
                     });
         }
     }
-    private boolean verifyLicense(final License license) {
-        final byte[] publicKeyBytes;
-        try (InputStream is = LicensesService.class.getResourceAsStream("/public.key")) {
-            ByteArrayOutputStream out = new ByteArrayOutputStream();
-            Streams.copy(is, out);
-            publicKeyBytes = out.toByteArray();
-        } catch (IOException ex) {
-            throw new IllegalStateException(ex);
-        }
-        return LicenseVerifier.verifyLicense(license, publicKeyBytes);
-    }
-
     static TimeValue days(int days) {
         return TimeValue.timeValueHours(days * 24);
     }
-    public static class LicensesUpdateResponse extends ClusterStateUpdateResponse {
-        private final LicensesStatus status;
-        private final String acknowledgementHeader;
-        private final Map<String, String[]> acknowledgeMessages;
-
-        public LicensesUpdateResponse(boolean acknowledged, LicensesStatus status) {
-            this(acknowledged, status, null, Collections.<String, String[]>emptyMap());
-        }
-
-        public LicensesUpdateResponse(boolean acknowledged, LicensesStatus status, String acknowledgementHeader,
-                                      Map<String, String[]> acknowledgeMessages) {
-            super(acknowledged);
-            this.status = status;
-            this.acknowledgeMessages = acknowledgeMessages;
-            this.acknowledgementHeader = acknowledgementHeader;
-        }
-
-        public LicensesStatus status() {
-            return status;
-        }
-
-        public String acknowledgementHeader() {
-            return acknowledgementHeader;
-        }
-
-        public Map<String, String[]> acknowledgeMessages() {
-            return acknowledgeMessages;
+    @Override
+    public void triggered(SchedulerEngine.Event event) {
+        final LicensesMetaData licensesMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
+        if (licensesMetaData != null) {
+            final License license = licensesMetaData.getLicense();
+            if (event.getJobName().equals(LICENSE_JOB)) {
+                notifyLicensees(license);
+            } else if (event.getJobName().startsWith(ExpirationCallback.EXPIRATION_JOB_PREFIX)) {
+                expirationCallbacks.stream()
+                        .filter(expirationCallback -> expirationCallback.getId().equals(event.getJobName()))
+                        .forEach(expirationCallback -> expirationCallback.on(license));
+            }
         }
     }
@@ -353,35 +312,34 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
     public void removeLicense(final DeleteLicenseRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
         clusterService.submitStateUpdateTask("delete license",
                 new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(request, listener) {
                     @Override
                     protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
                         return new ClusterStateUpdateResponse(acknowledged);
                     }
 
                     @Override
                     public ClusterState execute(ClusterState currentState) throws Exception {
                         MetaData metaData = currentState.metaData();
                         final LicensesMetaData currentLicenses = metaData.custom(LicensesMetaData.TYPE);
                         if (currentLicenses.getLicense() != LicensesMetaData.LICENSE_TOMBSTONE) {
                             MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                             mdBuilder.putCustom(LicensesMetaData.TYPE, new LicensesMetaData(LicensesMetaData.LICENSE_TOMBSTONE));
                             return ClusterState.builder(currentState).metaData(mdBuilder).build();
                         } else {
                             return currentState;
                         }
                     }
                 });
     }
 
     @Override
-    public List<String> licenseesWithState(LicenseState state) {
-        List<String> licensees = new ArrayList<>(registeredLicensees.size());
-        for (InternalLicensee licensee : registeredLicensees) {
-            if (licensee.currentLicenseState == state) {
-                licensees.add(licensee.id());
-            }
+    public LicenseState licenseState() {
+        if (registeredLicensees.size() > 0) {
+            return registeredLicensees.get(0).currentLicenseState;
+        } else {
+            final License license = getLicense(clusterService.state().metaData().custom(LicensesMetaData.TYPE));
+            return getLicenseState(license, clock.millis());
         }
-        return licensees;
     }
 
     @Override
@@ -412,7 +370,7 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
                 MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                 // do not generate a trial license if any license is present
                 if (currentLicensesMetaData == null) {
-                    long issueDate = System.currentTimeMillis();
+                    long issueDate = clock.millis();
                     License.Builder specBuilder = License.builder()
                             .uid(UUID.randomUUID().toString())
                             .issuedTo(clusterService.getClusterName().value())
@@ -437,26 +395,16 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
     @Override
     protected void doStart() throws ElasticsearchException {
         clusterService.add(this);
+        scheduler.start(Collections.emptyList());
     }
 
     @Override
     protected void doStop() throws ElasticsearchException {
         clusterService.remove(this);
+        scheduler.stop();
 
-        // cancel all notifications
-        for (ScheduledFuture scheduledNotification : expiryNotifications) {
-            FutureUtils.cancel(scheduledNotification);
-        }
-        for (ScheduledFuture eventNotification : eventNotifications) {
-            FutureUtils.cancel(eventNotification);
-        }
-
         // clear all handlers
         registeredLicensees.clear();
 
-        // empty out notification queue
-        expiryNotifications.clear();
-
         // clear current license
         currentLicense.set(null);
     }
@@ -482,16 +430,13 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
             logger.debug("current [{}]", currentLicensesMetaData);
         }
         // notify all interested plugins
-        if (previousClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
-            notifyAndSchedule(currentLicensesMetaData);
-        } else {
-            if (prevLicensesMetaData == null) {
-                if (currentLicensesMetaData != null) {
-                    notifyAndSchedule(currentLicensesMetaData);
-                }
-            } else if (!prevLicensesMetaData.equals(currentLicensesMetaData)) {
-                notifyAndSchedule(currentLicensesMetaData);
+        if (previousClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)
+                || prevLicensesMetaData == null) {
+            if (currentLicensesMetaData != null) {
+                onUpdate(currentLicensesMetaData);
             }
+        } else if (!prevLicensesMetaData.equals(currentLicensesMetaData)) {
+            onUpdate(currentLicensesMetaData);
         }
         // auto-generate license if no licenses ever existed
         // this will trigger a subsequent cluster changed event
@@ -504,245 +449,75 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
         }
     }
-    /**
-     * Notifies registered licensees of license state change and/or new active license
-     * based on the license in <code>currentLicensesMetaData</code>.
-     * Additionally schedules license expiry notifications and event callbacks
-     * relative to the current license's expiry
-     */
-    private void notifyAndSchedule(final LicensesMetaData currentLicensesMetaData) {
-        final License license = getLicense(currentLicensesMetaData);
-        if (license == LicensesMetaData.LICENSE_TOMBSTONE) {
-            // implies license has been explicitly deleted
-            // update licensee states
-            registeredLicensees.forEach(InternalLicensee::onRemove);
-            return;
-        }
-        // license can be null if the trial license is yet to be auto-generated
-        // in this case, it is a no-op
-        if (license != null) {
-            logger.debug("notifying [{}] listeners", registeredLicensees.size());
-            long now = System.currentTimeMillis();
-            if (license.issueDate() > now) {
-                logger.warn("license [{}] - invalid", license.uid());
-                return;
-            }
-            long expiryDuration = license.expiryDate() - now;
-            if (license.expiryDate() > now) {
-                for (InternalLicensee licensee : registeredLicensees) {
-                    licensee.onChange(license, LicenseState.ENABLED);
-                }
-                logger.debug("license [{}] - valid", license.uid());
-                final TimeValue delay = TimeValue.timeValueMillis(expiryDuration);
-                // cancel any previous notifications
-                cancelNotifications(expiryNotifications);
-                try {
-                    logger.debug("schedule grace notification after [{}] for license [{}]", delay.toString(), license.uid());
-                    expiryNotifications.add(threadPool.schedule(delay, executorName(), new LicensingClientNotificationJob()));
-                } catch (EsRejectedExecutionException ex) {
-                    logger.debug("couldn't schedule grace notification", ex);
-                }
-            } else if ((license.expiryDate() + gracePeriodDuration.getMillis()) > now) {
-                for (InternalLicensee licensee : registeredLicensees) {
-                    licensee.onChange(license, LicenseState.GRACE_PERIOD);
-                }
-                logger.warn("license [{}] - grace", license.uid());
-                final TimeValue delay = TimeValue.timeValueMillis(expiryDuration + gracePeriodDuration.getMillis());
-                // cancel any previous notifications
-                cancelNotifications(expiryNotifications);
-                try {
-                    logger.debug("schedule expiry notification after [{}] for license [{}]", delay.toString(), license.uid());
-                    expiryNotifications.add(threadPool.schedule(delay, executorName(), new LicensingClientNotificationJob()));
-                } catch (EsRejectedExecutionException ex) {
-                    logger.debug("couldn't schedule expiry notification", ex);
-                }
-            } else {
-                for (InternalLicensee licensee : registeredLicensees) {
-                    licensee.onChange(license, LicenseState.DISABLED);
-                }
-                logger.warn("license [{}] - expired", license.uid());
-            }
-            if (!license.equals(currentLicense.get())) {
-                currentLicense.set(license);
-                // cancel all scheduled event notifications
-                cancelNotifications(eventNotifications);
-                // schedule expiry callbacks
-                for (ExpirationCallback expirationCallback : this.expirationCallbacks) {
-                    final TimeValue delay;
-                    if (expirationCallback.matches(license.expiryDate(), now)) {
-                        expirationCallback.on(license);
-                        TimeValue frequency = expirationCallback.frequency();
-                        delay = frequency != null ? frequency : expirationCallback.delay(expiryDuration);
-                    } else {
-                        delay = expirationCallback.delay(expiryDuration);
-                    }
-                    if (delay != null) {
-                        eventNotifications.add(threadPool.schedule(delay, executorName(), new EventNotificationJob(expirationCallback)));
-                    }
-                    if (logger.isDebugEnabled()) {
-                        logger.debug("schedule [{}] after [{}]", expirationCallback, delay);
-                    }
-                }
-                logger.debug("scheduled expiry callbacks for [{}] expiring after [{}]", license.uid(),
-                        TimeValue.timeValueMillis(expiryDuration));
-            }
-        }
-    }
-
-    private class LicensingClientNotificationJob implements Runnable {
-        @Override
-        public void run() {
-            logger.debug("running expiry notification");
-            final ClusterState currentClusterState = clusterService.state();
-            if (!currentClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
-                final LicensesMetaData currentLicensesMetaData = currentClusterState.metaData().custom(LicensesMetaData.TYPE);
-                notifyAndSchedule(currentLicensesMetaData);
-            } else if (logger.isDebugEnabled()) {
-                // next clusterChanged event will deal with the missed notifications
-                logger.debug("skip expiry notification [{}]", GatewayService.STATE_NOT_RECOVERED_BLOCK);
-            }
-        }
-    }
-
-    private class EventNotificationJob implements Runnable {
-        private final ExpirationCallback expirationCallback;
-
-        EventNotificationJob(ExpirationCallback expirationCallback) {
-            this.expirationCallback = expirationCallback;
-        }
-
-        @Override
-        public void run() {
-            logger.debug("running event notification for [{}]", expirationCallback);
-            LicensesMetaData currentLicensesMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
-            License license = getLicense(currentLicensesMetaData);
-            if (license != null) {
-                long now = System.currentTimeMillis();
-                if (expirationCallback.matches(license.expiryDate(), now)) {
-                    expirationCallback.on(license);
-                    if (expirationCallback.frequency() != null) {
-                        // schedule next event
-                        eventNotifications.add(threadPool.schedule(expirationCallback.frequency(), executorName(), this));
-                    }
-                } else if (logger.isDebugEnabled()) {
-                    logger.debug("skip scheduling notification for [{}] with license expiring after [{}]", expirationCallback,
-                            TimeValue.timeValueMillis(license.expiryDate() - now));
-                }
-            }
-            // clear out any finished event notifications
-            while (!eventNotifications.isEmpty()) {
-                ScheduledFuture notification = eventNotifications.peek();
-                if (notification != null && notification.isDone()) {
-                    // remove the notifications that are done
-                    eventNotifications.poll();
-                } else {
-                    // stop emptying out the queue as soon as the first undone future hits
-                    break;
-                }
-            }
-        }
-    }
-
-    public abstract static class ExpirationCallback {
-
-        public enum Orientation {PRE, POST}
-
-        public abstract static class Pre extends ExpirationCallback {
-
-            /**
-             * Callback schedule prior to license expiry
-             *
-             * @param min       latest relative time to execute before license expiry
-             * @param max       earliest relative time to execute before license expiry
-             * @param frequency interval between execution
-             */
-            public Pre(TimeValue min, TimeValue max, TimeValue frequency) {
-                super(Orientation.PRE, min, max, frequency);
-            }
-
-            @Override
-            public boolean matches(long expirationDate, long now) {
-                long expiryDuration = expirationDate - now;
-                if (expiryDuration > 0L) {
-                    if (expiryDuration <= max.getMillis()) {
-                        return expiryDuration >= min.getMillis();
-                    }
-                }
-                return false;
-            }
-
-            @Override
-            public TimeValue delay(long expiryDuration) {
-                return TimeValue.timeValueMillis(expiryDuration - max.getMillis());
-            }
-        }
-
-        public abstract static class Post extends ExpirationCallback {
-
-            /**
-             * Callback schedule after license expiry
-             *
-             * @param min       earliest relative time to execute after license expiry
-             * @param max       latest relative time to execute after license expiry
-             * @param frequency interval between execution
-             */
-            public Post(TimeValue min, TimeValue max, TimeValue frequency) {
-                super(Orientation.POST, min, max, frequency);
-            }
-
-            @Override
-            public boolean matches(long expirationDate, long now) {
-                long postExpiryDuration = now - expirationDate;
-                if (postExpiryDuration > 0L) {
-                    if (postExpiryDuration <= max.getMillis()) {
-                        return postExpiryDuration >= min.getMillis();
-                    }
-                }
-                return false;
-            }
-
-            @Override
-            public TimeValue delay(long expiryDuration) {
-                final long delay;
-                if (expiryDuration >= 0L) {
-                    delay = expiryDuration + min.getMillis();
-                } else {
-                    delay = (-1L * expiryDuration) - min.getMillis();
-                }
-                if (delay > 0L) {
-                    return TimeValue.timeValueMillis(delay);
-                } else {
-                    return null;
-                }
-            }
-        }
-
-        protected final Orientation orientation;
-        protected final TimeValue min;
-        protected final TimeValue max;
-        private final TimeValue frequency;
-
-        private ExpirationCallback(Orientation orientation, TimeValue min, TimeValue max, TimeValue frequency) {
-            this.orientation = orientation;
-            this.min = (min == null) ? TimeValue.timeValueMillis(0) : min;
-            this.max = (max == null) ? TimeValue.timeValueMillis(Long.MAX_VALUE) : max;
-            this.frequency = frequency;
-        }
-
-        public TimeValue frequency() {
-            return frequency;
-        }
-
-        public abstract TimeValue delay(long expiryDuration);
-
-        public abstract boolean matches(long expirationDate, long now);
-
-        public abstract void on(License license);
-
-        @Override
-        public String toString() {
-            return LoggerMessageFormat.format(null, "ExpirationCallback:(orientation [{}], min [{}], max [{}], freq [{}])",
-                    orientation.name(), min, max, frequency);
-        }
-    }
+    private void notifyLicensees(final License license) {
+        if (license == LicensesMetaData.LICENSE_TOMBSTONE) {
+            // implies license has been explicitly deleted
+            // update licensee states
+            registeredLicensees.forEach(InternalLicensee::onRemove);
+            return;
+        }
+        if (license != null) {
+            logger.debug("notifying [{}] listeners", registeredLicensees.size());
+            switch (getLicenseState(license, clock.millis())) {
+                case ENABLED:
+                    for (InternalLicensee licensee : registeredLicensees) {
+                        licensee.onChange(license, LicenseState.ENABLED);
+                    }
+                    logger.debug("license [{}] - valid", license.uid());
+                    break;
+                case GRACE_PERIOD:
+                    for (InternalLicensee licensee : registeredLicensees) {
+                        licensee.onChange(license, LicenseState.GRACE_PERIOD);
+                    }
+                    logger.warn("license [{}] - grace", license.uid());
+                    break;
+                case DISABLED:
+                    for (InternalLicensee licensee : registeredLicensees) {
+                        licensee.onChange(license, LicenseState.DISABLED);
+                    }
+                    logger.warn("license [{}] - expired", license.uid());
+                    break;
+            }
+        }
+    }
+
+    static LicenseState getLicenseState(final License license, long time) {
+        if (license == null) {
+            return LicenseState.DISABLED;
+        }
+        if (license.issueDate() > time) {
+            return LicenseState.DISABLED;
+        }
+        if (license.expiryDate() > time) {
+            return LicenseState.ENABLED;
+        }
+        if ((license.expiryDate() + GRACE_PERIOD_DURATION.getMillis()) > time) {
+            return LicenseState.GRACE_PERIOD;
+        }
+        return LicenseState.DISABLED;
+    }
+
+    /**
+     * Notifies registered licensees of license state change and/or new active license
+     * based on the license in <code>currentLicensesMetaData</code>.
+     * Additionally schedules license expiry notifications and event callbacks
+     * relative to the current license's expiry
+     */
+    void onUpdate(final LicensesMetaData currentLicensesMetaData) {
+        final License license = getLicense(currentLicensesMetaData);
+        // license can be null if the trial license is yet to be auto-generated
+        // in this case, it is a no-op
+        if (license != null) {
+            notifyLicensees(license);
+            if (license.equals(currentLicense.get()) == false) {
+                currentLicense.set(license);
+                scheduler.add(new SchedulerEngine.Job(LICENSE_JOB, new LicenseSchedule(license)));
+                for (ExpirationCallback expirationCallback : expirationCallbacks) {
+                    scheduler.add(new SchedulerEngine.Job(expirationCallback.getId(),
+                            (startTime, now) ->
+                                    expirationCallback.nextScheduledTimeForExpiry(license.expiryDate(), startTime, now)));
+                }
+            }
+        }
+    }
@@ -764,8 +539,8 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
                 // triggers a cluster changed event
                 // eventually notifying the current licensee
                 requestTrialLicense(clusterState);
-            } else {
-                notifyAndSchedule(currentMetaData);
+            } else if (lifecycleState() == Lifecycle.State.STARTED) {
+                notifyLicensees(currentMetaData.getLicense());
             }
         }
     }
@@ -787,7 +562,7 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
         } else {
             boolean autoGeneratedLicense = License.isAutoGeneratedLicense(license.signature());
             if ((autoGeneratedLicense && TrialLicense.verify(license))
-                    || (!autoGeneratedLicense && verifyLicense(license))) {
+                    || (!autoGeneratedLicense && LicenseVerifier.verifyLicense(license))) {
                 return license;
             }
         }
@@ -795,25 +570,6 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
         return null;
     }
 
-    /**
-     * Cancels out all notification futures
-     */
-    private static void cancelNotifications(Queue<ScheduledFuture> scheduledNotifications) {
-        // clear out notification queue
-        while (!scheduledNotifications.isEmpty()) {
-            ScheduledFuture notification = scheduledNotifications.peek();
-            if (notification != null) {
-                // cancel
-                FutureUtils.cancel(notification);
-                scheduledNotifications.poll();
-            }
-        }
-    }
-
-    private String executorName() {
-        return ThreadPool.Names.GENERIC;
-    }
-
     /**
      * Stores acknowledgement, expiration and license notification callbacks
      * for a registered listener
@@ -879,19 +635,4 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
             channel.sendResponse(TransportResponse.Empty.INSTANCE);
         }
     }
-
-    // TODO - temporary hack for tests, should be removed once we introduce `ClockMock`
-    public void setGracePeriodDuration(TimeValue gracePeriodDuration) {
-        this.gracePeriodDuration = gracePeriodDuration;
-    }
-
-    // only for adding expiration callbacks for tests
-    public void setExpirationCallbacks(List<ExpirationCallback> expirationCallbacks) {
-        this.expirationCallbacks = expirationCallbacks;
-    }
-
-    // TODO - temporary hack for tests, should be removed once we introduce `ClockMock`
-    public void setTrialLicenseDuration(TimeValue trialLicenseDuration) {
-        this.trialLicenseDuration = trialLicenseDuration;
-    }
 }


@@ -1,153 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.license.plugin.consumer.TestConsumerPluginBase;
import org.elasticsearch.license.plugin.consumer.TestPluginServiceBase;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.xpack.XPackPlugin;
import org.junit.After;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
/**
* Framework to test licensing plugin integration for existing/new consumer plugins
* see {@link org.elasticsearch.license.plugin.LicensesEagerConsumerPluginIntegrationTests}
* and {@link org.elasticsearch.license.plugin.LicensesLazyConsumerPluginIntegrationTests}
* for example usage
*/
@ClusterScope(scope = TEST, numDataNodes = 2, numClientNodes = 0, transportClientRatio = 0.0)
public abstract class AbstractLicensesConsumerPluginIntegrationTestCase extends AbstractLicensesIntegrationTestCase {
protected final TestConsumerPluginBase consumerPlugin;
public AbstractLicensesConsumerPluginIntegrationTestCase(TestConsumerPluginBase consumerPlugin) {
this.consumerPlugin = consumerPlugin;
}
private final int trialLicenseDurationInSeconds = 20;
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
// this setting is only used in tests
.put("_trial_license_duration_in_seconds", trialLicenseDurationInSeconds)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(XPackPlugin.class, consumerPlugin.getClass());
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
@After
public void afterTest() throws Exception {
wipeAllLicenses();
assertTrue(awaitBusy(() -> !clusterService().state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)));
}
public void testTrialLicenseAndSignedLicenseNotification() throws Exception {
logger.info("using {} consumer plugin", consumerPlugin.getClass().getName());
logger.info(" --> trial license generated");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.ENABLED, 2);
logger.info(" --> check trial license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired trial license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.GRACE_PERIOD, trialLicenseDurationInSeconds * 2);
assertLicenseeState(consumerPlugin.id(), LicenseState.GRACE_PERIOD);
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.DISABLED, trialLicenseDurationInSeconds * 2);
assertLicenseeState(consumerPlugin.id(), LicenseState.DISABLED);
logger.info(" --> put signed license");
putLicense(TimeValue.timeValueSeconds(trialLicenseDurationInSeconds));
logger.info(" --> check signed license enabled notification");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.ENABLED, 1);
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);
logger.info(" --> check signed license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired signed license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.GRACE_PERIOD, trialLicenseDurationInSeconds * 2);
assertLicenseeState(consumerPlugin.id(), LicenseState.GRACE_PERIOD);
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.DISABLED, trialLicenseDurationInSeconds * 2);
assertLicenseeState(consumerPlugin.id(), LicenseState.DISABLED);
}
public void testTrialLicenseNotification() throws Exception {
logger.info(" --> check onEnabled for trial license");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.ENABLED, 1);
logger.info(" --> check trial license expiry notification");
        // consumer plugin should notify onDisabled on all data nodes (expired trial license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.GRACE_PERIOD, trialLicenseDurationInSeconds);
assertLicenseeState(consumerPlugin.id(), LicenseState.GRACE_PERIOD);
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.DISABLED, trialLicenseDurationInSeconds);
assertLicenseeState(consumerPlugin.id(), LicenseState.DISABLED);
}
public void testOverlappingTrialAndSignedLicenseNotification() throws Exception {
logger.info(" --> check onEnabled for trial license");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.ENABLED, 1);
logger.info(" --> put signed license while trial license is in effect");
putLicense(TimeValue.timeValueSeconds(trialLicenseDurationInSeconds * 2));
logger.info(" --> check signed license enabled notification");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.ENABLED, 1);
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);
logger.info(" --> sleep for rest of trailLicense duration");
Thread.sleep(trialLicenseDurationInSeconds * 1000L);
logger.info(" --> check consumer is still enabled [signed license]");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.ENABLED, 1);
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);
logger.info(" --> check signed license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired signed license)
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.GRACE_PERIOD, trialLicenseDurationInSeconds * 2 * 2);
assertLicenseeState(consumerPlugin.id(), LicenseState.GRACE_PERIOD);
assertConsumerPluginNotification(consumerPluginServices(), LicenseState.DISABLED, trialLicenseDurationInSeconds * 2 * 2);
assertLicenseeState(consumerPlugin.id(), LicenseState.DISABLED);
}
private List<TestPluginServiceBase> consumerPluginServices() {
final InternalTestCluster clients = internalCluster();
List<TestPluginServiceBase> consumerPluginServices = new ArrayList<>();
for (TestPluginServiceBase service : clients.getDataNodeInstances(consumerPlugin.service())) {
consumerPluginServices.add(service);
}
return consumerPluginServices;
}
}
View File
@ -11,37 +11,19 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License; import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.action.put.PutLicenseAction;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequestBuilder;
import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.consumer.TestPluginServiceBase;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.license.plugin.core.LicensesManagerService;
import org.elasticsearch.license.plugin.core.LicensesMetaData; import org.elasticsearch.license.plugin.core.LicensesMetaData;
import org.elasticsearch.license.plugin.core.LicensesStatus;
import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.monitoring.Monitoring;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.xpack.watcher.Watcher; import org.elasticsearch.xpack.watcher.Watcher;
import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.graph.Graph;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCase { public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCase {
@ -71,6 +53,30 @@ public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCas
return nodeSettings(0); return nodeSettings(0);
} }
protected void putLicense(final License license) throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
ClusterService clusterService = internalCluster().getInstance(ClusterService.class, internalCluster().getMasterName());
clusterService.submitStateUpdateTask("putting license", new ClusterStateUpdateTask() {
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
latch.countDown();
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
mdBuilder.putCustom(LicensesMetaData.TYPE, new LicensesMetaData(license));
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
@Override
public void onFailure(String source, @Nullable Exception e) {
logger.error("error on metaData cleanup after test", e);
}
});
latch.await();
}
protected void wipeAllLicenses() throws InterruptedException { protected void wipeAllLicenses() throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch latch = new CountDownLatch(1);
ClusterService clusterService = internalCluster().getInstance(ClusterService.class, internalCluster().getMasterName()); ClusterService clusterService = internalCluster().getInstance(ClusterService.class, internalCluster().getMasterName());
@ -94,67 +100,4 @@ public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCas
}); });
latch.await(); latch.await();
} }
protected void putLicense(TimeValue expiryDuration) throws Exception {
License license1 = generateSignedLicense(expiryDuration);
final PutLicenseResponse putLicenseResponse = new PutLicenseRequestBuilder(client().admin().cluster(),
PutLicenseAction.INSTANCE).setLicense(license1).get();
assertThat(putLicenseResponse.isAcknowledged(), equalTo(true));
assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.VALID));
}
protected void assertLicenseeState(final String id, final LicenseState state) throws InterruptedException {
assertTrue("LicensesManagerService for licensee " + id + " should have status " + state.name(), awaitBusy(() -> {
final InternalTestCluster clients = internalCluster();
for (LicensesManagerService managerService : clients.getDataNodeInstances(LicensesManagerService.class)) {
if (!managerService.licenseesWithState(state).contains(id)) {
return false;
}
}
return true;
}));
}
protected void assertLazyConsumerPluginNotification(final LicenseState state, int timeoutInSec) throws InterruptedException {
final List<TestPluginServiceBase> consumerPluginServices = consumerLazyPluginServices();
assertConsumerPluginNotification(consumerPluginServices, state, timeoutInSec);
}
protected void assertEagerConsumerPluginNotification(final LicenseState state, int timeoutInSec) throws InterruptedException {
final List<TestPluginServiceBase> consumerPluginServices = consumerEagerPluginServices();
assertConsumerPluginNotification(consumerPluginServices, state, timeoutInSec);
}
protected void assertConsumerPluginNotification(final List<TestPluginServiceBase> consumerPluginServices, final LicenseState state,
int timeoutInSec) throws InterruptedException {
assertThat("At least one instance has to be present", consumerPluginServices.size(), greaterThan(0));
boolean success = awaitBusy(() -> {
for (TestPluginServiceBase pluginService : consumerPluginServices) {
if (state != pluginService.state()) {
return false;
}
}
return true;
}, timeoutInSec + 1, TimeUnit.SECONDS);
logger.debug("Notification assertion complete");
assertThat(consumerPluginServices.get(0).getClass().getName() + " should have status " + state.name(), success, equalTo(true));
}
private List<TestPluginServiceBase> consumerLazyPluginServices() {
final InternalTestCluster clients = internalCluster();
List<TestPluginServiceBase> consumerPluginServices = new ArrayList<>();
for (TestPluginServiceBase service : clients.getDataNodeInstances(LazyLicenseRegistrationPluginService.class)) {
consumerPluginServices.add(service);
}
return consumerPluginServices;
}
private List<TestPluginServiceBase> consumerEagerPluginServices() {
final InternalTestCluster clients = internalCluster();
List<TestPluginServiceBase> consumerPluginServices = new ArrayList<>();
for (TestPluginServiceBase service : clients.getDataNodeInstances(EagerLicenseRegistrationPluginService.class)) {
consumerPluginServices.add(service);
}
return consumerPluginServices;
}
} }
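A hedged usage sketch (illustration only) of how a test might combine the putLicense(License) helper added above with the TestUtils generators used elsewhere in this commit; the test name and the hamcrest matcher are assumptions:

    // Hypothetical test method inside a subclass of AbstractLicensesIntegrationTestCase.
    public void testPutLicenseDirectlyIntoClusterState() throws Exception {
        License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(24));
        // write the license straight into LicensesMetaData, bypassing the put-license transport action
        putLicense(license);
        LicensesMetaData licensesMetaData = clusterService().state().metaData().custom(LicensesMetaData.TYPE);
        assertThat(licensesMetaData.getLicense(), equalTo(license));
    }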
View File
@ -1,19 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin;
import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationConsumerPlugin;
// test is just too slow, please fix it to not be sleep-based
@BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007")
public class LicensesEagerConsumerPluginIntegrationTests extends AbstractLicensesConsumerPluginIntegrationTestCase {
public LicensesEagerConsumerPluginIntegrationTests() {
super(new EagerLicenseRegistrationConsumerPlugin(Settings.EMPTY));
}
}
View File
@ -1,19 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin;
import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationConsumerPlugin;
//test is just too slow, please fix it to not be sleep-based
@BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007")
public class LicensesLazyConsumerPluginIntegrationTests extends AbstractLicensesConsumerPluginIntegrationTestCase {
public LicensesLazyConsumerPluginIntegrationTests() {
super(new LazyLicenseRegistrationConsumerPlugin(Settings.EMPTY));
}
}
View File
@ -1,162 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin;
import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.xpack.XPackPlugin;
import org.junit.After;
import java.util.Arrays;
import java.util.Collection;
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
//test is just too slow, please fix it to not be sleep-based
@BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007")
@ClusterScope(scope = TEST, numDataNodes = 2, numClientNodes = 0)
public class LicensesPluginIntegrationTests extends AbstractLicensesIntegrationTestCase {
private final boolean useEagerLicenseRegistrationPlugin = randomBoolean();
private final int trialLicenseDurationInSeconds = 10;
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
// this setting is only used in tests
.put("_trial_license_duration_in_seconds", trialLicenseDurationInSeconds)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
if (useEagerLicenseRegistrationPlugin) {
return Arrays.asList(XPackPlugin.class, EagerLicenseRegistrationConsumerPlugin.class);
} else {
return Arrays.asList(XPackPlugin.class, LazyLicenseRegistrationConsumerPlugin.class);
}
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
@After
public void afterTest() throws Exception {
wipeAllLicenses();
assertTrue(awaitBusy(() -> !clusterService().state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)));
}
public void testTrialLicenseAndSignedLicenseNotification() throws Exception {
logger.info("using {} consumer plugin", useEagerLicenseRegistrationPlugin ? "eager" : "lazy");
logger.info(" --> trial license generated");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertConsumerPluginEnabledNotification(2);
logger.info(" --> check trial license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired trial license)
assertConsumerPluginDisabledNotification(trialLicenseDurationInSeconds * 2);
assertLicenseeState(getCurrentFeatureName(), LicenseState.GRACE_PERIOD);
assertLicenseeState(getCurrentFeatureName(), LicenseState.DISABLED);
logger.info(" --> put signed license");
putLicense(TimeValue.timeValueSeconds(trialLicenseDurationInSeconds));
logger.info(" --> check signed license enabled notification");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertConsumerPluginEnabledNotification(1);
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);
logger.info(" --> check signed license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired signed license)
assertConsumerPluginDisabledNotification(trialLicenseDurationInSeconds * 2);
assertLicenseeState(getCurrentFeatureName(), LicenseState.GRACE_PERIOD);
assertLicenseeState(getCurrentFeatureName(), LicenseState.DISABLED);
}
public void testTrialLicenseNotification() throws Exception {
logger.info(" --> check onEnabled for trial license");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertConsumerPluginEnabledNotification(1);
logger.info(" --> check trial license expiry notification");
        // consumer plugin should notify onDisabled on all data nodes (expired trial license)
assertConsumerPluginDisabledNotification(trialLicenseDurationInSeconds * 2);
assertLicenseeState(getCurrentFeatureName(), LicenseState.GRACE_PERIOD);
assertLicenseeState(getCurrentFeatureName(), LicenseState.DISABLED);
}
public void testOverlappingTrialAndSignedLicenseNotification() throws Exception {
logger.info(" --> check onEnabled for trial license");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertConsumerPluginEnabledNotification(1);
logger.info(" --> put signed license while trial license is in effect");
putLicense(TimeValue.timeValueSeconds(trialLicenseDurationInSeconds * 2));
logger.info(" --> check signed license enabled notification");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertConsumerPluginEnabledNotification(1);
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);
logger.info(" --> sleep for rest of trailLicense duration");
Thread.sleep(trialLicenseDurationInSeconds * 1000L);
logger.info(" --> check consumer is still enabled [signed license]");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertConsumerPluginEnabledNotification(1);
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);
logger.info(" --> check signed license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired signed license)
assertConsumerPluginDisabledNotification(trialLicenseDurationInSeconds * 2 * 2);
assertLicenseeState(getCurrentFeatureName(), LicenseState.GRACE_PERIOD);
assertLicenseeState(getCurrentFeatureName(), LicenseState.DISABLED);
}
private String getCurrentFeatureName() {
if (useEagerLicenseRegistrationPlugin) {
return EagerLicenseRegistrationPluginService.ID;
} else {
return LazyLicenseRegistrationPluginService.ID;
}
}
private void assertConsumerPluginEnabledNotification(int timeoutInSec) throws InterruptedException {
if (useEagerLicenseRegistrationPlugin) {
assertEagerConsumerPluginNotification(LicenseState.ENABLED, timeoutInSec);
} else {
assertLazyConsumerPluginNotification(LicenseState.ENABLED, timeoutInSec);
}
}
private void assertConsumerPluginDisabledNotification(int timeoutInSec) throws InterruptedException {
if (useEagerLicenseRegistrationPlugin) {
assertEagerConsumerPluginNotification(LicenseState.GRACE_PERIOD, timeoutInSec);
} else {
assertLazyConsumerPluginNotification(LicenseState.GRACE_PERIOD, timeoutInSec);
}
}
}
View File
@ -1,139 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin;
import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.xpack.XPackPlugin;
import org.junit.After;
import java.util.Arrays;
import java.util.Collection;
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
//test is just too slow, please fix it to not be sleep-based
@BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007")
@ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0)
public class LicensesPluginsIntegrationTests extends AbstractLicensesIntegrationTestCase {
private static final String ID_1 = EagerLicenseRegistrationPluginService.ID;
private static final String ID_2 = LazyLicenseRegistrationPluginService.ID;
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.build();
}
private Settings nodeSettingsWithConsumerPlugin(int trialLicenseDuration) {
return Settings.builder()
.put(super.nodeSettings(0))
// this setting is only used in tests
.put("_trial_license_duration_in_seconds", trialLicenseDuration)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(XPackPlugin.class, EagerLicenseRegistrationConsumerPlugin.class, LazyLicenseRegistrationConsumerPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
@After
public void afterTest() throws Exception {
wipeAllLicenses();
}
public void testMultipleConsumerPlugins() throws Exception {
int nNodes = randomIntBetween(2, 3);
int trialLicenseDurationInSec = 20;
int signedLicenseDuration = 5;
startNodesWithConsumerPlugins(nNodes, trialLicenseDurationInSec);
logger.info(" --> trial license generated");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(ID_1, LicenseState.ENABLED);
assertLicenseeState(ID_2, LicenseState.ENABLED);
// consumer plugin service should return enabled on all data nodes
assertEagerConsumerPluginNotification(LicenseState.ENABLED, 1);
assertLazyConsumerPluginNotification(LicenseState.ENABLED, 1);
logger.info(" --> check trial license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired trial license)
assertEagerConsumerPluginNotification(LicenseState.GRACE_PERIOD, trialLicenseDurationInSec * 2);
assertLazyConsumerPluginNotification(LicenseState.GRACE_PERIOD, trialLicenseDurationInSec * 2);
assertLicenseeState(ID_1, LicenseState.GRACE_PERIOD);
assertLicenseeState(ID_2, LicenseState.GRACE_PERIOD);
assertLicenseeState(ID_1, LicenseState.DISABLED);
assertLicenseeState(ID_2, LicenseState.DISABLED);
logger.info(" --> put signed license");
putLicense(TimeValue.timeValueSeconds(signedLicenseDuration));
logger.info(" --> check signed license enabled notification");
// consumer plugin should notify onEnabled on all data nodes (signed license)
assertEagerConsumerPluginNotification(LicenseState.ENABLED, 1);
assertLazyConsumerPluginNotification(LicenseState.ENABLED, 1);
assertLicenseeState(ID_1, LicenseState.ENABLED);
assertLicenseeState(ID_2, LicenseState.ENABLED);
logger.info(" --> check signed license expiry notification");
// consumer plugin should notify onDisabled on all data nodes (expired signed license)
assertEagerConsumerPluginNotification(LicenseState.GRACE_PERIOD, signedLicenseDuration * 2);
assertLazyConsumerPluginNotification(LicenseState.GRACE_PERIOD, signedLicenseDuration * 2);
assertLicenseeState(ID_1, LicenseState.GRACE_PERIOD);
assertLicenseeState(ID_2, LicenseState.GRACE_PERIOD);
assertEagerConsumerPluginNotification(LicenseState.DISABLED, 10 * 2);
assertLazyConsumerPluginNotification(LicenseState.DISABLED, 10 * 2);
assertLicenseeState(ID_1, LicenseState.DISABLED);
assertLicenseeState(ID_2, LicenseState.DISABLED);
}
public void testRandomFeatureLicensesActions() throws Exception {
int nNodes = randomIntBetween(2, 3);
startNodesWithConsumerPlugins(nNodes, 10);
logger.info(" --> check license enabled notification");
assertEagerConsumerPluginNotification(LicenseState.ENABLED, 1);
assertLazyConsumerPluginNotification(LicenseState.ENABLED, 1);
assertLicenseeState(ID_1, LicenseState.ENABLED);
assertLicenseeState(ID_2, LicenseState.ENABLED);
logger.info(" --> check license expiry notification");
        // consumer plugin should notify onDisabled on all data nodes (expired trial license)
assertEagerConsumerPluginNotification(LicenseState.GRACE_PERIOD, 10 * 2);
assertLazyConsumerPluginNotification(LicenseState.GRACE_PERIOD, 10 * 2);
assertLicenseeState(ID_1, LicenseState.GRACE_PERIOD);
assertLicenseeState(ID_2, LicenseState.GRACE_PERIOD);
assertEagerConsumerPluginNotification(LicenseState.DISABLED, 10 * 2);
assertLazyConsumerPluginNotification(LicenseState.DISABLED, 10 * 2);
assertLicenseeState(ID_1, LicenseState.DISABLED);
assertLicenseeState(ID_2, LicenseState.DISABLED);
}
private void startNodesWithConsumerPlugins(int nNodes, int trialLicenseDuration) {
for (int i = 0; i < nNodes; i++) {
internalCluster().startNode(nodeSettingsWithConsumerPlugin(trialLicenseDuration));
}
}
}
View File
@ -19,20 +19,16 @@ import org.elasticsearch.license.plugin.action.get.GetLicenseResponse;
import org.elasticsearch.license.plugin.action.put.PutLicenseAction; import org.elasticsearch.license.plugin.action.put.PutLicenseAction;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequestBuilder; import org.elasticsearch.license.plugin.action.put.PutLicenseRequestBuilder;
import org.elasticsearch.license.plugin.action.put.PutLicenseResponse; import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.LazyLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.core.LicenseState; import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.license.plugin.core.LicensesMetaData; import org.elasticsearch.license.plugin.core.LicensesMetaData;
import org.elasticsearch.license.plugin.core.LicensesService;
import org.elasticsearch.license.plugin.core.LicensesStatus; import org.elasticsearch.license.plugin.core.LicensesStatus;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackPlugin;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense; import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense;
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
@ -43,7 +39,6 @@ import static org.hamcrest.CoreMatchers.nullValue;
@ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, maxNumDataNodes = 0, transportClientRatio = 0) @ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, maxNumDataNodes = 0, transportClientRatio = 0)
public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTestCase { public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTestCase {
private final String[] PLUGINS = {EagerLicenseRegistrationPluginService.ID, LazyLicenseRegistrationPluginService.ID};
@Override @Override
protected Settings transportClientSettings() { protected Settings transportClientSettings() {
@ -59,16 +54,12 @@ public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTest
return Settings.builder() return Settings.builder()
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
.put("node.data", true) .put("node.data", true)
// this setting is only used in tests
.put("_trial_license_duration_in_seconds", 9)
// this setting is only used in tests
.put("_grace_duration_in_seconds", 9)
.put(NetworkModule.HTTP_ENABLED.getKey(), true); .put(NetworkModule.HTTP_ENABLED.getKey(), true);
} }
@Override @Override
protected Collection<Class<? extends Plugin>> nodePlugins() { protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(XPackPlugin.class, EagerLicenseRegistrationConsumerPlugin.class, LazyLicenseRegistrationConsumerPlugin.class); return Collections.singletonList(XPackPlugin.class);
} }
@Override @Override
@ -107,46 +98,57 @@ public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTest
wipeAllLicenses(); wipeAllLicenses();
} }
private void assertLicenseState(LicenseState state) throws InterruptedException {
boolean success = awaitBusy(() -> {
for (LicensesService service : internalCluster().getDataNodeInstances(LicensesService.class)) {
if (service.licenseState() == state) {
return true;
}
}
return false;
});
assertTrue(success);
}
public void testClusterRestartWhileEnabled() throws Exception { public void testClusterRestartWhileEnabled() throws Exception {
wipeAllLicenses(); wipeAllLicenses();
internalCluster().startNode(); internalCluster().startNode();
ensureGreen(); ensureGreen();
assertEagerConsumerPluginNotification(LicenseState.ENABLED, 5); assertLicenseState(LicenseState.ENABLED);
assertLazyConsumerPluginNotification(LicenseState.ENABLED, 5);
logger.info("--> restart node"); logger.info("--> restart node");
internalCluster().fullRestart(); internalCluster().fullRestart();
ensureYellow(); ensureYellow();
logger.info("--> await node for enabled"); logger.info("--> await node for enabled");
assertEagerConsumerPluginNotification(LicenseState.ENABLED, 5); assertLicenseState(LicenseState.ENABLED);
assertLazyConsumerPluginNotification(LicenseState.ENABLED, 5);
} }
public void testClusterRestartWhileGrace() throws Exception { public void testClusterRestartWhileGrace() throws Exception {
wipeAllLicenses(); wipeAllLicenses();
internalCluster().startNode(); internalCluster().startNode();
assertLicenseState(LicenseState.ENABLED);
putLicense(TestUtils.generateSignedLicense(TimeValue.timeValueMillis(0)));
ensureGreen(); ensureGreen();
assertEagerConsumerPluginNotification(LicenseState.GRACE_PERIOD, 10); assertLicenseState(LicenseState.GRACE_PERIOD);
assertLazyConsumerPluginNotification(LicenseState.GRACE_PERIOD, 10);
logger.info("--> restart node"); logger.info("--> restart node");
internalCluster().fullRestart(); internalCluster().fullRestart();
ensureYellow(); ensureYellow();
logger.info("--> await node for grace_period"); logger.info("--> await node for grace_period");
assertEagerConsumerPluginNotification(LicenseState.GRACE_PERIOD, 5); assertLicenseState(LicenseState.GRACE_PERIOD);
assertLazyConsumerPluginNotification(LicenseState.GRACE_PERIOD, 5);
} }
public void testClusterRestartWhileExpired() throws Exception { public void testClusterRestartWhileExpired() throws Exception {
wipeAllLicenses(); wipeAllLicenses();
internalCluster().startNode(); internalCluster().startNode();
ensureGreen(); ensureGreen();
assertEagerConsumerPluginNotification(LicenseState.DISABLED, 20); assertLicenseState(LicenseState.ENABLED);
assertLazyConsumerPluginNotification(LicenseState.DISABLED, 20); putLicense(TestUtils.generateExpiredLicense(System.currentTimeMillis() - LicensesService.GRACE_PERIOD_DURATION.getMillis()));
assertLicenseState(LicenseState.DISABLED);
logger.info("--> restart node"); logger.info("--> restart node");
internalCluster().fullRestart(); internalCluster().fullRestart();
ensureYellow(); ensureYellow();
logger.info("--> await node for disabled"); logger.info("--> await node for disabled");
assertEagerConsumerPluginNotification(LicenseState.DISABLED, 5); assertLicenseState(LicenseState.DISABLED);
assertLazyConsumerPluginNotification(LicenseState.DISABLED, 5);
} }
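A brief hedged note (illustration only) on why the two restart tests above settle in different states; it assumes LicensesService.GRACE_PERIOD_DURATION, referenced in the expired-license test, is the grace window applied after expiry:

    // Rough arithmetic behind the assertions above, not part of the commit.
    long now = System.currentTimeMillis();
    long grace = LicensesService.GRACE_PERIOD_DURATION.getMillis();
    // testClusterRestartWhileGrace:   expiry ~= now,        so now - expiry <  grace -> GRACE_PERIOD
    // testClusterRestartWhileExpired: expiry = now - grace, so now - expiry >= grace -> DISABLED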
public void testClusterNotRecovered() throws Exception { public void testClusterNotRecovered() throws Exception {
@ -154,27 +156,7 @@ public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTest
internalCluster().startNode(nodeSettingsBuilder(0).put("discovery.zen.minimum_master_nodes", 2).put("node.master", true)); internalCluster().startNode(nodeSettingsBuilder(0).put("discovery.zen.minimum_master_nodes", 2).put("node.master", true));
logger.info("--> start second master out of two [recovered state]"); logger.info("--> start second master out of two [recovered state]");
internalCluster().startNode(nodeSettingsBuilder(1).put("discovery.zen.minimum_master_nodes", 2).put("node.master", true)); internalCluster().startNode(nodeSettingsBuilder(1).put("discovery.zen.minimum_master_nodes", 2).put("node.master", true));
assertLicenseesStateEnabled(); assertLicenseState(LicenseState.ENABLED);
assertConsumerPluginEnabledNotification(1);
}
public void testAtMostOnceTrialLicenseGeneration() throws Exception {
wipeAllLicenses();
logger.info("--> start one node [trial license should be generated & enabled]");
internalCluster().startNode(nodeSettingsBuilder(0));
assertLicenseesStateEnabled();
assertConsumerPluginEnabledNotification(1);
logger.info("--> start another node [trial license should be propagated from the old master not generated]");
internalCluster().startNode(nodeSettings(1));
assertLicenseesStateEnabled();
assertConsumerPluginEnabledNotification(1);
logger.info("--> check if multiple trial licenses are found for a id");
LicensesMetaData licensesMetaData = clusterService().state().metaData().custom(LicensesMetaData.TYPE);
assertThat(licensesMetaData.getLicense(), not(LicensesMetaData.LICENSE_TOMBSTONE));
wipeAllLicenses();
} }
private void removeLicense() throws Exception { private void removeLicense() throws Exception {
@ -216,15 +198,4 @@ public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTest
assertThat(licensesMetaData, notNullValue()); assertThat(licensesMetaData, notNullValue());
assertThat(licensesMetaData.getLicense(), not(LicensesMetaData.LICENSE_TOMBSTONE)); assertThat(licensesMetaData.getLicense(), not(LicensesMetaData.LICENSE_TOMBSTONE));
} }
private void assertLicenseesStateEnabled() throws Exception {
for (String id : PLUGINS) {
assertLicenseeState(id, LicenseState.ENABLED);
}
}
private void assertConsumerPluginEnabledNotification(int timeoutInSec) throws InterruptedException {
assertEagerConsumerPluginNotification(LicenseState.ENABLED, timeoutInSec);
assertLazyConsumerPluginNotification(LicenseState.ENABLED, timeoutInSec);
}
} }
View File
@ -1,59 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationConsumerPlugin;
import org.elasticsearch.license.plugin.consumer.EagerLicenseRegistrationPluginService;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.xpack.XPackPlugin;
import java.util.Arrays;
import java.util.Collection;
import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
/**
*/
@ESIntegTestCase.ClusterScope(scope = TEST, supportsDedicatedMasters = false, numDataNodes = 10, numClientNodes = 0)
public class LicensesServiceNodeTests extends AbstractLicensesIntegrationTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(NetworkModule.HTTP_ENABLED.getKey(), true)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(XPackPlugin.class, EagerLicenseRegistrationConsumerPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
public void testPluginStatus() throws Exception {
final Iterable<EagerLicenseRegistrationPluginService> testPluginServices =
internalCluster().getDataNodeInstances(EagerLicenseRegistrationPluginService.class);
assertTrue(awaitBusy(() -> {
for (EagerLicenseRegistrationPluginService pluginService : testPluginServices) {
if (pluginService.state() != LicenseState.ENABLED) {
return false;
}
}
return true;
}));
}
}
View File
@ -31,6 +31,7 @@ import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import static org.elasticsearch.license.plugin.TestUtils.dateMath; import static org.elasticsearch.license.plugin.TestUtils.dateMath;
import static org.elasticsearch.license.plugin.TestUtils.generateExpiredLicense;
import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense; import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.not;
@ -129,7 +130,7 @@ public class LicensesTransportTests extends ESSingleNodeTestCase {
} }
public void testPutExpiredLicense() throws Exception { public void testPutExpiredLicense() throws Exception {
License expiredLicense = generateSignedLicense(dateMath("now-10d/d", System.currentTimeMillis()), TimeValue.timeValueMinutes(2)); License expiredLicense = generateExpiredLicense();
PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(client().admin().cluster(), PutLicenseAction.INSTANCE); PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(client().admin().cluster(), PutLicenseAction.INSTANCE);
builder.setLicense(expiredLicense); builder.setLicense(expiredLicense);
PutLicenseResponse putLicenseResponse = builder.get(); PutLicenseResponse putLicenseResponse = builder.get();
@ -162,7 +163,7 @@ public class LicensesTransportTests extends ESSingleNodeTestCase {
License goldLicense = generateSignedLicense("gold", TimeValue.timeValueMinutes(5)); License goldLicense = generateSignedLicense("gold", TimeValue.timeValueMinutes(5));
PutLicenseRequestBuilder putLicenseRequestBuilder = PutLicenseRequestBuilder putLicenseRequestBuilder =
new PutLicenseRequestBuilder(client().admin().cluster(), PutLicenseAction.INSTANCE).setLicense(goldLicense) new PutLicenseRequestBuilder(client().admin().cluster(), PutLicenseAction.INSTANCE).setLicense(goldLicense)
.setAcknowledge(true); .setAcknowledge(true);
PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get();
assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); assertThat(putLicenseResponse.isAcknowledged(), equalTo(true));
assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.VALID)); assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.VALID));
@ -177,4 +178,4 @@ public class LicensesTransportTests extends ESSingleNodeTestCase {
getLicenseResponse = new GetLicenseRequestBuilder(client().admin().cluster(), GetLicenseAction.INSTANCE).get(); getLicenseResponse = new GetLicenseRequestBuilder(client().admin().cluster(), GetLicenseAction.INSTANCE).get();
assertNull(getLicenseResponse.license()); assertNull(getLicenseResponse.license());
} }
} }
View File
@ -23,11 +23,13 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License; import org.elasticsearch.license.core.License;
import org.elasticsearch.license.licensor.LicenseSigner; import org.elasticsearch.license.licensor.LicenseSigner;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequest; import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;
import org.elasticsearch.license.plugin.core.Licensee; import org.elasticsearch.license.plugin.core.Licensee;
import org.elasticsearch.license.plugin.core.LicensesService; import org.elasticsearch.license.plugin.core.LicensesService;
import org.elasticsearch.license.plugin.core.LicensesStatus; import org.elasticsearch.license.plugin.core.LicensesStatus;
import org.junit.Assert; import org.junit.Assert;
import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@ -90,7 +92,7 @@ public class TestUtils {
} }
public static License generateSignedLicense(String type, long issueDate, TimeValue expiryDuration) throws Exception { public static License generateSignedLicense(String type, long issueDate, TimeValue expiryDuration) throws Exception {
long issue = (issueDate != -1L) ? issueDate : System.currentTimeMillis(); long issue = (issueDate != -1L) ? issueDate : System.currentTimeMillis() - TimeValue.timeValueHours(2).getMillis();
int version = randomIntBetween(License.VERSION_START, License.VERSION_CURRENT); int version = randomIntBetween(License.VERSION_START, License.VERSION_CURRENT);
final String licenseType; final String licenseType;
if (version < License.VERSION_NO_FEATURE_TYPE) { if (version < License.VERSION_NO_FEATURE_TYPE) {
@ -101,7 +103,7 @@ public class TestUtils {
final License.Builder builder = License.builder() final License.Builder builder = License.builder()
.uid(UUID.randomUUID().toString()) .uid(UUID.randomUUID().toString())
.version(version) .version(version)
.expiryDate(issue + expiryDuration.getMillis()) .expiryDate(System.currentTimeMillis() + expiryDuration.getMillis())
.issueDate(issue) .issueDate(issue)
.type(licenseType) .type(licenseType)
.issuedTo("customer") .issuedTo("customer")
@ -115,6 +117,24 @@ public class TestUtils {
return signer.sign(builder.build()); return signer.sign(builder.build());
} }
public static License generateExpiredLicense() throws Exception {
return generateExpiredLicense(System.currentTimeMillis() - TimeValue.timeValueHours(randomIntBetween(1, 10)).getMillis());
}
public static License generateExpiredLicense(long expiryDate) throws Exception {
final License.Builder builder = License.builder()
.uid(UUID.randomUUID().toString())
.version(License.VERSION_CURRENT)
.expiryDate(expiryDate)
.issueDate(expiryDate - TimeValue.timeValueMinutes(10).getMillis())
.type(randomFrom("basic", "silver", "dev", "gold", "platinum"))
.issuedTo("customer")
.issuer("elasticsearch")
.maxNodes(5);
LicenseSigner signer = new LicenseSigner(getTestPriKeyPath(), getTestPubKeyPath());
return signer.sign(builder.build());
}
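A short hedged example (illustration only) of how the two generateExpiredLicense helpers are meant to be consumed; the expiryDate() getter is assumed to mirror the builder field of the same name:

    // Hypothetical usage, mirroring the calls in LicensesTransportTests and LicensesServiceClusterTests.
    License expired = TestUtils.generateExpiredLicense();                 // expired between 1 and 10 hours ago
    License longExpired = TestUtils.generateExpiredLicense(
            System.currentTimeMillis() - TimeValue.timeValueDays(30).getMillis()); // well past any grace period
    assert expired.expiryDate() < System.currentTimeMillis();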
private static Path getResourcePath(String resource) throws Exception { private static Path getResourcePath(String resource) throws Exception {
return PathUtils.get(TestUtils.class.getResource(resource).toURI()); return PathUtils.get(TestUtils.class.getResource(resource).toURI());
} }
@ -138,9 +158,9 @@ public class TestUtils {
PutLicenseRequest putLicenseRequest = new PutLicenseRequest().license(license); PutLicenseRequest putLicenseRequest = new PutLicenseRequest().license(license);
final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch latch = new CountDownLatch(1);
final AtomicReference<LicensesStatus> status = new AtomicReference<>(); final AtomicReference<LicensesStatus> status = new AtomicReference<>();
licensesService.registerLicense(putLicenseRequest, new ActionListener<LicensesService.LicensesUpdateResponse>() { licensesService.registerLicense(putLicenseRequest, new ActionListener<PutLicenseResponse>() {
@Override @Override
public void onResponse(LicensesService.LicensesUpdateResponse licensesUpdateResponse) { public void onResponse(PutLicenseResponse licensesUpdateResponse) {
status.set(licensesUpdateResponse.status()); status.set(licensesUpdateResponse.status());
latch.countDown(); latch.countDown();
} }
@ -198,4 +218,4 @@ public class TestUtils {
statuses.add(status); statuses.add(status);
} }
} }
} }
View File
@ -1,35 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.consumer;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
/**
* Registers licenses upon the start of the service lifecycle
* see {@link EagerLicenseRegistrationPluginService}
* <p>
* License registration might happen before clusterService start()
*/
public class EagerLicenseRegistrationConsumerPlugin extends TestConsumerPluginBase {
public static final String NAME = "test_consumer_plugin_1";
@Inject
public EagerLicenseRegistrationConsumerPlugin(Settings settings) {
super(settings);
}
@Override
public Class<? extends TestPluginServiceBase> service() {
return EagerLicenseRegistrationPluginService.class;
}
@Override
public String id() {
return EagerLicenseRegistrationPluginService.ID;
}
}
View File
@ -1,27 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.consumer;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.plugin.core.LicensesService;
@Singleton
public class EagerLicenseRegistrationPluginService extends TestPluginServiceBase {
public static String ID = "id1";
@Inject
public EagerLicenseRegistrationPluginService(Settings settings, LicensesService licensesClientService) {
super(true, settings, licensesClientService, null);
}
@Override
public String id() {
return ID;
}
}
View File
@ -1,33 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.consumer;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
/**
* Registers licenses only after cluster has recovered
* see {@link LazyLicenseRegistrationPluginService}
* <p>
 * License registration happens after clusterService start()
*/
public class LazyLicenseRegistrationConsumerPlugin extends TestConsumerPluginBase {
@Inject
public LazyLicenseRegistrationConsumerPlugin(Settings settings) {
super(settings);
}
@Override
public Class<? extends TestPluginServiceBase> service() {
return LazyLicenseRegistrationPluginService.class;
}
@Override
public String id() {
return LazyLicenseRegistrationPluginService.ID;
}
}
View File
@ -1,28 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.consumer;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.plugin.core.LicensesService;
@Singleton
public class LazyLicenseRegistrationPluginService extends TestPluginServiceBase {
public static String ID = "id2";
@Inject
public LazyLicenseRegistrationPluginService(Settings settings, LicensesService licensesClientService, ClusterService clusterService) {
super(false, settings, licensesClientService, clusterService);
}
@Override
public String id() {
return ID;
}
}
View File
@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.consumer;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.plugins.Plugin;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
public abstract class TestConsumerPluginBase extends Plugin {
private final boolean isEnabled;
public TestConsumerPluginBase(Settings settings) {
this.isEnabled = TransportClient.CLIENT_TYPE.equals(settings.get(Client.CLIENT_TYPE_SETTING_S.getKey())) == false;
}
@Override
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
Collection<Class<? extends LifecycleComponent>> services = new ArrayList<>();
if (isEnabled) {
services.add(service());
}
return services;
}
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(Setting.simpleString("_trial_license_duration_in_seconds", Setting.Property.NodeScope,
Setting.Property.Shared), Setting.simpleString("_grace_duration_in_seconds", Setting.Property.NodeScope,
Setting.Property.Shared));
}
public abstract Class<? extends TestPluginServiceBase> service();
public abstract String id();
}
View File
@ -1,105 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.consumer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.core.LicenseState;
import org.elasticsearch.license.plugin.core.Licensee;
import org.elasticsearch.license.plugin.core.LicensesService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
public abstract class TestPluginServiceBase extends AbstractLifecycleComponent
implements ClusterStateListener, Licensee {
private LicensesService licensesClientService;
private final ClusterService clusterService;
final boolean eagerLicenseRegistration;
public final AtomicBoolean registered = new AtomicBoolean(false);
private AtomicReference<LicenseState> state = new AtomicReference<>(LicenseState.DISABLED);
public TestPluginServiceBase(boolean eagerLicenseRegistration, Settings settings, LicensesService licensesClientService,
ClusterService clusterService) {
super(settings);
this.eagerLicenseRegistration = eagerLicenseRegistration;
this.licensesClientService = licensesClientService;
int trialDurationInSec = settings.getAsInt("_trial_license_duration_in_seconds", -1);
if (trialDurationInSec != -1) {
licensesClientService.setTrialLicenseDuration(TimeValue.timeValueSeconds(trialDurationInSec));
}
int graceDurationInSec = settings.getAsInt("_grace_duration_in_seconds", 5);
licensesClientService.setGracePeriodDuration(TimeValue.timeValueSeconds(graceDurationInSec));
if (!eagerLicenseRegistration) {
this.clusterService = clusterService;
clusterService.add(this);
} else {
this.clusterService = null;
}
}
    // should be the same string used by the license Manager to generate
// signed license
public abstract String id();
// check if feature is enabled
public LicenseState state() {
return state.get();
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
if (!eagerLicenseRegistration && !event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
if (registered.compareAndSet(false, true)) {
logger.info("Registering to licensesService [lazy]");
licensesClientService.register(this);
}
}
}
protected void doStart() throws ElasticsearchException {
if (eagerLicenseRegistration) {
if (registered.compareAndSet(false, true)) {
logger.info("Registering to licensesService [eager]");
licensesClientService.register(this);
}
}
}
@Override
public String[] expirationMessages() {
return new String[0];
}
@Override
public String[] acknowledgmentMessages(License currentLicense, License newLicense) {
return new String[0];
}
@Override
public void onChange(Status status) {
this.state.set(status.getLicenseState());
}
@Override
protected void doStop() throws ElasticsearchException {
if (clusterService != null) {
clusterService.remove(this);
}
}
@Override
protected void doClose() throws ElasticsearchException {
}
}
View File
@ -0,0 +1,59 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.license.core.License;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.scheduler.SchedulerEngine;
import org.elasticsearch.xpack.support.clock.ClockMock;
import org.junit.Before;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public abstract class AbstractLicenseServiceTestCase extends ESTestCase {
protected LicensesService licensesService;
protected ClusterService clusterService;
protected TransportService transportService;
protected ClockMock clock;
@Before
public void init() throws Exception {
clusterService = mock(ClusterService.class);
transportService = mock(TransportService.class);
clock = new ClockMock();
licensesService = new LicensesService(Settings.EMPTY, clusterService, transportService, clock);
}
protected void setInitialState(License license) {
ClusterState state = mock(ClusterState.class);
final ClusterBlocks noBlock = ClusterBlocks.builder().build();
when(state.blocks()).thenReturn(noBlock);
MetaData metaData = mock(MetaData.class);
when(metaData.custom(LicensesMetaData.TYPE)).thenReturn(new LicensesMetaData(license));
when(state.metaData()).thenReturn(metaData);
final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class);
final DiscoveryNode mockNode = new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
when(discoveryNodes.getMasterNode()).thenReturn(mockNode);
when(state.nodes()).thenReturn(discoveryNodes);
when(clusterService.state()).thenReturn(state);
when(clusterService.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
}
}

View File

@ -0,0 +1,155 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.test.ESTestCase;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class ExpirationCallbackTests extends ESTestCase {
public void testPostExpirationDelay() throws Exception {
TimeValue expiryDuration = TimeValue.timeValueSeconds(randomIntBetween(5, 10));
TimeValue min = TimeValue.timeValueSeconds(1);
TimeValue max = TimeValue.timeValueSeconds(4);
TimeValue frequency = TimeValue.timeValueSeconds(1);
NoopPostExpirationCallback post = new NoopPostExpirationCallback(min, max, frequency);
long now = System.currentTimeMillis();
long expiryDate = now + expiryDuration.getMillis();
assertThat(post.delay(expiryDate, now),
equalTo(TimeValue.timeValueMillis(expiryDuration.getMillis() + min.getMillis()))); // before license expiry
assertThat(post.delay(expiryDate, expiryDate), equalTo(min)); // on license expiry
int latestValidTriggerDelay = (int) (expiryDuration.getMillis() + max.getMillis());
int earliestValidTriggerDelay = (int) (expiryDuration.getMillis() + min.getMillis());
assertExpirationCallbackDelay(post, expiryDuration.millis(), latestValidTriggerDelay, earliestValidTriggerDelay);
}
public void testPreExpirationDelay() throws Exception {
TimeValue expiryDuration = TimeValue.timeValueSeconds(randomIntBetween(5, 10));
TimeValue min = TimeValue.timeValueSeconds(1);
TimeValue max = TimeValue.timeValueSeconds(4);
TimeValue frequency = TimeValue.timeValueSeconds(1);
NoopPreExpirationCallback pre = new NoopPreExpirationCallback(min, max, frequency);
long now = System.currentTimeMillis();
long expiryDate = now + expiryDuration.getMillis();
assertThat(pre.delay(expiryDate, expiryDate), nullValue()); // on license expiry
int latestValidTriggerDelay = (int) (expiryDuration.getMillis() - min.getMillis());
int earliestValidTriggerDelay = (int) (expiryDuration.getMillis() - max.getMillis());
assertExpirationCallbackDelay(pre, expiryDuration.millis(), latestValidTriggerDelay, earliestValidTriggerDelay);
}
public void testPostExpirationWithNullMax() throws Exception {
int postExpirySeconds = randomIntBetween(5, 10);
TimeValue postExpiryDuration = TimeValue.timeValueSeconds(postExpirySeconds);
TimeValue min = TimeValue.timeValueSeconds(postExpirySeconds - randomIntBetween(1, 3));
final ExpirationCallback.Post post = new NoopPostExpirationCallback(min, null, timeValueMillis(10));
long now = System.currentTimeMillis();
assertThat(post.delay(now - postExpiryDuration.millis(), now), equalTo(TimeValue.timeValueMillis(0)));
}
public void testPreExpirationWithNullMin() throws Exception {
int expirySeconds = randomIntBetween(5, 10);
TimeValue expiryDuration = TimeValue.timeValueSeconds(expirySeconds);
TimeValue max = TimeValue.timeValueSeconds(expirySeconds + randomIntBetween(1, 10));
final ExpirationCallback.Pre pre = new NoopPreExpirationCallback(null, max, timeValueMillis(10));
long now = System.currentTimeMillis();
assertThat(pre.delay(expiryDuration.millis() + now, now), equalTo(TimeValue.timeValueMillis(0)));
}
public void testPreExpirationScheduleTime() throws Exception {
TimeValue expiryDuration = TimeValue.timeValueSeconds(randomIntBetween(5, 10));
TimeValue min = TimeValue.timeValueSeconds(1);
TimeValue max = TimeValue.timeValueSeconds(4);
TimeValue frequency = TimeValue.timeValueSeconds(1);
NoopPreExpirationCallback pre = new NoopPreExpirationCallback(min, max, frequency);
int latestValidTriggerDelay = (int) (expiryDuration.getMillis() - min.getMillis());
int earliestValidTriggerDelay = (int) (expiryDuration.getMillis() - max.getMillis());
assertExpirationCallbackScheduleTime(pre, expiryDuration.millis(), latestValidTriggerDelay, earliestValidTriggerDelay);
}
public void testPostExpirationScheduleTime() throws Exception {
TimeValue expiryDuration = TimeValue.timeValueSeconds(randomIntBetween(5, 10));
TimeValue min = TimeValue.timeValueSeconds(1);
TimeValue max = TimeValue.timeValueSeconds(4);
TimeValue frequency = TimeValue.timeValueSeconds(1);
NoopPostExpirationCallback pre = new NoopPostExpirationCallback(min, max, frequency);
int latestValidTriggerDelay = (int) (expiryDuration.getMillis() + max.getMillis());
int earliestValidTriggerDelay = (int) (expiryDuration.getMillis() + min.getMillis());
assertExpirationCallbackScheduleTime(pre, expiryDuration.millis(), latestValidTriggerDelay, earliestValidTriggerDelay);
}
private void assertExpirationCallbackDelay(ExpirationCallback expirationCallback, long expiryDuration,
int latestValidTriggerDelay, int earliestValidTriggerDelay) {
long now = System.currentTimeMillis();
long expiryDate = now + expiryDuration;
// bounds
assertThat(expirationCallback.delay(expiryDate, now + earliestValidTriggerDelay), equalTo(TimeValue.timeValueMillis(0)));
assertThat(expirationCallback.delay(expiryDate, now + latestValidTriggerDelay), equalTo(TimeValue.timeValueMillis(0)));
// inside the window
assertThat(expirationCallback.delay(expiryDate,
now + randomIntBetween(earliestValidTriggerDelay, latestValidTriggerDelay)),
equalTo(TimeValue.timeValueMillis(0)));
// out of bounds
int deltaBeforeEarliestMatch = between(1, earliestValidTriggerDelay);
assertThat(expirationCallback.delay(expiryDate, now + deltaBeforeEarliestMatch),
equalTo(TimeValue.timeValueMillis(earliestValidTriggerDelay - deltaBeforeEarliestMatch)));
int deltaAfterLatestMatch = between(latestValidTriggerDelay + 1, Integer.MAX_VALUE); // after expiry and after max
assertThat(expirationCallback.delay(expiryDate, expiryDate + deltaAfterLatestMatch), nullValue());
}
public void assertExpirationCallbackScheduleTime(ExpirationCallback expirationCallback, long expiryDuration,
int latestValidTriggerDelay, int earliestValidTriggerDelay) {
long now = System.currentTimeMillis();
long expiryDate = now + expiryDuration;
int validTriggerInterval = between(earliestValidTriggerDelay, latestValidTriggerDelay);
assertThat(expirationCallback.nextScheduledTimeForExpiry(expiryDate,
now + validTriggerInterval, now + validTriggerInterval),
equalTo(now + validTriggerInterval));
assertThat(expirationCallback.nextScheduledTimeForExpiry(expiryDate, now, now + validTriggerInterval),
equalTo(now + validTriggerInterval + expirationCallback.getFrequency()));
int deltaBeforeEarliestMatch = between(1, earliestValidTriggerDelay);
assertThat(expirationCallback.nextScheduledTimeForExpiry(expiryDate, now, now + deltaBeforeEarliestMatch),
equalTo(now + deltaBeforeEarliestMatch +
expirationCallback.delay(expiryDate, now + deltaBeforeEarliestMatch).getMillis()));
assertThat(expirationCallback.nextScheduledTimeForExpiry(expiryDate,
now + deltaBeforeEarliestMatch, now + deltaBeforeEarliestMatch),
equalTo(now + deltaBeforeEarliestMatch +
expirationCallback.delay(expiryDate, now + deltaBeforeEarliestMatch).getMillis()));
int deltaAfterLatestMatch = between(latestValidTriggerDelay + 1, Integer.MAX_VALUE); // after expiry and after max
assertThat(expirationCallback.nextScheduledTimeForExpiry(expiryDate, now, now + deltaAfterLatestMatch), equalTo(-1L));
assertThat(expirationCallback.nextScheduledTimeForExpiry(expiryDate,
now + deltaAfterLatestMatch, now + deltaAfterLatestMatch),
equalTo(-1L));
}
private static class NoopPostExpirationCallback extends ExpirationCallback.Post {
public NoopPostExpirationCallback(TimeValue min, TimeValue max, TimeValue frequency) {
super(min, max, frequency);
}
@Override
public void on(License license) {}
}
private static class NoopPreExpirationCallback extends ExpirationCallback.Pre {
public NoopPreExpirationCallback(TimeValue min, TimeValue max, TimeValue frequency) {
super(min, max, frequency);
}
@Override
public void on(License license) {}
}
}
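The delay bounds asserted above can be restated compactly. The helper below only re-derives the Post window semantics from these assertions (wait until expiry + min, fire immediately anywhere inside [expiry + min, expiry + max], never fire after expiry + max); it is not the production ExpirationCallback code, and a Pre callback mirrors it with the window [expiry - max, expiry - min].

// Illustrative only: the post-expiry window implied by the assertions above.
static Long postDelayMillis(long expiryDate, long now, long minMillis, long maxMillis) {
    if (now <= expiryDate) {
        return (expiryDate - now) + minMillis;      // before expiry: wait until the window opens
    } else if (now <= expiryDate + maxMillis) {
        long opensAt = expiryDate + minMillis;
        return now < opensAt ? opensAt - now : 0L;  // inside the window: fire now (or once it opens)
    } else {
        return null;                                // window closed: never fire
    }
}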

View File

@ -0,0 +1,80 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.transport.TransportRequest;
import org.junit.After;
import org.junit.Before;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class LicenseClusterChangeTests extends AbstractLicenseServiceTestCase {
private TestUtils.AssertingLicensee licensee;
@Before
public void setup() {
setInitialState(null);
licensesService.start();
licensee = new TestUtils.AssertingLicensee("LicenseClusterChangeTests", logger);
licensesService.register(licensee);
}
@After
public void teardown() {
licensesService.stop();
}
public void testNotificationOnNewLicense() throws Exception {
ClusterState oldState = ClusterState.builder(new ClusterName("a")).build();
final License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(24));
MetaData metaData = MetaData.builder().putCustom(LicensesMetaData.TYPE, new LicensesMetaData(license)).build();
ClusterState newState = ClusterState.builder(new ClusterName("a")).metaData(metaData).build();
licensesService.clusterChanged(new ClusterChangedEvent("simulated", newState, oldState));
assertThat(licensee.statuses.size(), equalTo(1));
assertTrue(licensee.statuses.get(0).getLicenseState() == LicenseState.ENABLED);
}
public void testNoNotificationOnExistingLicense() throws Exception {
final License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(24));
MetaData metaData = MetaData.builder().putCustom(LicensesMetaData.TYPE, new LicensesMetaData(license)).build();
ClusterState newState = ClusterState.builder(new ClusterName("a")).metaData(metaData).build();
ClusterState oldState = ClusterState.builder(newState).build();
licensesService.clusterChanged(new ClusterChangedEvent("simulated", newState, oldState));
assertThat(licensee.statuses.size(), equalTo(0));
}
public void testTrialLicenseGeneration() throws Exception {
DiscoveryNode master = new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
ClusterState oldState = ClusterState.builder(new ClusterName("a"))
.nodes(DiscoveryNodes.builder().masterNodeId(master.getId()).put(master)).build();
ClusterState newState = ClusterState.builder(oldState).build();
licensesService.clusterChanged(new ClusterChangedEvent("simulated", newState, oldState));
verify(transportService, times(2))
.sendRequest(any(DiscoveryNode.class),
eq(LicensesService.REGISTER_TRIAL_LICENSE_ACTION_NAME),
any(TransportRequest.Empty.class), any(EmptyTransportResponseHandler.class));
}
}

View File

@ -0,0 +1,47 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.transport.TransportRequest;
import static org.elasticsearch.mock.orig.Mockito.times;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
public class LicenseRegistrationTests extends AbstractLicenseServiceTestCase {
public void testTrialLicenseRequestOnEmptyLicenseState() throws Exception {
setInitialState(null);
TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee(
"testTrialLicenseRequestOnEmptyLicenseState", logger);
licensesService.start();
licensesService.register(licensee);
verify(transportService, times(1))
.sendRequest(any(DiscoveryNode.class),
eq(LicensesService.REGISTER_TRIAL_LICENSE_ACTION_NAME),
any(TransportRequest.Empty.class), any(EmptyTransportResponseHandler.class));
assertThat(licensee.statuses.size(), equalTo(0));
licensesService.stop();
}
public void testNotificationOnRegistration() throws Exception {
setInitialState(TestUtils.generateSignedLicense(TimeValue.timeValueHours(2)));
TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee(
"testNotificationOnRegistration", logger);
licensesService.start();
licensesService.register(licensee);
assertThat(licensee.statuses.size(), equalTo(1));
final LicenseState licenseState = licensee.statuses.get(0).getLicenseState();
assertTrue(licenseState == LicenseState.ENABLED);
licensesService.stop();
}
}

View File

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import static org.hamcrest.Matchers.equalTo;
public class LicenseScheduleTests extends ESTestCase {
private License license;
private LicenseSchedule schedule;
@Before
public void setup() throws Exception {
license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(12));
schedule = new LicenseSchedule(license);
}
public void testEnabledLicenseSchedule() throws Exception {
int expiryDuration = (int) (license.expiryDate() - license.issueDate());
long triggeredTime = license.issueDate() + between(0, expiryDuration);
assertThat(schedule.nextScheduledTimeAfter(license.issueDate(), triggeredTime), equalTo(license.expiryDate()));
}
public void testGraceLicenseSchedule() throws Exception {
long triggeredTime = license.expiryDate() + between(1,
((int) LicensesService.GRACE_PERIOD_DURATION.getMillis()));
assertThat(schedule.nextScheduledTimeAfter(license.issueDate(), triggeredTime),
equalTo(license.expiryDate() + LicensesService.GRACE_PERIOD_DURATION.getMillis()));
}
public void testExpiredLicenseSchedule() throws Exception {
long triggeredTime = license.expiryDate() + LicensesService.GRACE_PERIOD_DURATION.getMillis() +
randomIntBetween(1, 1000);
assertThat(schedule.nextScheduledTimeAfter(license.issueDate(), triggeredTime),
equalTo(-1L));
}
public void testInvalidLicenseSchedule() throws Exception {
long triggeredTime = license.issueDate() - randomIntBetween(1, 1000);
assertThat(schedule.nextScheduledTimeAfter(triggeredTime, triggeredTime),
equalTo(license.issueDate()));
}
}
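Taken together, the four cases above describe a three-phase schedule. The sketch below summarizes them (grace period taken from LicensesService.GRACE_PERIOD_DURATION); it restates the assertions rather than the LicenseSchedule implementation, and its signature is simplified for illustration.

// Illustrative summary of the schedule phases exercised above.
static long nextScheduledTimeAfter(License license, long graceMillis, long triggeredTime) {
    if (triggeredTime < license.issueDate()) {
        return license.issueDate();                     // not yet valid: wake up at issue date
    } else if (triggeredTime <= license.expiryDate()) {
        return license.expiryDate();                    // enabled: next transition is expiry
    } else if (triggeredTime <= license.expiryDate() + graceMillis) {
        return license.expiryDate() + graceMillis;      // grace period: next transition is disable
    }
    return -1L;                                         // disabled: nothing left to schedule
}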

View File

@ -6,79 +6,58 @@
package org.elasticsearch.license.plugin.core; package org.elasticsearch.license.plugin.core;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License; import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.TestUtils; import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.license.plugin.action.put.PutLicenseRequest; import org.elasticsearch.license.plugin.action.put.PutLicenseRequest;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.license.plugin.action.put.PutLicenseResponse;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.license.plugin.TestUtils.awaitNoBlock;
import static org.elasticsearch.license.plugin.TestUtils.awaitNoPendingTasks;
import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense; import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class LicensesAcknowledgementTests extends ESSingleNodeTestCase { public class LicensesAcknowledgementTests extends AbstractLicenseServiceTestCase {
static {
MetaData.registerPrototype(LicensesMetaData.TYPE, LicensesMetaData.PROTO);
}
@Override
protected boolean resetNodeAfterTest() {
return true;
}
public void testAcknowledgment() throws Exception { public void testAcknowledgment() throws Exception {
final LicensesService licensesService = getInstanceFromNode(LicensesService.class); setInitialState(TestUtils.generateSignedLicense("trial", TimeValue.timeValueHours(2)));
licensesService.start(); licensesService.start();
String id = "testAcknowledgment"; String id = "testAcknowledgment";
String[] acknowledgeMessages = new String[] {"message"}; String[] acknowledgeMessages = new String[] {"message"};
TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee(id, logger); TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee(id, logger);
licensee.setAcknowledgementMessages(acknowledgeMessages); licensee.setAcknowledgementMessages(acknowledgeMessages);
awaitNoBlock(client());
licensesService.register(licensee); licensesService.register(licensee);
awaitNoPendingTasks(client());
// try installing a signed license // try installing a signed license
License signedLicense = generateSignedLicense(TimeValue.timeValueHours(10)); License signedLicense = generateSignedLicense(TimeValue.timeValueHours(10));
PutLicenseRequest putLicenseRequest = new PutLicenseRequest().license(signedLicense); PutLicenseRequest putLicenseRequest = new PutLicenseRequest().license(signedLicense);
CountDownLatch latch = new CountDownLatch(1);
// ensure acknowledgement message was part of the response // ensure acknowledgement message was part of the response
licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(false, LicensesStatus.VALID, licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(false, LicensesStatus.VALID,
Collections.singletonMap(id, acknowledgeMessages), latch)); Collections.singletonMap(id, acknowledgeMessages)));
if (!latch.await(5, TimeUnit.SECONDS)) {
fail("waiting too long for a response to license registration");
}
awaitNoPendingTasks(client());
assertThat(licensee.acknowledgementRequested.size(), equalTo(1)); assertThat(licensee.acknowledgementRequested.size(), equalTo(1));
assertThat(licensee.acknowledgementRequested.get(0).v2(), equalTo(signedLicense)); assertThat(licensee.acknowledgementRequested.get(0).v2(), equalTo(signedLicense));
assertThat(licensesService.getLicense(), not(signedLicense)); assertThat(licensesService.getLicense(), not(signedLicense));
latch = new CountDownLatch(1);
// try installing a signed license with acknowledgement // try installing a signed license with acknowledgement
putLicenseRequest = new PutLicenseRequest().license(signedLicense).acknowledge(true); putLicenseRequest = new PutLicenseRequest().license(signedLicense).acknowledge(true);
// ensure license was installed and no acknowledgment message was returned // ensure license was installed and no acknowledgment message was returned
licensee.setAcknowledgementMessages(new String[0]); licensee.setAcknowledgementMessages(new String[0]);
licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(true, LicensesStatus.VALID, licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(true, LicensesStatus.VALID,
Collections.<String, String[]>emptyMap(), latch)); Collections.<String, String[]>emptyMap()));
if (!latch.await(5, TimeUnit.SECONDS)) { verify(clusterService, times(1)).submitStateUpdateTask(any(String.class), any(ClusterStateUpdateTask.class));
fail("waiting too long for a response to license registration");
}
awaitNoPendingTasks(client());
assertThat(licensee.acknowledgementRequested.size(), equalTo(1)); assertThat(licensee.acknowledgementRequested.size(), equalTo(1));
assertThat(licensee.acknowledgementRequested.get(0).v2(), equalTo(signedLicense)); assertThat(licensee.acknowledgementRequested.get(0).v2(), equalTo(signedLicense));
assertThat(licensesService.getLicense(), equalTo(signedLicense));
licensesService.stop(); licensesService.stop();
} }
public void testAcknowledgementMultipleLicensee() throws Exception { public void testAcknowledgementMultipleLicensee() throws Exception {
final LicensesService licensesService = getInstanceFromNode(LicensesService.class); setInitialState(TestUtils.generateSignedLicense("trial", TimeValue.timeValueHours(2)));
licensesService.start(); licensesService.start();
String id1 = "testAcknowledgementMultipleLicensee_1"; String id1 = "testAcknowledgementMultipleLicensee_1";
String[] acknowledgeMessages1 = new String[] {"testAcknowledgementMultipleLicensee_1"}; String[] acknowledgeMessages1 = new String[] {"testAcknowledgementMultipleLicensee_1"};
@ -88,62 +67,49 @@ public class LicensesAcknowledgementTests extends ESSingleNodeTestCase {
licensee1.setAcknowledgementMessages(acknowledgeMessages1); licensee1.setAcknowledgementMessages(acknowledgeMessages1);
TestUtils.AssertingLicensee licensee2 = new TestUtils.AssertingLicensee(id2, logger); TestUtils.AssertingLicensee licensee2 = new TestUtils.AssertingLicensee(id2, logger);
licensee2.setAcknowledgementMessages(acknowledgeMessages2); licensee2.setAcknowledgementMessages(acknowledgeMessages2);
awaitNoBlock(client());
licensesService.register(licensee1); licensesService.register(licensee1);
licensesService.register(licensee2); licensesService.register(licensee2);
awaitNoPendingTasks(client());
// try installing a signed license // try installing a signed license
License signedLicense = generateSignedLicense(TimeValue.timeValueHours(10)); License signedLicense = generateSignedLicense(TimeValue.timeValueHours(10));
PutLicenseRequest putLicenseRequest = new PutLicenseRequest().license(signedLicense); PutLicenseRequest putLicenseRequest = new PutLicenseRequest().license(signedLicense);
CountDownLatch latch = new CountDownLatch(1);
// ensure acknowledgement message was part of the response // ensure acknowledgement message was part of the response
final HashMap<String, String[]> expectedMessages = new HashMap<>(); final HashMap<String, String[]> expectedMessages = new HashMap<>();
expectedMessages.put(id1, acknowledgeMessages1); expectedMessages.put(id1, acknowledgeMessages1);
expectedMessages.put(id2, acknowledgeMessages2); expectedMessages.put(id2, acknowledgeMessages2);
licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(false, LicensesStatus.VALID, licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(false, LicensesStatus.VALID,
expectedMessages, latch)); expectedMessages));
if (!latch.await(5, TimeUnit.SECONDS)) { verify(clusterService, times(0)).submitStateUpdateTask(any(String.class), any(ClusterStateUpdateTask.class));
fail("waiting too long for a response to license registration");
}
awaitNoPendingTasks(client());
assertThat(licensee2.acknowledgementRequested.size(), equalTo(1)); assertThat(licensee2.acknowledgementRequested.size(), equalTo(1));
assertThat(licensee2.acknowledgementRequested.get(0).v2(), equalTo(signedLicense)); assertThat(licensee2.acknowledgementRequested.get(0).v2(), equalTo(signedLicense));
assertThat(licensee1.acknowledgementRequested.size(), equalTo(1)); assertThat(licensee1.acknowledgementRequested.size(), equalTo(1));
assertThat(licensee1.acknowledgementRequested.get(0).v2(), equalTo(signedLicense)); assertThat(licensee1.acknowledgementRequested.get(0).v2(), equalTo(signedLicense));
assertThat(licensesService.getLicense(), not(signedLicense)); assertThat(licensesService.getLicense(), not(signedLicense));
latch = new CountDownLatch(1);
// try installing a signed license with acknowledgement // try installing a signed license with acknowledgement
putLicenseRequest = new PutLicenseRequest().license(signedLicense).acknowledge(true); putLicenseRequest = new PutLicenseRequest().license(signedLicense).acknowledge(true);
// ensure license was installed and no acknowledgment message was returned // ensure license was installed and no acknowledgment message was returned
licensee1.setAcknowledgementMessages(new String[0]); licensee1.setAcknowledgementMessages(new String[0]);
licensee2.setAcknowledgementMessages(new String[0]); licensee2.setAcknowledgementMessages(new String[0]);
licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(true, LicensesStatus.VALID, licensesService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(true, LicensesStatus.VALID,
Collections.<String, String[]>emptyMap(), latch)); Collections.<String, String[]>emptyMap()));
if (!latch.await(5, TimeUnit.SECONDS)) { verify(clusterService, times(1)).submitStateUpdateTask(any(String.class), any(ClusterStateUpdateTask.class));
fail("waiting too long for a response to license registration");
}
awaitNoPendingTasks(client());
assertThat(licensesService.getLicense(), equalTo(signedLicense));
licensesService.stop(); licensesService.stop();
} }
private static class AssertingLicensesUpdateResponse implements ActionListener<LicensesService.LicensesUpdateResponse> { private static class AssertingLicensesUpdateResponse implements ActionListener<PutLicenseResponse> {
private final boolean expectedAcknowledgement; private final boolean expectedAcknowledgement;
private final LicensesStatus expectedStatus; private final LicensesStatus expectedStatus;
private final Map<String, String[]> expectedAckMessages; private final Map<String, String[]> expectedAckMessages;
private final CountDownLatch latch;
public AssertingLicensesUpdateResponse(boolean expectedAcknowledgement, LicensesStatus expectedStatus, public AssertingLicensesUpdateResponse(boolean expectedAcknowledgement, LicensesStatus expectedStatus,
Map<String, String[]> expectedAckMessages, CountDownLatch latch) { Map<String, String[]> expectedAckMessages) {
this.expectedAcknowledgement = expectedAcknowledgement; this.expectedAcknowledgement = expectedAcknowledgement;
this.expectedStatus = expectedStatus; this.expectedStatus = expectedStatus;
this.expectedAckMessages = expectedAckMessages; this.expectedAckMessages = expectedAckMessages;
this.latch = latch;
} }
@Override @Override
public void onResponse(LicensesService.LicensesUpdateResponse licensesUpdateResponse) { public void onResponse(PutLicenseResponse licensesUpdateResponse) {
assertThat(licensesUpdateResponse.isAcknowledged(), equalTo(expectedAcknowledgement)); assertThat(licensesUpdateResponse.isAcknowledged(), equalTo(expectedAcknowledgement));
assertThat(licensesUpdateResponse.status(), equalTo(expectedStatus)); assertThat(licensesUpdateResponse.status(), equalTo(expectedStatus));
assertThat(licensesUpdateResponse.acknowledgeMessages().size(), equalTo(expectedAckMessages.size())); assertThat(licensesUpdateResponse.acknowledgeMessages().size(), equalTo(expectedAckMessages.size()));
@ -153,12 +119,10 @@ public class LicensesAcknowledgementTests extends ESSingleNodeTestCase {
String[] actualMessages = actual.get(expectedEntry.getKey()); String[] actualMessages = actual.get(expectedEntry.getKey());
assertThat(actualMessages, equalTo(expectedEntry.getValue())); assertThat(actualMessages, equalTo(expectedEntry.getValue()));
} }
latch.countDown();
} }
@Override @Override
public void onFailure(Exception throwable) { public void onFailure(Exception throwable) {
latch.countDown();
} }
} }
} }
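A caller drives the same acknowledgment flow in two steps: submit the license, and if the response is not acknowledged, show the returned messages and resubmit with acknowledge(true). The listener below is a minimal sketch of that flow using the request/response types shown in this test; the handling inside the callbacks is illustrative.

PutLicenseRequest request = new PutLicenseRequest().license(signedLicense);
licensesService.registerLicense(request, new ActionListener<PutLicenseResponse>() {
    @Override
    public void onResponse(PutLicenseResponse response) {
        if (response.isAcknowledged() == false) {
            // response.acknowledgeMessages() maps each licensee id to its warnings;
            // surface them, then resubmit with .acknowledge(true) once confirmed
        }
    }

    @Override
    public void onFailure(Exception e) {
        // installation failed outright
    }
});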

View File

@ -1,167 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.hamcrest.Matchers.equalTo;
public class LicensesExpirationCallbackTests extends ESSingleNodeTestCase {
static {
MetaData.registerPrototype(LicensesMetaData.TYPE, LicensesMetaData.PROTO);
}
@Override
protected boolean resetNodeAfterTest() {
return true;
}
public void testPostExpiration() throws Exception {
int postExpirySeconds = randomIntBetween(5, 10);
TimeValue postExpiryDuration = TimeValue.timeValueSeconds(postExpirySeconds);
TimeValue min = TimeValue.timeValueSeconds(postExpirySeconds - randomIntBetween(1, 3));
TimeValue max = TimeValue.timeValueSeconds(postExpirySeconds + randomIntBetween(1, 10));
final LicensesService.ExpirationCallback.Post post = new LicensesService.ExpirationCallback.Post(min, max, timeValueMillis(10)) {
@Override
public void on(License license) {
}
};
long now = System.currentTimeMillis();
assertThat(post.matches(now - postExpiryDuration.millis(), now), equalTo(true));
assertThat(post.matches(now + postExpiryDuration.getMillis(), now), equalTo(false));
}
public void testPostExpirationWithNullMax() throws Exception {
int postExpirySeconds = randomIntBetween(5, 10);
TimeValue postExpiryDuration = TimeValue.timeValueSeconds(postExpirySeconds);
TimeValue min = TimeValue.timeValueSeconds(postExpirySeconds - randomIntBetween(1, 3));
final LicensesService.ExpirationCallback.Post post = new LicensesService.ExpirationCallback.Post(min, null, timeValueMillis(10)) {
@Override
public void on(License license) {
}
};
long now = System.currentTimeMillis();
assertThat(post.matches(now - postExpiryDuration.millis(), now), equalTo(true));
}
public void testPreExpirationWithNullMin() throws Exception {
int expirySeconds = randomIntBetween(5, 10);
TimeValue expiryDuration = TimeValue.timeValueSeconds(expirySeconds);
TimeValue max = TimeValue.timeValueSeconds(expirySeconds + randomIntBetween(1, 10));
final LicensesService.ExpirationCallback.Pre pre = new LicensesService.ExpirationCallback.Pre(null, max, timeValueMillis(10)) {
@Override
public void on(License license) {
}
};
long now = System.currentTimeMillis();
assertThat(pre.matches(expiryDuration.millis() + now, now), equalTo(true));
}
public void testPreExpiration() throws Exception {
int expirySeconds = randomIntBetween(5, 10);
TimeValue expiryDuration = TimeValue.timeValueSeconds(expirySeconds);
TimeValue min = TimeValue.timeValueSeconds(expirySeconds - randomIntBetween(0, 3));
TimeValue max = TimeValue.timeValueSeconds(expirySeconds + randomIntBetween(1, 10));
final LicensesService.ExpirationCallback.Pre pre = new LicensesService.ExpirationCallback.Pre(min, max, timeValueMillis(10)) {
@Override
public void on(License license) {
}
};
long now = System.currentTimeMillis();
assertThat(pre.matches(expiryDuration.millis() + now, now), equalTo(true));
assertThat(pre.matches(now - expiryDuration.getMillis(), now), equalTo(false));
}
public void testPreExpirationNotification() throws Exception {
final LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(5));
AtomicInteger counter = new AtomicInteger(0);
// 2000, 1600, 1200
licensesService.setExpirationCallbacks(Collections.singletonList(
preCallbackLatch(TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(2), timeValueMillis(400), counter))
);
licensesService.start();
TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee("testPreExpirationNotification", logger);
licensesService.register(licensee);
boolean success = awaitBusy(() -> (counter.get() == 3 || counter.get() == 2));
assertThat("counter: actual: " + counter.get() + "vs expected: 3", success, equalTo(true));
licensesService.stop();
}
public void testPostExpirationNotification() throws Exception {
final LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(3));
AtomicInteger counter = new AtomicInteger(0);
// 700, 1700, 2700
licensesService.setExpirationCallbacks(Collections.singletonList(
postCallbackLatch(timeValueMillis(700), TimeValue.timeValueSeconds(3), TimeValue.timeValueSeconds(1), counter))
);
licensesService.start();
TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee("testPostExpirationNotification", logger);
licensesService.register(licensee);
// callback can be called only twice if the third notification is triggered with a delay
// causing the trigger time to be out of the post expiry callback window
boolean success = awaitBusy(() -> (counter.get() == 3 || counter.get() == 2));
assertThat("counter: actual: " + counter.get() + "vs expected: 3", success, equalTo(true));
licensesService.stop();
}
public void testMultipleExpirationNotification() throws Exception {
final LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(4));
AtomicInteger postCounter = new AtomicInteger(0);
AtomicInteger preCounter = new AtomicInteger(0);
licensesService.setExpirationCallbacks(Arrays.asList(
// 2000, 1600, 1200
preCallbackLatch(TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(2), timeValueMillis(400), preCounter),
// 100, 500, 900, 1300, 1700
postCallbackLatch(timeValueMillis(100), TimeValue.timeValueSeconds(2), timeValueMillis(400), postCounter))
);
licensesService.start();
TestUtils.AssertingLicensee licensee = new TestUtils.AssertingLicensee("testMultipleExpirationNotification", logger);
licensesService.register(licensee);
// callback can be called one less than expected if the last notification is triggered
// with a delay, causing the trigger time to be out of the expiry callback window
boolean success = awaitBusy(() -> ((preCounter.get() == 3 || preCounter.get() == 2)
&& (postCounter.get() == 5 || postCounter.get() == 4)));
assertThat("post count: actual: " + postCounter.get() + "vs expected: 5 " +
"pre count: actual: " + preCounter.get() + " vs expected: 3", success, equalTo(true));
licensesService.stop();
}
private static LicensesService.ExpirationCallback preCallbackLatch(TimeValue min, TimeValue max, TimeValue frequency,
final AtomicInteger count) {
return new LicensesService.ExpirationCallback.Pre(min, max, frequency) {
@Override
public void on(License license) {
count.incrementAndGet();
}
};
}
private static LicensesService.ExpirationCallback postCallbackLatch(TimeValue min, TimeValue max, TimeValue frequency,
final AtomicInteger count) {
return new LicensesService.ExpirationCallback.Post(min, max, frequency) {
@Override
public void on(License license) {
count.incrementAndGet();
}
};
}
}

View File

@ -1,223 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.plugin.TestUtils.AssertingLicensee;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.List;
import static org.elasticsearch.license.plugin.TestUtils.awaitNoBlock;
import static org.elasticsearch.license.plugin.TestUtils.awaitNoPendingTasks;
import static org.elasticsearch.license.plugin.TestUtils.generateSignedLicense;
import static org.elasticsearch.license.plugin.TestUtils.registerAndAckSignedLicenses;
import static org.hamcrest.Matchers.equalTo;
//test is just too slow, please fix it to not be sleep-based
@BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007")
public class LicensesExpiryNotificationTests extends ESSingleNodeTestCase {
static {
MetaData.registerPrototype(LicensesMetaData.TYPE, LicensesMetaData.PROTO);
}
@Override
protected boolean resetNodeAfterTest() {
return true;
}
public void testTrialLicenseEnforcement() throws Exception {
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(5));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(3));
licensesService.start();
String id1 = "testTrialLicenseEnforcement";
final AssertingLicensee licensee = new AssertingLicensee(id1, logger);
awaitNoBlock(client());
licensesService.register(licensee);
awaitNoPendingTasks(client());
boolean success = awaitBusy(() -> licensee.statuses.size() == 3);
// trail license: enable, grace, disabled
assertLicenseStates(licensee, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
assertTrue(dumpLicensingStates(licensee.statuses), success);
licensesService.stop();
}
public void testTrialLicenseEnforcementMultipleLicensees() throws Exception {
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(5));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(3));
licensesService.start();
String id1 = "testTrialLicenseEnforcementMultipleLicensees_1";
final AssertingLicensee licensee1 = new AssertingLicensee(id1, logger);
String id12 = "testTrialLicenseEnforcementMultipleLicensees_2";
final AssertingLicensee licensee2 = new AssertingLicensee(id12, logger);
awaitNoBlock(client());
licensesService.register(licensee1);
licensesService.register(licensee2);
awaitNoPendingTasks(client());
boolean success = awaitBusy(() -> licensee1.statuses.size() == 3);
assertTrue(dumpLicensingStates(licensee1.statuses), success);
success = awaitBusy(() -> licensee2.statuses.size() == 3);
assertTrue(dumpLicensingStates(licensee2.statuses), success);
// trail license: enable, grace, disabled
assertLicenseStates(licensee1, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
assertLicenseStates(licensee2, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
licensesService.stop();
}
public void testTrialSignedLicenseEnforcement() throws Exception {
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(2));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(3));
licensesService.start();
String id1 = "testTrialSignedLicenseEnforcement";
final AssertingLicensee licensee = new AssertingLicensee(id1, logger);
awaitNoBlock(client());
licensesService.register(licensee);
awaitNoPendingTasks(client());
boolean success = awaitBusy(() -> licensee.statuses.size() == 1);
assertTrue(dumpLicensingStates(licensee.statuses), success);
registerAndAckSignedLicenses(licensesService, generateSignedLicense(TimeValue.timeValueSeconds(4)), LicensesStatus.VALID);
success = awaitBusy(() -> licensee.statuses.size() == 4);
// trial: enable, signed: enable, signed: grace, signed: disabled
assertLicenseStates(licensee, LicenseState.ENABLED, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
assertTrue(dumpLicensingStates(licensee.statuses), success);
licensesService.stop();
}
public void testSignedLicenseEnforcement() throws Exception {
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(4));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(3));
licensesService.start();
String id1 = "testSignedLicenseEnforcement";
final AssertingLicensee licensee = new AssertingLicensee(id1, logger);
awaitNoBlock(client());
registerAndAckSignedLicenses(licensesService, generateSignedLicense(TimeValue.timeValueSeconds(2)), LicensesStatus.VALID);
licensesService.register(licensee);
awaitNoPendingTasks(client());
boolean success = awaitBusy(() -> licensee.statuses.size() == 3);
// signed: enable, signed: grace, signed: disabled
assertLicenseStates(licensee, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
assertTrue(dumpLicensingStates(licensee.statuses), success);
licensesService.stop();
}
public void testSingedLicenseEnforcementMultipleLicensees() throws Exception {
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(4));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(3));
licensesService.start();
String id1 = "testSingedLicenseEnforcementMultipleLicensees_1";
final AssertingLicensee licensee1 = new AssertingLicensee(id1, logger);
String id12 = "testSingedLicenseEnforcementMultipleLicensees_2";
final AssertingLicensee licensee2 = new AssertingLicensee(id12, logger);
awaitNoBlock(client());
registerAndAckSignedLicenses(licensesService, generateSignedLicense(TimeValue.timeValueSeconds(2)), LicensesStatus.VALID);
licensesService.register(licensee1);
licensesService.register(licensee2);
awaitNoPendingTasks(client());
boolean success = awaitBusy(() -> licensee1.statuses.size() == 3);
assertTrue(dumpLicensingStates(licensee1.statuses), success);
success = awaitBusy(() -> licensee2.statuses.size() == 3);
assertTrue(dumpLicensingStates(licensee2.statuses), success);
// signed license: enable, grace, disabled
assertLicenseStates(licensee1, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
assertLicenseStates(licensee2, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
licensesService.stop();
}
public void testMultipleSignedLicenseEnforcement() throws Exception {
// register with trial license and assert onEnable and onDisable notification
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(4));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(1));
licensesService.start();
String id1 = "testMultipleSignedLicenseEnforcement";
final AssertingLicensee licensee = new AssertingLicensee(id1, logger);
awaitNoBlock(client());
licensesService.register(licensee);
awaitNoPendingTasks(client());
// trial license enabled
boolean success = awaitBusy(() -> licensee.statuses.size() == 1);
assertTrue(dumpLicensingStates(licensee.statuses), success);
registerAndAckSignedLicenses(licensesService, generateSignedLicense("basic", TimeValue.timeValueSeconds(3)), LicensesStatus.VALID);
// signed license enabled
success = awaitBusy(() -> licensee.statuses.size() == 2);
assertTrue(dumpLicensingStates(licensee.statuses), success);
registerAndAckSignedLicenses(licensesService, generateSignedLicense("gold", TimeValue.timeValueSeconds(2)), LicensesStatus.VALID);
// second signed license enabled, grace and expired
success = awaitBusy(() ->licensee.statuses.size() == 5);
assertLicenseStates(licensee, LicenseState.ENABLED, LicenseState.ENABLED, LicenseState.ENABLED, LicenseState.GRACE_PERIOD,
LicenseState.DISABLED);
assertTrue(dumpLicensingStates(licensee.statuses), success);
licensesService.stop();
}
public void testNonOverlappingMultipleLicensesEnforcement() throws Exception {
// register with trial license and assert onEnable and onDisable notification
LicensesService licensesService = getInstanceFromNode(LicensesService.class);
licensesService.setTrialLicenseDuration(TimeValue.timeValueSeconds(3));
licensesService.setGracePeriodDuration(TimeValue.timeValueSeconds(1));
licensesService.start();
String id1 = "testNonOverlappingMultipleLicensesEnforcement";
final AssertingLicensee licensee = new AssertingLicensee(id1, logger);
awaitNoBlock(client());
licensesService.register(licensee);
// trial license: enabled, grace, disabled
boolean success = awaitBusy(() -> licensee.statuses.size() == 3);
assertTrue(dumpLicensingStates(licensee.statuses), success);
// install license
registerAndAckSignedLicenses(licensesService, generateSignedLicense("basic", TimeValue.timeValueSeconds(2)), LicensesStatus.VALID);
// trial license: enabled, grace, disabled, signed license: enabled, grace, disabled
success = awaitBusy(() -> licensee.statuses.size() == 6);
assertLicenseStates(licensee, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED, LicenseState.ENABLED,
LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
assertTrue(dumpLicensingStates(licensee.statuses), success);
licensesService.stop();
}
private void assertLicenseStates(AssertingLicensee licensee, LicenseState... states) {
StringBuilder msg = new StringBuilder();
msg.append("Actual: ");
msg.append(dumpLicensingStates(licensee.statuses));
msg.append(" Expected: ");
msg.append(dumpLicensingStates(states));
assertThat(msg.toString(), licensee.statuses.size(), equalTo(states.length));
for (int i = 0; i < states.length; i++) {
assertThat(msg.toString(), licensee.statuses.get(i).getLicenseState(), equalTo(states[i]));
}
}
private String dumpLicensingStates(List<Licensee.Status> statuses) {
return dumpLicensingStates(statuses.toArray(new Licensee.Status[statuses.size()]));
}
private String dumpLicensingStates(Licensee.Status... statuses) {
LicenseState[] states = new LicenseState[statuses.length];
for (int i = 0; i < statuses.length; i++) {
states[i] = statuses[i].getLicenseState();
}
return dumpLicensingStates(states);
}
private String dumpLicensingStates(LicenseState... states) {
StringBuilder sb = new StringBuilder();
sb.append("[");
for (int i = 0; i < states.length; i++) {
sb.append(states[i].name());
if (i != states.length - 1) {
sb.append(", ");
}
}
sb.append("]");
return sb.toString();
}
}

View File

@ -7,14 +7,22 @@ package org.elasticsearch.license.plugin.core;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.graph.Graph;
import org.elasticsearch.license.core.License; import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.TestUtils; import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest; import org.elasticsearch.license.plugin.action.delete.DeleteLicenseRequest;
import org.elasticsearch.xpack.monitoring.Monitoring;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.watcher.Watcher;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
@ -26,8 +34,19 @@ import static org.hamcrest.Matchers.not;
public class LicensesManagerServiceTests extends ESSingleNodeTestCase { public class LicensesManagerServiceTests extends ESSingleNodeTestCase {
static { @Override
MetaData.registerPrototype(LicensesMetaData.TYPE, LicensesMetaData.PROTO); protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(XPackPlugin.class);
}
@Override
protected Settings nodeSettings() {
return Settings.builder().
put(XPackPlugin.featureEnabledSetting(Security.NAME), false)
.put(XPackPlugin.featureEnabledSetting(Monitoring.NAME), false)
.put(XPackPlugin.featureEnabledSetting(Watcher.NAME), false)
.put(XPackPlugin.featureEnabledSetting(Graph.NAME), false)
.build();
} }
@Override @Override
@ -90,9 +109,7 @@ public class LicensesManagerServiceTests extends ESSingleNodeTestCase {
// ensure that the invalid license never made it to cluster state // ensure that the invalid license never made it to cluster state
LicensesMetaData licensesMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE); LicensesMetaData licensesMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
if (licensesMetaData != null) { assertThat(licensesMetaData.getLicense(), not(equalTo(tamperedLicense)));
assertThat(licensesMetaData.getLicense(), equalTo(LicensesMetaData.LICENSE_TOMBSTONE));
}
} }
public void testRemoveLicenses() throws Exception { public void testRemoveLicenses() throws Exception {
@ -185,4 +202,4 @@ public class LicensesManagerServiceTests extends ESSingleNodeTestCase {
} }
assertThat("remove license(s) failed", success.get(), equalTo(true)); assertThat("remove license(s) failed", success.get(), equalTo(true));
} }
} }

View File

@ -0,0 +1,91 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.license.plugin.core;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.license.core.License;
import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.license.plugin.TestUtils.AssertingLicensee;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
public class LicensesNotificationTests extends AbstractLicenseServiceTestCase {
public void testLicenseNotification() throws Exception {
final License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(48));
setInitialState(license);
licensesService.start();
int nLicensee = randomIntBetween(1, 3);
AssertingLicensee[] assertingLicensees = new AssertingLicensee[nLicensee];
for (int i = 0; i < assertingLicensees.length; i++) {
assertingLicensees[i] = new AssertingLicensee("testLicenseNotification" + i, logger);
licensesService.register(assertingLicensees[i]);
assertLicenseStates(assertingLicensees[i], LicenseState.ENABLED);
}
clock.fastForward(TimeValue.timeValueMillis(license.expiryDate() - clock.millis()));
final LicensesMetaData licensesMetaData = new LicensesMetaData(license);
licensesService.onUpdate(licensesMetaData);
for (AssertingLicensee assertingLicensee : assertingLicensees) {
assertLicenseStates(assertingLicensee, LicenseState.ENABLED, LicenseState.GRACE_PERIOD);
}
clock.fastForward(TimeValue.timeValueMillis((license.expiryDate() +
LicensesService.GRACE_PERIOD_DURATION.getMillis()) - clock.millis()));
licensesService.onUpdate(licensesMetaData);
for (AssertingLicensee assertingLicensee : assertingLicensees) {
assertLicenseStates(assertingLicensee, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED);
}
clock.setTime(new DateTime(DateTimeZone.UTC));
final License newLicense = TestUtils.generateSignedLicense(TimeValue.timeValueHours(2));
clock.fastForward(TimeValue.timeValueHours(1));
licensesService.onUpdate(new LicensesMetaData(newLicense));
for (AssertingLicensee assertingLicensee : assertingLicensees) {
assertLicenseStates(assertingLicensee, LicenseState.ENABLED, LicenseState.GRACE_PERIOD, LicenseState.DISABLED,
LicenseState.ENABLED);
}
licensesService.stop();
}
private void assertLicenseStates(AssertingLicensee licensee, LicenseState... states) {
StringBuilder msg = new StringBuilder();
msg.append("Actual: ");
msg.append(dumpLicensingStates(licensee.statuses));
msg.append(" Expected: ");
msg.append(dumpLicensingStates(states));
assertThat(msg.toString(), licensee.statuses.size(), equalTo(states.length));
for (int i = 0; i < states.length; i++) {
assertThat(msg.toString(), licensee.statuses.get(i).getLicenseState(), equalTo(states[i]));
}
}
private String dumpLicensingStates(List<Licensee.Status> statuses) {
return dumpLicensingStates(statuses.toArray(new Licensee.Status[statuses.size()]));
}
private String dumpLicensingStates(Licensee.Status... statuses) {
LicenseState[] states = new LicenseState[statuses.length];
for (int i = 0; i < statuses.length; i++) {
states[i] = statuses[i].getLicenseState();
}
return dumpLicensingStates(states);
}
private String dumpLicensingStates(LicenseState... states) {
StringBuilder sb = new StringBuilder();
sb.append("[");
for (int i = 0; i < states.length; i++) {
sb.append(states[i].name());
if (i != states.length - 1) {
sb.append(", ");
}
}
sb.append("]");
return sb.toString();
}
}
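The sequence of notifications above follows the license lifecycle relative to the mocked clock. The helper below restates that mapping (grace period again taken from LicensesService.GRACE_PERIOD_DURATION); it mirrors the expectations in this test rather than the production notification logic.

// Illustrative mapping from clock time to the state a licensee is notified with.
static LicenseState expectedState(License license, long graceMillis, long now) {
    if (now < license.expiryDate()) {
        return LicenseState.ENABLED;
    } else if (now < license.expiryDate() + graceMillis) {
        return LicenseState.GRACE_PERIOD;
    }
    return LicenseState.DISABLED;
}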

View File

@ -1,5 +1,5 @@
{ {
"license.delete": { "xpack.license.delete": {
"documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html", "documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html",
"methods": ["DELETE"], "methods": ["DELETE"],
"url": { "url": {

View File

@ -1,5 +1,5 @@
{ {
"license.get": { "xpack.license.get": {
"documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html", "documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html",
"methods": ["GET"], "methods": ["GET"],
"url": { "url": {

View File

@ -1,5 +1,5 @@
{ {
"license.post": { "xpack.license.post": {
"documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html", "documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html",
"methods": ["PUT", "POST"], "methods": ["PUT", "POST"],
"url": { "url": {

View File

@ -1,72 +1,78 @@
--- ---
"installing and getting license works": teardown:
- do:
xpack.license.post:
acknowledge: true
body: |
{"licenses":[{"uid":"894371dc-9t49-4997-93cb-8o2e3r7fa6a8","type":"trial","issue_date_in_millis":1411948800000,"expiry_date_in_millis":1916956799999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAgAAAA0FWh0T9njItjQ2qammAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQBZhvozA0trrxhUZ1QbaTsKTna9C5KVQ6pv8yg1pnsBpZXCl8kX1SrgoFn1bXq61IvJwfw5qnmYNiH3hRhTO9EyaCBqaLk8NXZQ6TrRkQSpEnnBwAYUkZeKXsIuBoOk4B4mzwC/r8aMAkzrTiEBtBbog+57cSaU9y37Gkdd+1jXCQrxP+jOEUf7gnXWZvE6oeRroLvCt1fYn09k0CF8kKTbrPTSjC6igZR3uvTHyee74XQ9PRavvHax73T4UOEdQZX/P1ibSQIWKbBRD5YQ1POYVjTayoltTnWLMxfEcAkkATJZLhpBEHST7kZWjrTS6J1dCReJc7a8Vsj/78HXvOIy"}]}
---
"Installing and getting license works":
## current license version ## current license version
- do: - do:
license.post: xpack.license.post:
acknowledge: true acknowledge: true
body: | body: |
{"license":{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"basic","issue_date_in_millis":1411948800000,"expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAgAAAA0lKPZ0a7aZquUltho/AAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQAALuQ44S3IG6SzolcXVJ6Z4CIXORDrYQ+wdLCeey0XdujTslAOj+k+vNgo6wauc7Uswi01esHu4lb5IgpvKy7RRCbh5bj/z2ubu2qMJqopp9BQyD7VQjVfqmG6seUMJwJ1a5Avvm9r41YPSPcrii3bKK2e1l6jK6N8ibCvnTyY/XkYGCJrBWTSJePDbg6ErbyodrZ37x1StLbPWcNAkmweyHjDJnvYnbeZZO7A3NmubXZjW7Ttf8/YwQyE00PqMcl7fVPY3hkKpAeHf8aaJbqkKYbqZuER3EWJX7ZvLVb1dNdNg8aXRn7YrkQcYwWgptYQpfV+D7yEJ4j5muAEoler"}} {"license":{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"basic","issue_date_in_millis":1411948800000,"expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAgAAAA0lKPZ0a7aZquUltho/AAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQAALuQ44S3IG6SzolcXVJ6Z4CIXORDrYQ+wdLCeey0XdujTslAOj+k+vNgo6wauc7Uswi01esHu4lb5IgpvKy7RRCbh5bj/z2ubu2qMJqopp9BQyD7VQjVfqmG6seUMJwJ1a5Avvm9r41YPSPcrii3bKK2e1l6jK6N8ibCvnTyY/XkYGCJrBWTSJePDbg6ErbyodrZ37x1StLbPWcNAkmweyHjDJnvYnbeZZO7A3NmubXZjW7Ttf8/YwQyE00PqMcl7fVPY3hkKpAeHf8aaJbqkKYbqZuER3EWJX7ZvLVb1dNdNg8aXRn7YrkQcYwWgptYQpfV+D7yEJ4j5muAEoler"}}
- match: { license_status: "valid" } - match: { license_status: "valid" }
- do: - do:
license.get: {} xpack.license.get: {}
## a license object has 10 attributes ## a license object has 10 attributes
- length: { license: 10 } - length: { license: 10 }
## bwc for licenses format ## bwc for licenses format
- do: - do:
license.post: xpack.license.post:
acknowledge: true acknowledge: true
body: | body: |
{"licenses":[{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"basic","issue_date_in_millis":1411948800000,"expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAgAAAA0lKPZ0a7aZquUltho/AAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQAALuQ44S3IG6SzolcXVJ6Z4CIXORDrYQ+wdLCeey0XdujTslAOj+k+vNgo6wauc7Uswi01esHu4lb5IgpvKy7RRCbh5bj/z2ubu2qMJqopp9BQyD7VQjVfqmG6seUMJwJ1a5Avvm9r41YPSPcrii3bKK2e1l6jK6N8ibCvnTyY/XkYGCJrBWTSJePDbg6ErbyodrZ37x1StLbPWcNAkmweyHjDJnvYnbeZZO7A3NmubXZjW7Ttf8/YwQyE00PqMcl7fVPY3hkKpAeHf8aaJbqkKYbqZuER3EWJX7ZvLVb1dNdNg8aXRn7YrkQcYwWgptYQpfV+D7yEJ4j5muAEoler"}]} {"licenses":[{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"basic","issue_date_in_millis":1411948800000,"expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAgAAAA0lKPZ0a7aZquUltho/AAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQAALuQ44S3IG6SzolcXVJ6Z4CIXORDrYQ+wdLCeey0XdujTslAOj+k+vNgo6wauc7Uswi01esHu4lb5IgpvKy7RRCbh5bj/z2ubu2qMJqopp9BQyD7VQjVfqmG6seUMJwJ1a5Avvm9r41YPSPcrii3bKK2e1l6jK6N8ibCvnTyY/XkYGCJrBWTSJePDbg6ErbyodrZ37x1StLbPWcNAkmweyHjDJnvYnbeZZO7A3NmubXZjW7Ttf8/YwQyE00PqMcl7fVPY3hkKpAeHf8aaJbqkKYbqZuER3EWJX7ZvLVb1dNdNg8aXRn7YrkQcYwWgptYQpfV+D7yEJ4j5muAEoler"}]}
- match: { license_status: "valid" } - match: { license_status: "valid" }
- do: - do:
license.get: {} xpack.license.get: {}
- length: { license: 10 } - length: { license: 10 }
## license version: 1.x ## license version: 1.x
- do: - do:
license.post: xpack.license.post:
acknowledge: true acknowledge: true
body: | body: |
{"licenses":[{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"subscription","subscription_type":"gold","issue_date_in_millis":1411948800000,"feature":"shield","expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAQAAAA0LVAywwpSH94cyXr4zAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQA4qscc/URRZVdFoLwgy9dqybYEQLW8YLkiAyPV5XHHHdtk+dtZIepiNEDkUXhSX2waVJlsNRF8/4kqplDfwNoD2TUM8fTgiIfiSiZYGDTGST+yW/5eAveEU5J5v1liBN27bwkqL+V4YAa0Tcm7NKKwjScWKAHiTU3vF8chPkGfCHE0kQgVwPC9RE82pTw0s6/uR4PfLGNFfqPM0uiE5nucfVrtj89JQiO/KA/7ZyFbo7VTNXxZQt7T7rZWBCP9KIjptXzcWuk08Q5S+rSoJNYbFo3HGKtrCVsRz/55rceNtdwKKXu1IwnSeir4I1/KLduQTtFLy0+1th87VS8T88UT"}]} {"licenses":[{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"subscription","subscription_type":"gold","issue_date_in_millis":1411948800000,"feature":"shield","expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAQAAAA0LVAywwpSH94cyXr4zAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQA4qscc/URRZVdFoLwgy9dqybYEQLW8YLkiAyPV5XHHHdtk+dtZIepiNEDkUXhSX2waVJlsNRF8/4kqplDfwNoD2TUM8fTgiIfiSiZYGDTGST+yW/5eAveEU5J5v1liBN27bwkqL+V4YAa0Tcm7NKKwjScWKAHiTU3vF8chPkGfCHE0kQgVwPC9RE82pTw0s6/uR4PfLGNFfqPM0uiE5nucfVrtj89JQiO/KA/7ZyFbo7VTNXxZQt7T7rZWBCP9KIjptXzcWuk08Q5S+rSoJNYbFo3HGKtrCVsRz/55rceNtdwKKXu1IwnSeir4I1/KLduQTtFLy0+1th87VS8T88UT"}]}
- match: { license_status: "valid" } - match: { license_status: "valid" }
- do: - do:
license.get: {} xpack.license.get: {}
- length: { license: 10 } - length: { license: 10 }
## multiple licenses version: 1.x ## multiple licenses version: 1.x
- do: - do:
license.post: xpack.license.post:
acknowledge: true acknowledge: true
body: | body: |
{"licenses":[{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"internal","subscription_type":"none","issue_date_in_millis":1411948800000,"feature":"shield","expiry_date_in_millis":1440892799999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAQAAAA04Q4ky3rFyyWLFkytEAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQBxMvUMn4h2E4R4TQMijahTxQj4LPQO4f1M79UxX/XkDlGcH+J5pRHx08OtTRPsFL1lED+h+PIXx307Vo+PNDsOxrWvoYZeYBkOLAO3ny9vhQga+52jYhMxIuFrT9xbcSCSNpMhGojgOIPU2WgiopVdVcimo1+Gk8VtklPB1wPwFzfOjOnPgp/Icx3WYpfkeAUUOyWUYiFIBAe4bnz84iF+xwLKbgYk6aHF25ECBtdb/Uruhcm9+jEFpoIEUtCouvvk9C+NJZ4OickV4xpRgaRG2x9PONH8ZN0QGhGYhJGbisoCxuDmlLsyVxqxfMu3n/r7/jdsEJScjAlSrsLDOu6H"},{"uid":"893361dc-9749-4997-93cb-802e3dofh7aa","type":"internal","subscription_type":"none","issue_date_in_millis":1443484800000,"feature":"watcher","expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAQAAAA0Sc90guRIaQEmgLvMnAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQCQ94dju0pnDZR3Uuypi0ic3aQJ+nvVqe+U8u79Dga5n1qIjcHDh7HvIBJEkF+tnVPlo/PXV/x7BZSwVY1PVErit+6rYix1yuHEgqwxmx/VdRICjCaZM6tk0Ob4dZCPv6Ebn2Mmk89KHC/PwiLPqF6QfwV/Pkpa8k2A3ORJmvYSDvXhe6tCs8dqc4ebrsFxqrZjwWh5CZSpzqqZBFXlngDv2N0hHhpGlueRszD0JJ5dfEL5ZA1DDOrgO9OJVejSHyRqe1L5QRUNdXPVfS+EAG0Dd1cNdJ/sMpYCPnVjbw6iq2/YgM3cuztsXVBY7ij4WnoP3ce7Zjs9TwHn+IqzftC6"}]} 
{"licenses":[{"uid":"893361dc-9749-4997-93cb-802e3d7fa4a8","type":"internal","subscription_type":"none","issue_date_in_millis":1411948800000,"feature":"shield","expiry_date_in_millis":1440892799999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAQAAAA04Q4ky3rFyyWLFkytEAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQBxMvUMn4h2E4R4TQMijahTxQj4LPQO4f1M79UxX/XkDlGcH+J5pRHx08OtTRPsFL1lED+h+PIXx307Vo+PNDsOxrWvoYZeYBkOLAO3ny9vhQga+52jYhMxIuFrT9xbcSCSNpMhGojgOIPU2WgiopVdVcimo1+Gk8VtklPB1wPwFzfOjOnPgp/Icx3WYpfkeAUUOyWUYiFIBAe4bnz84iF+xwLKbgYk6aHF25ECBtdb/Uruhcm9+jEFpoIEUtCouvvk9C+NJZ4OickV4xpRgaRG2x9PONH8ZN0QGhGYhJGbisoCxuDmlLsyVxqxfMu3n/r7/jdsEJScjAlSrsLDOu6H"},{"uid":"893361dc-9749-4997-93cb-802e3dofh7aa","type":"internal","subscription_type":"none","issue_date_in_millis":1443484800000,"feature":"watcher","expiry_date_in_millis":1914278399999,"max_nodes":1,"issued_to":"issuedTo","issuer":"issuer","signature":"AAAAAQAAAA0Sc90guRIaQEmgLvMnAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQCQ94dju0pnDZR3Uuypi0ic3aQJ+nvVqe+U8u79Dga5n1qIjcHDh7HvIBJEkF+tnVPlo/PXV/x7BZSwVY1PVErit+6rYix1yuHEgqwxmx/VdRICjCaZM6tk0Ob4dZCPv6Ebn2Mmk89KHC/PwiLPqF6QfwV/Pkpa8k2A3ORJmvYSDvXhe6tCs8dqc4ebrsFxqrZjwWh5CZSpzqqZBFXlngDv2N0hHhpGlueRszD0JJ5dfEL5ZA1DDOrgO9OJVejSHyRqe1L5QRUNdXPVfS+EAG0Dd1cNdJ/sMpYCPnVjbw6iq2/YgM3cuztsXVBY7ij4WnoP3ce7Zjs9TwHn+IqzftC6"}]}
- match: { license_status: "valid" } - match: { license_status: "valid" }
- do: - do:
license.get: {} xpack.license.get: {}
- length: { license: 10 } - length: { license: 10 }
- match: { license.uid: "893361dc-9749-4997-93cb-802e3dofh7aa" } - match: { license.uid: "893361dc-9749-4997-93cb-802e3dofh7aa" }
--- ---
"Should throw 404 after license deletion": "Should throw 404 after license deletion":
- do: - do:
license.delete: {} xpack.license.delete: {}
- match: { acknowledged: true } - match: { acknowledged: true }
- do: - do:
xpack.license.get: {}
catch: missing catch: missing
license.get: {}

View File

@ -52,55 +52,55 @@ public class MonitoringSettings extends AbstractComponent {
* Sampling interval between two collections (default to 10s) * Sampling interval between two collections (default to 10s)
*/ */
public static final Setting<TimeValue> INTERVAL = public static final Setting<TimeValue> INTERVAL =
timeSetting(key("agent.interval"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); timeSetting(collectionKey("interval"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope);
/** /**
* Timeout value when collecting index statistics (default to 10m) * Timeout value when collecting index statistics (default to 10m)
*/ */
public static final Setting<TimeValue> INDEX_STATS_TIMEOUT = public static final Setting<TimeValue> INDEX_STATS_TIMEOUT =
timeSetting(key("agent.index.stats.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); timeSetting(collectionKey("index.stats.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope);
/** /**
* Timeout value when collecting total indices statistics (default to 10m) * Timeout value when collecting total indices statistics (default to 10m)
*/ */
public static final Setting<TimeValue> INDICES_STATS_TIMEOUT = public static final Setting<TimeValue> INDICES_STATS_TIMEOUT =
timeSetting(key("agent.indices.stats.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); timeSetting(collectionKey("indices.stats.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope);
/** /**
* List of indices names whose stats will be exported (default to all indices) * List of indices names whose stats will be exported (default to all indices)
*/ */
public static final Setting<List<String>> INDICES = public static final Setting<List<String>> INDICES =
listSetting(key("agent.indices"), Collections.emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope); listSetting(collectionKey("indices"), Collections.emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope);
/** /**
* Timeout value when collecting the cluster state (default to 10m) * Timeout value when collecting the cluster state (default to 10m)
*/ */
public static final Setting<TimeValue> CLUSTER_STATE_TIMEOUT = public static final Setting<TimeValue> CLUSTER_STATE_TIMEOUT =
timeSetting(key("agent.cluster.state.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); timeSetting(collectionKey("cluster.state.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope);
/** /**
* Timeout value when collecting the recovery information (default to 10m) * Timeout value when collecting the recovery information (default to 10m)
*/ */
public static final Setting<TimeValue> CLUSTER_STATS_TIMEOUT = public static final Setting<TimeValue> CLUSTER_STATS_TIMEOUT =
timeSetting(key("agent.cluster.stats.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); timeSetting(collectionKey("cluster.stats.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope);
/** /**
* Timeout value when collecting the recovery information (default to 10m) * Timeout value when collecting the recovery information (default to 10m)
*/ */
public static final Setting<TimeValue> INDEX_RECOVERY_TIMEOUT = public static final Setting<TimeValue> INDEX_RECOVERY_TIMEOUT =
timeSetting(key("agent.index.recovery.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope); timeSetting(collectionKey("index.recovery.timeout"), TimeValue.timeValueSeconds(10), Property.Dynamic, Property.NodeScope);
/** /**
* Flag to indicate if only active recoveries should be collected (default to false: all recoveries are collected) * Flag to indicate if only active recoveries should be collected (default to false: all recoveries are collected)
*/ */
public static final Setting<Boolean> INDEX_RECOVERY_ACTIVE_ONLY = public static final Setting<Boolean> INDEX_RECOVERY_ACTIVE_ONLY =
boolSetting(key("agent.index.recovery.active_only"), false, Property.Dynamic, Property.NodeScope) ; boolSetting(collectionKey("index.recovery.active_only"), false, Property.Dynamic, Property.NodeScope) ;
/** /**
* List of collectors allowed to collect data (default to all) * List of collectors allowed to collect data (default to all)
*/ */
public static final Setting<List<String>> COLLECTORS = public static final Setting<List<String>> COLLECTORS =
listSetting(key("agent.collectors"), Collections.emptyList(), Function.identity(), Property.NodeScope); listSetting(collectionKey("collectors"), Collections.emptyList(), Function.identity(), Property.NodeScope);
/** /**
* The default retention duration of the monitoring history data. * The default retention duration of the monitoring history data.
@ -123,7 +123,7 @@ public class MonitoringSettings extends AbstractComponent {
* Settings/Options per configured exporter * Settings/Options per configured exporter
*/ */
public static final Setting<Settings> EXPORTERS_SETTINGS = public static final Setting<Settings> EXPORTERS_SETTINGS =
groupSetting(key("agent.exporters."), Property.Dynamic, Property.NodeScope); groupSetting(collectionKey("exporters."), Property.Dynamic, Property.NodeScope);
public static List<Setting<?>> getSettings() { public static List<Setting<?>> getSettings() {
return Arrays.asList(INDICES, return Arrays.asList(INDICES,
@ -141,7 +141,7 @@ public class MonitoringSettings extends AbstractComponent {
} }
public static List<String> getSettingsFilter() { public static List<String> getSettingsFilter() {
return Arrays.asList("xpack.monitoring.agent.exporters.*.auth.*", "xpack.monitoring.agent.exporters.*.ssl.*"); return Arrays.asList("xpack.monitoring.collection.exporters.*.auth.*", "xpack.monitoring.collection.exporters.*.ssl.*");
} }
@ -227,6 +227,17 @@ public class MonitoringSettings extends AbstractComponent {
this.indices = indices.toArray(new String[0]); this.indices = indices.toArray(new String[0]);
} }
/**
* Prefix the {@code key} with the Monitoring prefix and "collection." .
*
* @param key The key to prefix
* @return The key prefixed by the product prefixes + "collection." .
* @see #key(String)
*/
static String collectionKey(String key) {
return key("collection." + key);
}
/** /**
* Prefix the {@code key} with the Monitoring prefix. * Prefix the {@code key} with the Monitoring prefix.
* *
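For orientation, a short sketch of node settings written against the renamed prefix; the keys mirror the ones used elsewhere in this commit, while the exporter name "_exporter" and the host value are placeholders (assumes org.elasticsearch.common.settings.Settings):

    Settings monitoringSettings = Settings.builder()
            .put("xpack.monitoring.collection.interval", "10s")                  // was xpack.monitoring.agent.interval
            .put("xpack.monitoring.collection.exporters._exporter.type", "http") // was xpack.monitoring.agent.exporters...
            .put("xpack.monitoring.collection.exporters._exporter.host", "localhost:9200")
            .build();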

View File

@ -58,7 +58,7 @@ public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest>
validationException); validationException);
} }
if (Strings.hasLength(doc.getMonitoringVersion()) == false) { if (Strings.hasLength(doc.getMonitoringVersion()) == false) {
validationException = addValidationError("monitored system version is missing for monitoring document [" + i + "]", validationException = addValidationError("monitored system API version is missing for monitoring document [" + i + "]",
validationException); validationException);
} }
if (Strings.hasLength(doc.getType()) == false) { if (Strings.hasLength(doc.getType()) == false) {
@ -84,7 +84,7 @@ public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest>
/** /**
* Parses a monitoring bulk request and builds the list of documents to be indexed. * Parses a monitoring bulk request and builds the list of documents to be indexed.
*/ */
public MonitoringBulkRequest add(BytesReference content, String defaultMonitoringId, String defaultMonitoringVersion, public MonitoringBulkRequest add(BytesReference content, String defaultMonitoringId, String defaultMonitoringApiVersion,
String defaultType) throws Exception { String defaultType) throws Exception {
// MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest: // MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest:
// instead of duplicating the parsing logic here we use a new BulkRequest instance to parse the content. // instead of duplicating the parsing logic here we use a new BulkRequest instance to parse the content.
@ -97,7 +97,7 @@ public class MonitoringBulkRequest extends ActionRequest<MonitoringBulkRequest>
// builds a new monitoring document based on the index request // builds a new monitoring document based on the index request
MonitoringBulkDoc doc = MonitoringBulkDoc doc =
new MonitoringBulkDoc(defaultMonitoringId, new MonitoringBulkDoc(defaultMonitoringId,
defaultMonitoringVersion, defaultMonitoringApiVersion,
MonitoringIndex.from(indexRequest.index()), MonitoringIndex.from(indexRequest.index()),
indexRequest.type(), indexRequest.type(),
indexRequest.id(), indexRequest.id(),

View File

@ -21,9 +21,9 @@ public class MonitoringBulkRequestBuilder
return this; return this;
} }
public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultVersion, String defaultType) public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultApiVersion, String defaultType)
throws Exception { throws Exception {
request.add(content, defaultId, defaultVersion, defaultType); request.add(content, defaultId, defaultApiVersion, defaultType);
return this; return this;
} }
} }

View File

@ -16,13 +16,13 @@ public final class MonitoringTemplateUtils {
private static final String TEMPLATE_VERSION_PROPERTY = Pattern.quote("${monitoring.template.version}"); private static final String TEMPLATE_VERSION_PROPERTY = Pattern.quote("${monitoring.template.version}");
/** Current version of es and data templates **/ /** Current version of es and data templates **/
public static final Integer TEMPLATE_VERSION = 2; public static final String TEMPLATE_VERSION = "2";
private MonitoringTemplateUtils() { private MonitoringTemplateUtils() {
} }
public static String loadTemplate(String id) { public static String loadTemplate(String id) {
String resource = String.format(Locale.ROOT, TEMPLATE_FILE, id); String resource = String.format(Locale.ROOT, TEMPLATE_FILE, id);
return TemplateUtils.loadTemplate(resource, String.valueOf(TEMPLATE_VERSION), TEMPLATE_VERSION_PROPERTY); return TemplateUtils.loadTemplate(resource, TEMPLATE_VERSION, TEMPLATE_VERSION_PROPERTY);
} }
} }
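A small usage sketch of the helper above; the template id "es" is only an assumption for illustration:

    // formats TEMPLATE_FILE with the given id, loads that classpath resource, and replaces
    // every ${monitoring.template.version} placeholder with TEMPLATE_VERSION ("2")
    String template = MonitoringTemplateUtils.loadTemplate("es");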

View File

@ -536,11 +536,8 @@ public class HttpExporter extends Exporter {
for (String host : hosts) { for (String host : hosts) {
try { try {
HttpExporterUtils.parseHostWithPath(host, ""); HttpExporterUtils.parseHostWithPath(host, "");
} catch (URISyntaxException e) { } catch (URISyntaxException | MalformedURLException e) {
throw new SettingsException("[xpack.monitoring.agent.exporter] invalid host: [" + host + "]." + throw new SettingsException("[xpack.monitoring.collection.exporters] invalid host: [" + host + "]." +
" error: [" + e.getMessage() + "]");
} catch (MalformedURLException e) {
throw new SettingsException("[xpack.monitoring.agent.exporter] invalid host: [" + host + "]." +
" error: [" + e.getMessage() + "]"); " error: [" + e.getMessage() + "]");
} }
} }

View File

@ -137,8 +137,8 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
} }
// Used in tests // Used in tests
protected Data(Integer version) { protected Data(String version) {
this.index = String.join(DELIMITER, PREFIX, DATA, String.valueOf(version)); this.index = String.join(DELIMITER, PREFIX, DATA, version);
} }
@Override @Override
@ -153,7 +153,7 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
@Override @Override
public String templateName() { public String templateName() {
return String.format(Locale.ROOT, "%s-%s-%d", PREFIX, DATA, MonitoringTemplateUtils.TEMPLATE_VERSION); return String.format(Locale.ROOT, "%s-%s-%s", PREFIX, DATA, MonitoringTemplateUtils.TEMPLATE_VERSION);
} }
@Override @Override
@ -180,9 +180,9 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
} }
// Used in tests // Used in tests
protected Timestamped(MonitoredSystem system, Settings settings, Integer version) { protected Timestamped(MonitoredSystem system, Settings settings, String version) {
this.system = system; this.system = system;
this.index = String.join(DELIMITER, PREFIX, system.getSystem(), String.valueOf(version)); this.index = String.join(DELIMITER, PREFIX, system.getSystem(), version);
String format = INDEX_NAME_TIME_FORMAT_SETTING.get(settings); String format = INDEX_NAME_TIME_FORMAT_SETTING.get(settings);
try { try {
this.formatter = DateTimeFormat.forPattern(format).withZoneUTC(); this.formatter = DateTimeFormat.forPattern(format).withZoneUTC();
@ -209,7 +209,7 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
@Override @Override
public String templateName() { public String templateName() {
return String.format(Locale.ROOT, "%s-%s-%d", PREFIX, getId(), MonitoringTemplateUtils.TEMPLATE_VERSION); return String.format(Locale.ROOT, "%s-%s-%s", PREFIX, getId(), MonitoringTemplateUtils.TEMPLATE_VERSION);
} }
@Override @Override

View File

@ -5,7 +5,6 @@
*/ */
package org.elasticsearch.xpack.monitoring.agent.resolver; package org.elasticsearch.xpack.monitoring.agent.resolver;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.monitoring.MonitoredSystem; import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc; import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
@ -21,6 +20,7 @@ import org.elasticsearch.xpack.monitoring.agent.collector.indices.IndicesStatsMo
import org.elasticsearch.xpack.monitoring.agent.collector.node.NodeStatsMonitoringDoc; import org.elasticsearch.xpack.monitoring.agent.collector.node.NodeStatsMonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.collector.shards.ShardMonitoringDoc; import org.elasticsearch.xpack.monitoring.agent.collector.shards.ShardMonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkDataResolver; import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkDataResolver;
import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkTimestampedResolver; import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkTimestampedResolver;
import org.elasticsearch.xpack.monitoring.agent.resolver.cluster.ClusterInfoResolver; import org.elasticsearch.xpack.monitoring.agent.resolver.cluster.ClusterInfoResolver;
@ -74,8 +74,13 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
private void registerMonitoredSystem(MonitoredSystem id, Settings settings) { private void registerMonitoredSystem(MonitoredSystem id, Settings settings) {
final MonitoringBulkDataResolver dataResolver = new MonitoringBulkDataResolver(); final MonitoringBulkDataResolver dataResolver = new MonitoringBulkDataResolver();
final MonitoringBulkTimestampedResolver timestampedResolver = new MonitoringBulkTimestampedResolver(id, settings); final MonitoringBulkTimestampedResolver timestampedResolver = new MonitoringBulkTimestampedResolver(id, settings);
registrations.add(resolveByClassSystemVersion(id, dataResolver, MonitoringIndex.DATA, Version.CURRENT));
registrations.add(resolveByClassSystemVersion(id, timestampedResolver, MonitoringIndex.TIMESTAMPED, Version.CURRENT)); final String currentApiVersion = MonitoringTemplateUtils.TEMPLATE_VERSION;
// Note: We resolve requests by the API version that is supplied; this allows us to translate and up-convert any older
// requests that come through the _xpack/monitoring/_bulk endpoint
registrations.add(resolveByClassSystemVersion(id, dataResolver, MonitoringIndex.DATA, currentApiVersion));
registrations.add(resolveByClassSystemVersion(id, timestampedResolver, MonitoringIndex.TIMESTAMPED, currentApiVersion));
} }
/** /**
@ -100,7 +105,7 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
} }
static Registration resolveByClassSystemVersion(MonitoredSystem system, MonitoringIndexNameResolver resolver, MonitoringIndex index, static Registration resolveByClassSystemVersion(MonitoredSystem system, MonitoringIndexNameResolver resolver, MonitoringIndex index,
Version version) { String apiVersion) {
return new Registration(resolver, doc -> { return new Registration(resolver, doc -> {
try { try {
if (doc instanceof MonitoringBulkDoc == false || index != ((MonitoringBulkDoc)doc).getIndex()) { if (doc instanceof MonitoringBulkDoc == false || index != ((MonitoringBulkDoc)doc).getIndex()) {
@ -109,7 +114,7 @@ public class ResolversRegistry implements Iterable<MonitoringIndexNameResolver>
if (system != MonitoredSystem.fromSystem(doc.getMonitoringId())) { if (system != MonitoredSystem.fromSystem(doc.getMonitoringId())) {
return false; return false;
} }
return version == Version.fromString(doc.getMonitoringVersion()); return apiVersion.equals(doc.getMonitoringVersion());
} catch (Exception e) { } catch (Exception e) {
return false; return false;
} }
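To make the version matching above concrete, a hedged sketch of the comparison a registration now performs; the constructor and getter are the ones used elsewhere in this commit:

    MonitoringBulkDoc doc =
            new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
    // matches when the document was sent with the current monitoring API version ("2"),
    // rather than comparing against the node's Version as before
    boolean matches = MonitoringTemplateUtils.TEMPLATE_VERSION.equals(doc.getMonitoringVersion());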

View File

@ -29,7 +29,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestMonitoringBulkAction extends MonitoringRestHandler { public class RestMonitoringBulkAction extends MonitoringRestHandler {
public static final String MONITORING_ID = "system_id"; public static final String MONITORING_ID = "system_id";
public static final String MONITORING_VERSION = "system_version"; public static final String MONITORING_VERSION = "system_api_version";
@Inject @Inject
public RestMonitoringBulkAction(Settings settings, RestController controller) { public RestMonitoringBulkAction(Settings settings, RestController controller) {

View File

@ -32,9 +32,9 @@ public class MonitoringF {
settings.put("script.inline", "true"); settings.put("script.inline", "true");
settings.put("security.manager.enabled", "false"); settings.put("security.manager.enabled", "false");
settings.put("cluster.name", MonitoringF.class.getSimpleName()); settings.put("cluster.name", MonitoringF.class.getSimpleName());
settings.put("xpack.monitoring.agent.interval", "5s"); settings.put("xpack.monitoring.collection.interval", "1s");
if (!CollectionUtils.isEmpty(args)) { if (!CollectionUtils.isEmpty(args)) {
settings.putArray("xpack.monitoring.agent.collectors", args); settings.putArray("xpack.monitoring.collection.collectors", args);
} }
final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch latch = new CountDownLatch(1);

View File

@ -36,13 +36,13 @@ public class MonitoringBulkRequestTests extends ESTestCase {
MonitoringBulkDoc doc = new MonitoringBulkDoc(null, null); MonitoringBulkDoc doc = new MonitoringBulkDoc(null, null);
assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("monitored system id is missing for monitoring document [0]", assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("monitored system id is missing for monitoring document [0]",
"monitored system version is missing for monitoring document [0]", "monitored system API version is missing for monitoring document [0]",
"type is missing for monitoring document [0]", "type is missing for monitoring document [0]",
"source is missing for monitoring document [0]")); "source is missing for monitoring document [0]"));
doc = new MonitoringBulkDoc("id", null); doc = new MonitoringBulkDoc("id", null);
assertValidationErrors(new MonitoringBulkRequest().add(doc), assertValidationErrors(new MonitoringBulkRequest().add(doc),
hasItems("monitored system version is missing for monitoring document [0]", hasItems("monitored system API version is missing for monitoring document [0]",
"type is missing for monitoring document [0]", "type is missing for monitoring document [0]",
"source is missing for monitoring document [0]")); "source is missing for monitoring document [0]"));
@ -92,7 +92,7 @@ public class MonitoringBulkRequestTests extends ESTestCase {
assertValidationErrors(request, hasItems("type is missing for monitoring document [1]", assertValidationErrors(request, hasItems("type is missing for monitoring document [1]",
"source is missing for monitoring document [2]", "source is missing for monitoring document [2]",
"monitored system version is missing for monitoring document [3]", "monitored system API version is missing for monitoring document [3]",
"monitored system id is missing for monitoring document [4]")); "monitored system id is missing for monitoring document [4]"));
} }

View File

@ -5,13 +5,12 @@
*/ */
package org.elasticsearch.xpack.monitoring.action; package org.elasticsearch.xpack.monitoring.action;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.monitoring.MonitoredSystem; import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkTimestampedResolver; import org.elasticsearch.xpack.monitoring.agent.resolver.bulk.MonitoringBulkTimestampedResolver;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;
@ -29,7 +28,6 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
@TestLogging("_root:DEBUG")
public class MonitoringBulkTests extends MonitoringIntegTestCase { public class MonitoringBulkTests extends MonitoringIntegTestCase {
@Override @Override
@ -43,7 +41,7 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
int numDocs = scaledRandomIntBetween(100, 5000); int numDocs = scaledRandomIntBetween(100, 5000);
for (int i = 0; i < numDocs; i++) { for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString()); MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
doc.setType(randomFrom(types)); doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", numDocs).endObject().bytes()); doc.setSource(jsonBuilder().startObject().field("num", numDocs).endObject().bytes());
requestBuilder.add(doc); requestBuilder.add(doc);
@ -95,7 +93,8 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
int numDocs = scaledRandomIntBetween(10, 50); int numDocs = scaledRandomIntBetween(10, 50);
for (int k = 0; k < numDocs; k++) { for (int k = 0; k < numDocs; k++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString()); MonitoringBulkDoc doc =
new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
doc.setType("concurrent"); doc.setType("concurrent");
doc.setSource(jsonBuilder().startObject().field("num", k).endObject().bytes()); doc.setSource(jsonBuilder().startObject().field("num", k).endObject().bytes());
requestBuilder.add(doc); requestBuilder.add(doc);
@ -133,10 +132,10 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
for (int i = 0; i < totalDocs; i++) { for (int i = 0; i < totalDocs; i++) {
MonitoringBulkDoc doc; MonitoringBulkDoc doc;
if (randomBoolean()) { if (randomBoolean()) {
doc = new MonitoringBulkDoc("unknown", Version.CURRENT.toString()); doc = new MonitoringBulkDoc("unknown", MonitoringTemplateUtils.TEMPLATE_VERSION);
unsupportedDocs++; unsupportedDocs++;
} else { } else {
doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString()); doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
} }
doc.setType(randomFrom(types)); doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes()); doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes());

View File

@ -34,6 +34,7 @@ import org.elasticsearch.xpack.monitoring.MonitoringSettings;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;
import org.junit.Before; import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
@ -220,7 +221,7 @@ public abstract class AbstractCollectorTestCase extends MonitoringIntegTestCase
public static class InternalXPackPlugin extends XPackPlugin { public static class InternalXPackPlugin extends XPackPlugin {
public InternalXPackPlugin(Settings settings) { public InternalXPackPlugin(Settings settings) throws IOException {
super(settings); super(settings);
licensing = new InternalLicensing(); licensing = new InternalLicensing();
} }
@ -252,7 +253,7 @@ public abstract class AbstractCollectorTestCase extends MonitoringIntegTestCase
private volatile License license; private volatile License license;
@Override @Override
public List<String> licenseesWithState(LicenseState state) { public LicenseState licenseState() {
return null; return null;
} }

View File

@ -32,7 +32,7 @@ public abstract class AbstractExporterTemplateTestCase extends MonitoringIntegTe
.put(MonitoringSettings.INTERVAL.getKey(), "-1"); .put(MonitoringSettings.INTERVAL.getKey(), "-1");
for (Map.Entry<String, String> setting : exporterSettings().getAsMap().entrySet()) { for (Map.Entry<String, String> setting : exporterSettings().getAsMap().entrySet()) {
settings.put("xpack.monitoring.agent.exporters._exporter." + setting.getKey(), setting.getValue()); settings.put("xpack.monitoring.collection.exporters._exporter." + setting.getKey(), setting.getValue());
} }
return settings.build(); return settings.build();
} }

View File

@ -176,8 +176,8 @@ public class ExportersTests extends ESTestCase {
final AtomicReference<Settings> settingsHolder = new AtomicReference<>(); final AtomicReference<Settings> settingsHolder = new AtomicReference<>();
Settings nodeSettings = Settings.builder() Settings nodeSettings = Settings.builder()
.put("xpack.monitoring.agent.exporters._name0.type", "_type") .put("xpack.monitoring.collection.exporters._name0.type", "_type")
.put("xpack.monitoring.agent.exporters._name1.type", "_type") .put("xpack.monitoring.collection.exporters._name1.type", "_type")
.build(); .build();
clusterSettings = new ClusterSettings(nodeSettings, new HashSet<>(Arrays.asList(MonitoringSettings.EXPORTERS_SETTINGS))); clusterSettings = new ClusterSettings(nodeSettings, new HashSet<>(Arrays.asList(MonitoringSettings.EXPORTERS_SETTINGS)));
@ -197,8 +197,8 @@ public class ExportersTests extends ESTestCase {
assertThat(settings, hasEntry("_name1.type", "_type")); assertThat(settings, hasEntry("_name1.type", "_type"));
Settings update = Settings.builder() Settings update = Settings.builder()
.put("xpack.monitoring.agent.exporters._name0.foo", "bar") .put("xpack.monitoring.collection.exporters._name0.foo", "bar")
.put("xpack.monitoring.agent.exporters._name1.foo", "bar") .put("xpack.monitoring.collection.exporters._name1.foo", "bar")
.build(); .build();
clusterSettings.applySettings(update); clusterSettings.applySettings(update);
assertThat(settingsHolder.get(), notNullValue()); assertThat(settingsHolder.get(), notNullValue());
@ -216,8 +216,8 @@ public class ExportersTests extends ESTestCase {
factories.put("mock", factory); factories.put("mock", factory);
factories.put("mock_master_only", masterOnlyFactory); factories.put("mock_master_only", masterOnlyFactory);
Exporters exporters = new Exporters(Settings.builder() Exporters exporters = new Exporters(Settings.builder()
.put("xpack.monitoring.agent.exporters._name0.type", "mock") .put("xpack.monitoring.collection.exporters._name0.type", "mock")
.put("xpack.monitoring.agent.exporters._name1.type", "mock_master_only") .put("xpack.monitoring.collection.exporters._name1.type", "mock_master_only")
.build(), factories, clusterService, clusterSettings); .build(), factories, clusterService, clusterSettings);
exporters.start(); exporters.start();
@ -241,8 +241,8 @@ public class ExportersTests extends ESTestCase {
factories.put("mock", factory); factories.put("mock", factory);
factories.put("mock_master_only", masterOnlyFactory); factories.put("mock_master_only", masterOnlyFactory);
Exporters exporters = new Exporters(Settings.builder() Exporters exporters = new Exporters(Settings.builder()
.put("xpack.monitoring.agent.exporters._name0.type", "mock") .put("xpack.monitoring.collection.exporters._name0.type", "mock")
.put("xpack.monitoring.agent.exporters._name1.type", "mock_master_only") .put("xpack.monitoring.collection.exporters._name1.type", "mock_master_only")
.build(), factories, clusterService, clusterSettings); .build(), factories, clusterService, clusterSettings);
exporters.start(); exporters.start();
@ -270,7 +270,7 @@ public class ExportersTests extends ESTestCase {
logger.info("--> creating {} exporters", nbExporters); logger.info("--> creating {} exporters", nbExporters);
Settings.Builder settings = Settings.builder(); Settings.Builder settings = Settings.builder();
for (int i = 0; i < nbExporters; i++) { for (int i = 0; i < nbExporters; i++) {
settings.put("xpack.monitoring.agent.exporters._name" + String.valueOf(i) + ".type", "record"); settings.put("xpack.monitoring.collection.exporters._name" + String.valueOf(i) + ".type", "record");
} }
Exporter.Factory factory = new CountingExportFactory("record", false); Exporter.Factory factory = new CountingExportFactory("record", false);

View File

@ -95,10 +95,10 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(MonitoringSettings.INTERVAL.getKey(), "-1") .put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put("xpack.monitoring.agent.exporters._http.type", "http") .put("xpack.monitoring.collection.exporters._http.type", "http")
.put("xpack.monitoring.agent.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) .put("xpack.monitoring.collection.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort())
.put("xpack.monitoring.agent.exporters._http.connection.keep_alive", false) .put("xpack.monitoring.collection.exporters._http.connection.keep_alive", false)
.put("xpack.monitoring.agent.exporters._http.update_mappings", false); .put("xpack.monitoring.collection.exporters._http.update_mappings", false);
internalCluster().startNode(builder); internalCluster().startNode(builder);
@ -133,23 +133,23 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
// disable exporting to be able to use non valid hosts // disable exporting to be able to use non valid hosts
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(MonitoringSettings.INTERVAL.getKey(), "-1") .put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put("xpack.monitoring.agent.exporters._http.type", "http") .put("xpack.monitoring.collection.exporters._http.type", "http")
.put("xpack.monitoring.agent.exporters._http.host", "test0"); .put("xpack.monitoring.collection.exporters._http.host", "test0");
String nodeName = internalCluster().startNode(builder); String nodeName = internalCluster().startNode(builder);
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
.putArray("xpack.monitoring.agent.exporters._http.host", "test1"))); .putArray("xpack.monitoring.collection.exporters._http.host", "test1")));
assertThat(getExporter(nodeName).hosts, Matchers.arrayContaining("test1")); assertThat(getExporter(nodeName).hosts, Matchers.arrayContaining("test1"));
// wipes the non array settings // wipes the non array settings
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
.putArray("xpack.monitoring.agent.exporters._http.host", "test2") .putArray("xpack.monitoring.collection.exporters._http.host", "test2")
.put("xpack.monitoring.agent.exporters._http.host", ""))); .put("xpack.monitoring.collection.exporters._http.host", "")));
assertThat(getExporter(nodeName).hosts, Matchers.arrayContaining("test2")); assertThat(getExporter(nodeName).hosts, Matchers.arrayContaining("test2"));
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
.putArray("xpack.monitoring.agent.exporters._http.host", "test3"))); .putArray("xpack.monitoring.collection.exporters._http.host", "test3")));
assertThat(getExporter(nodeName).hosts, Matchers.arrayContaining("test3")); assertThat(getExporter(nodeName).hosts, Matchers.arrayContaining("test3"));
} }
@ -157,10 +157,10 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(MonitoringSettings.INTERVAL.getKey(), "-1") .put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put("xpack.monitoring.agent.exporters._http.type", "http") .put("xpack.monitoring.collection.exporters._http.type", "http")
.put("xpack.monitoring.agent.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) .put("xpack.monitoring.collection.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort())
.put("xpack.monitoring.agent.exporters._http.connection.keep_alive", false) .put("xpack.monitoring.collection.exporters._http.connection.keep_alive", false)
.put("xpack.monitoring.agent.exporters._http.update_mappings", false); .put("xpack.monitoring.collection.exporters._http.update_mappings", false);
logger.info("--> starting node"); logger.info("--> starting node");
@ -221,7 +221,7 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
assertNotNull("Unable to start the second mock web server", secondWebServer); assertNotNull("Unable to start the second mock web server", secondWebServer);
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(
Settings.builder().putArray("xpack.monitoring.agent.exporters._http.host", Settings.builder().putArray("xpack.monitoring.collection.exporters._http.host",
secondWebServer.getHostName() + ":" + secondWebServer.getPort())).get()); secondWebServer.getHostName() + ":" + secondWebServer.getPort())).get());
// a new exporter is created on update, so we need to re-fetch it // a new exporter is created on update, so we need to re-fetch it
@ -274,9 +274,9 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
public void testUnsupportedClusterVersion() throws Exception { public void testUnsupportedClusterVersion() throws Exception {
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(MonitoringSettings.INTERVAL.getKey(), "-1") .put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put("xpack.monitoring.agent.exporters._http.type", "http") .put("xpack.monitoring.collection.exporters._http.type", "http")
.put("xpack.monitoring.agent.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) .put("xpack.monitoring.collection.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort())
.put("xpack.monitoring.agent.exporters._http.connection.keep_alive", false); .put("xpack.monitoring.collection.exporters._http.connection.keep_alive", false);
logger.info("--> starting node"); logger.info("--> starting node");
@ -302,10 +302,10 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
public void testDynamicIndexFormatChange() throws Exception { public void testDynamicIndexFormatChange() throws Exception {
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(MonitoringSettings.INTERVAL.getKey(), "-1") .put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put("xpack.monitoring.agent.exporters._http.type", "http") .put("xpack.monitoring.collection.exporters._http.type", "http")
.put("xpack.monitoring.agent.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort()) .put("xpack.monitoring.collection.exporters._http.host", webServer.getHostName() + ":" + webServer.getPort())
.put("xpack.monitoring.agent.exporters._http.connection.keep_alive", false) .put("xpack.monitoring.collection.exporters._http.connection.keep_alive", false)
.put("xpack.monitoring.agent.exporters._http.update_mappings", false); .put("xpack.monitoring.collection.exporters._http.update_mappings", false);
String agentNode = internalCluster().startNode(builder); String agentNode = internalCluster().startNode(builder);
@ -356,7 +356,7 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
String newTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM"); String newTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM");
logger.info("--> updating index time format setting to {}", newTimeFormat); logger.info("--> updating index time format setting to {}", newTimeFormat);
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder()
.put("xpack.monitoring.agent.exporters._http.index.name.time_format", newTimeFormat))); .put("xpack.monitoring.collection.exporters._http.index.name.time_format", newTimeFormat)));
logger.info("--> exporting a second event"); logger.info("--> exporting a second event");
@ -402,9 +402,9 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(MonitoringSettings.INTERVAL.getKey(), "-1") .put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put("xpack.monitoring.agent.exporters._http.type", "http") .put("xpack.monitoring.collection.exporters._http.type", "http")
.put("xpack.monitoring.agent.exporters._http.host", host) .put("xpack.monitoring.collection.exporters._http.host", host)
.put("xpack.monitoring.agent.exporters._http.connection.keep_alive", false); .put("xpack.monitoring.collection.exporters._http.connection.keep_alive", false);
String agentNode = internalCluster().startNode(builder); String agentNode = internalCluster().startNode(builder);
HttpExporter exporter = getExporter(agentNode); HttpExporter exporter = getExporter(agentNode);

View File

@@ -65,8 +65,8 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
public void testSimpleExport() throws Exception {
internalCluster().startNode(Settings.builder()
-.put("xpack.monitoring.agent.exporters._local.type", LocalExporter.TYPE)
-.put("xpack.monitoring.agent.exporters._local.enabled", true)
+.put("xpack.monitoring.collection.exporters._local.type", LocalExporter.TYPE)
+.put("xpack.monitoring.collection.exporters._local.enabled", true)
.build());
securedEnsureGreen();
@@ -96,7 +96,7 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
public void testTemplateCreation() throws Exception {
internalCluster().startNode(Settings.builder()
-.put("xpack.monitoring.agent.exporters._local.type", LocalExporter.TYPE)
+.put("xpack.monitoring.collection.exporters._local.type", LocalExporter.TYPE)
.build());
securedEnsureGreen();
@@ -111,8 +111,8 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
String timeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM");
internalCluster().startNode(Settings.builder()
-.put("xpack.monitoring.agent.exporters._local.type", LocalExporter.TYPE)
-.put("xpack.monitoring.agent.exporters._local." + LocalExporter.INDEX_NAME_TIME_FORMAT_SETTING, timeFormat)
+.put("xpack.monitoring.collection.exporters._local.type", LocalExporter.TYPE)
+.put("xpack.monitoring.collection.exporters._local." + LocalExporter.INDEX_NAME_TIME_FORMAT_SETTING, timeFormat)
.build());
securedEnsureGreen();
@@ -130,7 +130,7 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
logger.debug("--> updates the timestamp");
timeFormat = randomFrom("dd", "dd.MM.YYYY", "dd.MM");
-updateClusterSettings(Settings.builder().put("xpack.monitoring.agent.exporters._local.index.name.time_format", timeFormat));
+updateClusterSettings(Settings.builder().put("xpack.monitoring.collection.exporters._local.index.name.time_format", timeFormat));
exporter = getLocalExporter("_local"); // we need to get it again.. as it was rebuilt
indexName = ".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-" + DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(doc.getTimestamp());
@@ -144,8 +144,8 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
public void testLocalExporterFlush() throws Exception {
internalCluster().startNode(Settings.builder()
-.put("xpack.monitoring.agent.exporters._local.type", LocalExporter.TYPE)
-.put("xpack.monitoring.agent.exporters._local.enabled", true)
+.put("xpack.monitoring.collection.exporters._local.type", LocalExporter.TYPE)
+.put("xpack.monitoring.collection.exporters._local.enabled", true)
.build());
securedEnsureGreen();
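(Reviewer note: the index name asserted above combines the resolver template version with the configured time_format. A small sketch of that derivation; the format and timestamp are arbitrary examples, it reuses the test's MonitoringDoc, and the resulting name assumes TEMPLATE_VERSION resolves to "2":)

    String timeFormat = "YYYY.MM";
    long timestamp = doc.getTimestamp();   // any MonitoringDoc timestamp
    String indexName = ".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-"
            + DateTimeFormat.forPattern(timeFormat).withZoneUTC().print(timestamp);
    // e.g. ".monitoring-es-2-2016.07"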

@@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
import org.elasticsearch.xpack.monitoring.action.MonitoringIndex;
+import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.MonitoringIndexNameResolverTestCase;
import static java.util.Collections.emptyMap;
@@ -29,7 +30,7 @@ public class MonitoringBulkDataResolverTests extends MonitoringIndexNameResolver
@Override
protected MonitoringBulkDoc newMonitoringDoc() {
-MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString(),
+MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringIndex.DATA, "kibana", id,
new BytesArray("{\"field1\" : \"value1\"}"));

@@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
import org.elasticsearch.xpack.monitoring.action.MonitoringIndex;
+import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.agent.resolver.MonitoringIndexNameResolverTestCase;
import static java.util.Collections.emptyMap;
@@ -28,7 +29,7 @@ public class MonitoringBulkTimestampedResolverTests
@Override
protected MonitoringBulkDoc newMonitoringDoc() {
-MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), Version.CURRENT.toString(),
+MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringIndex.TIMESTAMPED, "kibana_stats", null,
new BytesArray("{\"field1\" : \"value1\"}"));
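(Reviewer note: both resolver tests now stamp documents with the monitoring template version rather than the node version; condensed from the two hunks above:)

    MonitoringBulkDoc dataDoc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(),
            MonitoringTemplateUtils.TEMPLATE_VERSION, MonitoringIndex.DATA, "kibana", id,
            new BytesArray("{\"field1\" : \"value1\"}"));
    MonitoringBulkDoc timestampedDoc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(),
            MonitoringTemplateUtils.TEMPLATE_VERSION, MonitoringIndex.TIMESTAMPED, "kibana_stats", null,
            new BytesArray("{\"field1\" : \"value1\"}"));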

@@ -44,7 +44,7 @@ public class ClusterStateTests extends MonitoringIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.COLLECTORS.getKey(), ClusterStateCollector.NAME)
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.put("node.attr.custom", randomInt)
.build();
}

@@ -32,7 +32,7 @@ public class ClusterStatsTests extends MonitoringIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.COLLECTORS.getKey(), ClusterStatsCollector.NAME)
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.build();
}

@@ -38,7 +38,7 @@ public class IndexRecoveryTests extends MonitoringIntegTestCase {
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.INDICES.getKey(), INDEX_PREFIX + "*")
.put(MonitoringSettings.COLLECTORS.getKey(), IndexRecoveryCollector.NAME)
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.build();
}

@@ -30,7 +30,7 @@ public class IndexStatsTests extends MonitoringIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.COLLECTORS.getKey(), IndexStatsCollector.NAME)
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.build();
}

@@ -30,7 +30,7 @@ public class IndicesStatsTests extends MonitoringIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.COLLECTORS.getKey(), IndicesStatsCollector.NAME)
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.build();
}

@@ -35,7 +35,7 @@ public class MultiNodesStatsTests extends MonitoringIntegTestCase {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.build();
}
@@ -102,7 +102,7 @@ public class MultiNodesStatsTests extends MonitoringIntegTestCase {
assertThat(((StringTerms) aggregation).getBuckets().size(), equalTo(nbNodes));
for (String nodeName : internalCluster().getNodeNames()) {
-StringTerms.Bucket bucket = (StringTerms.Bucket) ((StringTerms) aggregation)
+StringTerms.Bucket bucket = ((StringTerms) aggregation)
.getBucketByKey(internalCluster().clusterService(nodeName).localNode().getId());
// At least 1 doc must exist per node, but it can be more than 1
// because the first node may have already collected many node stats documents

@@ -33,7 +33,7 @@ public class NodeStatsTests extends MonitoringIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.COLLECTORS.getKey(), NodeStatsCollector.NAME)
-.put("xpack.monitoring.agent.exporters.default_local.type", LocalExporter.TYPE)
+.put("xpack.monitoring.collection.exporters.default_local.type", LocalExporter.TYPE)
.build();
}

@@ -46,7 +46,7 @@ public class ShardsTests extends MonitoringIntegTestCase {
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
.put(MonitoringSettings.COLLECTORS.getKey(), ShardsCollector.NAME)
.put(MonitoringSettings.INDICES.getKey(), INDEX_PREFIX + "*")
-.put("xpack.monitoring.agent.exporters.default_local.type", "local")
+.put("xpack.monitoring.collection.exporters.default_local.type", "local")
.build();
}

@@ -77,8 +77,8 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
// Won't be deleted
createIndex(MonitoringSettings.LEGACY_DATA_INDEX_NAME, now().minusYears(1));
-createDataIndex(now().minusDays(10), 0);
-createDataIndex(now().minusDays(10), 1);
+createDataIndex(now().minusDays(10), "0");
+createDataIndex(now().minusDays(10), "1");
assertIndicesCount(4);
CleanerService.Listener listener = getListener();
@@ -108,9 +108,9 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
createTimestampedIndex(now().minusDays(10));
// Won't be deleted
-createTimestampedIndex(now().minusDays(10), 0);
-createTimestampedIndex(now().minusDays(10), 1);
-createTimestampedIndex(now().minusDays(10), Integer.MAX_VALUE);
+createTimestampedIndex(now().minusDays(10), "0");
+createTimestampedIndex(now().minusDays(10), "1");
+createTimestampedIndex(now().minusDays(10), String.valueOf(Integer.MAX_VALUE));
assertIndicesCount(4);
CleanerService.Listener listener = getListener();
@@ -198,7 +198,7 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
/**
* Creates a monitoring data index in a given version.
*/
-protected void createDataIndex(DateTime creationDate, int version) {
+protected void createDataIndex(DateTime creationDate, String version) {
createIndex(new MockDataIndexNameResolver(version).index(randomMonitoringDoc()), creationDate);
}
@@ -212,7 +212,7 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest
/**
* Creates a monitoring timestamped index using a given template version.
*/
-protected void createTimestampedIndex(DateTime creationDate, int version) {
+protected void createTimestampedIndex(DateTime creationDate, String version) {
MonitoringDoc monitoringDoc = randomMonitoringDoc();
monitoringDoc.setTimestamp(creationDate.getMillis());

@@ -33,7 +33,7 @@ public class LocalIndicesCleanerTests extends AbstractIndicesCleanerTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
-.put("xpack.monitoring.agent.exporters._local.type", LocalExporter.TYPE)
+.put("xpack.monitoring.collection.exporters._local.type", LocalExporter.TYPE)
.build();
}

@@ -26,6 +26,7 @@ import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.monitoring.MonitoringLicensee;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -150,12 +151,8 @@ public class LicenseIntegrationTests extends MonitoringIntegTestCase {
}
@Override
-public List<String> licenseesWithState(LicenseState state) {
-List<String> licenseesWithState = new ArrayList<>();
-for (Licensee licensee : licensees) {
-licenseesWithState.add(licensee.id());
-}
-return licenseesWithState;
+public LicenseState licenseState() {
+return null;
}
@Override
@@ -165,7 +162,7 @@ public class LicenseIntegrationTests extends MonitoringIntegTestCase {
}
public static class InternalXPackPlugin extends XPackPlugin {
-public InternalXPackPlugin(Settings settings) {
+public InternalXPackPlugin(Settings settings) throws IOException {
super(settings);
licensing = new MockLicensing();
}

@@ -14,7 +14,6 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.monitoring.MonitoringSettings;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
-import org.hamcrest.Matchers;
import java.util.Collections;
import java.util.Map;
@@ -22,7 +21,8 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
-import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.equalTo;
public class MonitoringSettingsFilterTests extends MonitoringIntegTestCase {
@@ -32,13 +32,13 @@ public class MonitoringSettingsFilterTests extends MonitoringIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put(NetworkModule.HTTP_ENABLED.getKey(), true)
.put(MonitoringSettings.INTERVAL.getKey(), "-1")
-.put("xpack.monitoring.agent.exporters._http.type", "http")
-.put("xpack.monitoring.agent.exporters._http.enabled", false)
-.put("xpack.monitoring.agent.exporters._http.auth.username", "_user")
-.put("xpack.monitoring.agent.exporters._http.auth.password", "_passwd")
-.put("xpack.monitoring.agent.exporters._http.ssl.truststore.path", "/path/to/truststore")
-.put("xpack.monitoring.agent.exporters._http.ssl.truststore.password", "_passwd")
-.put("xpack.monitoring.agent.exporters._http.ssl.hostname_verification", true)
+.put("xpack.monitoring.collection.exporters._http.type", "http")
+.put("xpack.monitoring.collection.exporters._http.enabled", false)
+.put("xpack.monitoring.collection.exporters._http.auth.username", "_user")
+.put("xpack.monitoring.collection.exporters._http.auth.password", "_passwd")
+.put("xpack.monitoring.collection.exporters._http.ssl.truststore.path", "/path/to/truststore")
+.put("xpack.monitoring.collection.exporters._http.ssl.truststore.password", "_passwd")
+.put("xpack.monitoring.collection.exporters._http.ssl.hostname_verification", true)
.build();
}
@@ -60,13 +60,13 @@ public class MonitoringSettingsFilterTests extends MonitoringIntegTestCase {
for (Object node : nodes.values()) {
@SuppressWarnings("unchecked")
Map<String, Object> settings = (Map<String, Object>) ((Map<String, Object>) node).get("settings");
-assertThat(extractValue("xpack.monitoring.agent.exporters._http.type", settings), Matchers.<Object>equalTo("http"));
-assertThat(extractValue("xpack.monitoring.agent.exporters._http.enabled", settings), Matchers.<Object>equalTo("false"));
-assertNullSetting(settings, "xpack.monitoring.agent.exporters._http.auth.username");
-assertNullSetting(settings, "xpack.monitoring.agent.exporters._http.auth.password");
-assertNullSetting(settings, "xpack.monitoring.agent.exporters._http.ssl.truststore.path");
-assertNullSetting(settings, "xpack.monitoring.agent.exporters._http.ssl.truststore.password");
-assertNullSetting(settings, "xpack.monitoring.agent.exporters._http.ssl.hostname_verification");
+assertThat(extractValue("xpack.monitoring.collection.exporters._http.type", settings), equalTo("http"));
+assertThat(extractValue("xpack.monitoring.collection.exporters._http.enabled", settings), equalTo("false"));
+assertNullSetting(settings, "xpack.monitoring.collection.exporters._http.auth.username");
+assertNullSetting(settings, "xpack.monitoring.collection.exporters._http.auth.password");
+assertNullSetting(settings, "xpack.monitoring.collection.exporters._http.ssl.truststore.path");
+assertNullSetting(settings, "xpack.monitoring.collection.exporters._http.ssl.truststore.password");
+assertNullSetting(settings, "xpack.monitoring.collection.exporters._http.ssl.hostname_verification");
}
}
}

@@ -37,7 +37,7 @@ import org.elasticsearch.xpack.security.authc.file.FileRealm;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
-import org.elasticsearch.xpack.security.crypto.InternalCryptoService;
+import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.xpack.watcher.Watcher;
import org.hamcrest.Matcher;
import org.jboss.netty.util.internal.SystemPropertyUtil;
@@ -440,7 +440,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase {
public class MockDataIndexNameResolver extends MonitoringIndexNameResolver.Data<MonitoringDoc> {
-public MockDataIndexNameResolver(Integer version) {
+public MockDataIndexNameResolver(String version) {
super(version);
}
@@ -462,7 +462,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase {
protected class MockTimestampedIndexNameResolver extends MonitoringIndexNameResolver.Timestamped<MonitoringDoc> {
-public MockTimestampedIndexNameResolver(MonitoredSystem system, Settings settings, Integer version) {
+public MockTimestampedIndexNameResolver(MonitoredSystem system, Settings settings, String version) {
super(system, settings, version);
}
@@ -537,7 +537,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase {
.put("xpack.security.authc.realms.esusers.files.users", writeFile(folder, "users", USERS))
.put("xpack.security.authc.realms.esusers.files.users_roles", writeFile(folder, "users_roles", USER_ROLES))
.put(FileRolesStore.ROLES_FILE_SETTING.getKey(), writeFile(folder, "roles.yml", ROLES))
-.put(InternalCryptoService.FILE_SETTING.getKey(), writeFile(folder, "system_key.yml", systemKey))
+.put(CryptoService.FILE_SETTING.getKey(), writeFile(folder, "system_key.yml", systemKey))
.put("xpack.security.authc.sign_user_header", false)
.put("xpack.security.audit.enabled", auditLogsEnabled);
} catch (IOException ex) {
@@ -547,7 +547,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase {
static byte[] generateKey() {
try {
-return InternalCryptoService.generateKey();
+return CryptoService.generateKey();
} catch (Exception e) {
throw new RuntimeException(e);
}

@@ -16,9 +16,9 @@
"type": "string",
"description" : "Identifier of the monitored system"
},
-"system_version" : {
+"system_api_version" : {
"type" : "string",
-"description" : "Version of the monitored system"
+"description" : "API Version of the monitored system"
}
}
},

@@ -13,14 +13,11 @@ setup:
---
"Bulk indexing of monitoring data":
-# Get the current version
-- do: {info: {}}
-- set: {version.number: version}
- do:
xpack.monitoring.bulk:
system_id: "kibana"
-system_version: $version
+system_api_version: "2"
body:
- index:
_type: test_type
@@ -54,7 +51,7 @@ setup:
- do:
xpack.monitoring.bulk:
system_id: "kibana"
-system_version: $version
+system_api_version: "2"
type: "default_type"
body:
- '{"index": {}}'

@ -5,6 +5,16 @@
*/ */
package org.elasticsearch.xpack.security; package org.elasticsearch.xpack.security;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilter;
@ -12,6 +22,7 @@ import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
@ -21,6 +32,7 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexModule;
import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestHandler;
@ -64,8 +76,7 @@ import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache;
import org.elasticsearch.xpack.security.authz.accesscontrol.SecurityIndexSearcherWrapper; import org.elasticsearch.xpack.security.authz.accesscontrol.SecurityIndexSearcherWrapper;
import org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.security.crypto.CryptoModule; import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.xpack.security.crypto.InternalCryptoService;
import org.elasticsearch.xpack.security.rest.SecurityRestModule; import org.elasticsearch.xpack.security.rest.SecurityRestModule;
import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction;
import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction;
@ -90,15 +101,6 @@ import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.DateTimeZone; import org.joda.time.DateTimeZone;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@ -117,13 +119,17 @@ public class Security implements ActionPlugin {
private final boolean enabled; private final boolean enabled;
private final boolean transportClientMode; private final boolean transportClientMode;
private SecurityLicenseState securityLicenseState; private SecurityLicenseState securityLicenseState;
private final CryptoService cryptoService;
public Security(Settings settings) { public Security(Settings settings, Environment env) throws IOException {
this.settings = settings; this.settings = settings;
this.transportClientMode = XPackPlugin.transportClientMode(settings); this.transportClientMode = XPackPlugin.transportClientMode(settings);
this.enabled = XPackPlugin.featureEnabled(settings, NAME, true); this.enabled = XPackPlugin.featureEnabled(settings, NAME, true);
if (enabled && !transportClientMode) { if (enabled && transportClientMode == false) {
validateAutoCreateIndex(settings); validateAutoCreateIndex(settings);
cryptoService = new CryptoService(settings, env);
} else {
cryptoService = null;
} }
} }
@ -143,8 +149,8 @@ public class Security implements ActionPlugin {
modules.add(new AuthenticationModule(settings)); modules.add(new AuthenticationModule(settings));
modules.add(new AuthorizationModule(settings)); modules.add(new AuthorizationModule(settings));
if (enabled == false) { if (enabled == false) {
modules.add(b -> b.bind(CryptoService.class).toProvider(Providers.of(null)));
modules.add(new SecurityModule(settings, securityLicenseState)); modules.add(new SecurityModule(settings, securityLicenseState));
modules.add(new CryptoModule(settings));
modules.add(new AuditTrailModule(settings)); modules.add(new AuditTrailModule(settings));
modules.add(new SecurityTransportModule(settings)); modules.add(new SecurityTransportModule(settings));
return modules; return modules;
@ -154,8 +160,8 @@ public class Security implements ActionPlugin {
// which might not be the case during Plugin class instantiation. Once nodeModules are pulled // which might not be the case during Plugin class instantiation. Once nodeModules are pulled
// everything should have been loaded // everything should have been loaded
securityLicenseState = new SecurityLicenseState(); securityLicenseState = new SecurityLicenseState();
modules.add(b -> b.bind(CryptoService.class).toInstance(cryptoService));
modules.add(new SecurityModule(settings, securityLicenseState)); modules.add(new SecurityModule(settings, securityLicenseState));
modules.add(new CryptoModule(settings));
modules.add(new AuditTrailModule(settings)); modules.add(new AuditTrailModule(settings));
modules.add(new SecurityRestModule(settings)); modules.add(new SecurityRestModule(settings));
modules.add(new SecurityActionModule(settings)); modules.add(new SecurityActionModule(settings));
@ -175,7 +181,6 @@ public class Security implements ActionPlugin {
list.add(LoggingAuditTrail.class); list.add(LoggingAuditTrail.class);
} }
list.add(SecurityLicensee.class); list.add(SecurityLicensee.class);
list.add(InternalCryptoService.class);
list.add(FileRolesStore.class); list.add(FileRolesStore.class);
list.add(Realms.class); list.add(Realms.class);
return list; return list;
@ -186,13 +191,18 @@ public class Security implements ActionPlugin {
return Settings.EMPTY; return Settings.EMPTY;
} }
return additionalSettings(settings);
}
// pkg private for testing
static Settings additionalSettings(Settings settings) {
Settings.Builder settingsBuilder = Settings.builder(); Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME); settingsBuilder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME);
settingsBuilder.put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, Security.NAME); settingsBuilder.put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, Security.NAME);
settingsBuilder.put(NetworkModule.HTTP_TYPE_SETTING.getKey(), Security.NAME); settingsBuilder.put(NetworkModule.HTTP_TYPE_SETTING.getKey(), Security.NAME);
SecurityNettyHttpServerTransport.overrideSettings(settingsBuilder, settings); SecurityNettyHttpServerTransport.overrideSettings(settingsBuilder, settings);
addUserSettings(settingsBuilder); addUserSettings(settings, settingsBuilder);
addTribeSettings(settingsBuilder); addTribeSettings(settings, settingsBuilder);
return settingsBuilder.build(); return settingsBuilder.build();
} }
@ -234,7 +244,7 @@ public class Security implements ActionPlugin {
SecurityNettyHttpServerTransport.addSettings(settingsList); SecurityNettyHttpServerTransport.addSettings(settingsList);
// encryption settings // encryption settings
InternalCryptoService.addSettings(settingsList); CryptoService.addSettings(settingsList);
// hide settings // hide settings
settingsList.add(Setting.listSetting(setting("hide_settings"), Collections.emptyList(), Function.identity(), settingsList.add(Setting.listSetting(setting("hide_settings"), Collections.emptyList(), Function.identity(),
@ -346,7 +356,7 @@ public class Security implements ActionPlugin {
} }
} }
private void addUserSettings(Settings.Builder settingsBuilder) { private static void addUserSettings(Settings settings, Settings.Builder settingsBuilder) {
String authHeaderSettingName = ThreadContext.PREFIX + "." + UsernamePasswordToken.BASIC_AUTH_HEADER; String authHeaderSettingName = ThreadContext.PREFIX + "." + UsernamePasswordToken.BASIC_AUTH_HEADER;
if (settings.get(authHeaderSettingName) != null) { if (settings.get(authHeaderSettingName) != null) {
return; return;
@ -374,7 +384,7 @@ public class Security implements ActionPlugin {
* *
* - forcibly enabling it (that means it's not possible to disable security on the tribe clients) * - forcibly enabling it (that means it's not possible to disable security on the tribe clients)
*/ */
private void addTribeSettings(Settings.Builder settingsBuilder) { private static void addTribeSettings(Settings settings, Settings.Builder settingsBuilder) {
Map<String, Settings> tribesSettings = settings.getGroups("tribe", true); Map<String, Settings> tribesSettings = settings.getGroups("tribe", true);
if (tribesSettings.isEmpty()) { if (tribesSettings.isEmpty()) {
// it's not a tribe node // it's not a tribe node

@@ -143,7 +143,7 @@ public class SecurityFeatureSet implements XPackFeatureSet {
static boolean systemKeyUsage(CryptoService cryptoService) {
// we can piggy back on the encryption enabled method as it is only enabled if there is a system key
-return cryptoService.encryptionEnabled();
+return cryptoService != null && cryptoService.isEncryptionEnabled();
}
static class Usage extends XPackFeatureSet.Usage {

@@ -191,7 +191,7 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil
if (response instanceof SearchResponse) {
SearchResponse searchResponse = (SearchResponse) response;
String scrollId = searchResponse.getScrollId();
-if (scrollId != null && !cryptoService.signed(scrollId)) {
+if (scrollId != null && !cryptoService.isSigned(scrollId)) {
searchResponse.scrollId(cryptoService.sign(scrollId));
}
return response;
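(Reviewer note: signed(...) is renamed to isSigned(...). The format it matches is the one produced by CryptoService.sign further down in this commit, roughly "$$" + signature length + "$$" + optional random key + "$$" + signature + original text. A hedged sketch; the scroll id and the resulting signed string are invented:)

    String scrollId = "c2Nhbjs1OzEwMj...";            // made-up scroll id
    String signed = cryptoService.sign(scrollId);     // "$$<len>$$<random key or empty>$$<signature>c2Nhbjs1OzEwMj..."
    assert cryptoService.isSigned(signed);
    assert scrollId.equals(cryptoService.unsignAndVerify(signed));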

@@ -0,0 +1,91 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.DocValuesNumbersQuery;
import org.apache.lucene.search.DocValuesRangeQuery;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointInSetQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.spans.SpanTermQuery;
import java.util.Set;
/**
* Extracts fields from a query, or throws UnsupportedOperationException.
* <p>
* Lucene queries have {@link Weight#extractTerms}, but this is really geared at things
* such as highlighting, not security. For example terms in a Boolean {@code MUST_NOT} clause
* are not included, TermsQuery doesn't implement the method as it could be terribly slow, etc.
*/
class FieldExtractor {
/**
* Populates {@code fields} with the set of fields used by the query, or throws
* UnsupportedOperationException if it doesn't know how to do this.
*/
static void extractFields(Query query, Set<String> fields) throws UnsupportedOperationException {
// NOTE: we expect a rewritten query, so we only need logic for "atomic" queries here:
if (query instanceof BooleanQuery) {
// extract from all clauses
BooleanQuery q = (BooleanQuery) query;
for (BooleanClause clause : q.clauses()) {
extractFields(clause.getQuery(), fields);
}
} else if (query instanceof DisjunctionMaxQuery) {
// extract from all clauses
DisjunctionMaxQuery q = (DisjunctionMaxQuery) query;
for (Query clause : q.getDisjuncts()) {
extractFields(clause, fields);
}
} else if (query instanceof SpanTermQuery) {
// we just do SpanTerm, other spans are trickier, they could contain
// the evil FieldMaskingSpanQuery: so SpanQuery.getField cannot be trusted.
fields.add(((SpanTermQuery)query).getField());
} else if (query instanceof TermQuery) {
fields.add(((TermQuery)query).getTerm().field());
} else if (query instanceof SynonymQuery) {
SynonymQuery q = (SynonymQuery) query;
// all terms must have the same field
fields.add(q.getTerms().get(0).field());
} else if (query instanceof PhraseQuery) {
PhraseQuery q = (PhraseQuery) query;
// all terms must have the same field
fields.add(q.getTerms()[0].field());
} else if (query instanceof MultiPhraseQuery) {
MultiPhraseQuery q = (MultiPhraseQuery) query;
// all terms must have the same field
fields.add(q.getTermArrays()[0][0].field());
} else if (query instanceof PointRangeQuery) {
fields.add(((PointRangeQuery)query).getField());
} else if (query instanceof PointInSetQuery) {
fields.add(((PointInSetQuery)query).getField());
} else if (query instanceof FieldValueQuery) {
fields.add(((FieldValueQuery)query).getField());
} else if (query instanceof DocValuesNumbersQuery) {
fields.add(((DocValuesNumbersQuery)query).getField());
} else if (query instanceof DocValuesRangeQuery) {
fields.add(((DocValuesRangeQuery)query).getField());
} else if (query instanceof MatchAllDocsQuery) {
// no field
} else if (query instanceof MatchNoDocsQuery) {
// no field
} else {
throw new UnsupportedOperationException(); // we don't know how to get the fields from it
}
}
}
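(Reviewer note: a small usage sketch of the new FieldExtractor; the terms are invented. Unlike Weight#extractTerms, MUST_NOT clauses are visited too, and unknown query types throw, which the query cache below treats as "do not cache":)

    import org.apache.lucene.index.Term;
    import java.util.HashSet;
    import java.util.Set;

    Set<String> fields = new HashSet<>();
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.add(new TermQuery(new Term("title", "shield")), BooleanClause.Occur.MUST);
    builder.add(new TermQuery(new Term("body", "secret")), BooleanClause.Occur.MUST_NOT);
    FieldExtractor.extractFields(builder.build(), fields);
    // fields now contains "title" and "body"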

@@ -56,30 +56,33 @@ public final class FieldSubsetReader extends FilterLeafReader {
* and so on.
* @param in reader to filter
* @param fieldNames fields to filter.
+* @param negate {@code true} if this should be a negative set, meaning set of field names that is denied.
*/
-public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames) throws IOException {
-return new FieldSubsetDirectoryReader(in, fieldNames);
+public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames, boolean negate) throws IOException {
+return new FieldSubsetDirectoryReader(in, fieldNames, negate);
}
// wraps subreaders with fieldsubsetreaders.
static class FieldSubsetDirectoryReader extends FilterDirectoryReader {
private final Set<String> fieldNames;
+private final boolean negate;
-FieldSubsetDirectoryReader(DirectoryReader in, final Set<String> fieldNames) throws IOException {
+FieldSubsetDirectoryReader(DirectoryReader in, Set<String> fieldNames, boolean negate) throws IOException {
super(in, new FilterDirectoryReader.SubReaderWrapper() {
@Override
public LeafReader wrap(LeafReader reader) {
-return new FieldSubsetReader(reader, fieldNames);
+return new FieldSubsetReader(reader, fieldNames, negate);
}
});
this.fieldNames = fieldNames;
+this.negate = negate;
verifyNoOtherFieldSubsetDirectoryReaderIsWrapped(in);
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-return new FieldSubsetDirectoryReader(in, fieldNames);
+return new FieldSubsetDirectoryReader(in, fieldNames, negate);
}
public Set<String> getFieldNames() {
@@ -111,17 +114,23 @@ public final class FieldSubsetReader extends FilterLeafReader {
/**
* Wrap a single segment, exposing a subset of its fields.
+* @param fields set of field names that should be allowed
+* @param negate {@code true} if this should be a negative set, meaning set of field names that is denied.
*/
-FieldSubsetReader(LeafReader in, Set<String> fieldNames) {
+FieldSubsetReader(LeafReader in, Set<String> fields, boolean negate) {
super(in);
+// look at what fields the reader has, and preprocess a subset of them that are allowed
ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
for (FieldInfo fi : in.getFieldInfos()) {
-if (fieldNames.contains(fi.name)) {
+if (fields.contains(fi.name) ^ negate) {
filteredInfos.add(fi);
}
}
fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
-this.fieldNames = fieldNames.toArray(new String[fieldNames.size()]);
+fieldNames = new String[filteredInfos.size()];
+for (int i = 0; i < fieldNames.length; i++) {
+fieldNames[i] = filteredInfos.get(i).name;
+}
}
/** returns true if this field is allowed. */
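(Reviewer note: sketch of the two wrapping modes the new negate flag enables; the field names are examples, the imports assumed are java.util.Arrays, Collections, HashSet and org.apache.lucene.index.DirectoryReader, and only the negate == false form is used by SecurityIndexSearcherWrapper below:)

    // allow-list: only "title" and "body" stay visible
    DirectoryReader visible = FieldSubsetReader.wrap(reader, new HashSet<>(Arrays.asList("title", "body")), false);
    // deny-list: everything except "ssn" stays visible
    DirectoryReader redacted = FieldSubsetReader.wrap(reader, Collections.singleton("ssn"), true);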

@@ -16,8 +16,12 @@ import org.elasticsearch.indices.IndicesQueryCache;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.xpack.security.authz.InternalAuthorizationService;
+import java.util.HashSet;
+import java.util.Set;
/**
-* Opts out of the query cache if field level security is active for the current request.
+* Opts out of the query cache if field level security is active for the current request,
+* and its unsafe to cache.
*/
public final class OptOutQueryCache extends AbstractIndexComponent implements QueryCache {
@@ -64,13 +68,41 @@ public final class OptOutQueryCache extends AbstractIndexComponent implements Qu
IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(indexName);
if (indexAccessControl != null && indexAccessControl.getFields() != null) {
-logger.debug("opting out of the query cache. request for index [{}] has field level security enabled", indexName);
-// If in the future there is a Query#extractFields() then we can be smart on when to skip the query cache.
-// (only cache if all fields in the query also are defined in the role)
-return weight;
+if (cachingIsSafe(weight, indexAccessControl)) {
+logger.trace("not opting out of the query cache. request for index [{}] is safe to cache", indexName);
+return indicesQueryCache.doCache(weight, policy);
+} else {
+logger.trace("opting out of the query cache. request for index [{}] is unsafe to cache", indexName);
+return weight;
+}
} else {
logger.trace("not opting out of the query cache. request for index [{}] has field level security disabled", indexName);
return indicesQueryCache.doCache(weight, policy);
}
}
+/**
+* Returns true if its safe to use the query cache for this query.
+*/
+static boolean cachingIsSafe(Weight weight, IndicesAccessControl.IndexAccessControl permissions) {
+// support caching for common queries, by inspecting the field
+// TODO: If in the future there is a Query#extractFields() then we can do a better job
+Set<String> fields = new HashSet<>();
+try {
+FieldExtractor.extractFields(weight.getQuery(), fields);
+} catch (UnsupportedOperationException ok) {
+// we don't know how to safely extract the fields of this query, don't cache.
+return false;
+}
+// we successfully extracted the set of fields: check each one
+for (String field : fields) {
+// don't cache any internal fields (e.g. _field_names), these are complicated.
+if (field.startsWith("_") || permissions.getFields().contains(field) == false) {
+return false;
+}
+}
+// we can cache, all fields are ok
+return true;
+}
}
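(Reviewer note: the cache opt-out is now per query rather than blanket. A minimal restatement of the cachingIsSafe loop, using a plain Set in place of the role's permissions and assuming only the "title" field is granted; imports as in the FieldExtractor sketch above:)

    Set<String> granted = Collections.singleton("title");
    Set<String> queryFields = new HashSet<>();
    FieldExtractor.extractFields(new TermQuery(new Term("title", "x")), queryFields);
    boolean safe = true;
    for (String field : queryFields) {
        if (field.startsWith("_") || granted.contains(field) == false) {
            safe = false;   // internal or non-granted field: fall back to not caching
        }
    }
    // safe == true here; a query on "body" or on "_field_names" would flip it to false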

@@ -144,7 +144,8 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
allowedFields.addAll(mapperService.simpleMatchToIndexNames(field));
}
resolveParentChildJoinFields(allowedFields);
-reader = FieldSubsetReader.wrap(reader, allowedFields);
+// TODO: support 'denied' fields (pass true as the 3rd parameter in this case)
+reader = FieldSubsetReader.wrap(reader, allowedFields, false);
}
return reader;

@@ -1,30 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.crypto;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.support.AbstractSecurityModule;
/**
*
*/
public class CryptoModule extends AbstractSecurityModule.Node {
public CryptoModule(Settings settings) {
super(settings);
}
@Override
protected void configureNode() {
if (securityEnabled == false) {
bind(CryptoService.class).toProvider(Providers.of(null));
return;
}
bind(InternalCryptoService.class).asEagerSingleton();
bind(CryptoService.class).to(InternalCryptoService.class).asEagerSingleton();
}
}
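(Reviewer note: the deleted module is replaced by direct Guice bindings registered from Security#nodeModules, as shown earlier in this diff; condensed, the two cases are:)

    if (enabled == false) {
        modules.add(b -> b.bind(CryptoService.class).toProvider(Providers.of(null)));  // security disabled: bind a null provider
    } else {
        modules.add(b -> b.bind(CryptoService.class).toInstance(cryptoService));       // instance built in the Security constructor
    }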

@ -5,128 +5,497 @@
*/ */
package org.elasticsearch.xpack.security.crypto; package org.elasticsearch.xpack.security.crypto;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey; import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.Objects;
import java.util.regex.Pattern;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.authc.support.CharArrays;
import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.authc.support.SecuredString.constantTimeEquals;
/** /**
* Service that provides cryptographic methods based on a shared system key * Service that provides cryptographic methods based on a shared system key
*/ */
public interface CryptoService { public class CryptoService extends AbstractComponent {
public static final String KEY_ALGO = "HmacSHA512";
public static final int KEY_SIZE = 1024;
static final String FILE_NAME = "system_key";
static final String HMAC_ALGO = "HmacSHA1";
static final String DEFAULT_ENCRYPTION_ALGORITHM = "AES/CTR/NoPadding";
static final String DEFAULT_KEY_ALGORITH = "AES";
static final String ENCRYPTED_TEXT_PREFIX = "::es_encrypted::";
static final byte[] ENCRYPTED_BYTE_PREFIX = ENCRYPTED_TEXT_PREFIX.getBytes(StandardCharsets.UTF_8);
static final int DEFAULT_KEY_LENGTH = 128;
static final int RANDOM_KEY_SIZE = 128;
private static final Pattern SIG_PATTERN = Pattern.compile("^\\$\\$[0-9]+\\$\\$[^\\$]*\\$\\$.+");
private static final byte[] HKDF_APP_INFO = "es-security-crypto-service".getBytes(StandardCharsets.UTF_8);
public static final Setting<String> FILE_SETTING = Setting.simpleString(setting("system_key.file"), Setting.Property.NodeScope);
public static final Setting<String> ENCRYPTION_ALGO_SETTING =
new Setting<>(setting("encryption.algorithm"), s -> DEFAULT_ENCRYPTION_ALGORITHM, s -> s, Setting.Property.NodeScope);
public static final Setting<Integer> ENCRYPTION_KEY_LENGTH_SETTING =
Setting.intSetting(setting("encryption_key.length"), DEFAULT_KEY_LENGTH, Setting.Property.NodeScope);
public static final Setting<String> ENCRYPTION_KEY_ALGO_SETTING =
new Setting<>(setting("encryption_key.algorithm"), DEFAULT_KEY_ALGORITH, s -> s, Setting.Property.NodeScope);
private final SecureRandom secureRandom = new SecureRandom();
private final String encryptionAlgorithm;
private final String keyAlgorithm;
private final int keyLength;
private final int ivLength;
private final Path keyFile;
private final SecretKey randomKey;
private final String randomKeyBase64;
private final SecretKey encryptionKey;
private final SecretKey systemKey;
private final SecretKey signingKey;
public CryptoService(Settings settings, Environment env) throws IOException {
super(settings);
this.encryptionAlgorithm = ENCRYPTION_ALGO_SETTING.get(settings);
this.keyLength = ENCRYPTION_KEY_LENGTH_SETTING.get(settings);
this.ivLength = keyLength / 8;
this.keyAlgorithm = ENCRYPTION_KEY_ALGO_SETTING.get(settings);
if (keyLength % 8 != 0) {
throw new IllegalArgumentException("invalid key length [" + keyLength + "]. value must be a multiple of 8");
}
keyFile = resolveSystemKey(settings, env);
systemKey = readSystemKey(keyFile);
randomKey = generateSecretKey(RANDOM_KEY_SIZE);
randomKeyBase64 = Base64.getUrlEncoder().encodeToString(randomKey.getEncoded());
signingKey = createSigningKey(systemKey, randomKey);
try {
encryptionKey = encryptionKey(systemKey, keyLength, keyAlgorithm);
} catch (NoSuchAlgorithmException nsae) {
throw new ElasticsearchException("failed to start crypto service. could not load encryption key", nsae);
}
logger.info("system key [{}] has been loaded", keyFile.toAbsolutePath());
}
public static byte[] generateKey() {
return generateSecretKey(KEY_SIZE).getEncoded();
}
static SecretKey generateSecretKey(int keyLength) {
try {
KeyGenerator generator = KeyGenerator.getInstance(KEY_ALGO);
generator.init(keyLength);
return generator.generateKey();
} catch (NoSuchAlgorithmException e) {
throw new ElasticsearchException("failed to generate key", e);
}
}
public static Path resolveSystemKey(Settings settings, Environment env) {
String location = FILE_SETTING.get(settings);
if (location.isEmpty()) {
return XPackPlugin.resolveConfigFile(env, FILE_NAME);
}
return XPackPlugin.resolveConfigFile(env, location);
}
static SecretKey createSigningKey(@Nullable SecretKey systemKey, SecretKey randomKey) {
assert randomKey != null;
if (systemKey != null) {
return systemKey;
} else {
// the random key is only 128 bits so we use HKDF to expand to 1024 bits with some application specific data mixed in
byte[] keyBytes = HmacSHA1HKDF.extractAndExpand(null, randomKey.getEncoded(), HKDF_APP_INFO, (KEY_SIZE / 8));
assert keyBytes.length * 8 == KEY_SIZE;
return new SecretKeySpec(keyBytes, KEY_ALGO);
}
}
private static SecretKey readSystemKey(Path file) {
if (!Files.exists(file)) {
return null;
}
try {
byte[] bytes = Files.readAllBytes(file);
return new SecretKeySpec(bytes, KEY_ALGO);
} catch (IOException e) {
throw new ElasticsearchException("could not read secret key", e);
}
}
/** /**
* Signs the given text and returns the signed text (original text + signature) * Signs the given text and returns the signed text (original text + signature)
* @param text the string to sign * @param text the string to sign
*/ */
String sign(String text) throws IOException; public String sign(String text) throws IOException {
String sigStr = signInternal(text, signingKey);
return "$$" + sigStr.length() + "$$" + (systemKey == signingKey ? "" : randomKeyBase64) + "$$" + sigStr + text;
}
/** /**
* Unsigns the given signed text, verifies the original text with the attached signature and if valid returns * Unsigns the given signed text, verifies the original text with the attached signature and if valid returns
* the unsigned (original) text. If signature verification fails a {@link IllegalArgumentException} is thrown. * the unsigned (original) text. If signature verification fails a {@link IllegalArgumentException} is thrown.
* @param text the string to unsign and verify * @param signedText the string to unsign and verify
*/ */
String unsignAndVerify(String text); public String unsignAndVerify(String signedText) {
if (!signedText.startsWith("$$") || signedText.length() < 2) {
throw new IllegalArgumentException("tampered signed text");
}
/** // $$34$$randomKeyBase64$$sigtext
* Signs the given text and returns the signed text (original text + signature) String[] pieces = signedText.split("\\$\\$");
* @param text the string to sign if (pieces.length != 4 || !pieces[0].equals("")) {
* @param key the key to sign the text with logger.debug("received signed text [{}] with [{}] parts", signedText, pieces.length);
* @param systemKey the system key. This is optional and if the key != systemKey then the format of the throw new IllegalArgumentException("tampered signed text");
* message will change }
*/ String text;
String sign(String text, SecretKey key, SecretKey systemKey) throws IOException; String base64RandomKey;
String receivedSignature;
try {
int length = Integer.parseInt(pieces[1]);
base64RandomKey = pieces[2];
receivedSignature = pieces[3].substring(0, length);
text = pieces[3].substring(length);
} catch (Exception e) {
logger.error("error occurred while parsing signed text", e);
throw new IllegalArgumentException("tampered signed text");
}
/** SecretKey signingKey;
* Unsigns the given signed text, verifies the original text with the attached signature and if valid returns // no random key, so we must have a system key
* the unsigned (original) text. If signature verification fails a {@link IllegalArgumentException} is thrown. if (base64RandomKey.isEmpty()) {
* @param text the string to unsign and verify if (systemKey == null) {
* @param key the key to unsign the text with logger.debug("received signed text without random key information and no system key is present");
*/ throw new IllegalArgumentException("tampered signed text");
String unsignAndVerify(String text, SecretKey key); }
signingKey = systemKey;
} else if (systemKey != null) {
// we have a system key and there is some random key data, this is an error
logger.debug("received signed text with random key information but a system key is present");
throw new IllegalArgumentException("tampered signed text");
} else {
byte[] randomKeyBytes;
try {
randomKeyBytes = Base64.getUrlDecoder().decode(base64RandomKey);
} catch (IllegalArgumentException e) {
logger.error("error occurred while decoding key data", e);
throw new IllegalStateException("error while verifying the signed text");
}
if (randomKeyBytes.length * 8 != RANDOM_KEY_SIZE) {
logger.debug("incorrect random key data length. received [{}] bytes", randomKeyBytes.length);
throw new IllegalArgumentException("tampered signed text");
}
SecretKey randomKey = new SecretKeySpec(randomKeyBytes, KEY_ALGO);
signingKey = createSigningKey(systemKey, randomKey);
}
try {
String sig = signInternal(text, signingKey);
if (constantTimeEquals(sig, receivedSignature)) {
return text;
}
} catch (Exception e) {
logger.error("error occurred while verifying signed text", e);
throw new IllegalStateException("error while verifying the signed text");
}
throw new IllegalArgumentException("tampered signed text");
}
/** /**
* Checks whether the given text is signed. * Checks whether the given text is signed.
*/ */
boolean signed(String text); public boolean isSigned(String text) {
return SIG_PATTERN.matcher(text).matches();
}
/** /**
* Encrypts the provided char array and returns the encrypted values in a char array * Encrypts the provided char array and returns the encrypted values in a char array
* @param chars the characters to encrypt * @param chars the characters to encrypt
* @return character array representing the encrypted data * @return character array representing the encrypted data
*/ */
char[] encrypt(char[] chars); public char[] encrypt(char[] chars) {
SecretKey key = this.encryptionKey;
if (key == null) {
logger.warn("encrypt called without a key, returning plain text. run syskeygen and copy same key to all nodes to enable " +
"encryption");
return chars;
}
/** byte[] charBytes = CharArrays.toUtf8Bytes(chars);
* Encrypts the provided byte array and returns the encrypted value String base64 = Base64.getEncoder().encodeToString(encryptInternal(charBytes, key));
* @param bytes the data to encrypt return ENCRYPTED_TEXT_PREFIX.concat(base64).toCharArray();
* @return encrypted data }
*/
byte[] encrypt(byte[] bytes);
/** /**
* Decrypts the provided char array and returns the plain-text chars * Decrypts the provided char array and returns the plain-text chars
* @param chars the data to decrypt * @param chars the data to decrypt
* @return plaintext chars * @return plaintext chars
*/ */
char[] decrypt(char[] chars); public char[] decrypt(char[] chars) {
if (encryptionKey == null) {
return chars;
}
/** if (!isEncrypted(chars)) {
* Decrypts the provided char array and returns the plain-text chars // Not encrypted
* @param chars the data to decrypt return chars;
* @param key the key to decrypt the data with }
* @return plaintext chars
*/
char[] decrypt(char[] chars, SecretKey key);
/** String encrypted = new String(chars, ENCRYPTED_TEXT_PREFIX.length(), chars.length - ENCRYPTED_TEXT_PREFIX.length());
* Decrypts the provided byte array and returns the unencrypted bytes byte[] bytes;
* @param bytes the bytes to decrypt try {
* @return plaintext bytes bytes = Base64.getDecoder().decode(encrypted);
*/ } catch (IllegalArgumentException e) {
byte[] decrypt(byte[] bytes); throw new ElasticsearchException("unable to decode encrypted data", e);
}
/** byte[] decrypted = decryptInternal(bytes, encryptionKey);
* Decrypts the provided byte array and returns the unencrypted bytes return CharArrays.utf8BytesToChars(decrypted);
* @param bytes the bytes to decrypt }
* @param key the key to decrypt the data with
* @return plaintext bytes
*/
byte[] decrypt(byte[] bytes, SecretKey key);
/** /**
* Checks whether the given chars are encrypted * Checks whether the given chars are encrypted
* @param chars the chars to check if they are encrypted * @param chars the chars to check if they are encrypted
 * @return true if data is encrypted * @return true if data is encrypted
*/ */
boolean encrypted(char[] chars); public boolean isEncrypted(char[] chars) {
return CharArrays.charsBeginsWith(ENCRYPTED_TEXT_PREFIX, chars);
/** }
* Checks whether the given bytes are encrypted
 * @param bytes the bytes to check if they are encrypted
 * @return true if data is encrypted
*/
boolean encrypted(byte[] bytes);
/**
* Registers a listener to be notified of key changes
* @param listener the listener to be notified
*/
void register(Listener listener);
/** /**
* Flag for callers to determine if values will actually be encrypted or returned plaintext * Flag for callers to determine if values will actually be encrypted or returned plaintext
* @return true if values will be encrypted * @return true if values will be encrypted
*/ */
boolean encryptionEnabled(); public boolean isEncryptionEnabled() {
return this.encryptionKey != null;
}
private byte[] encryptInternal(byte[] bytes, SecretKey key) {
byte[] iv = new byte[ivLength];
secureRandom.nextBytes(iv);
Cipher cipher = cipher(Cipher.ENCRYPT_MODE, encryptionAlgorithm, key, iv);
try {
byte[] encrypted = cipher.doFinal(bytes);
byte[] output = new byte[iv.length + encrypted.length];
System.arraycopy(iv, 0, output, 0, iv.length);
System.arraycopy(encrypted, 0, output, iv.length, encrypted.length);
return output;
} catch (BadPaddingException | IllegalBlockSizeException e) {
throw new ElasticsearchException("error encrypting data", e);
}
}
private byte[] decryptInternal(byte[] bytes, SecretKey key) {
if (bytes.length < ivLength) {
logger.error("received data for decryption with size [{}] that is less than IV length [{}]", bytes.length, ivLength);
throw new IllegalArgumentException("invalid data to decrypt");
}
byte[] iv = new byte[ivLength];
System.arraycopy(bytes, 0, iv, 0, ivLength);
byte[] data = new byte[bytes.length - ivLength];
System.arraycopy(bytes, ivLength, data, 0, bytes.length - ivLength);
Cipher cipher = cipher(Cipher.DECRYPT_MODE, encryptionAlgorithm, key, iv);
try {
return cipher.doFinal(data);
} catch (BadPaddingException | IllegalBlockSizeException e) {
throw new IllegalStateException("error decrypting data", e);
}
}
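As a rough, self-contained sketch of the IV-prefixed layout produced by encryptInternal and consumed by decryptInternal, assuming the default AES/CTR/NoPadding algorithm and a 16-byte IV for the default 128-bit key; the class and variable names are illustrative. Prepending a fresh random IV is what lets CTR mode reuse one key across many values.

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.Arrays;

// Illustrative round trip of the layout used above: output = IV || ciphertext.
class IvLayoutSketch {
    public static void main(String[] args) throws Exception {
        KeyGenerator gen = KeyGenerator.getInstance("AES");
        gen.init(128);
        SecretKey key = gen.generateKey();

        byte[] plain = "some secret".getBytes(StandardCharsets.UTF_8);
        byte[] iv = new byte[16];                                     // ivLength = keyLength / 8 = 16 for the 128-bit default
        new SecureRandom().nextBytes(iv);

        Cipher enc = Cipher.getInstance("AES/CTR/NoPadding");
        enc.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
        byte[] ciphertext = enc.doFinal(plain);
        byte[] output = new byte[iv.length + ciphertext.length];      // IV is stored in front of the ciphertext
        System.arraycopy(iv, 0, output, 0, iv.length);
        System.arraycopy(ciphertext, 0, output, iv.length, ciphertext.length);

        // decryptInternal peels the IV off the front and decrypts the remainder
        Cipher dec = Cipher.getInstance("AES/CTR/NoPadding");
        dec.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(Arrays.copyOfRange(output, 0, 16)));
        byte[] roundTripped = dec.doFinal(Arrays.copyOfRange(output, 16, output.length));
        System.out.println("round trip ok: " + Arrays.equals(plain, roundTripped));
    }
}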
static Mac createMac(SecretKey key) {
try {
Mac mac = HmacSHA1Provider.hmacSHA1();
mac.init(key);
return mac;
} catch (Exception e) {
throw new ElasticsearchException("could not initialize mac", e);
}
}
private static String signInternal(String text, SecretKey key) throws IOException {
Mac mac = createMac(key);
byte[] sig = mac.doFinal(text.getBytes(StandardCharsets.UTF_8));
return Base64.getUrlEncoder().encodeToString(sig);
}
static Cipher cipher(int mode, String encryptionAlgorithm, SecretKey key, byte[] initializationVector) {
try {
Cipher cipher = Cipher.getInstance(encryptionAlgorithm);
cipher.init(mode, key, new IvParameterSpec(initializationVector));
return cipher;
} catch (Exception e) {
throw new ElasticsearchException("error creating cipher", e);
}
}
static SecretKey encryptionKey(SecretKey systemKey, int keyLength, String algorithm) throws NoSuchAlgorithmException {
if (systemKey == null) {
return null;
}
byte[] bytes = systemKey.getEncoded();
if ((bytes.length * 8) < keyLength) {
throw new IllegalArgumentException("at least " + keyLength + " bits should be provided as key data");
}
MessageDigest messageDigest = MessageDigest.getInstance("SHA-256");
byte[] digest = messageDigest.digest(bytes);
assert digest.length == (256 / 8);
if ((digest.length * 8) < keyLength) {
throw new IllegalArgumentException("requested key length is too large");
}
byte[] truncatedDigest = Arrays.copyOfRange(digest, 0, (keyLength / 8));
return new SecretKeySpec(truncatedDigest, algorithm);
}
/**
* Provider class for the HmacSHA1 {@link Mac} that provides an optimization by using a thread local instead of calling
* Mac#getInstance and obtaining a lock (in the internals)
*/
private static class HmacSHA1Provider {
private static final ThreadLocal<Mac> MAC = ThreadLocal.withInitial(() -> {
try {
return Mac.getInstance(HMAC_ALGO);
} catch (NoSuchAlgorithmException e) {
throw new IllegalStateException("could not create Mac instance with algorithm [" + HMAC_ALGO + "]", e);
}
});
private static Mac hmacSHA1() {
Mac instance = MAC.get();
instance.reset();
return instance;
}
}
/**
 * Simplified implementation of HKDF using the HmacSHA1 algorithm.
*
* @see <a href=https://tools.ietf.org/html/rfc5869>RFC 5869</a>
*/
private static class HmacSHA1HKDF {
private static final int HMAC_SHA1_BYTE_LENGTH = 20;
private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1";
interface Listener {
/** /**
* This method will be called immediately after a new system key and encryption key are loaded by the * This method performs the <code>extract</code> and <code>expand</code> steps of HKDF in one call with the given
* service. This provides the old keys back to the clients so that they may perform decryption and re-encryption * data. The output of the extract step is used as the input to the expand step
* of data after a key has been changed
* *
* @param oldSystemKey the pre-existing system key * @param salt optional salt value (a non-secret random value); if not provided, it is set to a string of HashLen zeros.
* @param oldEncryptionKey the pre-existing encryption key * @param ikm the input keying material
* @param info optional context and application specific information; if not provided a zero length byte[] is used
* @param outputLength length of output keying material in octets (&lt;= 255*HashLen)
* @return the output keying material
*/ */
void onKeyChange(SecretKey oldSystemKey, SecretKey oldEncryptionKey); static byte[] extractAndExpand(@Nullable SecretKey salt, byte[] ikm, @Nullable byte[] info, int outputLength) {
// arg checking
Objects.requireNonNull(ikm, "the input keying material must not be null");
if (outputLength < 1) {
throw new IllegalArgumentException("output length must be positive int >= 1");
}
if (outputLength > 255 * HMAC_SHA1_BYTE_LENGTH) {
throw new IllegalArgumentException("output length must be <= 255*" + HMAC_SHA1_BYTE_LENGTH);
}
if (salt == null) {
salt = new SecretKeySpec(new byte[HMAC_SHA1_BYTE_LENGTH], HMAC_SHA1_ALGORITHM);
}
if (info == null) {
info = new byte[0];
}
// extract
Mac mac = createMac(salt);
byte[] keyBytes = mac.doFinal(ikm);
final SecretKey pseudoRandomKey = new SecretKeySpec(keyBytes, HMAC_SHA1_ALGORITHM);
/*
* The output OKM is calculated as follows:
* N = ceil(L/HashLen)
* T = T(1) | T(2) | T(3) | ... | T(N)
* OKM = first L octets of T
*
* where:
* T(0) = empty string (zero length)
* T(1) = HMAC-Hash(PRK, T(0) | info | 0x01)
* T(2) = HMAC-Hash(PRK, T(1) | info | 0x02)
* T(3) = HMAC-Hash(PRK, T(2) | info | 0x03)
* ...
*
* (where the constant concatenated to the end of each T(n) is a single octet.)
*/
int n = (outputLength % HMAC_SHA1_BYTE_LENGTH == 0) ?
outputLength / HMAC_SHA1_BYTE_LENGTH :
(outputLength / HMAC_SHA1_BYTE_LENGTH) + 1;
byte[] hashRound = new byte[0];
ByteBuffer generatedBytes = ByteBuffer.allocate(Math.multiplyExact(n, HMAC_SHA1_BYTE_LENGTH));
try {
 // initialize the mac with the new key
mac.init(pseudoRandomKey);
} catch (InvalidKeyException e) {
throw new ElasticsearchException("failed to initialize the mac", e);
}
for (int roundNum = 1; roundNum <= n; roundNum++) {
mac.reset();
mac.update(hashRound);
mac.update(info);
mac.update((byte) roundNum);
hashRound = mac.doFinal();
generatedBytes.put(hashRound);
}
byte[] result = new byte[outputLength];
generatedBytes.rewind();
generatedBytes.get(result, 0, outputLength);
return result;
}
}
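For orientation, a usage sketch of the expansion path taken by createSigningKey when no system key is configured: a 128-bit random HmacSHA512 key is stretched to 1024 bits with HKDF, mixing in the application info string. This assumes the extractAndExpand helper above were reachable from the calling code (it is a private nested class here), so treat it as illustrative only.

import java.nio.charset.StandardCharsets;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;

class SigningKeyExpansionSketch {
    public static void main(String[] args) throws Exception {
        KeyGenerator generator = KeyGenerator.getInstance("HmacSHA512");
        generator.init(128);                                                         // RANDOM_KEY_SIZE bits
        SecretKey randomKey = generator.generateKey();

        byte[] info = "es-security-crypto-service".getBytes(StandardCharsets.UTF_8); // HKDF_APP_INFO
        byte[] keyBytes = HmacSHA1HKDF.extractAndExpand(null, randomKey.getEncoded(), info, 1024 / 8);
        SecretKey signingKey = new SecretKeySpec(keyBytes, "HmacSHA512");
        System.out.println("expanded to a " + keyBytes.length * 8 + " bit " + signingKey.getAlgorithm() + " key");
    }
}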
public static void addSettings(List<Setting<?>> settings) {
settings.add(FILE_SETTING);
settings.add(ENCRYPTION_KEY_LENGTH_SETTING);
settings.add(ENCRYPTION_KEY_ALGO_SETTING);
settings.add(ENCRYPTION_ALGO_SETTING);
} }
} }

View File

@ -1,673 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.crypto;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.authc.support.CharArrays;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.regex.Pattern;
import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.authc.support.SecuredString.constantTimeEquals;
public class InternalCryptoService extends AbstractLifecycleComponent implements CryptoService {
public static final String KEY_ALGO = "HmacSHA512";
public static final int KEY_SIZE = 1024;
static final String FILE_NAME = "system_key";
static final String HMAC_ALGO = "HmacSHA1";
static final String DEFAULT_ENCRYPTION_ALGORITHM = "AES/CTR/NoPadding";
static final String DEFAULT_KEY_ALGORITH = "AES";
static final String ENCRYPTED_TEXT_PREFIX = "::es_encrypted::";
static final byte[] ENCRYPTED_BYTE_PREFIX = ENCRYPTED_TEXT_PREFIX.getBytes(StandardCharsets.UTF_8);
static final int DEFAULT_KEY_LENGTH = 128;
static final int RANDOM_KEY_SIZE = 128;
private static final Pattern SIG_PATTERN = Pattern.compile("^\\$\\$[0-9]+\\$\\$[^\\$]*\\$\\$.+");
private static final byte[] HKDF_APP_INFO = "es-security-crypto-service".getBytes(StandardCharsets.UTF_8);
public static final Setting<String> FILE_SETTING = Setting.simpleString(setting("system_key.file"), Property.NodeScope);
public static final Setting<String> ENCRYPTION_ALGO_SETTING =
new Setting<>(setting("encryption.algorithm"), s -> DEFAULT_ENCRYPTION_ALGORITHM, s -> s, Property.NodeScope);
public static final Setting<Integer> ENCRYPTION_KEY_LENGTH_SETTING =
Setting.intSetting(setting("encryption_key.length"), DEFAULT_KEY_LENGTH, Property.NodeScope);
public static final Setting<String> ENCRYPTION_KEY_ALGO_SETTING =
new Setting<>(setting("encryption_key.algorithm"), DEFAULT_KEY_ALGORITH, s -> s, Property.NodeScope);
private final Environment env;
private final ResourceWatcherService watcherService;
private final List<Listener> listeners;
private final SecureRandom secureRandom = new SecureRandom();
private final String encryptionAlgorithm;
private final String keyAlgorithm;
private final int keyLength;
private final int ivLength;
private Path keyFile;
private SecretKey randomKey;
private String randomKeyBase64;
private volatile SecretKey encryptionKey;
private volatile SecretKey systemKey;
private volatile SecretKey signingKey;
@Inject
public InternalCryptoService(Settings settings, Environment env, ResourceWatcherService watcherService) {
this(settings, env, watcherService, Collections.<Listener>emptyList());
}
InternalCryptoService(Settings settings, Environment env, ResourceWatcherService watcherService, List<Listener> listeners) {
super(settings);
this.env = env;
this.watcherService = watcherService;
this.listeners = new CopyOnWriteArrayList<>(listeners);
this.encryptionAlgorithm = ENCRYPTION_ALGO_SETTING.get(settings);
this.keyLength = ENCRYPTION_KEY_LENGTH_SETTING.get(settings);
this.ivLength = keyLength / 8;
this.keyAlgorithm = ENCRYPTION_KEY_ALGO_SETTING.get(settings);
}
@Override
protected void doStart() throws ElasticsearchException {
if (keyLength % 8 != 0) {
throw new IllegalArgumentException("invalid key length [" + keyLength + "]. value must be a multiple of 8");
}
loadKeys();
FileWatcher watcher = new FileWatcher(keyFile.getParent());
watcher.addListener(new FileListener(listeners));
try {
watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
} catch (IOException e) {
throw new ElasticsearchException("failed to start watching system key file [" + keyFile.toAbsolutePath() + "]", e);
}
}
@Override
protected void doStop() throws ElasticsearchException {
}
@Override
protected void doClose() throws ElasticsearchException {
}
private void loadKeys() {
keyFile = resolveSystemKey(settings, env);
systemKey = readSystemKey(keyFile);
randomKey = generateSecretKey(RANDOM_KEY_SIZE);
randomKeyBase64 = Base64.getUrlEncoder().encodeToString(randomKey.getEncoded());
signingKey = createSigningKey(systemKey, randomKey);
try {
encryptionKey = encryptionKey(systemKey, keyLength, keyAlgorithm);
} catch (NoSuchAlgorithmException nsae) {
throw new ElasticsearchException("failed to start crypto service. could not load encryption key", nsae);
}
}
public static byte[] generateKey() {
return generateSecretKey(KEY_SIZE).getEncoded();
}
static SecretKey generateSecretKey(int keyLength) {
try {
KeyGenerator generator = KeyGenerator.getInstance(KEY_ALGO);
generator.init(keyLength);
return generator.generateKey();
} catch (NoSuchAlgorithmException e) {
throw new ElasticsearchException("failed to generate key", e);
}
}
public static Path resolveSystemKey(Settings settings, Environment env) {
String location = FILE_SETTING.get(settings);
if (location.isEmpty()) {
return XPackPlugin.resolveConfigFile(env, FILE_NAME);
}
return XPackPlugin.resolveConfigFile(env, location);
}
static SecretKey createSigningKey(@Nullable SecretKey systemKey, SecretKey randomKey) {
assert randomKey != null;
if (systemKey != null) {
return systemKey;
} else {
// the random key is only 128 bits so we use HKDF to expand to 1024 bits with some application specific data mixed in
byte[] keyBytes = HmacSHA1HKDF.extractAndExpand(null, randomKey.getEncoded(), HKDF_APP_INFO, (KEY_SIZE / 8));
assert keyBytes.length * 8 == KEY_SIZE;
return new SecretKeySpec(keyBytes, KEY_ALGO);
}
}
private static SecretKey readSystemKey(Path file) {
if (!Files.exists(file)) {
return null;
}
try {
byte[] bytes = Files.readAllBytes(file);
return new SecretKeySpec(bytes, KEY_ALGO);
} catch (IOException e) {
throw new ElasticsearchException("could not read secret key", e);
}
}
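A hedged end-to-end sketch of producing and reloading the system_key file: key generation mirrors generateKey above, the write mirrors what the syskeygen tool does, and the read path mirrors readSystemKey. The output path is hypothetical; real deployments resolve it under the node's config directory.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;

class SystemKeyFileSketch {
    public static void main(String[] args) throws Exception {
        // generate 1024 bits of HmacSHA512 key material, as generateKey() does
        KeyGenerator generator = KeyGenerator.getInstance("HmacSHA512");
        generator.init(1024);
        byte[] key = generator.generateKey().getEncoded();

        // write it out; CREATE_NEW refuses to overwrite an existing key file
        Path keyPath = Paths.get("system_key");
        Files.write(keyPath, key, StandardOpenOption.CREATE_NEW);

        // read it back the way readSystemKey() does
        byte[] bytes = Files.readAllBytes(keyPath);
        SecretKey systemKey = new SecretKeySpec(bytes, "HmacSHA512");
        System.out.println("loaded a " + bytes.length * 8 + " bit " + systemKey.getAlgorithm() + " system key");
    }
}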
@Override
public String sign(String text) throws IOException {
return sign(text, this.signingKey, this.systemKey);
}
@Override
public String sign(String text, SecretKey signingKey, @Nullable SecretKey systemKey) throws IOException {
assert signingKey != null;
String sigStr = signInternal(text, signingKey);
return "$$" + sigStr.length() + "$$" + (systemKey == signingKey ? "" : randomKeyBase64) + "$$" + sigStr + text;
}
@Override
public String unsignAndVerify(String signedText) {
return unsignAndVerify(signedText, this.systemKey);
}
@Override
public String unsignAndVerify(String signedText, SecretKey systemKey) {
if (!signedText.startsWith("$$") || signedText.length() < 2) {
throw new IllegalArgumentException("tampered signed text");
}
// $$34$$randomKeyBase64$$sigtext
String[] pieces = signedText.split("\\$\\$");
if (pieces.length != 4 || !pieces[0].equals("")) {
logger.debug("received signed text [{}] with [{}] parts", signedText, pieces.length);
throw new IllegalArgumentException("tampered signed text");
}
String text;
String base64RandomKey;
String receivedSignature;
try {
int length = Integer.parseInt(pieces[1]);
base64RandomKey = pieces[2];
receivedSignature = pieces[3].substring(0, length);
text = pieces[3].substring(length);
} catch (Exception e) {
logger.error("error occurred while parsing signed text", e);
throw new IllegalArgumentException("tampered signed text");
}
SecretKey signingKey;
// no random key, so we must have a system key
if (base64RandomKey.isEmpty()) {
if (systemKey == null) {
logger.debug("received signed text without random key information and no system key is present");
throw new IllegalArgumentException("tampered signed text");
}
signingKey = systemKey;
} else if (systemKey != null) {
// we have a system key and there is some random key data, this is an error
logger.debug("received signed text with random key information but a system key is present");
throw new IllegalArgumentException("tampered signed text");
} else {
byte[] randomKeyBytes;
try {
randomKeyBytes = Base64.getUrlDecoder().decode(base64RandomKey);
} catch (IllegalArgumentException e) {
logger.error("error occurred while decoding key data", e);
throw new IllegalStateException("error while verifying the signed text");
}
if (randomKeyBytes.length * 8 != RANDOM_KEY_SIZE) {
logger.debug("incorrect random key data length. received [{}] bytes", randomKeyBytes.length);
throw new IllegalArgumentException("tampered signed text");
}
SecretKey randomKey = new SecretKeySpec(randomKeyBytes, KEY_ALGO);
signingKey = createSigningKey(systemKey, randomKey);
}
try {
String sig = signInternal(text, signingKey);
if (constantTimeEquals(sig, receivedSignature)) {
return text;
}
} catch (Exception e) {
logger.error("error occurred while verifying signed text", e);
throw new IllegalStateException("error while verifying the signed text");
}
throw new IllegalArgumentException("tampered signed text");
}
@Override
public boolean signed(String text) {
return SIG_PATTERN.matcher(text).matches();
}
@Override
public char[] encrypt(char[] chars) {
SecretKey key = this.encryptionKey;
if (key == null) {
logger.warn("encrypt called without a key, returning plain text. run syskeygen and copy same key to all nodes to enable " +
"encryption");
return chars;
}
byte[] charBytes = CharArrays.toUtf8Bytes(chars);
String base64 = Base64.getEncoder().encodeToString(encryptInternal(charBytes, key));
return ENCRYPTED_TEXT_PREFIX.concat(base64).toCharArray();
}
@Override
public byte[] encrypt(byte[] bytes) {
SecretKey key = this.encryptionKey;
if (key == null) {
logger.warn("encrypt called without a key, returning plain text. run syskeygen and copy same key to all nodes to enable " +
"encryption");
return bytes;
}
byte[] encrypted = encryptInternal(bytes, key);
byte[] prefixed = new byte[ENCRYPTED_BYTE_PREFIX.length + encrypted.length];
System.arraycopy(ENCRYPTED_BYTE_PREFIX, 0, prefixed, 0, ENCRYPTED_BYTE_PREFIX.length);
System.arraycopy(encrypted, 0, prefixed, ENCRYPTED_BYTE_PREFIX.length, encrypted.length);
return prefixed;
}
@Override
public char[] decrypt(char[] chars) {
return decrypt(chars, this.encryptionKey);
}
@Override
public char[] decrypt(char[] chars, SecretKey key) {
if (key == null) {
return chars;
}
if (!encrypted(chars)) {
// Not encrypted
return chars;
}
String encrypted = new String(chars, ENCRYPTED_TEXT_PREFIX.length(), chars.length - ENCRYPTED_TEXT_PREFIX.length());
byte[] bytes;
try {
bytes = Base64.getDecoder().decode(encrypted);
} catch (IllegalArgumentException e) {
throw new ElasticsearchException("unable to decode encrypted data", e);
}
byte[] decrypted = decryptInternal(bytes, key);
return CharArrays.utf8BytesToChars(decrypted);
}
@Override
public byte[] decrypt(byte[] bytes) {
return decrypt(bytes, this.encryptionKey);
}
@Override
public byte[] decrypt(byte[] bytes, SecretKey key) {
if (key == null) {
return bytes;
}
if (!encrypted(bytes)) {
return bytes;
}
byte[] encrypted = Arrays.copyOfRange(bytes, ENCRYPTED_BYTE_PREFIX.length, bytes.length);
return decryptInternal(encrypted, key);
}
@Override
public boolean encrypted(char[] chars) {
return CharArrays.charsBeginsWith(ENCRYPTED_TEXT_PREFIX, chars);
}
@Override
public boolean encrypted(byte[] bytes) {
return bytesBeginsWith(ENCRYPTED_BYTE_PREFIX, bytes);
}
@Override
public void register(Listener listener) {
this.listeners.add(listener);
}
@Override
public boolean encryptionEnabled() {
return this.encryptionKey != null;
}
private byte[] encryptInternal(byte[] bytes, SecretKey key) {
byte[] iv = new byte[ivLength];
secureRandom.nextBytes(iv);
Cipher cipher = cipher(Cipher.ENCRYPT_MODE, encryptionAlgorithm, key, iv);
try {
byte[] encrypted = cipher.doFinal(bytes);
byte[] output = new byte[iv.length + encrypted.length];
System.arraycopy(iv, 0, output, 0, iv.length);
System.arraycopy(encrypted, 0, output, iv.length, encrypted.length);
return output;
} catch (BadPaddingException | IllegalBlockSizeException e) {
throw new ElasticsearchException("error encrypting data", e);
}
}
private byte[] decryptInternal(byte[] bytes, SecretKey key) {
if (bytes.length < ivLength) {
logger.error("received data for decryption with size [{}] that is less than IV length [{}]", bytes.length, ivLength);
throw new IllegalArgumentException("invalid data to decrypt");
}
byte[] iv = new byte[ivLength];
System.arraycopy(bytes, 0, iv, 0, ivLength);
byte[] data = new byte[bytes.length - ivLength];
System.arraycopy(bytes, ivLength, data, 0, bytes.length - ivLength);
Cipher cipher = cipher(Cipher.DECRYPT_MODE, encryptionAlgorithm, key, iv);
try {
return cipher.doFinal(data);
} catch (BadPaddingException | IllegalBlockSizeException e) {
throw new IllegalStateException("error decrypting data", e);
}
}
static Mac createMac(SecretKey key) {
try {
Mac mac = HmacSHA1Provider.hmacSHA1();
mac.init(key);
return mac;
} catch (Exception e) {
throw new ElasticsearchException("could not initialize mac", e);
}
}
private static String signInternal(String text, SecretKey key) throws IOException {
Mac mac = createMac(key);
byte[] sig = mac.doFinal(text.getBytes(StandardCharsets.UTF_8));
return Base64.getUrlEncoder().encodeToString(sig);
}
static Cipher cipher(int mode, String encryptionAlgorithm, SecretKey key, byte[] initializationVector) {
try {
Cipher cipher = Cipher.getInstance(encryptionAlgorithm);
cipher.init(mode, key, new IvParameterSpec(initializationVector));
return cipher;
} catch (Exception e) {
throw new ElasticsearchException("error creating cipher", e);
}
}
static SecretKey encryptionKey(SecretKey systemKey, int keyLength, String algorithm) throws NoSuchAlgorithmException {
if (systemKey == null) {
return null;
}
byte[] bytes = systemKey.getEncoded();
if ((bytes.length * 8) < keyLength) {
throw new IllegalArgumentException("at least " + keyLength + " bits should be provided as key data");
}
MessageDigest messageDigest = MessageDigest.getInstance("SHA-256");
byte[] digest = messageDigest.digest(bytes);
assert digest.length == (256 / 8);
if ((digest.length * 8) < keyLength) {
throw new IllegalArgumentException("requested key length is too large");
}
byte[] truncatedDigest = Arrays.copyOfRange(digest, 0, (keyLength / 8));
return new SecretKeySpec(truncatedDigest, algorithm);
}
private static boolean bytesBeginsWith(byte[] prefix, byte[] bytes) {
if (bytes == null || prefix == null) {
return false;
}
if (prefix.length > bytes.length) {
return false;
}
for (int i = 0; i < prefix.length; i++) {
if (bytes[i] != prefix[i]) {
return false;
}
}
return true;
}
private class FileListener extends FileChangesListener {
private final List<Listener> listeners;
private FileListener(List<Listener> listeners) {
this.listeners = listeners;
}
@Override
public void onFileCreated(Path file) {
if (file.equals(keyFile)) {
final SecretKey oldSystemKey = systemKey;
final SecretKey oldEncryptionKey = encryptionKey;
systemKey = readSystemKey(file);
signingKey = createSigningKey(systemKey, randomKey);
try {
encryptionKey = encryptionKey(signingKey, keyLength, keyAlgorithm);
} catch (NoSuchAlgorithmException nsae) {
logger.error("could not load encryption key", nsae);
encryptionKey = null;
}
logger.info("system key [{}] has been loaded", file.toAbsolutePath());
callListeners(oldSystemKey, oldEncryptionKey);
}
}
@Override
public void onFileDeleted(Path file) {
if (file.equals(keyFile)) {
final SecretKey oldSystemKey = systemKey;
final SecretKey oldEncryptionKey = encryptionKey;
logger.error("system key file was removed! as long as the system key file is missing, elasticsearch " +
"won't function as expected for some requests (e.g. scroll/scan)");
systemKey = null;
encryptionKey = null;
signingKey = createSigningKey(systemKey, randomKey);
callListeners(oldSystemKey, oldEncryptionKey);
}
}
@Override
public void onFileChanged(Path file) {
if (file.equals(keyFile)) {
final SecretKey oldSystemKey = systemKey;
final SecretKey oldEncryptionKey = encryptionKey;
logger.warn("system key file changed!");
SecretKey systemKey = readSystemKey(file);
signingKey = createSigningKey(systemKey, randomKey);
try {
encryptionKey = encryptionKey(signingKey, keyLength, keyAlgorithm);
} catch (NoSuchAlgorithmException nsae) {
logger.error("could not load encryption key", nsae);
encryptionKey = null;
}
callListeners(oldSystemKey, oldEncryptionKey);
}
}
private void callListeners(SecretKey oldSystemKey, SecretKey oldEncryptionKey) {
RuntimeException ex = null;
for (Listener listener : listeners) {
try {
listener.onKeyChange(oldSystemKey, oldEncryptionKey);
} catch (Exception e) {
if (ex == null) ex = new RuntimeException("exception calling key change listeners");
ex.addSuppressed(e);
}
}
// all listeners were notified now rethrow
if (ex != null) {
logger.error("called all key change listeners but one or more exceptions was thrown", ex);
throw ex;
}
}
}
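A hedged sketch of the kind of Listener the callbacks above are meant for, as described by the interface javadoc: decrypt application-held values with the old key and re-encrypt them with whatever the service now holds. The store field and the surrounding wiring are hypothetical; only the onKeyChange(oldSystemKey, oldEncryptionKey) shape and the decrypt/encrypt calls come from the CryptoService interface, and such a listener would be handed to register(Listener).

import java.util.Map;
import javax.crypto.SecretKey;
import org.elasticsearch.xpack.security.crypto.CryptoService;

// Hypothetical listener: on key change, re-encrypt cached values so they stay readable.
class ReencryptingListener implements CryptoService.Listener {
    private final CryptoService cryptoService;   // already holds the newly loaded keys when the callback fires
    private final Map<String, char[]> store;     // some application-managed encrypted values (illustrative)

    ReencryptingListener(CryptoService cryptoService, Map<String, char[]> store) {
        this.cryptoService = cryptoService;
        this.store = store;
    }

    @Override
    public void onKeyChange(SecretKey oldSystemKey, SecretKey oldEncryptionKey) {
        for (Map.Entry<String, char[]> entry : store.entrySet()) {
            char[] plain = cryptoService.decrypt(entry.getValue(), oldEncryptionKey); // the old key still decrypts
            entry.setValue(cryptoService.encrypt(plain));                             // re-encrypt with the current key
        }
    }
}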
/**
* Provider class for the HmacSHA1 {@link Mac} that provides an optimization by using a thread local instead of calling
* Mac#getInstance and obtaining a lock (in the internals)
*/
private static class HmacSHA1Provider {
private static final ThreadLocal<Mac> MAC = ThreadLocal.withInitial(() -> {
try {
return Mac.getInstance(HMAC_ALGO);
} catch (NoSuchAlgorithmException e) {
throw new IllegalStateException("could not create Mac instance with algorithm [" + HMAC_ALGO + "]", e);
}
});
private static Mac hmacSHA1() {
Mac instance = MAC.get();
instance.reset();
return instance;
}
}
/**
 * Simplified implementation of HKDF using the HmacSHA1 algorithm.
*
* @see <a href=https://tools.ietf.org/html/rfc5869>RFC 5869</a>
*/
private static class HmacSHA1HKDF {
private static final int HMAC_SHA1_BYTE_LENGTH = 20;
private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1";
/**
* This method performs the <code>extract</code> and <code>expand</code> steps of HKDF in one call with the given
* data. The output of the extract step is used as the input to the expand step
*
* @param salt optional salt value (a non-secret random value); if not provided, it is set to a string of HashLen zeros.
* @param ikm the input keying material
* @param info optional context and application specific information; if not provided a zero length byte[] is used
* @param outputLength length of output keying material in octets (&lt;= 255*HashLen)
* @return the output keying material
*/
static byte[] extractAndExpand(@Nullable SecretKey salt, byte[] ikm, @Nullable byte[] info, int outputLength) {
// arg checking
Objects.requireNonNull(ikm, "the input keying material must not be null");
if (outputLength < 1) {
throw new IllegalArgumentException("output length must be positive int >= 1");
}
if (outputLength > 255 * HMAC_SHA1_BYTE_LENGTH) {
throw new IllegalArgumentException("output length must be <= 255*" + HMAC_SHA1_BYTE_LENGTH);
}
if (salt == null) {
salt = new SecretKeySpec(new byte[HMAC_SHA1_BYTE_LENGTH], HMAC_SHA1_ALGORITHM);
}
if (info == null) {
info = new byte[0];
}
// extract
Mac mac = createMac(salt);
byte[] keyBytes = mac.doFinal(ikm);
final SecretKey pseudoRandomKey = new SecretKeySpec(keyBytes, HMAC_SHA1_ALGORITHM);
/*
* The output OKM is calculated as follows:
* N = ceil(L/HashLen)
* T = T(1) | T(2) | T(3) | ... | T(N)
* OKM = first L octets of T
*
* where:
* T(0) = empty string (zero length)
* T(1) = HMAC-Hash(PRK, T(0) | info | 0x01)
* T(2) = HMAC-Hash(PRK, T(1) | info | 0x02)
* T(3) = HMAC-Hash(PRK, T(2) | info | 0x03)
* ...
*
* (where the constant concatenated to the end of each T(n) is a single octet.)
*/
int n = (outputLength % HMAC_SHA1_BYTE_LENGTH == 0) ?
outputLength / HMAC_SHA1_BYTE_LENGTH :
(outputLength / HMAC_SHA1_BYTE_LENGTH) + 1;
byte[] hashRound = new byte[0];
ByteBuffer generatedBytes = ByteBuffer.allocate(Math.multiplyExact(n, HMAC_SHA1_BYTE_LENGTH));
try {
 // initialize the mac with the new key
mac.init(pseudoRandomKey);
} catch (InvalidKeyException e) {
throw new ElasticsearchException("failed to initialize the mac", e);
}
for (int roundNum = 1; roundNum <= n; roundNum++) {
mac.reset();
mac.update(hashRound);
mac.update(info);
mac.update((byte) roundNum);
hashRound = mac.doFinal();
generatedBytes.put(hashRound);
}
byte[] result = new byte[outputLength];
generatedBytes.rewind();
generatedBytes.get(result, 0, outputLength);
return result;
}
}
public static void addSettings(List<Setting<?>> settings) {
settings.add(FILE_SETTING);
settings.add(ENCRYPTION_KEY_LENGTH_SETTING);
settings.add(ENCRYPTION_KEY_ALGO_SETTING);
settings.add(ENCRYPTION_ALGO_SETTING);
}
}

View File

@ -17,7 +17,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.xpack.security.crypto.InternalCryptoService; import org.elasticsearch.xpack.security.crypto.CryptoService;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
@ -66,12 +66,12 @@ public class SystemKeyTool extends SettingCommand {
} }
keyPath = parsePath(args.get(0)); keyPath = parsePath(args.get(0));
} else { } else {
keyPath = InternalCryptoService.resolveSystemKey(env.settings(), env); keyPath = CryptoService.resolveSystemKey(env.settings(), env);
} }
// write the key // write the key
terminal.println(Terminal.Verbosity.VERBOSE, "generating..."); terminal.println(Terminal.Verbosity.VERBOSE, "generating...");
byte[] key = InternalCryptoService.generateKey(); byte[] key = CryptoService.generateKey();
terminal.println(String.format(Locale.ROOT, "Storing generated key in [%s]...", keyPath.toAbsolutePath())); terminal.println(String.format(Locale.ROOT, "Storing generated key in [%s]...", keyPath.toAbsolutePath()));
Files.write(keyPath, key, StandardOpenOption.CREATE_NEW); Files.write(keyPath, key, StandardOpenOption.CREATE_NEW);

View File

@ -35,6 +35,12 @@ public class RestAuthenticateAction extends BaseRestHandler {
super(settings); super(settings);
this.securityContext = securityContext; this.securityContext = securityContext;
controller.registerHandler(GET, "/_xpack/security/_authenticate", this); controller.registerHandler(GET, "/_xpack/security/_authenticate", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(GET, "/_shield/authenticate", this,
"[GET /_shield/authenticate] is deprecated! Use " +
"[GET /_xpack/security/_authenticate] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -23,8 +23,17 @@ public class RestClearRealmCacheAction extends BaseRestHandler {
@Inject @Inject
public RestClearRealmCacheAction(Settings settings, RestController controller) { public RestClearRealmCacheAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(POST, "/_xpack/security/realm/{realms}/_cache/clear", this); // deprecated
controller.registerHandler(POST, "/_xpack/security/realm/{realms}/_clear_cache", this); controller.registerHandler(POST, "/_xpack/security/realm/{realms}/_clear_cache", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/realm/{realms}/_cache/clear", this,
"[POST /_shield/realm/{realms}/_cache/clear] is deprecated! Use " +
"[POST /_xpack/security/realm/{realms}/_clear_cache] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(POST, "/_shield/realm/{realms}/_clear_cache", this,
"[POST /_shield/realm/{realms}/_clear_cache] is deprecated! Use " +
"[POST /_xpack/security/realm/{realms}/_clear_cache] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -5,7 +5,6 @@
*/ */
package org.elasticsearch.xpack.security.rest.action.role; package org.elasticsearch.xpack.security.rest.action.role;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -28,6 +27,12 @@ public class RestClearRolesCacheAction extends BaseRestHandler {
public RestClearRolesCacheAction(Settings settings, RestController controller) { public RestClearRolesCacheAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(POST, "/_xpack/security/role/{name}/_clear_cache", this); controller.registerHandler(POST, "/_xpack/security/role/{name}/_clear_cache", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/role/{name}/_clear_cache", this,
"[POST /_shield/role/{name}/_clear_cache] is deprecated! Use " +
"[POST /_xpack/security/role/{name}/_clear_cache] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -21,6 +21,8 @@ import org.elasticsearch.xpack.security.action.role.DeleteRoleRequestBuilder;
import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse;
import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
/** /**
* Rest endpoint to delete a Role from the security index * Rest endpoint to delete a Role from the security index
*/ */
@ -29,7 +31,13 @@ public class RestDeleteRoleAction extends BaseRestHandler {
@Inject @Inject
public RestDeleteRoleAction(Settings settings, RestController controller) { public RestDeleteRoleAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.DELETE, "/_xpack/security/role/{name}", this); controller.registerHandler(DELETE, "/_xpack/security/role/{name}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(DELETE, "/_shield/role/{name}", this,
"[DELETE /_shield/role/{name}] is deprecated! Use " +
"[DELETE /_xpack/security/role/{name}] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -22,6 +22,8 @@ import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import static org.elasticsearch.rest.RestRequest.Method.GET;
/** /**
* Rest endpoint to retrieve a Role from the security index * Rest endpoint to retrieve a Role from the security index
*/ */
@ -30,8 +32,18 @@ public class RestGetRolesAction extends BaseRestHandler {
@Inject @Inject
public RestGetRolesAction(Settings settings, RestController controller) { public RestGetRolesAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/role/", this); controller.registerHandler(GET, "/_xpack/security/role/", this);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/role/{name}", this); controller.registerHandler(GET, "/_xpack/security/role/{name}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(GET, "/_shield/role", this,
"[GET /_shield/role] is deprecated! Use " +
"[GET /_xpack/security/role] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(GET, "/_shield/role/{name}", this,
"[GET /_shield/role/{name}] is deprecated! Use " +
"[GET /_xpack/security/role/{name}] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -21,6 +21,9 @@ import org.elasticsearch.xpack.security.action.role.PutRoleRequestBuilder;
import org.elasticsearch.xpack.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.security.action.role.PutRoleResponse;
import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/** /**
* Rest endpoint to add a Role to the security index * Rest endpoint to add a Role to the security index
*/ */
@ -29,8 +32,18 @@ public class RestPutRoleAction extends BaseRestHandler {
@Inject @Inject
public RestPutRoleAction(Settings settings, RestController controller) { public RestPutRoleAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/role/{name}", this); controller.registerHandler(POST, "/_xpack/security/role/{name}", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/role/{name}", this); controller.registerHandler(PUT, "/_xpack/security/role/{name}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(POST, "/_shield/role/{name}", this,
"[POST /_shield/role/{name}] is deprecated! Use " +
"[POST /_xpack/security/role/{name}] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(PUT, "/_shield/role/{name}", this,
"[PUT /_shield/role/{name}] is deprecated! Use " +
"[PUT /_xpack/security/role/{name}] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -22,6 +22,9 @@ import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.action.user.ChangePasswordResponse; import org.elasticsearch.xpack.security.action.user.ChangePasswordResponse;
import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
/** /**
*/ */
public class RestChangePasswordAction extends BaseRestHandler { public class RestChangePasswordAction extends BaseRestHandler {
@ -32,10 +35,10 @@ public class RestChangePasswordAction extends BaseRestHandler {
public RestChangePasswordAction(Settings settings, RestController controller, SecurityContext securityContext) { public RestChangePasswordAction(Settings settings, RestController controller, SecurityContext securityContext) {
super(settings); super(settings);
this.securityContext = securityContext; this.securityContext = securityContext;
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/user/{username}/_password", this); controller.registerHandler(POST, "/_xpack/security/user/{username}/_password", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/user/{username}/_password", this); controller.registerHandler(PUT, "/_xpack/security/user/{username}/_password", this);
controller.registerHandler(RestRequest.Method.POST, "/_xpack/security/user/_password", this); controller.registerHandler(POST, "/_xpack/security/user/_password", this);
controller.registerHandler(RestRequest.Method.PUT, "/_xpack/security/user/_password", this); controller.registerHandler(PUT, "/_xpack/security/user/_password", this);
} }
@Override @Override

View File

@ -17,11 +17,12 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestBuilderListener;
import org.elasticsearch.xpack.security.action.user.DeleteUserRequest;
import org.elasticsearch.xpack.security.action.user.DeleteUserRequestBuilder; import org.elasticsearch.xpack.security.action.user.DeleteUserRequestBuilder;
import org.elasticsearch.xpack.security.action.user.DeleteUserResponse; import org.elasticsearch.xpack.security.action.user.DeleteUserResponse;
import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
/** /**
* Rest action to delete a user from the security index * Rest action to delete a user from the security index
*/ */
@ -30,7 +31,13 @@ public class RestDeleteUserAction extends BaseRestHandler {
@Inject @Inject
public RestDeleteUserAction(Settings settings, RestController controller) { public RestDeleteUserAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.DELETE, "/_xpack/security/user/{username}", this); controller.registerHandler(DELETE, "/_xpack/security/user/{username}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(DELETE, "/_shield/user/{username}", this,
"[DELETE /_shield/user/{username}] is deprecated! Use " +
"[DELETE /_xpack/security/user/{username}] instead.",
deprecationLogger);
} }
@Override @Override

View File

@ -9,7 +9,6 @@ import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.BytesRestResponse;
@ -23,6 +22,8 @@ import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.action.user.GetUsersResponse; import org.elasticsearch.xpack.security.action.user.GetUsersResponse;
import org.elasticsearch.xpack.security.client.SecurityClient; import org.elasticsearch.xpack.security.client.SecurityClient;
import static org.elasticsearch.rest.RestRequest.Method.GET;
/** /**
* Rest action to retrieve a user from the security index * Rest action to retrieve a user from the security index
*/ */
@ -31,8 +32,18 @@ public class RestGetUsersAction extends BaseRestHandler {
@Inject @Inject
public RestGetUsersAction(Settings settings, RestController controller) { public RestGetUsersAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/user/", this); controller.registerHandler(GET, "/_xpack/security/user/", this);
controller.registerHandler(RestRequest.Method.GET, "/_xpack/security/user/{username}", this); controller.registerHandler(GET, "/_xpack/security/user/{username}", this);
// @deprecated: Remove in 6.0
controller.registerAsDeprecatedHandler(GET, "/_shield/user", this,
"[GET /_shield/user] is deprecated! Use " +
"[GET /_xpack/security/user] instead.",
deprecationLogger);
controller.registerAsDeprecatedHandler(GET, "/_shield/user/{username}", this,
"[GET /_shield/user/{username}] is deprecated! Use " +
"[GET /_xpack/security/user/{username}] instead.",
deprecationLogger);
} }
@Override @Override

Some files were not shown because too many files have changed in this diff.