Merge branch 'master' into license-checking/graph-ux
Original commit: elastic/x-pack-elasticsearch@0e769a1352
Commit: 88a03c6cbe

@@ -13,11 +13,11 @@ import org.joda.time.format.ISODateTimeFormat;
 
 public class DateUtils {
 
-    private final static FormatDateTimeFormatter formatDateOnlyFormatter = Joda.forPattern("yyyy-MM-dd");
+    private static final FormatDateTimeFormatter formatDateOnlyFormatter = Joda.forPattern("yyyy-MM-dd");
 
-    private final static DateTimeFormatter dateOnlyFormatter = formatDateOnlyFormatter.parser().withZoneUTC();
+    private static final DateTimeFormatter dateOnlyFormatter = formatDateOnlyFormatter.parser().withZoneUTC();
 
-    private final static DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateTime().withZoneUTC();
+    private static final DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateTime().withZoneUTC();
 
     public static long endOfTheDay(String date) {
         try {

@@ -29,9 +29,9 @@ import java.util.Locale;
  * Provides serialization/deserialization & validation methods for license object
  */
 public class License implements ToXContent {
-    public final static int VERSION_START = 1;
-    public final static int VERSION_NO_FEATURE_TYPE = 2;
-    public final static int VERSION_CURRENT = VERSION_NO_FEATURE_TYPE;
+    public static final int VERSION_START = 1;
+    public static final int VERSION_NO_FEATURE_TYPE = 2;
+    public static final int VERSION_CURRENT = VERSION_NO_FEATURE_TYPE;
 
     /**
      * XContent param name to deserialize license(s) with
@@ -51,7 +51,7 @@ public class License implements ToXContent {
      */
     public static final String LICENSE_VERSION_MODE = "license_version";
 
-    public final static Comparator<License> LATEST_ISSUE_DATE_FIRST = new Comparator<License>() {
+    public static final Comparator<License> LATEST_ISSUE_DATE_FIRST = new Comparator<License>() {
         @Override
         public int compare(License right, License left) {
             return Long.compare(left.issueDate(), right.issueDate());
@@ -506,7 +506,7 @@ public class License implements ToXContent {
         return result;
     }
 
-    public final static class Fields {
+    public static final class Fields {
         public static final String STATUS = "status";
         public static final String UID = "uid";
         public static final String TYPE = "type";

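Note: the LATEST_ISSUE_DATE_FIRST comparator above sorts descending by swapping its parameters before delegating to Long.compare. A pure-JDK sketch of the same trick (class and variable names here are illustrative, not part of the commit):

    import java.util.Arrays;
    import java.util.Comparator;

    public class LatestFirstSketch {
        public static void main(String[] args) {
            Long[] issueDates = { 10L, 30L, 20L };
            // compare(right, left) delegating to Long.compare(left, right)
            // inverts the natural order, i.e. newest issue date first
            Comparator<Long> latestFirst = (right, left) -> Long.compare(left, right);
            Arrays.sort(issueDates, latestFirst);
            System.out.println(Arrays.toString(issueDates)); // [30, 20, 10]
        }
    }
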
@@ -5,6 +5,8 @@
  */
 package org.elasticsearch.license.core;
 
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefIterator;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -51,7 +53,11 @@ public class LicenseVerifier {
             license.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
             Signature rsa = Signature.getInstance("SHA512withRSA");
             rsa.initVerify(CryptUtils.readEncryptedPublicKey(encryptedPublicKeyData));
-            rsa.update(contentBuilder.bytes().toBytes());
+            BytesRefIterator iterator = contentBuilder.bytes().iterator();
+            BytesRef ref;
+            while((ref = iterator.next()) != null) {
+                rsa.update(ref.bytes, ref.offset, ref.length);
+            }
             return rsa.verify(signedContent)
                     && Arrays.equals(Base64.getEncoder().encode(encryptedPublicKeyData), signatureHash);
         } catch (IOException | NoSuchAlgorithmException | SignatureException | InvalidKeyException e) {

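Note: the hunk above replaces one monolithic rsa.update(...) with a loop over the chunks of the underlying BytesReference, avoiding a full copy of the content. This works because java.security.Signature accumulates updates; a self-contained sketch of that equivalence (illustrative only, not the commit's code):

    import java.nio.charset.StandardCharsets;
    import java.security.KeyPair;
    import java.security.KeyPairGenerator;
    import java.security.Signature;

    public class ChunkedSignatureSketch {
        public static void main(String[] args) throws Exception {
            KeyPair keys = KeyPairGenerator.getInstance("RSA").generateKeyPair();
            byte[] payload = "license-content".getBytes(StandardCharsets.UTF_8);

            Signature signer = Signature.getInstance("SHA512withRSA");
            signer.initSign(keys.getPrivate());
            signer.update(payload); // one-shot update while signing
            byte[] signature = signer.sign();

            Signature verifier = Signature.getInstance("SHA512withRSA");
            verifier.initVerify(keys.getPublic());
            for (int i = 0; i < payload.length; i += 4) { // chunked update while verifying
                verifier.update(payload, i, Math.min(4, payload.length - i));
            }
            System.out.println(verifier.verify(signature)); // prints: true
        }
    }
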
@@ -25,9 +25,9 @@ import static org.hamcrest.core.IsEqual.equalTo;
 
 public class TestUtils {
 
-    private final static FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
-    private final static DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
-    private final static DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
+    private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
+    private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
+    private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
 
     public static String dateMathString(String time, final long now) {
         return dateTimeFormatter.print(dateMathParser.parse(time, new Callable<Long>() {

@@ -5,6 +5,9 @@
  */
 package org.elasticsearch.license.licensor;
 
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefIterator;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -30,7 +33,7 @@ import java.util.Collections;
  */
 public class LicenseSigner {
 
-    private final static int MAGIC_LENGTH = 13;
+    private static final int MAGIC_LENGTH = 13;
 
     private final Path publicKeyPath;
 
@@ -55,7 +58,11 @@ public class LicenseSigner {
         try {
             final Signature rsa = Signature.getInstance("SHA512withRSA");
             rsa.initSign(CryptUtils.readEncryptedPrivateKey(Files.readAllBytes(privateKeyPath)));
-            rsa.update(contentBuilder.bytes().toBytes());
+            final BytesRefIterator iterator = contentBuilder.bytes().iterator();
+            BytesRef ref;
+            while((ref = iterator.next()) != null) {
+                rsa.update(ref.bytes, ref.offset, ref.length);
+            }
             signedContent = rsa.sign();
         } catch (InvalidKeyException | IOException | NoSuchAlgorithmException | SignatureException e) {
             throw new IllegalStateException(e);

@@ -10,7 +10,7 @@ import joptsimple.OptionSpec;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
 import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
 
@@ -54,9 +54,9 @@ public class KeyPairGeneratorTool extends Command {
         Path publicKeyPath = parsePath(publicKeyPathOption.value(options));
         Path privateKeyPath = parsePath(privateKeyPathOption.value(options));
         if (Files.exists(privateKeyPath)) {
-            throw new UserError(ExitCodes.USAGE, privateKeyPath + " already exists");
+            throw new UserException(ExitCodes.USAGE, privateKeyPath + " already exists");
         } else if (Files.exists(publicKeyPath)) {
-            throw new UserError(ExitCodes.USAGE, publicKeyPath + " already exists");
+            throw new UserException(ExitCodes.USAGE, publicKeyPath + " already exists");
         }
 
         SecureRandom random = new SecureRandom();

@@ -12,7 +12,7 @@ import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
@@ -62,9 +62,9 @@ public class LicenseGeneratorTool extends Command {
         Path publicKeyPath = parsePath(publicKeyPathOption.value(options));
         Path privateKeyPath = parsePath(privateKeyPathOption.value(options));
         if (Files.exists(privateKeyPath) == false) {
-            throw new UserError(ExitCodes.USAGE, privateKeyPath + " does not exist");
+            throw new UserException(ExitCodes.USAGE, privateKeyPath + " does not exist");
         } else if (Files.exists(publicKeyPath) == false) {
-            throw new UserError(ExitCodes.USAGE, publicKeyPath + " does not exist");
+            throw new UserException(ExitCodes.USAGE, publicKeyPath + " does not exist");
         }
 
         final License licenseSpec;
@@ -73,11 +73,11 @@ public class LicenseGeneratorTool extends Command {
         } else if (options.has(licenseFileOption)) {
             Path licenseSpecPath = parsePath(licenseFileOption.value(options));
             if (Files.exists(licenseSpecPath) == false) {
-                throw new UserError(ExitCodes.USAGE, licenseSpecPath + " does not exist");
+                throw new UserException(ExitCodes.USAGE, licenseSpecPath + " does not exist");
             }
             licenseSpec = License.fromSource(Files.readAllBytes(licenseSpecPath));
         } else {
-            throw new UserError(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
+            throw new UserException(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
         }
 
         // sign

@@ -12,7 +12,7 @@ import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
@@ -49,7 +49,7 @@ public class LicenseVerificationTool extends Command {
     protected void execute(Terminal terminal, OptionSet options) throws Exception {
         Path publicKeyPath = parsePath(publicKeyPathOption.value(options));
         if (Files.exists(publicKeyPath) == false) {
-            throw new UserError(ExitCodes.USAGE, publicKeyPath + " does not exist");
+            throw new UserException(ExitCodes.USAGE, publicKeyPath + " does not exist");
         }
 
         final License licenseSpec;
@@ -58,16 +58,16 @@ public class LicenseVerificationTool extends Command {
         } else if (options.has(licenseFileOption)) {
             Path licenseSpecPath = parsePath(licenseFileOption.value(options));
             if (Files.exists(licenseSpecPath) == false) {
-                throw new UserError(ExitCodes.USAGE, licenseSpecPath + " does not exist");
+                throw new UserException(ExitCodes.USAGE, licenseSpecPath + " does not exist");
             }
             licenseSpec = License.fromSource(Files.readAllBytes(licenseSpecPath));
         } else {
-            throw new UserError(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
+            throw new UserException(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
        }
 
         // verify
         if (LicenseVerifier.verifyLicense(licenseSpec, Files.readAllBytes(publicKeyPath)) == false) {
-            throw new UserError(ExitCodes.DATA_ERROR, "Invalid License!");
+            throw new UserException(ExitCodes.DATA_ERROR, "Invalid License!");
         }
         XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
         builder.startObject();

@@ -36,9 +36,9 @@ public class TestUtils {
     public static final String PUBLIC_KEY_RESOURCE = "/public.key";
     public static final String PRIVATE_KEY_RESOURCE = "/private.key";
 
-    private final static FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
-    private final static DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
-    private final static DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
+    private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
+    private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
+    private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
 
     public static String dumpLicense(License license) throws Exception {
         XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);

@@ -11,9 +11,7 @@ import java.nio.file.Path;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.CommandTestCase;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
 import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.cli.UserException;
 
 import static org.hamcrest.CoreMatchers.containsString;
 
@@ -27,12 +25,12 @@ public class KeyPairGenerationToolTests extends CommandTestCase {
     public void testMissingKeyPaths() throws Exception {
         Path exists = createTempFile("", "existing");
         Path dne = createTempDir().resolve("dne");
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", exists.toString(), "--privateKeyPath", dne.toString());
         });
         assertThat(e.getMessage(), containsString("existing"));
         assertEquals(ExitCodes.USAGE, e.exitCode);
-        e = expectThrows(UserError.class, () -> {
+        e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", dne.toString(), "--privateKeyPath", exists.toString());
         });
         assertThat(e.getMessage(), containsString("existing"));

@@ -12,12 +12,9 @@ import java.nio.file.Path;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.CommandTestCase;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
-import org.elasticsearch.cli.MockTerminal;
-import org.elasticsearch.cli.Terminal;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.license.core.License;
 import org.elasticsearch.license.licensor.TestUtils;
-import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
 
 public class LicenseGenerationToolTests extends CommandTestCase {
@@ -38,14 +35,14 @@ public class LicenseGenerationToolTests extends CommandTestCase {
     public void testMissingKeyPaths() throws Exception {
         Path pub = createTempDir().resolve("pub");
         Path pri = createTempDir().resolve("pri");
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pub.toString(), "--privateKeyPath", pri.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("pri does not exist"));
         assertEquals(ExitCodes.USAGE, e.exitCode);
 
         Files.createFile(pri);
-        e = expectThrows(UserError.class, () -> {
+        e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pub.toString(), "--privateKeyPath", pri.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("pub does not exist"));
@@ -53,7 +50,7 @@ public class LicenseGenerationToolTests extends CommandTestCase {
     }
 
     public void testMissingLicenseSpec() throws Exception {
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pubKeyPath.toString(), "--privateKeyPath", priKeyPath.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("Must specify either --license or --licenseFile"));

@@ -12,7 +12,7 @@ import java.nio.file.Path;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.CommandTestCase;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.license.core.License;
 import org.elasticsearch.license.licensor.TestUtils;
@@ -36,7 +36,7 @@ public class LicenseVerificationToolTests extends CommandTestCase {
 
     public void testMissingKeyPath() throws Exception {
         Path pub = createTempDir().resolve("pub");
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pub.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("pub does not exist"));
@@ -44,7 +44,7 @@ public class LicenseVerificationToolTests extends CommandTestCase {
     }
 
     public void testMissingLicenseSpec() throws Exception {
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pubKeyPath.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("Must specify either --license or --licenseFile"));
@@ -56,7 +56,7 @@ public class LicenseVerificationToolTests extends CommandTestCase {
         License tamperedLicense = License.builder()
                 .fromLicenseSpec(signedLicense, signedLicense.signature())
                 .expiryDate(signedLicense.expiryDate() + randomIntBetween(1, 1000)).build();
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pubKeyPath.toString(),
                     "--license", TestUtils.dumpLicense(tamperedLicense));
         });

@@ -14,18 +14,18 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.GeneralScriptException;
+import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.watcher.condition.Condition;
+import org.elasticsearch.xpack.common.ScriptServiceProxy;
 import org.elasticsearch.xpack.watcher.condition.script.ExecutableScriptCondition;
 import org.elasticsearch.xpack.watcher.condition.script.ScriptCondition;
 import org.elasticsearch.xpack.watcher.condition.script.ScriptConditionFactory;
 import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
 import org.elasticsearch.xpack.watcher.support.Script;
-import org.elasticsearch.xpack.common.ScriptServiceProxy;
 import org.elasticsearch.xpack.watcher.watch.Payload;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -41,12 +41,10 @@ import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument
 import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.mockExecutionContext;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 
-/**
- */
 public class ScriptConditionTests extends ESTestCase {
-    ThreadPool tp = null;
+    private ThreadPool tp = null;
 
     @Before
     public void init() {
@@ -54,8 +52,8 @@ public class ScriptConditionTests extends ESTestCase {
     }
 
     @After
-    public void cleanup() {
-        tp.shutdownNow();
+    public void cleanup() throws InterruptedException {
+        terminate(tp);
     }
 
     public void testExecute() throws Exception {
@@ -136,13 +134,8 @@ public class ScriptConditionTests extends ESTestCase {
         XContentParser parser = XContentFactory.xContent(builder.bytes()).createParser(builder.bytes());
         parser.nextToken();
         ScriptCondition scriptCondition = conditionParser.parseCondition("_watch", parser);
-        try {
-            conditionParser.createExecutable(scriptCondition);
-            fail("expected a condition validation exception trying to create an executable with a bad or missing script");
-        } catch (GeneralScriptException e) {
-            // TODO add these when the test if fixed
-            // assertThat(e.getMessage(), is("ASDF"));
-        }
+        GeneralScriptException exception = expectThrows(GeneralScriptException.class,
+                () -> conditionParser.createExecutable(scriptCondition));
     }
 
     public void testScriptConditionParser_badLang() throws Exception {
@@ -153,39 +146,30 @@ public class ScriptConditionTests extends ESTestCase {
         XContentParser parser = XContentFactory.xContent(builder.bytes()).createParser(builder.bytes());
         parser.nextToken();
         ScriptCondition scriptCondition = conditionParser.parseCondition("_watch", parser);
-        try {
-            conditionParser.createExecutable(scriptCondition);
-            fail("expected a condition validation exception trying to create an executable with an invalid language");
-        } catch (GeneralScriptException e) {
-            // TODO add these when the test if fixed
-            // assertThat(e.getMessage(), is("ASDF"));
-        }
+        GeneralScriptException exception = expectThrows(GeneralScriptException.class,
+                () -> conditionParser.createExecutable(scriptCondition));
+        assertThat(exception.getMessage(), containsString("script_lang not supported [not_a_valid_lang]]"));
     }
 
     public void testScriptConditionThrowException() throws Exception {
         ScriptServiceProxy scriptService = getScriptServiceProxy(tp);
         ExecutableScriptCondition condition = new ExecutableScriptCondition(
-                new ScriptCondition(Script.inline("assert false").build()), logger, scriptService);
+                new ScriptCondition(Script.inline("null.foo").build()), logger, scriptService);
         SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]);
         WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response));
-        ScriptCondition.Result result = condition.execute(ctx);
-        assertThat(result, notNullValue());
-        assertThat(result.status(), is(Condition.Result.Status.FAILURE));
-        assertThat(result.reason(), notNullValue());
-        assertThat(result.reason(), containsString("Assertion"));
+        ScriptException exception = expectThrows(ScriptException.class, () -> condition.execute(ctx));
+        assertThat(exception.getMessage(), containsString("Error evaluating null.foo"));
     }
 
-    public void testScriptConditionReturnObject() throws Exception {
+    public void testScriptConditionReturnObjectThrowsException() throws Exception {
         ScriptServiceProxy scriptService = getScriptServiceProxy(tp);
         ExecutableScriptCondition condition = new ExecutableScriptCondition(
                 new ScriptCondition(Script.inline("return new Object()").build()), logger, scriptService);
         SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]);
         WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response));
-        ScriptCondition.Result result = condition.execute(ctx);
-        assertThat(result, notNullValue());
-        assertThat(result.status(), is(Condition.Result.Status.FAILURE));
-        assertThat(result.reason(), notNullValue());
-        assertThat(result.reason(), containsString("ScriptException"));
+        Exception exception = expectThrows(GeneralScriptException.class, () -> condition.execute(ctx));
+        assertThat(exception.getMessage(),
+                containsString("condition [script] must return a boolean value (true|false) but instead returned [_name]"));
     }
 
     public void testScriptConditionAccessCtx() throws Exception {

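Note: several hunks above migrate try/fail/catch blocks to expectThrows. That helper comes from the Elasticsearch test framework; a minimal stand-in showing what such a helper does (illustrative only, not the framework's actual code):

    public class ExpectThrowsSketch {

        interface ThrowingRunnable {
            void run() throws Exception;
        }

        // Runs the code, asserts the expected exception type, and returns the
        // exception so the caller can make further assertions on its message.
        static <T extends Exception> T expectThrows(Class<T> expected, ThrowingRunnable code) {
            try {
                code.run();
            } catch (Exception e) {
                if (expected.isInstance(e)) {
                    return expected.cast(e);
                }
                throw new AssertionError("expected " + expected.getName() + " but got " + e, e);
            }
            throw new AssertionError("expected " + expected.getName() + " but nothing was thrown");
        }

        public static void main(String[] args) {
            IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
                throw new IllegalStateException("boom");
            });
            System.out.println(e.getMessage()); // prints: boom
        }
    }
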
@@ -93,7 +93,7 @@ public class SearchInputIT extends ESIntegTestCase {
         return types;
     }
 
-    private final static String TEMPLATE_QUERY = "{\"query\":{\"bool\":{\"must\":{\"match\":{\"event_type\":{\"query\":\"a\"," +
+    private static final String TEMPLATE_QUERY = "{\"query\":{\"bool\":{\"must\":{\"match\":{\"event_type\":{\"query\":\"a\"," +
             "\"type\":\"boolean\"}}},\"filter\":{\"range\":{\"_timestamp\":" +
             "{\"from\":\"{{ctx.trigger.scheduled_time}}||-{{seconds_param}}\",\"to\":\"{{ctx.trigger.scheduled_time}}\"," +
            "\"include_lower\":true,\"include_upper\":true}}}}}}";

@@ -0,0 +1,32 @@
+apply plugin: 'elasticsearch.rest-test'
+
+dependencies {
+  testCompile project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'runtime')
+}
+
+integTest {
+  cluster {
+    setting 'script.inline', 'true'
+    plugin 'x-pack', project(':x-plugins:elasticsearch:x-pack')
+    extraConfigFile 'x-pack/roles.yml', 'roles.yml'
+    [
+      test_admin: 'superuser',
+      transport_user: 'superuser',
+      existing: 'superuser',
+      bob: 'actual_role'
+    ].each { String user, String role ->
+      setupCommand 'setupUser#' + user,
+                   'bin/x-pack/users', 'useradd', user, '-p', 'changeme', '-r', role
+    }
+    waitCondition = { node, ant ->
+      File tmpFile = new File(node.cwd, 'wait.success')
+      ant.get(src: "http://${node.httpUri()}",
+              dest: tmpFile.toString(),
+              username: 'test_admin',
+              password: 'changeme',
+              ignoreerrors: true,
+              retries: 10)
+      return tmpFile.exists()
+    }
+  }
+}

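Note: the waitCondition closure above polls the node over HTTP with basic auth until it responds. A rough plain-Java equivalent of the same polling idea (the URL, credentials, and retry count mirror the build script but are illustrative):

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class WaitForCluster {
        public static void main(String[] args) throws Exception {
            String auth = Base64.getEncoder()
                    .encodeToString("test_admin:changeme".getBytes(StandardCharsets.UTF_8));
            for (int attempt = 0; attempt < 10; attempt++) {
                try {
                    HttpURLConnection conn =
                            (HttpURLConnection) new URL("http://localhost:9200").openConnection();
                    conn.setRequestProperty("Authorization", "Basic " + auth);
                    if (conn.getResponseCode() == 200) {
                        System.out.println("cluster is up");
                        return;
                    }
                } catch (IOException e) {
                    // node not listening yet; fall through and retry
                }
                Thread.sleep(1000);
            }
            System.out.println("cluster did not come up");
        }
    }
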
@@ -0,0 +1,17 @@
+actual_role:
+  run_as: [ "joe" ]
+  cluster:
+    - monitor
+  indices:
+    - names: [ "index1", "index2" ]
+      privileges: [ "read", "write", "create_index", "indices:admin/refresh" ]
+      fields:
+        - foo
+        - bar
+      query:
+        bool:
+          must_not:
+            match:
+              hidden: true
+    - names: "*"
+      privileges: [ "read" ]

@@ -0,0 +1,123 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cli.MockTerminal;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.xpack.security.SecurityTemplateService;
+import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
+import org.elasticsearch.xpack.security.action.user.GetUsersResponse;
+import org.elasticsearch.xpack.security.action.user.PutUserResponse;
+import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool;
+import org.elasticsearch.xpack.security.authc.support.SecuredString;
+import org.elasticsearch.xpack.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.security.client.SecurityClient;
+import org.elasticsearch.xpack.security.user.User;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+import static org.hamcrest.Matchers.containsString;
+
+/**
+ * Integration tests for the {@code migrate} shell command
+ */
+public class MigrateToolIT extends MigrateToolTestCase {
+
+    @Before
+    public void setupUpTest() throws Exception {
+        Client client = getClient();
+        SecurityClient c = new SecurityClient(client);
+
+        // Add an existing user so the tool will skip it
+        PutUserResponse pur = c.preparePutUser("existing", "s3kirt".toCharArray(), "role1", "user").get();
+        assertTrue(pur.created());
+    }
+
+    private static String[] args(String command) {
+        if (!Strings.hasLength(command)) {
+            return Strings.EMPTY_ARRAY;
+        }
+        return command.split("\\s+");
+    }
+
+    public void testRunMigrateTool() throws Exception {
+        Settings settings = Settings.builder()
+                .put("path.home", createTempDir().toAbsolutePath().toString())
+                .build();
+        String integHome = System.getProperty("tests.config.dir");
+        logger.info("--> HOME: {}", integHome);
+        // Cluster should already be up
+        String url = "http://" + getHttpURL();
+        logger.info("--> using URL: {}", url);
+        MockTerminal t = new MockTerminal();
+        ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles();
+        OptionParser parser = muor.getParser();
+        OptionSet options = parser.parse("-u", "test_admin", "-p", "changeme", "-U", url, "-c", integHome);
+        muor.execute(t, options, settings.getAsMap());
+
+        logger.info("--> output:\n{}", t.getOutput());
+
+        Client client = getClient();
+        SecurityClient c = new SecurityClient(client);
+
+        // Check that the migrated user can be retrieved
+        GetUsersResponse resp = c.prepareGetUsers("bob").get();
+        assertTrue("user 'bob' should exist", resp.hasUsers());
+        User bob = resp.users()[0];
+        assertEquals(bob.principal(), "bob");
+        assertArrayEquals(bob.roles(), new String[]{"actual_role"});
+
+        // Make sure the existing user did not change
+        resp = c.prepareGetUsers("existing").get();
+        assertTrue("user should exist", resp.hasUsers());
+        User existing = resp.users()[0];
+        assertEquals(existing.principal(), "existing");
+        assertArrayEquals(existing.roles(), new String[]{"role1", "user"});
+
+        // Make sure the "actual_role" made it in and is correct
+        GetRolesResponse roleResp = c.prepareGetRoles().names("actual_role").get();
+        assertTrue("role should exist", roleResp.hasRoles());
+        RoleDescriptor rd = roleResp.roles()[0];
+        assertNotNull(rd);
+        assertEquals(rd.getName(), "actual_role");
+        assertArrayEquals(rd.getClusterPrivileges(), new String[]{"monitor"});
+        assertArrayEquals(rd.getRunAs(), new String[]{"joe"});
+        RoleDescriptor.IndicesPrivileges[] ips = rd.getIndicesPrivileges();
+        assertEquals(ips.length, 2);
+        for (RoleDescriptor.IndicesPrivileges ip : ips) {
+            if (Arrays.equals(ip.getIndices(), new String[]{"index1", "index2"})) {
+                assertArrayEquals(ip.getPrivileges(), new String[]{"read", "write", "create_index", "indices:admin/refresh"});
+                assertArrayEquals(ip.getFields(), new String[]{"foo", "bar"});
+                assertNotNull(ip.getQuery());
+                assertThat(ip.getQuery().utf8ToString(), containsString("{\"bool\":{\"must_not\":{\"match\":{\"hidden\":true}}}}"));
+            } else {
+                assertArrayEquals(ip.getIndices(), new String[]{"*"});
+                assertArrayEquals(ip.getPrivileges(), new String[]{"read"});
+                assertArrayEquals(ip.getFields(), null);
+                assertNull(ip.getQuery());
+            }
+        }
+
+        // Check that bob can access the things the "actual_role" says he can
+        String token = basicAuthHeaderValue("bob", new SecuredString("changeme".toCharArray()));
+        // Create "index1" index and try to search from it as "bob"
+        client.filterWithHeader(Collections.singletonMap("Authorization", token)).admin().indices().prepareCreate("index1").get();
+        SearchResponse searchResp = client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get();
+    }
+}

@@ -0,0 +1,177 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.InetSocketTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.node.internal.InternalSettingsPreparer;
+import org.elasticsearch.xpack.security.Security;
+import org.elasticsearch.xpack.XPackPlugin;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.nio.file.Path;
+import java.util.Locale;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
+import static org.hamcrest.Matchers.notNullValue;
+
+/**
+ * {@link MigrateToolTestCase} is an abstract base class to run integration
+ * tests against an external Elasticsearch Cluster.
+ * <p>
+ * You can define a list of transport addresses from where you can reach your cluster
+ * by setting "tests.cluster" system property. It defaults to "localhost:9300".
+ * <p>
+ * All tests can be run from maven using mvn install as maven will start an external cluster first.
+ * <p>
+ * If you want to debug this module from your IDE, then start an external cluster by yourself
+ * then run JUnit. If you changed the default port, set "tests.cluster=localhost:PORT" when running
+ * your test.
+ */
+@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
+public abstract class MigrateToolTestCase extends LuceneTestCase {
+
+    /**
+     * Key used to eventually switch to using an external cluster and provide its transport addresses
+     */
+    public static final String TESTS_CLUSTER = "tests.cluster";
+
+    /**
+     * Key used to eventually switch to using an external cluster and provide its transport addresses
+     */
+    public static final String TESTS_HTTP_CLUSTER = "tests.rest.cluster";
+
+    /**
+     * Defaults to localhost:9300
+     */
+    public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300";
+
+    protected static final ESLogger logger = ESLoggerFactory.getLogger(MigrateToolTestCase.class.getName());
+
+    private static final AtomicInteger counter = new AtomicInteger();
+    private static Client client;
+    private static String clusterAddresses;
+    private static String clusterHttpAddresses;
+
+    private static Client startClient(Path tempDir, TransportAddress... transportAddresses) {
+        logger.info("--> Starting Elasticsearch Java TransportClient {}, {}", transportAddresses, tempDir);
+
+        Settings clientSettings = Settings.builder()
+                .put("cluster.name", "qa_migrate_tests_" + counter.getAndIncrement())
+                .put("client.transport.ignore_cluster_name", true)
+                .put("path.home", tempDir)
+                .put(Security.USER_SETTING.getKey(), "transport_user:changeme")
+                .put("node.mode", "network") // we require network here!
+                .build();
+
+        TransportClient.Builder transportClientBuilder = TransportClient.builder()
+                .addPlugin(XPackPlugin.class)
+                .settings(clientSettings);
+        TransportClient client = transportClientBuilder.build().addTransportAddresses(transportAddresses);
+
+        logger.info("--> Elasticsearch Java TransportClient started");
+
+        Exception clientException = null;
+        try {
+            ClusterHealthResponse health = client.admin().cluster().prepareHealth().get();
+            logger.info("--> connected to [{}] cluster which is running [{}] node(s).",
+                    health.getClusterName(), health.getNumberOfNodes());
+        } catch (Exception e) {
+            clientException = e;
+        }
+
+        assumeNoException("Sounds like your cluster is not running at " + clusterAddresses, clientException);
+
+        return client;
+    }
+
+    private static Client startClient() throws UnknownHostException {
+        String[] stringAddresses = clusterAddresses.split(",");
+        TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length];
+        int i = 0;
+        for (String stringAddress : stringAddresses) {
+            int lastColon = stringAddress.lastIndexOf(":");
+            if (lastColon == -1) {
+                throw new IllegalArgumentException("address [" + clusterAddresses + "] not valid");
+            }
+            String ip = stringAddress.substring(0, lastColon);
+            String port = stringAddress.substring(lastColon + 1);
+            try {
+                transportAddresses[i++] = new InetSocketTransportAddress(InetAddress.getByName(ip), Integer.valueOf(port));
+            } catch (NumberFormatException e) {
+                throw new IllegalArgumentException("port is not valid, expected number but was [" + port + "]");
+            }
+        }
+        return startClient(createTempDir(), transportAddresses);
+    }
+
+    public static Client getClient() {
+        if (client == null) {
+            try {
+                client = startClient();
+            } catch (UnknownHostException e) {
+                logger.error("could not start the client", e);
+            }
+            assertThat(client, notNullValue());
+        }
+        return client;
+    }
+
+    public static String getHttpURL() {
+        return clusterHttpAddresses;
+    }
+
+    @BeforeClass
+    public static void initializeSettings() throws UnknownHostException {
+        String port = System.getProperty("integ.http.port");
+        clusterAddresses = System.getProperty(TESTS_CLUSTER);
+        clusterHttpAddresses = System.getProperty(TESTS_HTTP_CLUSTER);
+        if (clusterAddresses == null || clusterAddresses.isEmpty()) {
+            throw new UnknownHostException("unable to get a cluster address");
+        }
+    }
+
+    @AfterClass
+    public static void stopTransportClient() {
+        if (client != null) {
+            client.close();
+            client = null;
+        }
+    }
+
+    @Before
+    public void defineIndexName() {
+        doClean();
+    }
+
+    @After
+    public void cleanIndex() {
+        doClean();
+    }
+
+    private void doClean() {
+        if (client != null) {
+            try {
+                client.admin().indices().prepareDelete("_all").get();
+            } catch (Exception e) {
+                // We ignore this cleanup exception
+            }
+        }
+    }
+}

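Note: startClient() above splits each "host:port" entry on the last colon rather than the first. A pure-JDK sketch of why that matters (the address value is illustrative):

    public class AddressParseSketch {
        public static void main(String[] args) {
            String stringAddress = "localhost:9300";
            // lastIndexOf keeps the parse correct even if the host part
            // itself contains colons (e.g. an IPv6 literal)
            int lastColon = stringAddress.lastIndexOf(':');
            if (lastColon == -1) {
                throw new IllegalArgumentException("address [" + stringAddress + "] not valid");
            }
            String host = stringAddress.substring(0, lastColon);
            int port = Integer.parseInt(stringAddress.substring(lastColon + 1));
            System.out.println(host + " -> port " + port); // localhost -> port 9300
        }
    }
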
@@ -22,8 +22,8 @@ import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordTok
 
 public class GraphWithSecurityIT extends ESRestTestCase {
 
-    private final static String TEST_ADMIN_USERNAME = "test_admin";
-    private final static String TEST_ADMIN_PASSWORD = "changeme";
+    private static final String TEST_ADMIN_USERNAME = "test_admin";
+    private static final String TEST_ADMIN_PASSWORD = "changeme";
 
     public GraphWithSecurityIT(@Name("yaml") RestTestCandidate testCandidate) {
         super(testCandidate);

@@ -27,8 +27,8 @@ import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordTok
 
 public class WatcherWithSecurityIT extends ESRestTestCase {
 
-    private final static String TEST_ADMIN_USERNAME = "test_admin";
-    private final static String TEST_ADMIN_PASSWORD = "changeme";
+    private static final String TEST_ADMIN_USERNAME = "test_admin";
+    private static final String TEST_ADMIN_PASSWORD = "changeme";
 
     public WatcherWithSecurityIT(@Name("yaml") RestTestCandidate testCandidate) {
         super(testCandidate);

@@ -420,7 +420,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
     // weakest weights.
     // A priority queue is used to trim vertices according to the size settings
     // requested for each field.
-    private final void trimNewAdditions(Hop currentHop, ArrayList<Connection> newConnections, ArrayList<Vertex> newVertices) {
+    private void trimNewAdditions(Hop currentHop, ArrayList<Connection> newConnections, ArrayList<Vertex> newVertices) {
         Set<Vertex> evictions = new HashSet<>();
 
         for (int k = 0; k < currentHop.getNumberVertexRequests(); k++) {
@@ -460,7 +460,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
     // we can do something server-side here
 
     // Helper method - compute the total signal of all scores in the search results
-    private final double getExpandTotalSignalStrength(Hop lastHop, Hop currentHop, Sampler sample) {
+    private double getExpandTotalSignalStrength(Hop lastHop, Hop currentHop, Sampler sample) {
         double totalSignalOutput = 0;
         for (int j = 0; j < lastHop.getNumberVertexRequests(); j++) {
             VertexRequest lastVr = lastHop.getVertexRequest(j);
@@ -509,7 +509,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });
@@ -688,7 +688,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
     }
 
     // Helper method - Provides a total signal strength for all terms connected to the initial query
-    private final double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) {
+    private double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) {
         double totalSignalStrength = 0;
         for (int i = 0; i < rootHop.getNumberVertexRequests(); i++) {
             if (request.useSignificance()) {
@@ -711,13 +711,13 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
                 }
 
                 @Override
-                public void onFailure(Throwable e) {
+                public void onFailure(Exception e) {
                     listener.onFailure(e);
                 }
             });
-        } catch (Throwable t) {
-            logger.error("unable to execute the graph query", t);
-            listener.onFailure(t);
+        } catch (Exception e) {
+            logger.error("unable to execute the graph query", e);
+            listener.onFailure(e);
         }
     }

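Note: the onFailure callbacks above (here and in the license transport actions below) are narrowed from Throwable to Exception so that fatal JVM Errors are never routed into ordinary failure handling. A self-contained sketch of the idea (the listener interface is illustrative, not the Elasticsearch API):

    public class ListenerSketch {

        interface ActionListener<T> {
            void onResponse(T response);
            void onFailure(Exception e); // Exception, not Throwable
        }

        static <T> void notifyFailure(ActionListener<T> listener, Throwable t) {
            if (t instanceof Exception) {
                listener.onFailure((Exception) t); // recoverable: tell the listener
            } else {
                throw new Error(t); // fatal errors propagate, never swallowed
            }
        }

        public static void main(String[] args) {
            ActionListener<String> listener = new ActionListener<String>() {
                @Override public void onResponse(String response) { System.out.println(response); }
                @Override public void onFailure(Exception e) { System.out.println("failed: " + e.getMessage()); }
            };
            notifyFailure(listener, new IllegalStateException("shard failure")); // failed: shard failure
        }
    }
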
@@ -16,7 +16,7 @@ import java.util.Map;
 
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -66,8 +66,8 @@ public class RestGraphAction extends BaseRestHandler {
     public static final ParseField TERM_FIELD = new ParseField("term");
 
     @Inject
-    public RestGraphAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
-        super(settings, client);
+    public RestGraphAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) {
+        super(settings);
         // @deprecated TODO need to add deprecation support as per https://github.com/elastic/x-plugins/issues/1760#issuecomment-217507517
         controller.registerHandler(GET, "/{index}/_graph/explore", this);
         controller.registerHandler(POST, "/{index}/_graph/explore", this);
@@ -82,7 +82,7 @@ public class RestGraphAction extends BaseRestHandler {
     }
 
     @Override
-    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException {
+    public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) throws IOException {
         GraphExploreRequest graphRequest = new GraphExploreRequest(Strings.splitStringByCommaToArray(request.param("index")));
         graphRequest.indicesOptions(IndicesOptions.fromRequest(request, graphRequest.indicesOptions()));
         graphRequest.routing(request.param("routing"));

@@ -270,11 +270,10 @@ public class GraphTests extends ESSingleNodeTestCase {
         try {
             GraphExploreResponse response = grb.get();
             if (response.getShardFailures().length > 0) {
-                throw ((ShardSearchFailure) response.getShardFailures()[0]).getCause();
+                expectedError = response.getShardFailures()[0].getCause();
             }
-        } catch (Throwable rte) {
+        } catch (Exception rte) {
             expectedError = rte;
-
         }
         assertNotNull(expectedError);
         String message = expectedError.toString();

@@ -59,7 +59,7 @@ public class TransportDeleteLicenseAction extends TransportMasterNodeAction<Dele
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });

@@ -61,7 +61,7 @@ public class TransportPutLicenseAction extends TransportMasterNodeAction<PutLice
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });

@@ -14,7 +14,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
 /**
  * A supporting base class for injectable Licensee components.
  */
-public abstract class AbstractLicenseeComponent<T extends AbstractLicenseeComponent<T>> extends AbstractLifecycleComponent<T>
+public abstract class AbstractLicenseeComponent<T extends AbstractLicenseeComponent<T>> extends AbstractLifecycleComponent
         implements Licensee {
 
     private final String id;

@@ -10,7 +10,7 @@ import org.elasticsearch.rest.RestStatus;
 
 public class LicenseUtils {
 
-    public final static String EXPIRED_FEATURE_HEADER = "es.license.expired.feature";
+    public static final String EXPIRED_FEATURE_HEADER = "es.license.expired.feature";
 
     /**
      * Exception to be thrown when a feature action requires a valid license, but license
@@ -21,7 +21,7 @@ public class LicenseUtils {
      */
     public static ElasticsearchSecurityException newComplianceException(String feature) {
         ElasticsearchSecurityException e = new ElasticsearchSecurityException("current license is non-compliant for [{}]",
-                RestStatus.UNAUTHORIZED, feature);
+                RestStatus.FORBIDDEN, feature);
         e.addHeader(EXPIRED_FEATURE_HEADER, feature);
         return e;
     }

@@ -9,6 +9,7 @@ import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -186,7 +187,7 @@ public class LicensesMetaData extends AbstractDiffable<MetaData.Custom> implemen
                 XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
                 license.toXContent(contentBuilder,
                         new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
-                streamOutput.writeString(Base64.getEncoder().encodeToString(encrypt(contentBuilder.bytes().toBytes())));
+                streamOutput.writeString(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
             }
         } else {
             if (license == LICENSE_TOMBSTONE) {
@@ -238,7 +239,7 @@ public class LicensesMetaData extends AbstractDiffable<MetaData.Custom> implemen
         return new LicensesMetaData(license);
     }
 
-    private final static class Fields {
+    private static final class Fields {
         private static final String SIGNED_LICENCES = "signed_licenses";
         private static final String TRIAL_LICENSES = "trial_licenses";
         private static final String LICENSE = "license";

@@ -79,7 +79,7 @@ import java.util.concurrent.atomic.AtomicReference;
 * Registered listeners are notified using {@link #notifyAndSchedule(LicensesMetaData)}
 */
 @Singleton
-public class LicensesService extends AbstractLifecycleComponent<LicensesService> implements ClusterStateListener, LicensesManagerService,
+public class LicensesService extends AbstractLifecycleComponent implements ClusterStateListener, LicensesManagerService,
         LicenseeRegistry {
 
     public static final String REGISTER_TRIAL_LICENSE_ACTION_NAME = "internal:plugin/license/cluster/register_trial_license";
@@ -427,8 +427,8 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
             }
 
             @Override
-            public void onFailure(String source, @Nullable Throwable t) {
-                logger.error("unexpected failure during [{}]", t, source);
+            public void onFailure(String source, @Nullable Exception e) {
+                logger.error("unexpected failure during [{}]", e, source);
             }
 
         });
@@ -643,11 +643,11 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
         }
     }
 
-    public static abstract class ExpirationCallback {
+    public abstract static class ExpirationCallback {
 
         public enum Orientation {PRE, POST}
 
-        public static abstract class Pre extends ExpirationCallback {
+        public abstract static class Pre extends ExpirationCallback {
 
             /**
              * Callback schedule prior to license expiry
@@ -677,7 +677,7 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
             }
         }
 
-        public static abstract class Post extends ExpirationCallback {
+        public abstract static class Post extends ExpirationCallback {
 
             /**
             * Callback schedule after license expiry

@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.license.plugin.core;
 
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -32,7 +33,7 @@ public class TrialLicense {
         try {
             XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
             spec.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
-            byte[] encrypt = encrypt(contentBuilder.bytes().toBytes());
+            byte[] encrypt = encrypt(BytesReference.toBytes(contentBuilder.bytes()));
             byte[] bytes = new byte[4 + 4 + encrypt.length];
             ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
             // always generate license version -VERSION_CURRENT

@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.license.plugin.rest;
 
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.license.plugin.action.delete.DeleteLicenseAction;
@@ -21,13 +21,13 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE;
 public class RestDeleteLicenseAction extends BaseRestHandler {
 
     @Inject
-    public RestDeleteLicenseAction(Settings settings, RestController controller, Client client) {
-        super(settings, client);
+    public RestDeleteLicenseAction(Settings settings, RestController controller) {
+        super(settings);
         controller.registerHandler(DELETE, "/_xpack/license", this);
     }
 
     @Override
-    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
+    public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
         client.admin().cluster().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest(), new AcknowledgedRestListener<>(channel));
     }
 }

@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.license.plugin.rest;
 
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -32,8 +32,8 @@ import static org.elasticsearch.rest.RestStatus.OK;
 public class RestGetLicenseAction extends BaseRestHandler {
 
     @Inject
-    public RestGetLicenseAction(Settings settings, RestController controller, Client client) {
-        super(settings, client);
+    public RestGetLicenseAction(Settings settings, RestController controller) {
+        super(settings);
         controller.registerHandler(GET, "/_xpack/license", this);
     }
 
@@ -44,7 +44,7 @@ public class RestGetLicenseAction extends BaseRestHandler {
      * The licenses are sorted by latest issue_date
      */
     @Override
-    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
+    public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
         final Map<String, String> overrideParams = new HashMap<>(2);
         overrideParams.put(License.REST_VIEW_MODE, "true");
         overrideParams.put(License.LICENSE_VERSION_MODE, String.valueOf(License.VERSION_CURRENT));

@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.license.plugin.rest;
 
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -28,16 +28,16 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
 public class RestPutLicenseAction extends BaseRestHandler {
 
     @Inject
-    public RestPutLicenseAction(Settings settings, RestController controller, Client client) {
-        super(settings, client);
+    public RestPutLicenseAction(Settings settings, RestController controller) {
+        super(settings);
         controller.registerHandler(PUT, "/_xpack/license", this);
         controller.registerHandler(POST, "/_xpack/license", this);
     }
 
     @Override
-    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
+    public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
         PutLicenseRequest putLicenseRequest = new PutLicenseRequest();
-        putLicenseRequest.license(request.content().toUtf8());
+        putLicenseRequest.license(request.content().utf8ToString());
         putLicenseRequest.acknowledge(request.paramAsBoolean("acknowledge", false));
         client.admin().cluster().execute(PutLicenseAction.INSTANCE, putLicenseRequest,
                 new RestBuilderListener<PutLicenseResponse>(channel) {

@@ -88,8 +88,8 @@ public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCas
            }

            @Override
            public void onFailure(String source, @Nullable Throwable t) {
                logger.error("error on metaData cleanup after test", t);
            public void onFailure(String source, @Nullable Exception e) {
                logger.error("error on metaData cleanup after test", e);
            }
        });
        latch.await();
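The hunk above is one instance of a migration that recurs throughout this commit: callback methods that used to accept a Throwable now accept an Exception, so fatal Errors can no longer be funneled through the listener path. A hedged sketch of the narrowed callback, using a generic ActionListener (the exact listener or task type varies by call site, and `logger` is assumed in scope):

    ActionListener<Void> listener = new ActionListener<Void>() {
        @Override
        public void onResponse(Void ignored) {
            // success path unchanged
        }

        @Override
        public void onFailure(Exception e) { // was: onFailure(Throwable t)
            logger.error("operation failed", e);
        }
    };
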
@@ -5,8 +5,8 @@
 */
package org.elasticsearch.license.plugin;

import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

@@ -16,7 +16,6 @@ import org.elasticsearch.license.plugin.core.LicensesStatus;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;

@@ -70,7 +69,7 @@ public class PutLicenseResponseTests extends ESTestCase {
        // write it out
        response.writeTo(output);

        ByteBufferStreamInput input = new ByteBufferStreamInput(ByteBuffer.wrap(output.bytes().toBytes()));
        StreamInput input = output.bytes().streamInput();

        // read it back in
        response.readFrom(input);
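The updated test reads the serialized bytes back without the intermediate byte[] copy: BytesReference now exposes streamInput() directly, so wrapping a copied array in a ByteBufferStreamInput is unnecessary. A round-trip sketch under that assumption, where `response` stands for any object with writeTo/readFrom as in the test above:

    BytesStreamOutput output = new BytesStreamOutput();
    response.writeTo(output);

    StreamInput input = output.bytes().streamInput(); // reads in place, no byte[] copy
    response.readFrom(input);
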
@@ -48,8 +48,8 @@ import static org.junit.Assert.assertThat;

public class TestUtils {

    private final static FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
    private final static DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);
    private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd");
    private static final DateMathParser dateMathParser = new DateMathParser(formatDateTimeFormatter);

    public static long dateMath(String time, final long now) {
        return dateMathParser.parse(time, new Callable<Long>() {

@@ -146,7 +146,7 @@ public class TestUtils {
            }

            @Override
            public void onFailure(Throwable e) {
            public void onFailure(Exception e) {
                latch.countDown();
            }
        });

@@ -5,6 +5,7 @@
 */
package org.elasticsearch.license.plugin;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -96,7 +97,7 @@ public class TrialLicenseTests extends ESTestCase {
        try {
            XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
            spec.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
            byte[] encrypt = encrypt(contentBuilder.bytes().toBytes());
            byte[] encrypt = encrypt(BytesReference.toBytes(contentBuilder.bytes()));
            byte[] bytes = new byte[4 + 4 + encrypt.length];
            ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
            byteBuffer.putInt(-spec.version())
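Where a real byte[] is still unavoidable (here it feeds the test's encrypt helper), the removed instance method bytes().toBytes() is replaced by the static helper on BytesReference, which copies into a single array only when needed. A one-line sketch of the new call:

    byte[] raw = BytesReference.toBytes(contentBuilder.bytes());
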

@@ -16,7 +16,7 @@ import org.elasticsearch.common.settings.Settings;
 */
public class EagerLicenseRegistrationConsumerPlugin extends TestConsumerPluginBase {

    public final static String NAME = "test_consumer_plugin_1";
    public static final String NAME = "test_consumer_plugin_1";

    @Inject
    public EagerLicenseRegistrationConsumerPlugin(Settings settings) {

@@ -21,7 +21,7 @@ import org.elasticsearch.license.plugin.core.LicensesService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

public abstract class TestPluginServiceBase extends AbstractLifecycleComponent<TestPluginServiceBase>
public abstract class TestPluginServiceBase extends AbstractLifecycleComponent
        implements ClusterStateListener, Licensee {

    private LicensesService licensesClientService;

@@ -157,7 +157,7 @@ public class LicensesAcknowledgementTests extends ESSingleNodeTestCase {
            }

            @Override
            public void onFailure(Throwable throwable) {
            public void onFailure(Exception throwable) {
                latch.countDown();
            }
        }

@@ -174,7 +174,7 @@ public class LicensesManagerServiceTests extends ESSingleNodeTestCase {
            }

            @Override
            public void onFailure(Throwable throwable) {
            public void onFailure(Exception throwable) {
                latch.countDown();
            }
        });

@@ -9,8 +9,9 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;

@@ -24,7 +25,6 @@ import org.elasticsearch.license.plugin.Licensing;
import org.elasticsearch.license.plugin.TestUtils;
import org.elasticsearch.test.ESTestCase;

import java.nio.ByteBuffer;
import java.util.Base64;
import java.util.Collections;
import java.util.UUID;

@@ -42,9 +42,7 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.startObject("licenses");
        licensesMetaData.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        byte[] serializedBytes = builder.bytes().toBytes();

        LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(serializedBytes);
        LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(license));
    }

@@ -90,9 +88,7 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.startObject("licenses");
        licensesMetaData.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        byte[] serializedBytes = builder.bytes().toBytes();

        LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(serializedBytes);
        LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(trialLicense));
    }

@@ -113,13 +109,13 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.startArray("trial_licenses");
        XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
        builder.value(Base64.getEncoder().encodeToString(encrypt(contentBuilder.bytes().toBytes())));
        builder.value(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
        builder.endArray();
        builder.startArray("signed_licenses");
        builder.endArray();
        builder.endObject();
        builder.endObject();
        LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes().toBytes());
        LicensesMetaData licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(trialLicense));

        // signed license

@@ -133,7 +129,7 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.endArray();
        builder.endObject();
        builder.endObject();
        licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes().toBytes());
        licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(signedLicense));

        // trial and signed license

@@ -143,14 +139,14 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.startArray("trial_licenses");
        contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
        builder.value(Base64.getEncoder().encodeToString(encrypt(contentBuilder.bytes().toBytes())));
        builder.value(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
        builder.endArray();
        builder.startArray("signed_licenses");
        signedLicense.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endArray();
        builder.endObject();
        builder.endObject();
        licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes().toBytes());
        licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(signedLicense));

        // license with later issue date is selected

@@ -162,7 +158,7 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.startArray("trial_licenses");
        contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
        builder.value(Base64.getEncoder().encodeToString(encrypt(contentBuilder.bytes().toBytes())));
        builder.value(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
        builder.endArray();
        builder.startArray("signed_licenses");
        signedLicense.toXContent(builder, ToXContent.EMPTY_PARAMS);

@@ -170,7 +166,7 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.endArray();
        builder.endObject();
        builder.endObject();
        licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes().toBytes());
        licensesMetaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(licensesMetaDataFromXContent.getLicense(), equalTo(signedLicenseIssuedLater));

    }

@@ -190,13 +186,12 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        output.writeVInt(1);
        XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
        trialLicense.toXContent(contentBuilder, new ToXContent.MapParams(Collections.singletonMap(License.LICENSE_SPEC_VIEW_MODE, "true")));
        output.writeString(Base64.getEncoder().encodeToString(encrypt(contentBuilder.bytes().toBytes())));
        byte[] bytes = output.bytes().toBytes();
        ByteBufferStreamInput input = new ByteBufferStreamInput(ByteBuffer.wrap(bytes));

        input.setVersion(Version.V_2_0_0_beta1);
        LicensesMetaData licensesMetaData = LicensesMetaData.PROTO.readFrom(input);
        assertThat(licensesMetaData.getLicense(), equalTo(trialLicense));
        output.writeString(Base64.getEncoder().encodeToString(encrypt(BytesReference.toBytes(contentBuilder.bytes()))));
        try (StreamInput input = output.bytes().streamInput()) {
            input.setVersion(Version.V_2_0_0_beta1);
            LicensesMetaData licensesMetaData = LicensesMetaData.PROTO.readFrom(input);
            assertThat(licensesMetaData.getLicense(), equalTo(trialLicense));
        }

        // signed license
        License signedLicense = TestUtils.generateSignedLicense(TimeValue.timeValueHours(2));

@@ -204,11 +199,11 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        output.writeVInt(1);
        signedLicense.writeTo(output);
        output.writeVInt(0);
        bytes = output.bytes().toBytes();
        input = new ByteBufferStreamInput(ByteBuffer.wrap(bytes));
        input.setVersion(Version.V_2_0_0_beta1);
        licensesMetaData = LicensesMetaData.PROTO.readFrom(input);
        assertThat(licensesMetaData.getLicense(), equalTo(signedLicense));
        try (StreamInput input = output.bytes().streamInput()) {
            input.setVersion(Version.V_2_0_0_beta1);
            LicensesMetaData licensesMetaData = LicensesMetaData.PROTO.readFrom(input);
            assertThat(licensesMetaData.getLicense(), equalTo(signedLicense));
        }
    }

    public void testLicenseTombstoneFromXContext() throws Exception {

@@ -216,11 +211,11 @@ public class LicensesMetaDataSerializationTests extends ESTestCase {
        builder.startObject("licenses");
        builder.nullField("license");
        builder.endObject();
        LicensesMetaData metaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes().toBytes());
        LicensesMetaData metaDataFromXContent = getLicensesMetaDataFromXContent(builder.bytes());
        assertThat(metaDataFromXContent.getLicense(), equalTo(LicensesMetaData.LICENSE_TOMBSTONE));
    }

    private static LicensesMetaData getLicensesMetaDataFromXContent(byte[] bytes) throws Exception {
    private static LicensesMetaData getLicensesMetaDataFromXContent(BytesReference bytes) throws Exception {
        final XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(bytes);
        parser.nextToken(); // consume null
        parser.nextToken(); // consume "licenses"

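The stream-deserialization hunks in this test converge on one pattern: obtain the StreamInput directly from the BytesReference and scope it with try-with-resources, so the stream is released even if readFrom throws. A sketch of that pattern as used above:

    try (StreamInput input = output.bytes().streamInput()) {
        input.setVersion(Version.V_2_0_0_beta1);
        LicensesMetaData meta = LicensesMetaData.PROTO.readFrom(input);
        // assertions on `meta` follow here in the test
    }
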
@@ -82,7 +82,7 @@ public class MonitoringFeatureSet implements XPackFeatureSet {

    private static final String ENABLED_EXPORTERS_XFIELD = "enabled_exporters";

    private @Nullable Map<String, Object> exporters;
    @Nullable private Map<String, Object> exporters;

    public Usage(StreamInput in) throws IOException {
        super(in);

@@ -84,12 +84,12 @@ public class MonitoringBulkResponse extends ActionResponse {
        }

        Error(StreamInput in) throws IOException {
            this(in.<Throwable>readThrowable());
            this(in.readException());
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeThrowable(getCause());
            out.writeException(getCause());
        }

        /**
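The Error class above tracks the renamed stream methods: causes are now written with writeException and read with readException, matching the Throwable-to-Exception narrowing elsewhere in the commit. A hedged round-trip sketch under that assumption (a hypothetical helper, not code from this commit):

    static Exception roundTrip(Exception cause) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeException(cause);             // was: out.writeThrowable(cause)
            try (StreamInput in = out.bytes().streamInput()) {
                return in.readException();         // was: in.<Throwable>readThrowable()
            }
        }
    }
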
@@ -114,8 +114,8 @@ public class TransportMonitoringBulkAction extends HandledTransportAction<Monito
            }

            @Override
            public void onFailure(Throwable t) {
                listener.onResponse(new MonitoringBulkResponse(buildTookInMillis(startTimeNanos), new MonitoringBulkResponse.Error(t)));
            public void onFailure(Exception e) {
                listener.onResponse(new MonitoringBulkResponse(buildTookInMillis(startTimeNanos), new MonitoringBulkResponse.Error(e)));
            }
        });
    }

@@ -41,7 +41,7 @@ import java.util.concurrent.locks.ReentrantLock;
 * @see #stopCollection()
 * @see #startCollection()
 */
public class AgentService extends AbstractLifecycleComponent<AgentService> {
public class AgentService extends AbstractLifecycleComponent {

    private volatile ExportingWorker exportingWorker;

@@ -204,8 +204,8 @@ public class AgentService extends AbstractLifecycleComponent<AgentService> {
                } catch (InterruptedException e) {
                    logger.trace("interrupted");
                    Thread.currentThread().interrupt();
                } catch (Throwable t) {
                    logger.error("background thread had an uncaught exception", t);
                } catch (Exception e) {
                    logger.error("background thread had an uncaught exception", e);
                }
            }
            logger.debug("worker shutdown");

@@ -19,7 +19,7 @@ import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;

import java.util.Collection;

public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T> implements Collector<T> {
public abstract class AbstractCollector extends AbstractLifecycleComponent implements Collector {

    private final String name;

@@ -48,9 +48,9 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
    }

    @Override
    public T start() {
    public void start() {
        logger.debug("starting collector [{}]", name());
        return super.start();
        super.start();
    }

    @Override

@@ -90,9 +90,9 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
    protected abstract Collection<MonitoringDoc> doCollect() throws Exception;

    @Override
    public T stop() {
    public void stop() {
        logger.debug("stopping collector [{}]", name());
        return super.stop();
        super.stop();
    }

    @Override
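The collector hunks here and below all fall out of one upstream change: AbstractLifecycleComponent lost its self-referential type parameter, and start()/stop() stopped returning the component for chaining. A self-contained plain-Java illustration of the shape of that change (deliberately not the actual Elasticsearch classes):

    // Before: fluent, self-referentially generic lifecycle base class.
    abstract class LifecycleBefore<T> {
        @SuppressWarnings("unchecked")
        public T start() {
            // ... transition to STARTED ...
            return (T) this; // returned self for call chaining
        }
    }

    // After: no type parameter; lifecycle methods return void.
    abstract class LifecycleAfter {
        public void start() {
            // ... transition to STARTED ...
        }
    }
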
@@ -10,7 +10,7 @@ import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;

import java.util.Collection;

public interface Collector<T> extends LifecycleComponent<T> {
public interface Collector extends LifecycleComponent {

    String name();

@@ -30,7 +30,7 @@ import java.util.List;
 * This collector runs on the master node only and collects {@link ClusterStateMonitoringDoc} document
 * at a given frequency.
 */
public class ClusterStateCollector extends AbstractCollector<ClusterStateCollector> {
public class ClusterStateCollector extends AbstractCollector {

    public static final String NAME = "cluster-state-collector";

@@ -36,7 +36,7 @@ import java.util.List;
 * document; the cluster stats are also indexed in the timestamped index in a
 * "cluster_stats" document.
 */
public class ClusterStatsCollector extends AbstractCollector<ClusterStatsCollector> {
public class ClusterStatsCollector extends AbstractCollector {

    public static final String NAME = "cluster-stats-collector";

@@ -32,7 +32,7 @@ import java.util.List;
 * This collector runs on the master node only and collects a {@link IndexRecoveryMonitoringDoc} document
 * for every index that has on-going shard recoveries.
 */
public class IndexRecoveryCollector extends AbstractCollector<IndexRecoveryCollector> {
public class IndexRecoveryCollector extends AbstractCollector {

    public static final String NAME = "index-recovery-collector";

@@ -34,7 +34,7 @@ import java.util.List;
 * This collector runs on the master node only and collect a {@link IndexStatsMonitoringDoc} document
 * for each existing index in the cluster.
 */
public class IndexStatsCollector extends AbstractCollector<IndexStatsCollector> {
public class IndexStatsCollector extends AbstractCollector {

    public static final String NAME = "index-stats-collector";

@@ -29,7 +29,7 @@ import java.util.Collections;
 * <p>
 * This collector runs on the master node only and collect one {@link IndicesStatsMonitoringDoc} document.
 */
public class IndicesStatsCollector extends AbstractCollector<IndicesStatsCollector> {
public class IndicesStatsCollector extends AbstractCollector {

    public static final String NAME = "indices-stats-collector";

@@ -32,7 +32,7 @@ import java.util.Collections;
 * This collector runs on every non-client node and collect
 * a {@link NodeStatsMonitoringDoc} document for each node of the cluster.
 */
public class NodeStatsCollector extends AbstractCollector<NodeStatsCollector> {
public class NodeStatsCollector extends AbstractCollector {

    public static final String NAME = "node-stats-collector";

@@ -30,7 +30,7 @@ import java.util.List;
 * This collector runs on the master node only and collects the {@link ShardMonitoringDoc} documents
 * for every index shard.
 */
public class ShardsCollector extends AbstractCollector<ShardsCollector> {
public class ShardsCollector extends AbstractCollector {

    public static final String NAME = "shards-collector";

@@ -23,7 +23,8 @@ public abstract class Exporter implements AutoCloseable {
    protected final Config config;
    protected final ESLogger logger;

    protected final @Nullable TimeValue bulkTimeout;
    @Nullable protected final TimeValue bulkTimeout;

    private AtomicBoolean closed = new AtomicBoolean(false);

    public Exporter(String type, Config config) {

@@ -103,7 +104,7 @@ public abstract class Exporter implements AutoCloseable {
        }
    }

    public static abstract class Factory<E extends Exporter> {
    public abstract static class Factory<E extends Exporter> {

        private final String type;
        private final boolean singleton;

@@ -32,7 +32,7 @@ import static java.util.Collections.emptyMap;
/**
 *
 */
public class Exporters extends AbstractLifecycleComponent<Exporters> implements Iterable<Exporter> {
public class Exporters extends AbstractLifecycleComponent implements Iterable<Exporter> {

    private final Map<String, Exporter.Factory> factories;
    private final ClusterService clusterService;

@@ -5,6 +5,7 @@
 */
package org.elasticsearch.xpack.monitoring.agent.exporter.http;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.SpecialPermission;

@@ -58,9 +59,6 @@ import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

/**
 *
 */
public class HttpExporter extends Exporter {

    public static final String TYPE = "http";

@@ -216,7 +214,8 @@ public class HttpExporter extends Exporter {
        out.write(CONTENT_TYPE.xContent().streamSeparator());

        // Render the monitoring document
        out.write(resolver.source(doc, CONTENT_TYPE).toBytes());
        BytesRef bytesRef = resolver.source(doc, CONTENT_TYPE).toBytesRef();
        out.write(bytesRef.bytes, bytesRef.offset, bytesRef.length);

        // Adds final bulk separator
        out.write(CONTENT_TYPE.xContent().streamSeparator());
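The render path above stops materializing the whole document source as a byte[]: it asks the BytesReference for a BytesRef and writes the backing array using the ref's offset and length, which matters because the BytesRef may be a slice of a larger shared array. The core of the pattern, assuming `source` is a BytesReference and `out` an OutputStream:

    BytesRef bytesRef = source.toBytesRef();
    out.write(bytesRef.bytes, bytesRef.offset, bytesRef.length); // honor slice bounds
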
@@ -533,7 +532,7 @@ public class HttpExporter extends Exporter {
        }
    }

    static private void validateHosts(String[] hosts) {
    private static void validateHosts(String[] hosts) {
        for (String host : hosts) {
            try {
                HttpExporterUtils.parseHostWithPath(host, "");

@@ -662,9 +661,9 @@
                }
            } catch (InterruptedException e) {
                // ignore, if closed, good....
            } catch (Throwable t) {
            } catch (Exception e) {
                logger.debug("error in keep alive thread, shutting down (will be restarted after a successful connection has been " +
                        "made) {}", ExceptionsHelper.detailedMessage(t));
                        "made) {}", ExceptionsHelper.detailedMessage(e));
                return;
            }
        }

@@ -718,8 +717,9 @@ public class HttpExporter extends Exporter {
        for (MonitoringDoc monitoringDoc : docs) {
            try {
                render(monitoringDoc, buffer);
                BytesRef bytesRef = buffer.bytes().toBytesRef();
                // write the result to the connection
                out.write(buffer.bytes().toBytes());
                out.write(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            } finally {
                buffer.reset();
            }

@@ -211,8 +211,8 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
            }

            @Override
            public void onFailure(Throwable throwable) {
                logger.error("failed to update monitoring index template [{}]", throwable, template);
            public void onFailure(Exception e) {
                logger.error("failed to update monitoring index template [{}]", e, template);
            }
        });
    }

@@ -296,7 +296,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
            }

            @Override
            public void onFailure(Throwable e) {
            public void onFailure(Exception e) {
                logger.error("failed to delete indices", e);
            }
        });

@@ -126,7 +126,7 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
     * Data index name resolvers are used used to index documents in
     * the monitoring data index (.monitoring-data-{VERSION})
     */
    public static abstract class Data<T extends MonitoringDoc> extends MonitoringIndexNameResolver<T> {
    public abstract static class Data<T extends MonitoringDoc> extends MonitoringIndexNameResolver<T> {

        public static final String DATA = "data";

@@ -166,7 +166,7 @@ public abstract class MonitoringIndexNameResolver<T extends MonitoringDoc> {
     * Timestamped index name resolvers are used used to index documents in
     * a timestamped index (.monitoring-{ID}-{VERSION}-YYYY.MM.dd)
     */
    public static abstract class Timestamped<T extends MonitoringDoc> extends MonitoringIndexNameResolver<T> {
    public abstract static class Timestamped<T extends MonitoringDoc> extends MonitoringIndexNameResolver<T> {

        public static final Setting<String> INDEX_NAME_TIME_FORMAT_SETTING = new Setting<>("index.name.time_format", "YYYY.MM.dd",
                Function.identity(), Setting.Property.NodeScope);

@@ -26,7 +26,7 @@ import java.util.concurrent.ScheduledFuture;
/**
 * {@code CleanerService} takes care of deleting old monitoring indices.
 */
public class CleanerService extends AbstractLifecycleComponent<CleanerService> {
public class CleanerService extends AbstractLifecycleComponent {

    private final MonitoringLicensee licensee;
    private final ThreadPool threadPool;

@@ -179,8 +179,8 @@ public class CleanerService extends AbstractLifecycleComponent<CleanerService> {
            for (Listener listener : listeners) {
                try {
                    listener.onCleanUpIndices(retention);
                } catch (Throwable t) {
                    logger.error("listener failed to clean indices", t);
                } catch (Exception e) {
                    logger.error("listener failed to clean indices", e);
                }
            }

@@ -209,8 +209,8 @@ public class CleanerService extends AbstractLifecycleComponent<CleanerService> {
        }

        @Override
        public void onFailure(Throwable t) {
            logger.error("failed to clean indices", t);
        public void onFailure(Exception e) {
            logger.error("failed to clean indices", e);
        }

        /**

@@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.monitoring.rest;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.rest.XPackRestHandler;

@@ -13,7 +12,7 @@ public abstract class MonitoringRestHandler extends XPackRestHandler {

    protected static String URI_BASE = XPackRestHandler.URI_BASE + "/monitoring";

    public MonitoringRestHandler(Settings settings, Client client) {
        super(settings, client);
    public MonitoringRestHandler(Settings settings) {
        super(settings);
    }
}

@@ -6,7 +6,7 @@
package org.elasticsearch.xpack.monitoring.rest.action;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;

@@ -32,8 +32,8 @@ public class RestMonitoringBulkAction extends MonitoringRestHandler {
    public static final String MONITORING_VERSION = "system_version";

    @Inject
    public RestMonitoringBulkAction(Settings settings, RestController controller, Client client) {
        super(settings, client);
    public RestMonitoringBulkAction(Settings settings, RestController controller) {
        super(settings);
        controller.registerHandler(POST, URI_BASE + "/_bulk", this);
        controller.registerHandler(PUT, URI_BASE + "/_bulk", this);
        controller.registerHandler(POST, URI_BASE + "/{type}/_bulk", this);

@@ -41,7 +41,7 @@ public class RestMonitoringBulkAction extends MonitoringRestHandler {
    }

    @Override
    protected void handleRequest(RestRequest request, RestChannel channel, XPackClient client) throws Exception {
    public void handleRequest(RestRequest request, RestChannel channel, XPackClient client) throws Exception {
        String defaultType = request.param("type");

        String id = request.param(MONITORING_ID);

@@ -81,9 +81,9 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {

            threads[i] = new Thread(new AbstractRunnable() {
                @Override
                public void onFailure(Throwable t) {
                    logger.error("unexpected error in exporting thread", t);
                    exceptions.add(t);
                public void onFailure(Exception e) {
                    logger.error("unexpected error in exporting thread", e);
                    exceptions.add(e);
                }

                @Override

@@ -56,7 +56,7 @@ public class MonitoringIndexTests extends ESTestCase {

        index.writeTo(out);

        final StreamInput in = StreamInput.wrap(out.bytes().toBytes());
        final StreamInput in = out.bytes().streamInput();

        assertSame(index, MonitoringIndex.readFrom(in));

@@ -19,7 +19,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.test.ESTestCase;

@@ -90,7 +90,8 @@ public class TransportMonitoringBulkActionTests extends ESTestCase {
        clusterService = new ClusterService(Settings.builder().put("cluster.name",
                TransportMonitoringBulkActionTests.class.getName()).build(),
                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool);
        clusterService.setLocalNode(new DiscoveryNode("node", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        clusterService.setLocalNode(new DiscoveryNode("node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),
                Version.CURRENT));
        clusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) {
            @Override
            public void connectToAddedNodes(ClusterChangedEvent event) {

@@ -152,8 +153,8 @@ public class TransportMonitoringBulkActionTests extends ESTestCase {
            }

            @Override
            public void onFailure(String source, Throwable t) {
                fail("unexpected exception: " + t);
            public void onFailure(String source, Exception e) {
                fail("unexpected exception: " + e);
            }
        });

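The DummyTransportAddress.INSTANCE singleton is gone; tests now build a unique local address per node, which avoids accidental address collisions between fake nodes. The recurring fixture, as it appears throughout the test hunks above and below:

    DiscoveryNode node = new DiscoveryNode("node", LocalTransportAddress.buildUnique(),
            emptyMap(), emptySet(), Version.CURRENT);
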
@@ -296,9 +296,9 @@ public class ExportersTests extends ESTestCase {
            logger.debug("--> exporting thread [{}] exports {} documents", threadNum, threadDocs);
            threads[i] = new Thread(new AbstractRunnable() {
                @Override
                public void onFailure(Throwable t) {
                    logger.error("unexpected error in exporting thread", t);
                    exceptions.add(t);
                public void onFailure(Exception e) {
                    logger.error("unexpected error in exporting thread", e);
                    exceptions.add(e);
                }

                @Override

@@ -9,7 +9,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

@@ -130,7 +130,7 @@ public class MonitoringDocTests extends ESTestCase {
            }
        }
        Set<DiscoveryNode.Role> roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values())));
        return new DiscoveryNode(randomAsciiOfLength(5), randomAsciiOfLength(3), randomAsciiOfLength(3), randomAsciiOfLength(3),
                DummyTransportAddress.INSTANCE, attributes, roles, randomVersion(random()));
        return new DiscoveryNode(randomAsciiOfLength(5), randomAsciiOfLength(3), LocalTransportAddress.buildUnique(),
                attributes, roles, randomVersion(random()));
    }
}

@@ -139,7 +139,7 @@ public class HttpExporterTemplateTests extends AbstractExporterTemplateTestCase
        boolean templateExist = templates.containsKey(templateName);

        if ("GET".equals(request.getMethod())) {
            return templateExist ? newResponse(200, templates.get(templateName).toUtf8()) : NOT_FOUND;
            return templateExist ? newResponse(200, templates.get(templateName).utf8ToString()) : NOT_FOUND;
        }
        if ("PUT".equals(request.getMethod())) {
            templates.put(templateName, new BytesArray(request.getBody().readByteArray()));
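Rename only: BytesReference#toUtf8() became utf8ToString(); both decode the full reference as a UTF-8 string. A one-line sketch of the new call as used in the hunk above:

    String body = templates.get(templateName).utf8ToString(); // was: .toUtf8()
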
@@ -22,7 +22,7 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.Scope;

@@ -439,14 +439,14 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
            IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
            doc.setClusterUUID(internalCluster().getClusterName());
            doc.setTimestamp(System.currentTimeMillis());
            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
            doc.setRecoveryResponse(new RecoveryResponse());
            return doc;
        } else {
            ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
            doc.setClusterUUID(internalCluster().getClusterName());
            doc.setTimestamp(System.currentTimeMillis());
            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
            doc.setClusterState(ClusterState.PROTO);
            doc.setStatus(ClusterHealthStatus.GREEN);
            return doc;

@@ -467,7 +467,8 @@ public class HttpExporterTests extends MonitoringIntegTestCase {

    private void enqueueGetClusterVersionResponse(MockWebServer mockWebServer, Version v) throws IOException {
        mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody(
            jsonBuilder().startObject().startObject("version").field("number", v.toString()).endObject().endObject().bytes().toUtf8()));
            jsonBuilder().startObject().startObject("version").field("number", v.toString()).endObject().endObject().bytes()
                .utf8ToString()));
    }

    private void enqueueResponse(int responseCode, String body) throws IOException {

@@ -13,7 +13,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;

@@ -198,14 +198,14 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
            IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
            doc.setClusterUUID(internalCluster().getClusterName());
            doc.setTimestamp(System.currentTimeMillis());
            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
            doc.setRecoveryResponse(new RecoveryResponse());
            return doc;
        } else {
            ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
            doc.setClusterUUID(internalCluster().getClusterName());
            doc.setTimestamp(System.currentTimeMillis());
            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
            doc.setClusterState(ClusterState.PROTO);
            doc.setStatus(ClusterHealthStatus.GREEN);
            return doc;

@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;

@@ -31,7 +31,7 @@ public class DataResolverTests extends MonitoringIndexNameResolverTestCase {
        MonitoringDoc doc = new MonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        return doc;
    }

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;

@@ -40,7 +40,7 @@ public class TimestampedResolverTests extends MonitoringIndexNameResolverTestCas
        MonitoringDoc doc = new MonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode(randomAsciiOfLength(5), DummyTransportAddress.INSTANCE,
        doc.setSourceNode(new DiscoveryNode(randomAsciiOfLength(5), LocalTransportAddress.buildUnique(),
                emptyMap(), emptySet(), Version.CURRENT));
        return doc;
    }

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.bulk;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;

@@ -39,7 +39,7 @@ public class MonitoringBulkDataResolverTests extends MonitoringIndexNameResolver

        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        return doc;
    }

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.bulk;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;

@@ -41,7 +41,7 @@ public class MonitoringBulkTimestampedResolverTests
        }

        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        return doc;
    }

@@ -10,7 +10,7 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.core.License;

@@ -42,7 +42,7 @@ public class ClusterInfoResolverTests extends MonitoringIndexNameResolverTestCas
        ClusterInfoMonitoringDoc doc = new ClusterInfoMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setVersion(randomFrom(Version.V_2_0_0, Version.CURRENT).toString());
        doc.setLicense(licenseBuilder.build());
        doc.setClusterName(randomAsciiOfLength(5));

@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.cluster;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStateNodeMonitoringDoc;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;

@@ -28,7 +28,7 @@ public class ClusterStateNodeResolverTests extends
        ClusterStateNodeMonitoringDoc doc = new ClusterStateNodeMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setNodeId(UUID.randomUUID().toString());
        doc.setStateUUID(UUID.randomUUID().toString());
        return doc;

@@ -11,7 +11,6 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStateMonitoringDoc;

@@ -32,7 +31,7 @@ public class ClusterStateResolverTests extends MonitoringIndexNameResolverTestCa
        ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setStatus(randomFrom(ClusterHealthStatus.values()));

        DiscoveryNode masterNode = new DiscoveryNode("master", new LocalTransportAddress("master"),

@@ -21,7 +21,7 @@ import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentType;

@@ -63,7 +63,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
        ClusterStatsMonitoringDoc doc = new ClusterStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setClusterStats(randomClusterStats());
        return doc;
    }

@@ -94,7 +94,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
     */
    private ClusterStatsResponse randomClusterStats() {
        List<ClusterStatsNodeResponse> responses = Collections.singletonList(
                new ClusterStatsNodeResponse(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE,
                new ClusterStatsNodeResponse(new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(),
                        emptyMap(), emptySet(), Version.CURRENT),
                        ClusterHealthStatus.GREEN, randomNodeInfo(), randomNodeStats(), randomShardStats())
        );

@@ -106,10 +106,10 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
     * @return a random {@link NodeInfo} used to resolve a monitoring document.
     */
    private NodeInfo randomNodeInfo() {
        BoundTransportAddress transportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE},
                DummyTransportAddress.INSTANCE);
        BoundTransportAddress transportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()},
                LocalTransportAddress.buildUnique());
        return new NodeInfo(Version.CURRENT, org.elasticsearch.Build.CURRENT,
                new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), emptyMap(),
                new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
                Settings.EMPTY, DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean()), JvmInfo.jvmInfo(),
                new ThreadPoolInfo(Collections.singletonList(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5))),
                new TransportInfo(transportAddress, Collections.emptyMap()), new HttpInfo(transportAddress, randomLong()),

@@ -127,7 +127,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
        };
        Map<Index, List<IndexShardStats>> statsByShard = new HashMap<>();
        statsByShard.put(index, Collections.singletonList(new IndexShardStats(new ShardId(index, 0), randomShardStats())));
        return new NodeStats(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0,
        return new NodeStats(new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0,
                new NodeIndicesStats(new CommonStats(), statsByShard), null, null, null, null,
                new FsInfo(0, null, pathInfo), null, null, null, null, null, null);
    }

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.indices;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryState;

@@ -32,9 +32,9 @@ public class IndexRecoveryResolverTests extends MonitoringIndexNameResolverTestC
        IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));

        DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
        DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
        Map<String, java.util.List<RecoveryState>> shardRecoveryStates = new HashMap<>();
        shardRecoveryStates.put("test", Collections.singletonList(new RecoveryState(new ShardId("test", "uuid", 0), true,
                RecoveryState.Type.STORE, localNode, localNode)));

@@ -12,7 +12,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.QueryCacheStats;

@@ -45,7 +45,7 @@ public class IndexStatsResolverTests extends MonitoringIndexNameResolverTestCase
        IndexStatsMonitoringDoc doc = new IndexStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setIndexStats(randomIndexStats());
        return doc;
    }

@@ -58,7 +58,7 @@ public class IndexStatsResolverTests extends MonitoringIndexNameResolverTestCase
    public void testIndexStatsResolver() throws Exception {
        IndexStatsMonitoringDoc doc = newMonitoringDoc();
        doc.setTimestamp(1437580442979L);
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));

        IndexStatsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));

@@ -13,7 +13,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.query.QueryCacheStats;

@@ -49,7 +49,7 @@ public class IndicesStatsResolverTests extends MonitoringIndexNameResolverTestCa
        IndicesStatsMonitoringDoc doc = new IndicesStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setIndicesStats(randomIndicesStats());
        return doc;
    }

@@ -63,7 +63,7 @@ public class IndicesStatsResolverTests extends MonitoringIndexNameResolverTestCa
        IndicesStatsMonitoringDoc doc = newMonitoringDoc();
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(1437580442979L);
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));

        IndicesStatsResolver resolver = newResolver();
        assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));

@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.node;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.xpack.monitoring.agent.collector.cluster.DiscoveryNodeMonitoringDoc;

@@ -28,9 +28,9 @@ public class DiscoveryNodeResolverTests extends MonitoringIndexNameResolverTestC
        DiscoveryNodeMonitoringDoc doc = new DiscoveryNodeMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
        doc.setClusterUUID(randomAsciiOfLength(5));
        doc.setTimestamp(Math.abs(randomLong()));
        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
        doc.setNode(new DiscoveryNode(randomAsciiOfLength(3), UUID.randomUUID().toString(),
                DummyTransportAddress.INSTANCE, emptyMap(), emptySet(),
                LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),
                VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT)));
        return doc;
    }

@@ -14,7 +14,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
@@ -59,7 +59,7 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase<
         NodeStatsMonitoringDoc doc = new NodeStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setNodeMaster(randomBoolean());
         doc.setNodeId(UUID.randomUUID().toString());
         doc.setDiskThresholdDeciderEnabled(randomBoolean());
@@ -133,7 +133,7 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase<
                 new ThreadPoolStats.Stats(ThreadPool.Names.SEARCH, 0, 0, 0, 0, 0, 0),
                 new ThreadPoolStats.Stats(InternalWatchExecutor.THREAD_POOL_NAME, 0, 0, 0, 0, 0, 0)
         );
-        return new NodeStats(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0,
+        return new NodeStats(new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0,
                 new NodeIndicesStats(new CommonStats(), statsByShard), OsProbe.getInstance().osStats(),
                 ProcessProbe.getInstance().processStats(), JvmStats.jvmStats(),
                 new ThreadPoolStats(threadPoolStats),
@@ -9,7 +9,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
@@ -31,7 +31,7 @@ public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<Sha
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
         doc.setClusterStateUUID(UUID.randomUUID().toString());
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));

         ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(new Index(randomAsciiOfLength(5), UUID.randomUUID().toString()),
                 randomIntBetween(0, 5)), null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
@@ -48,7 +48,7 @@ public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<Sha

         final String clusterStateUUID = UUID.randomUUID().toString();
         doc.setClusterStateUUID(clusterStateUUID);
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));

         ShardsResolver resolver = newResolver();
         assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
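Note: ShardsResolverTests also shows the current way to build a never-assigned shard in tests. A self-contained sketch of that fixture; the newUnassigned call is exactly as in the diff, while the wrapper class is hypothetical:

import java.util.UUID;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;

class ShardFixtures {
    // An unassigned shard tagged with the INDEX_CREATED reason; the null argument
    // is an optional parameter left unset in these tests, and the boolean picks
    // primary versus replica.
    static ShardRouting unassignedShard(String indexName, int shardNum, boolean primary) {
        ShardId shardId = new ShardId(new Index(indexName, UUID.randomUUID().toString()), shardNum);
        return ShardRouting.newUnassigned(shardId, null, primary, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
    }
}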
@@ -0,0 +1,102 @@
+#!/bin/bash
+
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License;
+# you may not use this file except in compliance with the Elastic License.
+
+SCRIPT="$0"
+
+# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
+while [ -h "$SCRIPT" ] ; do
+  ls=`ls -ld "$SCRIPT"`
+  # Drop everything prior to ->
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    SCRIPT="$link"
+  else
+    SCRIPT=`dirname "$SCRIPT"`/"$link"
+  fi
+done
+
+# determine elasticsearch home
+ES_HOME=`dirname "$SCRIPT"`/../..
+
+# make ELASTICSEARCH_HOME absolute
+ES_HOME=`cd "$ES_HOME"; pwd`
+
+# If an include wasn't specified in the environment, then search for one...
+if [ "x$ES_INCLUDE" = "x" ]; then
+    # Locations (in order) to use when searching for an include file.
+    for include in /usr/share/elasticsearch/elasticsearch.in.sh \
+                   /usr/local/share/elasticsearch/elasticsearch.in.sh \
+                   /opt/elasticsearch/elasticsearch.in.sh \
+                   ~/.elasticsearch.in.sh \
+                   "`dirname "$0"`"/../elasticsearch.in.sh \
+                   "$ES_HOME/bin/elasticsearch.in.sh"; do
+        if [ -r "$include" ]; then
+            . "$include"
+            break
+        fi
+    done
+# ...otherwise, source the specified include.
+elif [ -r "$ES_INCLUDE" ]; then
+    . "$ES_INCLUDE"
+fi
+
+if [ -x "$JAVA_HOME/bin/java" ]; then
+    JAVA="$JAVA_HOME/bin/java"
+else
+    JAVA=`which java`
+fi
+
+if [ ! -x "$JAVA" ]; then
+    echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME"
+    exit 1
+fi
+
+if [ -z "$ES_CLASSPATH" ]; then
+    echo "You must set the ES_CLASSPATH var" >&2
+    exit 1
+fi
+
+# Try to read package config files
+if [ -f "/etc/sysconfig/elasticsearch" ]; then
+    CONF_DIR=/etc/elasticsearch
+
+    . "/etc/sysconfig/elasticsearch"
+elif [ -f "/etc/default/elasticsearch" ]; then
+    CONF_DIR=/etc/elasticsearch
+
+    . "/etc/default/elasticsearch"
+fi
+
+export HOSTNAME=`hostname -s`
+
+# include x-pack jars in classpath
+ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/plugins/x-pack/*"
+
+# don't let JAVA_TOOL_OPTIONS slip in (e.g. crazy agents in ubuntu)
+# works around https://bugs.launchpad.net/ubuntu/+source/jayatana/+bug/1441487
+if [ "x$JAVA_TOOL_OPTIONS" != "x" ]; then
+    echo "Warning: Ignoring JAVA_TOOL_OPTIONS=$JAVA_TOOL_OPTIONS"
+    echo "Please pass JVM parameters via ES_JAVA_OPTS instead"
+    unset JAVA_TOOL_OPTIONS
+fi
+
+# CONF_FILE setting was removed
+if [ ! -z "$CONF_FILE" ]; then
+    echo "CONF_FILE setting is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed."
+    exit 1
+fi
+
+declare -a args=("$@")
+
+if [ -e "$CONF_DIR" ]; then
+    args=("${args[@]}" -Edefault.path.conf="$CONF_DIR")
+fi
+
+cd "$ES_HOME" > /dev/null
+"$JAVA" $ES_JAVA_OPTS -cp "$ES_CLASSPATH" -Des.path.home="$ES_HOME" org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool "${args[@]}"
+status=$?
+cd - > /dev/null
+exit $status
@@ -0,0 +1,9 @@
+@echo off
+
+rem Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+rem or more contributor license agreements. Licensed under the Elastic License;
+rem you may not use this file except in compliance with the Elastic License.
+
+PUSHD "%~dp0"
+CALL "%~dp0.in.bat" org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool %*
+POPD
@@ -141,8 +141,12 @@ public class Security implements ActionPlugin {
         }

         modules.add(new AuthenticationModule(settings));
+        modules.add(new AuthorizationModule(settings));
         if (enabled == false) {
             modules.add(new SecurityModule(settings, securityLicenseState));
             modules.add(new CryptoModule(settings));
+            modules.add(new AuditTrailModule(settings));
+            modules.add(new SecurityTransportModule(settings));
             return modules;
         }
+
@@ -152,7 +156,6 @@ public class Security implements ActionPlugin {
         securityLicenseState = new SecurityLicenseState();
         modules.add(new SecurityModule(settings, securityLicenseState));
         modules.add(new CryptoModule(settings));
-        modules.add(new AuthorizationModule(settings));
         modules.add(new AuditTrailModule(settings));
         modules.add(new SecurityRestModule(settings));
         modules.add(new SecurityActionModule(settings));
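Note: the net effect of the two Security hunks is a reordering: AuthorizationModule is now registered alongside AuthenticationModule before the disabled-security early return, rather than only in the fully enabled branch. A hedged, generic sketch of the resulting control flow (the method shape and type parameter are illustrative assumptions, not the actual plugin code):

import java.util.ArrayList;
import java.util.List;

class ModuleAssemblySketch<M> {
    // Some components are registered unconditionally, a reduced set is added
    // when the feature is disabled, and the method returns early in that case.
    List<M> assemble(boolean enabled, M always1, M always2, List<M> disabledOnly, List<M> enabledOnly) {
        List<M> modules = new ArrayList<>();
        modules.add(always1); // AuthenticationModule in the diff
        modules.add(always2); // AuthorizationModule, newly unconditional
        if (enabled == false) {
            modules.addAll(disabledOnly); // Security/Crypto/AuditTrail/SecurityTransport modules
            return modules;
        }
        modules.addAll(enabledOnly);
        return modules;
    }
}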
@@ -13,14 +13,21 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.security.audit.AuditTrailService;
 import org.elasticsearch.xpack.security.authc.Realm;
 import org.elasticsearch.xpack.security.authc.Realms;
 import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
 import org.elasticsearch.xpack.XPackFeatureSet;
+import org.elasticsearch.xpack.security.authz.store.RolesStore;
+import org.elasticsearch.xpack.security.crypto.CryptoService;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+import org.elasticsearch.xpack.security.transport.netty.SecurityNettyHttpServerTransport;
+import org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -30,16 +37,33 @@ import java.util.stream.Collectors;
  */
 public class SecurityFeatureSet implements XPackFeatureSet {

+    private final Settings settings;
     private final boolean enabled;
     private final SecurityLicenseState licenseState;
-    private final @Nullable Realms realms;
+    @Nullable
+    private final Realms realms;
+    @Nullable
+    private final RolesStore rolesStore;
+    @Nullable
+    private final IPFilter ipFilter;
+    @Nullable
+    private final AuditTrailService auditTrailService;
+    @Nullable
+    private final CryptoService cryptoService;

     @Inject
     public SecurityFeatureSet(Settings settings, @Nullable SecurityLicenseState licenseState,
-                              @Nullable Realms realms, NamedWriteableRegistry namedWriteableRegistry) {
+                              @Nullable Realms realms, NamedWriteableRegistry namedWriteableRegistry, @Nullable RolesStore rolesStore,
+                              @Nullable IPFilter ipFilter, @Nullable AuditTrailService auditTrailService,
+                              @Nullable CryptoService cryptoService) {
         this.enabled = Security.enabled(settings);
         this.licenseState = licenseState;
         this.realms = realms;
+        this.rolesStore = rolesStore;
+        this.settings = settings;
+        this.ipFilter = ipFilter;
+        this.auditTrailService = auditTrailService;
+        this.cryptoService = cryptoService;
         namedWriteableRegistry.register(Usage.class, Usage.writeableName(Security.NAME), Usage::new);
     }

@@ -66,7 +90,12 @@ public class SecurityFeatureSet implements XPackFeatureSet {
     @Override
     public XPackFeatureSet.Usage usage() {
         List<Map<String, Object>> enabledRealms = buildEnabledRealms(realms);
-        return new Usage(available(), enabled(), enabledRealms);
+        Map<String, Object> rolesStoreUsage = rolesStoreUsage(rolesStore);
+        Map<String, Object> sslUsage = sslUsage(settings);
+        Map<String, Object> auditUsage = auditUsage(auditTrailService);
+        Map<String, Object> ipFilterUsage = ipFilterUsage(ipFilter);
+        boolean hasSystemKey = systemKeyUsage(cryptoService);
+        return new Usage(available(), enabled(), enabledRealms, rolesStoreUsage, sslUsage, auditUsage, ipFilterUsage, hasSystemKey);
     }

     static List<Map<String, Object>> buildEnabledRealms(Realms realms) {
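Note: usage() now assembles one map per security feature via static helpers. Because the feature set's collaborators are injected @Nullable (security may be disabled), each helper guards against null before asking the service for stats; the concrete helpers appear in the following hunk. A generic sketch of that guard idiom (names here are illustrative, not the actual helpers):

import java.util.Collections;
import java.util.Map;
import java.util.function.Supplier;

class UsageGuardSketch {
    // Mirrors rolesStoreUsage/auditUsage/ipFilterUsage: a missing service
    // reports an empty map rather than throwing a NullPointerException.
    static Map<String, Object> usageOrEmpty(Object service, Supplier<Map<String, Object>> stats) {
        return service == null ? Collections.emptyMap() : stats.get();
    }
}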
@@ -84,26 +113,86 @@ public class SecurityFeatureSet implements XPackFeatureSet {
         return enabledRealms;
     }

+    static Map<String, Object> rolesStoreUsage(@Nullable RolesStore rolesStore) {
+        if (rolesStore == null) {
+            return Collections.emptyMap();
+        }
+        return rolesStore.usageStats();
+    }
+
+    static Map<String, Object> sslUsage(Settings settings) {
+        Map<String, Object> map = new HashMap<>(2);
+        map.put("http", Collections.singletonMap("enabled", SecurityNettyHttpServerTransport.SSL_SETTING.get(settings)));
+        map.put("transport", Collections.singletonMap("enabled", SecurityNettyTransport.SSL_SETTING.get(settings)));
+        return map;
+    }
+
+    static Map<String, Object> auditUsage(@Nullable AuditTrailService auditTrailService) {
+        if (auditTrailService == null) {
+            return Collections.emptyMap();
+        }
+        return auditTrailService.usageStats();
+    }
+
+    static Map<String, Object> ipFilterUsage(@Nullable IPFilter ipFilter) {
+        if (ipFilter == null) {
+            return Collections.emptyMap();
+        }
+        return ipFilter.usageStats();
+    }
+
+    static boolean systemKeyUsage(CryptoService cryptoService) {
+        // we can piggy back on the encryption enabled method as it is only enabled if there is a system key
+        return cryptoService.encryptionEnabled();
+    }
+
     static class Usage extends XPackFeatureSet.Usage {

         private static final String ENABLED_REALMS_XFIELD = "enabled_realms";
+        private static final String ROLES_XFIELD = "roles";
+        private static final String SSL_XFIELD = "ssl";
+        private static final String AUDIT_XFIELD = "audit";
+        private static final String IP_FILTER_XFIELD = "ipfilter";
+        private static final String SYSTEM_KEY_XFIELD = "system_key";

         private List<Map<String, Object>> enabledRealms;
+        private Map<String, Object> rolesStoreUsage;
+        private Map<String, Object> sslUsage;
+        private Map<String, Object> auditUsage;
+        private Map<String, Object> ipFilterUsage;
+        private boolean hasSystemKey;

         public Usage(StreamInput in) throws IOException {
             super(in);
             enabledRealms = in.readList(StreamInput::readMap);
+            rolesStoreUsage = in.readMap();
+            sslUsage = in.readMap();
+            auditUsage = in.readMap();
+            ipFilterUsage = in.readMap();
+            hasSystemKey = in.readBoolean();
         }

-        public Usage(boolean available, boolean enabled, List<Map<String, Object>> enabledRealms) {
+        public Usage(boolean available, boolean enabled, List<Map<String, Object>> enabledRealms, Map<String, Object> rolesStoreUsage,
+                     Map<String, Object> sslUsage, Map<String, Object> auditUsage, Map<String, Object> ipFilterUsage,
+                     boolean hasSystemKey) {
             super(Security.NAME, available, enabled);
             this.enabledRealms = enabledRealms;
+            this.rolesStoreUsage = rolesStoreUsage;
+            this.sslUsage = sslUsage;
+            this.auditUsage = auditUsage;
+            this.ipFilterUsage = ipFilterUsage;
+            this.hasSystemKey = hasSystemKey;
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
             out.writeList(enabledRealms.stream().map((m) -> (Writeable) o -> o.writeMap(m)).collect(Collectors.toList()));
+            out.writeMap(rolesStoreUsage);
+            out.writeMap(sslUsage);
+            out.writeMap(auditUsage);
+            out.writeMap(ipFilterUsage);
+            out.writeBoolean(hasSystemKey);
         }

         @Override
@@ -111,6 +200,11 @@ public class SecurityFeatureSet implements XPackFeatureSet {
             super.innerXContent(builder, params);
             if (enabled) {
                 builder.field(ENABLED_REALMS_XFIELD, enabledRealms);
+                builder.field(ROLES_XFIELD, rolesStoreUsage);
+                builder.field(SSL_XFIELD, sslUsage);
+                builder.field(AUDIT_XFIELD, auditUsage);
+                builder.field(IP_FILTER_XFIELD, ipFilterUsage);
+                builder.field(SYSTEM_KEY_XFIELD, hasSystemKey);
             }
         }
     }
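Note: the stream constructor and writeTo above must remain mirror images: the five maps and the boolean are read back in exactly the order they are written, and each realm map is serialized by wrapping it in a Writeable lambda. A reduced round-trip sketch of that symmetry, using the stream calls visible in the diff (the class name is illustrative):

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

class WireSymmetrySketch {
    static void write(StreamOutput out, List<Map<String, Object>> realms, Map<String, Object> roles) throws IOException {
        // each map becomes a Writeable whose writeTo emits the map
        out.writeList(realms.stream().map(m -> (Writeable) o -> o.writeMap(m)).collect(Collectors.toList()));
        out.writeMap(roles);
    }

    static void read(StreamInput in) throws IOException {
        List<Map<String, Object>> realms = in.readList(StreamInput::readMap); // mirrors writeList
        Map<String, Object> roles = in.readMap();                             // mirrors writeMap
    }
}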
@@ -76,7 +76,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust
         if (nativeUserStore.canStart(event.state(), master)) {
             threadPool.generic().execute(new AbstractRunnable() {
                 @Override
-                public void onFailure(Throwable throwable) {
+                public void onFailure(Exception throwable) {
                     logger.error("failed to start native user store service", throwable);
                     assert false : "security lifecycle services startup failed";
                 }
@@ -95,7 +95,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust
         if (nativeRolesStore.canStart(event.state(), master)) {
             threadPool.generic().execute(new AbstractRunnable() {
                 @Override
-                public void onFailure(Throwable throwable) {
+                public void onFailure(Exception throwable) {
                     logger.error("failed to start native roles store services", throwable);
                     assert false : "security lifecycle services startup failed";
                 }
@@ -117,7 +117,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust
             threadPool.generic().execute(new AbstractRunnable() {

                 @Override
-                public void onFailure(Throwable throwable) {
+                public void onFailure(Exception throwable) {
                     logger.error("failed to start index audit trail services", throwable);
                     assert false : "security lifecycle services startup failed";
                 }
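Note: the recurring Throwable-to-Exception change in this commit tracks the narrowed core listener API in 5.0: AbstractRunnable.onFailure (and ActionListener.onFailure) now accept Exception, so fatal Errors are no longer funneled through recovery callbacks. A minimal sketch of the updated override shape (class body is illustrative):

import org.elasticsearch.common.util.concurrent.AbstractRunnable;

class StartupTaskSketch extends AbstractRunnable {
    @Override
    public void onFailure(Exception e) {
        // handle the failure; Errors such as OutOfMemoryError now bypass this hook
    }

    @Override
    protected void doRun() throws Exception {
        // start the service here
    }
}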
@@ -90,8 +90,8 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
         if (createTemplate && templateCreationPending.compareAndSet(false, true)) {
             threadPool.generic().execute(new AbstractRunnable() {
                 @Override
-                public void onFailure(Throwable t) {
-                    logger.warn("failed to create security index template", t);
+                public void onFailure(Exception e) {
+                    logger.warn("failed to create security index template", e);
                     templateCreationPending.set(false);
                 }

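Note: independent of the signature change, this hunk preserves the template service's retry contract: compareAndSet(false, true) lets only one generic-pool task attempt template creation, and the failure handler clears the flag so a later cluster-state update can retry. The guard pattern in isolation, as a sketch:

import java.util.concurrent.atomic.AtomicBoolean;

class OneShotRetryGuard {
    private final AtomicBoolean pending = new AtomicBoolean(false);

    // The first caller wins the CAS and runs the work; a failure resets the
    // flag so the next trigger is allowed to try again.
    void maybeRun(Runnable work) {
        if (pending.compareAndSet(false, true)) {
            try {
                work.run();
            } catch (Exception e) {
                pending.set(false);
            }
        }
    }
}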
@@ -44,9 +44,6 @@ import java.util.function.Predicate;

 import static org.elasticsearch.xpack.security.support.Exceptions.authorizationError;

-/**
- *
- */
 public class SecurityActionFilter extends AbstractComponent implements ActionFilter {

     private static final Predicate<String> LICENSE_EXPIRATION_ACTION_MATCHER = HealthAndStatsPrivilege.INSTANCE.predicate();
@@ -109,8 +106,8 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil
             } else {
                 chain.proceed(task, action, request, listener);
             }
-        } catch (Throwable t) {
-            listener.onFailure(t);
+        } catch (Exception e) {
+            listener.onFailure(e);
         }
     }

@@ -231,7 +228,7 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             if (threadContext != null) {
                 threadContext.restore();
             }
@@ -44,7 +44,7 @@ public class TransportDeleteRoleAction extends HandledTransportAction<DeleteRole
             }

             @Override
-            public void onFailure(Throwable t) {
+            public void onFailure(Exception t) {
                 listener.onFailure(t);
             }
         });
@@ -77,7 +77,7 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
                 }

                 @Override
-                public void onFailure(Throwable t) {
+                public void onFailure(Exception t) {
                     logger.error("failed to retrieve role [{}]", t, rolename);
                     listener.onFailure(t);
                 }
@@ -95,7 +95,7 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
                 }

                 @Override
-                public void onFailure(Throwable t) {
+                public void onFailure(Exception t) {
                     logger.error("failed to retrieve role [{}]", t,
                             Strings.arrayToDelimitedString(request.names(), ","));
                     listener.onFailure(t);
@@ -48,7 +48,7 @@ public class TransportPutRoleAction extends HandledTransportAction<PutRoleReques
             }

             @Override
-            public void onFailure(Throwable t) {
+            public void onFailure(Exception t) {
                 listener.onFailure(t);
             }
         });
@@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.security.authc.support.CharArrays;
@@ -80,7 +81,7 @@ public class ChangePasswordRequest extends ActionRequest<ChangePasswordRequest>
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         username = in.readString();
-        passwordHash = CharArrays.utf8BytesToChars(in.readBytesReference().array());
+        passwordHash = CharArrays.utf8BytesToChars(BytesReference.toBytes(in.readBytesReference()));
         refreshPolicy = RefreshPolicy.readFrom(in);
     }

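Note: readBytesReference().array() was only safe when the reference happened to be backed by a single contiguous array; a BytesReference may be a paged composite. BytesReference.toBytes(...) materializes it into one byte[] regardless of the backing, which is why both request classes switch to it here. The call in isolation:

import org.elasticsearch.common.bytes.BytesReference;

class BytesSketch {
    // Copies a possibly paged reference into a single contiguous byte[].
    // Unlike calling array() directly, this works for composite references too.
    static byte[] materialize(BytesReference ref) {
        return BytesReference.toBytes(ref);
    }
}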
@@ -127,7 +127,7 @@ public class PutUserRequest extends ActionRequest<PutUserRequest> implements Use
         if (passwordHashRef == BytesArray.EMPTY) {
             passwordHash = null;
         } else {
-            passwordHash = CharArrays.utf8BytesToChars(passwordHashRef.array());
+            passwordHash = CharArrays.utf8BytesToChars(BytesReference.toBytes(passwordHashRef));
         }
         roles = in.readStringArray();
         fullName = in.readOptionalString();
@@ -79,6 +79,11 @@ public class PutUserRequestBuilder extends ActionRequestBuilder<PutUserRequest,
         return this;
     }

+    public PutUserRequestBuilder passwordHash(char[] passwordHash) {
+        request.passwordHash(passwordHash);
+        return this;
+    }
+
     public PutUserRequestBuilder source(String username, BytesReference source) throws IOException {
         username(username);
         try (XContentParser parser = XContentHelper.createParser(source)) {
@@ -99,6 +104,14 @@ public class PutUserRequestBuilder extends ActionRequestBuilder<PutUserRequest,
                     throw new ElasticsearchParseException(
                             "expected field [{}] to be of type string, but found [{}] instead", currentFieldName, token);
                 }
+            } else if (ParseFieldMatcher.STRICT.match(currentFieldName, User.Fields.PASSWORD_HASH)) {
+                if (token == XContentParser.Token.VALUE_STRING) {
+                    char[] passwordChars = parser.text().toCharArray();
+                    passwordHash(passwordChars);
+                } else {
+                    throw new ElasticsearchParseException(
+                            "expected field [{}] to be of type string, but found [{}] instead", currentFieldName, token);
+                }
             } else if (ParseFieldMatcher.STRICT.match(currentFieldName, User.Fields.ROLES)) {
                 if (token == XContentParser.Token.VALUE_STRING) {
                     roles(Strings.commaDelimitedListToStringArray(parser.text()));
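Note: with the new builder method and parser branch, a user can be created from a precomputed hash so the cleartext password never has to cross the wire. A hedged usage sketch; obtaining the PutUserRequestBuilder from the security client is elided, the helper for loading the hash is hypothetical, and the chained calls assume the builder methods visible in this class return the builder:

// 'builder' is a PutUserRequestBuilder obtained elsewhere (assumption).
char[] hash = loadPrecomputedHash(); // e.g. a bcrypt hash from secure storage (hypothetical helper)
builder.username("jdoe")
       .passwordHash(hash)
       .roles("monitoring_user")
       .get();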
@@ -12,7 +12,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
-import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
 import org.elasticsearch.xpack.security.user.AnonymousUser;
 import org.elasticsearch.xpack.security.user.SystemUser;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -50,7 +49,7 @@ public class TransportChangePasswordAction extends HandledTransportAction<Change
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });
@@ -54,7 +54,7 @@ public class TransportDeleteUserAction extends HandledTransportAction<DeleteUser
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });
Some files were not shown because too many files have changed in this diff.