Merge branch 'master' into less_drugs
Original commit: elastic/x-pack-elasticsearch@fde8483759
This commit is contained in:
dec8319d77
@@ -10,7 +10,7 @@ import joptsimple.OptionSpec;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
 import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
 
@@ -54,9 +54,9 @@ public class KeyPairGeneratorTool extends Command {
         Path publicKeyPath = parsePath(publicKeyPathOption.value(options));
         Path privateKeyPath = parsePath(privateKeyPathOption.value(options));
         if (Files.exists(privateKeyPath)) {
-            throw new UserError(ExitCodes.USAGE, privateKeyPath + " already exists");
+            throw new UserException(ExitCodes.USAGE, privateKeyPath + " already exists");
         } else if (Files.exists(publicKeyPath)) {
-            throw new UserError(ExitCodes.USAGE, publicKeyPath + " already exists");
+            throw new UserException(ExitCodes.USAGE, publicKeyPath + " already exists");
         }
 
         SecureRandom random = new SecureRandom();

@@ -12,7 +12,7 @@ import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
@@ -62,9 +62,9 @@ public class LicenseGeneratorTool extends Command {
         Path publicKeyPath = parsePath(publicKeyPathOption.value(options));
         Path privateKeyPath = parsePath(privateKeyPathOption.value(options));
         if (Files.exists(privateKeyPath) == false) {
-            throw new UserError(ExitCodes.USAGE, privateKeyPath + " does not exist");
+            throw new UserException(ExitCodes.USAGE, privateKeyPath + " does not exist");
         } else if (Files.exists(publicKeyPath) == false) {
-            throw new UserError(ExitCodes.USAGE, publicKeyPath + " does not exist");
+            throw new UserException(ExitCodes.USAGE, publicKeyPath + " does not exist");
         }
 
         final License licenseSpec;
@@ -73,11 +73,11 @@ public class LicenseGeneratorTool extends Command {
         } else if (options.has(licenseFileOption)) {
             Path licenseSpecPath = parsePath(licenseFileOption.value(options));
             if (Files.exists(licenseSpecPath) == false) {
-                throw new UserError(ExitCodes.USAGE, licenseSpecPath + " does not exist");
+                throw new UserException(ExitCodes.USAGE, licenseSpecPath + " does not exist");
             }
             licenseSpec = License.fromSource(Files.readAllBytes(licenseSpecPath));
         } else {
-            throw new UserError(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
+            throw new UserException(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
         }
 
         // sign

@@ -12,7 +12,7 @@ import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
@@ -49,7 +49,7 @@ public class LicenseVerificationTool extends Command {
     protected void execute(Terminal terminal, OptionSet options) throws Exception {
         Path publicKeyPath = parsePath(publicKeyPathOption.value(options));
         if (Files.exists(publicKeyPath) == false) {
-            throw new UserError(ExitCodes.USAGE, publicKeyPath + " does not exist");
+            throw new UserException(ExitCodes.USAGE, publicKeyPath + " does not exist");
         }
 
         final License licenseSpec;
@@ -58,16 +58,16 @@ public class LicenseVerificationTool extends Command {
         } else if (options.has(licenseFileOption)) {
             Path licenseSpecPath = parsePath(licenseFileOption.value(options));
             if (Files.exists(licenseSpecPath) == false) {
-                throw new UserError(ExitCodes.USAGE, licenseSpecPath + " does not exist");
+                throw new UserException(ExitCodes.USAGE, licenseSpecPath + " does not exist");
             }
             licenseSpec = License.fromSource(Files.readAllBytes(licenseSpecPath));
         } else {
-            throw new UserError(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
+            throw new UserException(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
         }
 
         // verify
         if (LicenseVerifier.verifyLicense(licenseSpec, Files.readAllBytes(publicKeyPath)) == false) {
-            throw new UserError(ExitCodes.DATA_ERROR, "Invalid License!");
+            throw new UserException(ExitCodes.DATA_ERROR, "Invalid License!");
         }
         XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
         builder.startObject();
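Taken together, the hunks above are a mechanical rename of UserError to UserException in the licensing CLI tools: input paths that do not exist and output paths that already exist are still reported as usage errors, only the exception type changes. The following is a minimal sketch of that validation pattern; the helper class and method names are illustrative and not part of the repository.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;

// Hypothetical helper mirroring the path checks in the tools above: a missing
// input file or a pre-existing output file is surfaced as a usage error
// (ExitCodes.USAGE) by throwing UserException.
class PathChecks {

    static Path requireExisting(String path) throws UserException {
        Path p = Paths.get(path);
        if (Files.exists(p) == false) {
            throw new UserException(ExitCodes.USAGE, p + " does not exist");
        }
        return p;
    }

    static Path requireAbsent(String path) throws UserException {
        Path p = Paths.get(path);
        if (Files.exists(p)) {
            throw new UserException(ExitCodes.USAGE, p + " already exists");
        }
        return p;
    }
}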
@@ -11,9 +11,7 @@ import java.nio.file.Path;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.CommandTestCase;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
-import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.cli.UserException;
 
 import static org.hamcrest.CoreMatchers.containsString;
 
@@ -27,12 +25,12 @@ public class KeyPairGenerationToolTests extends CommandTestCase {
     public void testMissingKeyPaths() throws Exception {
         Path exists = createTempFile("", "existing");
         Path dne = createTempDir().resolve("dne");
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", exists.toString(), "--privateKeyPath", dne.toString());
         });
         assertThat(e.getMessage(), containsString("existing"));
         assertEquals(ExitCodes.USAGE, e.exitCode);
-        e = expectThrows(UserError.class, () -> {
+        e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", dne.toString(), "--privateKeyPath", exists.toString());
         });
         assertThat(e.getMessage(), containsString("existing"));

@@ -12,12 +12,9 @@ import java.nio.file.Path;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.CommandTestCase;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
-import org.elasticsearch.cli.MockTerminal;
-import org.elasticsearch.cli.Terminal;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.license.core.License;
 import org.elasticsearch.license.licensor.TestUtils;
-import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
 
 public class LicenseGenerationToolTests extends CommandTestCase {
@@ -38,14 +35,14 @@ public class LicenseGenerationToolTests extends CommandTestCase {
     public void testMissingKeyPaths() throws Exception {
         Path pub = createTempDir().resolve("pub");
         Path pri = createTempDir().resolve("pri");
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pub.toString(), "--privateKeyPath", pri.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("pri does not exist"));
         assertEquals(ExitCodes.USAGE, e.exitCode);
 
         Files.createFile(pri);
-        e = expectThrows(UserError.class, () -> {
+        e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pub.toString(), "--privateKeyPath", pri.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("pub does not exist"));
@@ -53,7 +50,7 @@ public class LicenseGenerationToolTests extends CommandTestCase {
     }
 
     public void testMissingLicenseSpec() throws Exception {
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pubKeyPath.toString(), "--privateKeyPath", priKeyPath.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("Must specify either --license or --licenseFile"));

@@ -12,7 +12,7 @@ import java.nio.file.Path;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.CommandTestCase;
 import org.elasticsearch.cli.ExitCodes;
-import org.elasticsearch.cli.UserError;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.license.core.License;
 import org.elasticsearch.license.licensor.TestUtils;
@@ -36,7 +36,7 @@ public class LicenseVerificationToolTests extends CommandTestCase {
 
     public void testMissingKeyPath() throws Exception {
         Path pub = createTempDir().resolve("pub");
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pub.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("pub does not exist"));
@@ -44,7 +44,7 @@ public class LicenseVerificationToolTests extends CommandTestCase {
     }
 
     public void testMissingLicenseSpec() throws Exception {
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
             execute("--publicKeyPath", pubKeyPath.toString());
         });
         assertTrue(e.getMessage(), e.getMessage().contains("Must specify either --license or --licenseFile"));
@@ -56,7 +56,7 @@ public class LicenseVerificationToolTests extends CommandTestCase {
         License tamperedLicense = License.builder()
                 .fromLicenseSpec(signedLicense, signedLicense.signature())
                 .expiryDate(signedLicense.expiryDate() + randomIntBetween(1, 1000)).build();
-        UserError e = expectThrows(UserError.class, () -> {
+        UserException e = expectThrows(UserException.class, () -> {
            execute("--publicKeyPath", pubKeyPath.toString(),
                     "--license", TestUtils.dumpLicense(tamperedLicense));
         });
@@ -14,18 +14,18 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.GeneralScriptException;
+import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.watcher.condition.Condition;
+import org.elasticsearch.xpack.common.ScriptServiceProxy;
 import org.elasticsearch.xpack.watcher.condition.script.ExecutableScriptCondition;
 import org.elasticsearch.xpack.watcher.condition.script.ScriptCondition;
 import org.elasticsearch.xpack.watcher.condition.script.ScriptConditionFactory;
 import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
 import org.elasticsearch.xpack.watcher.support.Script;
-import org.elasticsearch.xpack.common.ScriptServiceProxy;
 import org.elasticsearch.xpack.watcher.watch.Payload;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -41,12 +41,10 @@ import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalArgument
 import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.mockExecutionContext;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 
-/**
- */
 public class ScriptConditionTests extends ESTestCase {
-    ThreadPool tp = null;
+    private ThreadPool tp = null;
 
     @Before
     public void init() {
@@ -54,8 +52,8 @@ public class ScriptConditionTests extends ESTestCase {
     }
 
     @After
-    public void cleanup() {
-        tp.shutdownNow();
+    public void cleanup() throws InterruptedException {
+        terminate(tp);
     }
 
     public void testExecute() throws Exception {
@@ -136,13 +134,8 @@ public class ScriptConditionTests extends ESTestCase {
         XContentParser parser = XContentFactory.xContent(builder.bytes()).createParser(builder.bytes());
         parser.nextToken();
         ScriptCondition scriptCondition = conditionParser.parseCondition("_watch", parser);
-        try {
-            conditionParser.createExecutable(scriptCondition);
-            fail("expected a condition validation exception trying to create an executable with a bad or missing script");
-        } catch (GeneralScriptException e) {
-            // TODO add these when the test if fixed
-            // assertThat(e.getMessage(), is("ASDF"));
-        }
+        GeneralScriptException exception = expectThrows(GeneralScriptException.class,
+                () -> conditionParser.createExecutable(scriptCondition));
     }
 
     public void testScriptConditionParser_badLang() throws Exception {
@@ -153,39 +146,30 @@ public class ScriptConditionTests extends ESTestCase {
         XContentParser parser = XContentFactory.xContent(builder.bytes()).createParser(builder.bytes());
         parser.nextToken();
         ScriptCondition scriptCondition = conditionParser.parseCondition("_watch", parser);
-        try {
-            conditionParser.createExecutable(scriptCondition);
-            fail("expected a condition validation exception trying to create an executable with an invalid language");
-        } catch (GeneralScriptException e) {
-            // TODO add these when the test if fixed
-            // assertThat(e.getMessage(), is("ASDF"));
-        }
+        GeneralScriptException exception = expectThrows(GeneralScriptException.class,
+                () -> conditionParser.createExecutable(scriptCondition));
+        assertThat(exception.getMessage(), containsString("script_lang not supported [not_a_valid_lang]]"));
     }
 
     public void testScriptConditionThrowException() throws Exception {
         ScriptServiceProxy scriptService = getScriptServiceProxy(tp);
         ExecutableScriptCondition condition = new ExecutableScriptCondition(
-                new ScriptCondition(Script.inline("assert false").build()), logger, scriptService);
+                new ScriptCondition(Script.inline("null.foo").build()), logger, scriptService);
         SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]);
         WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response));
-        ScriptCondition.Result result = condition.execute(ctx);
-        assertThat(result, notNullValue());
-        assertThat(result.status(), is(Condition.Result.Status.FAILURE));
-        assertThat(result.reason(), notNullValue());
-        assertThat(result.reason(), containsString("Assertion"));
+        ScriptException exception = expectThrows(ScriptException.class, () -> condition.execute(ctx));
+        assertThat(exception.getMessage(), containsString("Error evaluating null.foo"));
     }
 
-    public void testScriptConditionReturnObject() throws Exception {
+    public void testScriptConditionReturnObjectThrowsException() throws Exception {
         ScriptServiceProxy scriptService = getScriptServiceProxy(tp);
         ExecutableScriptCondition condition = new ExecutableScriptCondition(
                 new ScriptCondition(Script.inline("return new Object()").build()), logger, scriptService);
         SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]);
         WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response));
-        ScriptCondition.Result result = condition.execute(ctx);
-        assertThat(result, notNullValue());
-        assertThat(result.status(), is(Condition.Result.Status.FAILURE));
-        assertThat(result.reason(), notNullValue());
-        assertThat(result.reason(), containsString("ScriptException"));
+        Exception exception = expectThrows(GeneralScriptException.class, () -> condition.execute(ctx));
+        assertThat(exception.getMessage(),
+                containsString("condition [script] must return a boolean value (true|false) but instead returned [_name]"));
    }
 
    public void testScriptConditionAccessCtx() throws Exception {
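The recurring test change in this file swaps the try / fail / catch idiom for expectThrows, which returns the thrown exception so its message can be asserted directly. A hedged sketch of the two styles follows; the parseBadScript helper and the test class are illustrative, not code from the repository.

import static org.hamcrest.Matchers.containsString;

import org.elasticsearch.script.GeneralScriptException;
import org.elasticsearch.test.ESTestCase;

public class ExpectThrowsStyleTests extends ESTestCase {

    // Old style: call, fail() if nothing was thrown, catch the expected type.
    // Message assertions tended to be skipped or commented out here.
    public void testOldStyle() {
        try {
            parseBadScript();
            fail("expected a validation exception");
        } catch (GeneralScriptException e) {
            // expected
        }
    }

    // New style: expectThrows captures the exception and the test asserts on it.
    public void testNewStyle() {
        GeneralScriptException e = expectThrows(GeneralScriptException.class, () -> parseBadScript());
        assertThat(e.getMessage(), containsString("not supported"));
    }

    // Hypothetical helper standing in for the parser calls in the real tests.
    private void parseBadScript() {
        throw new GeneralScriptException("script_lang not supported [not_a_valid_lang]");
    }
}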
@@ -420,7 +420,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
     // weakest weights.
     // A priority queue is used to trim vertices according to the size settings
     // requested for each field.
-    private final void trimNewAdditions(Hop currentHop, ArrayList<Connection> newConnections, ArrayList<Vertex> newVertices) {
+    private void trimNewAdditions(Hop currentHop, ArrayList<Connection> newConnections, ArrayList<Vertex> newVertices) {
         Set<Vertex> evictions = new HashSet<>();
 
         for (int k = 0; k < currentHop.getNumberVertexRequests(); k++) {
@@ -460,7 +460,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
     // we can do something server-side here
 
     // Helper method - compute the total signal of all scores in the search results
-    private final double getExpandTotalSignalStrength(Hop lastHop, Hop currentHop, Sampler sample) {
+    private double getExpandTotalSignalStrength(Hop lastHop, Hop currentHop, Sampler sample) {
         double totalSignalOutput = 0;
         for (int j = 0; j < lastHop.getNumberVertexRequests(); j++) {
             VertexRequest lastVr = lastHop.getVertexRequest(j);
@@ -509,7 +509,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });
@@ -688,7 +688,7 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
     }
 
     // Helper method - Provides a total signal strength for all terms connected to the initial query
-    private final double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) {
+    private double getInitialTotalSignalStrength(Hop rootHop, Sampler sample) {
         double totalSignalStrength = 0;
         for (int i = 0; i < rootHop.getNumberVertexRequests(); i++) {
             if (request.useSignificance()) {
@@ -711,13 +711,13 @@ public class TransportGraphExploreAction extends HandledTransportAction<GraphExp
                 }
 
                 @Override
-                public void onFailure(Throwable e) {
+                public void onFailure(Exception e) {
                     listener.onFailure(e);
                 }
             });
-        } catch (Throwable t) {
-            logger.error("unable to execute the graph query", t);
-            listener.onFailure(t);
+        } catch (Exception e) {
+            logger.error("unable to execute the graph query", e);
+            listener.onFailure(e);
         }
     }
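The pattern running through this file, and through most of the production-code hunks below, is the core API change of ActionListener.onFailure and surrounding catch blocks from Throwable to Exception. A minimal sketch of a listener written against the new signature, with illustrative type parameters and class name:

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchResponse;

// Sketch only: the listener now receives an Exception, and failures raised
// while handling the request are forwarded unchanged to the wrapped listener.
class ForwardingListener implements ActionListener<SearchResponse> {

    private final ActionListener<SearchResponse> delegate;

    ForwardingListener(ActionListener<SearchResponse> delegate) {
        this.delegate = delegate;
    }

    @Override
    public void onResponse(SearchResponse response) {
        delegate.onResponse(response);
    }

    @Override
    public void onFailure(Exception e) {  // was: onFailure(Throwable e)
        delegate.onFailure(e);
    }
}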
@@ -270,11 +270,10 @@ public class GraphTests extends ESSingleNodeTestCase {
         try {
             GraphExploreResponse response = grb.get();
             if (response.getShardFailures().length > 0) {
-                throw ((ShardSearchFailure) response.getShardFailures()[0]).getCause();
+                expectedError = response.getShardFailures()[0].getCause();
             }
-        } catch (Throwable rte) {
+        } catch (Exception rte) {
             expectedError = rte;
-
         }
         assertNotNull(expectedError);
         String message = expectedError.toString();

@@ -59,7 +59,7 @@ public class TransportDeleteLicenseAction extends TransportMasterNodeAction<Dele
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });

@@ -61,7 +61,7 @@ public class TransportPutLicenseAction extends TransportMasterNodeAction<PutLice
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 listener.onFailure(e);
             }
         });

@@ -427,8 +427,8 @@ public class LicensesService extends AbstractLifecycleComponent implements Clust
             }
 
             @Override
-            public void onFailure(String source, @Nullable Throwable t) {
-                logger.error("unexpected failure during [{}]", t, source);
+            public void onFailure(String source, @Nullable Exception e) {
+                logger.error("unexpected failure during [{}]", e, source);
             }
 
         });

@@ -88,8 +88,8 @@ public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCas
             }
 
             @Override
-            public void onFailure(String source, @Nullable Throwable t) {
-                logger.error("error on metaData cleanup after test", t);
+            public void onFailure(String source, @Nullable Exception e) {
+                logger.error("error on metaData cleanup after test", e);
             }
         });
         latch.await();

@@ -146,7 +146,7 @@ public class TestUtils {
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 latch.countDown();
             }
         });

@@ -157,7 +157,7 @@ public class LicensesAcknowledgementTests extends ESSingleNodeTestCase {
             }
 
             @Override
-            public void onFailure(Throwable throwable) {
+            public void onFailure(Exception throwable) {
                 latch.countDown();
             }
         }

@@ -174,7 +174,7 @@ public class LicensesManagerServiceTests extends ESSingleNodeTestCase {
             }
 
             @Override
-            public void onFailure(Throwable throwable) {
+            public void onFailure(Exception throwable) {
                 latch.countDown();
             }
         });

@@ -84,12 +84,12 @@ public class MonitoringBulkResponse extends ActionResponse {
         }
 
         Error(StreamInput in) throws IOException {
-            this(in.<Throwable>readThrowable());
+            this(in.readException());
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeThrowable(getCause());
+            out.writeException(getCause());
         }
 
         /**
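The serialization change follows the same Throwable-to-Exception move: StreamInput.readThrowable and StreamOutput.writeThrowable are replaced by readException and writeException. Below is a hedged sketch of a tiny error wrapper using the new calls; the wrapper class itself is illustrative and not the actual MonitoringBulkResponse.Error.

import java.io.IOException;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

// Illustrative wrapper: the cause is now an Exception and travels over the
// wire via readException/writeException.
final class WireError implements Writeable {

    private final Exception cause;

    WireError(Exception cause) {
        this.cause = cause;
    }

    WireError(StreamInput in) throws IOException {
        this.cause = in.readException();       // was: in.<Throwable>readThrowable()
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeException(cause);             // was: out.writeThrowable(cause)
    }
}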
@@ -114,8 +114,8 @@ public class TransportMonitoringBulkAction extends HandledTransportAction<Monito
             }
 
             @Override
-            public void onFailure(Throwable t) {
-                listener.onResponse(new MonitoringBulkResponse(buildTookInMillis(startTimeNanos), new MonitoringBulkResponse.Error(t)));
+            public void onFailure(Exception e) {
+                listener.onResponse(new MonitoringBulkResponse(buildTookInMillis(startTimeNanos), new MonitoringBulkResponse.Error(e)));
             }
         });
     }

@@ -204,8 +204,8 @@ public class AgentService extends AbstractLifecycleComponent {
                 } catch (InterruptedException e) {
                     logger.trace("interrupted");
                     Thread.currentThread().interrupt();
-                } catch (Throwable t) {
-                    logger.error("background thread had an uncaught exception", t);
+                } catch (Exception e) {
+                    logger.error("background thread had an uncaught exception", e);
                 }
             }
             logger.debug("worker shutdown");
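The worker-loop hunk above is representative of the background threads touched by this merge: they restore the interrupt flag on InterruptedException and catch Exception rather than Throwable, so genuinely fatal errors such as OutOfMemoryError are no longer swallowed. A small self-contained sketch of that loop shape, with illustrative names:

// Sketch of the run-until-closed worker loop used by the agent/exporter code above.
final class Worker implements Runnable {

    private volatile boolean closed = false;

    @Override
    public void run() {
        while (closed == false) {
            try {
                doOneUnitOfWork();
            } catch (InterruptedException e) {
                // preserve the interrupt so callers can observe it
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                // recoverable failures are logged and the loop continues;
                // Errors are deliberately not caught and propagate
                e.printStackTrace();
            }
        }
    }

    // Placeholder work item standing in for a collection/export cycle.
    private void doOneUnitOfWork() throws InterruptedException {
        Thread.sleep(100);
    }

    void close() {
        closed = true;
    }
}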
@@ -59,9 +59,6 @@ import java.util.Map;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
 
-/**
- *
- */
 public class HttpExporter extends Exporter {
 
     public static final String TYPE = "http";
@@ -664,9 +661,9 @@ public class HttpExporter extends Exporter {
                     }
                 } catch (InterruptedException e) {
                     // ignore, if closed, good....
-                } catch (Throwable t) {
+                } catch (Exception e) {
                     logger.debug("error in keep alive thread, shutting down (will be restarted after a successful connection has been " +
-                            "made) {}", ExceptionsHelper.detailedMessage(t));
+                            "made) {}", ExceptionsHelper.detailedMessage(e));
                     return;
                 }
             }

@@ -211,8 +211,8 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
             }
 
             @Override
-            public void onFailure(Throwable throwable) {
-                logger.error("failed to update monitoring index template [{}]", throwable, template);
+            public void onFailure(Exception e) {
+                logger.error("failed to update monitoring index template [{}]", e, template);
             }
         });
     }
@@ -296,7 +296,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 logger.error("failed to delete indices", e);
             }
         });

@@ -179,8 +179,8 @@ public class CleanerService extends AbstractLifecycleComponent {
             for (Listener listener : listeners) {
                 try {
                     listener.onCleanUpIndices(retention);
-                } catch (Throwable t) {
-                    logger.error("listener failed to clean indices", t);
+                } catch (Exception e) {
+                    logger.error("listener failed to clean indices", e);
                 }
             }
 
@@ -209,8 +209,8 @@ public class CleanerService extends AbstractLifecycleComponent {
         }
 
         @Override
-        public void onFailure(Throwable t) {
-            logger.error("failed to clean indices", t);
+        public void onFailure(Exception e) {
+            logger.error("failed to clean indices", e);
         }
 
         /**

@@ -81,9 +81,9 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
 
             threads[i] = new Thread(new AbstractRunnable() {
                 @Override
-                public void onFailure(Throwable t) {
-                    logger.error("unexpected error in exporting thread", t);
-                    exceptions.add(t);
+                public void onFailure(Exception e) {
+                    logger.error("unexpected error in exporting thread", e);
+                    exceptions.add(e);
                 }
 
                 @Override

@@ -19,7 +19,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.discovery.DiscoverySettings;
 import org.elasticsearch.test.ESTestCase;
@@ -90,7 +90,8 @@ public class TransportMonitoringBulkActionTests extends ESTestCase {
         clusterService = new ClusterService(Settings.builder().put("cluster.name",
                 TransportMonitoringBulkActionTests.class.getName()).build(),
                 new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool);
-        clusterService.setLocalNode(new DiscoveryNode("node", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        clusterService.setLocalNode(new DiscoveryNode("node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),
+                Version.CURRENT));
         clusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) {
             @Override
             public void connectToAddedNodes(ClusterChangedEvent event) {
@@ -152,8 +153,8 @@ public class TransportMonitoringBulkActionTests extends ESTestCase {
             }
 
             @Override
-            public void onFailure(String source, Throwable t) {
-                fail("unexpected exception: " + t);
+            public void onFailure(String source, Exception e) {
+                fail("unexpected exception: " + e);
             }
         });
 

@@ -296,9 +296,9 @@ public class ExportersTests extends ESTestCase {
             logger.debug("--> exporting thread [{}] exports {} documents", threadNum, threadDocs);
             threads[i] = new Thread(new AbstractRunnable() {
                 @Override
-                public void onFailure(Throwable t) {
-                    logger.error("unexpected error in exporting thread", t);
-                    exceptions.add(t);
+                public void onFailure(Exception e) {
+                    logger.error("unexpected error in exporting thread", e);
+                    exceptions.add(e);
                 }
 
                 @Override

@@ -9,7 +9,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -130,7 +130,7 @@ public class MonitoringDocTests extends ESTestCase {
             }
         }
         Set<DiscoveryNode.Role> roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values())));
-        return new DiscoveryNode(randomAsciiOfLength(5), randomAsciiOfLength(3), randomAsciiOfLength(3), randomAsciiOfLength(3),
-                DummyTransportAddress.INSTANCE, attributes, roles, randomVersion(random()));
+        return new DiscoveryNode(randomAsciiOfLength(5), randomAsciiOfLength(3), LocalTransportAddress.buildUnique(),
+                attributes, roles, randomVersion(random()));
     }
 }
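From here on the diff is dominated by a single test-fixture change: DummyTransportAddress.INSTANCE was removed from core, and tests now build throwaway transport addresses with LocalTransportAddress.buildUnique(). A hedged sketch of the fixture as the updated tests use it; the helper class and method name are illustrative.

import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.LocalTransportAddress;

class NodeFixtures {

    // Each call yields a DiscoveryNode with a fresh, unique local transport
    // address, which is all the resolver and exporter tests below need.
    static DiscoveryNode sourceNode() {
        return new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
    }
}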
@@ -22,7 +22,7 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -439,14 +439,14 @@ public class HttpExporterTests extends MonitoringIntegTestCase {
             IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
             doc.setClusterUUID(internalCluster().getClusterName());
             doc.setTimestamp(System.currentTimeMillis());
-            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
             doc.setRecoveryResponse(new RecoveryResponse());
             return doc;
         } else {
             ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
             doc.setClusterUUID(internalCluster().getClusterName());
             doc.setTimestamp(System.currentTimeMillis());
-            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
             doc.setClusterState(ClusterState.PROTO);
             doc.setStatus(ClusterHealthStatus.GREEN);
             return doc;

@@ -13,7 +13,7 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -198,14 +198,14 @@ public class LocalExporterTests extends MonitoringIntegTestCase {
             IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
             doc.setClusterUUID(internalCluster().getClusterName());
             doc.setTimestamp(System.currentTimeMillis());
-            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
             doc.setRecoveryResponse(new RecoveryResponse());
             return doc;
         } else {
             ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(MonitoredSystem.ES.getSystem(), Version.CURRENT.toString());
             doc.setClusterUUID(internalCluster().getClusterName());
             doc.setTimestamp(System.currentTimeMillis());
-            doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+            doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
             doc.setClusterState(ClusterState.PROTO);
             doc.setStatus(ClusterHealthStatus.GREEN);
             return doc;
@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
@@ -31,7 +31,7 @@ public class DataResolverTests extends MonitoringIndexNameResolverTestCase {
         MonitoringDoc doc = new MonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         return doc;
     }
 

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.monitoring.MonitoredSystem;
@@ -40,7 +40,7 @@ public class TimestampedResolverTests extends MonitoringIndexNameResolverTestCas
         MonitoringDoc doc = new MonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode(randomAsciiOfLength(5), DummyTransportAddress.INSTANCE,
+        doc.setSourceNode(new DiscoveryNode(randomAsciiOfLength(5), LocalTransportAddress.buildUnique(),
                 emptyMap(), emptySet(), Version.CURRENT));
         return doc;
     }

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.bulk;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.xpack.monitoring.MonitoredSystem;
 import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
@@ -39,7 +39,7 @@ public class MonitoringBulkDataResolverTests extends MonitoringIndexNameResolver
 
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         return doc;
     }
 

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.bulk;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.xpack.monitoring.MonitoredSystem;
 import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
@@ -41,7 +41,7 @@ public class MonitoringBulkTimestampedResolverTests
         }
 
         doc.setClusterUUID(randomAsciiOfLength(5));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         return doc;
     }
 

@@ -10,7 +10,7 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.license.core.License;
@@ -42,7 +42,7 @@ public class ClusterInfoResolverTests extends MonitoringIndexNameResolverTestCas
         ClusterInfoMonitoringDoc doc = new ClusterInfoMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setVersion(randomFrom(Version.V_2_0_0, Version.CURRENT).toString());
         doc.setLicense(licenseBuilder.build());
         doc.setClusterName(randomAsciiOfLength(5));
@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.cluster;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStateNodeMonitoringDoc;
 import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringTemplateUtils;
@@ -28,7 +28,7 @@ public class ClusterStateNodeResolverTests extends
         ClusterStateNodeMonitoringDoc doc = new ClusterStateNodeMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setNodeId(UUID.randomUUID().toString());
         doc.setStateUUID(UUID.randomUUID().toString());
         return doc;

@@ -11,7 +11,6 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.xpack.monitoring.agent.collector.cluster.ClusterStateMonitoringDoc;
@@ -32,7 +31,7 @@ public class ClusterStateResolverTests extends MonitoringIndexNameResolverTestCa
         ClusterStateMonitoringDoc doc = new ClusterStateMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setStatus(randomFrom(ClusterHealthStatus.values()));
 
         DiscoveryNode masterNode = new DiscoveryNode("master", new LocalTransportAddress("master"),

@@ -21,7 +21,7 @@ import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -63,7 +63,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
         ClusterStatsMonitoringDoc doc = new ClusterStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setClusterStats(randomClusterStats());
         return doc;
     }
@@ -94,7 +94,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
      */
     private ClusterStatsResponse randomClusterStats() {
         List<ClusterStatsNodeResponse> responses = Collections.singletonList(
-                new ClusterStatsNodeResponse(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE,
+                new ClusterStatsNodeResponse(new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(),
                         emptyMap(), emptySet(), Version.CURRENT),
                         ClusterHealthStatus.GREEN, randomNodeInfo(), randomNodeStats(), randomShardStats())
         );
@@ -106,10 +106,10 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
      * @return a random {@link NodeInfo} used to resolve a monitoring document.
      */
     private NodeInfo randomNodeInfo() {
-        BoundTransportAddress transportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE},
-                DummyTransportAddress.INSTANCE);
+        BoundTransportAddress transportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()},
+                LocalTransportAddress.buildUnique());
         return new NodeInfo(Version.CURRENT, org.elasticsearch.Build.CURRENT,
-                new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
+                new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
                 Settings.EMPTY, DummyOsInfo.INSTANCE, new ProcessInfo(randomInt(), randomBoolean()), JvmInfo.jvmInfo(),
                 new ThreadPoolInfo(Collections.singletonList(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5))),
                 new TransportInfo(transportAddress, Collections.emptyMap()), new HttpInfo(transportAddress, randomLong()),
@@ -127,7 +127,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
         };
         Map<Index, List<IndexShardStats>> statsByShard = new HashMap<>();
         statsByShard.put(index, Collections.singletonList(new IndexShardStats(new ShardId(index, 0), randomShardStats())));
-        return new NodeStats(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0,
+        return new NodeStats(new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0,
                 new NodeIndicesStats(new CommonStats(), statsByShard), null, null, null, null,
                 new FsInfo(0, null, pathInfo), null, null, null, null, null, null);
     }
@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.indices;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.recovery.RecoveryState;
@@ -32,9 +32,9 @@ public class IndexRecoveryResolverTests extends MonitoringIndexNameResolverTestC
         IndexRecoveryMonitoringDoc doc = new IndexRecoveryMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
 
-        DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
+        DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
         Map<String, java.util.List<RecoveryState>> shardRecoveryStates = new HashMap<>();
         shardRecoveryStates.put("test", Collections.singletonList(new RecoveryState(new ShardId("test", "uuid", 0), true,
                 RecoveryState.Type.STORE, localNode, localNode)));

@@ -12,7 +12,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
@@ -45,7 +45,7 @@ public class IndexStatsResolverTests extends MonitoringIndexNameResolverTestCase
         IndexStatsMonitoringDoc doc = new IndexStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setIndexStats(randomIndexStats());
         return doc;
     }
@@ -58,7 +58,7 @@ public class IndexStatsResolverTests extends MonitoringIndexNameResolverTestCase
     public void testIndexStatsResolver() throws Exception {
         IndexStatsMonitoringDoc doc = newMonitoringDoc();
         doc.setTimestamp(1437580442979L);
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
 
         IndexStatsResolver resolver = newResolver();
         assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));

@@ -13,7 +13,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
@@ -49,7 +49,7 @@ public class IndicesStatsResolverTests extends MonitoringIndexNameResolverTestCa
         IndicesStatsMonitoringDoc doc = new IndicesStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setIndicesStats(randomIndicesStats());
         return doc;
     }
@@ -63,7 +63,7 @@ public class IndicesStatsResolverTests extends MonitoringIndexNameResolverTestCa
         IndicesStatsMonitoringDoc doc = newMonitoringDoc();
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(1437580442979L);
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
 
         IndicesStatsResolver resolver = newResolver();
         assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));

@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.resolver.node;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.xpack.monitoring.agent.collector.cluster.DiscoveryNodeMonitoringDoc;
@@ -28,9 +28,9 @@ public class DiscoveryNodeResolverTests extends MonitoringIndexNameResolverTestC
         DiscoveryNodeMonitoringDoc doc = new DiscoveryNodeMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setNode(new DiscoveryNode(randomAsciiOfLength(3), UUID.randomUUID().toString(),
-                DummyTransportAddress.INSTANCE, emptyMap(), emptySet(),
+                LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),
                 VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), Version.CURRENT)));
         return doc;
     }

@@ -14,7 +14,7 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
@@ -59,7 +59,7 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase<
         NodeStatsMonitoringDoc doc = new NodeStatsMonitoringDoc(randomMonitoringId(), randomAsciiOfLength(2));
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
         doc.setNodeMaster(randomBoolean());
         doc.setNodeId(UUID.randomUUID().toString());
         doc.setDiskThresholdDeciderEnabled(randomBoolean());
@@ -133,7 +133,7 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase<
                 new ThreadPoolStats.Stats(ThreadPool.Names.SEARCH, 0, 0, 0, 0, 0, 0),
                 new ThreadPoolStats.Stats(InternalWatchExecutor.THREAD_POOL_NAME, 0, 0, 0, 0, 0, 0)
         );
-        return new NodeStats(new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0,
+        return new NodeStats(new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0,
                 new NodeIndicesStats(new CommonStats(), statsByShard), OsProbe.getInstance().osStats(),
                 ProcessProbe.getInstance().processStats(), JvmStats.jvmStats(),
                 new ThreadPoolStats(threadPoolStats),

@@ -9,7 +9,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
@@ -31,7 +31,7 @@ public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<Sha
         doc.setClusterUUID(randomAsciiOfLength(5));
         doc.setTimestamp(Math.abs(randomLong()));
         doc.setClusterStateUUID(UUID.randomUUID().toString());
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
 
         ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(new Index(randomAsciiOfLength(5), UUID.randomUUID().toString()),
                 randomIntBetween(0, 5)), null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
@@ -48,7 +48,7 @@ public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<Sha
 
         final String clusterStateUUID = UUID.randomUUID().toString();
         doc.setClusterStateUUID(clusterStateUUID);
-        doc.setSourceNode(new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT));
+        doc.setSourceNode(new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT));
 
         ShardsResolver resolver = newResolver();
         assertThat(resolver.index(doc), equalTo(".monitoring-es-" + MonitoringTemplateUtils.TEMPLATE_VERSION + "-2015.07.22"));
@ -141,8 +141,12 @@ public class Security implements ActionPlugin {
|
|||
}
|
||||
|
||||
modules.add(new AuthenticationModule(settings));
|
||||
modules.add(new AuthorizationModule(settings));
|
||||
if (enabled == false) {
|
||||
modules.add(new SecurityModule(settings, securityLicenseState));
|
||||
modules.add(new CryptoModule(settings));
|
||||
modules.add(new AuditTrailModule(settings));
|
||||
modules.add(new SecurityTransportModule(settings));
|
||||
return modules;
|
||||
}
|
||||
|
||||
|
@ -152,7 +156,6 @@ public class Security implements ActionPlugin {
|
|||
securityLicenseState = new SecurityLicenseState();
|
||||
modules.add(new SecurityModule(settings, securityLicenseState));
|
||||
modules.add(new CryptoModule(settings));
|
||||
modules.add(new AuthorizationModule(settings));
|
||||
modules.add(new AuditTrailModule(settings));
|
||||
modules.add(new SecurityRestModule(settings));
|
||||
modules.add(new SecurityActionModule(settings));
|
||||
|
|
|
@ -13,14 +13,21 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
|||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.xpack.security.audit.AuditTrailService;
|
||||
import org.elasticsearch.xpack.security.authc.Realm;
|
||||
import org.elasticsearch.xpack.security.authc.Realms;
|
||||
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
|
||||
import org.elasticsearch.xpack.XPackFeatureSet;
|
||||
import org.elasticsearch.xpack.security.authz.store.RolesStore;
|
||||
import org.elasticsearch.xpack.security.crypto.CryptoService;
|
||||
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
|
||||
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyHttpServerTransport;
|
||||
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
@ -30,16 +37,33 @@ import java.util.stream.Collectors;
|
|||
*/
|
||||
public class SecurityFeatureSet implements XPackFeatureSet {
|
||||
|
||||
private final Settings settings;
|
||||
private final boolean enabled;
|
||||
private final SecurityLicenseState licenseState;
|
||||
@Nullable private final Realms realms;
|
||||
@Nullable
|
||||
private final Realms realms;
|
||||
@Nullable
|
||||
private final RolesStore rolesStore;
|
||||
@Nullable
|
||||
private final IPFilter ipFilter;
|
||||
@Nullable
|
||||
private final AuditTrailService auditTrailService;
|
||||
@Nullable
|
||||
private final CryptoService cryptoService;
|
||||
|
||||
@Inject
|
||||
public SecurityFeatureSet(Settings settings, @Nullable SecurityLicenseState licenseState,
|
||||
@Nullable Realms realms, NamedWriteableRegistry namedWriteableRegistry) {
|
||||
@Nullable Realms realms, NamedWriteableRegistry namedWriteableRegistry, @Nullable RolesStore rolesStore,
|
||||
@Nullable IPFilter ipFilter, @Nullable AuditTrailService auditTrailService,
|
||||
@Nullable CryptoService cryptoService) {
|
||||
this.enabled = Security.enabled(settings);
|
||||
this.licenseState = licenseState;
|
||||
this.realms = realms;
|
||||
this.rolesStore = rolesStore;
|
||||
this.settings = settings;
|
||||
this.ipFilter = ipFilter;
|
||||
this.auditTrailService = auditTrailService;
|
||||
this.cryptoService = cryptoService;
|
||||
namedWriteableRegistry.register(Usage.class, Usage.writeableName(Security.NAME), Usage::new);
|
||||
}
|
||||
|
||||
|
@ -66,7 +90,12 @@ public class SecurityFeatureSet implements XPackFeatureSet {
|
|||
@Override
|
||||
public XPackFeatureSet.Usage usage() {
|
||||
List<Map<String, Object>> enabledRealms = buildEnabledRealms(realms);
|
||||
return new Usage(available(), enabled(), enabledRealms);
|
||||
Map<String, Object> rolesStoreUsage = rolesStoreUsage(rolesStore);
|
||||
Map<String, Object> sslUsage = sslUsage(settings);
|
||||
Map<String, Object> auditUsage = auditUsage(auditTrailService);
|
||||
Map<String, Object> ipFilterUsage = ipFilterUsage(ipFilter);
|
||||
boolean hasSystemKey = systemKeyUsage(cryptoService);
|
||||
return new Usage(available(), enabled(), enabledRealms, rolesStoreUsage, sslUsage, auditUsage, ipFilterUsage, hasSystemKey);
|
||||
}
|
||||
|
||||
static List<Map<String, Object>> buildEnabledRealms(Realms realms) {
|
||||
|
@ -84,26 +113,86 @@ public class SecurityFeatureSet implements XPackFeatureSet {
|
|||
return enabledRealms;
|
||||
}
|
||||
|
||||
static Map<String, Object> rolesStoreUsage(@Nullable RolesStore rolesStore) {
|
||||
if (rolesStore == null) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
return rolesStore.usageStats();
|
||||
}
|
||||
|
||||
static Map<String, Object> sslUsage(Settings settings) {
|
||||
Map<String, Object> map = new HashMap<>(2);
|
||||
map.put("http", Collections.singletonMap("enabled", SecurityNettyHttpServerTransport.SSL_SETTING.get(settings)));
|
||||
map.put("transport", Collections.singletonMap("enabled", SecurityNettyTransport.SSL_SETTING.get(settings)));
|
||||
return map;
|
||||
}
|
||||
|
||||
static Map<String, Object> auditUsage(@Nullable AuditTrailService auditTrailService) {
|
||||
if (auditTrailService == null) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
return auditTrailService.usageStats();
|
||||
}
|
||||
|
||||
static Map<String, Object> ipFilterUsage(@Nullable IPFilter ipFilter) {
|
||||
if (ipFilter == null) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
return ipFilter.usageStats();
|
||||
}
|
||||
|
||||
static boolean systemKeyUsage(CryptoService cryptoService) {
|
||||
// we can piggy back on the encryption enabled method as it is only enabled if there is a system key
|
||||
return cryptoService.encryptionEnabled();
|
||||
}
|
||||
|
||||
static class Usage extends XPackFeatureSet.Usage {
|
||||
|
||||
private static final String ENABLED_REALMS_XFIELD = "enabled_realms";
|
||||
private static final String ROLES_XFIELD = "roles";
|
||||
private static final String SSL_XFIELD = "ssl";
|
||||
private static final String AUDIT_XFIELD = "audit";
|
||||
private static final String IP_FILTER_XFIELD = "ipfilter";
|
||||
private static final String SYSTEM_KEY_XFIELD = "system_key";
|
||||
|
||||
private List<Map<String, Object>> enabledRealms;
|
||||
private Map<String, Object> rolesStoreUsage;
|
||||
private Map<String, Object> sslUsage;
|
||||
private Map<String, Object> auditUsage;
|
||||
private Map<String, Object> ipFilterUsage;
|
||||
private boolean hasSystemKey;
|
||||
|
||||
public Usage(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
enabledRealms = in.readList(StreamInput::readMap);
|
||||
rolesStoreUsage = in.readMap();
|
||||
sslUsage = in.readMap();
|
||||
auditUsage = in.readMap();
|
||||
ipFilterUsage = in.readMap();
|
||||
hasSystemKey = in.readBoolean();
|
||||
}
|
||||
|
||||
public Usage(boolean available, boolean enabled, List<Map<String, Object>> enabledRealms) {
|
||||
public Usage(boolean available, boolean enabled, List<Map<String, Object>> enabledRealms, Map<String, Object> rolesStoreUsage,
|
||||
Map<String, Object> sslUsage, Map<String, Object> auditUsage, Map<String, Object> ipFilterUsage,
|
||||
boolean hasSystemKey) {
|
||||
super(Security.NAME, available, enabled);
|
||||
this.enabledRealms = enabledRealms;
|
||||
this.rolesStoreUsage = rolesStoreUsage;
|
||||
this.sslUsage = sslUsage;
|
||||
this.auditUsage = auditUsage;
|
||||
this.ipFilterUsage = ipFilterUsage;
|
||||
this.hasSystemKey = hasSystemKey;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeList(enabledRealms.stream().map((m) -> (Writeable) o -> o.writeMap(m)).collect(Collectors.toList()));
|
||||
out.writeMap(rolesStoreUsage);
|
||||
out.writeMap(sslUsage);
|
||||
out.writeMap(auditUsage);
|
||||
out.writeMap(ipFilterUsage);
|
||||
out.writeBoolean(hasSystemKey);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -111,6 +200,11 @@ public class SecurityFeatureSet implements XPackFeatureSet {
|
|||
super.innerXContent(builder, params);
|
||||
if (enabled) {
|
||||
builder.field(ENABLED_REALMS_XFIELD, enabledRealms);
|
||||
builder.field(ROLES_XFIELD, rolesStoreUsage);
|
||||
builder.field(SSL_XFIELD, sslUsage);
|
||||
builder.field(AUDIT_XFIELD, auditUsage);
|
||||
builder.field(IP_FILTER_XFIELD, ipFilterUsage);
|
||||
builder.field(SYSTEM_KEY_XFIELD, hasSystemKey);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -76,7 +76,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust
|
|||
if (nativeUserStore.canStart(event.state(), master)) {
|
||||
threadPool.generic().execute(new AbstractRunnable() {
|
||||
@Override
|
||||
public void onFailure(Throwable throwable) {
|
||||
public void onFailure(Exception throwable) {
|
||||
logger.error("failed to start native user store service", throwable);
|
||||
assert false : "security lifecycle services startup failed";
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust
|
|||
if (nativeRolesStore.canStart(event.state(), master)) {
|
||||
threadPool.generic().execute(new AbstractRunnable() {
|
||||
@Override
|
||||
public void onFailure(Throwable throwable) {
|
||||
public void onFailure(Exception throwable) {
|
||||
logger.error("failed to start native roles store services", throwable);
|
||||
assert false : "security lifecycle services startup failed";
|
||||
}
|
||||
|
@ -117,7 +117,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust
|
|||
threadPool.generic().execute(new AbstractRunnable() {
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable throwable) {
|
||||
public void onFailure(Exception throwable) {
|
||||
logger.error("failed to start index audit trail services", throwable);
|
||||
assert false : "security lifecycle services startup failed";
|
||||
}
|
||||
|
|
|
@ -90,8 +90,8 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
|
|||
if (createTemplate && templateCreationPending.compareAndSet(false, true)) {
|
||||
threadPool.generic().execute(new AbstractRunnable() {
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
logger.warn("failed to create security index template", t);
|
||||
public void onFailure(Exception e) {
|
||||
logger.warn("failed to create security index template", e);
|
||||
templateCreationPending.set(false);
|
||||
}
|
||||
|
||||
|
|
|
@ -44,9 +44,6 @@ import java.util.function.Predicate;
|
|||
|
||||
import static org.elasticsearch.xpack.security.support.Exceptions.authorizationError;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SecurityActionFilter extends AbstractComponent implements ActionFilter {
|
||||
|
||||
private static final Predicate<String> LICENSE_EXPIRATION_ACTION_MATCHER = HealthAndStatsPrivilege.INSTANCE.predicate();
|
||||
|
@ -109,8 +106,8 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil
|
|||
} else {
|
||||
chain.proceed(task, action, request, listener);
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
listener.onFailure(t);
|
||||
} catch (Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -231,7 +228,7 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
if (threadContext != null) {
|
||||
threadContext.restore();
|
||||
}
|
||||
|
|
|
@ -44,7 +44,7 @@ public class TransportDeleteRoleAction extends HandledTransportAction<DeleteRole
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
listener.onFailure(t);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -77,7 +77,7 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
logger.error("failed to retrieve role [{}]", t, rolename);
|
||||
listener.onFailure(t);
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
logger.error("failed to retrieve role [{}]", t,
|
||||
Strings.arrayToDelimitedString(request.names(), ","));
|
||||
listener.onFailure(t);
|
||||
|
|
|
@ -48,7 +48,7 @@ public class TransportPutRoleAction extends HandledTransportAction<PutRoleReques
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
listener.onFailure(t);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -12,7 +12,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
|||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
|
||||
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
|
||||
import org.elasticsearch.xpack.security.user.AnonymousUser;
|
||||
import org.elasticsearch.xpack.security.user.SystemUser;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
@ -50,7 +49,7 @@ public class TransportChangePasswordAction extends HandledTransportAction<Change
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -54,7 +54,7 @@ public class TransportDeleteUserAction extends HandledTransportAction<DeleteUser
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -78,7 +78,7 @@ public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequ
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("failed to retrieve user [{}]", e, username);
|
||||
listener.onFailure(e);
|
||||
}
|
||||
|
@ -94,7 +94,7 @@ public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequ
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("failed to retrieve user [{}]", e,
|
||||
Strings.arrayToDelimitedString(request.usernames(), ","));
|
||||
listener.onFailure(e);
|
||||
|
|
|
@ -59,7 +59,7 @@ public class TransportPutUserAction extends HandledTransportAction<PutUserReques
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("failed to put user [{}]", e, request.username());
|
||||
listener.onFailure(e);
|
||||
}
|
||||
|
|
|
@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.audit;
|
|||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.inject.multibindings.Multibinder;
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -44,18 +45,16 @@ public class AuditTrailModule extends AbstractSecurityModule.Node {
|
|||
|
||||
@Override
|
||||
protected void configureNode() {
|
||||
if (!enabled) {
|
||||
bind(AuditTrail.class).toInstance(AuditTrail.NOOP);
|
||||
return;
|
||||
}
|
||||
List<String> outputs = OUTPUTS_SETTING.get(settings);
|
||||
if (outputs.isEmpty()) {
|
||||
if (securityEnabled == false || enabled == false || outputs.isEmpty()) {
|
||||
bind(AuditTrailService.class).toProvider(Providers.of(null));
|
||||
bind(AuditTrail.class).toInstance(AuditTrail.NOOP);
|
||||
return;
|
||||
}
|
||||
bind(AuditTrail.class).to(AuditTrailService.class).asEagerSingleton();
|
||||
Multibinder<AuditTrail> binder = Multibinder.newSetBinder(binder(), AuditTrail.class);
|
||||
|
||||
bind(AuditTrailService.class).asEagerSingleton();
|
||||
bind(AuditTrail.class).to(AuditTrailService.class);
|
||||
Multibinder<AuditTrail> binder = Multibinder.newSetBinder(binder(), AuditTrail.class);
|
||||
Set<String> uniqueOutputs = Sets.newHashSet(outputs);
|
||||
for (String output : uniqueOutputs) {
|
||||
switch (output) {
|
||||
|
|
|
@ -16,6 +16,8 @@ import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
|
|||
import org.elasticsearch.transport.TransportMessage;
|
||||
|
||||
import java.net.InetAddress;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
|
@ -197,4 +199,11 @@ public class AuditTrailService extends AbstractComponent implements AuditTrail {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> map = new HashMap<>(2);
|
||||
map.put("enabled", AuditTrailModule.ENABLED_SETTING.get(settings));
|
||||
map.put("outputs", AuditTrailModule.OUTPUTS_SETTING.get(settings));
|
||||
return map;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -886,7 +886,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
|
|||
INDEX_TEMPLATE_NAME);
|
||||
threadPool.generic().execute(new AbstractRunnable() {
|
||||
@Override
|
||||
public void onFailure(Throwable throwable) {
|
||||
public void onFailure(Exception throwable) {
|
||||
logger.error("failed to update security audit index template [{}]", throwable, INDEX_TEMPLATE_NAME);
|
||||
}
|
||||
|
||||
|
|
|
@ -159,7 +159,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
if (t instanceof IndexNotFoundException) {
|
||||
logger.trace("failed to retrieve user [{}] since security index does not exist", username);
|
||||
// We don't invoke the onFailure listener here, instead
|
||||
|
@ -228,7 +228,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
// attempt to clear scroll response
|
||||
if (lastResponse != null && lastResponse.getScrollId() != null) {
|
||||
clearScrollResponse(lastResponse.getScrollId());
|
||||
|
@ -260,7 +260,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
if (t instanceof IndexNotFoundException) {
|
||||
logger.trace("failed to retrieve user [{}] since security index does not exist", t, username);
|
||||
} else {
|
||||
|
@ -287,7 +287,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
if (t instanceof IndexNotFoundException) {
|
||||
logger.trace("could not retrieve user [{}] because security index does not exist", t, user);
|
||||
} else {
|
||||
|
@ -334,7 +334,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
Throwable cause = e;
|
||||
if (e instanceof ElasticsearchException) {
|
||||
cause = ExceptionsHelper.unwrapCause(e);
|
||||
|
@ -368,7 +368,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
|
@ -410,7 +410,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
Throwable cause = e;
|
||||
if (e instanceof ElasticsearchException) {
|
||||
cause = ExceptionsHelper.unwrapCause(e);
|
||||
|
@ -455,7 +455,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
|
@ -479,7 +479,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
|
@ -550,9 +550,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
if (state.compareAndSet(State.STARTED, State.STOPPING)) {
|
||||
try {
|
||||
userPoller.stop();
|
||||
} catch (Throwable t) {
|
||||
} catch (Exception e) {
|
||||
state.set(State.FAILED);
|
||||
throw t;
|
||||
throw e;
|
||||
} finally {
|
||||
state.set(State.STOPPED);
|
||||
}
|
||||
|
@ -594,10 +594,10 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
return securityIndexExists;
|
||||
}
|
||||
|
||||
char[] reservedUserPassword(String username) throws Throwable {
|
||||
char[] reservedUserPassword(String username) throws Exception {
|
||||
assert started();
|
||||
final AtomicReference<char[]> passwordHash = new AtomicReference<>();
|
||||
final AtomicReference<Throwable> failure = new AtomicReference<>();
|
||||
final AtomicReference<Exception> failure = new AtomicReference<>();
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, RESERVED_USER_DOC_TYPE, username)
|
||||
.execute(new LatchedActionListener<>(new ActionListener<GetResponse>() {
|
||||
|
@ -615,7 +615,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
if (e instanceof IndexNotFoundException) {
|
||||
logger.trace("could not retrieve built in user [{}] password since security index does not exist", e, username);
|
||||
} else {
|
||||
|
@ -634,7 +634,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
failure.set(e);
|
||||
}
|
||||
|
||||
Throwable failureCause = failure.get();
|
||||
Exception failureCause = failure.get();
|
||||
if (failureCause != null) {
|
||||
// if there is any sort of failure we need to throw an exception to prevent the fallback to the default password...
|
||||
throw failureCause;
|
||||
|
@ -651,7 +651,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
// Not really much to do here except for warn about it...
|
||||
logger.warn("failed to clear scroll [{}]", t, scrollId);
|
||||
}
|
||||
|
@ -669,7 +669,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("unable to clear realm cache for user [{}]", e, username);
|
||||
ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + username
|
||||
+ "] failed. please clear the realm cache manually", e);
|
||||
|
@ -838,21 +838,22 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
|
|||
}
|
||||
|
||||
// call listeners
|
||||
Throwable th = null;
|
||||
RuntimeException ex = null;
|
||||
for (ChangeListener listener : listeners) {
|
||||
try {
|
||||
listener.onUsersChanged(changedUsers);
|
||||
} catch (Throwable t) {
|
||||
th = ExceptionsHelper.useOrSuppress(th, t);
|
||||
} catch (Exception e) {
|
||||
if (ex == null) ex = new RuntimeException("exception while notifying listeners");
|
||||
ex.addSuppressed(e);
|
||||
}
|
||||
}
|
||||
|
||||
ExceptionsHelper.reThrowIfNotNull(th);
|
||||
if (ex != null) throw ex;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
logger.error("error occurred while checking the native users for changes", t);
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("error occurred while checking the native users for changes", e);
|
||||
}
|
||||
|
||||
private boolean isStopped() {
|
||||
|
|
|
@ -131,7 +131,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
|
|||
return DEFAULT_PASSWORD_HASH;
|
||||
}
|
||||
return passwordHash;
|
||||
} catch (Throwable e) {
|
||||
} catch (Exception e) {
|
||||
logger.error("failed to retrieve password hash for reserved user [{}]", e, username);
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -37,9 +37,6 @@ import static java.util.Collections.emptyMap;
|
|||
import static java.util.Collections.unmodifiableMap;
|
||||
import static org.elasticsearch.xpack.security.support.SecurityFiles.openAtomicMoveWriter;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FileUserPasswdStore {
|
||||
|
||||
private final ESLogger logger;
|
||||
|
@ -111,8 +108,8 @@ public class FileUserPasswdStore {
|
|||
static Map<String, char[]> parseFileLenient(Path path, ESLogger logger) {
|
||||
try {
|
||||
return parseFile(path, logger);
|
||||
} catch (Throwable t) {
|
||||
logger.error("failed to parse users file [{}]. skipping/removing all users...", t, path.toAbsolutePath());
|
||||
} catch (Exception e) {
|
||||
logger.error("failed to parse users file [{}]. skipping/removing all users...", e, path.toAbsolutePath());
|
||||
return emptyMap();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -37,9 +37,6 @@ import static java.util.Collections.emptyMap;
|
|||
import static java.util.Collections.unmodifiableMap;
|
||||
import static org.elasticsearch.xpack.security.support.SecurityFiles.openAtomicMoveWriter;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FileUserRolesStore {
|
||||
|
||||
private static final Pattern USERS_DELIM = Pattern.compile("\\s*,\\s*");
|
||||
|
@ -103,8 +100,8 @@ public class FileUserRolesStore {
|
|||
static Map<String, String[]> parseFileLenient(Path path, ESLogger logger) {
|
||||
try {
|
||||
return parseFile(path, logger);
|
||||
} catch (Throwable t) {
|
||||
logger.error("failed to parse users_roles file [{}]. skipping/removing all entries...", t, path.toAbsolutePath());
|
||||
} catch (Exception e) {
|
||||
logger.error("failed to parse users_roles file [{}]. skipping/removing all entries...", e, path.toAbsolutePath());
|
||||
return emptyMap();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ import org.elasticsearch.cli.ExitCodes;
|
|||
import org.elasticsearch.cli.MultiCommand;
|
||||
import org.elasticsearch.cli.SettingCommand;
|
||||
import org.elasticsearch.cli.Terminal;
|
||||
import org.elasticsearch.cli.UserError;
|
||||
import org.elasticsearch.cli.UserException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.set.Sets;
|
||||
|
@ -88,7 +88,7 @@ public class UsersTool extends MultiCommand {
|
|||
String username = parseUsername(arguments.values(options));
|
||||
Validation.Error validationError = Users.validateUsername(username);
|
||||
if (validationError != null) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError);
|
||||
throw new UserException(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError);
|
||||
}
|
||||
|
||||
char[] password = parsePassword(terminal, passwordOption.value(options));
|
||||
|
@ -102,7 +102,7 @@ public class UsersTool extends MultiCommand {
|
|||
|
||||
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null));
|
||||
if (users.containsKey(username)) {
|
||||
throw new UserError(ExitCodes.CODE_ERROR, "User [" + username + "] already exists");
|
||||
throw new UserException(ExitCodes.CODE_ERROR, "User [" + username + "] already exists");
|
||||
}
|
||||
Hasher hasher = Hasher.BCRYPT;
|
||||
users.put(username, hasher.hash(new SecuredString(password)));
|
||||
|
@ -149,7 +149,7 @@ public class UsersTool extends MultiCommand {
|
|||
|
||||
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null));
|
||||
if (users.containsKey(username) == false) {
|
||||
throw new UserError(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
}
|
||||
if (Files.exists(passwordFile)) {
|
||||
char[] passwd = users.remove(username);
|
||||
|
@ -205,7 +205,7 @@ public class UsersTool extends MultiCommand {
|
|||
FileAttributesChecker attributesChecker = new FileAttributesChecker(file);
|
||||
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(file, null));
|
||||
if (users.containsKey(username) == false) {
|
||||
throw new UserError(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
}
|
||||
users.put(username, Hasher.BCRYPT.hash(new SecuredString(password)));
|
||||
FileUserPasswdStore.writeFile(users, file);
|
||||
|
@ -261,7 +261,7 @@ public class UsersTool extends MultiCommand {
|
|||
|
||||
Map<String, char[]> usersMap = FileUserPasswdStore.parseFile(usersFile, null);
|
||||
if (!usersMap.containsKey(username)) {
|
||||
throw new UserError(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
}
|
||||
|
||||
Map<String, String[]> userRoles = FileUserRolesStore.parseFile(rolesFile, null);
|
||||
|
@ -325,7 +325,7 @@ public class UsersTool extends MultiCommand {
|
|||
|
||||
if (username != null) {
|
||||
if (!users.contains(username)) {
|
||||
throw new UserError(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
|
||||
}
|
||||
|
||||
if (userRoles.containsKey(username)) {
|
||||
|
@ -394,38 +394,38 @@ public class UsersTool extends MultiCommand {
|
|||
}
|
||||
|
||||
// pkg private for testing
|
||||
static String parseUsername(List<String> args) throws UserError {
|
||||
static String parseUsername(List<String> args) throws UserException {
|
||||
if (args.isEmpty()) {
|
||||
throw new UserError(ExitCodes.USAGE, "Missing username argument");
|
||||
throw new UserException(ExitCodes.USAGE, "Missing username argument");
|
||||
} else if (args.size() > 1) {
|
||||
throw new UserError(ExitCodes.USAGE, "Expected a single username argument, found extra: " + args.toString());
|
||||
throw new UserException(ExitCodes.USAGE, "Expected a single username argument, found extra: " + args.toString());
|
||||
}
|
||||
String username = args.get(0);
|
||||
Validation.Error validationError = Users.validateUsername(username);
|
||||
if (validationError != null) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError);
|
||||
throw new UserException(ExitCodes.DATA_ERROR, "Invalid username [" + username + "]... " + validationError);
|
||||
}
|
||||
return username;
|
||||
}
|
||||
|
||||
// pkg private for testing
|
||||
static char[] parsePassword(Terminal terminal, String passwordStr) throws UserError {
|
||||
static char[] parsePassword(Terminal terminal, String passwordStr) throws UserException {
|
||||
char[] password;
|
||||
if (passwordStr != null) {
|
||||
password = passwordStr.toCharArray();
|
||||
Validation.Error validationError = Users.validatePassword(password);
|
||||
if (validationError != null) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Invalid password..." + validationError);
|
||||
throw new UserException(ExitCodes.DATA_ERROR, "Invalid password..." + validationError);
|
||||
}
|
||||
} else {
|
||||
password = terminal.readSecret("Enter new password: ");
|
||||
Validation.Error validationError = Users.validatePassword(password);
|
||||
if (validationError != null) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Invalid password..." + validationError);
|
||||
throw new UserException(ExitCodes.DATA_ERROR, "Invalid password..." + validationError);
|
||||
}
|
||||
char[] retyped = terminal.readSecret("Retype new password: ");
|
||||
if (Arrays.equals(password, retyped) == false) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Password mismatch");
|
||||
throw new UserException(ExitCodes.DATA_ERROR, "Password mismatch");
|
||||
}
|
||||
}
|
||||
return password;
|
||||
|
@ -446,7 +446,7 @@ public class UsersTool extends MultiCommand {
|
|||
}
|
||||
|
||||
// pkg private for testing
|
||||
static String[] parseRoles(Terminal terminal, Environment env, String rolesStr) throws UserError {
|
||||
static String[] parseRoles(Terminal terminal, Environment env, String rolesStr) throws UserException {
|
||||
if (rolesStr.isEmpty()) {
|
||||
return Strings.EMPTY_ARRAY;
|
||||
}
|
||||
|
@ -454,7 +454,7 @@ public class UsersTool extends MultiCommand {
|
|||
for (String role : roles) {
|
||||
Validation.Error validationError = Validation.Roles.validateRoleName(role);
|
||||
if (validationError != null) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Invalid role [" + role + "]... " + validationError);
|
||||
throw new UserException(ExitCodes.DATA_ERROR, "Invalid role [" + role + "]... " + validationError);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -65,7 +65,7 @@ public class LdapSession implements Closeable {
|
|||
return groupsResolver.resolve(ldap, userDn, timeout, logger);
|
||||
}
|
||||
|
||||
public static interface GroupsResolver {
|
||||
public interface GroupsResolver {
|
||||
|
||||
List<String> resolve(LDAPInterface ldapConnection, String userDn, TimeValue timeout, ESLogger logger);
|
||||
|
||||
|
|
|
@ -484,7 +484,7 @@ public class BCrypt {
|
|||
* @param lr an array containing the two 32-bit half blocks
|
||||
* @param off the position in the array of the blocks
|
||||
*/
|
||||
private final void encipher(int lr[], int off) {
|
||||
private void encipher(int lr[], int off) {
|
||||
int i, n, l = lr[off], r = lr[off + 1];
|
||||
|
||||
l ^= P[0];
|
||||
|
|
|
@ -95,8 +95,8 @@ public class DnRoleMapper {
|
|||
public static Map<DN, Set<String>> parseFileLenient(Path path, ESLogger logger, String realmType, String realmName) {
|
||||
try {
|
||||
return parseFile(path, logger, realmType, realmName);
|
||||
} catch (Throwable t) {
|
||||
logger.error("failed to parse role mappings file [{}]. skipping/removing all mappings...", t, path.toAbsolutePath());
|
||||
} catch (Exception e) {
|
||||
logger.error("failed to parse role mappings file [{}]. skipping/removing all mappings...", e, path.toAbsolutePath());
|
||||
return emptyMap();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,12 +10,7 @@ package org.elasticsearch.xpack.security.authc.support;
|
|||
*/
|
||||
public interface RefreshListener {
|
||||
|
||||
static final RefreshListener NOOP = new RefreshListener() {
|
||||
@Override
|
||||
public void onRefresh() {
|
||||
}
|
||||
};
|
||||
RefreshListener NOOP = () -> {};
|
||||
|
||||
void onRefresh();
|
||||
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.security.authz;
|
||||
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
|
||||
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
|
||||
|
@ -24,6 +25,10 @@ public class AuthorizationModule extends AbstractSecurityModule.Node {
|
|||
|
||||
@Override
|
||||
protected void configureNode() {
|
||||
if (securityEnabled == false) {
|
||||
bind(RolesStore.class).toProvider(Providers.of(null));
|
||||
return;
|
||||
}
|
||||
|
||||
// First the file and native roles stores must be bound...
|
||||
bind(ReservedRolesStore.class).asEagerSingleton();
|
||||
|
|
|
@ -125,7 +125,7 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
|
|||
if (group.check(action, indexOrAlias)) {
|
||||
granted = true;
|
||||
for (String index : concreteIndices) {
|
||||
if (group.getFields() != null) {
|
||||
if (group.hasFields()) {
|
||||
Set<String> roleFields = rolesFieldsByIndex.get(index);
|
||||
if (roleFields == null) {
|
||||
roleFields = new HashSet<>();
|
||||
|
@ -133,7 +133,7 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
|
|||
}
|
||||
roleFields.addAll(group.getFields());
|
||||
}
|
||||
if (group.getQuery() != null) {
|
||||
if (group.hasQuery()) {
|
||||
Set<BytesReference> roleQueries = roleQueriesByIndex.get(index);
|
||||
if (roleQueries == null) {
|
||||
roleQueries = new HashSet<>();
|
||||
|
@ -330,5 +330,13 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
|
|||
assert index != null;
|
||||
return actionMatcher.test(action) && indexNameMatcher.test(index);
|
||||
}
|
||||
|
||||
public boolean hasFields() {
|
||||
return fields != null;
|
||||
}
|
||||
|
||||
public boolean hasQuery() {
|
||||
return query != null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,6 +8,9 @@ package org.elasticsearch.xpack.security.authz.store;
|
|||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.xpack.security.authz.permission.Role;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A composite roles store that combines built in roles, file-based roles, and index-based roles. Checks the built in roles first, then the
|
||||
* file roles, and finally the index roles.
|
||||
|
@ -40,4 +43,12 @@ public class CompositeRolesStore implements RolesStore {
|
|||
|
||||
return nativeRolesStore.role(role);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> usage = new HashMap<>(2);
|
||||
usage.put("file", fileRolesStore.usageStats());
|
||||
usage.put("native", nativeRolesStore.usageStats());
|
||||
return usage;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,13 +15,13 @@ import org.elasticsearch.common.logging.ESLogger;
|
|||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.xpack.security.Security;
|
||||
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
|
||||
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
|
||||
import org.elasticsearch.xpack.security.authz.permission.IndicesPermission.Group;
|
||||
import org.elasticsearch.xpack.security.authz.permission.Role;
|
||||
import org.elasticsearch.xpack.security.support.NoOpLogger;
|
||||
import org.elasticsearch.xpack.security.support.Validation;
|
||||
|
@ -46,9 +46,6 @@ import static java.util.Collections.emptySet;
|
|||
import static java.util.Collections.unmodifiableMap;
|
||||
import static org.elasticsearch.xpack.security.Security.setting;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FileRolesStore extends AbstractLifecycleComponent implements RolesStore {
|
||||
|
||||
public static final Setting<String> ROLES_FILE_SETTING =
|
||||
|
@ -100,6 +97,28 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
|
|||
return permissions.get(role);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> usageStats() {
|
||||
Map<String, Object> usageStats = new HashMap<>();
|
||||
usageStats.put("size", permissions.size());
|
||||
|
||||
boolean dls = false;
|
||||
boolean fls = false;
|
||||
for (Role role : permissions.values()) {
|
||||
for (Group group : role.indices()) {
|
||||
fls = fls || group.hasFields();
|
||||
dls = dls || group.hasQuery();
|
||||
}
|
||||
if (fls && dls) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
usageStats.put("fls", fls);
|
||||
usageStats.put("dls", dls);
|
||||
|
||||
return usageStats;
|
||||
}
|
||||
|
||||
public static Path resolveFile(Settings settings, Environment env) {
|
||||
String location = ROLES_FILE_SETTING.get(settings);
|
||||
if (location.isEmpty()) {
|
||||
|
@ -288,8 +307,8 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
|
|||
try {
|
||||
permissions = parseFile(file, logger, settings);
|
||||
logger.info("updated roles (roles file [{}] changed)", file.toAbsolutePath());
|
||||
} catch (Throwable t) {
|
||||
logger.error("could not reload roles file [{}]. Current roles remain unmodified", t, file.toAbsolutePath());
|
||||
} catch (Exception e) {
|
||||
logger.error("could not reload roles file [{}]. Current roles remain unmodified", e, file.toAbsolutePath());
|
||||
return;
|
||||
}
|
||||
listener.onRefresh();
|
||||
|
|
|
@ -15,6 +15,9 @@ import org.elasticsearch.action.get.GetResponse;
|
|||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.search.ClearScrollRequest;
|
||||
import org.elasticsearch.action.search.ClearScrollResponse;
|
||||
import org.elasticsearch.action.search.MultiSearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.MultiSearchResponse;
|
||||
import org.elasticsearch.action.search.MultiSearchResponse.Item;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchScrollRequest;
|
||||
|
@ -47,6 +50,7 @@ import org.elasticsearch.xpack.security.action.role.ClearRolesCacheResponse;
|
|||
import org.elasticsearch.xpack.security.action.role.DeleteRoleRequest;
|
||||
import org.elasticsearch.xpack.security.action.role.PutRoleRequest;
|
||||
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
|
||||
import org.elasticsearch.xpack.security.authz.permission.IndicesPermission.Group;
|
||||
import org.elasticsearch.xpack.security.authz.permission.Role;
|
||||
import org.elasticsearch.xpack.security.client.SecurityClient;
|
||||
import org.elasticsearch.xpack.security.support.SelfReschedulingRunnable;
|
||||
|
@ -56,8 +60,10 @@ import org.elasticsearch.threadpool.ThreadPool.Names;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
@ -231,7 +237,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
// attempt to clear the scroll request
|
||||
if (lastResponse != null && lastResponse.getScrollId() != null) {
|
||||
clearScollRequest(lastResponse.getScrollId());
|
||||
|
@ -277,7 +283,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("failed to delete role from the index", e);
|
||||
listener.onFailure(e);
|
||||
}
|
||||
|
@ -311,7 +317,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("failed to put role [{}]", e, request.name());
|
||||
listener.onFailure(e);
|
||||
}
|
||||
|
@ -329,6 +335,84 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
return roleAndVersion == null ? null : roleAndVersion.getRole();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> usageStats() {
|
||||
if (state() != State.STARTED) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
boolean dls = false;
|
||||
boolean fls = false;
|
||||
Map<String, Object> usageStats = new HashMap<>();
|
||||
if (securityIndexExists == false) {
|
||||
usageStats.put("size", 0L);
|
||||
usageStats.put("fls", fls);
|
||||
usageStats.put("dls", dls);
|
||||
return usageStats;
|
||||
}
|
||||
|
||||
long count = (long) roleCache.size();
|
||||
for (RoleAndVersion rv : roleCache.values()) {
|
||||
Role role = rv.getRole();
|
||||
for (Group group : role.indices()) {
|
||||
fls = fls || group.hasFields();
|
||||
dls = dls || group.hasQuery();
|
||||
}
|
||||
if (fls && dls) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// slow path - query for necessary information
|
||||
if (fls == false || dls == false) {
|
||||
MultiSearchRequestBuilder builder = client.prepareMultiSearch()
|
||||
.add(client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME)
|
||||
.setTypes(ROLE_DOC_TYPE)
|
||||
.setQuery(QueryBuilders.matchAllQuery())
|
||||
.setSize(0));
|
||||
|
||||
if (fls == false) {
|
||||
builder.add(client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME)
|
||||
.setTypes(ROLE_DOC_TYPE)
|
||||
.setQuery(QueryBuilders.existsQuery("indices.fields"))
|
||||
.setSize(0)
|
||||
.setTerminateAfter(1));
|
||||
}
|
||||
|
||||
if (dls == false) {
|
||||
builder.add(client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME)
|
||||
.setTypes(ROLE_DOC_TYPE)
|
||||
.setQuery(QueryBuilders.existsQuery("indices.query"))
|
||||
.setSize(0)
|
||||
.setTerminateAfter(1));
|
||||
}
|
||||
|
||||
MultiSearchResponse multiSearchResponse = builder.get();
|
||||
int pos = 0;
|
||||
Item[] responses = multiSearchResponse.getResponses();
|
||||
if (responses[pos].isFailure() == false) {
|
||||
count = responses[pos].getResponse().getHits().getTotalHits();
|
||||
}
|
||||
|
||||
if (fls == false) {
|
||||
if (responses[++pos].isFailure() == false) {
|
||||
fls = responses[pos].getResponse().getHits().getTotalHits() > 0L;
|
||||
}
|
||||
}
|
||||
|
||||
if (dls == false) {
|
||||
if (responses[++pos].isFailure() == false) {
|
||||
dls = responses[pos].getResponse().getHits().getTotalHits() > 0L;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
usageStats.put("size", count);
|
||||
usageStats.put("fls", fls);
|
||||
usageStats.put("dls", dls);
|
||||
return usageStats;
|
||||
}
|
||||
|
||||
private RoleAndVersion getRoleAndVersion(final String roleId) {
|
||||
RoleAndVersion roleAndVersion = null;
|
||||
final AtomicReference<GetResponse> getRef = new AtomicReference<>(null);
|
||||
|
@ -345,7 +429,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
if (t instanceof IndexNotFoundException) {
|
||||
logger.trace("failed to retrieve role [{}] since security index does not exist", t, roleId);
|
||||
} else {
|
||||
|
@ -403,7 +487,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
public void onFailure(Exception t) {
|
||||
// Not really much to do here except for warn about it...
|
||||
logger.warn("failed to clear scroll [{}] after retrieving roles", t, scrollId);
|
||||
}
|
||||
|
@ -441,7 +525,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("unable to clear cache for role [{}]", e, role);
|
||||
ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + role
|
||||
+ "] failed. please clear the role cache manually", e);
|
||||
|
@ -568,8 +652,8 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
logger.error("error occurred while checking the native roles for changes", t);
|
||||
public void onFailure(Exception e) {
|
||||
logger.error("error occurred while checking the native roles for changes", e);
|
||||
}
|
||||
|
||||
private boolean isStopped() {
|
||||
|
|
|
@ -19,6 +19,8 @@ import org.elasticsearch.xpack.security.user.SystemUser;
|
|||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
|
@ -55,6 +57,11 @@ public class ReservedRolesStore implements RolesStore {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> usageStats() {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
public RoleDescriptor roleDescriptor(String role) {
|
||||
switch (role) {
|
||||
case SuperuserRole.NAME:
|
||||
|
|
|
@ -7,6 +7,8 @@ package org.elasticsearch.xpack.security.authz.store;
|
|||
|
||||
import org.elasticsearch.xpack.security.authz.permission.Role;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* An interface for looking up a role given a string role name
|
||||
*/
|
||||
|
@ -14,4 +16,5 @@ public interface RolesStore {
|
|||
|
||||
Role role(String role);
|
||||
|
||||
Map<String, Object> usageStats();
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.security.crypto;
|
||||
|
||||
import org.elasticsearch.common.inject.util.Providers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.security.support.AbstractSecurityModule;
|
||||
|
||||
|
@ -19,6 +20,10 @@ public class CryptoModule extends AbstractSecurityModule.Node {
|
|||
|
||||
@Override
|
||||
protected void configureNode() {
|
||||
if (securityEnabled == false) {
|
||||
bind(CryptoService.class).toProvider(Providers.of(null));
|
||||
return;
|
||||
}
|
||||
bind(InternalCryptoService.class).asEagerSingleton();
|
||||
bind(CryptoService.class).to(InternalCryptoService.class).asEagerSingleton();
|
||||
}
|
||||
|
|
|
@ -12,13 +12,12 @@ import org.elasticsearch.common.inject.Inject;
|
|||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.xpack.security.authc.support.CharArrays;
|
||||
import org.elasticsearch.watcher.FileChangesListener;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.XPackPlugin;
|
||||
import org.elasticsearch.xpack.security.authc.support.CharArrays;
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
||||
import javax.crypto.Cipher;
|
||||
|
@ -28,6 +27,7 @@ import javax.crypto.Mac;
|
|||
import javax.crypto.SecretKey;
|
||||
import javax.crypto.spec.IvParameterSpec;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
@ -45,12 +45,9 @@ import java.util.Objects;
|
|||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import static org.elasticsearch.xpack.security.authc.support.SecuredString.constantTimeEquals;
|
||||
import static org.elasticsearch.xpack.security.Security.setting;
|
||||
import static org.elasticsearch.xpack.security.authc.support.SecuredString.constantTimeEquals;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class InternalCryptoService extends AbstractLifecycleComponent implements CryptoService {
|
||||
|
||||
public static final String KEY_ALGO = "HmacSHA512";
|
||||
|
@ -232,8 +229,8 @@ public class InternalCryptoService extends AbstractLifecycleComponent implements
|
|||
base64RandomKey = pieces[2];
|
||||
receivedSignature = pieces[3].substring(0, length);
|
||||
text = pieces[3].substring(length);
|
||||
} catch (Throwable t) {
|
||||
logger.error("error occurred while parsing signed text", t);
|
||||
} catch (Exception e) {
|
||||
logger.error("error occurred while parsing signed text", e);
|
||||
throw new IllegalArgumentException("tampered signed text");
|
||||
}
|
||||
|
||||
|
@ -270,8 +267,8 @@ public class InternalCryptoService extends AbstractLifecycleComponent implements
|
|||
if (constantTimeEquals(sig, receivedSignature)) {
|
||||
return text;
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
logger.error("error occurred while verifying signed text", t);
|
||||
} catch (Exception e) {
|
||||
logger.error("error occurred while verifying signed text", e);
|
||||
throw new IllegalStateException("error while verifying the signed text");
|
||||
}
|
||||
|
||||
|
@ -543,29 +540,20 @@ public class InternalCryptoService extends AbstractLifecycleComponent implements
|
|||
}
|
||||
|
||||
private void callListeners(SecretKey oldSystemKey, SecretKey oldEncryptionKey) {
|
||||
Throwable th = null;
|
||||
RuntimeException ex = null;
|
||||
for (Listener listener : listeners) {
|
||||
try {
|
||||
listener.onKeyChange(oldSystemKey, oldEncryptionKey);
|
||||
} catch (Throwable t) {
|
||||
if (th == null) {
|
||||
th = t;
|
||||
} else {
|
||||
th.addSuppressed(t);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
if (ex == null) ex = new RuntimeException("exception calling key change listeners");
|
||||
ex.addSuppressed(e);
|
||||
}
|
||||
}
|
||||
|
||||
// all listeners were notified now rethrow
|
||||
if (th != null) {
|
||||
logger.error("called all key change listeners but one or more exceptions was thrown", th);
|
||||
if (th instanceof RuntimeException) {
|
||||
throw (RuntimeException) th;
|
||||
} else if (th instanceof Error) {
|
||||
throw (Error) th;
|
||||
} else {
|
||||
throw new RuntimeException(th);
|
||||
}
|
||||
if (ex != null) {
|
||||
logger.error("called all key change listeners but one or more exceptions was thrown", ex);
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -7,12 +7,10 @@ package org.elasticsearch.xpack.security.crypto.tool;

import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import joptsimple.util.KeyValuePair;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;

@@ -26,7 +24,6 @@ import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFilePermission;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

@@ -65,7 +62,7 @@ public class SystemKeyTool extends SettingCommand {
if (options.hasArgument(arguments)) {
List<String> args = arguments.values(options);
if (args.size() > 1) {
throw new UserError(ExitCodes.USAGE, "No more than one key path can be supplied");
throw new UserException(ExitCodes.USAGE, "No more than one key path can be supplied");
}
keyPath = parsePath(args.get(0));
} else {
@@ -123,8 +123,8 @@ public abstract class AbstractSSLService extends AbstractComponent {
sslEngine.setEnabledCipherSuites(supportedCiphers(sslEngine.getSupportedCipherSuites(), ciphers, false));
} catch (ElasticsearchException e) {
throw e;
} catch (Throwable t) {
throw new IllegalArgumentException("failed loading cipher suites [" + Arrays.asList(ciphers) + "]", t);
} catch (Exception e) {
throw new IllegalArgumentException("failed loading cipher suites [" + Arrays.asList(ciphers) + "]", e);
}

try {
@@ -49,8 +49,8 @@ public class SelfReschedulingRunnable extends AbstractRunnable {
}

@Override
public void onFailure(Throwable t) {
logger.warn("failed to run scheduled task", t);
public void onFailure(Exception e) {
logger.warn("failed to run scheduled task", e);
}

@Override
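Note on the hunk above: this is one instance of the pattern applied throughout the rest of this diff, where failure callbacks are narrowed from Throwable to Exception so that Errors are no longer caught by handler code. A simplified sketch of a callback written against the narrowed signature; the interface here is an illustrative stand-in, not the Elasticsearch ActionListener/AbstractRunnable API.

    // Illustrative stand-in for the narrowed callback contract.
    interface FailureAwareTask {
        void run();

        // previously: void onFailure(Throwable t)
        void onFailure(Exception e);
    }

    class LoggingTask implements FailureAwareTask {
        @Override
        public void run() {
            System.out.println("scheduled task ran");
        }

        @Override
        public void onFailure(Exception e) {
            // only Exceptions reach this handler now; Errors propagate
            System.out.println("failed to run scheduled task: " + e);
        }
    }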
@@ -34,8 +34,8 @@ public class SecurityClientTransportService extends TransportService {
try {
clientFilter.outbound(action, request);
super.sendRequest(node, action, request, options, handler);
} catch (Throwable t) {
handler.handleException(new TransportException("failed sending request", t));
} catch (Exception e) {
handler.handleException(new TransportException("failed sending request", e));
}
}
@@ -38,9 +38,6 @@ import static org.elasticsearch.xpack.security.transport.netty.SecurityNettyTran
import static org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport.PROFILE_CLIENT_AUTH_SETTING;
import static org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport.SSL_SETTING;

/**
*
*/
public class SecurityServerTransportService extends TransportService {

public static final String SETTING_NAME = "xpack.security.type";

@@ -81,16 +78,16 @@ public class SecurityServerTransportService extends TransportService {
try {
clientFilter.outbound(action, request);
super.sendRequest(node, action, request, options, new ContextRestoreResponseHandler<>(original, handler));
} catch (Throwable t) {
handler.handleException(new TransportException("failed sending request", t));
} catch (Exception e) {
handler.handleException(new TransportException("failed sending request", e));
}
}
} else {
try {
clientFilter.outbound(action, request);
super.sendRequest(node, action, request, options, handler);
} catch (Throwable t) {
handler.handleException(new TransportException("failed sending request", t));
} catch (Exception e) {
handler.handleException(new TransportException("failed sending request", e));
}
}
}

@@ -194,8 +191,8 @@ public class SecurityServerTransportService extends TransportService {
RequestContext context = new RequestContext(request, threadContext);
RequestContext.setCurrent(context);
handler.messageReceived(request, channel, task);
} catch (Throwable t) {
channel.sendResponse(t);
} catch (Exception e) {
channel.sendResponse(e);
} finally {
RequestContext.removeCurrent();
}
@@ -21,6 +21,11 @@ public class SecurityTransportModule extends AbstractSecurityModule {

@Override
protected void configure(boolean clientMode) {
if (securityEnabled == false) {
bind(IPFilter.class).toProvider(Providers.<IPFilter>of(null));
return;
}

if (clientMode) {
// no ip filtering on the client
bind(IPFilter.class).toProvider(Providers.<IPFilter>of(null));
@@ -130,6 +130,13 @@ public class IPFilter {
updateRules();
}

public Map<String, Object> usageStats() {
Map<String, Object> map = new HashMap<>(2);
map.put("http", Collections.singletonMap("enabled", isHttpFilterEnabled));
map.put("transport", Collections.singletonMap("enabled", isIpFilterEnabled));
return map;
}

private void setTransportProfiles(Settings settings) {
transportGroups = settings.getAsGroups();
updateRules();
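Note on the hunk above: the new IPFilter usageStats() returns a two-entry map keyed by "http" and "transport", each holding a nested singleton map with an "enabled" flag. A small stand-alone sketch of building and reading a map of that shape (plain Java, independent of the IPFilter class):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class IpFilterUsageStatsShape {
        public static void main(String[] args) {
            boolean httpFilterEnabled = true;
            boolean transportFilterEnabled = false;

            // same structure as the map returned by usageStats()
            Map<String, Object> stats = new HashMap<>(2);
            stats.put("http", Collections.singletonMap("enabled", httpFilterEnabled));
            stats.put("transport", Collections.singletonMap("enabled", transportFilterEnabled));

            // a consumer (e.g. a usage/stats endpoint) reads the flags back out
            Map<?, ?> http = (Map<?, ?>) stats.get("http");
            System.out.println("http filtering enabled: " + http.get("enabled"));
        }
    }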
@@ -26,14 +26,12 @@ import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.jboss.netty.handler.ssl.SslHandler;

import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.xpack.security.Security.featureEnabledSetting;

@@ -113,7 +111,7 @@ public class SecurityNettyTransport extends NettyTransport {
}

@Override
protected void onException(Channel channel, Throwable e) {
protected void onException(Channel channel, Exception e) {
if (isNotSslRecordException(e)) {
if (logger.isTraceEnabled()) {
logger.trace("received plaintext traffic on a encrypted channel, closing connection {}", e, channel);
@@ -146,7 +146,7 @@ public class ClearRealmsCacheTests extends SecurityIntegTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
error.set(e);
latch.countDown();
}
@@ -709,9 +709,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
SearchResponse response = client()
.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(1));
assertThat(response.getHits().getAt(0).fields().get("field1").<String>getValue(), equalTo("value1"));

@@ -719,9 +719,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// user2 is granted access to field2 only:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(1));
assertThat(response.getHits().getAt(0).fields().get("field2").<String>getValue(), equalTo("value2"));

@@ -729,9 +729,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// user3 is granted access to field1 and field2:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(2));
assertThat(response.getHits().getAt(0).fields().get("field1").<String>getValue(), equalTo("value1"));

@@ -740,18 +740,18 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// user4 is granted access to no fields:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(0));

// user5 has no field level security configured:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(3));
assertThat(response.getHits().getAt(0).fields().get("field1").<String>getValue(), equalTo("value1"));

@@ -761,9 +761,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// user6 has field level security configured with access to field*:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(3));
assertThat(response.getHits().getAt(0).fields().get("field1").<String>getValue(), equalTo("value1"));

@@ -773,9 +773,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// user7 has access to all fields due to a mix of roles without field level security and with:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(3));
assertThat(response.getHits().getAt(0).fields().get("field1").<String>getValue(), equalTo("value1"));

@@ -785,9 +785,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// user8 has field level security configured with access to field1 and field2:
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD)))
.prepareSearch("test")
.addField("field1")
.addField("field2")
.addField("field3")
.addStoredField("field1")
.addStoredField("field2")
.addStoredField("field3")
.get();
assertThat(response.getHits().getAt(0).fields().size(), equalTo(2));
assertThat(response.getHits().getAt(0).fields().get("field1").<String>getValue(), equalTo("value1"));
@@ -5,12 +5,19 @@
*/
package org.elasticsearch.xpack.security;

import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.audit.AuditTrailService;
import org.elasticsearch.xpack.security.authc.Realm;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.XPackFeatureSet;
import org.elasticsearch.xpack.security.authz.store.RolesStore;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyHttpServerTransport;
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport;
import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource;
import org.junit.Before;

@@ -21,6 +28,7 @@ import java.util.List;
import java.util.Map;

import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;
import static org.mockito.Matchers.anyObject;

@@ -34,24 +42,35 @@ import static org.mockito.Mockito.when;
*/
public class SecurityFeatureSetTests extends ESTestCase {

private Settings settings;
private SecurityLicenseState licenseState;
private Realms realms;
private NamedWriteableRegistry namedWriteableRegistry;
private IPFilter ipFilter;
private RolesStore rolesStore;
private AuditTrailService auditTrail;
private CryptoService cryptoService;

@Before
public void init() throws Exception {
settings = Settings.builder().put("path.home", createTempDir()).build();
licenseState = mock(SecurityLicenseState.class);
realms = mock(Realms.class);
namedWriteableRegistry = mock(NamedWriteableRegistry.class);
ipFilter = mock(IPFilter.class);
rolesStore = mock(RolesStore.class);
auditTrail = mock(AuditTrailService.class);
cryptoService = mock(CryptoService.class);
}

public void testWritableRegistration() throws Exception {
new SecurityFeatureSet(Settings.EMPTY, licenseState, realms, namedWriteableRegistry);
new SecurityFeatureSet(settings, licenseState, realms, namedWriteableRegistry, rolesStore, ipFilter, auditTrail, cryptoService);
verify(namedWriteableRegistry).register(eq(SecurityFeatureSet.Usage.class), eq("xpack.usage.security"), anyObject());
}

public void testAvailable() throws Exception {
SecurityFeatureSet featureSet = new SecurityFeatureSet(Settings.EMPTY, licenseState, realms, namedWriteableRegistry);
SecurityFeatureSet featureSet = new SecurityFeatureSet(settings, licenseState, realms, namedWriteableRegistry, rolesStore,
ipFilter, auditTrail, cryptoService);
boolean available = randomBoolean();
when(licenseState.authenticationAndAuthorizationEnabled()).thenReturn(available);
assertThat(featureSet.available(), is(available));

@@ -60,27 +79,60 @@ public class SecurityFeatureSetTests extends ESTestCase {
public void testEnabledSetting() throws Exception {
boolean enabled = randomBoolean();
Settings settings = Settings.builder()
.put(this.settings)
.put("xpack.security.enabled", enabled)
.build();
SecurityFeatureSet featureSet = new SecurityFeatureSet(settings, licenseState, realms, namedWriteableRegistry);
SecurityFeatureSet featureSet = new SecurityFeatureSet(settings, licenseState, realms, namedWriteableRegistry, rolesStore,
ipFilter, auditTrail, cryptoService);
assertThat(featureSet.enabled(), is(enabled));
}

public void testEnabledDefault() throws Exception {
SecurityFeatureSet featureSet = new SecurityFeatureSet(Settings.EMPTY, licenseState, realms, namedWriteableRegistry);
SecurityFeatureSet featureSet = new SecurityFeatureSet(settings, licenseState, realms, namedWriteableRegistry, rolesStore,
ipFilter, auditTrail, cryptoService);
assertThat(featureSet.enabled(), is(true));
}

public void testUsage() throws Exception {

boolean available = randomBoolean();
when(licenseState.authenticationAndAuthorizationEnabled()).thenReturn(available);
boolean authcAuthzAvailable = randomBoolean();
when(licenseState.authenticationAndAuthorizationEnabled()).thenReturn(authcAuthzAvailable);

Settings.Builder settings = Settings.builder();
Settings.Builder settings = Settings.builder().put(this.settings);

boolean enabled = randomBoolean();
settings.put("xpack.security.enabled", enabled);

final boolean httpSSLEnabled = randomBoolean();
settings.put(SecurityNettyHttpServerTransport.SSL_SETTING.getKey(), httpSSLEnabled);
final boolean transportSSLEnabled = randomBoolean();
settings.put(SecurityNettyTransport.SSL_SETTING.getKey(), transportSSLEnabled);
final boolean auditingEnabled = randomBoolean();
final String[] auditOutputs = randomFrom(new String[] {"logfile"}, new String[] {"index"}, new String[] {"logfile", "index"});
when(auditTrail.usageStats())
.thenReturn(MapBuilder.<String, Object>newMapBuilder()
.put("enabled", auditingEnabled)
.put("outputs", auditOutputs)
.map());

final boolean httpIpFilterEnabled = randomBoolean();
final boolean transportIPFilterEnabled = randomBoolean();
when(ipFilter.usageStats())
.thenReturn(MapBuilder.<String, Object>newMapBuilder()
.put("http", Collections.singletonMap("enabled", httpIpFilterEnabled))
.put("transport", Collections.singletonMap("enabled", transportIPFilterEnabled))
.map());

final boolean rolesStoreEnabled = randomBoolean();
if (rolesStoreEnabled) {
when(rolesStore.usageStats()).thenReturn(Collections.singletonMap("count", 1));
} else {
when(rolesStore.usageStats()).thenReturn(Collections.emptyMap());
}
final boolean useSystemKey = randomBoolean();
when(cryptoService.encryptionEnabled()).thenReturn(useSystemKey);

List<Realm> realmsList= new ArrayList<>();

for (int i = 0; i < 5; i++) {

@@ -93,24 +145,49 @@ public class SecurityFeatureSetTests extends ESTestCase {
realmUsage.put("key3", i % 2 == 0);
when(realm.usageStats()).thenReturn(realmUsage);
}
when(realms.iterator()).thenReturn(available ? realmsList.iterator() : Collections.<Realm>emptyIterator());
when(realms.iterator()).thenReturn(authcAuthzAvailable ? realmsList.iterator() : Collections.<Realm>emptyIterator());

SecurityFeatureSet featureSet = new SecurityFeatureSet(settings.build(), licenseState, realms, namedWriteableRegistry);
SecurityFeatureSet featureSet = new SecurityFeatureSet(settings.build(), licenseState, realms, namedWriteableRegistry, rolesStore,
ipFilter, auditTrail, cryptoService);
XPackFeatureSet.Usage usage = featureSet.usage();
assertThat(usage, is(notNullValue()));
assertThat(usage.name(), is(Security.NAME));
assertThat(usage.enabled(), is(enabled));
assertThat(usage.available(), is(available));
assertThat(usage.available(), is(authcAuthzAvailable));
XContentSource source = new XContentSource(usage);

if (enabled && available) {
if (enabled) {
if (authcAuthzAvailable) {
for (int i = 0; i < 5; i++) {
assertThat(source.getValue("enabled_realms." + i + ".key1"), is("value" + i));
assertThat(source.getValue("enabled_realms." + i + ".key2"), is(i));
assertThat(source.getValue("enabled_realms." + i + ".key3"), is(i % 2 == 0));
}
} else if (enabled) {
} else {
assertThat(source.getValue("enabled_realms"), is(notNullValue()));
}

// check SSL
assertThat(source.getValue("ssl.http.enabled"), is(httpSSLEnabled));
assertThat(source.getValue("ssl.transport.enabled"), is(transportSSLEnabled));

// auditing
assertThat(source.getValue("audit.enabled"), is(auditingEnabled));
assertThat(source.getValue("audit.outputs"), contains(auditOutputs));

// ip filter
assertThat(source.getValue("ipfilter.http.enabled"), is(httpIpFilterEnabled));
assertThat(source.getValue("ipfilter.transport.enabled"), is(transportIPFilterEnabled));

// roles
if (rolesStoreEnabled) {
assertThat(source.getValue("roles.count"), is(1));
} else {
assertThat(((Map) source.getValue("roles")).isEmpty(), is(true));
}

// system key
assertThat(source.getValue("system_key"), is(useSystemKey));
} else {
assertThat(source.getValue("enabled_realms"), is(nullValue()));
}
@@ -33,6 +33,6 @@ public class VersionCompatibilityTests extends ESTestCase {
*
*/
assertThat("Remove workaround in LicenseService class when es core supports merging cluster level custom metadata",
Version.CURRENT.equals(Version.V_5_0_0_alpha4), is(true));
Version.CURRENT.equals(Version.V_5_0_0_alpha5), is(true));
}
}
@@ -55,7 +55,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -96,7 +96,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -108,7 +108,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
}

public void testException() {
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException());
final String roleName = randomFrom("admin", "dept_a", "restricted");
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),

@@ -124,7 +124,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<Boolean> listener = (ActionListener<Boolean>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(rolesStore).deleteRole(eq(request), any(ActionListener.class));

@@ -138,14 +138,14 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), is(sameInstance(t)));
assertThat(throwableRef.get(), is(sameInstance(e)));
verify(rolesStore, times(1)).deleteRole(eq(request), any(ActionListener.class));
}
}
@@ -89,7 +89,7 @@ public class TransportGetRolesActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -158,7 +158,7 @@ public class TransportGetRolesActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -251,7 +251,7 @@ public class TransportGetRolesActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -273,7 +273,7 @@ public class TransportGetRolesActionTests extends ESTestCase {
}

public void testException() {
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException());
final List<RoleDescriptor> storeRoleDescriptors = randomRoleDescriptors();
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
SecurityContext context = mock(SecurityContext.class);

@@ -290,7 +290,7 @@ public class TransportGetRolesActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<RoleDescriptor> listener = (ActionListener<RoleDescriptor>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(rolesStore).getRoleDescriptor(eq(request.names()[0]), any(ActionListener.class));

@@ -301,7 +301,7 @@ public class TransportGetRolesActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<List<RoleDescriptor>> listener = (ActionListener<List<RoleDescriptor>>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(rolesStore).getRoleDescriptors(aryEq(request.names()), any(ActionListener.class));

@@ -316,13 +316,13 @@ public class TransportGetRolesActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), is(t));
assertThat(throwableRef.get(), is(e));
assertThat(responseRef.get(), is(nullValue()));
}
@@ -56,7 +56,7 @@ public class TransportPutRoleActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -97,7 +97,7 @@ public class TransportPutRoleActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -109,7 +109,7 @@ public class TransportPutRoleActionTests extends ESTestCase {
}

public void testException() {
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException());
final String roleName = randomFrom("admin", "dept_a", "restricted");
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),

@@ -124,7 +124,7 @@ public class TransportPutRoleActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 3;
ActionListener<Boolean> listener = (ActionListener<Boolean>) args[2];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(rolesStore).putRole(eq(request), any(RoleDescriptor.class), any(ActionListener.class));

@@ -138,14 +138,14 @@ public class TransportPutRoleActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), is(sameInstance(t)));
assertThat(throwableRef.get(), is(sameInstance(e)));
verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), any(ActionListener.class));
}
}
@@ -47,7 +47,7 @@ public class TransportAuthenticateActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -72,7 +72,7 @@ public class TransportAuthenticateActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -99,7 +99,7 @@ public class TransportAuthenticateActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});
@@ -68,7 +68,7 @@ public class TransportChangePasswordActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -97,7 +97,7 @@ public class TransportChangePasswordActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -135,7 +135,7 @@ public class TransportChangePasswordActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -152,13 +152,13 @@ public class TransportChangePasswordActionTests extends ESTestCase {
ChangePasswordRequest request = new ChangePasswordRequest();
request.username(user.principal());
request.passwordHash(Hasher.BCRYPT.hash(new SecuredString("changeme".toCharArray())));
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new RuntimeException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new RuntimeException());
doAnswer(new Answer() {
public Void answer(InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<Void> listener = (ActionListener<Void>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(usersStore).changePassword(eq(request), any(ActionListener.class));

@@ -174,14 +174,14 @@ public class TransportChangePasswordActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), sameInstance(t));
assertThat(throwableRef.get(), sameInstance(e));
verify(usersStore, times(1)).changePassword(eq(request), any(ActionListener.class));
}
}
@@ -63,7 +63,7 @@ public class TransportDeleteUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -90,7 +90,7 @@ public class TransportDeleteUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -118,7 +118,7 @@ public class TransportDeleteUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -156,7 +156,7 @@ public class TransportDeleteUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -168,7 +168,7 @@ public class TransportDeleteUserActionTests extends ESTestCase {
}

public void testException() {
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new RuntimeException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new RuntimeException());
final User user = new User("joe");
NativeUsersStore usersStore = mock(NativeUsersStore.class);
TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class),

@@ -180,7 +180,7 @@ public class TransportDeleteUserActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<Boolean> listener = (ActionListener<Boolean>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(usersStore).deleteUser(eq(request), any(ActionListener.class));

@@ -194,14 +194,14 @@ public class TransportDeleteUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), sameInstance(t));
assertThat(throwableRef.get(), sameInstance(e));
verify(usersStore, times(1)).deleteUser(eq(request), any(ActionListener.class));
}
}
@@ -84,7 +84,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -116,7 +116,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -147,7 +147,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -185,7 +185,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -242,7 +242,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -261,7 +261,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

public void testException() {
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException());
final List<User> storeUsers =
randomFrom(Collections.singletonList(new User("joe")), Arrays.asList(new User("jane"), new User("fred")), randomUsers());
final String[] storeUsernames = storeUsers.stream().map(User::principal).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY);

@@ -277,7 +277,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<List<User>> listener = (ActionListener<List<User>>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(usersStore).getUsers(aryEq(storeUsernames), any(ActionListener.class));

@@ -288,7 +288,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<User> listener = (ActionListener<User>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(usersStore).getUser(eq(storeUsernames[0]), any(ActionListener.class));

@@ -303,13 +303,13 @@ public class TransportGetUsersActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), is(sameInstance(t)));
assertThat(throwableRef.get(), is(sameInstance(e)));
assertThat(responseRef.get(), is(nullValue()));
if (request.usernames().length == 1) {
verify(usersStore, times(1)).getUser(eq(request.usernames()[0]), any(ActionListener.class));
@@ -67,7 +67,7 @@ public class TransportPutUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -95,7 +95,7 @@ public class TransportPutUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -124,7 +124,7 @@ public class TransportPutUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -167,7 +167,7 @@ public class TransportPutUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

@@ -179,7 +179,7 @@ public class TransportPutUserActionTests extends ESTestCase {
}

public void testException() {
final Throwable t = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException());
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException());
final User user = new User("joe");
NativeUsersStore usersStore = mock(NativeUsersStore.class);
TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class),

@@ -192,7 +192,7 @@ public class TransportPutUserActionTests extends ESTestCase {
Object[] args = invocation.getArguments();
assert args.length == 2;
ActionListener<Boolean> listener = (ActionListener<Boolean>) args[1];
listener.onFailure(t);
listener.onFailure(e);
return null;
}
}).when(usersStore).putUser(eq(request), any(ActionListener.class));

@@ -206,14 +206,14 @@ public class TransportPutUserActionTests extends ESTestCase {
}

@Override
public void onFailure(Throwable e) {
public void onFailure(Exception e) {
throwableRef.set(e);
}
});

assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
assertThat(throwableRef.get(), sameInstance(t));
assertThat(throwableRef.get(), sameInstance(e));
verify(usersStore, times(1)).putUser(eq(request), any(ActionListener.class));
}
}
@@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.security.audit;

import org.elasticsearch.Version;
import org.elasticsearch.common.inject.Guice;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@@ -15,20 +14,17 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.node.Node;
import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.local.LocalTransport;
import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail;

import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
*
*/
public class AuditTrailModuleTests extends ESTestCase {
public void testEnabled() throws Exception {
Settings settings = Settings.builder()

@@ -93,7 +89,7 @@ public class AuditTrailModuleTests extends ESTestCase {
try {
Guice.createInjector(settingsModule, new AuditTrailModule(settings));
fail("Expect initialization to fail when an unknown audit trail output is configured");
} catch (Throwable t) {
} catch (Exception e) {
// expected
}
}
@@ -17,20 +17,20 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.InternalClient;
import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail.State;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportMessage;
import org.junit.After;
import org.junit.Before;

@@ -58,8 +58,8 @@ public class IndexAuditTrailMutedTests extends ESTestCase {
@Before
public void setup() {
transport = mock(Transport.class);
when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { DummyTransportAddress.INSTANCE },
DummyTransportAddress.INSTANCE));
when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { LocalTransportAddress.buildUnique() },
LocalTransportAddress.buildUnique()));

threadPool = new TestThreadPool("index audit trail tests");
transportClient = TransportClient.builder().settings(Settings.EMPTY).build();
@@ -21,21 +21,11 @@ import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail.Message;
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.crypto.InternalCryptoService;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.SecurityIntegTestCase;

@@ -46,6 +36,15 @@ import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportInfo;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail.Message;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.crypto.InternalCryptoService;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyTransport;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;

@@ -64,12 +63,12 @@ import java.util.Map;
import java.util.Set;
import java.util.function.Function;

import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.InternalTestCluster.clusterName;
import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.Rollover.DAILY;
import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.Rollover.HOURLY;
import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.Rollover.MONTHLY;
import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.Rollover.WEEKLY;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.InternalTestCluster.clusterName;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

@@ -255,8 +254,8 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
Settings settings = builder.put(settings(rollover, includes, excludes)).build();
logger.info("--> settings: [{}]", settings.getAsMap().toString());
Transport transport = mock(Transport.class);
BoundTransportAddress boundTransportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE},
DummyTransportAddress.INSTANCE);
BoundTransportAddress boundTransportAddress = new BoundTransportAddress(new TransportAddress[]{ remoteHostAddress()},
remoteHostAddress());
when(transport.boundAddress()).thenReturn(boundTransportAddress);
threadPool = new TestThreadPool("index audit trail tests");
enqueuedMessage = new SetOnce<>();

@@ -279,7 +278,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
assertAuditMessage(hit, "transport", "anonymous_access_denied");
Map<String, Object> sourceMap = hit.sourceAsMap();
if (message instanceof RemoteHostMockMessage) {
assertEquals(remoteHostAddress(), sourceMap.get("origin_address"));
assertEquals(remoteHostAddress().toString(), sourceMap.get("origin_address"));
} else {
assertEquals("local[local_host]", sourceMap.get("origin_address"));
}

@@ -316,7 +315,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
assertAuditMessage(hit, "transport", "authentication_failed");

if (message instanceof RemoteHostMockMessage) {
assertEquals(remoteHostAddress(), sourceMap.get("origin_address"));
assertEquals(remoteHostAddress().toString(), sourceMap.get("origin_address"));
} else {
assertEquals("local[local_host]", sourceMap.get("origin_address"));
}

@@ -336,7 +335,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
assertAuditMessage(hit, "transport", "authentication_failed");
Map<String, Object> sourceMap = hit.sourceAsMap();
if (message instanceof RemoteHostMockMessage) {
assertEquals(remoteHostAddress(), sourceMap.get("origin_address"));
assertEquals(remoteHostAddress().toString(), sourceMap.get("origin_address"));
} else {
assertEquals("local[local_host]", sourceMap.get("origin_address"));
}

@@ -391,7 +390,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
Map<String, Object> sourceMap = hit.sourceAsMap();

if (message instanceof RemoteHostMockMessage) {
assertEquals(remoteHostAddress(), sourceMap.get("origin_address"));
assertEquals(remoteHostAddress().toString(), sourceMap.get("origin_address"));
} else {
assertEquals("local[local_host]", sourceMap.get("origin_address"));
}

@@ -621,8 +620,8 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
DateTime dateTime = ISODateTimeFormat.dateTimeParser().withZoneUTC().parseDateTime((String) sourceMap.get("@timestamp"));
assertThat(dateTime.isBefore(DateTime.now(DateTimeZone.UTC)), is(true));

assertThat(DummyTransportAddress.INSTANCE.getHost(), equalTo(sourceMap.get("node_host_name")));
assertThat(DummyTransportAddress.INSTANCE.getAddress(), equalTo(sourceMap.get("node_host_address")));
assertThat(remoteHostAddress().getHost(), equalTo(sourceMap.get("node_host_name")));
assertThat(remoteHostAddress().getAddress(), equalTo(sourceMap.get("node_host_address")));

assertEquals(layer, sourceMap.get("layer"));
assertEquals(type, sourceMap.get("event_type"));

@@ -636,13 +635,13 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {

private static class RemoteHostMockMessage extends TransportMessage {
RemoteHostMockMessage() throws Exception {
remoteAddress(DummyTransportAddress.INSTANCE);
remoteAddress(remoteHostAddress());
}
}

private static class RemoteHostMockTransportRequest extends TransportRequest {
RemoteHostMockTransportRequest() throws Exception {
remoteAddress(DummyTransportAddress.INSTANCE);
remoteAddress(remoteHostAddress());
}
}

@@ -738,8 +737,8 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
return actionGet.getStatus();
}

static String remoteHostAddress() throws Exception {
return DummyTransportAddress.INSTANCE.toString();
private static LocalTransportAddress remoteHostAddress() {
return new LocalTransportAddress("_remote_host_");
}
}
@@ -10,7 +10,7 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;

@@ -49,8 +49,8 @@ public class IndexAuditTrailUpdateMappingTests extends SecurityIntegTestCase {
Settings settings = Settings.builder().put("xpack.security.audit.index.rollover", rollover.name().toLowerCase(Locale.ENGLISH))
.put("path.home", createTempDir()).build();
Transport transport = mock(Transport.class);
when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { DummyTransportAddress.INSTANCE },
DummyTransportAddress.INSTANCE));
when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { LocalTransportAddress.buildUnique() },
LocalTransportAddress.buildUnique()));
auditor = new IndexAuditTrail(settings, transport, Providers.of(internalClient()), threadPool,
mock(ClusterService.class));
@@ -13,22 +13,21 @@ import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger.Level;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.rest.RemoteHostHeader;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.junit.Before;

import java.io.IOException;

@@ -115,8 +114,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
.build();
transport = mock(Transport.class);
when(transport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { DummyTransportAddress.INSTANCE },
DummyTransportAddress.INSTANCE));
when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { LocalTransportAddress.buildUnique() },
LocalTransportAddress.buildUnique()));
prefix = LoggingAuditTrail.resolvePrefix(settings, transport);
}