Merge branch 'master' into feature/sql

Original commit: elastic/x-pack-elasticsearch@35248ef725
Commit: 764d802bef
@@ -1 +0,0 @@
-/bin/
@@ -0,0 +1,5 @@
+rem Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+rem or more contributor license agreements. Licensed under the Elastic License;
+rem you may not use this file except in compliance with the Elastic License.
+
+set ES_CLASSPATH=!ES_CLASSPATH!;!ES_HOME!/plugins/x-pack/*
@@ -324,7 +324,6 @@ public class DataCountsReporter extends AbstractComponent {
     public void startNewIncrementalCount() {
         incrementalRecordStats = new DataCounts(job.getId());
         retrieveDiagnosticsIntermediateResults();
-        diagnostics.resetCounts();
     }

     public DataCounts incrementalStats() {
@@ -338,14 +337,14 @@ public class DataCountsReporter extends AbstractComponent {
     }

     private void retrieveDiagnosticsIntermediateResults() {
-        totalRecordStats.incrementBucketCount(diagnostics.getEmptyBucketCount());
         totalRecordStats.incrementBucketCount(diagnostics.getBucketCount());
+        totalRecordStats.incrementEmptyBucketCount(diagnostics.getEmptyBucketCount());
         totalRecordStats.incrementSparseBucketCount(diagnostics.getSparseBucketCount());
         totalRecordStats.updateLatestEmptyBucketTimeStamp(diagnostics.getLatestEmptyBucketTime());
         totalRecordStats.updateLatestSparseBucketTimeStamp(diagnostics.getLatestSparseBucketTime());

-        incrementalRecordStats.incrementEmptyBucketCount(diagnostics.getEmptyBucketCount());
         incrementalRecordStats.incrementBucketCount(diagnostics.getBucketCount());
+        incrementalRecordStats.incrementEmptyBucketCount(diagnostics.getEmptyBucketCount());
         incrementalRecordStats.incrementSparseBucketCount(diagnostics.getSparseBucketCount());
         incrementalRecordStats.updateLatestEmptyBucketTimeStamp(diagnostics.getLatestEmptyBucketTime());
         incrementalRecordStats.updateLatestSparseBucketTimeStamp(diagnostics.getLatestSparseBucketTime());
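The two hunks above fix a copy-paste bug (the overall bucket count was being incremented by the empty-bucket count) and give empty buckets their own counter, mirrored into both the running-total and the incremental stats. A minimal sketch of this double-accumulator pattern, using hypothetical stand-in classes (Stats, Reporter) rather than the x-pack ones:

    // Hypothetical stand-ins, not the x-pack classes.
    class Stats {
        long bucketCount;
        long emptyBucketCount;
    }

    class Reporter {
        final Stats total = new Stats();
        Stats incremental = new Stats();

        // Mirrors retrieveDiagnosticsIntermediateResults(): each diagnostic
        // count is applied to BOTH accumulators, and empty buckets go to
        // their own counter instead of being folded into the bucket count.
        void retrieveDiagnostics(long buckets, long emptyBuckets) {
            total.bucketCount += buckets;
            total.emptyBucketCount += emptyBuckets;
            incremental.bucketCount += buckets;
            incremental.emptyBucketCount += emptyBuckets;
        }

        // Totals survive; only the incremental accumulator resets.
        void startNewIncrementalCount() {
            incremental = new Stats();
        }
    }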
@@ -63,6 +63,15 @@ public class ReservedRolesStore {
         // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role
         .put("reporting_user", new RoleDescriptor("reporting_user", null, null,
                 null, MetadataUtils.DEFAULT_RESERVED_METADATA))
+        .put("kibana_dashboard_only_user", new RoleDescriptor(
+                "kibana_dashboard_only_user",
+                null,
+                new RoleDescriptor.IndicesPrivileges[] {
+                        RoleDescriptor.IndicesPrivileges.builder()
+                                .indices(".kibana*").privileges("read", "view_index_metadata").build()
+                },
+                null,
+                MetadataUtils.DEFAULT_RESERVED_METADATA))
         .put(KibanaUser.ROLE_NAME, new RoleDescriptor(KibanaUser.ROLE_NAME, new String[] { "monitor", MonitoringBulkAction.NAME},
                 new RoleDescriptor.IndicesPrivileges[] {
                         RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*", ".reporting-*").privileges("all").build(),
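The new reserved role deliberately carries no cluster or run-as privileges; for readability, here is the same descriptor with its constructor positions annotated (a restatement of the hunk above, assuming the five-argument RoleDescriptor constructor shown there):

    RoleDescriptor dashboardOnly = new RoleDescriptor(
            "kibana_dashboard_only_user",                 // role name
            null,                                         // cluster privileges: none
            new RoleDescriptor.IndicesPrivileges[] {      // index privileges:
                    RoleDescriptor.IndicesPrivileges.builder()
                            .indices(".kibana*")                       // only Kibana's own indices
                            .privileges("read", "view_index_metadata") // read-only
                            .build()
            },
            null,                                         // run-as principals: none
            MetadataUtils.DEFAULT_RESERVED_METADATA);     // marks the role as _reserved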
@@ -140,7 +140,10 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem
                                           AsyncSender sender) {
                 // There cannot be a request outgoing from this node that is not associated with a user.
                 if (securityContext.getAuthentication() == null) {
-                    throw new IllegalStateException("there should always be a user when sending a message");
+                    // we use an assertion here to ensure we catch this in our testing infrastructure, but leave the ISE for cases we do not catch
+                    // in tests and may be hit by a user
+                    assertNoAuthentication(action);
+                    throw new IllegalStateException("there should always be a user when sending a message for action [" + action + "]");
                 }

                 try {
@ -150,6 +153,11 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// pkg-private method to allow overriding for tests
|
||||||
|
void assertNoAuthentication(String action) {
|
||||||
|
assert false : "there should always be a user when sending a message for action [" + action + "]";
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler(String action, String executor,
|
public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler(String action, String executor,
|
||||||
boolean forceExecution,
|
boolean forceExecution,
|
||||||
|
|
|
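The change pairs a Java assert (active under -ea, as in the test infrastructure) with an unconditional IllegalStateException that still guards production runs where assertions are disabled, and it routes the assert through a pkg-private method so tests can override it. A minimal sketch of the pattern with a hypothetical Sender class, not the x-pack interceptor:

    class Sender {
        void send(String action, Object authentication) {
            if (authentication == null) {
                assertNoAuthentication(action); // fails fast in test runs (-ea)
                throw new IllegalStateException(
                        "there should always be a user when sending a message for action [" + action + "]");
            }
            // ... deliver the message ...
        }

        // pkg-private so a test subclass can override it with a no-op
        void assertNoAuthentication(String action) {
            assert false : "there should always be a user when sending a message for action [" + action + "]";
        }
    }

The test hunk further down uses exactly this seam: it instantiates the interceptor as an anonymous subclass whose override of assertNoAuthentication is empty, so expectThrows can observe the IllegalStateException even with assertions enabled.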
@@ -165,8 +165,7 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste
             if (isIndexInternalFormatTriggeredWatchIndex && isIndexInternalFormatWatchIndex) {
                 executor.execute(() -> start(event.state(), false));
             } else {
-                logger.warn("Not starting watcher, run the Upgrade API first.");
-                logger.debug("Upgrade required, matches interal index format: watches index [{}], triggered watches index [{}]",
+                logger.warn("not starting watcher, upgrade API run required: .watches[{}], .triggered_watches[{}]",
                         isIndexInternalFormatWatchIndex, isIndexInternalFormatTriggeredWatchIndex);
             }
         }
@@ -60,21 +60,34 @@ public class PageParamsTests extends AbstractSerializingTestCase<PageParams> {
     protected PageParams mutateInstance(PageParams instance) throws IOException {
         int from = instance.getFrom();
         int size = instance.getSize();
+        int amountToAdd = between(1, 20);
         switch (between(0, 1)) {
         case 0:
-            from += between(1, 20);
+            from += amountToAdd;
             // If we have gone above the limit for max and size then we need to
-            // change size too
+            // adjust from and size to be valid
             if ((from + size) > PageParams.MAX_FROM_SIZE_SUM) {
-                size = PageParams.MAX_FROM_SIZE_SUM - from;
+                if (from >= 2 * amountToAdd) {
+                    // If from is large enough then just subtract the amount we added twice
+                    from -= 2 * amountToAdd;
+                } else {
+                    // Otherwise change size to obey the limit
+                    size = PageParams.MAX_FROM_SIZE_SUM - from;
+                }
             }
             break;
         case 1:
-            size += between(1, 20);
+            size += amountToAdd;
             // If we have gone above the limit for max and size then we need to
-            // change from too
+            // adjust from and size to be valid
             if ((from + size) > PageParams.MAX_FROM_SIZE_SUM) {
-                from = PageParams.MAX_FROM_SIZE_SUM - size;
+                if (size >= 2 * amountToAdd) {
+                    // If size is large enough then just subtract the amount we added twice
+                    size -= 2 * amountToAdd;
+                } else {
+                    // Otherwise change from to obey the limit
+                    from = PageParams.MAX_FROM_SIZE_SUM - size;
+                }
             }
             break;
         default:
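The mutation has to keep the invariant from + size <= PageParams.MAX_FROM_SIZE_SUM while still returning an instance that differs from the input. Adding amountToAdd and then, on overflow, subtracting 2 * amountToAdd nets a change of -amountToAdd, so the mutated field is still different from its original value. A self-contained check of the case-0 arithmetic (MAX is a stand-in for PageParams.MAX_FROM_SIZE_SUM):

    class PageMutation {
        static final int MAX = 10_000; // stand-in for PageParams.MAX_FROM_SIZE_SUM

        static int[] mutateFrom(int from, int size, int amountToAdd) {
            from += amountToAdd;
            if (from + size > MAX) {
                if (from >= 2 * amountToAdd) {
                    from -= 2 * amountToAdd; // net change to from: -amountToAdd
                } else {
                    size = MAX - from;       // shrink size to restore the invariant
                }
            }
            return new int[] { from, size };
        }
    }

Either branch re-establishes the invariant for a valid input (the first because from - amountToAdd + size <= from + size <= MAX, and from stays non-negative; the second by construction), and in both branches at least one field differs from the input, which is exactly what mutateInstance needs.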
@@ -37,6 +37,7 @@ public class DataCountsReporterTests extends ESTestCase {
     private Job job;
     private JobDataCountsPersister jobDataCountsPersister;
     private Settings settings;
+    private TimeValue bucketSpan = TimeValue.timeValueSeconds(300);

     @Before
     public void setUpMocks() {
@@ -46,7 +47,7 @@ public class DataCountsReporterTests extends ESTestCase {
                 .build();

         AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build()));
-        acBuilder.setBucketSpan(TimeValue.timeValueSeconds(300));
+        acBuilder.setBucketSpan(bucketSpan);
         acBuilder.setLatency(TimeValue.ZERO);
         acBuilder.setDetectors(Arrays.asList(new Detector.Builder("metric", "field").build()));
@@ -119,26 +120,32 @@ public class DataCountsReporterTests extends ESTestCase {
         assertAllCountFieldsEqualZero(stats);

         // write some more data
-        dataCountsReporter.reportRecordWritten(5, 302000);
-        dataCountsReporter.reportRecordWritten(5, 302000);
+        // skip a bucket so there is a non-zero empty bucket count
+        long timeStamp = bucketSpan.millis() * 2 + 2000;
+        dataCountsReporter.reportRecordWritten(5, timeStamp);
+        dataCountsReporter.reportRecordWritten(5, timeStamp);
         assertEquals(2, dataCountsReporter.incrementalStats().getInputRecordCount());
         assertEquals(10, dataCountsReporter.incrementalStats().getInputFieldCount());
         assertEquals(2, dataCountsReporter.incrementalStats().getProcessedRecordCount());
         assertEquals(6, dataCountsReporter.incrementalStats().getProcessedFieldCount());
-        assertEquals(302000, dataCountsReporter.incrementalStats().getLatestRecordTimeStamp().getTime());
+        assertEquals(602000, dataCountsReporter.incrementalStats().getLatestRecordTimeStamp().getTime());

         // check total stats
         assertEquals(4, dataCountsReporter.runningTotalStats().getInputRecordCount());
         assertEquals(20, dataCountsReporter.runningTotalStats().getInputFieldCount());
         assertEquals(4, dataCountsReporter.runningTotalStats().getProcessedRecordCount());
         assertEquals(12, dataCountsReporter.runningTotalStats().getProcessedFieldCount());
-        assertEquals(302000, dataCountsReporter.runningTotalStats().getLatestRecordTimeStamp().getTime());
+        assertEquals(602000, dataCountsReporter.runningTotalStats().getLatestRecordTimeStamp().getTime());

         // send 'flush' signal
         dataCountsReporter.finishReporting(ActionListener.wrap(r -> {}, e -> {}));
-        assertEquals(2, dataCountsReporter.runningTotalStats().getBucketCount());
-        assertEquals(0, dataCountsReporter.runningTotalStats().getEmptyBucketCount());
+        assertEquals(3, dataCountsReporter.runningTotalStats().getBucketCount());
+        assertEquals(1, dataCountsReporter.runningTotalStats().getEmptyBucketCount());
         assertEquals(0, dataCountsReporter.runningTotalStats().getSparseBucketCount());
+
+        assertEquals(3, dataCountsReporter.incrementalStats().getBucketCount());
+        assertEquals(1, dataCountsReporter.incrementalStats().getEmptyBucketCount());
+        assertEquals(0, dataCountsReporter.incrementalStats().getSparseBucketCount());
     }

     public void testReportLatestTimeIncrementalStats() throws IOException {
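The expected counts rest on simple bucket arithmetic: with a 300-second bucket span aligned at epoch zero, buckets cover [0, 300000), [300000, 600000) and [600000, 900000) ms. The test's earlier records (outside this hunk) fall in the first bucket, nothing lands in the second, and the two new records at 602000 ms land in the third, hence bucketCount == 3 and emptyBucketCount == 1. A worked check of the timestamp, assuming that alignment:

    class BucketMath {
        public static void main(String[] args) {
            long bucketSpanMs = 300_000L;                  // TimeValue.timeValueSeconds(300)
            long timeStamp = bucketSpanMs * 2 + 2000;      // 602000 ms, as in the test
            System.out.println(timeStamp);                 // 602000
            System.out.println(timeStamp / bucketSpanMs);  // 2 -> the records fall in the third bucket
        }
    }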
@@ -122,6 +122,7 @@ public class ReservedRolesStoreTests extends ESTestCase {
         assertThat(ReservedRolesStore.isReserved("machine_learning_admin"), is(true));
         assertThat(ReservedRolesStore.isReserved("watcher_user"), is(true));
         assertThat(ReservedRolesStore.isReserved("watcher_admin"), is(true));
+        assertThat(ReservedRolesStore.isReserved("kibana_dashboard_only_user"), is(true));
     }

     public void testIngestAdminRole() {
@@ -355,6 +356,36 @@ public class ReservedRolesStoreTests extends ESTestCase {
         assertThat(reportingUserRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(false));
     }

+    public void testKibanaDashboardOnlyUserRole() {
+        RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("kibana_dashboard_only_user");
+        assertNotNull(roleDescriptor);
+        assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true));
+
+        Role dashboardsOnlyUserRole = Role.builder(roleDescriptor, null).build();
+        assertThat(dashboardsOnlyUserRole.cluster().check(ClusterHealthAction.NAME), is(false));
+        assertThat(dashboardsOnlyUserRole.cluster().check(ClusterStateAction.NAME), is(false));
+        assertThat(dashboardsOnlyUserRole.cluster().check(ClusterStatsAction.NAME), is(false));
+        assertThat(dashboardsOnlyUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false));
+        assertThat(dashboardsOnlyUserRole.cluster().check(ClusterRerouteAction.NAME), is(false));
+        assertThat(dashboardsOnlyUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false));
+        assertThat(dashboardsOnlyUserRole.cluster().check(MonitoringBulkAction.NAME), is(false));
+
+        assertThat(dashboardsOnlyUserRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false));
+
+        final String index = ".kibana";
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false));
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false));
+
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(false));
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false));
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false));
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false));
+
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(true));
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true));
+        assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true));
+    }
+
     public void testSuperuserRole() {
         RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("superuser");
         assertNotNull(roleDescriptor);
@@ -618,7 +649,7 @@ public class ReservedRolesStoreTests extends ESTestCase {
         assertThat(logstashAdminRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false));
         assertThat(logstashAdminRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".logstash"), is(true));
         assertThat(logstashAdminRole.indices().allowedIndicesMatcher("indices:foo").test(randomAlphaOfLengthBetween(8, 24)),
                 is(false));

         final String index = ".logstash-" + randomIntBetween(0, 5);
@@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.Transport;
 import org.elasticsearch.transport.Transport.Connection;
@@ -41,7 +40,6 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Consumer;

 import static org.hamcrest.Matchers.arrayContaining;
-import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
@@ -169,7 +167,11 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase {
         SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool,
                 mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class),
                 securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
-                Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))));
+                Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))) {
+            @Override
+            void assertNoAuthentication(String action) {
+            }
+        };

         assertNull(securityContext.getUser());
         AsyncSender sender = interceptor.interceptSender(new AsyncSender() {
@@ -183,7 +185,7 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase {
         when(connection.getVersion()).thenReturn(Version.CURRENT);
         IllegalStateException e =
                 expectThrows(IllegalStateException.class, () -> sender.sendRequest(connection, "indices:foo", null, null, null));
-        assertEquals("there should always be a user when sending a message", e.getMessage());
+        assertEquals("there should always be a user when sending a message for action [indices:foo]", e.getMessage());
         assertNull(securityContext.getUser());
         verify(xPackLicenseState).isAuthAllowed();
         verify(securityContext, never()).executeAsUser(any(User.class), any(Consumer.class), any(Version.class));
@@ -37,9 +37,6 @@ indices:admin/mappings/fields/get[index][s]
 indices:admin/refresh[s]
 indices:admin/refresh[s][p]
 indices:admin/refresh[s][r]
-indices:admin/seq_no/resync
-indices:admin/seq_no/resync[p]
-indices:admin/seq_no/resync[r]
 indices:admin/seq_no/global_checkpoint_sync
 indices:admin/seq_no/global_checkpoint_sync[p]
 indices:admin/seq_no/global_checkpoint_sync[r]
@@ -116,6 +113,9 @@ internal:gateway/local/meta_state
 internal:gateway/local/meta_state[n]
 internal:gateway/local/started_shards
 internal:gateway/local/started_shards[n]
+internal:index/seq_no/resync
+internal:index/seq_no/resync[p]
+internal:index/seq_no/resync[r]
 internal:index/shard/exists
 internal:index/shard/recovery/clean_files
 internal:index/shard/recovery/file_chunk