Merge branch 'master' into index-lifecycle
This commit is contained in:
commit
952859d87d
|
@ -1009,7 +1009,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
|
|||
// simulate moving shard from maxNode to minNode
|
||||
final float delta = weight.weightShardAdded(this, minNode, idx) - weight.weightShardRemoved(this, maxNode, idx);
|
||||
if (delta < minCost ||
|
||||
(candidate != null && delta == minCost && candidate.id() > shard.id())) {
|
||||
(candidate != null && Float.compare(delta, minCost) == 0 && candidate.id() > shard.id())) {
|
||||
/* this last line is a tie-breaker to make the shard allocation alg deterministic
|
||||
* otherwise we rely on the iteration order of the index.getAllShards() which is a set.*/
|
||||
minCost = delta;
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.common.time;
|
||||
|
||||
import java.time.ZoneId;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
|
||||
/**
|
||||
* wrapper class around java.time.DateTimeFormatter that supports multiple formats for easier parsing,
|
||||
* and one specific format for printing
|
||||
*/
|
||||
public class CompoundDateTimeFormatter {
|
||||
|
||||
final DateTimeFormatter printer;
|
||||
final DateTimeFormatter[] parsers;
|
||||
|
||||
CompoundDateTimeFormatter(DateTimeFormatter ... parsers) {
|
||||
if (parsers.length == 0) {
|
||||
throw new IllegalArgumentException("at least one date time formatter is required");
|
||||
}
|
||||
this.printer = parsers[0];
|
||||
this.parsers = parsers;
|
||||
}
|
||||
|
||||
public TemporalAccessor parse(String input) {
|
||||
DateTimeParseException failure = null;
|
||||
for (int i = 0; i < parsers.length; i++) {
|
||||
try {
|
||||
return parsers[i].parse(input);
|
||||
} catch (DateTimeParseException e) {
|
||||
if (failure == null) {
|
||||
failure = e;
|
||||
} else {
|
||||
failure.addSuppressed(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ensure that all parsers exceptions are returned instead of only the last one
|
||||
throw failure;
|
||||
}
|
||||
|
||||
public CompoundDateTimeFormatter withZone(ZoneId zoneId) {
|
||||
final DateTimeFormatter[] parsersWithZone = new DateTimeFormatter[parsers.length];
|
||||
for (int i = 0; i < parsers.length; i++) {
|
||||
parsersWithZone[i] = parsers[i].withZone(zoneId);
|
||||
}
|
||||
|
||||
return new CompoundDateTimeFormatter(parsersWithZone);
|
||||
}
|
||||
|
||||
public String format(TemporalAccessor accessor) {
|
||||
return printer.format(accessor);
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -21,13 +21,15 @@ package org.elasticsearch.monitor.jvm;
|
|||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
import java.lang.management.ManagementFactory;
|
||||
import java.lang.management.ThreadInfo;
|
||||
import java.lang.management.ThreadMXBean;
|
||||
import java.time.Clock;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
|
@ -41,7 +43,7 @@ public class HotThreads {
|
|||
|
||||
private static final Object mutex = new Object();
|
||||
|
||||
private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime");
|
||||
private static final CompoundDateTimeFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime");
|
||||
|
||||
private int busiestThreads = 3;
|
||||
private TimeValue interval = new TimeValue(500, TimeUnit.MILLISECONDS);
|
||||
|
@ -136,7 +138,7 @@ public class HotThreads {
|
|||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("Hot threads at ");
|
||||
sb.append(DATE_TIME_FORMATTER.printer().print(System.currentTimeMillis()));
|
||||
sb.append(DATE_TIME_FORMATTER.format(LocalDateTime.now(Clock.systemUTC())));
|
||||
sb.append(", interval=");
|
||||
sb.append(interval);
|
||||
sb.append(", busiestThreads=");
|
||||
|
|
|
@ -85,7 +85,7 @@ public class MovingFunctions {
|
|||
* The average is based on the count of non-null, non-NaN values.
|
||||
*/
|
||||
public static double stdDev(double[] values, double avg) {
|
||||
if (avg == Double.NaN) {
|
||||
if (Double.isNaN(avg)) {
|
||||
return Double.NaN;
|
||||
} else {
|
||||
long count = 0;
|
||||
|
|
|
@ -27,8 +27,8 @@ import org.elasticsearch.common.ParseField;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
|
@ -37,6 +37,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.Instant;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
|
@ -50,7 +52,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
|
|||
|
||||
public static final String CONTEXT_MODE_PARAM = "context_mode";
|
||||
public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT";
|
||||
private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strictDateOptionalTime");
|
||||
private static final CompoundDateTimeFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime");
|
||||
private static final String SNAPSHOT = "snapshot";
|
||||
private static final String UUID = "uuid";
|
||||
private static final String INDICES = "indices";
|
||||
|
@ -530,11 +532,11 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
|
|||
builder.field(REASON, reason);
|
||||
}
|
||||
if (verbose || startTime != 0) {
|
||||
builder.field(START_TIME, DATE_TIME_FORMATTER.printer().print(startTime));
|
||||
builder.field(START_TIME, DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(startTime).atZone(ZoneOffset.UTC)));
|
||||
builder.field(START_TIME_IN_MILLIS, startTime);
|
||||
}
|
||||
if (verbose || endTime != 0) {
|
||||
builder.field(END_TIME, DATE_TIME_FORMATTER.printer().print(endTime));
|
||||
builder.field(END_TIME, DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(endTime).atZone(ZoneOffset.UTC)));
|
||||
builder.field(END_TIME_IN_MILLIS, endTime);
|
||||
builder.humanReadableField(DURATION_IN_MILLIS, DURATION, new TimeValue(endTime - startTime));
|
||||
}
|
||||
|
|
|
@ -0,0 +1,392 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.joda;
|
||||
|
||||
import org.elasticsearch.common.time.CompoundDateTimeFormatter;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.format.DateTimeParseException;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
|
||||
public class JavaJodaTimeDuellingTests extends ESTestCase {
|
||||
|
||||
public void testTimeZoneFormatting() {
|
||||
assertSameDate("2001-01-01T00:00:00Z", "date_time_no_millis");
|
||||
// the following fail under java 8 but work under java 10, needs investigation
|
||||
assertSameDate("2001-01-01T00:00:00-0800", "date_time_no_millis");
|
||||
assertSameDate("2001-01-01T00:00:00+1030", "date_time_no_millis");
|
||||
assertSameDate("2001-01-01T00:00:00-08", "date_time_no_millis");
|
||||
assertSameDate("2001-01-01T00:00:00+10:30", "date_time_no_millis");
|
||||
|
||||
// different timezone parsing styles require a different number of letters
|
||||
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSSXXX", Locale.ROOT);
|
||||
formatter.parse("20181126T121212.123Z");
|
||||
formatter.parse("20181126T121212.123-08:30");
|
||||
|
||||
DateTimeFormatter formatter2 = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSSXXXX", Locale.ROOT);
|
||||
formatter2.parse("20181126T121212.123+1030");
|
||||
formatter2.parse("20181126T121212.123-0830");
|
||||
|
||||
// ... and can be combined, note that this is not an XOR, so one could append both timezones with this example
|
||||
DateTimeFormatter formatter3 = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSS[XXXX][XXX]", Locale.ROOT);
|
||||
formatter3.parse("20181126T121212.123Z");
|
||||
formatter3.parse("20181126T121212.123-08:30");
|
||||
formatter3.parse("20181126T121212.123+1030");
|
||||
formatter3.parse("20181126T121212.123-0830");
|
||||
}
|
||||
|
||||
public void testCustomTimeFormats() {
|
||||
assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
|
||||
assertSameDate("12/06", "dd/MM");
|
||||
assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
|
||||
}
|
||||
|
||||
public void testDuellingFormatsValidParsing() {
|
||||
assertSameDate("1522332219", "epoch_second");
|
||||
assertSameDate("1522332219321", "epoch_millis");
|
||||
|
||||
assertSameDate("20181126", "basic_date");
|
||||
assertSameDate("20181126T121212.123Z", "basic_date_time");
|
||||
assertSameDate("20181126T121212.123+10:00", "basic_date_time");
|
||||
assertSameDate("20181126T121212.123-0800", "basic_date_time");
|
||||
|
||||
assertSameDate("20181126T121212Z", "basic_date_time_no_millis");
|
||||
assertSameDate("2018363", "basic_ordinal_date");
|
||||
assertSameDate("2018363T121212.123Z", "basic_ordinal_date_time");
|
||||
assertSameDate("2018363T121212Z", "basic_ordinal_date_time_no_millis");
|
||||
assertSameDate("121212.123Z", "basic_time");
|
||||
assertSameDate("121212Z", "basic_time_no_millis");
|
||||
assertSameDate("T121212.123Z", "basic_t_time");
|
||||
assertSameDate("T121212Z", "basic_t_time_no_millis");
|
||||
assertSameDate("2018W313", "basic_week_date");
|
||||
assertSameDate("1W313", "basic_week_date");
|
||||
assertSameDate("18W313", "basic_week_date");
|
||||
assertSameDate("2018W313T121212.123Z", "basic_week_date_time");
|
||||
assertSameDate("2018W313T121212Z", "basic_week_date_time_no_millis");
|
||||
|
||||
assertSameDate("2018-12-31", "date");
|
||||
assertSameDate("18-5-6", "date");
|
||||
|
||||
assertSameDate("2018-12-31T12", "date_hour");
|
||||
assertSameDate("2018-12-31T8", "date_hour");
|
||||
|
||||
assertSameDate("2018-12-31T12:12", "date_hour_minute");
|
||||
assertSameDate("2018-12-31T8:3", "date_hour_minute");
|
||||
|
||||
assertSameDate("2018-12-31T12:12:12", "date_hour_minute_second");
|
||||
assertSameDate("2018-12-31T12:12:1", "date_hour_minute_second");
|
||||
|
||||
assertSameDate("2018-12-31T12:12:12.123", "date_hour_minute_second_fraction");
|
||||
assertSameDate("2018-12-31T12:12:12.123", "date_hour_minute_second_millis");
|
||||
assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis");
|
||||
assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction");
|
||||
|
||||
assertSameDate("2018-12-31", "date_optional_time");
|
||||
assertSameDate("2018-12-1", "date_optional_time");
|
||||
assertSameDate("2018-12-31T10:15:30", "date_optional_time");
|
||||
assertSameDate("2018-12-31T10:15:3", "date_optional_time");
|
||||
assertSameDate("2018-12-31T10:5:30", "date_optional_time");
|
||||
assertSameDate("2018-12-31T1:15:30", "date_optional_time");
|
||||
|
||||
assertSameDate("2018-12-31T10:15:30.123Z", "date_time");
|
||||
assertSameDate("2018-12-31T10:15:30.11Z", "date_time");
|
||||
assertSameDate("2018-12-31T10:15:3.123Z", "date_time");
|
||||
|
||||
assertSameDate("2018-12-31T10:15:30Z", "date_time_no_millis");
|
||||
assertSameDate("2018-12-31T10:5:30Z", "date_time_no_millis");
|
||||
assertSameDate("2018-12-31T10:15:3Z", "date_time_no_millis");
|
||||
assertSameDate("2018-12-31T1:15:30Z", "date_time_no_millis");
|
||||
|
||||
assertSameDate("12", "hour");
|
||||
assertSameDate("01", "hour");
|
||||
assertSameDate("1", "hour");
|
||||
|
||||
assertSameDate("12:12", "hour_minute");
|
||||
assertSameDate("12:01", "hour_minute");
|
||||
assertSameDate("12:1", "hour_minute");
|
||||
|
||||
assertSameDate("12:12:12", "hour_minute_second");
|
||||
assertSameDate("12:12:01", "hour_minute_second");
|
||||
assertSameDate("12:12:1", "hour_minute_second");
|
||||
|
||||
assertSameDate("12:12:12.123", "hour_minute_second_fraction");
|
||||
assertSameDate("12:12:12.1", "hour_minute_second_fraction");
|
||||
assertParseException("12:12:12", "hour_minute_second_fraction");
|
||||
assertSameDate("12:12:12.123", "hour_minute_second_millis");
|
||||
assertSameDate("12:12:12.1", "hour_minute_second_millis");
|
||||
assertParseException("12:12:12", "hour_minute_second_millis");
|
||||
|
||||
assertSameDate("2018-128", "ordinal_date");
|
||||
assertSameDate("2018-1", "ordinal_date");
|
||||
|
||||
assertSameDate("2018-128T10:15:30.123Z", "ordinal_date_time");
|
||||
assertSameDate("2018-1T10:15:30.123Z", "ordinal_date_time");
|
||||
|
||||
assertSameDate("2018-128T10:15:30Z", "ordinal_date_time_no_millis");
|
||||
assertSameDate("2018-1T10:15:30Z", "ordinal_date_time_no_millis");
|
||||
|
||||
assertSameDate("10:15:30.123Z", "time");
|
||||
assertSameDate("1:15:30.123Z", "time");
|
||||
assertSameDate("10:1:30.123Z", "time");
|
||||
assertSameDate("10:15:3.123Z", "time");
|
||||
assertParseException("10:15:3.1", "time");
|
||||
assertParseException("10:15:3Z", "time");
|
||||
|
||||
assertSameDate("10:15:30Z", "time_no_millis");
|
||||
assertSameDate("01:15:30Z", "time_no_millis");
|
||||
assertSameDate("1:15:30Z", "time_no_millis");
|
||||
assertSameDate("10:5:30Z", "time_no_millis");
|
||||
assertSameDate("10:15:3Z", "time_no_millis");
|
||||
assertParseException("10:15:3", "time_no_millis");
|
||||
|
||||
assertSameDate("T10:15:30.123Z", "t_time");
|
||||
assertSameDate("T1:15:30.123Z", "t_time");
|
||||
assertSameDate("T10:1:30.123Z", "t_time");
|
||||
assertSameDate("T10:15:3.123Z", "t_time");
|
||||
assertParseException("T10:15:3.1", "t_time");
|
||||
assertParseException("T10:15:3Z", "t_time");
|
||||
|
||||
assertSameDate("T10:15:30Z", "t_time_no_millis");
|
||||
assertSameDate("T1:15:30Z", "t_time_no_millis");
|
||||
assertSameDate("T10:1:30Z", "t_time_no_millis");
|
||||
assertSameDate("T10:15:3Z", "t_time_no_millis");
|
||||
assertParseException("T10:15:3", "t_time_no_millis");
|
||||
|
||||
assertSameDate("2012-W48-6", "week_date");
|
||||
assertSameDate("2012-W01-6", "week_date");
|
||||
assertSameDate("2012-W1-6", "week_date");
|
||||
// joda comes up with a different exception message here, so we have to adapt
|
||||
assertJodaParseException("2012-W1-8", "week_date",
|
||||
"Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]");
|
||||
assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed");
|
||||
|
||||
assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time");
|
||||
assertSameDate("2012-W1-6T10:15:30.123Z", "week_date_time");
|
||||
|
||||
assertSameDate("2012-W48-6T10:15:30Z", "week_date_time_no_millis");
|
||||
assertSameDate("2012-W1-6T10:15:30Z", "week_date_time_no_millis");
|
||||
|
||||
assertSameDate("2012", "year");
|
||||
assertSameDate("1", "year");
|
||||
assertSameDate("-2000", "year");
|
||||
|
||||
assertSameDate("2012-12", "yearMonth");
|
||||
assertSameDate("1-1", "yearMonth");
|
||||
|
||||
assertSameDate("2012-12-31", "yearMonthDay");
|
||||
assertSameDate("1-12-31", "yearMonthDay");
|
||||
assertSameDate("2012-1-31", "yearMonthDay");
|
||||
assertSameDate("2012-12-1", "yearMonthDay");
|
||||
|
||||
assertSameDate("2018", "week_year");
|
||||
assertSameDate("1", "week_year");
|
||||
assertSameDate("2017", "week_year");
|
||||
|
||||
assertSameDate("2018-W29", "weekyear_week");
|
||||
assertSameDate("2018-W1", "weekyear_week");
|
||||
|
||||
assertSameDate("2012-W31-5", "weekyear_week_day");
|
||||
assertSameDate("2012-W1-1", "weekyear_week_day");
|
||||
}
|
||||
|
||||
public void testDuelingStrictParsing() {
|
||||
assertSameDate("2018W313", "strict_basic_week_date");
|
||||
assertParseException("18W313", "strict_basic_week_date");
|
||||
assertSameDate("2018W313T121212.123Z", "strict_basic_week_date_time");
|
||||
assertParseException("2018W313T12128.123Z", "strict_basic_week_date_time");
|
||||
assertParseException("2018W313T81212.123Z", "strict_basic_week_date_time");
|
||||
assertParseException("2018W313T12812.123Z", "strict_basic_week_date_time");
|
||||
assertParseException("2018W313T12812.1Z", "strict_basic_week_date_time");
|
||||
assertSameDate("2018W313T121212Z", "strict_basic_week_date_time_no_millis");
|
||||
assertParseException("2018W313T12128Z", "strict_basic_week_date_time_no_millis");
|
||||
assertParseException("2018W313T81212Z", "strict_basic_week_date_time_no_millis");
|
||||
assertParseException("2018W313T12812Z", "strict_basic_week_date_time_no_millis");
|
||||
assertSameDate("2018-12-31", "strict_date");
|
||||
assertParseException("2018-8-31", "strict_date");
|
||||
assertSameDate("2018-12-31T12", "strict_date_hour");
|
||||
assertParseException("2018-12-31T8", "strict_date_hour");
|
||||
assertSameDate("2018-12-31T12:12", "strict_date_hour_minute");
|
||||
assertParseException("2018-12-31T8:3", "strict_date_hour_minute");
|
||||
assertSameDate("2018-12-31T12:12:12", "strict_date_hour_minute_second");
|
||||
assertParseException("2018-12-31T12:12:1", "strict_date_hour_minute_second");
|
||||
assertSameDate("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_fraction");
|
||||
assertSameDate("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_millis");
|
||||
assertSameDate("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_millis");
|
||||
assertSameDate("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_fraction");
|
||||
assertParseException("2018-12-31T12:12:12", "strict_date_hour_minute_second_millis");
|
||||
assertParseException("2018-12-31T12:12:12", "strict_date_hour_minute_second_fraction");
|
||||
assertSameDate("2018-12-31", "strict_date_optional_time");
|
||||
assertParseException("2018-12-1", "strict_date_optional_time");
|
||||
assertParseException("2018-1-31", "strict_date_optional_time");
|
||||
assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time");
|
||||
assertParseException("2018-12-31T10:15:3", "strict_date_optional_time");
|
||||
assertParseException("2018-12-31T10:5:30", "strict_date_optional_time");
|
||||
assertParseException("2018-12-31T9:15:30", "strict_date_optional_time");
|
||||
assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time");
|
||||
assertSameDate("2018-12-31T10:15:30.11Z", "strict_date_time");
|
||||
assertParseException("2018-12-31T10:15:3.123Z", "strict_date_time");
|
||||
assertParseException("2018-12-31T10:5:30.123Z", "strict_date_time");
|
||||
assertParseException("2018-12-31T1:15:30.123Z", "strict_date_time");
|
||||
assertSameDate("2018-12-31T10:15:30Z", "strict_date_time_no_millis");
|
||||
assertParseException("2018-12-31T10:5:30Z", "strict_date_time_no_millis");
|
||||
assertParseException("2018-12-31T10:15:3Z", "strict_date_time_no_millis");
|
||||
assertParseException("2018-12-31T1:15:30Z", "strict_date_time_no_millis");
|
||||
assertSameDate("12", "strict_hour");
|
||||
assertSameDate("01", "strict_hour");
|
||||
assertParseException("1", "strict_hour");
|
||||
assertSameDate("12:12", "strict_hour_minute");
|
||||
assertSameDate("12:01", "strict_hour_minute");
|
||||
assertParseException("12:1", "strict_hour_minute");
|
||||
assertSameDate("12:12:12", "strict_hour_minute_second");
|
||||
assertSameDate("12:12:01", "strict_hour_minute_second");
|
||||
assertParseException("12:12:1", "strict_hour_minute_second");
|
||||
assertSameDate("12:12:12.123", "strict_hour_minute_second_fraction");
|
||||
assertSameDate("12:12:12.1", "strict_hour_minute_second_fraction");
|
||||
assertParseException("12:12:12", "strict_hour_minute_second_fraction");
|
||||
assertSameDate("12:12:12.123", "strict_hour_minute_second_millis");
|
||||
assertSameDate("12:12:12.1", "strict_hour_minute_second_millis");
|
||||
assertParseException("12:12:12", "strict_hour_minute_second_millis");
|
||||
assertSameDate("2018-128", "strict_ordinal_date");
|
||||
assertParseException("2018-1", "strict_ordinal_date");
|
||||
|
||||
assertSameDate("2018-128T10:15:30.123Z", "strict_ordinal_date_time");
|
||||
assertParseException("2018-1T10:15:30.123Z", "strict_ordinal_date_time");
|
||||
|
||||
assertSameDate("2018-128T10:15:30Z", "strict_ordinal_date_time_no_millis");
|
||||
assertParseException("2018-1T10:15:30Z", "strict_ordinal_date_time_no_millis");
|
||||
|
||||
assertSameDate("10:15:30.123Z", "strict_time");
|
||||
assertParseException("1:15:30.123Z", "strict_time");
|
||||
assertParseException("10:1:30.123Z", "strict_time");
|
||||
assertParseException("10:15:3.123Z", "strict_time");
|
||||
assertParseException("10:15:3.1", "strict_time");
|
||||
assertParseException("10:15:3Z", "strict_time");
|
||||
|
||||
assertSameDate("10:15:30Z", "strict_time_no_millis");
|
||||
assertSameDate("01:15:30Z", "strict_time_no_millis");
|
||||
assertParseException("1:15:30Z", "strict_time_no_millis");
|
||||
assertParseException("10:5:30Z", "strict_time_no_millis");
|
||||
assertParseException("10:15:3Z", "strict_time_no_millis");
|
||||
assertParseException("10:15:3", "strict_time_no_millis");
|
||||
|
||||
assertSameDate("T10:15:30.123Z", "strict_t_time");
|
||||
assertParseException("T1:15:30.123Z", "strict_t_time");
|
||||
assertParseException("T10:1:30.123Z", "strict_t_time");
|
||||
assertParseException("T10:15:3.123Z", "strict_t_time");
|
||||
assertParseException("T10:15:3.1", "strict_t_time");
|
||||
assertParseException("T10:15:3Z", "strict_t_time");
|
||||
|
||||
assertSameDate("T10:15:30Z", "strict_t_time_no_millis");
|
||||
assertParseException("T1:15:30Z", "strict_t_time_no_millis");
|
||||
assertParseException("T10:1:30Z", "strict_t_time_no_millis");
|
||||
assertParseException("T10:15:3Z", "strict_t_time_no_millis");
|
||||
assertParseException("T10:15:3", "strict_t_time_no_millis");
|
||||
|
||||
assertSameDate("2012-W48-6", "strict_week_date");
|
||||
assertSameDate("2012-W01-6", "strict_week_date");
|
||||
assertParseException("2012-W1-6", "strict_week_date");
|
||||
assertParseException("2012-W1-8", "strict_week_date");
|
||||
|
||||
assertSameDate("2012-W48-6", "strict_week_date");
|
||||
assertSameDate("2012-W01-6", "strict_week_date");
|
||||
assertParseException("2012-W1-6", "strict_week_date");
|
||||
// joda comes up with a different exception message here, so we have to adapt
|
||||
assertJodaParseException("2012-W01-8", "strict_week_date",
|
||||
"Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]");
|
||||
assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed");
|
||||
|
||||
assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time");
|
||||
assertParseException("2012-W1-6T10:15:30.123Z", "strict_week_date_time");
|
||||
|
||||
assertSameDate("2012-W48-6T10:15:30Z", "strict_week_date_time_no_millis");
|
||||
assertParseException("2012-W1-6T10:15:30Z", "strict_week_date_time_no_millis");
|
||||
|
||||
assertSameDate("2012", "strict_year");
|
||||
assertParseException("1", "strict_year");
|
||||
assertSameDate("-2000", "strict_year");
|
||||
|
||||
assertSameDate("2012-12", "strict_year_month");
|
||||
assertParseException("1-1", "strict_year_month");
|
||||
|
||||
assertSameDate("2012-12-31", "strict_year_month_day");
|
||||
assertParseException("1-12-31", "strict_year_month_day");
|
||||
assertParseException("2012-1-31", "strict_year_month_day");
|
||||
assertParseException("2012-12-1", "strict_year_month_day");
|
||||
|
||||
assertSameDate("2018", "strict_weekyear");
|
||||
assertParseException("1", "strict_weekyear");
|
||||
|
||||
assertSameDate("2018", "strict_weekyear");
|
||||
assertSameDate("2017", "strict_weekyear");
|
||||
assertParseException("1", "strict_weekyear");
|
||||
|
||||
assertSameDate("2018-W29", "strict_weekyear_week");
|
||||
assertSameDate("2018-W01", "strict_weekyear_week");
|
||||
assertParseException("2018-W1", "strict_weekyear_week");
|
||||
|
||||
assertSameDate("2012-W31-5", "strict_weekyear_week_day");
|
||||
assertParseException("2012-W1-1", "strict_weekyear_week_day");
|
||||
}
|
||||
|
||||
public void testSeveralTimeFormats() {
|
||||
assertSameDate("2018-12-12", "year_month_day||ordinal_date");
|
||||
assertSameDate("2018-128", "year_month_day||ordinal_date");
|
||||
}
|
||||
|
||||
private void assertSameDate(String input, String format) {
|
||||
FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
|
||||
DateTime jodaDateTime = jodaFormatter.parser().parseDateTime(input);
|
||||
|
||||
CompoundDateTimeFormatter javaTimeFormatter = DateFormatters.forPattern(format);
|
||||
TemporalAccessor javaTimeAccessor = javaTimeFormatter.parse(input);
|
||||
ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(javaTimeAccessor);
|
||||
|
||||
String msg = String.format(Locale.ROOT, "Input [%s] Format [%s] Joda [%s], Java [%s]", input, format, jodaDateTime,
|
||||
DateTimeFormatter.ISO_INSTANT.format(zonedDateTime.toInstant()));
|
||||
|
||||
assertThat(msg, jodaDateTime.getMillis(), is(zonedDateTime.toInstant().toEpochMilli()));
|
||||
}
|
||||
|
||||
private void assertParseException(String input, String format) {
|
||||
assertJodaParseException(input, format, "Invalid format: \"" + input);
|
||||
assertJavaTimeParseException(input, format, "Text '" + input + "' could not be parsed");
|
||||
}
|
||||
|
||||
private void assertJodaParseException(String input, String format, String expectedMessage) {
|
||||
FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> jodaFormatter.parser().parseDateTime(input));
|
||||
assertThat(e.getMessage(), containsString(expectedMessage));
|
||||
}
|
||||
|
||||
private void assertJavaTimeParseException(String input, String format, String expectedMessage) {
|
||||
CompoundDateTimeFormatter javaTimeFormatter = DateFormatters.forPattern(format);
|
||||
DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input));
|
||||
assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage));
|
||||
}
|
||||
}
|
|
@ -313,6 +313,10 @@ public class MovFnWhitelistedFunctionTests extends ESTestCase {
|
|||
assertThat(actual, equalTo(Double.NaN));
|
||||
}
|
||||
|
||||
public void testStdDevNaNAvg() {
|
||||
assertThat(MovingFunctions.stdDev(new double[] { 1.0, 2.0, 3.0 }, Double.NaN), equalTo(Double.NaN));
|
||||
}
|
||||
|
||||
public void testLinearMovAvg() {
|
||||
|
||||
int numValues = randomIntBetween(1, 100);
|
||||
|
|
|
@ -124,6 +124,7 @@ import java.io.UncheckedIOException;
|
|||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.time.ZoneId;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
@ -176,6 +177,7 @@ public abstract class ESTestCase extends LuceneTestCase {
|
|||
|
||||
private static final List<String> JODA_TIMEZONE_IDS;
|
||||
private static final List<String> JAVA_TIMEZONE_IDS;
|
||||
private static final List<String> JAVA_ZONE_IDS;
|
||||
|
||||
private static final AtomicInteger portGenerator = new AtomicInteger();
|
||||
|
||||
|
@ -203,6 +205,10 @@ public abstract class ESTestCase extends LuceneTestCase {
|
|||
List<String> javaTZIds = Arrays.asList(TimeZone.getAvailableIDs());
|
||||
Collections.sort(javaTZIds);
|
||||
JAVA_TIMEZONE_IDS = Collections.unmodifiableList(javaTZIds);
|
||||
|
||||
List<String> javaZoneIds = new ArrayList<>(ZoneId.getAvailableZoneIds());
|
||||
Collections.sort(javaZoneIds);
|
||||
JAVA_ZONE_IDS = Collections.unmodifiableList(javaZoneIds);
|
||||
}
|
||||
|
||||
protected final Logger logger = Loggers.getLogger(getClass());
|
||||
|
@ -701,12 +707,19 @@ public abstract class ESTestCase extends LuceneTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* generate a random TimeZone from the ones available in java.time
|
||||
* generate a random TimeZone from the ones available in java.util
|
||||
*/
|
||||
public static TimeZone randomTimeZone() {
|
||||
return TimeZone.getTimeZone(randomFrom(JAVA_TIMEZONE_IDS));
|
||||
}
|
||||
|
||||
/**
|
||||
* generate a random TimeZone from the ones available in java.time
|
||||
*/
|
||||
public static ZoneId randomZone() {
|
||||
return ZoneId.of(randomFrom(JAVA_ZONE_IDS));
|
||||
}
|
||||
|
||||
/**
|
||||
* helper to randomly perform on <code>consumer</code> with <code>value</code>
|
||||
*/
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.elasticsearch.common.transport.TransportAddress;
|
|||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportMessage;
|
||||
import org.elasticsearch.xpack.core.security.authc.Authentication;
|
||||
|
@ -180,11 +181,11 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
if (events.contains(AUTHENTICATION_SUCCESS) && (eventFilterPolicyRegistry.ignorePredicate()
|
||||
.test(new AuditEventMetaInfo(Optional.of(user), Optional.of(realm), Optional.empty(), Optional.empty())) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}], request_body=[{}]",
|
||||
localNodeInfo.prefix, principal(user), realm, request.uri(), request.params(), restRequestContent(request));
|
||||
logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}]{}, request_body=[{}]",
|
||||
localNodeInfo.prefix, principal(user), realm, request.uri(), request.params(), opaqueId(), restRequestContent(request));
|
||||
} else {
|
||||
logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}]", localNodeInfo.prefix,
|
||||
principal(user), realm, request.uri(), request.params());
|
||||
logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}]{}",
|
||||
localNodeInfo.prefix, principal(user), realm, request.uri(), request.params(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -197,13 +198,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
.test(new AuditEventMetaInfo(Optional.of(user), Optional.of(realm), Optional.empty(), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), principal(user), realm, action,
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName());
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), principal(user), realm, action,
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -217,12 +218,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
.test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action,
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName());
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action,
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -233,11 +235,11 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
if (events.contains(ANONYMOUS_ACCESS_DENIED)
|
||||
&& (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}], request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), restRequestContent(request));
|
||||
logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]{}, request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), opaqueId(), restRequestContent(request));
|
||||
} else {
|
||||
logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]", localNodeInfo.prefix, hostAttributes(request),
|
||||
request.uri());
|
||||
logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]{}", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -250,13 +252,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
.test(new AuditEventMetaInfo(Optional.of(token), Optional.empty(), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), token.principal(), action,
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName());
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), token.principal(), action,
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -267,10 +269,11 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
if (events.contains(AUTHENTICATION_FAILED)
|
||||
&& (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}], request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), restRequestContent(request));
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]{}, request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), opaqueId(), restRequestContent(request));
|
||||
} else {
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]", localNodeInfo.prefix, hostAttributes(request), request.uri());
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]{}", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -283,12 +286,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
.test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), action, arrayToCommaDelimitedString(indices.get()),
|
||||
message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action,
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action,
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -300,11 +304,12 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
&& (eventFilterPolicyRegistry.ignorePredicate()
|
||||
.test(new AuditEventMetaInfo(Optional.of(token), Optional.empty(), Optional.empty())) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}], request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), token.principal(), request.uri(), restRequestContent(request));
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]{}, request_body=[{}]",
|
||||
localNodeInfo.prefix, hostAttributes(request), token.principal(), request.uri(), opaqueId(),
|
||||
restRequestContent(request));
|
||||
} else {
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]", localNodeInfo.prefix, hostAttributes(request),
|
||||
token.principal(), request.uri());
|
||||
logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]{}",
|
||||
localNodeInfo.prefix, hostAttributes(request), token.principal(), request.uri(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -319,13 +324,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
if (indices.isPresent()) {
|
||||
logger.info(
|
||||
"{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], indices=[{}], "
|
||||
+ "request=[{}]",
|
||||
+ "request=[{}]{}",
|
||||
localNodeInfo.prefix, realm, originAttributes(threadContext, message, localNodeInfo), token.principal(), action,
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName());
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, realm, originAttributes(threadContext, message, localNodeInfo), token.principal(), action,
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -337,12 +342,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
&& (eventFilterPolicyRegistry.ignorePredicate()
|
||||
.test(new AuditEventMetaInfo(Optional.of(token), Optional.of(realm), Optional.empty())) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}], request_body=[{}]",
|
||||
localNodeInfo.prefix, realm, hostAttributes(request), token.principal(), request.uri(),
|
||||
logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]{}, "
|
||||
+ "request_body=[{}]",
|
||||
localNodeInfo.prefix, realm, hostAttributes(request), token.principal(), request.uri(), opaqueId(),
|
||||
restRequestContent(request));
|
||||
} else {
|
||||
logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]", localNodeInfo.prefix, realm,
|
||||
hostAttributes(request), token.principal(), request.uri());
|
||||
logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]{}",
|
||||
localNodeInfo.prefix, realm, hostAttributes(request), token.principal(), request.uri(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -357,14 +363,14 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()),
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), subject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -378,14 +384,14 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()),
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), subject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -395,10 +401,11 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
public void tamperedRequest(RestRequest request) {
|
||||
if (events.contains(TAMPERED_REQUEST) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [tampered_request]\t{}, uri=[{}], request_body=[{}]", localNodeInfo.prefix, hostAttributes(request),
|
||||
request.uri(), restRequestContent(request));
|
||||
logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]{}, request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), request.uri(), opaqueId(), restRequestContent(request));
|
||||
} else {
|
||||
logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]", localNodeInfo.prefix, hostAttributes(request), request.uri());
|
||||
logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]{}", localNodeInfo.prefix, hostAttributes(request),
|
||||
request.uri(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -411,12 +418,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
.test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), action, arrayToCommaDelimitedString(indices.get()),
|
||||
message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action,
|
||||
arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [tampered_request]\t{}, action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [tampered_request]\t{}, action=[{}], request=[{}]{}", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName(),
|
||||
opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -430,13 +438,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
.test(new AuditEventMetaInfo(Optional.of(user), Optional.empty(), Optional.empty(), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, request, localNodeInfo), principal(user), action,
|
||||
arrayToCommaDelimitedString(indices.get()), request.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, request, localNodeInfo), principal(user), action,
|
||||
arrayToCommaDelimitedString(indices.get()), request.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], request=[{}]{}", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, request, localNodeInfo), principal(user), action,
|
||||
request.getClass().getSimpleName());
|
||||
request.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -445,16 +453,16 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
@Override
|
||||
public void connectionGranted(InetAddress inetAddress, String profile, SecurityIpFilterRule rule) {
|
||||
if (events.contains(CONNECTION_GRANTED) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) {
|
||||
logger.info("{}[ip_filter] [connection_granted]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", localNodeInfo.prefix,
|
||||
NetworkAddress.format(inetAddress), profile, rule);
|
||||
logger.info("{}[ip_filter] [connection_granted]\torigin_address=[{}], transport_profile=[{}], rule=[{}]{}",
|
||||
localNodeInfo.prefix, NetworkAddress.format(inetAddress), profile, rule, opaqueId());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void connectionDenied(InetAddress inetAddress, String profile, SecurityIpFilterRule rule) {
|
||||
if (events.contains(CONNECTION_DENIED) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) {
|
||||
logger.info("{}[ip_filter] [connection_denied]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", localNodeInfo.prefix,
|
||||
NetworkAddress.format(inetAddress), profile, rule);
|
||||
logger.info("{}[ip_filter] [connection_denied]\torigin_address=[{}], transport_profile=[{}], rule=[{}]{}",
|
||||
localNodeInfo.prefix, NetworkAddress.format(inetAddress), profile, rule, opaqueId());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -466,15 +474,14 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()),
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action,
|
||||
message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -488,14 +495,14 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) {
|
||||
final LocalNodeInfo localNodeInfo = this.localNodeInfo;
|
||||
if (indices.isPresent()) {
|
||||
logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]",
|
||||
logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()),
|
||||
message.getClass().getSimpleName());
|
||||
message.getClass().getSimpleName(), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix,
|
||||
originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName());
|
||||
logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}",
|
||||
localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -507,12 +514,13 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
&& (eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(authentication.getUser()),
|
||||
Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), Optional.empty())) == false)) {
|
||||
if (includeRequestBody) {
|
||||
logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}], request_body=[{}]", localNodeInfo.prefix,
|
||||
hostAttributes(request), runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), request.uri(),
|
||||
restRequestContent(request));
|
||||
logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}], request_body=[{}]{}",
|
||||
localNodeInfo.prefix, hostAttributes(request), runAsSubject(authentication),
|
||||
arrayToCommaDelimitedString(roleNames), request.uri(), restRequestContent(request), opaqueId());
|
||||
} else {
|
||||
logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}]", localNodeInfo.prefix, hostAttributes(request),
|
||||
runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), request.uri());
|
||||
logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}]{}", localNodeInfo.prefix,
|
||||
hostAttributes(request), runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), request.uri(),
|
||||
opaqueId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -552,6 +560,15 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail,
|
|||
return restOriginTag(threadContext).orElse(transportOriginTag(message).orElse(localNodeInfo.localOriginTag));
|
||||
}
|
||||
|
||||
private String opaqueId() {
|
||||
String opaqueId = threadContext.getHeader(Task.X_OPAQUE_ID);
|
||||
if (opaqueId != null) {
|
||||
return ", opaque_id=[" + opaqueId + "]";
|
||||
} else {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
private static Optional<String> restOriginTag(ThreadContext threadContext) {
|
||||
final InetSocketAddress restAddress = RemoteHostHeader.restRemoteAddress(threadContext);
|
||||
if (restAddress == null) {
|
||||
|
|
|
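The new opaqueId() helper above turns the request's opaque id header into a ", opaque_id=[...]" suffix that every audit line can append unconditionally. A minimal standalone sketch of that composition, with a plain Map standing in for the ThreadContext header lookup (the class name and sample values below are illustrative, not part of the commit):

import java.util.HashMap;
import java.util.Map;

class OpaqueIdSuffixSketch {

    // Mirrors the value of Task.X_OPAQUE_ID (assumed here to be "X-Opaque-Id").
    static final String X_OPAQUE_ID = "X-Opaque-Id";

    // Returns ", opaque_id=[...]" when the header is present and an empty string otherwise,
    // so callers can append it to a log line without any conditional logic.
    static String opaqueIdSuffix(Map<String, String> headers) {
        String opaqueId = headers.get(X_OPAQUE_ID);
        return opaqueId != null ? ", opaque_id=[" + opaqueId + "]" : "";
    }

    public static void main(String[] args) {
        Map<String, String> headers = new HashMap<>();
        headers.put(X_OPAQUE_ID, "my-request-42");
        // prints: [transport] [run_as_granted]	..., opaque_id=[my-request-42]
        System.out.println("[transport] [run_as_granted]\t..." + opaqueIdSuffix(headers));
    }
}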
@@ -11,6 +11,7 @@ import com.unboundid.ldap.sdk.LDAPConnectionPool;
import com.unboundid.ldap.sdk.LDAPException;
import com.unboundid.ldap.sdk.LDAPInterface;
import com.unboundid.ldap.sdk.SearchResultEntry;
import com.unboundid.ldap.sdk.ServerSet;
import com.unboundid.ldap.sdk.SimpleBindRequest;
import com.unboundid.ldap.sdk.controls.AuthorizationIdentityRequestControl;
import org.apache.logging.log4j.Logger;

@@ -62,8 +63,6 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory {
final DownLevelADAuthenticator downLevelADAuthenticator;
final UpnADAuthenticator upnADAuthenticator;

private final int ldapPort;

ActiveDirectorySessionFactory(RealmConfig config, SSLService sslService, ThreadPool threadPool) throws LDAPException {
super(config, sslService, new ActiveDirectoryGroupsResolver(config.settings()),
ActiveDirectorySessionFactorySettings.POOL_ENABLED,

@@ -85,7 +84,7 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory {
+ "] setting for active directory");
}
String domainDN = buildDnFromDomain(domainName);
ldapPort = ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.get(settings);
final int ldapPort = ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.get(settings);
final int ldapsPort = ActiveDirectorySessionFactorySettings.AD_LDAPS_PORT_SETTING.get(settings);
final int gcLdapPort = ActiveDirectorySessionFactorySettings.AD_GC_LDAP_PORT_SETTING.get(settings);
final int gcLdapsPort = ActiveDirectorySessionFactorySettings.AD_GC_LDAPS_PORT_SETTING.get(settings);

@@ -102,7 +101,7 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory {
@Override
protected List<String> getDefaultLdapUrls(Settings settings) {
return Collections.singletonList("ldap://" + settings.get(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING) +
":" + ldapPort);
":" + ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.get(settings));
}

@Override

@@ -197,6 +196,11 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory {
return bindDN;
}

// Exposed for testing
ServerSet getServerSet() {
return super.serverSet;
}

ADAuthenticator getADAuthenticator(String username) {
if (username.indexOf('\\') > 0) {
return downLevelADAuthenticator;
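The change above drops the cached ldapPort field, so getDefaultLdapUrls now reads the port setting on every call. A minimal sketch of that URL construction, with map-based settings standing in for the real ActiveDirectorySessionFactorySettings constants (the keys and default value below are illustrative assumptions):

import java.util.Collections;
import java.util.List;
import java.util.Map;

class DefaultAdLdapUrlSketch {

    // Builds the single default URL from the configured domain name and LDAP port.
    static List<String> defaultLdapUrls(Map<String, String> settings) {
        String domain = settings.get("domain_name");
        String port = settings.getOrDefault("ldap.port", "389"); // 389 is the standard LDAP port
        return Collections.singletonList("ldap://" + domain + ":" + port);
    }

    public static void main(String[] args) {
        // [ldap://ad.test.elasticsearch.com:389]
        System.out.println(defaultLdapUrls(Map.of("domain_name", "ad.test.elasticsearch.com")));
        // [ldap://ad.test.elasticsearch.com:10389]
        System.out.println(defaultLdapUrls(
                Map.of("domain_name", "ad.test.elasticsearch.com", "ldap.port", "10389")));
    }
}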
@@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.mock.orig.Mockito;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.test.rest.FakeRestRequest.Builder;

@@ -122,6 +123,7 @@ public class LoggingAuditTrailTests extends ESTestCase {
private ClusterService clusterService;
private ThreadContext threadContext;
private boolean includeRequestBody;
private String opaqueId;

@Before
public void init() throws Exception {

@@ -145,6 +147,13 @@ public class LoggingAuditTrailTests extends ESTestCase {
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
prefix = LoggingAuditTrail.LocalNodeInfo.resolvePrefix(settings, localNode);
threadContext = new ThreadContext(Settings.EMPTY);
if (randomBoolean()) {
String id = randomAlphaOfLength(10);
threadContext.putHeader(Task.X_OPAQUE_ID, id);
opaqueId = ", opaque_id=[" + id + "]";
} else {
opaqueId = "";
}
}

public void testAnonymousAccessDeniedTransport() throws Exception {

@@ -155,10 +164,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.anonymousAccessDenied("_action", message);
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [anonymous_access_denied]\t" + origins +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [anonymous_access_denied]\t" + origins +
", action=[_action], request=[MockMessage]");
", action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -179,10 +188,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.anonymousAccessDenied(request);
if (includeRequestBody) {
assertMsg(logger, Level.INFO, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]");
NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]");
} else {
assertMsg(logger, Level.INFO, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri]");
NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId);
}

// test disabled

@@ -202,10 +211,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins +
", principal=[_principal], action=[_action], indices=[" + indices(message) +
"], request=[MockIndicesRequest]");
"], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins +
", principal=[_principal], action=[_action], request=[MockMessage]");
", principal=[_principal], action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -224,10 +233,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.authenticationFailed("_action", message);
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins +
", action=[_action], request=[MockMessage]");
", action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -248,11 +257,11 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.authenticationFailed(new MockToken(), request);
if (includeRequestBody) {
assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId + ", request_body=[" +
expectedMessage + "]");
} else {
assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]");
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId);
}

// test disabled

@@ -273,10 +282,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.authenticationFailed(request);
if (includeRequestBody) {
assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]");
NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]");
} else {
assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri]");
NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId);
}

// test disabled

@@ -303,10 +312,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [realm_authentication_failed]\trealm=[_realm], " + origins +
", principal=[_principal], action=[_action], indices=[" + indices(message) + "], " +
"request=[MockIndicesRequest]");
"request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [realm_authentication_failed]\trealm=[_realm], " + origins +
", principal=[_principal], action=[_action], request=[MockMessage]");
", principal=[_principal], action=[_action], request=[MockMessage]" + opaqueId);
}
}

@@ -327,11 +336,11 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.authenticationFailed("_realm", new MockToken(), request);
if (includeRequestBody) {
assertMsg(logger, Level.INFO, prefix + "[rest] [realm_authentication_failed]\trealm=[_realm], origin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId + ", request_body=[" +
expectedMessage + "]");
} else {
assertMsg(logger, Level.INFO, prefix + "[rest] [realm_authentication_failed]\trealm=[_realm], origin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]");
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId);
}
}

@@ -353,10 +362,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
: "principal=[_username], realm=[authRealm]") + ", roles=[" + role + "]";
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[_action], request=[MockMessage]");
", action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -384,11 +393,11 @@ public class LoggingAuditTrailTests extends ESTestCase {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" +
SystemUser.INSTANCE.principal()
+ "], realm=[authRealm], roles=[" + role + "], action=[internal:_action], indices=[" + indices(message)
+ "], request=[MockIndicesRequest]");
+ "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" +
SystemUser.INSTANCE.principal() + "], realm=[authRealm], roles=[" + role
+ "], action=[internal:_action], request=[MockMessage]");
+ "], action=[internal:_action], request=[MockMessage]" + opaqueId);
}
}

@@ -410,10 +419,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
: "principal=[_username], realm=[authRealm]") + ", roles=[" + role + "]";
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[internal:_action], request=[MockMessage]");
", action=[internal:_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -442,10 +451,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
: "principal=[_username], realm=[authRealm]") + ", roles=[" + role + "]";
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo +
", action=[_action], request=[MockMessage]");
", action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -466,10 +475,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.tamperedRequest(request);
if (includeRequestBody) {
assertMsg(logger, Level.INFO, prefix + "[rest] [tampered_request]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]");
NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]");
} else {
assertMsg(logger, Level.INFO, prefix + "[rest] [tampered_request]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri]");
NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId);
}

// test disabled

@@ -489,10 +498,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.tamperedRequest(action, message);
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins +
", action=[_action], request=[MockMessage]");
", action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -516,10 +525,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.tamperedRequest(user, action, message);
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo +
", action=[_action], request=[MockMessage]");
", action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -537,7 +546,7 @@ public class LoggingAuditTrailTests extends ESTestCase {
final SecurityIpFilterRule rule = new SecurityIpFilterRule(false, "_all");
auditTrail.connectionDenied(inetAddress, "default", rule);
assertMsg(logger, Level.INFO, String.format(Locale.ROOT, prefix +
"[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]",
"[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]" + opaqueId,
NetworkAddress.format(inetAddress), "default", "_all"));

// test disabled

@@ -562,7 +571,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.connectionGranted(inetAddress, "default", rule);
assertMsg(logger, Level.INFO, String.format(Locale.ROOT, prefix + "[ip_filter] [connection_granted]\torigin_address=[%s], " +
"transport_profile=[default], rule=[allow default:accept_all]", NetworkAddress.format(inetAddress)));
"transport_profile=[default], rule=[allow default:accept_all]" + opaqueId,
NetworkAddress.format(inetAddress)));
}

public void testRunAsGranted() throws Exception {

@@ -577,12 +587,12 @@ public class LoggingAuditTrailTests extends ESTestCase {
assertMsg(logger, Level.INFO,
prefix + "[transport] [run_as_granted]\t" + origins
+ ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=["
+ role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
+ role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO,
prefix + "[transport] [run_as_granted]\t" + origins
+ ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=["
+ role + "], action=[_action], request=[MockMessage]");
+ role + "], action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -605,12 +615,12 @@ public class LoggingAuditTrailTests extends ESTestCase {
assertMsg(logger, Level.INFO,
prefix + "[transport] [run_as_denied]\t" + origins
+ ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=["
+ role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
+ role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO,
prefix + "[transport] [run_as_denied]\t" + origins
+ ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=["
+ role + "], action=[_action], request=[MockMessage]");
+ role + "], action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled

@@ -667,10 +677,11 @@ public class LoggingAuditTrailTests extends ESTestCase {
if (includeRequestBody) {
assertMsg(logger, Level.INFO,
prefix + "[rest] [authentication_success]\t" + userInfo + ", realm=[_realm], uri=[_uri], params=[" + params
+ "], request_body=[" + expectedMessage + "]");
+ "]" + opaqueId + ", request_body=[" + expectedMessage + "]");
} else {
assertMsg(logger, Level.INFO,
prefix + "[rest] [authentication_success]\t" + userInfo + ", realm=[_realm], uri=[_uri], params=[" + params + "]");
prefix + "[rest] [authentication_success]\t" + userInfo + ", realm=[_realm], uri=[_uri], params=[" + params
+ "]" + opaqueId);
}

// test disabled

@@ -701,10 +712,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
auditTrail.authenticationSuccess(realm, user, "_action", message);
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_success]\t" + origins + ", " + userInfo
+ ", realm=[_realm], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
+ ", realm=[_realm], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId);
} else {
assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_success]\t" + origins + ", " + userInfo
+ ", realm=[_realm], action=[_action], request=[MockMessage]");
+ ", realm=[_realm], action=[_action], request=[MockMessage]" + opaqueId);
}

// test disabled
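The tests above only expect an opaque_id tag when the X-Opaque-Id header was put on the thread context, which in practice means the client sent it with the request. A minimal sketch of attaching that header from a plain HTTP client (the endpoint, id value, and literal header name below are assumptions for illustration):

import java.net.HttpURLConnection;
import java.net.URL;

class OpaqueIdRequestSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a node listening on localhost:9200; the header name mirrors Task.X_OPAQUE_ID.
        URL url = new URL("http://localhost:9200/_cluster/health");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestProperty("X-Opaque-Id", "my-request-42"); // surfaces as opaque_id=[my-request-42]
        System.out.println("HTTP " + connection.getResponseCode());
        connection.disconnect();
    }
}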
@@ -8,8 +8,10 @@ package org.elasticsearch.xpack.security.authc.ldap;
import com.unboundid.ldap.listener.InMemoryDirectoryServer;
import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig;
import com.unboundid.ldap.sdk.Attribute;
import com.unboundid.ldap.sdk.FailoverServerSet;
import com.unboundid.ldap.sdk.LDAPException;
import com.unboundid.ldap.sdk.LDAPURL;
import com.unboundid.ldap.sdk.SingleServerSet;
import com.unboundid.ldap.sdk.schema.Schema;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;

@@ -28,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings;
import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings;
import org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings;
import org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings;
import org.elasticsearch.xpack.core.security.authc.support.CachingUsernamePasswordRealmSettings;
import org.elasticsearch.xpack.core.security.authc.support.DnRoleMapperSettings;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;

@@ -51,9 +54,11 @@ import static org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFa
import static org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings.URLS_SETTING;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.Matchers.any;

@@ -355,6 +360,48 @@ public class ActiveDirectoryRealmTests extends ESTestCase {
assertEquals("(objectClass=down level)", sessionFactory.downLevelADAuthenticator.getUserSearchFilter());
}

public void testBuildUrlFromDomainNameAndDefaultPort() throws Exception {
Settings settings = Settings.builder()
.put(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com")
.build();
RealmConfig config = new RealmConfig("testBuildUrlFromDomainNameAndDefaultPort", settings, globalSettings,
TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings));
ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool);
assertSingleLdapServer(sessionFactory, "ad.test.elasticsearch.com", 389);
}

public void testBuildUrlFromDomainNameAndCustomPort() throws Exception {
Settings settings = Settings.builder()
.put(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com")
.put(ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.getKey(), 10389)
.build();
RealmConfig config = new RealmConfig("testBuildUrlFromDomainNameAndCustomPort", settings, globalSettings,
TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings));
ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool);
assertSingleLdapServer(sessionFactory, "ad.test.elasticsearch.com", 10389);
}

public void testUrlConfiguredInSettings() throws Exception {
Settings settings = Settings.builder()
.put(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com")
.put(SessionFactorySettings.URLS_SETTING, "ldap://ad01.testing.elastic.co:20389/")
.build();
RealmConfig config = new RealmConfig("testBuildUrlFromDomainNameAndCustomPort", settings, globalSettings,
TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings));
ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool);
assertSingleLdapServer(sessionFactory, "ad01.testing.elastic.co", 20389);
}

private void assertSingleLdapServer(ActiveDirectorySessionFactory sessionFactory, String hostname, int port) {
assertThat(sessionFactory.getServerSet(), instanceOf(FailoverServerSet.class));
FailoverServerSet fss = (FailoverServerSet) sessionFactory.getServerSet();
assertThat(fss.getServerSets(), arrayWithSize(1));
assertThat(fss.getServerSets()[0], instanceOf(SingleServerSet.class));
SingleServerSet sss = (SingleServerSet) fss.getServerSets()[0];
assertThat(sss.getAddress(), equalTo(hostname));
assertThat(sss.getPort(), equalTo(port));
}

private Settings settings() throws Exception {
return settings(Settings.EMPTY);
}
@@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.common.Nullable;

import java.io.IOException;
import java.util.ArrayList;

@@ -29,7 +30,11 @@ public class SlackMessage implements MessageElement {
final String text;
final Attachment[] attachments;

public SlackMessage(String from, String[] to, String icon, String text, Attachment[] attachments) {
public SlackMessage(String from, String[] to, String icon, @Nullable String text, @Nullable Attachment[] attachments) {
if(text == null && attachments == null) {
throw new IllegalArgumentException("Both text and attachments cannot be null.");
}

this.from = from;
this.to = to;
this.icon = icon;
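The constructor change above enforces that a message carries at least one of text or attachments; either may be null on its own, but not both at once. A minimal sketch of that validation contract using a simplified stand-in (not the actual Watcher class):

class SlackMessageContractSketch {

    // Rejects the one combination the new constructor forbids: text and attachments both null.
    static void validate(String text, Object[] attachments) {
        if (text == null && attachments == null) {
            throw new IllegalArgumentException("Both text and attachments cannot be null.");
        }
    }

    public static void main(String[] args) {
        validate("hello", null);       // allowed: text only
        validate(null, new Object[1]); // allowed: attachments only
        try {
            validate(null, null);      // rejected
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}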
@@ -49,7 +49,7 @@ public class SlackMessageTests extends ESTestCase {
}
String icon = randomBoolean() ? null : randomAlphaOfLength(10);
String text = randomBoolean() ? null : randomAlphaOfLength(50);
Attachment[] attachments = randomBoolean() ? null : new Attachment[randomIntBetween(0, 2)];
Attachment[] attachments = (text != null && randomBoolean()) ? null : new Attachment[randomIntBetween(0, 2)];
if (attachments != null) {
for (int i = 0; i < attachments.length; i++) {
String fallback = randomBoolean() ? null : randomAlphaOfLength(10);

@@ -600,6 +600,22 @@ public class SlackMessageTests extends ESTestCase {
}
}

public void testCanHaveNullText() throws Exception {
SlackMessage slackMessage = new SlackMessage("from", new String[] {"to"}, "icon", null, new Attachment[1]);
assertNull(slackMessage.getText());
assertNotNull(slackMessage.getAttachments());
}

public void testCanHaveNullAttachments() throws Exception {
SlackMessage slackMessage = new SlackMessage("from", new String[] {"to"}, "icon", "text", null);
assertNotNull(slackMessage.getText());
assertNull(slackMessage.getAttachments());
}

public void testCannotHaveNullAttachmentsAndNullText() throws Exception {
expectThrows(IllegalArgumentException.class, () -> new SlackMessage("from", new String[]{"to"}, "icon", null, null));
}

private static void writeFieldIfNotNull(XContentBuilder builder, String field, Object value) throws IOException {
if (value != null) {
builder.field(field, value);
@@ -0,0 +1,85 @@
import org.elasticsearch.gradle.LoggedExec

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

dependencies {
testCompile project(path: xpackModule('core'), configuration: 'runtime')
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
testCompile project(path: xpackModule('ml'), configuration: 'runtime')
testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts')
}

integTestRunner {
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}

// location of generated keystores and certificates
File keystoreDir = new File(project.buildDir, 'keystore')

// Generate the node's keystore
File nodeKeystore = new File(keystoreDir, 'test-node.jks')
task createNodeKeyStore(type: LoggedExec) {
doFirst {
if (nodeKeystore.parentFile.exists() == false) {
nodeKeystore.parentFile.mkdirs()
}
if (nodeKeystore.exists()) {
delete nodeKeystore
}
}
executable = new File(project.runtimeJavaHome, 'bin/keytool')
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
args '-genkey',
'-alias', 'test-node',
'-keystore', nodeKeystore,
'-keyalg', 'RSA',
'-keysize', '2048',
'-validity', '712',
'-dname', 'CN=smoke-test-plugins-ssl',
'-keypass', 'keypass',
'-storepass', 'keypass'
}

// Add keystores to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(createNodeKeyStore)

integTestCluster {
dependsOn createNodeKeyStore
setting 'xpack.security.enabled', 'true'
setting 'xpack.ml.enabled', 'true'
setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.security.authc.token.enabled', 'true'
setting 'xpack.security.transport.ssl.enabled', 'true'
setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name
setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'

keystoreSetting 'bootstrap.password', 'x-pack-test-password'
keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'

numNodes = 3

setupCommand 'setupDummyUser',
'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'

extraConfigFile nodeKeystore.name, nodeKeystore

waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",
dest: tmpFile.toString(),
username: 'x_pack_rest_user',
password: 'x-pack-test-password',
ignoreerrors: true,
retries: 10)
return tmpFile.exists()
}
}
@@ -49,6 +49,7 @@ public class DetectionRulesIT extends MlNativeAutodetectIntegTestCase {
cleanUp();
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31916")
public void testCondition() throws Exception {
DetectionRule rule = new DetectionRule.Builder(Arrays.asList(
new RuleCondition(RuleCondition.AppliesTo.ACTUAL, Operator.LT, 100.0)
@@ -1,85 +0,0 @@
import org.elasticsearch.gradle.LoggedExec

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

dependencies {
testCompile project(path: xpackModule('core'), configuration: 'runtime')
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
testCompile project(path: xpackModule('ml'), configuration: 'runtime')
testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts')
}

integTestRunner {
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}

// location of generated keystores and certificates
File keystoreDir = new File(project.buildDir, 'keystore')

// Generate the node's keystore
File nodeKeystore = new File(keystoreDir, 'test-node.jks')
task createNodeKeyStore(type: LoggedExec) {
doFirst {
if (nodeKeystore.parentFile.exists() == false) {
nodeKeystore.parentFile.mkdirs()
}
if (nodeKeystore.exists()) {
delete nodeKeystore
}
}
executable = new File(project.runtimeJavaHome, 'bin/keytool')
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
args '-genkey',
'-alias', 'test-node',
'-keystore', nodeKeystore,
'-keyalg', 'RSA',
'-keysize', '2048',
'-validity', '712',
'-dname', 'CN=smoke-test-plugins-ssl',
'-keypass', 'keypass',
'-storepass', 'keypass'
}

// Add keystores to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(createNodeKeyStore)

integTestCluster {
dependsOn createNodeKeyStore
setting 'xpack.index_lifecycle.enabled', 'false'
setting 'xpack.security.enabled', 'true'
setting 'xpack.ml.enabled', 'true'
setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.security.authc.token.enabled', 'true'
setting 'xpack.security.transport.ssl.enabled', 'true'
setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name
setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.ml.min_disk_space_off_heap', '200mb'

keystoreSetting 'bootstrap.password', 'x-pack-test-password'
keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'

setupCommand 'setupDummyUser',
'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'

extraConfigFile nodeKeystore.name, nodeKeystore

waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",
dest: tmpFile.toString(),
username: 'x_pack_rest_user',
password: 'x-pack-test-password',
ignoreerrors: true,
retries: 10)
return tmpFile.exists()
}
}