Merge branch 'master' into feature/sql
Original commit: elastic/x-pack-elasticsearch@b80d0cba1d
Commit: b755b3e543
@@ -44,8 +44,9 @@ PUT _xpack/ml/anomaly_detectors/it_ops_new_logs
 //CONSOLE
 <1> The `categorization_field_name` property indicates which field will be
 categorized.
-<2> The resulting categories can be used in a detector by setting `by_field_name`,
-`over_field_name`, or `partition_field_name` to the keyword `mlcategory`.
+<2> The resulting categories are used in a detector by setting `by_field_name`,
+`over_field_name`, or `partition_field_name` to the keyword `mlcategory`. If you
+do not specify this keyword in one of those properties, the API request fails.
 
 The optional `categorization_examples_limit` property specifies the
 maximum number of examples that are stored in memory and in the results data
@@ -82,10 +82,10 @@ An analysis configuration object has the following properties:
 typically between `5m` and `1h`. The default value is `5m`.
 
 `categorization_field_name`::
-(string) If not null, the values of the specified field will be categorized.
-The resulting categories can be used in a detector by setting `by_field_name`,
-`over_field_name`, or `partition_field_name` to the keyword `mlcategory`.
-For more information, see
+(string) If this property is specified, the values of the specified field will
+be categorized. The resulting categories must be used in a detector by setting
+`by_field_name`, `over_field_name`, or `partition_field_name` to the keyword
+`mlcategory`. For more information, see
 {xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages].
 //<<ml-configuring-categories>>.
 
@@ -117,8 +117,11 @@ no analysis can occur and an error is returned.
 aggregates results for each influencer entity.
 
 `latency`::
-(unsigned integer) The size of the window, in seconds, in which to expect data
-that is out of time order. The default value is 0 (no latency). +
+(time units) The size of the window in which to expect data that is out of
+time order. The default value is 0 (no latency). If you specify a non-zero
+value, it must be greater than or equal to one second. For more information
+about time units, see
+{ref}/common-options.html#time-units[Time Units]. +
 
 NOTE: Latency is only applicable when you send data by using
 the <<ml-post-data,post data>> API.
 
@@ -138,10 +141,10 @@ NOTE: To use the `multivariate_by_fields` property, you must also specify
 `by_field_name` in your detector.
 
 `summary_count_field_name`::
-(string) If not null, the data that is fed to the job is expected to be
-pre-summarized. This property value is the name of the field that contains
-the count of raw data points that have been summarized.
-The same `summary_count_field_name` applies to all detectors in the job. +
+(string) If this property is specified, the data that is fed to the job is
+expected to be pre-summarized. This property value is the name of the field
+that contains the count of raw data points that have been summarized. The same
+`summary_count_field_name` applies to all detectors in the job. +
 
 NOTE: The `summary_count_field_name` property cannot be used with the `metric`
 function.
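For reference, the properties documented above can also be assembled through the ML job config builders in this repository. The following is a minimal sketch, assuming the `Detector.Builder` and `AnalysisConfig.Builder` setters shown here match this tree; the `message` field name and the 60s latency are illustrative only:

    import java.util.Collections;
    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
    import org.elasticsearch.xpack.ml.job.config.Detector;

    class CategorizationConfigSketch {
        static AnalysisConfig build() {
            // A detector that counts events per category; "mlcategory" must be used
            // in by_field_name, over_field_name, or partition_field_name.
            Detector.Builder detector = new Detector.Builder("count", null);
            detector.setByFieldName("mlcategory");

            AnalysisConfig.Builder analysisConfig =
                    new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
            analysisConfig.setCategorizationFieldName("message"); // field whose values are categorized
            analysisConfig.setLatency(TimeValue.timeValueSeconds(60)); // non-zero latency must be >= 1s
            return analysisConfig.build();
        }
    }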
@@ -9,12 +9,7 @@ The update model snapshot API enables you to update certain properties of a snapshot.
 `POST _xpack/ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>/_update`
 
-
-==== Description
-
-//TBD. Is the following still true? - not sure but close/open would be the method
-Updates to the configuration are only applied after the job has been closed
-and re-opened.
+//==== Description
 
 ==== Path Parameters
 
@@ -8,11 +8,16 @@ source "`dirname "$0"`"/../elasticsearch-env
 
 source "`dirname "$0"`"/x-pack-env
 
-declare -a args=("$@")
-args=("${args[@]}" --path.conf "$CONF_DIR")
-
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.ssl.CertificateTool "${args[@]}"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.ssl.CertificateTool \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -9,7 +9,15 @@ source "`dirname "$0"`"/../elasticsearch-env
 source "`dirname "$0"`"/x-pack-env
 
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.watcher.trigger.schedule.tool.CronEvalTool "$@"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.watcher.trigger.schedule.tool.CronEvalTool \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -8,11 +8,16 @@ source "`dirname "$0"`"/../elasticsearch-env
 
 source "`dirname "$0"`"/x-pack-env
 
-declare -a args=("$@")
-args=("${args[@]}" --path.conf "$CONF_DIR")
-
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.extensions.XPackExtensionCli "${args[@]}"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.extensions.XPackExtensionCli \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -8,11 +8,16 @@ source "`dirname "$0"`"/../elasticsearch-env
 
 source "`dirname "$0"`"/x-pack-env
 
-declare -a args=("$@")
-args=("${args[@]}" --path.conf "$CONF_DIR")
-
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool "${args[@]}"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -8,11 +8,16 @@ source "`dirname "$0"`"/../elasticsearch-env
 
 source "`dirname "$0"`"/x-pack-env
 
-declare -a args=("$@")
-args=("${args[@]}" --path.conf "$CONF_DIR")
-
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.security.authc.esnative.tool.SetupPasswordTool "${args[@]}"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.security.authc.esnative.tool.SetupPasswordTool \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -8,11 +8,16 @@ source "`dirname "$0"`"/../elasticsearch-env
 
 source "`dirname "$0"`"/x-pack-env
 
-declare -a args=("$@")
-args=("${args[@]}" --path.conf "$CONF_DIR")
-
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.security.crypto.tool.SystemKeyTool "${args[@]}"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.security.crypto.tool.SystemKeyTool \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -8,11 +8,16 @@ source "`dirname "$0"`"/../elasticsearch-env
 
 source "`dirname "$0"`"/x-pack-env
 
-declare -a args=("$@")
-args=("${args[@]}" --path.conf "$CONF_DIR")
-
 cd "$ES_HOME" > /dev/null
-"$JAVA" $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" org.elasticsearch.xpack.security.authc.file.tool.UsersTool "${args[@]}"
+
+"$JAVA" \
+  $ES_JAVA_OPTS \
+  -Des.path.home="$ES_HOME" \
+  -Des.path.conf="$CONF_DIR" \
+  -cp "$ES_CLASSPATH" \
+  org.elasticsearch.xpack.security.authc.file.tool.UsersTool \
+  "$@"
+
 status=$?
 cd - > /dev/null
 exit $status
@@ -28,6 +28,7 @@ dependencyLicenses {
 mapping from: /bc.*/, to: 'bouncycastle'
 mapping from: /owasp-java-html-sanitizer.*/, to: 'owasp-java-html-sanitizer'
 mapping from: /transport-netty.*/, to: 'elasticsearch'
+mapping from: /tribe.*/, to: 'elasticsearch'
 mapping from: /elasticsearch-rest-client.*/, to: 'elasticsearch'
 mapping from: /server.*/, to: 'elasticsearch'
 mapping from: /jdbc-proto.*/, to: 'elasticsearch'
@@ -37,6 +38,7 @@ dependencyLicenses {
 mapping from: /commons-.*/, to: 'commons' // pulled in by rest client
 ignoreSha 'elasticsearch-rest-client'
 ignoreSha 'transport-netty4'
+ignoreSha 'tribe'
 ignoreSha 'server'
 ignoreSha 'jdbc-proto'
 ignoreSha 'cli-proto'
@@ -63,6 +65,7 @@ configurations {
 
 dependencies {
 // security deps
+compile project(path: ':modules:tribe', configuration: 'runtime')
 compile project(path: ':modules:transport-netty4', configuration: 'runtime')
 compile 'com.unboundid:unboundid-ldapsdk:3.2.0'
 compile 'org.bouncycastle:bcprov-jdk15on:1.55'
@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.license;
 
-import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.AbstractNamedDiffable;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -14,7 +13,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.license.License.OperationMode;
-import org.elasticsearch.tribe.TribeService;
+import org.elasticsearch.cluster.MergableCustomMetaData;
 
 import java.io.IOException;
 import java.util.EnumSet;
@@ -23,7 +22,7 @@ import java.util.EnumSet;
  * Contains metadata about registered licenses
  */
 class LicensesMetaData extends AbstractNamedDiffable<MetaData.Custom> implements MetaData.Custom,
-        TribeService.MergableCustomMetaData<LicensesMetaData> {
+        MergableCustomMetaData<LicensesMetaData> {
 
     public static final String TYPE = "licenses";
 
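The `MergableCustomMetaData` contract that moves out of `TribeService` here lets a tribe node combine custom cluster-state metadata received from multiple clusters. A rough, self-contained sketch of the idea follows; the single `merge` method and the "keep the higher version" policy are assumptions for illustration, not taken from this diff:

    // Hypothetical sketch of the contract: merging must be order-independent,
    // because a tribe node may receive the cluster states in any order.
    interface MergableExample<T> {
        T merge(T other);
    }

    final class VersionedValue implements MergableExample<VersionedValue> {
        final long version;
        final String value;

        VersionedValue(long version, String value) {
            this.version = version;
            this.value = value;
        }

        @Override
        public VersionedValue merge(VersionedValue other) {
            // Deterministically keep whichever side carries the higher version,
            // mirroring how a tribe node would keep the "best" license metadata.
            return version >= other.version ? this : other;
        }
    }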
@@ -32,6 +32,7 @@ import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
@@ -106,6 +107,7 @@ import org.elasticsearch.xpack.watcher.Watcher;
 import org.elasticsearch.xpack.watcher.WatcherFeatureSet;
 
 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.nio.file.Path;
 import java.security.AccessController;
 import java.security.GeneralSecurityException;
@@ -207,7 +209,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
     protected Graph graph;
     protected MachineLearning machineLearning;
     protected Logstash logstash;
-    protected CryptoService cryptoService;
 
     protected Deprecation deprecation;
     protected Upgrade upgrade;
     protected SqlPlugin sql;
@@ -237,7 +239,6 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
     } else {
         this.extensionsService = null;
     }
-    cryptoService = ENCRYPT_SENSITIVE_DATA_SETTING.get(settings) ? new CryptoService(settings) : null;
 }
 
 // For tests only
@@ -270,7 +271,8 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
 @Override
 public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                            ResourceWatcherService resourceWatcherService, ScriptService scriptService,
-                                           NamedXContentRegistry xContentRegistry) {
+                                           NamedXContentRegistry xContentRegistry, Environment environment,
+                                           NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
     List<Object> components = new ArrayList<>();
     components.add(sslService);
 
@@ -290,6 +292,13 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
     }
     components.addAll(monitoring.createComponents(internalClient, threadPool, clusterService, licenseService, sslService));
 
+    final CryptoService cryptoService;
+    try {
+        cryptoService = ENCRYPT_SENSITIVE_DATA_SETTING.get(settings) ? new CryptoService(settings) : null;
+    } catch (IOException e) {
+        throw new UncheckedIOException(e);
+    }
+
     // watcher http stuff
     Map<String, HttpAuthFactory> httpAuthFactories = new HashMap<>();
     httpAuthFactories.put(BasicAuth.TYPE, new BasicAuthFactory(cryptoService));
@@ -301,7 +310,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
     components.add(httpClient);
 
     Collection<Object> notificationComponents = createNotificationComponents(clusterService.getClusterSettings(), httpClient,
-            httpTemplateParser, scriptService, httpAuthRegistry);
+            httpTemplateParser, scriptService, httpAuthRegistry, cryptoService);
     components.addAll(notificationComponents);
 
     components.addAll(watcher.createComponents(getClock(), scriptService, internalClient, licenseState,
@@ -328,7 +337,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
 
 private Collection<Object> createNotificationComponents(ClusterSettings clusterSettings, HttpClient httpClient,
                                                         HttpRequestTemplate.Parser httpTemplateParser, ScriptService scriptService,
-                                                        HttpAuthRegistry httpAuthRegistry) {
+                                                        HttpAuthRegistry httpAuthRegistry, CryptoService cryptoService) {
     List<Object> components = new ArrayList<>();
     components.add(new EmailService(settings, cryptoService, clusterSettings));
     components.add(new HipChatService(settings, httpClient, clusterSettings));
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.xpack.ml.MlParserType;
+import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
@@ -224,97 +225,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
  * Returns the histogram's interval as epoch millis.
  */
 public long getHistogramIntervalMillis() {
-    AggregationBuilder histogramAggregation = getHistogramAggregation(aggregations.getAggregatorFactories());
-    return getHistogramIntervalMillis(histogramAggregation);
-}
-
-private static long getHistogramIntervalMillis(AggregationBuilder histogramAggregation) {
-    if (histogramAggregation instanceof HistogramAggregationBuilder) {
-        return (long) ((HistogramAggregationBuilder) histogramAggregation).interval();
-    } else if (histogramAggregation instanceof DateHistogramAggregationBuilder) {
-        return validateAndGetDateHistogramInterval((DateHistogramAggregationBuilder) histogramAggregation);
-    } else {
-        throw new IllegalStateException("Invalid histogram aggregation [" + histogramAggregation.getName() + "]");
-    }
-}
-
-static AggregationBuilder getHistogramAggregation(List<AggregationBuilder> aggregations) {
-    if (aggregations.isEmpty()) {
-        throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM));
-    }
-    if (aggregations.size() != 1) {
-        throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM_NO_SIBLINGS);
-    }
-
-    AggregationBuilder agg = aggregations.get(0);
-    if (isHistogram(agg)) {
-        return agg;
-    } else {
-        return getHistogramAggregation(agg.getSubAggregations());
-    }
-}
-
-private static boolean isHistogram(AggregationBuilder aggregationBuilder) {
-    return aggregationBuilder instanceof HistogramAggregationBuilder
-            || aggregationBuilder instanceof DateHistogramAggregationBuilder;
-}
-
-/**
- * Returns the date histogram interval as epoch millis if valid, or throws
- * an {@link ElasticsearchException} with the validation error
- */
-private static long validateAndGetDateHistogramInterval(DateHistogramAggregationBuilder dateHistogram) {
-    if (dateHistogram.timeZone() != null && dateHistogram.timeZone().equals(DateTimeZone.UTC) == false) {
-        throw ExceptionsHelper.badRequestException("ML requires date_histogram.time_zone to be UTC");
-    }
-
-    if (dateHistogram.dateHistogramInterval() != null) {
-        return validateAndGetCalendarInterval(dateHistogram.dateHistogramInterval().toString());
-    } else {
-        return dateHistogram.interval();
-    }
-}
-
-private static long validateAndGetCalendarInterval(String calendarInterval) {
-    TimeValue interval;
-    DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(calendarInterval);
-    if (dateTimeUnit != null) {
-        switch (dateTimeUnit) {
-            case WEEK_OF_WEEKYEAR:
-                interval = new TimeValue(7, TimeUnit.DAYS);
-                break;
-            case DAY_OF_MONTH:
-                interval = new TimeValue(1, TimeUnit.DAYS);
-                break;
-            case HOUR_OF_DAY:
-                interval = new TimeValue(1, TimeUnit.HOURS);
-                break;
-            case MINUTES_OF_HOUR:
-                interval = new TimeValue(1, TimeUnit.MINUTES);
-                break;
-            case SECOND_OF_MINUTE:
-                interval = new TimeValue(1, TimeUnit.SECONDS);
-                break;
-            case MONTH_OF_YEAR:
-            case YEAR_OF_CENTURY:
-            case QUARTER:
-                throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
-            default:
-                throw ExceptionsHelper.badRequestException("Unexpected dateTimeUnit [" + dateTimeUnit + "]");
-        }
-    } else {
-        interval = TimeValue.parseTimeValue(calendarInterval, "date_histogram.interval");
-    }
-    if (interval.days() > 7) {
-        throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
-    }
-    return interval.millis();
-}
-
-private static String invalidDateHistogramCalendarIntervalMessage(String interval) {
-    throw ExceptionsHelper.badRequestException("When specifying a date_histogram calendar interval ["
-            + interval + "], ML does not accept intervals longer than a week because of " +
-            "variable lengths of periods greater than a week");
+    return ExtractorUtils.getHistogramIntervalMillis(aggregations);
 }
 
 /**
@@ -570,7 +481,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM);
     }
 
-    AggregationBuilder histogramAggregation = getHistogramAggregation(aggregatorFactories);
+    AggregationBuilder histogramAggregation = ExtractorUtils.getHistogramAggregation(aggregatorFactories);
     checkNoMoreHistogramAggregations(histogramAggregation.getSubAggregations());
     checkHistogramAggregationHasChildMaxTimeAgg(histogramAggregation);
     checkHistogramIntervalIsPositive(histogramAggregation);
@@ -578,7 +489,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
 
 private static void checkNoMoreHistogramAggregations(List<AggregationBuilder> aggregations) {
     for (AggregationBuilder agg : aggregations) {
-        if (isHistogram(agg)) {
+        if (ExtractorUtils.isHistogram(agg)) {
             throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_MAX_ONE_DATE_HISTOGRAM);
         }
         checkNoMoreHistogramAggregations(agg.getSubAggregations());
@@ -605,7 +516,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
 }
 
 private static void checkHistogramIntervalIsPositive(AggregationBuilder histogramAggregation) {
-    long interval = getHistogramIntervalMillis(histogramAggregation);
+    long interval = ExtractorUtils.getHistogramIntervalMillis(histogramAggregation);
     if (interval <= 0) {
         throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO);
     }
@@ -616,8 +527,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
 if (aggregations == null) {
     chunkingConfig = ChunkingConfig.newAuto();
 } else {
-    AggregationBuilder histogramAggregation = getHistogramAggregation(aggregations.getAggregatorFactories());
-    long histogramIntervalMillis = getHistogramIntervalMillis(histogramAggregation);
+    long histogramIntervalMillis = ExtractorUtils.getHistogramIntervalMillis(aggregations);
     chunkingConfig = ChunkingConfig.newManual(TimeValue.timeValueMillis(
             DEFAULT_AGGREGATION_CHUNKING_BUCKETS * histogramIntervalMillis));
 }
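The default-chunking branch above sizes each manual chunk as a fixed number of histogram buckets. A worked example of the arithmetic (the bucket-count value of 1000 is an assumption for illustration; the diff only shows the constant's name, not its value): with a 1m histogram interval, each chunk covers 1,000 buckets of 60,000 ms:

    class ChunkSizingExample {
        public static void main(String[] args) {
            long histogramIntervalMillis = 60_000L;  // 1m date_histogram interval
            long chunkingBuckets = 1_000L;           // assumed DEFAULT_AGGREGATION_CHUNKING_BUCKETS
            // 1,000 buckets * 60,000 ms = 60,000,000 ms, i.e. roughly 16.7 hours per chunk
            System.out.println(chunkingBuckets * histogramIntervalMillis);
        }
    }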
@@ -6,17 +6,28 @@
 package org.elasticsearch.xpack.ml.datafeed.extractor;
 
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.rounding.DateTimeUnit;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.RangeQueryBuilder;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
+import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
+import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 /**
  * Collects common utility methods needed by various {@link DataExtractor} implementations
@@ -54,4 +65,120 @@ public final class ExtractorUtils {
             throw new IOException("[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards");
         }
     }
+
+    /**
+     * Find the (date) histogram in {@code aggFactory} and extract its interval.
+     * Throws if there is no (date) histogram or if the histogram has sibling
+     * aggregations.
+     * @param aggFactory Aggregations factory
+     * @return The histogram interval
+     */
+    public static long getHistogramIntervalMillis(AggregatorFactories.Builder aggFactory) {
+        AggregationBuilder histogram = getHistogramAggregation(aggFactory.getAggregatorFactories());
+        return getHistogramIntervalMillis(histogram);
+    }
+
+    /**
+     * Find and return (date) histogram in {@code aggregations}
+     * @param aggregations List of aggregations
+     * @return A {@link HistogramAggregationBuilder} or a {@link DateHistogramAggregationBuilder}
+     */
+    public static AggregationBuilder getHistogramAggregation(List<AggregationBuilder> aggregations) {
+        if (aggregations.isEmpty()) {
+            throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM));
+        }
+        if (aggregations.size() != 1) {
+            throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM_NO_SIBLINGS);
+        }
+
+        AggregationBuilder agg = aggregations.get(0);
+        if (isHistogram(agg)) {
+            return agg;
+        } else {
+            return getHistogramAggregation(agg.getSubAggregations());
+        }
+    }
+
+    public static boolean isHistogram(AggregationBuilder aggregationBuilder) {
+        return aggregationBuilder instanceof HistogramAggregationBuilder
+                || aggregationBuilder instanceof DateHistogramAggregationBuilder;
+    }
+
+    /**
+     * Get the interval from {@code histogramAggregation} or throw an {@code IllegalStateException}
+     * if {@code histogramAggregation} is not a {@link HistogramAggregationBuilder} or a
+     * {@link DateHistogramAggregationBuilder}
+     *
+     * @param histogramAggregation Must be a {@link HistogramAggregationBuilder} or a
+     *                             {@link DateHistogramAggregationBuilder}
+     * @return The histogram interval
+     */
+    public static long getHistogramIntervalMillis(AggregationBuilder histogramAggregation) {
+        if (histogramAggregation instanceof HistogramAggregationBuilder) {
+            return (long) ((HistogramAggregationBuilder) histogramAggregation).interval();
+        } else if (histogramAggregation instanceof DateHistogramAggregationBuilder) {
+            return validateAndGetDateHistogramInterval((DateHistogramAggregationBuilder) histogramAggregation);
+        } else {
+            throw new IllegalStateException("Invalid histogram aggregation [" + histogramAggregation.getName() + "]");
+        }
+    }
+
+    /**
+     * Returns the date histogram interval as epoch millis if valid, or throws
+     * an {@link ElasticsearchException} with the validation error
+     */
+    private static long validateAndGetDateHistogramInterval(DateHistogramAggregationBuilder dateHistogram) {
+        if (dateHistogram.timeZone() != null && dateHistogram.timeZone().equals(DateTimeZone.UTC) == false) {
+            throw ExceptionsHelper.badRequestException("ML requires date_histogram.time_zone to be UTC");
+        }
+
+        if (dateHistogram.dateHistogramInterval() != null) {
+            return validateAndGetCalendarInterval(dateHistogram.dateHistogramInterval().toString());
+        } else {
+            return dateHistogram.interval();
+        }
+    }
+
+    static long validateAndGetCalendarInterval(String calendarInterval) {
+        TimeValue interval;
+        DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(calendarInterval);
+        if (dateTimeUnit != null) {
+            switch (dateTimeUnit) {
+                case WEEK_OF_WEEKYEAR:
+                    interval = new TimeValue(7, TimeUnit.DAYS);
+                    break;
+                case DAY_OF_MONTH:
+                    interval = new TimeValue(1, TimeUnit.DAYS);
+                    break;
+                case HOUR_OF_DAY:
+                    interval = new TimeValue(1, TimeUnit.HOURS);
+                    break;
+                case MINUTES_OF_HOUR:
+                    interval = new TimeValue(1, TimeUnit.MINUTES);
+                    break;
+                case SECOND_OF_MINUTE:
+                    interval = new TimeValue(1, TimeUnit.SECONDS);
+                    break;
+                case MONTH_OF_YEAR:
+                case YEAR_OF_CENTURY:
+                case QUARTER:
+                    throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
+                default:
+                    throw ExceptionsHelper.badRequestException("Unexpected dateTimeUnit [" + dateTimeUnit + "]");
+            }
+        } else {
+            interval = TimeValue.parseTimeValue(calendarInterval, "date_histogram.interval");
+        }
+        if (interval.days() > 7) {
+            throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
+        }
+        return interval.millis();
+    }
+
+    private static String invalidDateHistogramCalendarIntervalMessage(String interval) {
+        throw ExceptionsHelper.badRequestException("When specifying a date_histogram calendar interval ["
+                + interval + "], ML does not accept intervals longer than a week because of " +
+                "variable lengths of periods greater than a week");
+    }
+
 }
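A short usage sketch of the helpers added above, mirroring the shape of the new tests at the bottom of this commit; the aggregation names and the 60s interval are illustrative:

    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.AggregatorFactories;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;

    class HistogramIntervalSketch {
        static long intervalMillis() {
            // terms -> date_histogram(interval = 60000 ms), as a datafeed might define it
            DateHistogramAggregationBuilder dateHistogram =
                    AggregationBuilders.dateHistogram("time").field("time").interval(60_000L);
            AggregatorFactories.Builder aggs = new AggregatorFactories.Builder()
                    .addAggregator(AggregationBuilders.terms("airline").subAggregation(dateHistogram));
            // Walks down to the nested date_histogram and returns 60000; a missing
            // histogram, or one with sibling aggregations, throws a bad-request error.
            return ExtractorUtils.getHistogramIntervalMillis(aggs);
        }
    }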
@@ -105,7 +105,8 @@ class AggregationDataExtractor implements DataExtractor {
 }
 
 private void initAggregationProcessor(Aggregations aggs) throws IOException {
-    aggregationToJsonProcessor = new AggregationToJsonProcessor(context.timeField, context.fields, context.includeDocCount);
+    aggregationToJsonProcessor = new AggregationToJsonProcessor(context.timeField, context.fields, context.includeDocCount,
+            context.start);
     aggregationToJsonProcessor.process(aggs);
 }
 
@@ -114,11 +115,19 @@ class AggregationDataExtractor implements DataExtractor {
 }
 
 private SearchRequestBuilder buildSearchRequest() {
+    long histogramSearchStartTime = context.start;
+    if (context.aggs.getPipelineAggregatorFactories().isEmpty() == false) {
+        // For derivative aggregations the first bucket will always be null
+        // so query one extra histogram bucket back and hope there is data
+        // in that bucket
+        histogramSearchStartTime = Math.max(0, context.start - getHistogramInterval());
+    }
+
     SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
             .setIndices(context.indices)
             .setTypes(context.types)
             .setSize(0)
-            .setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end));
+            .setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, histogramSearchStartTime, context.end));
 
     context.aggs.getAggregatorFactories().forEach(searchRequestBuilder::addAggregation);
     context.aggs.getPipelineAggregatorFactories().forEach(searchRequestBuilder::addAggregation);
@@ -147,4 +156,8 @@ class AggregationDataExtractor implements DataExtractor {
     hasNext = aggregationToJsonProcessor.writeDocs(BATCH_KEY_VALUE_PAIRS, outputStream);
     return new ByteArrayInputStream(outputStream.toByteArray());
 }
+
+private long getHistogramInterval() {
+    return ExtractorUtils.getHistogramIntervalMillis(context.aggs);
+}
 }
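To make the one-bucket lookback concrete, here is the arithmetic with illustrative numbers: with a start time of 1,000,000 ms and a 60,000 ms histogram interval, a datafeed containing a pipeline (e.g. derivative) aggregation queries from 940,000 ms instead, and Math.max guards against a negative start:

    class LookbackExample {
        public static void main(String[] args) {
            long start = 1_000_000L;             // illustrative context.start
            long histogramInterval = 60_000L;    // illustrative bucket interval
            long searchStart = Math.max(0, start - histogramInterval);
            System.out.println(searchStart);     // 940000: one bucket earlier
            System.out.println(Math.max(0, 30_000L - histogramInterval)); // 0: clamped at epoch
        }
    }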
@@ -18,6 +18,7 @@ import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
 import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
 import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.joda.time.DateTime;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -45,6 +46,7 @@ class AggregationToJsonProcessor {
 private final LinkedHashMap<String, Object> keyValuePairs;
 private long keyValueWrittenCount;
 private SortedMap<Long, List<Map<String, Object>>> docsByBucketTimestamp;
+private long startTime;
 
 /**
  * Constructs a processor that processes aggregations into JSON
@@ -52,8 +54,9 @@ class AggregationToJsonProcessor {
  * @param timeField the time field
  * @param fields the fields to convert into JSON
  * @param includeDocCount whether to include the doc_count
+ * @param startTime buckets with a timestamp before this time are discarded
  */
-AggregationToJsonProcessor(String timeField, Set<String> fields, boolean includeDocCount)
+AggregationToJsonProcessor(String timeField, Set<String> fields, boolean includeDocCount, long startTime)
         throws IOException {
     this.timeField = Objects.requireNonNull(timeField);
     this.fields = Objects.requireNonNull(fields);
@@ -61,6 +64,7 @@ class AggregationToJsonProcessor {
     keyValuePairs = new LinkedHashMap<>();
     docsByBucketTimestamp = new TreeMap<>();
     keyValueWrittenCount = 0;
+    this.startTime = startTime;
 }
 
 public void process(Aggregations aggs) throws IOException {
@@ -145,14 +149,41 @@ class AggregationToJsonProcessor {
             "[" + agg.getName() + "] is another instance of a Date histogram");
 }
 
+// buckets are ordered by time, once we get to a bucket past the
+// start time we no longer need to check the time.
+boolean checkBucketTime = true;
 for (Histogram.Bucket bucket : agg.getBuckets()) {
-    List<Aggregation> childAggs = bucket.getAggregations().asList();
+    if (checkBucketTime) {
+        if (toHistogramKeyToEpoch(bucket.getKey()) < startTime) {
+            // skip buckets outside the required time range
+            continue;
+        } else {
+            checkBucketTime = false;
+        }
+    }
+
+    List<Aggregation> childAggs = bucket.getAggregations().asList();
     processAggs(bucket.getDocCount(), childAggs);
     keyValuePairs.remove(timeField);
 }
 }
 
+/*
+ * Date Histograms have a {@link DateTime} object as the key,
+ * Histograms have either a Double or Long.
+ */
+private long toHistogramKeyToEpoch(Object key) {
+    if (key instanceof DateTime) {
+        return ((DateTime)key).getMillis();
+    } else if (key instanceof Double) {
+        return ((Double)key).longValue();
+    } else if (key instanceof Long){
+        return (Long)key;
+    } else {
+        throw new IllegalStateException("Histogram key [" + key + "] cannot be converted to a timestamp");
+    }
+}
+
 private void processTimeField(Aggregation agg) {
     if (agg instanceof Max == false) {
         throw new IllegalArgumentException(Messages.getMessage(Messages.DATAFEED_MISSING_MAX_AGGREGATION_FOR_TIME_FIELD, timeField));
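The bucket filter above relies on histogram buckets arriving in ascending time order, so the timestamp comparison can be switched off permanently after the first in-range bucket. A standalone sketch of the same control flow over plain epoch keys:

    class BucketSkipExample {
        public static void main(String[] args) {
            long startTime = 1_000L;
            long[] bucketKeys = {800L, 900L, 1_000L, 1_100L}; // ascending, like histogram buckets
            boolean checkBucketTime = true;
            for (long key : bucketKeys) {
                if (checkBucketTime) {
                    if (key < startTime) {
                        continue;            // 800 and 900 are discarded
                    }
                    checkBucketTime = false; // no comparisons needed from here on
                }
                System.out.println(key);     // prints 1000, then 1100
            }
        }
    }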
@@ -417,7 +417,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
 final String transportType = NetworkModule.TRANSPORT_TYPE_SETTING.get(settings);
 if (NAME4.equals(transportType) == false) {
     throw new IllegalArgumentException("transport type setting [" + NetworkModule.TRANSPORT_TYPE_KEY + "] must be [" + NAME4
-            + "]");
+            + "] but is [" + transportType + "]");
 }
 } else {
 // default to security4
@@ -429,7 +429,8 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
 if (httpType.equals(NAME4)) {
     SecurityNetty4HttpServerTransport.overrideSettings(settingsBuilder, settings);
 } else {
-    throw new IllegalArgumentException("http type setting [" + NetworkModule.HTTP_TYPE_KEY + "] must be [" + NAME4 + "]");
+    throw new IllegalArgumentException("http type setting [" + NetworkModule.HTTP_TYPE_KEY + "] must be [" + NAME4
+            + "] but is [" + httpType + "]");
 }
 } else {
 // default to security4
@@ -658,6 +659,10 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
 for (Map.Entry<String, Settings> tribeSettings : tribesSettings.entrySet()) {
     String tribePrefix = "tribe." + tribeSettings.getKey() + ".";
 
+    if (TribeService.TRIBE_SETTING_KEYS.stream().anyMatch(s -> s.startsWith(tribePrefix))) {
+        continue;
+    }
+
     final String tribeEnabledSetting = tribePrefix + XPackSettings.SECURITY_ENABLED.getKey();
     if (settings.get(tribeEnabledSetting) != null) {
         boolean enabled = XPackSettings.SECURITY_ENABLED.get(tribeSettings.getValue());
@@ -81,7 +81,11 @@ public class ESNativeRealmMigrateTool extends MultiCommand {
 
 public ESNativeRealmMigrateTool() {
     super("Imports file-based users and roles to the native security realm");
-    subcommands.put("native", new MigrateUserOrRoles());
+    subcommands.put("native", newMigrateUserOrRoles());
 }
 
+protected MigrateUserOrRoles newMigrateUserOrRoles() {
+    return new MigrateUserOrRoles();
+}
+
 /**
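This commit replaces direct `new` calls in the CLI constructors with overridable factory methods, here and in SetupPasswordTool and UsersTool below. The point is testability: a test can subclass the tool and swap a subcommand for a stub. A hedged sketch of what such a test subclass might look like (the subclass is hypothetical, not part of this commit):

    // Hypothetical test double; assumes MigrateUserOrRoles is accessible to subclasses.
    // Note the factory is invoked from the superclass constructor, so the override
    // must not depend on subclass fields (they are not initialized yet at that point).
    class TestableMigrateTool extends ESNativeRealmMigrateTool {
        @Override
        protected MigrateUserOrRoles newMigrateUserOrRoles() {
            return new MigrateUserOrRoles(); // swap in a stubbed instance here
        }
    }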
@@ -59,12 +59,20 @@ public class SetupPasswordTool extends MultiCommand {
 SetupPasswordTool(Function<Environment, CommandLineHttpClient> clientFunction,
                   CheckedFunction<Environment, KeyStoreWrapper, Exception> keyStoreFunction) {
     super("Sets the passwords for reserved users");
-    subcommands.put("auto", new AutoSetup());
-    subcommands.put("interactive", new InteractiveSetup());
+    subcommands.put("auto", newAutoSetup());
+    subcommands.put("interactive", newInteractiveSetup());
     this.clientFunction = clientFunction;
     this.keyStoreFunction = keyStoreFunction;
 }
 
+protected AutoSetup newAutoSetup() {
+    return new AutoSetup();
+}
+
+protected InteractiveSetup newInteractiveSetup() {
+    return new InteractiveSetup();
+}
+
 public static void main(String[] args) throws Exception {
     exit(new SetupPasswordTool().main(args, Terminal.DEFAULT));
 }
@@ -73,7 +81,7 @@ public class SetupPasswordTool extends MultiCommand {
  * This class sets the passwords using automatically generated random passwords. The passwords will be
  * printed to the console.
  */
-private class AutoSetup extends SetupCommand {
+class AutoSetup extends SetupCommand {
 
 AutoSetup() {
     super("Uses randomly generated passwords");
@@ -116,7 +124,7 @@ public class SetupPasswordTool extends MultiCommand {
 /**
  * This class sets the passwords using password entered manually by the user from the console.
  */
-private class InteractiveSetup extends SetupCommand {
+class InteractiveSetup extends SetupCommand {
 
 InteractiveSetup() {
     super("Uses passwords entered by a user");
@@ -47,11 +47,31 @@ public class UsersTool extends MultiCommand {
 
 UsersTool() {
     super("Manages elasticsearch native users");
-    subcommands.put("useradd", new AddUserCommand());
-    subcommands.put("userdel", new DeleteUserCommand());
-    subcommands.put("passwd", new PasswordCommand());
-    subcommands.put("roles", new RolesCommand());
-    subcommands.put("list", new ListCommand());
+    subcommands.put("useradd", newAddUserCommand());
+    subcommands.put("userdel", newDeleteUserCommand());
+    subcommands.put("passwd", newPasswordCommand());
+    subcommands.put("roles", newRolesCommand());
+    subcommands.put("list", newListCommand());
 }
 
+protected AddUserCommand newAddUserCommand() {
+    return new AddUserCommand();
+}
+
+protected DeleteUserCommand newDeleteUserCommand() {
+    return new DeleteUserCommand();
+}
+
+protected PasswordCommand newPasswordCommand() {
+    return new PasswordCommand();
+}
+
+protected RolesCommand newRolesCommand() {
+    return new RolesCommand();
+}
+
+protected ListCommand newListCommand() {
+    return new ListCommand();
+}
+
 static class AddUserCommand extends EnvironmentAwareCommand {
@@ -352,7 +352,7 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
 static void failIfQueryUsesClient(QueryBuilder queryBuilder, QueryRewriteContext original)
         throws IOException {
     QueryRewriteContext copy = new QueryRewriteContext(
-            original.getXContentRegistry(), null, original::nowInMillis);
+            original.getXContentRegistry(), original.getWriteableRegistry(), null, original::nowInMillis);
     Rewriteable.rewrite(queryBuilder, copy);
     if (copy.hasAsyncActions()) {
         throw new IllegalStateException("role queries are not allowed to execute additional requests");
@@ -20,6 +20,7 @@ import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.discovery.zen.UnicastZenPing;
+import org.elasticsearch.env.Environment;
 import org.elasticsearch.node.MockNode;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.plugins.Plugin;
@@ -31,6 +32,7 @@ import org.elasticsearch.test.NodeConfigurationSource;
 import org.elasticsearch.test.TestCluster;
 import org.elasticsearch.test.discovery.TestZenDiscovery;
 import org.elasticsearch.transport.MockTcpTransportPlugin;
+import org.elasticsearch.tribe.TribePlugin;
 import org.elasticsearch.xpack.XPackPlugin;
 import org.elasticsearch.xpack.XPackSettings;
 import org.elasticsearch.xpack.ml.MachineLearning;
@@ -42,6 +44,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Function;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.anyOf;
@@ -71,8 +74,45 @@ public abstract class TribeTransportTestCase extends ESIntegTestCase {
 }
 
 @Override
-protected final Collection<Class<? extends Plugin>> nodePlugins() {
-    return Collections.<Class<? extends Plugin>>singletonList(XPackPlugin.class);
+protected boolean addTestZenDiscovery() {
+    return false;
+}
+
+public static class TribeAwareTestZenDiscoveryPlugin extends TestZenDiscovery.TestPlugin {
+
+    public TribeAwareTestZenDiscoveryPlugin(Settings settings) {
+        super(settings);
+    }
+
+    @Override
+    public Settings additionalSettings() {
+        if (settings.getGroups("tribe", true).isEmpty()) {
+            return super.additionalSettings();
+        } else {
+            return Settings.EMPTY;
+        }
+    }
+}
+
+public static class MockTribePlugin extends TribePlugin {
+
+    public MockTribePlugin(Settings settings) {
+        super(settings);
+    }
+
+    protected Function<Settings, Node> nodeBuilder(Path configPath) {
+        return settings -> new MockNode(new Environment(settings, configPath), internalCluster().getPlugins());
+    }
+
+}
+
+@Override
+protected Collection<Class<? extends Plugin>> nodePlugins() {
+    ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>();
+    plugins.add(MockTribePlugin.class);
+    plugins.add(TribeAwareTestZenDiscoveryPlugin.class);
+    plugins.add(XPackPlugin.class);
+    return plugins;
 }
 
 @Override
@@ -158,8 +198,8 @@ public abstract class TribeTransportTestCase extends ESIntegTestCase {
         .put("transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)
         .build();
 
-final List<Class<? extends Plugin>> mockPlugins = Arrays.asList(TestZenDiscovery.TestPlugin.class, MockTcpTransportPlugin.class,
-        XPackPlugin.class);
+final List<Class<? extends Plugin>> mockPlugins = Arrays.asList(MockTribePlugin.class, TribeAwareTestZenDiscoveryPlugin.class,
+        MockTcpTransportPlugin.class, XPackPlugin.class);
 final Node tribeNode = new MockNode(merged, mockPlugins).start();
 Client tribeClient = tribeNode.client();
@@ -142,9 +142,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
 Settings superSettings = super.transportClientSettings();
 Settings.Builder builder = Settings.builder().put(superSettings);
 addClientSSLSettings(builder, "");
-if (NetworkModule.TRANSPORT_TYPE_SETTING.exists(superSettings) == false) {
-    builder.put(NetworkModule.TRANSPORT_TYPE_SETTING.getKey(), Security.NAME4);
-}
+addDefaultSecurityTransportType(builder, superSettings);
 
 if (randomBoolean()) {
     builder.put(Security.USER_SETTING.getKey(),
@@ -156,6 +154,12 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
     return builder.build();
 }
 
+protected void addDefaultSecurityTransportType(Settings.Builder builder, Settings settings) {
+    if (NetworkModule.TRANSPORT_TYPE_SETTING.exists(settings) == false) {
+        builder.put(NetworkModule.TRANSPORT_TYPE_SETTING.getKey(), Security.NAME4);
+    }
+}
+
 @Override
 public Collection<Class<? extends Plugin>> nodePlugins() {
     return Arrays.asList(xpackPluginClass(),
@@ -9,6 +9,8 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.Version;
 import org.elasticsearch.cli.ExitCodes;
 import org.elasticsearch.cli.MockTerminal;
+import org.elasticsearch.cli.Terminal;
+import org.elasticsearch.cli.UserException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.test.ESTestCase;
@@ -19,6 +21,7 @@ import java.nio.file.Files;
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
 import java.util.Arrays;
+import java.util.Map;
 import java.util.stream.Collectors;
 
 @LuceneTestCase.SuppressFileSystems("*")
@@ -38,6 +41,26 @@ public class ListXPackExtensionCommandTests extends ESTestCase {
     Files.createDirectories(extensionsFile(env));
 }
 
+private static class MockListXPackExtensionCommand extends ListXPackExtensionCommand {
+
+    private final Environment env;
+
+    private MockListXPackExtensionCommand(final Environment env) {
+        this.env = env;
+    }
+
+    @Override
+    protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+        return env;
+    }
+
+    @Override
+    protected boolean addShutdownHook() {
+        return false;
+    }
+
+}
+
 static String buildMultiline(String... args){
     return Arrays.asList(args).stream().collect(Collectors.joining("\n", "", "\n"));
 }
@@ -56,67 +79,57 @@ public class ListXPackExtensionCommandTests extends ESTestCase {
     return env.pluginsFile().resolve("x-pack").resolve("extensions");
 }
 
-static MockTerminal listExtensions(Path home) throws Exception {
+static MockTerminal listExtensions(Path home, Environment env) throws Exception {
     MockTerminal terminal = new MockTerminal();
-    int status = new ListXPackExtensionCommand() {
-        @Override
-        protected boolean addShutdownHook() {
-            return false;
-        }
-    }.main(new String[] { "-Epath.home=" + home }, terminal);
+    int status = new MockListXPackExtensionCommand(env).main(new String[] { "-Epath.home=" + home }, terminal);
     assertEquals(ExitCodes.OK, status);
     return terminal;
 }
 
-static MockTerminal listExtensions(Path home, String[] args) throws Exception {
+static MockTerminal listExtensions(Path home, Environment env, String[] args) throws Exception {
     String[] argsAndHome = new String[args.length + 1];
     System.arraycopy(args, 0, argsAndHome, 0, args.length);
     argsAndHome[args.length] = "-Epath.home=" + home;
     MockTerminal terminal = new MockTerminal();
-    int status = new ListXPackExtensionCommand() {
-        @Override
-        protected boolean addShutdownHook() {
-            return false;
-        }
-    }.main(argsAndHome, terminal);
+    int status = new MockListXPackExtensionCommand(env).main(argsAndHome, terminal);
     assertEquals(ExitCodes.OK, status);
     return terminal;
 }
 
 public void testExtensionsDirMissing() throws Exception {
     Files.delete(extensionsFile(env));
-    IOException e = expectThrows(IOException.class, () -> listExtensions(home));
+    IOException e = expectThrows(IOException.class, () -> listExtensions(home, env));
     assertTrue(e.getMessage(), e.getMessage().contains("Extensions directory missing"));
 }
 
 public void testNoExtensions() throws Exception {
-    MockTerminal terminal = listExtensions(home);
+    MockTerminal terminal = listExtensions(home, env);
     assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty());
 }
 
 public void testNoExtensionsVerbose() throws Exception {
     String[] params = { "-v" };
-    MockTerminal terminal = listExtensions(home, params);
+    MockTerminal terminal = listExtensions(home, env, params);
     assertEquals(terminal.getOutput(), buildMultiline("XPack Extensions directory: " + extensionsFile(env)));
 }
 
 public void testOneExtension() throws Exception {
     buildFakeExtension(env, "", "fake", "org.fake");
-    MockTerminal terminal = listExtensions(home);
+    MockTerminal terminal = listExtensions(home, env);
     assertEquals(terminal.getOutput(), buildMultiline("fake"));
 }
 
 public void testTwoExtensions() throws Exception {
     buildFakeExtension(env, "", "fake1", "org.fake1");
     buildFakeExtension(env, "", "fake2", "org.fake2");
-    MockTerminal terminal = listExtensions(home);
+    MockTerminal terminal = listExtensions(home, env);
     assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2"));
 }
 
 public void testExtensionWithVerbose() throws Exception {
     buildFakeExtension(env, "fake desc", "fake_extension", "org.fake");
     String[] params = { "-v" };
-    MockTerminal terminal = listExtensions(home, params);
+    MockTerminal terminal = listExtensions(home, env, params);
     assertEquals(terminal.getOutput(), buildMultiline("XPack Extensions directory: " + extensionsFile(env),
             "fake_extension", "- XPack Extension information:", "Name: fake_extension",
             "Description: fake desc", "Version: 1.0", " * Classname: org.fake"));
@@ -126,7 +139,7 @@ public class ListXPackExtensionCommandTests extends ESTestCase {
     buildFakeExtension(env, "fake desc 1", "fake_extension1", "org.fake");
     buildFakeExtension(env, "fake desc 2", "fake_extension2", "org.fake2");
     String[] params = { "-v" };
-    MockTerminal terminal = listExtensions(home, params);
+    MockTerminal terminal = listExtensions(home, env, params);
     assertEquals(terminal.getOutput(), buildMultiline("XPack Extensions directory: " + extensionsFile(env),
             "fake_extension1", "- XPack Extension information:", "Name: fake_extension1",
             "Description: fake desc 1", "Version: 1.0", " * Classname: org.fake",
@@ -137,14 +150,14 @@ public class ListXPackExtensionCommandTests extends ESTestCase {
 public void testExtensionWithoutVerboseMultipleExtensions() throws Exception {
     buildFakeExtension(env, "fake desc 1", "fake_extension1", "org.fake");
     buildFakeExtension(env, "fake desc 2", "fake_extension2", "org.fake2");
-    MockTerminal terminal = listExtensions(home, new String[0]);
+    MockTerminal terminal = listExtensions(home, env, new String[0]);
     String output = terminal.getOutput();
     assertEquals(output, buildMultiline("fake_extension1", "fake_extension2"));
 }
 
 public void testExtensionWithoutDescriptorFile() throws Exception{
     Files.createDirectories(extensionsFile(env).resolve("fake1"));
-    NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listExtensions(home));
+    NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listExtensions(home, env));
     assertEquals(e.getFile(),
             extensionsFile(env).resolve("fake1").resolve(XPackExtensionInfo.XPACK_EXTENSION_PROPERTIES).toString());
 }
@@ -152,7 +165,7 @@ public class ListXPackExtensionCommandTests extends ESTestCase {
 public void testExtensionWithWrongDescriptorFile() throws Exception{
     XPackExtensionTestUtil.writeProperties(extensionsFile(env).resolve("fake1"),
             "description", "fake desc");
-    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listExtensions(home));
+    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listExtensions(home, env));
     assertEquals(e.getMessage(), "Property [name] is missing in [" +
             extensionsFile(env).resolve("fake1")
             .resolve(XPackExtensionInfo.XPACK_EXTENSION_PROPERTIES).toString() + "]");
@@ -353,75 +353,20 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
        TermsAggregationBuilder nestedTerms = AggregationBuilders.terms("nested_terms");

        DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time");
-       AggregationBuilder histogramAggregationBuilder = DatafeedConfig.getHistogramAggregation(
-               new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories());
-       assertEquals(dateHistogram, histogramAggregationBuilder);
-
        MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
        dateHistogram.subAggregation(avg).subAggregation(nestedTerms).subAggregation(maxTime).field("time");
-       histogramAggregationBuilder = DatafeedConfig.getHistogramAggregation(
-               new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories());
-       assertEquals(dateHistogram, histogramAggregationBuilder);
-
        TermsAggregationBuilder toplevelTerms = AggregationBuilders.terms("top_level");
        toplevelTerms.subAggregation(dateHistogram);

        DatafeedConfig.Builder builder = new DatafeedConfig.Builder("foo", "bar");
        builder.setAggregations(new AggregatorFactories.Builder().addAggregator(toplevelTerms));
-       ElasticsearchException e = expectThrows(ElasticsearchException.class,
-               () -> builder.validateAggregations());
+       ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::validateAggregations);

        assertEquals("Aggregations can only have 1 date_histogram or histogram aggregation", e.getMessage());
    }

-   public void testGetHistogramAggregation_MissingHistogramAgg() {
-       TermsAggregationBuilder terms = AggregationBuilders.terms("top_level");
-       ElasticsearchException e = expectThrows(ElasticsearchException.class,
-               () -> DatafeedConfig.getHistogramAggregation(
-                       new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories()));
-       assertEquals("A date_histogram (or histogram) aggregation is required", e.getMessage());
-   }
-
-   public void testGetHistogramAggregation_DateHistogramHasSibling() {
-       AvgAggregationBuilder avg = AggregationBuilders.avg("avg");
-       DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time");
-
-       ElasticsearchException e = expectThrows(ElasticsearchException.class,
-               () -> DatafeedConfig.getHistogramAggregation(
-                       new AggregatorFactories.Builder().addAggregator(avg).addAggregator(dateHistogram).getAggregatorFactories()));
-       assertEquals("The date_histogram (or histogram) aggregation cannot have sibling aggregations", e.getMessage());
-
-       TermsAggregationBuilder terms = AggregationBuilders.terms("terms");
-       terms.subAggregation(dateHistogram);
-       terms.subAggregation(avg);
-       e = expectThrows(ElasticsearchException.class,
-               () -> DatafeedConfig.getHistogramAggregation(
-                       new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories()));
-       assertEquals("The date_histogram (or histogram) aggregation cannot have sibling aggregations", e.getMessage());
-   }
-
-   public void testGetHistogramAggregation() {
-       AvgAggregationBuilder avg = AggregationBuilders.avg("avg");
-       TermsAggregationBuilder nestedTerms = AggregationBuilders.terms("nested_terms");
-
-       DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time");
-       AggregationBuilder histogramAggregationBuilder = DatafeedConfig.getHistogramAggregation(
-               new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories());
-       assertEquals(dateHistogram, histogramAggregationBuilder);
-
-       dateHistogram.subAggregation(avg).subAggregation(nestedTerms);
-       histogramAggregationBuilder = DatafeedConfig.getHistogramAggregation(
-               new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories());
-       assertEquals(dateHistogram, histogramAggregationBuilder);
-
-       TermsAggregationBuilder toplevelTerms = AggregationBuilders.terms("top_level");
-       toplevelTerms.subAggregation(dateHistogram);
-       histogramAggregationBuilder = DatafeedConfig.getHistogramAggregation(
-               new AggregatorFactories.Builder().addAggregator(toplevelTerms).getAggregatorFactories());
-
-       assertEquals(dateHistogram, histogramAggregationBuilder);
-   }
-
    public static String randomValidDatafeedId() {
        CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
        return generator.ofCodePointsLength(random(), 10, 10);
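Nearly every assertion in these test hunks flows through `expectThrows`, which runs the supplied executable, verifies that an exception of the expected type escapes, and returns it for message checks. That is also why swapping `() -> builder.validateAggregations()` for `builder::validateAggregations` above is behavior-neutral: either form is just the executable handed to the helper. A simplified, self-contained sketch of the idea (the real helper lives in LuceneTestCase/ESTestCase; this is an illustration, not that code):

    final class ExpectThrowsSketch {

        @FunctionalInterface
        interface ThrowingRunnable {
            void run() throws Throwable;
        }

        // Simplified model of the expectThrows helper used throughout these tests.
        static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
            try {
                runnable.run();
            } catch (Throwable t) {
                if (expectedType.isInstance(t)) {
                    return expectedType.cast(t); // hand the exception back for message assertions
                }
                throw new AssertionError("unexpected exception type: " + t.getClass().getName(), t);
            }
            throw new AssertionError("expected " + expectedType.getName() + " to be thrown, but nothing was");
        }
    }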
@@ -0,0 +1,99 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.datafeed.extractor;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTimeZone;
+
+import java.util.TimeZone;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class ExtractorUtilsTests extends ESTestCase {
+
+    public void testGetHistogramAggregation_DateHistogramHasSibling() {
+        AvgAggregationBuilder avg = AggregationBuilders.avg("avg");
+        DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time");
+
+        ElasticsearchException e = expectThrows(ElasticsearchException.class,
+                () -> ExtractorUtils.getHistogramAggregation(
+                        new AggregatorFactories.Builder().addAggregator(avg).addAggregator(dateHistogram).getAggregatorFactories()));
+        assertEquals("The date_histogram (or histogram) aggregation cannot have sibling aggregations", e.getMessage());
+
+        TermsAggregationBuilder terms = AggregationBuilders.terms("terms");
+        terms.subAggregation(dateHistogram);
+        terms.subAggregation(avg);
+        e = expectThrows(ElasticsearchException.class,
+                () -> ExtractorUtils.getHistogramAggregation(
+                        new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories()));
+        assertEquals("The date_histogram (or histogram) aggregation cannot have sibling aggregations", e.getMessage());
+    }
+
+    public void testGetHistogramAggregation() {
+        AvgAggregationBuilder avg = AggregationBuilders.avg("avg");
+        TermsAggregationBuilder nestedTerms = AggregationBuilders.terms("nested_terms");
+
+        DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time");
+        AggregationBuilder histogramAggregationBuilder = ExtractorUtils.getHistogramAggregation(
+                new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories());
+        assertEquals(dateHistogram, histogramAggregationBuilder);
+
+        dateHistogram.subAggregation(avg).subAggregation(nestedTerms);
+        histogramAggregationBuilder = ExtractorUtils.getHistogramAggregation(
+                new AggregatorFactories.Builder().addAggregator(dateHistogram).getAggregatorFactories());
+        assertEquals(dateHistogram, histogramAggregationBuilder);
+
+        TermsAggregationBuilder toplevelTerms = AggregationBuilders.terms("top_level");
+        toplevelTerms.subAggregation(dateHistogram);
+        histogramAggregationBuilder = ExtractorUtils.getHistogramAggregation(
+                new AggregatorFactories.Builder().addAggregator(toplevelTerms).getAggregatorFactories());
+
+        assertEquals(dateHistogram, histogramAggregationBuilder);
+    }
+
+    public void testGetHistogramAggregation_MissingHistogramAgg() {
+        TermsAggregationBuilder terms = AggregationBuilders.terms("top_level");
+        ElasticsearchException e = expectThrows(ElasticsearchException.class,
+                () -> ExtractorUtils.getHistogramAggregation(
+                        new AggregatorFactories.Builder().addAggregator(terms).getAggregatorFactories()));
+        assertEquals("A date_histogram (or histogram) aggregation is required", e.getMessage());
+    }
+
+    public void testGetHistogramIntervalMillis_GivenDateHistogramWithInvalidTimeZone() {
+        MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
+        DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time")
+                .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime);
+        ElasticsearchException e = expectThrows(ElasticsearchException.class,
+                () -> ExtractorUtils.getHistogramIntervalMillis(dateHistogram));
+
+        assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC"));
+    }
+
+    public void testIsHistogram() {
+        assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.dateHistogram("time")));
+        assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.histogram("time")));
+        assertFalse(ExtractorUtils.isHistogram(AggregationBuilders.max("time")));
+    }
+
+    public void testValidateAndGetCalendarInterval() {
+        assertEquals(300 * 1000L, ExtractorUtils.validateAndGetCalendarInterval("5m"));
+        assertEquals(7200 * 1000L, ExtractorUtils.validateAndGetCalendarInterval("2h"));
+        assertEquals(86400L * 1000L, ExtractorUtils.validateAndGetCalendarInterval("1d"));
+    }
+
+    public void testValidateAndGetCalendarInterval_intervalIsLongerThanAWeek() {
+        expectThrows(ElasticsearchException.class,
+                () -> ExtractorUtils.validateAndGetCalendarInterval("8d"));
+    }
+}
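The new test file pins down `ExtractorUtils.getHistogramAggregation` entirely through its error messages: exactly one aggregation path may lead to the histogram, siblings at any level are rejected, and a missing histogram is an error. A minimal sketch consistent with those assertions — an illustration under assumptions (notably that an accessor for sub-aggregations such as `AggregationBuilder#getSubAggregations()` is available), not the actual ExtractorUtils source:

    import java.util.ArrayList;
    import java.util.List;

    import org.elasticsearch.ElasticsearchException;
    import org.elasticsearch.search.aggregations.AggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;

    final class HistogramLookupSketch {

        // Sketch only: descends through single-child aggregations until a histogram is found.
        static AggregationBuilder getHistogramAggregation(List<AggregationBuilder> aggregations) {
            if (aggregations.isEmpty()) {
                throw new ElasticsearchException("A date_histogram (or histogram) aggregation is required");
            }
            if (aggregations.size() != 1) {
                throw new ElasticsearchException("The date_histogram (or histogram) aggregation cannot have sibling aggregations");
            }
            AggregationBuilder agg = aggregations.get(0);
            if (isHistogram(agg)) {
                return agg; // sub-aggregations nested under the histogram itself are fine
            }
            return getHistogramAggregation(new ArrayList<>(agg.getSubAggregations())); // accessor assumed
        }

        // Mirrors testIsHistogram: histogram and date_histogram qualify, metrics do not.
        static boolean isHistogram(AggregationBuilder agg) {
            return agg instanceof HistogramAggregationBuilder || agg instanceof DateHistogramAggregationBuilder;
        }
    }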
@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;

import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
@@ -100,7 +100,7 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
                createHistogramBucket(2000L, 5, Collections.singletonList(createMax("time", 2000)))
        );

-       String json = aggToString("time", Collections.emptySet(), false, histogramBuckets);
+       String json = aggToString("time", Collections.emptySet(), false, histogramBuckets, 0L);

        assertThat(json, equalTo("{\"time\":1000} {\"time\":2000}"));
        assertThat(keyValuePairsWritten, equalTo(2L));
@@ -246,7 +246,7 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
                createTerms("my_field", new Term("c", 4, c4NumericAggs), new Term("b", 3, b4NumericAggs))))
        );

-       String json = aggToString("time", Sets.newHashSet("my_field", "my_value", "my_value2"), false, histogramBuckets);
+       String json = aggToString("time", Sets.newHashSet("my_field", "my_value", "my_value2"), false, histogramBuckets, 0L);

        assertThat(json, equalTo("{\"time\":1000,\"my_field\":\"a\",\"my_value\":111.0,\"my_value2\":112.0} " +
                "{\"time\":1000,\"my_field\":\"b\",\"my_value2\":122.0} " +
@@ -364,10 +364,11 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
        assertThat(e.getMessage(), containsString("Multi-percentile aggregation [my_field] is not supported"));
    }

    @SuppressWarnings("unchecked")
    public void testBucketAggContainsRequiredAgg() throws IOException {
        Set<String> fields = new HashSet<>();
        fields.add("foo");
-       AggregationToJsonProcessor processor = new AggregationToJsonProcessor("time", fields, false);
+       AggregationToJsonProcessor processor = new AggregationToJsonProcessor("time", fields, false, 0L);

        Terms termsAgg = mock(Terms.class);
        when(termsAgg.getBuckets()).thenReturn(Collections.emptyList());
@@ -394,27 +395,52 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
        assertTrue(processor.bucketAggContainsRequiredAgg(termsAgg));
    }

+   public void testBucketsBeforeStartArePruned() throws IOException {
+       List<Histogram.Bucket> histogramBuckets = Arrays.asList(
+               createHistogramBucket(1000L, 4, Arrays.asList(
+                       createMax("time", 1000), createPercentiles("my_field", 1.0))),
+               createHistogramBucket(2000L, 7, Arrays.asList(
+                       createMax("time", 2000), createPercentiles("my_field", 2.0))),
+               createHistogramBucket(3000L, 10, Arrays.asList(
+                       createMax("time", 3000), createPercentiles("my_field", 3.0))),
+               createHistogramBucket(4000L, 14, Arrays.asList(
+                       createMax("time", 4000), createPercentiles("my_field", 4.0)))
+       );
+
+       String json = aggToString("time", Sets.newHashSet("my_field"), true, histogramBuckets, 2000L);
+
+       assertThat(json, equalTo("{\"time\":2000,\"my_field\":2.0,\"doc_count\":7} " +
+               "{\"time\":3000,\"my_field\":3.0,\"doc_count\":10} " +
+               "{\"time\":4000,\"my_field\":4.0,\"doc_count\":14}"));
+   }
+
    private String aggToString(String timeField, Set<String> fields, Histogram.Bucket bucket) throws IOException {
-       return aggToString(timeField, fields, true, Collections.singletonList(bucket));
+       return aggToString(timeField, fields, true, Collections.singletonList(bucket), 0L);
    }

    private String aggToString(String timeField, Set<String> fields, List<Histogram.Bucket> buckets) throws IOException {
-       return aggToString(timeField, fields, true, buckets);
+       return aggToString(timeField, fields, true, buckets, 0L);
    }

-   private String aggToString(String timeField, Set<String> fields, boolean includeDocCount, List<Histogram.Bucket> buckets)
+   private String aggToString(String timeField, Set<String> fields, boolean includeDocCount, List<Histogram.Bucket> buckets,
+                              long startTime)
            throws IOException {

        Histogram histogram = createHistogramAggregation("buckets", buckets);

-       return aggToString(timeField, fields, includeDocCount, createAggs(Collections.singletonList(histogram)));
+       return aggToString(timeField, fields, includeDocCount, createAggs(Collections.singletonList(histogram)), startTime);
    }

    private String aggToString(String timeField, Set<String> fields, boolean includeDocCount, Aggregations aggregations)
            throws IOException {
        return aggToString(timeField, fields, includeDocCount, aggregations, 0L);
    }

    private String aggToString(String timeField, Set<String> fields, boolean includeDocCount, Aggregations aggregations, long startTime)
            throws IOException {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

-       AggregationToJsonProcessor processor = new AggregationToJsonProcessor(timeField, fields, includeDocCount);
+       AggregationToJsonProcessor processor = new AggregationToJsonProcessor(timeField, fields, includeDocCount, startTime);
        processor.process(aggregations);
        processor.writeDocs(10000, outputStream);
        keyValuePairsWritten = processor.getKeyValueCount();
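testBucketsBeforeStartArePruned fixes the semantics of the new `startTime` argument: histogram buckets whose timestamp precedes the datafeed's start time are dropped rather than written, so a restarted datafeed does not re-emit data it already extracted. The filtering rule in isolation, as a self-contained sketch (the `Bucket` type here is a stand-in, not the processor's real internals):

    import java.util.List;
    import java.util.stream.Collectors;

    final class BucketPruningSketch {

        static final class Bucket {
            final long timestamp; // bucket key as epoch millis
            final long docCount;

            Bucket(long timestamp, long docCount) {
                this.timestamp = timestamp;
                this.docCount = docCount;
            }
        }

        // Keep only buckets at or after the start time, preserving order.
        static List<Bucket> pruneBeforeStart(List<Bucket> buckets, long startTime) {
            return buckets.stream()
                    .filter(bucket -> bucket.timestamp >= startTime)
                    .collect(Collectors.toList());
        }
    }

With buckets at 1000, 2000, 3000, and 4000 and a start time of 2000, only the last three survive, which matches the JSON the test expects.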
@@ -8,12 +8,23 @@ package org.elasticsearch.xpack.monitoring;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.node.MockNode;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginInfo;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.elasticsearch.test.discovery.TestZenDiscovery;
+import org.elasticsearch.tribe.TribePlugin;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;

+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.function.Function;

import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
import static org.hamcrest.Matchers.equalTo;
@@ -30,6 +41,47 @@ public class MonitoringPluginTests extends MonitoringIntegTestCase {
        // do nothing as monitoring is sometime unbound
    }

+   @Override
+   protected boolean addTestZenDiscovery() {
+       return false;
+   }
+
+   public static class TribeAwareTestZenDiscoveryPlugin extends TestZenDiscovery.TestPlugin {
+
+       public TribeAwareTestZenDiscoveryPlugin(Settings settings) {
+           super(settings);
+       }
+
+       @Override
+       public Settings additionalSettings() {
+           if (settings.getGroups("tribe", true).isEmpty()) {
+               return super.additionalSettings();
+           } else {
+               return Settings.EMPTY;
+           }
+       }
+   }
+
+   public static class MockTribePlugin extends TribePlugin {
+
+       public MockTribePlugin(Settings settings) {
+           super(settings);
+       }
+
+       protected Function<Settings, Node> nodeBuilder(Path configPath) {
+           return settings -> new MockNode(new Environment(settings, configPath), internalCluster().getPlugins());
+       }
+
+   }
+
+   @Override
+   protected Collection<Class<? extends Plugin>> nodePlugins() {
+       ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
+       plugins.add(MockTribePlugin.class);
+       plugins.add(TribeAwareTestZenDiscoveryPlugin.class);
+       return plugins;
+   }
+
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
@@ -92,6 +92,7 @@ public class ShardsCollectorTests extends AbstractCollectorTestCase {
        assertThat(replicas, equalTo(expectedReplicas));
    }

+   @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/96")
    public void testShardsCollectorMultipleIndices() throws Exception {
        final String indexPrefix = "test-shards-";
        final int nbIndices = randomIntBetween(1, 3);
@@ -37,6 +37,8 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
@@ -87,7 +89,8 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin {
    @Override
    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
-                                              NamedXContentRegistry xContentRegistry) {
+                                              NamedXContentRegistry xContentRegistry, Environment environment,
+                                              NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
        InternalClient internalClient = new InternalClient(Settings.EMPTY, threadPool, client);
        PersistentTasksService persistentTasksService = new PersistentTasksService(Settings.EMPTY, clusterService, threadPool, internalClient);
        TestPersistentTasksExecutor testPersistentAction = new TestPersistentTasksExecutor(Settings.EMPTY, clusterService);
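This hunk reflects a widened `Plugin.createComponents` signature: `Environment`, `NodeEnvironment`, and `NamedWriteableRegistry` are now passed in, and every overriding plugin must accept them even when unused. A minimal conforming override, sketched against the signature shown above (package paths assumed for this era of Elasticsearch, and `NoOpComponentsPlugin` is a made-up name):

    import java.util.Collection;
    import java.util.Collections;

    import org.elasticsearch.client.Client;
    import org.elasticsearch.cluster.service.ClusterService;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.env.Environment;
    import org.elasticsearch.env.NodeEnvironment;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.script.ScriptService;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.elasticsearch.watcher.ResourceWatcherService;

    // Sketch: a plugin that needs no custom components still has to accept the extra
    // parameters introduced by this change; returning an empty list remains valid.
    public class NoOpComponentsPlugin extends Plugin {
        @Override
        public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                                   ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                                   NamedXContentRegistry xContentRegistry, Environment environment,
                                                   NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
            return Collections.emptyList();
        }
    }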
@@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.node.MockNode;

@@ -27,6 +28,8 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.NativeRealmIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
+import org.elasticsearch.test.discovery.TestZenDiscovery;
+import org.elasticsearch.tribe.TribePlugin;
import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.security.action.role.PutRoleResponse;
import org.elasticsearch.xpack.security.action.user.PutUserResponse;

@@ -37,7 +40,9 @@ import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;

@@ -45,6 +50,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;
import java.util.function.Predicate;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
@@ -82,6 +88,14 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase {
                        .put(NetworkModule.HTTP_ENABLED.getKey(), true)
                        .build();
            }

+           @Override
+           public Collection<Class<? extends Plugin>> nodePlugins() {
+               ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
+               plugins.add(MockTribePlugin.class);
+               plugins.add(TribeAwareTestZenDiscoveryPlugin.class);
+               return plugins;
+           }
        };

        cluster2 = new InternalTestCluster(randomLong(), createTempDir(), true, true, 1, 2,
@@ -149,6 +163,47 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase {
        return false;
    }

+   @Override
+   protected boolean addTestZenDiscovery() {
+       return false;
+   }
+
+   public static class TribeAwareTestZenDiscoveryPlugin extends TestZenDiscovery.TestPlugin {
+
+       public TribeAwareTestZenDiscoveryPlugin(Settings settings) {
+           super(settings);
+       }
+
+       @Override
+       public Settings additionalSettings() {
+           if (settings.getGroups("tribe", true).isEmpty()) {
+               return super.additionalSettings();
+           } else {
+               return Settings.EMPTY;
+           }
+       }
+   }
+
+   public static class MockTribePlugin extends TribePlugin {
+
+       public MockTribePlugin(Settings settings) {
+           super(settings);
+       }
+
+       protected Function<Settings, Node> nodeBuilder(Path configPath) {
+           return settings -> new MockNode(new Environment(settings, configPath), internalCluster().getPlugins());
+       }
+
+   }
+
+   @Override
+   protected Collection<Class<? extends Plugin>> nodePlugins() {
+       ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
+       plugins.add(MockTribePlugin.class);
+       plugins.add(TribeAwareTestZenDiscoveryPlugin.class);
+       return plugins;
+   }
+
    private void setupTribeNode(Settings settings) throws Exception {
        SecuritySettingsSource cluster2SettingsSource =
                new SecuritySettingsSource(1, useGeneratedSSL, createTempDir(), Scope.TEST) {
@@ -164,6 +164,13 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase {
                    return builder.build();
                }
            }

+           @Override
+           protected void addDefaultSecurityTransportType(Settings.Builder builder, Settings settings) {
+               if (useSecurity) {
+                   super.addDefaultSecurityTransportType(builder, settings);
+               }
+           }
        };

@@ -69,7 +69,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
    public void testRetrieveUsers() throws Exception {
        final Environment nodeEnvironment = nodeEnvironment();
        String home = Environment.PATH_HOME_SETTING.get(nodeEnvironment.settings());
-       String conf = nodeEnvironment.configFile().toString();
+       Path conf = nodeEnvironment.configFile();
        SecurityClient c = new SecurityClient(client());
        logger.error("--> creating users");
        int numToAdd = randomIntBetween(1,10);
@@ -90,16 +90,16 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {

        Settings.Builder builder = Settings.builder()
                .put("path.home", home)
-               .put("path.conf", conf);
+               .put("path.conf", conf.toString());
        SecuritySettingsSource.addSSLSettingsForStore(builder,
                "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode");
        Settings settings = builder.build();
        logger.error("--> retrieving users using URL: {}, home: {}", url, home);

        OptionParser parser = muor.getParser();
-       OptionSet options = parser.parse("-u", username, "-p", password, "-U", url, "--path.conf", conf);
+       OptionSet options = parser.parse("-u", username, "-p", password, "-U", url);
        logger.info("--> options: {}", options.asMap());
-       Set<String> users = muor.getUsersThatExist(t, settings, new Environment(settings), options);
+       Set<String> users = muor.getUsersThatExist(t, settings, new Environment(settings, conf), options);
        logger.info("--> output: \n{}", t.getOutput());
        for (String u : addedUsers) {
            assertThat("expected list to contain: " + u + ", real list: " + users, users.contains(u), is(true));
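The thread running through this file's changes: the config directory is now handed to `Environment` as a constructor argument instead of being passed through a `--path.conf` CLI option. The two-argument construction in isolation (the directory below is a placeholder, not a path from this commit):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;

    final class EnvironmentSketch {

        // Sketch of the two-argument construction used by the updated tests.
        static Environment exampleEnv() {
            Path configDir = Paths.get("/tmp/es-test/config"); // placeholder path
            Settings settings = Settings.builder()
                    .put("path.home", configDir.getParent().toString())
                    .build();
            return new Environment(settings, configDir);
        }
    }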
@@ -139,7 +139,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
        logger.error("--> retrieving roles using URL: {}, home: {}", url, home);

        OptionParser parser = muor.getParser();
-       OptionSet options = parser.parse("-u", username, "-p", password, "-U", url, "--path.conf", conf.toString());
+       OptionSet options = parser.parse("-u", username, "-p", password, "-U", url);
        Set<String> roles = muor.getRolesThatExist(t, settings, new Environment(settings, conf), options);
        logger.info("--> output: \n{}", t.getOutput());
        for (String r : addedRoles) {
@@ -11,7 +11,9 @@ import org.apache.logging.log4j.Logger;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.Terminal.Verbosity;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@@ -24,6 +26,7 @@ import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Map;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -36,7 +39,19 @@ public class ESNativeRealmMigrateToolTests extends CommandTestCase {

    @Override
    protected Command newCommand() {
-       return new ESNativeRealmMigrateTool();
+       return new ESNativeRealmMigrateTool() {
+           @Override
+           protected MigrateUserOrRoles newMigrateUserOrRoles() {
+               return new MigrateUserOrRoles() {
+
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(Settings.builder().put(settings).build());
+                   }
+
+               };
+           }
+       };
    }

    public void testUserJson() throws Exception {
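This override pattern recurs in SetupPasswordToolTests, UsersToolTests, and SystemKeyToolTests below: the production commands resolve an `Environment` from real config files on disk, so each test subclasses the command and rebuilds the `Environment` purely from the parsed settings map. The shape of the pattern, sketched with stand-in names (`FakeCommand` is not a class from this commit):

    import java.util.Map;

    import org.elasticsearch.cli.Terminal;
    import org.elasticsearch.cli.UserException;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;

    final class CreateEnvOverrideSketch {

        // Stand-in for a command exposing the createEnv hook, as the real
        // MigrateUserOrRoles, AutoSetup, AddUserCommand, etc. do in this diff.
        static class FakeCommand {
            protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
                throw new UserException(1, "the real implementation reads config files from disk");
            }
        }

        // Test-friendly variant: build the Environment from the settings map alone.
        static FakeCommand testCommand() {
            return new FakeCommand() {
                @Override
                protected Environment createEnv(Terminal terminal, Map<String, String> settings) {
                    return new Environment(Settings.builder().put(settings).build());
                }
            };
        }
    }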
@@ -8,12 +8,15 @@ package org.elasticsearch.xpack.security.authc.esnative.tool;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.cli.Terminal;
+import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.user.ElasticUser;
import org.elasticsearch.xpack.security.user.KibanaUser;

@@ -25,6 +28,7 @@ import org.mockito.Mockito;

import java.io.IOException;
import java.security.GeneralSecurityException;
+import java.util.Map;

import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.contains;
@@ -60,7 +64,29 @@ public class SetupPasswordToolTests extends CommandTestCase {

    @Override
    protected Command newCommand() {
-       return new SetupPasswordTool((e) -> httpClient, (e) -> keyStore);
+       return new SetupPasswordTool((e) -> httpClient, (e) -> keyStore) {
+
+           @Override
+           protected AutoSetup newAutoSetup() {
+               return new AutoSetup() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(Settings.builder().put(settings).build());
+                   }
+               };
+           }
+
+           @Override
+           protected InteractiveSetup newInteractiveSetup() {
+               return new InteractiveSetup() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(Settings.builder().put(settings).build());
+                   }
+               };
+           }
+
+       };
    }

    public void testAutoSetup() throws Exception {
@@ -10,7 +10,9 @@ import com.google.common.jimfs.Jimfs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.cli.EnvironmentAwareCommand;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtilsForTesting;

@@ -19,6 +21,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.security.authc.UserTokenTests;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.XPackPlugin;

@@ -36,6 +39,7 @@ import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
import java.util.Objects;

public class UsersToolTests extends CommandTestCase {
@@ -102,7 +106,57 @@ public class UsersToolTests extends CommandTestCase {

    @Override
    protected Command newCommand() {
-       return new UsersTool();
+       return new UsersTool() {
+           @Override
+           protected AddUserCommand newAddUserCommand() {
+               return new AddUserCommand() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(UsersToolTests.this.settings, confDir.getParent());
+                   }
+               };
+           }
+
+           @Override
+           protected DeleteUserCommand newDeleteUserCommand() {
+               return new DeleteUserCommand() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(UsersToolTests.this.settings, confDir.getParent());
+                   }
+               };
+           }
+
+           @Override
+           protected PasswordCommand newPasswordCommand() {
+               return new PasswordCommand() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(UsersToolTests.this.settings, confDir.getParent());
+                   }
+               };
+           }
+
+           @Override
+           protected RolesCommand newRolesCommand() {
+               return new RolesCommand() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(UsersToolTests.this.settings, confDir.getParent());
+                   }
+               };
+           }
+
+           @Override
+           protected ListCommand newListCommand() {
+               return new ListCommand() {
+                   @Override
+                   protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+                       return new Environment(UsersToolTests.this.settings, confDir.getParent());
+                   }
+               };
+           }
+       };
    }

    /** checks the user exists with the given password */
@@ -72,7 +72,7 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends ESTestCase {
        when(client.settings()).thenReturn(Settings.EMPTY);
        final long nowInMillis = randomNonNegativeLong();
        QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, null, null, mapperService, null,
-               null, xContentRegistry(), client, null, () -> nowInMillis, null);
+               null, xContentRegistry(), writableRegistry(), client, null, () -> nowInMillis, null);
        QueryShardContext queryShardContext = spy(realQueryShardContext);
        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
@@ -679,7 +679,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
        Client client = mock(Client.class);
        when(client.settings()).thenReturn(Settings.EMPTY);
        final long nowInMillis = randomNonNegativeLong();
-       QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), client,
+       QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), client,
                () -> nowInMillis);
        QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2");
        SecurityIndexSearcherWrapper.failIfQueryUsesClient(queryBuilder1, context);
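Both wrapper tests now thread a `NamedWriteableRegistry` into the query contexts via the `writableRegistry()` test helper. Built by hand, such a registry is just a list of named-writeable entries — a sketch, not the test base class's actual wiring:

    import java.util.Collections;

    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

    final class RegistrySketch {

        // An empty registry suffices for queries that never deserialize named
        // writeables; real tests register the search module's entries instead.
        static NamedWriteableRegistry emptyRegistry() {
            return new NamedWriteableRegistry(Collections.emptyList());
        }
    }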
@@ -10,7 +10,11 @@ import com.google.common.jimfs.Jimfs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
+import org.elasticsearch.cli.Terminal;
+import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.io.PathUtilsForTesting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.junit.After;

@@ -19,6 +23,7 @@ import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
+import java.util.Map;
import java.util.Set;

public class SystemKeyToolTests extends CommandTestCase {
@@ -41,7 +46,14 @@ public class SystemKeyToolTests extends CommandTestCase {

    @Override
    protected Command newCommand() {
-       return new SystemKeyTool();
+       return new SystemKeyTool() {
+
+           @Override
+           protected Environment createEnv(Terminal terminal, Map<String, String> settings) throws UserException {
+               return new Environment(Settings.builder().put(settings).build());
+           }
+
+       };
    }

    public void testGenerate() throws Exception {
@@ -15,10 +15,13 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.ReindexAction;
import org.elasticsearch.index.reindex.ReindexPlugin;

@@ -112,7 +115,8 @@ public class IndexUpgradeTasksIT extends ESIntegTestCase {
    @Override
    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
-                                              NamedXContentRegistry xContentRegistry) {
+                                              NamedXContentRegistry xContentRegistry, Environment environment,
+                                              NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
        return Collections.singletonList(new IndexUpgradeService(settings, Collections.singletonList(
                new IndexUpgradeCheck("test", settings,
                        new Function<IndexMetaData, UpgradeActionRequired>() {
@@ -1,6 +1,6 @@
 {
   "xpack.license.delete": {
-    "documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html",
+    "documentation": "https://www.elastic.co/guide/en/x-pack/current/license-management.html",
     "methods": ["DELETE"],
     "url": {
       "path": "/_xpack/license",
@@ -1,6 +1,6 @@
 {
   "xpack.license.get": {
-    "documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html",
+    "documentation": "https://www.elastic.co/guide/en/x-pack/current/license-management.html",
     "methods": ["GET"],
     "url": {
       "path": "/_xpack/license",
@@ -1,6 +1,6 @@
 {
   "xpack.license.post": {
-    "documentation": "https://www.elastic.co/guide/en/shield/current/license-management.html",
+    "documentation": "https://www.elastic.co/guide/en/x-pack/current/license-management.html",
     "methods": ["PUT", "POST"],
     "url": {
       "path": "/_xpack/license",
@@ -28,6 +28,7 @@ import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.user.User;
import org.junit.Before;

+import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;

@@ -50,8 +51,10 @@ public class MigrateToolIT extends MigrateToolTestCase {
    }

    public void testRunMigrateTool() throws Exception {
-       logger.info("--> CONF: {}", System.getProperty("tests.config.dir"));
-       Settings settings = Settings.builder().put("path.home", PathUtils.get(System.getProperty("tests.config.dir")).getParent()).build();
+       final String testConfigDir = System.getProperty("tests.config.dir");
+       logger.info("--> CONF: {}", testConfigDir);
+       final Path configPath = PathUtils.get(testConfigDir);
+       Settings settings = Settings.builder().put("path.home", configPath.getParent()).build();
        // Cluster should already be up
        String url = "http://" + getHttpURL();
        logger.info("--> using URL: {}", url);

@@ -59,9 +62,8 @@ public class MigrateToolIT extends MigrateToolTestCase {
        ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles();
        OptionParser parser = muor.getParser();

-       OptionSet options = parser.parse("-u", "test_admin", "-p", "x-pack-test-password", "-U", url,
-               "--path.conf", System.getProperty("tests.config.dir"));
-       muor.execute(t, options, new Environment(settings));
+       OptionSet options = parser.parse("-u", "test_admin", "-p", "x-pack-test-password", "-U", url);
+       muor.execute(t, options, new Environment(settings, configPath));

        logger.info("--> output:\n{}", t.getOutput());