Merge branch 'master' into ccr
* master:
  [ML] Fix master node deadlock during ML daily maintenance (#31836)
  Build: Switch integ-test-zip to OSS-only (#31866)
  SQL: Remove restriction for single column grouping (#31818)
  Build: Fix detection of Eclipse Compiler Server (#31838)
  Docs: Inconsistency between description and example (#31858)
  Re-enable bwc tests now that #29538 has been backported and 6.x intake build succeeded.
  QA: build improvements related to SQL projects (#31862)
  [Docs] Add clarification to analysis example (#31826)
  Check timeZone() argument in AbstractSqlQueryRequest (#31822)
  SQL: Fix incorrect HAVING equality (#31820)
  Smaller aesthetic fixes to InternalTestCluster (#31831)
  [Docs] Clarify accepted sort case (#31605)
  Temporarily disable bwc test in order to backport #29538
  Remove obsolete parameters from analyze rest spec (#31795)
  [Docs] Fix wrong link in Korean analyzer docs (#31815)
  Fix profiling of ordered terms aggs (#31814)
  Properly mute test involving JDK11 closes #31739
  Do not return all indices if a specific alias is requested via get aliases api. (#29538)
  Get snapshot rest client cleanups (#31740)
  Docs: Explain _bulk?refresh shard targeting
  Fix handling of points_only with term strategy in geo_shape (#31766)
commit 4be6b0e2ae
@@ -125,7 +125,10 @@ Map<String, String> buildMetadataMap = buildMetadataValue.tokenize(';').collectE
 allprojects {
   project.ext {
     // for ide hacks...
-    isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
+    isEclipse = System.getProperty("eclipse.launcher") != null ||   // Detects gradle launched from Eclipse's IDE
+                System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
+                gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
+                gradle.startParameter.taskNames.contains('cleanEclipse')
     isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea')

     // for BWC testing

@@ -446,7 +449,7 @@ allprojects {
   File licenseHeaderFile;
   String prefix = ':x-pack';

   if (Os.isFamily(Os.FAMILY_WINDOWS)) {
     prefix = prefix.replace(':', '_')
   }

@@ -455,7 +458,7 @@ allprojects {
   } else {
     licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/oss-license-header.txt')
   }

   String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n'
   String licenseHeader = licenseHeaderFile.getText('UTF-8').replace(System.lineSeparator(), lineSeparator)
   task copyEclipseSettings(type: Copy) {
@@ -2122,13 +2122,21 @@ public class RequestConvertersTests extends ESTestCase {
         getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0]));
         setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

-        boolean ignoreUnavailable = randomBoolean();
-        getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
-        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
+        if (randomBoolean()) {
+            boolean ignoreUnavailable = randomBoolean();
+            getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
+            expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
+        } else {
+            expectedParams.put("ignore_unavailable", Boolean.FALSE.toString());
+        }

-        boolean verbose = randomBoolean();
-        getSnapshotsRequest.verbose(verbose);
-        expectedParams.put("verbose", Boolean.toString(verbose));
+        if (randomBoolean()) {
+            boolean verbose = randomBoolean();
+            getSnapshotsRequest.verbose(verbose);
+            expectedParams.put("verbose", Boolean.toString(verbose));
+        } else {
+            expectedParams.put("verbose", Boolean.TRUE.toString());
+        }

         Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
         assertThat(endpoint, equalTo(request.getEndpoint()));
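The randomized branches above also pin down the server-side defaults: when `ignore_unavailable` is omitted the expected parameter is `false`, and `verbose` defaults to `true`. A minimal sketch of setting both flags explicitly on a request — repository and snapshot names are made-up placeholders:

    import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;

    GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest();
    getSnapshotsRequest.repository("my_repository");           // hypothetical repository name
    getSnapshotsRequest.snapshots(new String[]{"snapshot_1"});  // hypothetical snapshot name
    getSnapshotsRequest.ignoreUnavailable(true);  // skip missing snapshots instead of failing
    getSnapshotsRequest.verbose(false);           // fetch only minimal snapshot metadata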
@@ -48,7 +48,10 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.snapshots.SnapshotInfo;
 import org.elasticsearch.snapshots.SnapshotShardFailure;
 import org.elasticsearch.snapshots.SnapshotState;

 import java.io.IOException;
 import java.util.HashMap;

@@ -496,7 +499,14 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         // end::get-snapshots-execute

         // tag::get-snapshots-response
-        List<SnapshotInfo> snapshotsInfos = response.getSnapshots(); // <1>
+        List<SnapshotInfo> snapshotsInfos = response.getSnapshots();
+        SnapshotInfo snapshotInfo = snapshotsInfos.get(0);
+        RestStatus restStatus = snapshotInfo.status(); // <1>
+        SnapshotId snapshotId = snapshotInfo.snapshotId(); // <2>
+        SnapshotState snapshotState = snapshotInfo.state(); // <3>
+        List<SnapshotShardFailure> snapshotShardFailures = snapshotInfo.shardFailures(); // <4>
+        long startTime = snapshotInfo.startTime(); // <5>
+        long endTime = snapshotInfo.endTime(); // <6>
         // end::get-snapshots-response
         assertEquals(1, snapshotsInfos.size());
     }
@@ -102,7 +102,7 @@ Closure commonZipConfig = {

 task buildIntegTestZip(type: Zip) {
   configure(commonZipConfig)
-  with archiveFiles(transportModulesFiles, 'zip', false)
+  with archiveFiles(transportModulesFiles, 'zip', true)
 }

 task buildZip(type: Zip) {
@@ -193,7 +193,7 @@ subprojects {
     onlyIf toolExists
     doLast {
       final String licenseFilename
-      if (project.name.contains('oss-')) {
+      if (project.name.contains('oss-') || project.name == 'integ-test-zip') {
         licenseFilename = "APACHE-LICENSE-2.0.txt"
       } else {
         licenseFilename = "ELASTIC-LICENSE.txt"
@@ -56,7 +56,7 @@ You can analyze text using the mappings for a particular field in an index:
 include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-field-request]
 ---------------------------------------------------

-==== Optional arguemnts
+==== Optional arguments
 The following arguments can also optionally be provided:

 ["source","java",subs="attributes,callouts,macros"]
@@ -93,11 +93,16 @@ argument.
 [[java-rest-high-snapshot-get-snapshots-response]]
 ==== Get Snapshots Response

-Use the `GetSnapshotsResponse` to retrieve information about the evaluated
-request:
+The returned `GetSnapshotsResponse` allows the retrieval of information about the requested
+snapshots:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
 include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-response]
 --------------------------------------------------
-<1> Indicates the node has started the request.
+<1> The REST status of a snapshot
+<2> The snapshot id
+<3> The current state of the snapshot
+<4> Information about failures that occurred during the shard snapshot process
+<5> The snapshot start time
+<6> The snapshot end time
@@ -268,7 +268,7 @@ Which responds with:

 The `nori_part_of_speech` token filter removes tokens that match a set of
 part-of-speech tags. The list of supported tags and their meanings can be found here:
-{lucene_version_path}/org/apache/lucene/analysis/ko/POS.Tag.html[Part of speech tags]
+{lucene-core-javadoc}/../analyzers-nori/org/apache/lucene/analysis/ko/POS.Tag.html[Part of speech tags]

 It accepts the following setting:
@@ -13,15 +13,18 @@ defined per index.
 [float]
 == Index time analysis

-For instance at index time, the built-in <<english-analyzer,`english`>> _analyzer_ would
-convert this sentence:
+For instance, at index time the built-in <<english-analyzer,`english`>> _analyzer_
+will first convert the sentence:

 [source,text]
 ------
 "The QUICK brown foxes jumped over the lazy dog!"
 ------

-into these terms, which would be added to the inverted index.
+into distinct tokens. It will then lowercase each token, remove frequent
+stopwords ("the") and reduce the terms to their word stems (foxes -> fox,
+jumped -> jump, lazy -> lazi). In the end, the following terms will be added
+to the inverted index:

 [source,text]
 ------
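The docs change above spells out each analysis step. A hedged sketch of reproducing the example from Java with the transport client's analyze API — `client` is assumed to be an already-initialized `Client`; the expected stems are the ones the docs list:

    import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;

    AnalyzeResponse analyzeResponse = client.admin().indices()
            .prepareAnalyze("The QUICK brown foxes jumped over the lazy dog!")
            .setAnalyzer("english")   // run the built-in english analyzer
            .get();
    // Per the docs, this prints: quick, brown, fox, jump, over, lazi, dog
    for (AnalyzeResponse.AnalyzeToken token : analyzeResponse.getTokens()) {
        System.out.println(token.getTerm());
    }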
@@ -230,6 +230,13 @@ example.
 Control when the changes made by this request are visible to search. See
 <<docs-refresh,refresh>>.

+NOTE: Only the shards that receive the bulk request will be affected by
+`refresh`. Imagine a `_bulk?refresh=wait_for` request with three
+documents in it that happen to be routed to different shards in an index
+with five shards. The request will only wait for those three shards to
+refresh. The other two shards that make up the index do not
+participate in the `_bulk` request at all.
+
 [float]
 [[bulk-update]]
 === Update
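The same shard-targeting behavior applies when the refresh policy is set from the Java API. A minimal sketch — index, type, and id values are placeholders:

    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.index.IndexRequest;
    import org.elasticsearch.action.support.WriteRequest;
    import org.elasticsearch.common.xcontent.XContentType;

    BulkRequest bulkRequest = new BulkRequest();
    bulkRequest.add(new IndexRequest("test", "_doc", "1")
            .source("{\"field\":\"value\"}", XContentType.JSON));
    // Wait only for the shards this bulk actually touched to refresh.
    bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);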
@@ -486,7 +486,7 @@ The rest endpoint is: `/{index}/_alias/{alias}`.
 [float]
 ==== Examples:

-All aliases for the index users:
+All aliases for the index `logs_20162801`:

 [source,js]
 --------------------------------------------------
@@ -15,8 +15,8 @@ Currently the rescore API has only one implementation: the query
 rescorer, which uses a query to tweak the scoring. In the future,
 alternative rescorers may be made available, for example, a pair-wise rescorer.

-NOTE: An error will be thrown if an explicit <<search-request-sort,`sort`>> (other than `_score`)
-is provided with a `rescore` query.
+NOTE: An error will be thrown if an explicit <<search-request-sort,`sort`>>
+(other than `_score` in descending order) is provided with a `rescore` query.

 NOTE: when exposing pagination to your users, you should not change
 `window_size` as you step through each page (by passing different
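For reference, a hedged Java sketch of a query rescorer that stays within the restriction above — field names and query text are invented, and no explicit sort is set, so the default `_score` descending order applies:

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.builder.SearchSourceBuilder;
    import org.elasticsearch.search.rescore.QueryRescorerBuilder;

    SearchSourceBuilder source = new SearchSourceBuilder()
            .query(QueryBuilders.matchQuery("message", "quick brown fox"))
            // Rescore the top 50 hits per shard with a stricter phrase match.
            .addRescorer(new QueryRescorerBuilder(
                    QueryBuilders.matchPhraseQuery("message", "quick brown fox")).windowSize(50));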
@@ -177,6 +177,13 @@ And grouping by column expression (typically used along-side an alias):
 include-tagged::{sql-specs}/docs.csv-spec[groupByExpression]
 ----

+Or a mixture of the above:
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[groupByMulti]
+----
+
 When a `GROUP BY` clause is used in a `SELECT`, _all_ output expressions must be either aggregate functions or expressions used for grouping or derivatives of (otherwise there would be more than one possible value to return for each ungrouped column).

 To wit:
@@ -228,7 +228,7 @@ if (rootProject.ext.compilerJavaVersion.isJava11()) {
   ].join(',')
   }
 }
-if (rootProject.ext.compilerJavaVersion.isJava11()) {
+if (rootProject.ext.runtimeJavaVersion.isJava11() || rootProject.ext.compilerJavaVersion.isJava11()) {
   // TODO remove when: https://github.com/elastic/elasticsearch/issues/31498
   integTestHa.enabled = false
 }
@@ -15,16 +15,6 @@
       "index": {
         "type" : "string",
         "description" : "The name of the index to scope the operation"
-      },
-      "prefer_local": {
-        "type" : "boolean",
-        "description" : "With `true`, specify that a local shard should be used if available, with `false`, use a random shard (default: true)"
-      },
-      "format": {
-        "type": "enum",
-        "options" : ["detailed","text"],
-        "default": "detailed",
-        "description": "Format of the output"
       }
     }
   },
@@ -33,9 +33,11 @@ public interface AliasesRequest extends IndicesRequest.Replaceable {
     String[] aliases();

     /**
-     * Sets the array of aliases that the action relates to
+     * Replaces current aliases with the provided aliases.
+     *
+     * Sometimes alias expressions need to be resolved to concrete aliases prior to executing the transport action.
      */
-    AliasesRequest aliases(String... aliases);
+    void replaceAliases(String... aliases);

     /**
      * Returns true if wildcard expressions among aliases should be resolved, false otherwise
@@ -302,7 +302,6 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
         /**
          * Aliases to use with this action.
          */
-        @Override
         public AliasActions aliases(String... aliases) {
             if (type == AliasActions.Type.REMOVE_INDEX) {
                 throw new IllegalArgumentException("[aliases] is unsupported for [" + type + "]");

@@ -428,6 +427,11 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
             return aliases;
         }

+        @Override
+        public void replaceAliases(String... aliases) {
+            this.aliases = aliases;
+        }
+
         @Override
         public boolean expandAliasesWildcards() {
             //remove operations support wildcards among aliases, add operations don't
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.action.admin.indices.alias.get;

+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.AliasesRequest;
 import org.elasticsearch.action.support.IndicesOptions;

@@ -32,15 +33,12 @@ public class GetAliasesRequest extends MasterNodeReadRequest<GetAliasesRequest>

     private String[] indices = Strings.EMPTY_ARRAY;
     private String[] aliases = Strings.EMPTY_ARRAY;
     private IndicesOptions indicesOptions = IndicesOptions.strictExpand();
+    private String[] originalAliases = Strings.EMPTY_ARRAY;

-    public GetAliasesRequest(String[] aliases) {
+    public GetAliasesRequest(String... aliases) {
         this.aliases = aliases;
-    }
-
-    public GetAliasesRequest(String alias) {
-        this.aliases = new String[]{alias};
+        this.originalAliases = aliases;
     }

     public GetAliasesRequest() {

@@ -51,6 +49,9 @@ public class GetAliasesRequest extends MasterNodeReadRequest<GetAliasesRequest>
         indices = in.readStringArray();
         aliases = in.readStringArray();
         indicesOptions = IndicesOptions.readIndicesOptions(in);
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
+            originalAliases = in.readStringArray();
+        }
     }

     @Override

@@ -59,6 +60,9 @@ public class GetAliasesRequest extends MasterNodeReadRequest<GetAliasesRequest>
         out.writeStringArray(indices);
         out.writeStringArray(aliases);
         indicesOptions.writeIndicesOptions(out);
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
+            out.writeStringArray(originalAliases);
+        }
     }

     @Override

@@ -67,9 +71,9 @@ public class GetAliasesRequest extends MasterNodeReadRequest<GetAliasesRequest>
         return this;
     }

     @Override
     public GetAliasesRequest aliases(String... aliases) {
         this.aliases = aliases;
+        this.originalAliases = aliases;
         return this;
     }

@@ -88,6 +92,18 @@ public class GetAliasesRequest extends MasterNodeReadRequest<GetAliasesRequest>
         return aliases;
     }

+    @Override
+    public void replaceAliases(String... aliases) {
+        this.aliases = aliases;
+    }
+
+    /**
+     * Returns the aliases as originally specified by the user
+     */
+    public String[] getOriginalAliases() {
+        return originalAliases;
+    }
+
     @Override
     public boolean expandAliasesWildcards() {
         return true;
@@ -33,6 +33,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;

+import java.util.Collections;
 import java.util.List;

 public class TransportGetAliasesAction extends TransportMasterNodeReadAction<GetAliasesRequest, GetAliasesResponse> {

@@ -62,7 +63,24 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
     @Override
     protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) {
         String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
-        ImmutableOpenMap<String, List<AliasMetaData>> result = state.metaData().findAliases(request.aliases(), concreteIndices);
-        listener.onResponse(new GetAliasesResponse(result));
+        ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request.aliases(), concreteIndices);
+        listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases)));
     }

+    /**
+     * Fills alias result with empty entries for requested indices when no specific aliases were requested.
+     */
+    static ImmutableOpenMap<String, List<AliasMetaData>> postProcess(GetAliasesRequest request, String[] concreteIndices,
+                                                                     ImmutableOpenMap<String, List<AliasMetaData>> aliases) {
+        boolean noAliasesSpecified = request.getOriginalAliases() == null || request.getOriginalAliases().length == 0;
+        ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder(aliases);
+        for (String index : concreteIndices) {
+            if (aliases.get(index) == null && noAliasesSpecified) {
+                List<AliasMetaData> previous = mapBuilder.put(index, Collections.emptyList());
+                assert previous == null;
+            }
+        }
+        return mapBuilder.build();
+    }
+
 }
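With this change, asking for a specific alias no longer returns an entry for every index; empty entries are filled in only when no alias names were requested. A hedged sketch of the resulting behavior via the transport client — `client` setup is omitted and the alias name is invented:

    import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
    import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;

    // Only indices that actually have "alias1" appear in the response now.
    GetAliasesResponse response = client.admin().indices()
            .getAliases(new GetAliasesRequest("alias1"))
            .actionGet();
    // A request with no alias names still lists every concrete index, with empty
    // alias lists filled in by TransportGetAliasesAction.postProcess(...).
    GetAliasesResponse all = client.admin().indices()
            .getAliases(new GetAliasesRequest())
            .actionGet();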
@@ -265,8 +265,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To

         boolean matchAllAliases = matchAllAliases(aliases);
         ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
-        for (String index : intersection) {
+        for (String index : concreteIndices) {
             IndexMetaData indexMetaData = indices.get(index);
             List<AliasMetaData> filteredValues = new ArrayList<>();
             for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {

@@ -276,11 +275,11 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
                 }
             }

-            if (!filteredValues.isEmpty()) {
+            if (filteredValues.isEmpty() == false) {
                 // Make the list order deterministic
                 CollectionUtil.timSort(filteredValues, Comparator.comparing(AliasMetaData::alias));
+                mapBuilder.put(index, Collections.unmodifiableList(filteredValues));
             }
-            mapBuilder.put(index, Collections.unmodifiableList(filteredValues));
         }
         return mapBuilder.build();
     }
@@ -199,6 +199,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             Builder builder = new Builder(name);
+            Boolean pointsOnly = null;
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = entry.getKey();

@@ -230,13 +231,18 @@ public class GeoShapeFieldMapper extends FieldMapper {
                 } else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) {
                     builder.ignoreZValue(XContentMapValues.nodeBooleanValue(fieldNode, name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName()));
                     iterator.remove();
-                } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)
-                        && builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) == false) {
-                    boolean pointsOnly = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.STRATEGY_POINTS_ONLY);
-                    builder.fieldType().setPointsOnly(pointsOnly);
+                } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)) {
+                    pointsOnly = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.STRATEGY_POINTS_ONLY);
                     iterator.remove();
                 }
             }
+            if (pointsOnly != null) {
+                if (builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) && pointsOnly == false) {
+                    throw new IllegalArgumentException("points_only cannot be set to false for term strategy");
+                } else {
+                    builder.fieldType().setPointsOnly(pointsOnly);
+                }
+            }
             return builder;
         }
     }

@@ -565,7 +571,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
         } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified
             builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(50));
         }
-        if (includeDefaults || fieldType().strategyName() != Defaults.STRATEGY) {
+        if (includeDefaults || fieldType().strategyName().equals(Defaults.STRATEGY) == false) {
            builder.field(Names.STRATEGY, fieldType().strategyName());
         }
         if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) {

@@ -574,8 +580,15 @@ public class GeoShapeFieldMapper extends FieldMapper {
         if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) {
             builder.field(Names.ORIENTATION, fieldType().orientation());
         }
-        if (includeDefaults || fieldType().pointsOnly() != GeoShapeFieldMapper.Defaults.POINTS_ONLY) {
-            builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly());
+        if (fieldType().strategyName().equals(SpatialStrategy.TERM.getStrategyName())) {
+            // For TERMs strategy the defaults for points only change to true
+            if (includeDefaults || fieldType().pointsOnly() != true) {
+                builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly());
+            }
+        } else {
+            if (includeDefaults || fieldType().pointsOnly() != GeoShapeFieldMapper.Defaults.POINTS_ONLY) {
+                builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly());
+            }
         }
         if (includeDefaults || coerce.explicit()) {
             builder.field(Names.COERCE, coerce.value());
@@ -77,6 +77,10 @@ public class RestGetAliasesAction extends BaseRestHandler {

     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
+        // The TransportGetAliasesAction was improved to do the same post processing as is happening here.
+        // We can't remove this logic yet to support mixed clusters. We should be able to remove this logic
+        // here when 8.0 becomes the new version in the master branch.
+
         final boolean namesProvided = request.hasParam("name");
         final String[] aliases = request.paramAsStringArrayOrEmptyIfAll("name");
         final GetAliasesRequest getAliasesRequest = new GetAliasesRequest(aliases);
@@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation;
 import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
 import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
+import org.elasticsearch.search.profile.aggregation.ProfilingAggregator;

 import java.util.ArrayList;
 import java.util.List;

@@ -256,7 +257,7 @@ public class AggregationPath {
         Aggregator aggregator = root;
         for (int i = 0; i < pathElements.size(); i++) {
             AggregationPath.PathElement token = pathElements.get(i);
-            aggregator = aggregator.subAggregator(token.name);
+            aggregator = ProfilingAggregator.unwrap(aggregator.subAggregator(token.name));
             assert (aggregator instanceof SingleBucketAggregator && i <= pathElements.size() - 1)
                     || (aggregator instanceof NumericMetricsAggregator && i == pathElements.size() - 1) :
                     "this should be picked up before aggregation execution - on validate";

@@ -272,7 +273,7 @@ public class AggregationPath {
      */
     public Aggregator resolveTopmostAggregator(Aggregator root) {
         AggregationPath.PathElement token = pathElements.get(0);
-        Aggregator aggregator = root.subAggregator(token.name);
+        Aggregator aggregator = ProfilingAggregator.unwrap(root.subAggregator(token.name));
         assert (aggregator instanceof SingleBucketAggregator )
                 || (aggregator instanceof NumericMetricsAggregator) : "this should be picked up before aggregation execution - on validate";
         return aggregator;

@@ -287,7 +288,7 @@ public class AggregationPath {
     public void validate(Aggregator root) throws AggregationExecutionException {
         Aggregator aggregator = root;
         for (int i = 0; i < pathElements.size(); i++) {
-            aggregator = aggregator.subAggregator(pathElements.get(i).name);
+            aggregator = ProfilingAggregator.unwrap(aggregator.subAggregator(pathElements.get(i).name));
             if (aggregator == null) {
                 throw new AggregationExecutionException("Invalid aggregator order path [" + this + "]. Unknown aggregation ["
                         + pathElements.get(i).name + "]");
@@ -114,4 +114,11 @@ public class ProfilingAggregator extends Aggregator {
     public String toString() {
         return delegate.toString();
     }
+
+    public static Aggregator unwrap(Aggregator agg) {
+        if (agg instanceof ProfilingAggregator) {
+            return ((ProfilingAggregator) agg).delegate;
+        }
+        return agg;
+    }
 }
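When a search runs with profiling enabled, every aggregator is wrapped in a `ProfilingAggregator`, so `instanceof` checks and path resolution against the concrete aggregator class would otherwise fail; `unwrap` restores the delegate first. A hedged sketch of the pattern this fix enables — the helper method is illustrative, only the class names come from the diff:

    static boolean isMetricsAggregator(Aggregator agg) {
        // agg may be a ProfilingAggregator wrapper when profile=true was set;
        // unwrap first so the instanceof check sees the concrete delegate.
        return ProfilingAggregator.unwrap(agg) instanceof NumericMetricsAggregator;
    }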
@@ -112,9 +112,9 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
     }

     /**
-     * Returns REST status corresponding to this failure
+     * Returns {@link RestStatus} corresponding to this failure
      *
-     * @return REST STATUS
+     * @return REST status
      */
     @Override
     public RestStatus status() {
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.admin.indices.alias.get;
+
+import org.elasticsearch.cluster.metadata.AliasMetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class TransportGetAliasesActionTests extends ESTestCase {
+
+    public void testPostProcess() {
+        GetAliasesRequest request = new GetAliasesRequest();
+        ImmutableOpenMap<String, List<AliasMetaData>> aliases = ImmutableOpenMap.<String, List<AliasMetaData>>builder()
+            .fPut("b", Collections.singletonList(new AliasMetaData.Builder("y").build()))
+            .build();
+        ImmutableOpenMap<String, List<AliasMetaData>> result =
+            TransportGetAliasesAction.postProcess(request, new String[]{"a", "b", "c"}, aliases);
+        assertThat(result.size(), equalTo(3));
+        assertThat(result.get("a").size(), equalTo(0));
+        assertThat(result.get("b").size(), equalTo(1));
+        assertThat(result.get("c").size(), equalTo(0));
+
+        request = new GetAliasesRequest();
+        request.replaceAliases("y", "z");
+        aliases = ImmutableOpenMap.<String, List<AliasMetaData>>builder()
+            .fPut("b", Collections.singletonList(new AliasMetaData.Builder("y").build()))
+            .build();
+        result = TransportGetAliasesAction.postProcess(request, new String[]{"a", "b", "c"}, aliases);
+        assertThat(result.size(), equalTo(3));
+        assertThat(result.get("a").size(), equalTo(0));
+        assertThat(result.get("b").size(), equalTo(1));
+        assertThat(result.get("c").size(), equalTo(0));
+
+        request = new GetAliasesRequest("y", "z");
+        aliases = ImmutableOpenMap.<String, List<AliasMetaData>>builder()
+            .fPut("b", Collections.singletonList(new AliasMetaData.Builder("y").build()))
+            .build();
+        result = TransportGetAliasesAction.postProcess(request, new String[]{"a", "b", "c"}, aliases);
+        assertThat(result.size(), equalTo(1));
+        assertThat(result.get("b").size(), equalTo(1));
+    }
+
+}
@@ -570,24 +570,20 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> getting alias1");
         GetAliasesResponse getResponse = admin().indices().prepareGetAliases("alias1").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(5));
+        assertThat(getResponse.getAliases().size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
-        assertTrue(getResponse.getAliases().get("test").isEmpty());
-        assertTrue(getResponse.getAliases().get("test123").isEmpty());
-        assertTrue(getResponse.getAliases().get("foobarbaz").isEmpty());
-        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist("alias1").get();
         assertThat(existsResponse.exists(), equalTo(true));

         logger.info("--> getting all aliases that start with alias*");
         getResponse = admin().indices().prepareGetAliases("alias*").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(5));
+        assertThat(getResponse.getAliases().size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));

@@ -599,10 +595,6 @@ public class IndexAliasesIT extends ESIntegTestCase {
         assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());
-        assertTrue(getResponse.getAliases().get("test").isEmpty());
-        assertTrue(getResponse.getAliases().get("test123").isEmpty());
-        assertTrue(getResponse.getAliases().get("foobarbaz").isEmpty());
-        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         existsResponse = admin().indices().prepareAliasesExist("alias*").get();
         assertThat(existsResponse.exists(), equalTo(true));

@@ -687,13 +679,12 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> getting f* for index *bar");
         getResponse = admin().indices().prepareGetAliases("f*").addIndices("*bar").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(2));
+        assertThat(getResponse.getAliases().size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
-        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         existsResponse = admin().indices().prepareAliasesExist("f*")
                 .addIndices("*bar").get();
         assertThat(existsResponse.exists(), equalTo(true));

@@ -702,14 +693,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> getting f* for index *bac");
         getResponse = admin().indices().prepareGetAliases("foo").addIndices("*bac").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(2));
+        assertThat(getResponse.getAliases().size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
-        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         existsResponse = admin().indices().prepareAliasesExist("foo")
                 .addIndices("*bac").get();
         assertThat(existsResponse.exists(), equalTo(true));

@@ -727,6 +717,19 @@ public class IndexAliasesIT extends ESIntegTestCase {
                 .addIndices("foobar").get();
         assertThat(existsResponse.exists(), equalTo(true));

+        for (String aliasName : new String[]{null, "_all", "*"}) {
+            logger.info("--> getting {} alias for index foobar", aliasName);
+            getResponse = aliasName != null ? admin().indices().prepareGetAliases(aliasName).addIndices("foobar").get() :
+                admin().indices().prepareGetAliases().addIndices("foobar").get();
+            assertThat(getResponse, notNullValue());
+            assertThat(getResponse.getAliases().size(), equalTo(1));
+            assertThat(getResponse.getAliases().get("foobar").size(), equalTo(4));
+            assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
+            assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias2"));
+            assertThat(getResponse.getAliases().get("foobar").get(2).alias(), equalTo("bac"));
+            assertThat(getResponse.getAliases().get("foobar").get(3).alias(), equalTo("foo"));
+        }
+
         // alias at work again
         logger.info("--> getting * for index *bac");
         getResponse = admin().indices().prepareGetAliases("*").addIndices("*bac").get();
@@ -42,6 +42,7 @@ import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.not;

 public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {

@@ -588,10 +589,65 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
         }
     }

-    public String toXContentString(GeoShapeFieldMapper mapper) throws IOException {
+    public void testPointsOnlyDefaultsWithTermStrategy() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("location")
+            .field("type", "geo_shape")
+            .field("tree", "quadtree")
+            .field("precision", "10m")
+            .field("strategy", "term")
+            .endObject().endObject()
+            .endObject().endObject());
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
+        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+        GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy.getDistErrPct(), equalTo(0.0));
+        assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(23));
+        assertThat(strategy.isPointsOnly(), equalTo(true));
+        // term strategy changes the default for points_only, check that we handle it correctly
+        assertThat(toXContentString(geoShapeFieldMapper, false), not(containsString("points_only")));
+    }
+
+    public void testPointsOnlyFalseWithTermStrategy() throws Exception {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+            .startObject("properties").startObject("location")
+            .field("type", "geo_shape")
+            .field("tree", "quadtree")
+            .field("precision", "10m")
+            .field("strategy", "term")
+            .field("points_only", false)
+            .endObject().endObject()
+            .endObject().endObject());
+
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> parser.parse("type1", new CompressedXContent(mapping))
+        );
+        assertThat(e.getMessage(), containsString("points_only cannot be set to false for term strategy"));
+    }
+
+    public String toXContentString(GeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException {
         XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
-        mapper.doXContentBody(builder, true, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")));
+        ToXContent.Params params;
+        if (includeDefaults) {
+            params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"));
+        } else {
+            params = ToXContent.EMPTY_PARAMS;
+        }
+        mapper.doXContentBody(builder, includeDefaults, params);
         return Strings.toString(builder.endObject());
     }
+
+    public String toXContentString(GeoShapeFieldMapper mapper) throws IOException {
+        return toXContentString(mapper, true);
+    }
+
 }
@@ -22,6 +22,7 @@ package org.elasticsearch.search.profile.aggregation;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedOrdinalsSamplerAggregator;
 import org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator;

@@ -120,9 +121,17 @@ public class AggregationProfilerIT extends ESIntegTestCase {

     public void testMultiLevelProfile() {
         SearchResponse response = client().prepareSearch("idx").setProfile(true)
-                .addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)
-                        .subAggregation(terms("terms").field(TAG_FIELD)
-                                .subAggregation(avg("avg").field(NUMBER_FIELD)))).get();
+                .addAggregation(
+                    histogram("histo")
+                        .field(NUMBER_FIELD)
+                        .interval(1L)
+                        .subAggregation(
+                            terms("terms")
+                                .field(TAG_FIELD)
+                                .order(BucketOrder.aggregation("avg", false))
+                                .subAggregation(avg("avg").field(NUMBER_FIELD))
+                        )
+                ).get();
         assertSearchResponse(response);
         Map<String, ProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
@@ -263,8 +263,6 @@ public final class InternalTestCluster extends TestCluster {
         this.nodePrefix = nodePrefix;

         assert nodePrefix != null;
-        ArrayList<Class<? extends Plugin>> tmpMockPlugins = new ArrayList<>(mockPlugins);
-
         this.mockPlugins = mockPlugins;

@@ -458,14 +456,9 @@ public final class InternalTestCluster extends TestCluster {

     private synchronized NodeAndClient getRandomNodeAndClient(Predicate<NodeAndClient> predicate) {
         ensureOpen();
-        Collection<NodeAndClient> values = nodes.values().stream().filter(predicate).collect(Collectors.toCollection(ArrayList::new));
-        if (!values.isEmpty()) {
-            int whichOne = random.nextInt(values.size());
-            for (NodeAndClient nodeAndClient : values) {
-                if (whichOne-- == 0) {
-                    return nodeAndClient;
-                }
-            }
+        List<NodeAndClient> values = nodes.values().stream().filter(predicate).collect(Collectors.toList());
+        if (values.isEmpty() == false) {
+            return randomFrom(random, values);
         }
         return null;
     }

@@ -476,18 +469,14 @@ public final class InternalTestCluster extends TestCluster {
      * stop any of the running nodes.
      */
     public synchronized void ensureAtLeastNumDataNodes(int n) {
-        boolean added = false;
         int size = numDataNodes();
-        for (int i = size; i < n; i++) {
+        if (size < n) {
             logger.info("increasing cluster size from {} to {}", size, n);
-            added = true;
             if (numSharedDedicatedMasterNodes > 0) {
-                startDataOnlyNode(Settings.EMPTY);
+                startDataOnlyNodes(n - size);
             } else {
-                startNode(Settings.EMPTY);
+                startNodes(n - size);
             }
-        }
-        if (added) {
             validateClusterFormed();
         }
     }

@@ -1377,8 +1366,9 @@ public final class InternalTestCluster extends TestCluster {
             .filter(nac -> nodes.containsKey(nac.name) == false) // filter out old masters
             .count();
         final int currentMasters = getMasterNodesCount();
-        if (autoManageMinMasterNodes && currentMasters > 1 && newMasters > 0) {
-            // special case for 1 node master - we can't update the min master nodes before we add more nodes.
+        if (autoManageMinMasterNodes && currentMasters > 0 && newMasters > 0 &&
+            getMinMasterNodes(currentMasters + newMasters) <= currentMasters) {
+            // if we're adding too many master-eligible nodes at once, we can't update the min master setting before adding the nodes.
             updateMinMasterNodes(currentMasters + newMasters);
         }
         List<Future<?>> futures = nodeAndClients.stream().map(node -> executor.submit(node::startNode)).collect(Collectors.toList());

@@ -1393,7 +1383,8 @@ public final class InternalTestCluster extends TestCluster {
         }
         nodeAndClients.forEach(this::publishNode);

-        if (autoManageMinMasterNodes && currentMasters == 1 && newMasters > 0) {
+        if (autoManageMinMasterNodes && currentMasters > 0 && newMasters > 0 &&
+            getMinMasterNodes(currentMasters + newMasters) > currentMasters) {
             // update once masters have joined
             validateClusterFormed();
             updateMinMasterNodes(currentMasters + newMasters);

@@ -1648,27 +1639,24 @@ public final class InternalTestCluster extends TestCluster {
     }

     /**
-     * Starts a node with default settings and returns it's name.
+     * Starts a node with default settings and returns its name.
      */
     public synchronized String startNode() {
         return startNode(Settings.EMPTY);
     }

     /**
-     * Starts a node with the given settings builder and returns it's name.
+     * Starts a node with the given settings builder and returns its name.
      */
     public synchronized String startNode(Settings.Builder settings) {
         return startNode(settings.build());
     }

     /**
-     * Starts a node with the given settings and returns it's name.
+     * Starts a node with the given settings and returns its name.
      */
     public synchronized String startNode(Settings settings) {
-        final int defaultMinMasterNodes = getMinMasterNodes(getMasterNodesCount() + (Node.NODE_MASTER_SETTING.get(settings) ? 1 : 0));
-        NodeAndClient buildNode = buildNode(settings, defaultMinMasterNodes);
-        startAndPublishNodesAndClients(Collections.singletonList(buildNode));
-        return buildNode.name;
+        return startNodes(settings).get(0);
     }

     /**
@@ -45,6 +45,7 @@ subprojects {
   ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ml:${version}": xpackModule('ml')]
   ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-monitoring:${version}": xpackModule('monitoring')]
   ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-security:${version}": xpackModule('security')]
   ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-sql:${version}": xpackModule('sql')]
   ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-upgrade:${version}": xpackModule('upgrade')]
   ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-watcher:${version}": xpackModule('watcher')]
+  ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ccr:${version}": xpackModule('ccr')]
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.action.support.ThreadedActionListener;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;

@@ -57,8 +58,8 @@ public class TransportDeleteExpiredDataAction extends HandledTransportAction<Del
         Auditor auditor = new Auditor(client, clusterService.nodeName());
         List<MlDataRemover> dataRemovers = Arrays.asList(
                 new ExpiredResultsRemover(client, clusterService, auditor),
-                new ExpiredForecastsRemover(client),
-                new ExpiredModelSnapshotsRemover(client, clusterService),
+                new ExpiredForecastsRemover(client, threadPool),
+                new ExpiredModelSnapshotsRemover(client, threadPool, clusterService),
                 new UnusedStateRemover(client, clusterService)
         );
         Iterator<MlDataRemover> dataRemoversIterator = new VolatileCursorIterator<>(dataRemovers);

@@ -69,9 +70,15 @@ public class TransportDeleteExpiredDataAction extends HandledTransportAction<Del
                                    ActionListener<DeleteExpiredDataAction.Response> listener) {
         if (mlDataRemoversIterator.hasNext()) {
             MlDataRemover remover = mlDataRemoversIterator.next();
-            remover.remove(ActionListener.wrap(
-                    booleanResponse -> deleteExpiredData(mlDataRemoversIterator, listener),
-                    listener::onFailure));
+            ActionListener<Boolean> nextListener = ActionListener.wrap(
+                    booleanResponse -> deleteExpiredData(mlDataRemoversIterator, listener), listener::onFailure);
+            // Removing expired ML data and artifacts requires multiple operations.
+            // These are queued up and executed sequentially in the action listener;
+            // the chained calls must all run on the ML utility thread pool, NOT the thread
+            // the previous action returned on, which in the case of a transport_client_boss
+            // thread is a disaster.
+            remover.remove(new ThreadedActionListener<>(logger, threadPool, MachineLearning.UTILITY_THREAD_POOL_NAME, nextListener,
+                    false));
         } else {
             logger.info("Completed deletion of expired data");
             listener.onResponse(new DeleteExpiredDataAction.Response(true));
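`ThreadedActionListener` forks the delegate's `onResponse`/`onFailure` onto the named executor instead of running them on the calling transport thread. A hedged sketch of the wrapping pattern used throughout this fix — names match the diff, the listener bodies are illustrative:

    ActionListener<Boolean> nextListener = ActionListener.wrap(
            response -> { /* continue the deletion chain here */ },
            e -> { /* propagate the failure here */ });
    // Dispatch callbacks to the ML utility pool; the final 'false' means do not
    // force execution if that pool's queue is full.
    ActionListener<Boolean> threaded = new ThreadedActionListener<>(logger, threadPool,
            MachineLearning.UTILITY_THREAD_POOL_NAME, nextListener, false);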
@@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchAction;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.ThreadedActionListener;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;

@@ -27,11 +28,13 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
 import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.core.ml.job.results.Forecast;
 import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
 import org.elasticsearch.xpack.core.ml.job.results.Result;
+import org.elasticsearch.xpack.ml.MachineLearning;
 import org.joda.time.DateTime;
 import org.joda.time.chrono.ISOChronology;

@@ -57,10 +60,12 @@ public class ExpiredForecastsRemover implements MlDataRemover {
     private static final String RESULTS_INDEX_PATTERN = AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*";

     private final Client client;
+    private final ThreadPool threadPool;
     private final long cutoffEpochMs;

-    public ExpiredForecastsRemover(Client client) {
+    public ExpiredForecastsRemover(Client client, ThreadPool threadPool) {
         this.client = Objects.requireNonNull(client);
+        this.threadPool = Objects.requireNonNull(threadPool);
         this.cutoffEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis();
     }

@@ -79,7 +84,8 @@ public class ExpiredForecastsRemover implements MlDataRemover {

         SearchRequest searchRequest = new SearchRequest(RESULTS_INDEX_PATTERN);
         searchRequest.source(source);
-        client.execute(SearchAction.INSTANCE, searchRequest, forecastStatsHandler);
+        client.execute(SearchAction.INSTANCE, searchRequest, new ThreadedActionListener<>(LOGGER, threadPool,
+                MachineLearning.UTILITY_THREAD_POOL_NAME, forecastStatsHandler, false));
     }

     private void deleteForecasts(SearchResponse searchResponse, ActionListener<Boolean> listener) {
@@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchAction;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.ThreadedActionListener;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.logging.Loggers;

@@ -18,11 +19,13 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
 import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
 import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField;
+import org.elasticsearch.xpack.ml.MachineLearning;

 import java.util.ArrayList;
 import java.util.Iterator;

@@ -51,10 +54,12 @@ public class ExpiredModelSnapshotsRemover extends AbstractExpiredJobDataRemover
     private static final int MODEL_SNAPSHOT_SEARCH_SIZE = 10000;

     private final Client client;
+    private final ThreadPool threadPool;

-    public ExpiredModelSnapshotsRemover(Client client, ClusterService clusterService) {
+    public ExpiredModelSnapshotsRemover(Client client, ThreadPool threadPool, ClusterService clusterService) {
         super(clusterService);
         this.client = Objects.requireNonNull(client);
+        this.threadPool = Objects.requireNonNull(threadPool);
     }

     @Override

@@ -84,7 +89,12 @@ public class ExpiredModelSnapshotsRemover extends AbstractExpiredJobDataRemover

         searchRequest.source(new SearchSourceBuilder().query(query).size(MODEL_SNAPSHOT_SEARCH_SIZE));

-        client.execute(SearchAction.INSTANCE, searchRequest, new ActionListener<SearchResponse>() {
+        client.execute(SearchAction.INSTANCE, searchRequest, new ThreadedActionListener<>(LOGGER, threadPool,
+                MachineLearning.UTILITY_THREAD_POOL_NAME, expiredSnapshotsListener(job.getId(), listener), false));
+    }
+
+    private ActionListener<SearchResponse> expiredSnapshotsListener(String jobId, ActionListener<Boolean> listener) {
+        return new ActionListener<SearchResponse>() {
             @Override
             public void onResponse(SearchResponse searchResponse) {
                 try {

@@ -100,9 +110,9 @@ public class ExpiredModelSnapshotsRemover extends AbstractExpiredJobDataRemover

             @Override
             public void onFailure(Exception e) {
-                listener.onFailure(new ElasticsearchException("[" + job.getId() + "] Search for expired snapshots failed", e));
+                listener.onFailure(new ElasticsearchException("[" + jobId + "] Search for expired snapshots failed", e));
             }
-        });
+        };
     }

     private void deleteModelSnapshots(Iterator<ModelSnapshot> modelSnapshotIterator, ActionListener<Boolean> listener) {
@@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -21,6 +22,8 @@ import org.elasticsearch.mock.orig.Mockito;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.FixedExecutorBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.MLMetadataField;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction;

@@ -28,6 +31,8 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobTests;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.junit.After;
import org.junit.Before;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

@@ -38,24 +43,27 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.same;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {

    private Client client;
    private ThreadPool threadPool;
    private ClusterService clusterService;
    private ClusterState clusterState;
    private List<SearchRequest> capturedSearchRequests;
    private List<DeleteModelSnapshotAction.Request> capturedDeleteModelSnapshotRequests;
    private List<SearchResponse> searchResponsesPerCall;
    private ActionListener<Boolean> listener;
    private TestListener listener;

    @Before
    public void setUpTests() {

@@ -66,7 +74,19 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
        clusterState = mock(ClusterState.class);
        when(clusterService.state()).thenReturn(clusterState);
        client = mock(Client.class);
        listener = mock(ActionListener.class);
        listener = new TestListener();

        // Init thread pool
        Settings settings = Settings.builder()
                .put("node.name", "expired_model_snapshots_remover_test")
                .build();
        threadPool = new ThreadPool(settings,
                new FixedExecutorBuilder(settings, MachineLearning.UTILITY_THREAD_POOL_NAME, 1, 1000, ""));
    }

    @After
    public void shutdownThreadPool() throws InterruptedException {
        terminate(threadPool);
    }

    public void testRemove_GivenJobsWithoutRetentionPolicy() {

@@ -78,7 +98,8 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {

        createExpiredModelSnapshotsRemover().remove(listener);

        verify(listener).onResponse(true);
        listener.waitToCompletion();
        assertThat(listener.success, is(true));
        Mockito.verifyNoMoreInteractions(client);
    }

@@ -88,7 +109,8 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {

        createExpiredModelSnapshotsRemover().remove(listener);

        verify(listener).onResponse(true);
        listener.waitToCompletion();
        assertThat(listener.success, is(true));
        Mockito.verifyNoMoreInteractions(client);
    }

@@ -108,6 +130,9 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {

        createExpiredModelSnapshotsRemover().remove(listener);

        listener.waitToCompletion();
        assertThat(listener.success, is(true));

        assertThat(capturedSearchRequests.size(), equalTo(2));
        SearchRequest searchRequest = capturedSearchRequests.get(0);
        assertThat(searchRequest.indices(), equalTo(new String[] {AnomalyDetectorsIndex.jobResultsAliasedName("snapshots-1")}));

@@ -124,8 +149,6 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
        deleteSnapshotRequest = capturedDeleteModelSnapshotRequests.get(2);
        assertThat(deleteSnapshotRequest.getJobId(), equalTo("snapshots-2"));
        assertThat(deleteSnapshotRequest.getSnapshotId(), equalTo("snapshots-2_1"));

        verify(listener).onResponse(true);
    }

    public void testRemove_GivenClientSearchRequestsFail() throws IOException {

@@ -144,13 +167,14 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {

        createExpiredModelSnapshotsRemover().remove(listener);

        listener.waitToCompletion();
        assertThat(listener.success, is(false));

        assertThat(capturedSearchRequests.size(), equalTo(1));
        SearchRequest searchRequest = capturedSearchRequests.get(0);
        assertThat(searchRequest.indices(), equalTo(new String[] {AnomalyDetectorsIndex.jobResultsAliasedName("snapshots-1")}));

        assertThat(capturedDeleteModelSnapshotRequests.size(), equalTo(0));

        verify(listener).onFailure(any());
    }

    public void testRemove_GivenClientDeleteSnapshotRequestsFail() throws IOException {

@@ -169,6 +193,9 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {

        createExpiredModelSnapshotsRemover().remove(listener);

        listener.waitToCompletion();
        assertThat(listener.success, is(false));

        assertThat(capturedSearchRequests.size(), equalTo(1));
        SearchRequest searchRequest = capturedSearchRequests.get(0);
        assertThat(searchRequest.indices(), equalTo(new String[] {AnomalyDetectorsIndex.jobResultsAliasedName("snapshots-1")}));

@@ -177,8 +204,6 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
        DeleteModelSnapshotAction.Request deleteSnapshotRequest = capturedDeleteModelSnapshotRequests.get(0);
        assertThat(deleteSnapshotRequest.getJobId(), equalTo("snapshots-1"));
        assertThat(deleteSnapshotRequest.getSnapshotId(), equalTo("snapshots-1_1"));

        verify(listener).onFailure(any());
    }

    private void givenJobs(List<Job> jobs) {

@@ -192,7 +217,7 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
    }

    private ExpiredModelSnapshotsRemover createExpiredModelSnapshotsRemover() {
        return new ExpiredModelSnapshotsRemover(client, clusterService);
        return new ExpiredModelSnapshotsRemover(client, threadPool, clusterService);
    }

    private static ModelSnapshot createModelSnapshot(String jobId, String snapshotId) {

@@ -230,7 +255,7 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
            int callCount = 0;

            @Override
            public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
            public Void answer(InvocationOnMock invocationOnMock) {
                SearchRequest searchRequest = (SearchRequest) invocationOnMock.getArguments()[1];
                capturedSearchRequests.add(searchRequest);
                ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];

@@ -244,7 +269,7 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
        }).when(client).execute(same(SearchAction.INSTANCE), any(), any());
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
            public Void answer(InvocationOnMock invocationOnMock) {
                capturedDeleteModelSnapshotRequests.add((DeleteModelSnapshotAction.Request) invocationOnMock.getArguments()[1]);
                ActionListener<DeleteModelSnapshotAction.Response> listener =
                        (ActionListener<DeleteModelSnapshotAction.Response>) invocationOnMock.getArguments()[2];

@@ -257,4 +282,30 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase {
            }
        }).when(client).execute(same(DeleteModelSnapshotAction.INSTANCE), any(), any());
    }

    private class TestListener implements ActionListener<Boolean> {

        private boolean success;
        private final CountDownLatch latch = new CountDownLatch(1);

        @Override
        public void onResponse(Boolean aBoolean) {
            success = aBoolean;
            latch.countDown();
        }

        @Override
        public void onFailure(Exception e) {
            latch.countDown();
        }

        public void waitToCompletion() {
            try {
                latch.await(10, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                fail("listener timed out before completing");
            }
        }
    }

}
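Once remove() completes on the utility thread pool instead of the caller's thread, a Mockito verify() immediately after the call would race the callback; TestListener therefore parks the test on a CountDownLatch until one of the two callbacks fires. A compact sketch of the same synchronization using CompletableFuture (the Listener interface here is a hypothetical stand-in, not the test's actual types):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

final class AsyncTestSupport {

    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    // Start an async operation and block until its listener fires, failing
    // after a timeout rather than hanging the test suite forever.
    static <T> T awaitResult(Consumer<Listener<T>> operation) throws Exception {
        CompletableFuture<T> future = new CompletableFuture<>();
        operation.accept(new Listener<T>() {
            @Override
            public void onResponse(T response) {
                future.complete(response);
            }

            @Override
            public void onFailure(Exception e) {
                future.completeExceptionally(e);
            }
        });
        return future.get(10, TimeUnit.SECONDS);
    }
}

A test would then call something like awaitResult(l -> remover.remove(l)) and assert on the returned Boolean.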
@@ -200,7 +200,7 @@ class IndicesAndAliasesResolver {
            if (aliasesRequest.expandAliasesWildcards()) {
                List<String> aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(),
                        loadAuthorizedAliases(authorizedIndices.get(), metaData));
                aliasesRequest.aliases(aliases.toArray(new String[aliases.size()]));
                aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()]));
            }
            if (indicesReplacedWithNoIndices) {
                if (indicesRequest instanceof GetAliasesRequest == false) {
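This one-line switch follows from the get-aliases fix merged in from master (#29538): when authorization expands alias wildcards into the authorized set, the request should record that a replacement took place rather than look as if the user had asked for those aliases directly. A rough sketch of the distinction with hypothetical types (the real AliasesRequest API differs):

// Hypothetical request type: the second setter carries the same data but
// also marks the aliases as resolved, so an empty list downstream can mean
// "nothing matched" instead of falling back to "all aliases".
final class SketchAliasesRequest {

    private String[] aliases = new String[0];
    private boolean aliasesResolved = false;   // assumed marker, for illustration only

    void aliases(String... aliases) {          // plain setter, e.g. from request parsing
        this.aliases = aliases;
    }

    void replaceAliases(String... aliases) {   // resolver-facing setter
        this.aliases = aliases;
        this.aliasesResolved = true;
    }

    boolean aliasesResolved() {
        return aliasesResolved;
    }
}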
@@ -112,7 +112,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme
    }

    public AbstractSqlQueryRequest timeZone(TimeZone timeZone) {
        if (query == null) {
        if (timeZone == null) {
            throw new IllegalArgumentException("time zone may not be null.");
        }
        this.timeZone = timeZone;
@@ -110,4 +110,10 @@ public class SqlQueryRequestTests extends AbstractSerializingTestCase<SqlQueryRe
        mutator.accept(newRequest);
        return newRequest;
    }

    public void testTimeZoneNullException() {
        final SqlQueryRequest sqlQueryRequest = createTestInstance();
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> sqlQueryRequest.timeZone(null));
        assertEquals("time zone may not be null.", e.getMessage());
    }
}
@@ -104,6 +104,14 @@ public abstract class Expressions {
        return null;
    }

    public static boolean equalsAsAttribute(Expression left, Expression right) {
        if (!left.semanticEquals(right)) {
            Attribute l = attribute(left);
            return (l != null && l.semanticEquals(attribute(right)));
        }
        return true;
    }

    public static TypeResolution typeMustBe(Expression e, Predicate<Expression> predicate, String message) {
        return predicate.test(e) ? TypeResolution.TYPE_RESOLVED : new TypeResolution(message);
    }
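equalsAsAttribute is what lets the optimizer rule further down treat an ORDER BY key and a GROUP BY key as the same thing even when one of them is reached through an alias (e.g. languages % 10 versus its alias l). A self-contained sketch of the fallback logic, with a string id standing in for the plan's semantic-equality machinery and hypothetical Expr/Attr types:

import java.util.Objects;

final class AttributeEquality {

    // Stand-ins for the plan's Expression/Attribute: every expression has a
    // semantic id and may expose itself as a named attribute.
    interface Expr {
        String semanticId();
        Attr attribute();   // null when the expression has no attribute form
    }

    interface Attr extends Expr {}

    static boolean equalsAsAttribute(Expr left, Expr right) {
        if (Objects.equals(left.semanticId(), right.semanticId())) {
            return true;                  // semantically the same expression
        }
        Attr l = left.attribute();
        Attr r = right.attribute();
        return l != null && r != null     // otherwise compare via attributes
                && Objects.equals(l.semanticId(), r.semanticId());
    }
}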
@@ -27,6 +27,7 @@ public class Equals extends BinaryComparison {
        return new Equals(location(), newLeft, newRight);
    }

    @Override
    public Object fold() {
        return Objects.equals(left().fold(), right().fold());
    }

@@ -38,6 +39,6 @@ public class Equals extends BinaryComparison {

    @Override
    public String symbol() {
        return "=";
        return "==";
    }
}
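These are the two visible pieces of the HAVING-equality fix (#31820): fold() lets an equality whose children both fold to literals collapse to a Boolean constant during optimization, and the symbol change to == keeps equality distinct in plan output (see the Filter[?i == 2] assertion in CliExplainIT below). A sketch of the folding step in isolation, where the Suppliers stand in for calling fold() on the two children:

import java.util.Objects;
import java.util.function.Supplier;

final class EqualsFolding {

    // Reduce an equality over two already-folded children to a constant,
    // mirroring Objects.equals(left().fold(), right().fold()) above.
    static Object foldEquals(Supplier<Object> left, Supplier<Object> right) {
        return Objects.equals(left.get(), right.get());
    }

    public static void main(String[] args) {
        // e.g. HAVING c = 10 once c has folded to 10:
        System.out.println(foldEquals(() -> 10, () -> 10));   // true
    }
}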
@@ -112,7 +112,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                new ReplaceAggsWithStats(),
                new PromoteStatsToExtendedStats(),
                new ReplaceAggsWithPercentiles(),
                new ReplceAggsWithPercentileRanks()
                new ReplaceAggsWithPercentileRanks()
                );

        Batch operators = new Batch("Operator Optimization",

@@ -132,7 +132,9 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                new PruneFilters(),
                new PruneOrderBy(),
                new PruneOrderByNestedFields(),
                new PruneCast()
                new PruneCast(),
                // order by alignment of the aggs
                new SortAggregateOnOrderBy()
                // requires changes in the folding
                // since the exact same function, with the same ID can appear in multiple places
                // see https://github.com/elastic/x-pack-elasticsearch/issues/3527

@@ -612,7 +614,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
        }
    }

    static class ReplceAggsWithPercentileRanks extends Rule<LogicalPlan, LogicalPlan> {
    static class ReplaceAggsWithPercentileRanks extends Rule<LogicalPlan, LogicalPlan> {

        @Override
        public LogicalPlan apply(LogicalPlan p) {

@@ -828,6 +830,46 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
        }
    }

    /**
     * Align the order in aggregate based on the order by.
     */
    static class SortAggregateOnOrderBy extends OptimizerRule<OrderBy> {

        @Override
        protected LogicalPlan rule(OrderBy ob) {
            List<Order> order = ob.order();

            // remove constants
            List<Order> nonConstant = order.stream().filter(o -> !o.child().foldable()).collect(toList());

            // if the sort points to an agg, change the agg order based on the order
            if (ob.child() instanceof Aggregate) {
                Aggregate a = (Aggregate) ob.child();
                List<Expression> groupings = new ArrayList<>(a.groupings());
                boolean orderChanged = false;

                for (int orderIndex = 0; orderIndex < nonConstant.size(); orderIndex++) {
                    Order o = nonConstant.get(orderIndex);
                    Expression fieldToOrder = o.child();
                    for (Expression group : a.groupings()) {
                        if (Expressions.equalsAsAttribute(fieldToOrder, group)) {
                            // move grouping in front
                            groupings.remove(group);
                            groupings.add(orderIndex, group);
                            orderChanged = true;
                        }
                    }
                }

                if (orderChanged) {
                    Aggregate newAgg = new Aggregate(a.location(), a.child(), groupings, a.aggregates());
                    return new OrderBy(ob.location(), newAgg, ob.order());
                }
            }
            return ob;
        }
    }

    static class CombineLimits extends OptimizerRule<Limit> {

        @Override
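The new rule boils down to a stable reordering: every GROUP BY key that also appears among the non-constant ORDER BY keys is moved to the slot ORDER BY assigns it, while the remaining keys keep their relative order. A standalone sketch over plain strings (the real rule compares Expressions via equalsAsAttribute and rebuilds the Aggregate node):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class GroupingAlignment {

    // Move each grouping key that is also an order-by key to the position the
    // order-by gives it; keys that are not ordered on keep their position.
    static List<String> align(List<String> groupings, List<String> orderBy) {
        List<String> result = new ArrayList<>(groupings);
        for (int i = 0; i < orderBy.size(); i++) {
            String key = orderBy.get(i);
            if (result.remove(key)) {
                result.add(Math.min(i, result.size()), key);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // GROUP BY g, l ORDER BY l, g  =>  groupings become [l, g]
        System.out.println(align(Arrays.asList("g", "l"), Arrays.asList("l", "g")));
    }
}

This mirrors the loop above, where orderIndex drives groupings.add(orderIndex, group).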
@@ -5,8 +5,6 @@
 */
package org.elasticsearch.xpack.sql.planner;

import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.plan.physical.AggregateExec;
import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.sql.plan.physical.Unexecutable;
import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec;

@@ -71,23 +69,11 @@ abstract class Verifier {
                    failures.add(fail(e, "Unresolved expression"));
                }
            });

            if (p instanceof AggregateExec) {
                forbidMultiFieldGroupBy((AggregateExec) p, failures);
            }
        });

        return failures;
    }

    private static void forbidMultiFieldGroupBy(AggregateExec a, List<Failure> failures) {
        if (a.groupings().size() > 1) {
            failures.add(fail(a.groupings().get(0), "Currently, only a single expression can be used with GROUP BY; please select one of "
                    + Expressions.names(a.groupings())));
        }
    }


    static List<Failure> verifyExecutingPlan(PhysicalPlan plan) {
        List<Failure> failures = new ArrayList<>();
@@ -1,52 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.planner;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.sql.optimizer.Optimizer;
import org.elasticsearch.xpack.sql.parser.SqlParser;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.KeywordEsField;
import org.elasticsearch.xpack.sql.type.TextEsField;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TimeZone;

public class VerifierErrorMessagesTests extends ESTestCase {

    private SqlParser parser = new SqlParser();
    private Optimizer optimizer = new Optimizer();
    private Planner planner = new Planner();

    private String verify(String sql) {
        Map<String, EsField> mapping = new LinkedHashMap<>();
        mapping.put("bool", new EsField("bool", DataType.BOOLEAN, Collections.emptyMap(), true));
        mapping.put("int", new EsField("int", DataType.INTEGER, Collections.emptyMap(), true));
        mapping.put("text", new TextEsField("text", Collections.emptyMap(), true));
        mapping.put("keyword", new KeywordEsField("keyword", Collections.emptyMap(), true, DataType.KEYWORD.defaultPrecision, true));
        EsIndex test = new EsIndex("test", mapping);
        IndexResolution getIndexResult = IndexResolution.valid(test);
        Analyzer analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, TimeZone.getTimeZone("UTC"));
        LogicalPlan plan = optimizer.optimize(analyzer.analyze(parser.createStatement(sql), true));
        PlanningException e = expectThrows(PlanningException.class, () -> planner.mapPlan(plan, true));
        assertTrue(e.getMessage().startsWith("Found "));
        String header = "Found 1 problem(s)\nline ";
        return e.getMessage().substring(header.length());
    }

    public void testMultiGroupBy() {
        assertEquals("1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]",
                verify("SELECT bool FROM test GROUP BY bool, keyword"));
    }
}
@@ -4,6 +4,7 @@ import org.elasticsearch.gradle.test.RunTask
description = 'Integration tests for SQL'
apply plugin: 'elasticsearch.build'
archivesBaseName = 'qa-sql'
group = "org.elasticsearch.x-pack.qa.sql"

dependencies {
  compile "org.elasticsearch.test:framework:${version}"
@@ -65,7 +65,7 @@ public class CliExplainIT extends CliIntegrationTestCase {
        assertThat(readLine(), startsWith("----------"));
        assertThat(readLine(), startsWith("With[{}]"));
        assertThat(readLine(), startsWith("\\_Project[[?*]]"));
        assertThat(readLine(), startsWith(" \\_Filter[?i = 2]"));
        assertThat(readLine(), startsWith(" \\_Filter[?i == 2]"));
        assertThat(readLine(), startsWith(" \\_UnresolvedRelation[[][index=test],null,Unknown index [test]]"));
        assertEquals("", readLine());
@@ -11,7 +11,7 @@ import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.CsvTestCase;
import org.elasticsearch.xpack.qa.sql.jdbc.DataLoader;
import org.elasticsearch.xpack.qa.sql.jdbc.JdbcAssert;
import org.elasticsearch.xpack.qa.sql.jdbc.JdbcTestUtils;
import org.elasticsearch.xpack.qa.sql.jdbc.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.qa.sql.jdbc.SqlSpecTestCase;

@@ -68,8 +68,8 @@ public class JdbcDocCsvSpectIT extends SpecBaseIntegrationTestCase {
        //
        // uncomment this to printout the result set and create new CSV tests
        //
        //JdbcTestUtils.logLikeCLI(elastic, log);
        JdbcAssert.assertResultSets(expected, elastic, log, true);
        JdbcTestUtils.logLikeCLI(elastic, log);
        //JdbcAssert.assertResultSets(expected, elastic, log, true);
    }

    @Override
@@ -48,6 +48,39 @@ SELECT emp_no * 2 AS e FROM test_emp GROUP BY e ORDER BY e;
groupByModScalar
SELECT (emp_no % 3) + 1 AS e FROM test_emp GROUP BY e ORDER BY e;

// multiple group by
groupByMultiOnText
SELECT gender g, languages l FROM "test_emp" GROUP BY gender, languages ORDER BY gender ASC, languages ASC;
groupByMultiOnTextWithWhereClause
SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender ASC, languages ASC;
groupByMultiOnTextWithWhereAndLimit
SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender, languages LIMIT 1;
groupByMultiOnTextOnAlias
SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l ORDER BY gender ASC, languages ASC;
groupByMultiOnTextOnAliasOrderDesc
SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l ORDER BY g, l ASC;

groupByMultiOnDate
SELECT birth_date b, languages l FROM "test_emp" GROUP BY birth_date, languages ORDER BY birth_date DESC, languages;
groupByMultiOnDateWithWhereClause
SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages;
groupByMultiOnDateWithWhereAndLimit
SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages LIMIT 1;
groupByMultiOnDateOnAlias
SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY b, l ORDER BY birth_date DESC, languages;

groupByMultiAddScalar
SELECT emp_no + 1 AS e, languages + 5 AS l FROM test_emp GROUP BY e, l ORDER BY e, l;
groupByMultiMinScalarDesc
SELECT emp_no - 1 AS e, languages - 5 AS l FROM test_emp GROUP BY e, l ORDER BY e DESC, l;
groupByMultiAddScalarDesc
SELECT emp_no % 2 AS e, languages % 10 AS l FROM test_emp GROUP BY e, l ORDER BY e DESC, l;
groupByMultiMulScalar
SELECT emp_no * 2 AS e, languages * 2 AS l FROM test_emp GROUP BY e, l ORDER BY e, l;
groupByMultiModScalar
SELECT (emp_no % 3) + 1 AS e, (languages % 3) + 1 AS l FROM test_emp GROUP BY e, l ORDER BY e, l;


//
// Aggregate Functions
//

@@ -76,13 +109,26 @@ countDistinct
SELECT COUNT(DISTINCT hire_date) AS count FROM test_emp;
// end::countDistinct

aggCountAliasAndWhereClauseMultiGroupBy
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender, languages;
aggCountAliasAndWhereClauseAndLimitMultiGroupBy
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender, languages LIMIT 1;
aggCountAliasWithCastAndFilterMultiGroupBy
SELECT gender g, languages l, CAST(COUNT(*) AS INT) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender, languages;
aggCountWithAliasMultiGroupBy
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l ORDER BY gender, languages;
aggCountWithAliasMultiGroupByDifferentOrder
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l ORDER BY languages ASC, gender DESC;


// Conditional COUNT
aggCountAndHaving
SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g HAVING COUNT(*) > 10 ORDER BY gender;
aggCountAndHavingEquality
SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g HAVING COUNT(*) = 10 ORDER BY gender;
aggCountOnColumnAndHaving
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING COUNT(gender) > 10 ORDER BY gender;
// NOT supported yet since Having introduces a new agg
aggCountOnColumnAndWildcardAndHaving
SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g HAVING COUNT(gender) > 10 ORDER BY gender;
aggCountAndHavingOnAlias

@@ -91,21 +137,49 @@ aggCountOnColumnAndHavingOnAlias
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 ORDER BY gender;
aggCountOnColumnAndMultipleHaving
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND c < 70 ORDER BY gender ;
aggCountOnColumnAndMultipleHavingEquals
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND c = 63 ORDER BY gender ;
aggCountOnColumnAndMultipleHavingWithLimit
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND c < 70 ORDER BY gender LIMIT 1;
aggCountOnColumnAndHavingBetween
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c BETWEEN 10 AND 70 ORDER BY gender;
aggCountOnColumnAndHavingBetweenWithLimit
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c BETWEEN 10 AND 70 ORDER BY gender LIMIT 1;

aggCountOnColumnAndHavingOnAliasAndFunction
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND COUNT(gender) < 70 ORDER BY gender;
// NOT supported yet since Having introduces a new agg
aggCountOnColumnAndHavingOnAliasAndFunctionWildcard -> COUNT(*/1) vs COUNT(gender)
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND COUNT(*) < 70 ORDER BY gender;
aggCountOnColumnAndHavingOnAliasAndFunctionConstant
SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND COUNT(1) < 70 ORDER BY gender;

aggCountAndHavingMultiGroupBy
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l HAVING COUNT(*) > 10 ORDER BY gender, l;
aggCountOnColumnAndHavingMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING COUNT(gender) > 10 ORDER BY gender, languages;
aggCountOnSecondColumnAndHavingMultiGroupBy
SELECT gender g, languages l, COUNT(languages) c FROM "test_emp" GROUP BY g, l HAVING COUNT(gender) > 10 ORDER BY gender, languages;
aggCountOnColumnAndWildcardAndHavingMultiGroupBy
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l HAVING COUNT(gender) > 10 ORDER BY gender, languages;
aggCountAndHavingOnAliasMultiGroupBy
SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l HAVING c > 10 ORDER BY gender, languages;
aggCountOnColumnAndHavingOnAliasMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 ORDER BY gender, languages;
aggCountOnColumnAndMultipleHavingMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND c < 70 ORDER BY gender, languages;
aggCountOnColumnAndMultipleHavingWithLimitMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND c < 70 ORDER BY gender, languages LIMIT 1;
aggCountOnColumnAndHavingBetweenMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c BETWEEN 10 AND 70 ORDER BY gender, languages;
aggCountOnColumnAndHavingBetweenWithLimitMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c BETWEEN 10 AND 70 ORDER BY gender, languages LIMIT 1;

aggCountOnColumnAndHavingOnAliasAndFunctionMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND COUNT(gender) < 70 ORDER BY gender, languages;
aggCountOnColumnAndHavingOnAliasAndFunctionWildcardMultiGroupBy -> COUNT(*/1) vs COUNT(gender)
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND COUNT(*) < 70 ORDER BY gender, languages;
aggCountOnColumnAndHavingOnAliasAndFunctionConstantMultiGroupBy
SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND COUNT(1) < 70 ORDER BY gender, languages;


// MIN
aggMinImplicit

@@ -145,6 +219,22 @@ SELECT gender g, MIN(emp_no) m FROM "test_emp" GROUP BY g HAVING m BETWEEN 10 AN
aggMinWithMultipleHavingOnAliasAndFunction
SELECT gender g, MIN(emp_no) m FROM "test_emp" GROUP BY g HAVING m > 10 AND MIN(emp_no) < 99999 ORDER BY gender;

aggMinWithHavingGroupMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING MIN(emp_no) > 10 ORDER BY gender, languages;
aggMinWithHavingOnAliasMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING m > 10 ORDER BY gender, languages;
aggMinWithMultipleHavingMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING m > 10 AND m < 99999 ORDER BY gender, languages;
aggMinWithMultipleHavingBetweenMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING m BETWEEN 10 AND 99999 ORDER BY gender, languages;
aggMinWithMultipleHavingWithLimitMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING m > 10 AND m < 99999 ORDER BY g, l LIMIT 1;
aggMinWithMultipleHavingBetweenWithLimitMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING m BETWEEN 10 AND 99999 ORDER BY g, l LIMIT 1;
aggMinWithMultipleHavingOnAliasAndFunctionMultiGroupBy
SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING m > 10 AND MIN(emp_no) < 99999 ORDER BY gender, languages;


// MAX
aggMaxImplicit
// tag::max

@@ -253,6 +343,8 @@ SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a FROM "test_emp" GROUP BY g HAVING
//
aggGroupByOnScalarWithHaving
SELECT emp_no + 1 AS e FROM test_emp GROUP BY e HAVING AVG(salary) BETWEEN 1 AND 10010 ORDER BY e;
aggMultiGroupByOnScalarWithHaving
SELECT emp_no + 1 AS e, languages % 10 AS l FROM test_emp GROUP BY e, l HAVING AVG(salary) BETWEEN 1 AND 10010 ORDER BY e, l;

//
// Mixture of Aggs that triggers promotion of aggs to stats

@@ -272,12 +364,34 @@ SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_
aggHavingScalarOnAggFunctionsWithoutAliasesInAndNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING MAX(salary) % MIN(salary) + AVG(salary) > 3000 ORDER BY languages;

aggMultiGroupByMultiIncludingScalarFunction
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages ORDER BY gender, languages;
aggMultiGroupByHavingWithAggNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING AVG(salary) > 30000 ORDER BY gender, languages;
aggMultiGroupByHavingWithAliasOnScalarFromGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING d BETWEEN 50 AND 10000 AND AVG(salary) > 30000 ORDER BY gender, languages;
aggMultiGroupByHavingWithScalarFunctionBasedOnAliasFromGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING ma % mi > 1 AND AVG(salary) > 30000 ORDER BY gender, languages;
aggMultiGroupByHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING d - ma % mi > 0 AND AVG(salary) > 30000 ORDER BY gender, languages;
aggMultiGroupByHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupByAndAggNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING ROUND(d - ABS(ma % mi)) + AVG(salary) > 0 AND AVG(salary) > 30000 ORDER BY gender, languages;
aggMultiGroupByHavingScalarOnAggFunctionsWithoutAliasesInAndNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY gender, languages HAVING MAX(salary) % MIN(salary) + AVG(salary) > 3000 ORDER BY gender, languages;


//
// Mixture of aggs that get promoted plus filtering on one of them
//
aggMultiWithHaving
SELECT MIN(salary) min, MAX(salary) max, gender g, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g HAVING max > 74600 ORDER BY gender;

aggMultiGroupByMultiWithHaving
SELECT MIN(salary) min, MAX(salary) max, gender g, languages l, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g, languages HAVING max > 74600 ORDER BY gender, languages;


// filter on count (which is a special agg)
aggMultiWithHavingOnCount
SELECT MIN(salary) min, MAX(salary) max, gender g, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g HAVING c > 40 ORDER BY gender;
aggMultiGroupByMultiWithHavingOnCount
SELECT MIN(salary) min, MAX(salary) max, gender g, languages l, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g, languages HAVING c > 40 ORDER BY gender, languages;
@@ -456,6 +456,27 @@ SELECT languages + 1 AS l FROM emp GROUP BY l;
// end::groupByExpression
;

groupByMulti
// tag::groupByMulti
SELECT gender g, languages l, COUNT(*) c FROM "emp" GROUP BY g, l ORDER BY languages ASC, gender DESC;

       g       |       l       |       c
---------------+---------------+---------------
F              |2              |4
F              |3              |8
F              |4              |7
F              |5              |7
F              |6              |11
M              |2              |12
M              |3              |12
M              |4              |15
M              |5              |11
M              |6              |13

// end::groupByMulti
;


groupByAndAgg
// tag::groupByAndAgg
SELECT gender AS g, COUNT(*) AS c FROM emp GROUP BY gender;