Merge branch 'master' into feature/client_aggs_parsing
commit 1cb34b5eba
@@ -195,14 +195,16 @@ public class SnapshotsInProgress extends AbstractNamedDiffable<Custom> implement
             return snapshot.toString();
         }
 
-        private ImmutableOpenMap<String, List<ShardId>> findWaitingIndices(ImmutableOpenMap<ShardId, ShardSnapshotStatus> shards) {
+        // package private for testing
+        ImmutableOpenMap<String, List<ShardId>> findWaitingIndices(ImmutableOpenMap<ShardId, ShardSnapshotStatus> shards) {
             Map<String, List<ShardId>> waitingIndicesMap = new HashMap<>();
             for (ObjectObjectCursor<ShardId, ShardSnapshotStatus> entry : shards) {
                 if (entry.value.state() == State.WAITING) {
-                    List<ShardId> waitingShards = waitingIndicesMap.get(entry.key.getIndex());
+                    final String indexName = entry.key.getIndexName();
+                    List<ShardId> waitingShards = waitingIndicesMap.get(indexName);
                     if (waitingShards == null) {
                         waitingShards = new ArrayList<>();
-                        waitingIndicesMap.put(entry.key.getIndexName(), waitingShards);
+                        waitingIndicesMap.put(indexName, waitingShards);
                     }
                     waitingShards.add(entry.key);
                 }
@@ -216,7 +218,6 @@ public class SnapshotsInProgress extends AbstractNamedDiffable<Custom> implement
             }
             return waitingIndicesBuilder.build();
         }
-
     }
 
     /**
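A note on the fix above: the removed lookup passed `entry.key.getIndex()` to a map keyed by `String`. Since `Map.get` accepts any `Object`, this compiles, but if `getIndex()` returns an `Index` object rather than the index name, the lookup always misses and each `put` replaces the previously collected list. The sketch below is self-contained, with plain types standing in for `ShardId`/`Index`; it shows the failure shape and a condensed form of the fixed grouping using `computeIfAbsent`. It is an illustration, not the project's code.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class WaitingIndicesSketch {

    public static void main(String[] args) {
        Map<String, List<Integer>> waiting = new HashMap<>();
        Object wrongKey = new Object(); // stands in for an Index object used against a String-keyed map

        // Map.get(Object) compiles with any key type, but a non-String key never
        // equals a String, so the lookup silently misses every time:
        System.out.println(waiting.get(wrongKey)); // null

        // The fixed code keys by the String index name; the same grouping can be
        // condensed with computeIfAbsent:
        for (int shardId = 0; shardId < 3; shardId++) {
            waiting.computeIfAbsent("idx1", k -> new ArrayList<>()).add(shardId);
        }
        System.out.println(waiting); // {idx1=[0, 1, 2]}
    }
}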
@@ -523,15 +523,21 @@ public class Node implements Closeable {
 
         boolean clean = true;
         for (final String defaultPathData : Environment.DEFAULT_PATH_DATA_SETTING.get(settings)) {
-            final Path nodeDirectory = NodeEnvironment.resolveNodePath(getPath(defaultPathData), nodeEnv.getNodeLockId());
-            if (Files.exists(nodeDirectory) == false) {
+            final Path defaultNodeDirectory = NodeEnvironment.resolveNodePath(getPath(defaultPathData), nodeEnv.getNodeLockId());
+            if (Files.exists(defaultNodeDirectory) == false) {
                 continue;
             }
-            final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(nodeDirectory);
+
+            if (isDefaultPathDataInPathData(nodeEnv, defaultNodeDirectory)) {
+                continue;
+            }
+
+            final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(defaultNodeDirectory);
             final Set<String> availableIndexFolders = nodeEnv.availableIndexFoldersForPath(nodePath);
             if (availableIndexFolders.isEmpty()) {
                 continue;
             }
+
             clean = false;
             logger.error("detected index data in default.path.data [{}] where there should not be any", nodePath.indicesPath);
             for (final String availableIndexFolder : availableIndexFolders) {
@@ -554,6 +560,15 @@ public class Node implements Closeable {
         throw new IllegalStateException(message);
     }
 
+    private static boolean isDefaultPathDataInPathData(final NodeEnvironment nodeEnv, final Path defaultNodeDirectory) throws IOException {
+        for (final NodeEnvironment.NodePath dataPath : nodeEnv.nodePaths()) {
+            if (Files.isSameFile(dataPath.path, defaultNodeDirectory)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
     @SuppressForbidden(reason = "read path that is not configured in environment")
     private static Path getPath(final String path) {
         return PathUtils.get(path);
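The helper above relies on `Files.isSameFile`, which resolves both arguments and compares the underlying files, so a `default.path.data` that reaches a configured data path through a symbolic or hard link is still recognized (the `EvilNodeTests` added below exercise exactly that). A minimal standalone demonstration, assuming a platform that permits symlinks:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class SameFileSketch {

    public static void main(String[] args) throws IOException {
        Path dir = Files.createTempDirectory("data-0");
        Path link = Files.createTempDirectory("links").resolve("data-link");
        Files.createSymbolicLink(link, dir); // may require privileges on some platforms

        // Path#equals compares the textual paths; Files.isSameFile compares the
        // files the paths resolve to, so the link matches its target.
        System.out.println(link.equals(dir));            // false
        System.out.println(Files.isSameFile(link, dir)); // true
    }
}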
@@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * Serialization and merge logic for {@link GeoCentroidAggregator}.
@@ -154,4 +155,24 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr
         }
         return builder;
     }
+
+    @Override
+    public boolean doEquals(Object o) {
+        InternalGeoCentroid that = (InternalGeoCentroid) o;
+        return count == that.count &&
+            Objects.equals(centroid, that.centroid);
+    }
+
+    @Override
+    protected int doHashCode() {
+        return Objects.hash(centroid, count);
+    }
+
+    @Override
+    public String toString() {
+        return "InternalGeoCentroid{" +
+            "centroid=" + centroid +
+            ", count=" + count +
+            '}';
+    }
 }
@@ -112,11 +112,6 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
         return sum;
     }
 
-    @Override
-    public String getCountAsString() {
-        return valueAsString(Metrics.count.name());
-    }
-
     @Override
     public String getMinAsString() {
         return valueAsString(Metrics.min.name());
@@ -50,11 +50,6 @@ public interface Stats extends NumericMetricsAggregation.MultiValue {
      */
     double getSum();
 
-    /**
-     * @return The number of values that were aggregated as a String.
-     */
-    String getCountAsString();
-
     /**
      * @return The minimum value of all aggregated values as a String.
      */
@@ -126,7 +126,7 @@ public final class AliasFilter implements Writeable {
 
     @Override
     public int hashCode() {
-        return Objects.hash(aliases, filter, reparseAliases);
+        return Objects.hash(reparseAliases, Arrays.hashCode(aliases), filter);
     }
 
     @Override
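The one-line `hashCode` fix above matters because `Objects.hash(aliases, ...)` invokes the array's own `hashCode`, which is identity-based; two filters whose alias arrays are equal element-by-element (presumably what `AliasFilter#equals` checks via `Arrays.equals`) would then hash differently, violating the equals/hashCode contract that the new `AliasFilterTests` below verify. A standalone illustration:

import java.util.Arrays;
import java.util.Objects;

public class ArrayHashSketch {

    public static void main(String[] args) {
        String[] a = { "alias_0", "alias_1" };
        String[] b = { "alias_0", "alias_1" };

        System.out.println(Arrays.equals(a, b));                                  // true
        // Passed as a single argument among others, the array contributes its
        // identity hashCode, so equal-content arrays almost always hash differently:
        System.out.println(Objects.hash((Object) a) == Objects.hash((Object) b)); // false
        // Arrays.hashCode is computed from the contents:
        System.out.println(Arrays.hashCode(a) == Arrays.hashCode(b));             // true
    }
}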
@@ -81,7 +81,9 @@ public class ContextMappings implements ToXContent {
     public ContextMapping get(String name) {
         ContextMapping contextMapping = contextNameMap.get(name);
         if (contextMapping == null) {
-            throw new IllegalArgumentException("Unknown context name[" + name + "], must be one of " + contextNameMap.size());
+            List<String> keys = new ArrayList<>(contextNameMap.keySet());
+            Collections.sort(keys);
+            throw new IllegalArgumentException("Unknown context name [" + name + "], must be one of " + keys.toString());
         }
         return contextMapping;
     }
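Two separate problems are fixed in this hunk: the old message concatenated `contextNameMap.size()`, printing a count where the valid names were clearly intended, and even listing `keySet()` directly would be unstable because `HashMap` iteration order varies. Sorting the keys makes the message both informative and deterministic, which is what lets `testUnknownQueryContextParsing` below assert the exact text. A condensed sketch of the difference:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ErrorMessageSketch {

    public static void main(String[] args) {
        Map<String, Object> contextNameMap = new HashMap<>();
        contextNameMap.put("type", new Object());
        contextNameMap.put("ctx", new Object());

        // Old message: prints the count, not the candidates.
        System.out.println("must be one of " + contextNameMap.size()); // must be one of 2

        // New message: sorted names, stable regardless of HashMap iteration order.
        List<String> keys = new ArrayList<>(contextNameMap.keySet());
        Collections.sort(keys);
        System.out.println("must be one of " + keys); // must be one of [ctx, type]
    }
}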
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cluster;
+
+import org.elasticsearch.cluster.SnapshotsInProgress.Entry;
+import org.elasticsearch.cluster.SnapshotsInProgress.ShardSnapshotStatus;
+import org.elasticsearch.cluster.SnapshotsInProgress.State;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.repositories.IndexId;
+import org.elasticsearch.snapshots.Snapshot;
+import org.elasticsearch.snapshots.SnapshotId;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Unit tests for the {@link SnapshotsInProgress} class and its inner classes.
+ */
+public class SnapshotsInProgressTests extends ESTestCase {
+
+    /**
+     * Makes sure that the indices being waited on before snapshotting commences
+     * are populated with all shards in the relocating or initializing state.
+     */
+    public void testWaitingIndices() {
+        final Snapshot snapshot = new Snapshot("repo", new SnapshotId("snap", randomAlphaOfLength(5)));
+        final String idx1Name = "idx1";
+        final String idx2Name = "idx2";
+        final String idx3Name = "idx3";
+        final String idx1UUID = randomAlphaOfLength(5);
+        final String idx2UUID = randomAlphaOfLength(5);
+        final String idx3UUID = randomAlphaOfLength(5);
+        final List<IndexId> indices = Arrays.asList(new IndexId(idx1Name, randomAlphaOfLength(5)),
+            new IndexId(idx2Name, randomAlphaOfLength(5)), new IndexId(idx3Name, randomAlphaOfLength(5)));
+        ImmutableOpenMap.Builder<ShardId, ShardSnapshotStatus> shards = ImmutableOpenMap.builder();
+
+        // test more than one waiting shard in an index
+        shards.put(new ShardId(idx1Name, idx1UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), State.WAITING));
+        shards.put(new ShardId(idx1Name, idx1UUID, 1), new ShardSnapshotStatus(randomAlphaOfLength(2), State.WAITING));
+        shards.put(new ShardId(idx1Name, idx1UUID, 2), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState()));
+        // test exactly one waiting shard in an index
+        shards.put(new ShardId(idx2Name, idx2UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), State.WAITING));
+        shards.put(new ShardId(idx2Name, idx2UUID, 1), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState()));
+        // test no waiting shards in an index
+        shards.put(new ShardId(idx3Name, idx3UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState()));
+        Entry entry = new Entry(snapshot, randomBoolean(), randomBoolean(), State.INIT,
+            indices, System.currentTimeMillis(), randomLong(), shards.build());
+
+        ImmutableOpenMap<String, List<ShardId>> waitingIndices = entry.waitingIndices();
+        assertEquals(2, waitingIndices.get(idx1Name).size());
+        assertEquals(1, waitingIndices.get(idx2Name).size());
+        assertFalse(waitingIndices.containsKey(idx3Name));
+    }
+
+    private State randomNonWaitingState() {
+        return randomFrom(Arrays.stream(State.values()).filter(s -> s != State.WAITING).collect(Collectors.toSet()));
+    }
+}
@@ -214,7 +214,6 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
         assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
         assertThat(stats.getSumAsString(), equalTo("0055.0"));
         assertThat(stats.getCount(), equalTo(10L));
-        assertThat(stats.getCountAsString(), equalTo("0010.0"));
         assertThat(stats.getSumOfSquares(), equalTo((double) 1 + 4 + 9 + 16 + 25 + 36 + 49 + 64 + 81 + 100));
         assertThat(stats.getSumOfSquaresAsString(), equalTo("0385.0"));
         assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
@@ -162,7 +162,6 @@ public class StatsIT extends AbstractNumericTestCase {
         assertThat(stats.getSum(), equalTo((double) 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10));
         assertThat(stats.getSumAsString(), equalTo("0055.0"));
         assertThat(stats.getCount(), equalTo(10L));
-        assertThat(stats.getCountAsString(), equalTo("0010.0"));
     }
 
     @Override
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.metrics.geocentroid;
+
+import org.apache.lucene.geo.GeoEncodingUtils;
+import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.test.geo.RandomGeoGenerator;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+public class InternalGeoCentroidTests extends InternalAggregationTestCase<InternalGeoCentroid> {
+
+    @Override
+    protected InternalGeoCentroid createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
+                                                     Map<String, Object> metaData) {
+        GeoPoint centroid = RandomGeoGenerator.randomPoint(random());
+
+        // Re-encode lat/longs to avoid rounding issue when testing InternalGeoCentroid#hashCode() and
+        // InternalGeoCentroid#equals()
+        int encodedLon = GeoEncodingUtils.encodeLongitude(centroid.lon());
+        centroid.resetLon(GeoEncodingUtils.decodeLongitude(encodedLon));
+        int encodedLat = GeoEncodingUtils.encodeLatitude(centroid.lat());
+        centroid.resetLat(GeoEncodingUtils.decodeLatitude(encodedLat));
+
+        return new InternalGeoCentroid("_name", centroid, 1, Collections.emptyList(), Collections.emptyMap());
+    }
+
+    @Override
+    protected Writeable.Reader<InternalGeoCentroid> instanceReader() {
+        return InternalGeoCentroid::new;
+    }
+
+    @Override
+    protected void assertReduced(InternalGeoCentroid reduced, List<InternalGeoCentroid> inputs) {
+        GeoPoint expected = new GeoPoint(0, 0);
+        int i = 0;
+        for (InternalGeoCentroid input : inputs) {
+            expected.reset(expected.lat() + (input.centroid().lat() - expected.lat()) / (i+1),
+                expected.lon() + (input.centroid().lon() - expected.lon()) / (i+1));
+            i++;
+        }
+        assertEquals(expected.getLat(), reduced.centroid().getLat(), 1E-5D);
+        assertEquals(expected.getLon(), reduced.centroid().getLon(), 1E-5D);
+    }
+}
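The assertReduced loop above computes the expected centroid with a running mean, m_k = m_{k-1} + (x_k - m_{k-1}) / k, which is algebraically the arithmetic mean but never accumulates a large intermediate sum. A standalone check of the recurrence:

public class RunningMeanSketch {

    public static void main(String[] args) {
        double[] xs = { 1.0, 2.0, 4.0, 8.0 };
        double mean = 0.0;
        for (int i = 0; i < xs.length; i++) {
            // m_k = m_{k-1} + (x_k - m_{k-1}) / k -- the same update used in assertReduced
            mean += (xs[i] - mean) / (i + 1);
        }
        System.out.println(mean); // 3.75, i.e. (1 + 2 + 4 + 8) / 4
    }
}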
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.internal;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.EqualsHashCodeTestUtils;
+
+import java.util.Arrays;
+
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.instanceOf;
+
+public class AliasFilterTests extends ESTestCase {
+
+    public void testEqualsAndHashCode() {
+        final QueryBuilder filter = QueryBuilders.termQuery("field", "value");
+        final String[] aliases = new String[] { "alias_0", "alias_1" };
+        final AliasFilter aliasFilter = new AliasFilter(filter, aliases);
+        final EqualsHashCodeTestUtils.CopyFunction<AliasFilter> aliasFilterCopyFunction = x -> {
+            assertThat(x.getQueryBuilder(), instanceOf(TermQueryBuilder.class));
+            final BytesStreamOutput out = new BytesStreamOutput();
+            x.getQueryBuilder().writeTo(out);
+            final QueryBuilder otherFilter = new TermQueryBuilder(out.bytes().streamInput());
+            final String[] otherAliases = Arrays.copyOf(x.getAliases(), x.getAliases().length);
+            return new AliasFilter(otherFilter, otherAliases);
+        };
+
+        final EqualsHashCodeTestUtils.MutateFunction<AliasFilter> aliasFilterMutationFunction = x -> {
+            assertThat(x.getQueryBuilder(), instanceOf(TermQueryBuilder.class));
+            final BytesStreamOutput out = new BytesStreamOutput();
+            x.getQueryBuilder().writeTo(out);
+            final QueryBuilder otherFilter = new TermQueryBuilder(out.bytes().streamInput());
+            assertThat(x.getAliases().length, greaterThan(0));
+            final String[] otherAliases = Arrays.copyOf(x.getAliases(), x.getAliases().length - 1);
+            return new AliasFilter(otherFilter, otherAliases);
+        };
+
+        EqualsHashCodeTestUtils.checkEqualsAndHashCode(aliasFilter, aliasFilterCopyFunction, aliasFilterMutationFunction);
+    }
+
+}
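For readers unfamiliar with `EqualsHashCodeTestUtils`: the copy function must produce a distinct instance that is equal to the original (and therefore must share its hash), while the mutate function must produce an unequal one. Below is a stripped-down sketch of that contract check, not the real utility (which also covers self-equality, null, and type mismatches); run it with assertions enabled (-ea).

import java.util.function.UnaryOperator;

public class EqualsHashCodeCheckSketch {

    // Minimal form of the check EqualsHashCodeTestUtils performs.
    static <T> void check(T original, UnaryOperator<T> copy, UnaryOperator<T> mutate) {
        T same = copy.apply(original);
        assert original.equals(same) : "copy must be equal to the original";
        assert original.hashCode() == same.hashCode() : "equal objects must share a hash";
        T different = mutate.apply(original);
        assert !original.equals(different) : "mutation must break equality";
    }

    public static void main(String[] args) {
        check("abc", s -> new String(s), s -> s + "!");
    }
}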
@@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.index.mapper.CompletionFieldMapper.CompletionFieldType;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -673,6 +674,31 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
         Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(createParseContext(parser)));
         assertEquals("category context must be an object, string, number or boolean", e.getMessage());
     }
 
+    public void testUnknownQueryContextParsing() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("completion")
+                .field("type", "completion")
+                .startArray("contexts")
+                .startObject()
+                .field("name", "ctx")
+                .field("type", "category")
+                .endObject()
+                .startObject()
+                .field("name", "type")
+                .field("type", "category")
+                .endObject()
+                .endArray()
+                .endObject().endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
+        CompletionFieldType completionFieldType = (CompletionFieldType) fieldMapper.fieldType();
+
+        Exception e = expectThrows(IllegalArgumentException.class, () -> completionFieldType.getContextMappings().get("brand"));
+        assertEquals("Unknown context name [brand], must be one of [ctx, type]", e.getMessage());
+    }
+
     public void testParsingContextFromDocument() throws Exception {
         CategoryContextMapping mapping = ContextBuilder.category("cat").field("category").build();
@@ -13,3 +13,9 @@ providing the source in bytes or as a string.
 In previous versions of Elasticsearch, delete by query requests without an explicit query
 were accepted, match_all was used as the default query and all documents were deleted
 as a result. From version 6.0.0, a `DeleteByQueryRequest` requires an explicit query be set.
+
+=== `InternalStats` and `Stats` getCountAsString() method removed
+
+The `count` value in the stats aggregation represents a doc count that shouldn't require a formatted
+version. This method was deprecated in 5.4 in favour of just using
+`String.valueOf(getCount())` if needed.
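A migration sketch for the removal described above, assuming the 5.x package location of the `Stats` interface; callers simply format the `long` doc count themselves:

import org.elasticsearch.search.aggregations.metrics.stats.Stats;

public class StatsCountMigrationSketch {

    // Before (removed in 6.0.0): stats.getCountAsString()
    // After: format the doc count directly.
    static String countAsString(Stats stats) {
        return String.valueOf(stats.getCount());
    }
}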
@@ -10,6 +10,8 @@ script inside a <<query-dsl-function-score-query,function score query>>.
 Statistics over the document collection are computed *per shard*, not per
 index.
 
+Note that the `_index` variable is not supported in the Painless language; `_index` is only defined when using the Groovy language.
+
 [float]
 === Nomenclature:
@@ -0,0 +1,84 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.node;
+
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.util.Constants;
+import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.NodeEnvironment;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+
+public class EvilNodeTests extends ESTestCase {
+
+    public void testDefaultPathDataIncludedInPathData() throws IOException {
+        final Path zero = createTempDir().toAbsolutePath();
+        final Path one = createTempDir().toAbsolutePath();
+        // creating hard links to directories is okay on macOS so we exercise it here
+        final int random;
+        if (Constants.MAC_OS_X) {
+            random = randomFrom(0, 1, 2);
+        } else {
+            random = randomFrom(0, 1);
+        }
+        final Path defaultPathData;
+        final Path choice = randomFrom(zero, one);
+        switch (random) {
+            case 0:
+                defaultPathData = choice;
+                break;
+            case 1:
+                defaultPathData = createTempDir().toAbsolutePath().resolve("link");
+                Files.createSymbolicLink(defaultPathData, choice);
+                break;
+            case 2:
+                defaultPathData = createTempDir().toAbsolutePath().resolve("link");
+                Files.createLink(defaultPathData, choice);
+                break;
+            default:
+                throw new AssertionError(Integer.toString(random));
+        }
+        final Settings settings = Settings.builder()
+            .put("path.home", createTempDir().toAbsolutePath())
+            .put("path.data.0", zero)
+            .put("path.data.1", one)
+            .put("default.path.data", defaultPathData)
+            .build();
+        try (NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings))) {
+            final Path defaultPathDataWithNodesAndId = defaultPathData.resolve("nodes/0");
+            Files.createDirectories(defaultPathDataWithNodesAndId);
+            final NodeEnvironment.NodePath defaultNodePath = new NodeEnvironment.NodePath(defaultPathDataWithNodesAndId);
+            Files.createDirectories(defaultNodePath.indicesPath.resolve(UUIDs.randomBase64UUID()));
+            final Logger mock = mock(Logger.class);
+            // nothing should happen here
+            Node.checkForIndexDataInDefaultPathData(settings, nodeEnv, mock);
+            verifyNoMoreInteractions(mock);
+        }
+    }
+
+}