Merge pull request #13349 from jasontedor/predicate-out-with-the-old-in-with-the-new
Remove and forbid the use of com.google.common.base.Predicate(s)?
commit 5e36c91e8c
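The diff below mechanically converts anonymous `com.google.common.base.Predicate` classes (with an `apply` method) to `java.util.function.Predicate` lambdas (with `test`), and Guava collection filtering to the JDK 8 Stream API. A minimal, self-contained sketch of that pattern; the class and variable names here are illustrative only and are not taken from the Elasticsearch sources:

```java
import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

public class PredicateMigrationSketch {

    public static void main(String[] args) {
        List<String> types = Arrays.asList("_default_", "tweet", "user");

        // Old style (Guava): an anonymous com.google.common.base.Predicate<String>
        // with an apply(String) method, passed to Collections2.filter(...).
        // New style: a JDK java.util.function.Predicate<String> lambda with test(...),
        // combined with the Stream API.
        Predicate<String> notDefault = type -> !"_default_".equals(type);

        List<String> filtered = types.stream()
                .filter(notDefault)          // Predicate.test is invoked per element
                .collect(Collectors.toList());

        System.out.println(filtered);        // prints [tweet, user]
    }
}
```

At call sites the visible difference is the method name: Guava predicates are invoked with `apply(value)`, JDK predicates with `test(value)`.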
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.action.admin.indices.mapping.get;
 
-import com.google.common.base.Predicate;
-import com.google.common.collect.Collections2;
 import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
@@ -51,8 +49,10 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
+import java.util.stream.Collectors;
 
 import static org.elasticsearch.common.util.CollectionUtils.newLinkedList;
 
@@ -96,14 +96,10 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
             typeIntersection = indexService.mapperService().types();
 
         } else {
-            typeIntersection = Collections2.filter(indexService.mapperService().types(), new Predicate<String>() {
-                @Override
-                public boolean apply(String type) {
-                    return Regex.simpleMatch(request.types(), type);
-                }
-
-            });
+            typeIntersection = indexService.mapperService().types()
+                    .stream()
+                    .filter(type -> Regex.simpleMatch(request.types(), type))
+                    .collect(Collectors.toCollection(ArrayList::new));
             if (typeIntersection.isEmpty()) {
                 throw new TypeMissingException(shardId.index(), request.types());
             }
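The hunk above replaces `Collections2.filter` with a stream pipeline collected into an `ArrayList`. A standalone sketch of the same idea, assuming a simplified stand-in for `Regex.simpleMatch` (the helper below is not the Elasticsearch implementation):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Collectors;

public class CollectionFilterSketch {

    public static void main(String[] args) {
        Collection<String> types = Arrays.asList("tweet", "user", "audit");
        String[] requested = {"t*"};

        // Guava's Collections2.filter returned a live filtered view; collecting into a
        // new ArrayList, as the change above does, materializes the result instead.
        Collection<String> intersection = types.stream()
                .filter(type -> simpleMatch(requested, type))
                .collect(Collectors.toCollection(ArrayList::new));

        System.out.println(intersection); // [tweet]
    }

    // Stand-in for Regex.simpleMatch: a trailing '*' matches any suffix.
    private static boolean simpleMatch(String[] patterns, String value) {
        for (String pattern : patterns) {
            if (pattern.endsWith("*")
                    && value.startsWith(pattern.substring(0, pattern.length() - 1))) {
                return true;
            }
            if (pattern.equals(value)) {
                return true;
            }
        }
        return false;
    }
}
```

Unlike Guava's filtered view, the collected `ArrayList` is a copy, so later changes to the source collection are not reflected in it.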
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.cluster.metadata;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.support.IndicesOptions;
@@ -50,8 +49,8 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.stream.Collectors;
 
-import static com.google.common.collect.Maps.filterEntries;
 import static com.google.common.collect.Maps.newHashMap;
 
 public class IndexNameExpressionResolver extends AbstractComponent {
@@ -600,12 +599,11 @@ public class IndexNameExpressionResolver extends AbstractComponent {
             } else {
                 // Other wildcard expressions:
                 final String pattern = expression;
-                matches = filterEntries(metaData.getAliasAndIndexLookup(), new Predicate<Map.Entry<String, AliasOrIndex>>() {
-                    @Override
-                    public boolean apply(@Nullable Map.Entry<String, AliasOrIndex> input) {
-                        return Regex.simpleMatch(pattern, input.getKey());
-                    }
-                });
+                matches = metaData.getAliasAndIndexLookup()
+                        .entrySet()
+                        .stream()
+                        .filter(e -> Regex.simpleMatch(pattern, e.getKey()))
+                        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
             }
             for (Map.Entry<String, AliasOrIndex> entry : matches.entrySet()) {
                 AliasOrIndex aliasOrIndex = entry.getValue();
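Here `Maps.filterEntries` gives way to streaming the entry set and collecting with `Collectors.toMap`. A sketch under the assumption of a plain String-to-Integer lookup map (the real code filters a map of alias and index metadata):

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class FilterEntriesSketch {

    public static void main(String[] args) {
        Map<String, Integer> lookup = new LinkedHashMap<>();
        lookup.put("logs-2015", 1);
        lookup.put("logs-2016", 2);
        lookup.put("metrics", 3);

        String pattern = "logs-*";

        Map<String, Integer> matches = lookup.entrySet()
                .stream()
                .filter(e -> simpleMatch(pattern, e.getKey()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

        System.out.println(matches); // the two "logs-*" entries remain; "metrics" is dropped
    }

    // Stand-in for Regex.simpleMatch with a single trailing-'*' pattern.
    private static boolean simpleMatch(String pattern, String value) {
        return pattern.endsWith("*")
                ? value.startsWith(pattern.substring(0, pattern.length() - 1))
                : pattern.equals(value);
    }
}
```

One difference worth knowing: `Collectors.toMap` as used here fills a new `HashMap`, whereas Guava's `filterEntries` returned a view backed by the original map.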
@@ -22,8 +22,6 @@ package org.elasticsearch.cluster.metadata;
 import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Collections2;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.util.CollectionUtil;
@@ -74,8 +72,11 @@ import java.util.Map;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
+import java.util.stream.Collectors;
 
-import static org.elasticsearch.common.settings.Settings.*;
+import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
+import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
 
 public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, FromXContentBuilder<MetaData>, ToXContent {
 
@@ -382,10 +383,11 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
             }
 
             // TODO: make this a List so we don't have to copy below
-            Collection<IndexWarmersMetaData.Entry> filteredWarmers = Collections2.filter(indexWarmersMetaData.entries(), new Predicate<IndexWarmersMetaData.Entry>() {
-                @Override
-                public boolean apply(IndexWarmersMetaData.Entry warmer) {
+            Collection<IndexWarmersMetaData.Entry> filteredWarmers =
+                indexWarmersMetaData
+                    .entries()
+                    .stream()
+                    .filter(warmer -> {
                     if (warmers.length != 0 && types.length != 0) {
                         return Regex.simpleMatch(warmers, warmer.name()) && Regex.simpleMatch(types, warmer.types());
                     } else if (warmers.length != 0) {
@@ -395,9 +397,9 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
                     } else {
                         return true;
                     }
-                }
+                    })
+                    .collect(Collectors.toCollection(ArrayList::new));
 
-            });
             if (!filteredWarmers.isEmpty()) {
                 mapBuilder.put(index, Collections.unmodifiableList(new ArrayList<>(filteredWarmers)));
             }
@@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing;
 
 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.google.common.base.Predicate;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.util.CollectionUtil;
@@ -40,6 +39,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Predicate;
 
 import static com.google.common.collect.Maps.newHashMap;
 import static com.google.common.collect.Sets.newHashSet;
@@ -294,7 +294,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
         List<ShardRouting> shards = new ArrayList<>();
         for (RoutingNode routingNode : this) {
             for (ShardRouting shardRouting : routingNode) {
-                if (predicate.apply(shardRouting)) {
+                if (predicate.test(shardRouting)) {
                     shards.add(shardRouting);
                 }
             }
@@ -20,8 +20,6 @@
 package org.elasticsearch.cluster.routing;
 
 import com.carrotsearch.hppc.IntSet;
-import com.google.common.base.Predicate;
-import com.google.common.base.Predicates;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.UnmodifiableIterator;
@@ -39,6 +37,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Predicate;
 
 import static com.google.common.collect.Maps.newHashMap;
 
@@ -184,19 +183,8 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
         return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ASSIGNED_PREDICATE);
     }
 
-    private static Predicate<ShardRouting> ACTIVE_PREDICATE = new Predicate<ShardRouting>() {
-        @Override
-        public boolean apply(ShardRouting shardRouting) {
-            return shardRouting.active();
-        }
-    };
-
-    private static Predicate<ShardRouting> ASSIGNED_PREDICATE = new Predicate<ShardRouting>() {
-        @Override
-        public boolean apply(ShardRouting shardRouting) {
-            return shardRouting.assignedToNode();
-        }
-    };
+    private static Predicate<ShardRouting> ACTIVE_PREDICATE = shardRouting -> shardRouting.active();
+    private static Predicate<ShardRouting> ASSIGNED_PREDICATE = shardRouting -> shardRouting.assignedToNode();
 
     // TODO: replace with JDK 8 native java.util.function.Predicate
     private GroupShardsIterator allSatisfyingPredicateShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets, Predicate<ShardRouting> predicate) {
@@ -210,7 +198,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
             }
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                 for (ShardRouting shardRouting : indexShardRoutingTable) {
-                    if (predicate.apply(shardRouting)) {
+                    if (predicate.test(shardRouting)) {
                         set.add(shardRouting.shardsIt());
                         if (includeRelocationTargets && shardRouting.relocating()) {
                             set.add(new PlainShardIterator(shardRouting.shardId(), Collections.singletonList(shardRouting.buildTargetRelocatingShard())));
@@ -225,11 +213,11 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
     }
 
     public ShardsIterator allShards(String[] indices) {
-        return allShardsSatisfyingPredicate(indices, Predicates.<ShardRouting>alwaysTrue(), false);
+        return allShardsSatisfyingPredicate(indices, shardRouting -> true, false);
     }
 
     public ShardsIterator allShardsIncludingRelocationTargets(String[] indices) {
-        return allShardsSatisfyingPredicate(indices, Predicates.<ShardRouting>alwaysTrue(), true);
+        return allShardsSatisfyingPredicate(indices, shardRouting -> true, true);
     }
 
     // TODO: replace with JDK 8 native java.util.function.Predicate
@@ -244,7 +232,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
             }
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                 for (ShardRouting shardRouting : indexShardRoutingTable) {
-                    if (predicate.apply(shardRouting)) {
+                    if (predicate.test(shardRouting)) {
                         shards.add(shardRouting);
                         if (includeRelocationTargets && shardRouting.relocating()) {
                             shards.add(shardRouting.buildTargetRelocatingShard());
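The routing-table hunks show two recurring simplifications: constant predicates such as `Predicates.alwaysTrue()` collapse to a trivial lambda, and single-method anonymous classes become lambda-valued fields, with call sites switching from `apply` to `test`. A compact sketch with an illustrative `Shard` stand-in (not the Elasticsearch `ShardRouting` class):

```java
import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

public class AlwaysTruePredicateSketch {

    // Old: new Predicate<Shard>() { public boolean apply(Shard s) { return s.active(); } };
    // New: a lambda bound to the same java.util.function.Predicate field.
    private static final Predicate<Shard> ACTIVE = shard -> shard.active();

    // Old: Predicates.<Shard>alwaysTrue()  ->  New: a trivial lambda.
    private static final Predicate<Shard> ANY = shard -> true;

    public static void main(String[] args) {
        List<Shard> shards = Arrays.asList(new Shard(true), new Shard(false));
        System.out.println(count(shards, ACTIVE)); // 1
        System.out.println(count(shards, ANY));    // 2
    }

    private static long count(List<Shard> shards, Predicate<Shard> predicate) {
        // Call sites change from predicate.apply(shard) to predicate.test(shard).
        return shards.stream().filter(predicate::test).count();
    }

    // Minimal stand-in for a routing entry; only the flag needed for the example.
    private static final class Shard {
        private final boolean active;
        Shard(boolean active) { this.active = active; }
        boolean active() { return active; }
    }
}
```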
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.cluster.routing.allocation.allocator;
 
-import com.google.common.base.Predicate;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.IntroSorter;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -40,6 +39,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.node.settings.NodeSettingsService;
 
 import java.util.*;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
 
@@ -226,13 +226,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
         private final float threshold;
         private final MetaData metaData;
 
-        private final Predicate<ShardRouting> assignedFilter = new Predicate<ShardRouting>() {
-            @Override
-            public boolean apply(ShardRouting input) {
-                return input.assignedToNode();
-            }
-        };
+        private final Predicate<ShardRouting> assignedFilter = shard -> shard.assignedToNode();
 
         public Balancer(ESLogger logger, RoutingAllocation allocation, WeightFunction weight, float threshold) {
             this.logger = logger;
@@ -20,10 +20,8 @@
 package org.elasticsearch.common.settings;
 
 import com.google.common.base.Charsets;
-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSortedMap;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Booleans;
@@ -1014,7 +1012,7 @@ public final class Settings implements ToXContent {
             final Matcher matcher = ARRAY_PATTERN.matcher(entry.getKey());
             if (matcher.matches()) {
                 prefixesToRemove.add(matcher.group(1));
-            } else if (Iterables.any(map.keySet(), startsWith(entry.getKey() + "."))) {
+            } else if (map.keySet().stream().anyMatch(key -> key.startsWith(entry.getKey() + "."))) {
                 prefixesToRemove.add(entry.getKey());
             }
         }
@@ -1223,22 +1221,4 @@ public final class Settings implements ToXContent {
             return new Settings(Collections.unmodifiableMap(map));
         }
     }
-
-    private static StartsWithPredicate startsWith(String prefix) {
-        return new StartsWithPredicate(prefix);
-    }
-
-    private static final class StartsWithPredicate implements Predicate<String> {
-
-        private String prefix;
-
-        public StartsWithPredicate(String prefix) {
-            this.prefix = prefix;
-        }
-
-        @Override
-        public boolean apply(String input) {
-            return input.startsWith(prefix);
-        }
-    }
 }
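In the Settings change, the hand-rolled `StartsWithPredicate` helper class disappears entirely because the same check can be written inline with `anyMatch`. A sketch with illustrative settings keys:

```java
import java.util.Arrays;
import java.util.List;

public class AnyMatchSketch {

    public static void main(String[] args) {
        List<String> keys = Arrays.asList("path.data.0", "path.data.1", "cluster.name");
        String prefix = "path.data" + ".";

        // Old: Iterables.any(keys, new StartsWithPredicate(prefix)) with a private helper
        // class implementing com.google.common.base.Predicate<String>.
        // New: the check is expressed directly on the stream, no helper class needed.
        boolean hasArrayEntries = keys.stream().anyMatch(key -> key.startsWith(prefix));

        System.out.println(hasArrayEntries); // true
    }
}
```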
@@ -19,7 +19,6 @@
 package org.elasticsearch.gateway;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Collections2;
 
 import org.apache.lucene.codecs.CodecUtil;
@@ -48,9 +47,12 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.file.*;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
+import java.util.function.Predicate;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 /**
  * MetaDataStateFormat is a base class to write checksummed
@@ -279,7 +281,12 @@ public abstract class MetaDataStateFormat<T> {
         // new format (ie. legacy == false) then we know that the latest version state ought to use this new format.
         // In case the state file with the latest version does not use the new format while older state files do,
         // the list below will be empty and loading the state will fail
-        for (PathAndStateId pathAndStateId : Collections2.filter(files, new StateIdAndLegacyPredicate(maxStateId, maxStateIdIsLegacy))) {
+        Collection<PathAndStateId> pathAndStateIds = files
+                .stream()
+                .filter(new StateIdAndLegacyPredicate(maxStateId, maxStateIdIsLegacy))
+                .collect(Collectors.toCollection(ArrayList::new));
+
+        for (PathAndStateId pathAndStateId : pathAndStateIds) {
             try {
                 final Path stateFile = pathAndStateId.file;
                 final long id = pathAndStateId.id;
@@ -328,7 +335,7 @@ public abstract class MetaDataStateFormat<T> {
         }
 
         @Override
-        public boolean apply(PathAndStateId input) {
+        public boolean test(PathAndStateId input) {
             return input.id == id && input.legacy == legacy;
         }
     }
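As the last hunk above shows, stateful predicates can stay as named classes; only the implemented interface and the overridden method name change. A sketch with a hypothetical `StateFile` standing in for `PathAndStateId`:

```java
import java.util.function.Predicate;

public class NamedPredicateSketch {

    // A named predicate keeps its class form when it carries state; switching from the
    // Guava interface to java.util.function.Predicate only renames apply(...) to test(...).
    static final class IdAndLegacyPredicate implements Predicate<StateFile> {
        private final long id;
        private final boolean legacy;

        IdAndLegacyPredicate(long id, boolean legacy) {
            this.id = id;
            this.legacy = legacy;
        }

        @Override
        public boolean test(StateFile input) {
            return input.id == id && input.legacy == legacy;
        }
    }

    // Minimal stand-in for the entries being filtered; not the Elasticsearch class.
    static final class StateFile {
        final long id;
        final boolean legacy;
        StateFile(long id, boolean legacy) { this.id = id; this.legacy = legacy; }
    }

    public static void main(String[] args) {
        IdAndLegacyPredicate predicate = new IdAndLegacyPredicate(3, false);
        System.out.println(predicate.test(new StateFile(3, false))); // true
        System.out.println(predicate.test(new StateFile(2, false))); // false
    }
}
```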
@@ -18,8 +18,6 @@
 */
 package org.elasticsearch.index.codec.postingsformat;
 
-import com.google.common.base.Predicate;
-import com.google.common.base.Predicates;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.FieldsProducer;
@@ -36,6 +34,7 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 
 import java.io.IOException;
 import java.util.Iterator;
+import java.util.function.Predicate;
 
 /**
  * This is the old default postings format for Elasticsearch that special cases
@@ -60,13 +59,8 @@ public class Elasticsearch090PostingsFormat extends PostingsFormat {
     public PostingsFormat getDefaultWrapped() {
         return bloomPostings.getDelegate();
    }
-    protected static final Predicate<String> UID_FIELD_FILTER = new Predicate<String>() {
-
-        @Override
-        public boolean apply(String s) {
-            return UidFieldMapper.NAME.equals(s);
-        }
-    };
+
+    protected static final Predicate<String> UID_FIELD_FILTER = field -> UidFieldMapper.NAME.equals(field);
 
     @Override
     public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.ObjectHashSet;
 import com.google.common.base.Function;
-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterators;
@@ -71,6 +70,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 
@@ -202,20 +202,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                 if (includingDefaultMapping) {
                     iterator = mappers.values().iterator();
                 } else {
-                    iterator = Iterators.filter(mappers.values().iterator(), NOT_A_DEFAULT_DOC_MAPPER);
+                    iterator = mappers.values().stream().filter(mapper -> !DEFAULT_MAPPING.equals(mapper.type())).iterator();
                 }
                 return Iterators.unmodifiableIterator(iterator);
             }
         };
     }
 
-    private static final Predicate<DocumentMapper> NOT_A_DEFAULT_DOC_MAPPER = new Predicate<DocumentMapper>() {
-        @Override
-        public boolean apply(DocumentMapper input) {
-            return !DEFAULT_MAPPING.equals(input.type());
-        }
-    };
-
     public AnalysisService analysisService() {
         return this.analysisService;
     }
@@ -21,7 +21,6 @@ package org.elasticsearch.indices.cluster;
 
 import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.google.common.base.Predicate;
 import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
@@ -524,13 +523,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
         } else if (isPeerRecovery(shardRouting)) {
             final DiscoveryNode sourceNode = findSourceNodeForPeerRecovery(routingTable, nodes, shardRouting);
             // check if there is an existing recovery going, and if so, and the source node is not the same, cancel the recovery to restart it
-            final Predicate<RecoveryStatus> shouldCancel = new Predicate<RecoveryStatus>() {
-                @Override
-                public boolean apply(@Nullable RecoveryStatus status) {
-                    return status.sourceNode().equals(sourceNode) == false;
-                }
-            };
-            if (recoveryTarget.cancelRecoveriesForShard(indexShard.shardId(), "recovery source node changed", shouldCancel)) {
+            if (recoveryTarget.cancelRecoveriesForShard(indexShard.shardId(), "recovery source node changed", status -> !status.sourceNode().equals(sourceNode))) {
                 logger.debug("[{}][{}] removing shard (recovery source changed), current [{}], global [{}])", shardRouting.index(), shardRouting.id(), currentRoutingEntry, shardRouting);
                 // closing the shard will also cancel any ongoing recovery.
                 indexService.removeShard(shardRouting.id(), "removing shard (recovery source node changed)");
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.indices.recovery;
 
-import com.google.common.base.Predicate;
-import com.google.common.base.Predicates;
 import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.logging.ESLogger;
@@ -34,6 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Predicate;
 
 /**
  * This class holds a collection of all on going recoveries on the current node (i.e., the node is the target node
@@ -139,7 +138,7 @@ public class RecoveriesCollection {
 
     /** cancel all ongoing recoveries for the given shard. typically because the shards is closed */
     public boolean cancelRecoveriesForShard(ShardId shardId, String reason) {
-        return cancelRecoveriesForShard(shardId, reason, Predicates.<RecoveryStatus>alwaysTrue());
+        return cancelRecoveriesForShard(shardId, reason, status -> true);
     }
 
     /**
@@ -160,7 +159,7 @@ public class RecoveriesCollection {
             // if we can't increment the status, the recovery is not there any more.
             if (status.tryIncRef()) {
                 try {
-                    cancel = shouldCancel.apply(status);
+                    cancel = shouldCancel.test(status);
                 } finally {
                     status.decRef();
                 }
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.indices.recovery;
 
-import com.google.common.base.Predicate;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFormatTooNewException;
 import org.apache.lucene.index.IndexFormatTooOldException;
@@ -58,6 +57,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
 
@@ -19,13 +19,13 @@
 
 package org.elasticsearch.search.aggregations;
 
-import com.google.common.base.Predicate;
 import com.google.common.collect.Iterables;
 
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Scorer;
 
 import java.io.IOException;
+import java.util.stream.StreamSupport;
 
 /**
  * Per-leaf bucket collector.
@@ -44,13 +44,8 @@ public abstract class LeafBucketCollector implements LeafCollector {
     };
 
     public static LeafBucketCollector wrap(Iterable<LeafBucketCollector> collectors) {
-        final Iterable<LeafBucketCollector> actualCollectors = Iterables.filter(collectors,
-                new Predicate<LeafBucketCollector> () {
-                    @Override
-                    public boolean apply(LeafBucketCollector c) {
-                        return c != NO_OP_COLLECTOR;
-                    }
-                });
+        final Iterable<LeafBucketCollector> actualCollectors =
+                StreamSupport.stream(collectors.spliterator(), false).filter(c -> c != NO_OP_COLLECTOR)::iterator;
         final LeafBucketCollector[] colls = Iterables.toArray(actualCollectors, LeafBucketCollector.class);
         switch (colls.length) {
         case 0:
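`Iterables.filter` returned an `Iterable`, while `Stream.filter` returns a `Stream`; the method reference `::iterator` bridges the two, as the hunk above does. A sketch with plain strings standing in for the collector instances:

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.StreamSupport;

public class StreamToIterableSketch {

    public static void main(String[] args) {
        List<String> collectors = Arrays.asList("terms", "noop", "histogram");

        // Iterable<T> has a single abstract method, iterator(), so a method reference to
        // Stream.iterator() on the filtered stream satisfies it directly.
        Iterable<String> realCollectors =
                StreamSupport.stream(collectors.spliterator(), false)
                        .filter(c -> !"noop".equals(c))::iterator;

        for (String collector : realCollectors) {
            System.out.println(collector); // terms, then histogram
        }
    }
}
```

Such an `Iterable` can be consumed only once, because every call to `iterator()` draws on the same underlying stream; that is fine in the change above, where the result is immediately turned into an array.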
@@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.indices.shards;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.google.common.base.Predicate;
 import org.apache.lucene.index.CorruptIndexException;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.client.Requests;
@@ -42,6 +41,7 @@ import org.junit.Test;
 
 import java.util.*;
 import java.util.concurrent.ExecutionException;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
@@ -209,7 +209,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase {
         }
 
         @Override
-        public boolean apply(Settings settings) {
+        public boolean test(Settings settings) {
             return nodesWithShard.contains(settings.get("name"));
         }
 
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.action.admin.indices.upgrade;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.segments.IndexSegments;
@@ -43,6 +42,7 @@ import org.junit.BeforeClass;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
@@ -97,12 +97,7 @@ public class UpgradeIT extends ESBackcompatTestCase {
         if (globalCompatibilityVersion().before(Version.V_1_4_0_Beta1)) {
             // before 1.4 and the wait_if_ongoing flag, flushes could fail randomly, so we
             // need to continue to try flushing until all shards succeed
-            assertTrue(awaitBusy(new Predicate<Object>() {
-                @Override
-                public boolean apply(Object o) {
-                    return flush(indexName).getFailedShards() == 0;
-                }
-            }));
+            assertTrue(awaitBusy(() -> flush(indexName).getFailedShards() == 0));
         } else {
             assertEquals(0, flush(indexName).getFailedShards());
         }
@@ -143,15 +138,12 @@ public class UpgradeIT extends ESBackcompatTestCase {
 
         logger.info("--> Running upgrade on index " + indexToUpgrade);
         assertNoFailures(client().admin().indices().prepareUpgrade(indexToUpgrade).get());
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
+        awaitBusy(() -> {
             try {
                 return isUpgraded(client(), indexToUpgrade);
            } catch (Exception e) {
                 throw ExceptionsHelper.convertToRuntime(e);
             }
-            }
         });
         logger.info("--> Single index upgrade complete");
 
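In the test changes, the anonymous `Predicate<Object>` passed to `awaitBusy`, whose argument was always ignored, becomes a no-argument lambda. A sketch with a simplified stand-in for the utility; the real `ESTestCase.awaitBusy` signature and timeout handling may differ:

```java
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BooleanSupplier;

public class AwaitBusySketch {

    // Simplified stand-in: poll a condition until it holds or a small budget is exhausted.
    static boolean awaitBusy(BooleanSupplier condition) throws InterruptedException {
        for (int i = 0; i < 100; i++) {
            if (condition.getAsBoolean()) {
                return true;
            }
            TimeUnit.MILLISECONDS.sleep(10);
        }
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        AtomicInteger count = new AtomicInteger();
        new Thread(count::incrementAndGet).start();

        // Old: awaitBusy(new Predicate<Object>() { public boolean apply(Object o) { ... } })
        // with an ignored argument. New: the condition reads as a plain no-argument lambda.
        boolean reached = awaitBusy(() -> count.get() == 1);
        System.out.println(reached);
    }
}
```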
@@ -18,17 +18,14 @@
 */
 package org.elasticsearch.action.support.replication;
 
-import com.google.common.base.Predicate;
 import org.apache.lucene.index.CorruptIndexException;
 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionWriteResponse;
 import org.elasticsearch.action.UnavailableShardsException;
 import org.elasticsearch.action.WriteConsistencyLevel;
 import org.elasticsearch.action.support.ActionFilter;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.PlainActionFuture;
-import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateObserver;
@@ -40,16 +37,15 @@ import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
-import org.elasticsearch.common.Nullable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
 import org.elasticsearch.index.shard.IndexShardNotStartedException;
 import org.elasticsearch.index.shard.IndexShardState;
 import org.elasticsearch.index.shard.ShardId;
@@ -70,7 +66,6 @@ import org.junit.Test;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
@@ -79,8 +74,11 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state;
 import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithStartedPrimary;
-import static org.elasticsearch.cluster.metadata.IndexMetaData.*;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 public class ShardReplicationTests extends ESTestCase {
 
@@ -441,12 +439,7 @@ public class ShardReplicationTests extends ESTestCase {
         t.start();
         // shard operation should be ongoing, so the counter is at 2
         // we have to wait here because increment happens in thread
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(@Nullable Object input) {
-                return (count.get() == 2);
-            }
-        });
+        awaitBusy(() -> count.get() == 2);
 
         assertIndexShardCounter(2);
         assertThat(transport.capturedRequests().length, equalTo(0));
@@ -505,12 +498,7 @@ public class ShardReplicationTests extends ESTestCase {
         t.start();
         // shard operation should be ongoing, so the counter is at 2
         // we have to wait here because increment happens in thread
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(@Nullable Object input) {
-                return count.get() == 2;
-            }
-        });
+        awaitBusy(() -> count.get() == 2);
         ((ActionWithDelay) action).countDownLatch.countDown();
         t.join();
         // operation should have finished and counter decreased because no outstanding replica requests
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.bwcompat;
 
-import com.google.common.base.Predicate;
 import com.google.common.util.concurrent.ListenableFuture;
 
 import org.apache.lucene.index.IndexWriter;
@@ -292,13 +291,12 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
      * Waits for the index to show up in the cluster state in closed state
      */
     void ensureClosed(final String index) throws InterruptedException {
-        assertTrue(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
+        assertTrue(awaitBusy(() -> {
                 ClusterState state = client().admin().cluster().prepareState().get().getState();
                 return state.metaData().hasIndex(index) && state.metaData().index(index).getState() == IndexMetaData.State.CLOSE;
             }
-        }));
+            )
+        );
     }
 
     /**
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.cache.recycler;
 
-import com.google.common.base.Predicate;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.elasticsearch.common.inject.Inject;
@@ -45,12 +44,8 @@ public class MockPageCacheRecycler extends PageCacheRecycler {
             // not empty, we might be executing on a shared cluster that keeps on obtaining
             // and releasing pages, lets make sure that after a reasonable timeout, all master
             // copy (snapshot) have been released
-            boolean success = ESTestCase.awaitBusy(new Predicate<Object>() {
-                @Override
-                public boolean apply(Object input) {
-                    return Sets.intersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()).isEmpty();
-                }
-            });
+            boolean success =
+                ESTestCase.awaitBusy(() -> Sets.intersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()).isEmpty());
             if (!success) {
                 masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet());
                 ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.client.transport;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.support.PlainListenableActionFuture;
@@ -30,7 +29,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.node.internal.InternalSettingsPreparer;
 import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.transport.TransportService;
 import org.junit.Test;
@@ -72,12 +70,7 @@ public class TransportClientRetryIT extends ESIntegTestCase {
         int size = cluster().size();
         //kill all nodes one by one, leaving a single master/data node at the end of the loop
         for (int j = 1; j < size; j++) {
-            internalCluster().stopRandomNode(new Predicate<Settings>() {
-                @Override
-                public boolean apply(Settings input) {
-                    return true;
-                }
-            });
+            internalCluster().stopRandomNode(input -> true);
 
             ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest().local(true);
             ClusterState clusterState;
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.cluster;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
 import org.elasticsearch.cluster.metadata.IndexTemplateFilter;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
@@ -85,12 +84,7 @@ public class ClusterModuleTests extends ModuleTestCase {
     public void testRegisterClusterDynamicSetting() {
         ClusterModule module = new ClusterModule(Settings.EMPTY);
         module.registerClusterDynamicSetting("foo.bar", Validator.EMPTY);
-        assertInstanceBindingWithAnnotation(module, DynamicSettings.class, new Predicate<DynamicSettings>() {
-            @Override
-            public boolean apply(DynamicSettings dynamicSettings) {
-                return dynamicSettings.hasDynamicSetting("foo.bar");
-            }
-        }, ClusterDynamicSettings.class);
+        assertInstanceBindingWithAnnotation(module, DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), ClusterDynamicSettings.class);
     }
 
     public void testRegisterIndexDynamicSettingDuplicate() {
@@ -105,12 +99,7 @@ public class ClusterModuleTests extends ModuleTestCase {
     public void testRegisterIndexDynamicSetting() {
         ClusterModule module = new ClusterModule(Settings.EMPTY);
         module.registerIndexDynamicSetting("foo.bar", Validator.EMPTY);
-        assertInstanceBindingWithAnnotation(module, DynamicSettings.class, new Predicate<DynamicSettings>() {
-            @Override
-            public boolean apply(DynamicSettings dynamicSettings) {
-                return dynamicSettings.hasDynamicSetting("foo.bar");
-            }
-        }, IndexDynamicSettings.class);
+        assertInstanceBindingWithAnnotation(module, DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), IndexDynamicSettings.class);
     }
 
     public void testRegisterAllocationDeciderDuplicate() {
@@ -18,7 +18,6 @@
 */
 package org.elasticsearch.cluster;
 
-import com.google.common.base.Predicate;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -570,12 +569,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
         invoked2.await();
 
         // whenever we test for no tasks, we need to awaitBusy since this is a live node
-        assertTrue(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                return clusterService.pendingTasks().isEmpty();
-            }
-        }));
+        assertTrue(awaitBusy(() -> clusterService.pendingTasks().isEmpty()));
         waitNoPendingTasksOnAll();
 
         final CountDownLatch block2 = new CountDownLatch(1);
@@ -688,12 +682,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
         internalCluster().stopRandomNonMasterNode();
 
         // there should not be any master as the minimum number of required eligible masters is not met
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object obj) {
-                return clusterService1.state().nodes().masterNode() == null && clusterService1.state().status() == ClusterState.ClusterStateStatus.APPLIED;
-            }
-        });
+        awaitBusy(() -> clusterService1.state().nodes().masterNode() == null && clusterService1.state().status() == ClusterState.ClusterStateStatus.APPLIED);
         assertThat(testService1.master(), is(false));
 
         // bring the node back up
@@ -19,8 +19,6 @@

 package org.elasticsearch.cluster;

-import com.google.common.base.Predicate;
-
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Priority;
@@ -36,12 +34,16 @@ import org.junit.Test;

 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;

 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.Scope;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.isOneOf;
+import static org.hamcrest.Matchers.not;

 @ClusterScope(scope = Scope.TEST, numDataNodes = 0)
 @ESIntegTestCase.SuppressLocalMode
@@ -98,12 +100,9 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
         }

         internalCluster().stopCurrentMasterNode();
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object obj) {
-                ClusterState state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
-                return state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID);
-            }
+        awaitBusy(() -> {
+            ClusterState clusterState = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
+            return clusterState.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID);
         });
         state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
         assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true));
@@ -278,20 +277,26 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
     }

     private void assertNoMasterBlockOnAllNodes() throws InterruptedException {
-        assertThat(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object obj) {
+        Predicate<Client> hasNoMasterBlock = client -> {
+            ClusterState state = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
+            return state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID);
+        };
+        assertTrue(awaitBusy(
+            () -> {
                 boolean success = true;
                 for (Client client : internalCluster()) {
-                    ClusterState state = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
-                    success &= state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID);
+                    boolean clientHasNoMasterBlock = hasNoMasterBlock.test(client);
                     if (logger.isDebugEnabled()) {
-                        logger.debug("Checking for NO_MASTER_BLOCK on client: {} NO_MASTER_BLOCK: [{}]", client, state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID));
+                        logger.debug("Checking for NO_MASTER_BLOCK on client: {} NO_MASTER_BLOCK: [{}]", client, clientHasNoMasterBlock);
                     }
+                    success &= clientHasNoMasterBlock;
                 }
                 return success;
-            }
-        }, 20, TimeUnit.SECONDS), equalTo(true));
+            },
+            20,
+            TimeUnit.SECONDS
+        )
+        );
     }

     @Test

@@ -19,8 +19,6 @@

 package org.elasticsearch.cluster;

-import com.google.common.base.Predicate;
-
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
@@ -244,14 +242,11 @@ public class NoMasterNodeIT extends ESIntegTestCase {
         logger.info("Cluster state:\n" + clusterState.getState().prettyPrint());

         internalCluster().stopRandomDataNode();
-        assertThat(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
+        assertTrue(awaitBusy(() -> {
                 ClusterState state = client().admin().cluster().prepareState().setLocal(true).get().getState();
                 return state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID);
             }
-        }), equalTo(true));
-
+        ));

         GetResponse getResponse = client().prepareGet("test1", "type1", "1").get();
         assertExists(getResponse);

@@ -19,6 +19,7 @@

 package org.elasticsearch.cluster.ack;

+import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
@@ -34,8 +35,8 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.AliasMetaData;
 import org.elasticsearch.cluster.metadata.AliasOrIndex;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
 import org.elasticsearch.common.settings.Settings;
@@ -44,17 +45,19 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.junit.Test;
-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Predicate;

 import java.util.List;
 import java.util.concurrent.TimeUnit;

-import static org.elasticsearch.cluster.metadata.IndexMetaData.*;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.State;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;

 @ClusterScope(minNumDataNodes = 2)
 public class AckIT extends ESIntegTestCase {
@@ -120,9 +123,7 @@ public class AckIT extends ESIntegTestCase {
         /* Since we don't wait for the ack here we have to wait until the search request has been executed from the master
          * otherwise the test infra might have already deleted the index and the search request fails on all shards causing
          * the test to fail too. We simply wait until the the warmer has been installed and also clean it up afterwards.*/
-        assertTrue(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        assertTrue(awaitBusy(() -> {
                 for (Client client : clients()) {
                     GetWarmersResponse getWarmersResponse = client.admin().indices().prepareGetWarmers().setLocal(true).get();
                     if (getWarmersResponse.warmers().size() != 1) {
@@ -130,7 +131,6 @@ public class AckIT extends ESIntegTestCase {
                     }
                 }
                 return true;
-            }
         }));
         assertAcked(client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("custom_warmer"));
     }
@@ -161,9 +161,7 @@ public class AckIT extends ESIntegTestCase {

         DeleteWarmerResponse deleteWarmerResponse = client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("custom_warmer").setTimeout("0s").get();
         assertFalse(deleteWarmerResponse.isAcknowledged());
-        assertTrue(awaitBusy(new Predicate<Object>() { // wait until they are all deleted
-            @Override
-            public boolean apply(Object input) {
+        assertTrue(awaitBusy(() -> {
                 for (Client client : clients()) {
                     GetWarmersResponse getWarmersResponse = client.admin().indices().prepareGetWarmers().setLocal(true).get();
                     if (getWarmersResponse.warmers().size() > 0) {
@@ -171,7 +169,6 @@ public class AckIT extends ESIntegTestCase {
                     }
                 }
                 return true;
-            }
         }));
     }

@@ -20,7 +20,6 @@
 package org.elasticsearch.cluster.allocation;

 import com.carrotsearch.hppc.ObjectIntHashMap;
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
@@ -78,10 +77,8 @@ public class AwarenessAllocationIT extends ESIntegTestCase {
         final String node3 = internalCluster().startNode(Settings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_2").build());

         // On slow machines the initial relocation might be delayed
-        assertThat(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-
+        assertThat(awaitBusy(
+            () -> {
                 logger.info("--> waiting for no relocation");
                 ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("3").setWaitForRelocatingShards(0).get();
                 if (clusterHealth.isTimedOut()) {
@@ -100,8 +97,10 @@ public class AwarenessAllocationIT extends ESIntegTestCase {
                 }
             }
             return counts.get(node3) == totalPrimaries;
-            }
-        }, 10, TimeUnit.SECONDS), equalTo(true));
+            },
+            10,
+            TimeUnit.SECONDS
+        ), equalTo(true));
     }

     @Test

@@ -19,7 +19,6 @@

 package org.elasticsearch.cluster.routing.allocation.decider;

-import com.google.common.base.Predicate;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterInfo;
 import org.elasticsearch.cluster.ClusterInfoService;
@@ -29,7 +28,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
@@ -37,15 +43,24 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
 import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
 import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.ESAllocationTestCase;
-import org.elasticsearch.test.gateway.NoopGatewayAllocator;
 import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.test.ESAllocationTestCase;
+import org.elasticsearch.test.gateway.NoopGatewayAllocator;
 import org.junit.Test;

-import java.util.*;
+import java.util.AbstractMap;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;

-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -899,12 +914,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
     public void logShardStates(ClusterState state) {
         RoutingNodes rn = state.getRoutingNodes();
         logger.info("--> counts: total: {}, unassigned: {}, initializing: {}, relocating: {}, started: {}",
-                rn.shards(new Predicate<ShardRouting>() {
-                    @Override
-                    public boolean apply(ShardRouting input) {
-                        return true;
-                    }
-                }).size(),
+                rn.shards(shard -> true).size(),
                 rn.shardsWithState(UNASSIGNED).size(),
                 rn.shardsWithState(INITIALIZING).size(),
                 rn.shardsWithState(RELOCATING).size(),

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.common.inject;

-import com.google.common.base.Predicate;
 import org.elasticsearch.common.inject.spi.Element;
 import org.elasticsearch.common.inject.spi.Elements;
 import org.elasticsearch.common.inject.spi.InstanceBinding;
@@ -34,6 +33,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Predicate;

 /**
  * Base testcase for testing {@link Module} implementations.
@@ -160,7 +160,7 @@ public abstract class ModuleTestCase extends ESTestCase {
                 InstanceBinding binding = (InstanceBinding) element;
                 if (to.equals(binding.getKey().getTypeLiteral().getType())) {
                     if (annotation == null || annotation.equals(binding.getKey().getAnnotationType())) {
-                        assertTrue(tester.apply(to.cast(binding.getInstance())));
+                        assertTrue(tester.test(to.cast(binding.getInstance())));
                         return;
                     }
                 }

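For context only (not part of the commit): Guava's com.google.common.base.Predicate exposes apply(T), while java.util.function.Predicate exposes test(T), which is why the ModuleTestCase tester above now calls test(...) and why the anonymous classes throughout this change collapse into lambdas. A minimal, dependency-free sketch of the before/after, with illustrative names only:

import java.util.function.Predicate;

public class PredicateMigrationSketch {
    public static void main(String[] args) {
        // Old style (Guava), kept as a comment so the sketch has no Guava dependency:
        // new com.google.common.base.Predicate<String>() {
        //     @Override
        //     public boolean apply(String input) { return input.startsWith("foo."); }
        // }
        // ...and callers invoke guavaPredicate.apply("foo.bar").

        // New style (JDK 8): the same check as a lambda, invoked via test(...).
        Predicate<String> hasFooPrefix = setting -> setting.startsWith("foo.");
        System.out.println(hasFooPrefix.test("foo.bar"));      // true
        System.out.println(hasFooPrefix.negate().test("baz")); // true; negate/and/or come for free
    }
}
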
@@ -21,7 +21,6 @@ package org.elasticsearch.common.util;

 import com.carrotsearch.randomizedtesting.RandomizedContext;
 import com.carrotsearch.randomizedtesting.SeedUtils;
-import com.google.common.base.Predicate;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.lucene.util.Accountable;
@@ -59,12 +58,7 @@ public class MockBigArrays extends BigArrays {
             // not empty, we might be executing on a shared cluster that keeps on obtaining
             // and releasing arrays, lets make sure that after a reasonable timeout, all master
             // copy (snapshot) have been released
-            boolean success = ESTestCase.awaitBusy(new Predicate<Object>() {
-                @Override
-                public boolean apply(Object input) {
-                    return Sets.intersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet()).isEmpty();
-                }
-            });
+            boolean success = ESTestCase.awaitBusy(() -> Sets.intersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet()).isEmpty());
             if (!success) {
                 masterCopy.keySet().retainAll(ACQUIRED_ARRAYS.keySet());
                 ACQUIRED_ARRAYS.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on

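For context only (not part of the commit): the awaitBusy conversions above and below assume the test helper now accepts a functional interface; the IndicesLifecycleListenerIT hunk further down assigns its lambda to a java.util.function.BooleanSupplier. A rough, self-contained sketch of that shape, with a hypothetical stand-in for the helper rather than the real ESTestCase signature:

import java.util.function.BooleanSupplier;

public final class AwaitBusySketch {
    // Hypothetical stand-in for ESTestCase.awaitBusy: polls the condition until it
    // returns true or the attempt budget is exhausted.
    static boolean awaitBusy(BooleanSupplier condition, int attempts, long sleepMillis) throws InterruptedException {
        for (int i = 0; i < attempts; i++) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(sleepMillis);
        }
        return condition.getAsBoolean();
    }

    public static void main(String[] args) throws InterruptedException {
        long deadline = System.currentTimeMillis() + 50;
        // Caller side: the anonymous Predicate<Object> of the old code collapses to a lambda.
        boolean done = awaitBusy(() -> System.currentTimeMillis() >= deadline, 10, 10);
        System.out.println("condition met: " + done);
    }
}
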
@@ -19,9 +19,7 @@

 package org.elasticsearch.discovery;

-import com.google.common.base.Predicate;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
@@ -32,7 +30,11 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateListener;
+import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.action.shard.ShardStateAction;
 import org.elasticsearch.cluster.block.ClusterBlock;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
@@ -65,7 +67,16 @@ import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration;
-import org.elasticsearch.test.disruption.*;
+import org.elasticsearch.test.disruption.BlockClusterStateProcessing;
+import org.elasticsearch.test.disruption.IntermittentLongGCDisruption;
+import org.elasticsearch.test.disruption.LongGCDisruption;
+import org.elasticsearch.test.disruption.NetworkDelaysPartition;
+import org.elasticsearch.test.disruption.NetworkDisconnectPartition;
+import org.elasticsearch.test.disruption.NetworkPartition;
+import org.elasticsearch.test.disruption.NetworkUnresponsivePartition;
+import org.elasticsearch.test.disruption.ServiceDisruptionScheme;
+import org.elasticsearch.test.disruption.SingleNodeDisruption;
+import org.elasticsearch.test.disruption.SlowClusterStateProcessing;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.transport.TransportException;
@@ -76,8 +87,22 @@ import org.junit.Before;
 import org.junit.Test;

 import java.io.IOException;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
@@ -86,7 +111,10 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import static org.elasticsearch.test.ESIntegTestCase.Scope;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;

 @ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0)
 @ESIntegTestCase.SuppressLocalMode
@@ -1354,12 +1382,14 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

     private void assertDiscoveryCompleted(List<String> nodes) throws InterruptedException {
         for (final String node : nodes) {
-            assertTrue("node [" + node + "] is still joining master", awaitBusy(new Predicate<Object>() {
-                @Override
-                public boolean apply(Object input) {
-                    return !((ZenDiscovery) internalCluster().getInstance(Discovery.class, node)).joiningCluster();
-                }
-            }, 30, TimeUnit.SECONDS));
+            assertTrue(
+                "node [" + node + "] is still joining master",
+                awaitBusy(
+                    () -> !((ZenDiscovery) internalCluster().getInstance(Discovery.class, node)).joiningCluster(),
+                    30,
+                    TimeUnit.SECONDS
+                )
+            );
         }
     }
 }

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.gateway;

-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableSet;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.FailedNodeException;
@@ -286,12 +285,7 @@ public class AsyncShardFetchTests extends ESTestCase {
             entry = simulations.get(nodeId);
             if (entry == null) {
                 // we are simulating a master node switch, wait for it to not be null
-                awaitBusy(new Predicate<Object>() {
-                    @Override
-                    public boolean apply(Object input) {
-                        return simulations.containsKey(nodeId);
-                    }
-                });
+                awaitBusy(() -> simulations.containsKey(nodeId));
             }
             assert entry != null;
             entry.executeLatch.await();

@@ -20,7 +20,6 @@
 package org.elasticsearch.gateway;

 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -160,9 +159,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {


     private void assertMetaState(final String nodeName, final String indexName, final boolean shouldBe) throws Exception {
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
+        awaitBusy(() -> {
                 logger.info("checking if meta state exists...");
                 try {
                     return shouldBe == metaStateExists(nodeName, indexName);
@@ -172,7 +169,6 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {
                     // this here is a hack, would be much better to use for example a WatchService
                     return false;
                 }
-            }
         });
         boolean inMetaSate = metaStateExists(nodeName, indexName);
         if (shouldBe) {

@@ -19,7 +19,6 @@

 package org.elasticsearch.gateway;

-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
@@ -36,10 +35,12 @@ import static org.elasticsearch.client.Requests.clusterHealthRequest;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.test.ESIntegTestCase.*;
+import static org.elasticsearch.test.ESIntegTestCase.Scope;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;

 /**
  *
@@ -92,12 +93,10 @@ public class QuorumGatewayIT extends ESIntegTestCase {
                 ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForNodes("1")).actionGet();
                 assertThat(clusterHealth.isTimedOut(), equalTo(false));
                 assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.RED)); // nothing allocated yet
-                assertThat(awaitBusy(new Predicate<Object>() {
-                    @Override
-                    public boolean apply(Object input) {
+                assertTrue(awaitBusy(() -> {
                     ClusterStateResponse clusterStateResponse = internalCluster().smartClient().admin().cluster().prepareState().setMasterNodeTimeout("500ms").get();
                     return clusterStateResponse.getState() != null && clusterStateResponse.getState().routingTable().index("test") != null;
-                }}), equalTo(true)); // wait until we get a cluster state - could be null if we quick enough.
+                })); // wait until we get a cluster state - could be null if we quick enough.
                 final ClusterStateResponse clusterStateResponse = internalCluster().smartClient().admin().cluster().prepareState().setMasterNodeTimeout("500ms").get();
                 assertThat(clusterStateResponse.getState(), notNullValue());
                 assertThat(clusterStateResponse.getState().routingTable().index("test"), notNullValue());
@@ -149,15 +148,12 @@ public class QuorumGatewayIT extends ESIntegTestCase {
             @Override
             public void doAfterNodes(int numNodes, final Client activeClient) throws Exception {
                 if (numNodes == 1) {
-                    assertThat(awaitBusy(new Predicate<Object>() {
-                        @Override
-                        public boolean apply(Object input) {
+                    assertTrue(awaitBusy(() -> {
                         logger.info("--> running cluster_health (wait for the shards to startup)");
                         ClusterHealthResponse clusterHealth = activeClient.admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForNodes("2").waitForActiveShards(test.numPrimaries * 2)).actionGet();
                         logger.info("--> done cluster_health, status " + clusterHealth.getStatus());
                         return (!clusterHealth.isTimedOut()) && clusterHealth.getStatus() == ClusterHealthStatus.YELLOW;
-                        }
-                    }, 30, TimeUnit.SECONDS), equalTo(true));
+                    }, 30, TimeUnit.SECONDS));
                     logger.info("--> one node is closed -- index 1 document into the remaining nodes");
                     activeClient.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().field("field", "value3").endObject()).get();
                     assertNoFailures(activeClient.admin().indices().prepareRefresh().get());

@@ -19,14 +19,12 @@

 package org.elasticsearch.index;

-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableSet;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.index.IndexAction;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
-import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.RoutingNodes;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
@@ -36,14 +34,16 @@ import org.elasticsearch.discovery.zen.fd.FaultDetection;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.transport.MockTransportService;
-import org.elasticsearch.transport.TransportModule;
 import org.elasticsearch.transport.TransportService;
 import org.junit.Test;

 import java.util.Collection;
 import java.util.List;

-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
 import static org.hamcrest.Matchers.equalTo;

 /**
@@ -142,12 +142,7 @@ public class TransportIndexFailuresIT extends ESIntegTestCase {
         state = getNodeClusterState(randomFrom(nodes.toArray(Strings.EMPTY_ARRAY)));
         RoutingNodes rn = state.getRoutingNodes();
         logger.info("--> counts: total: {}, unassigned: {}, initializing: {}, relocating: {}, started: {}",
-                rn.shards(new Predicate<ShardRouting>() {
-                    @Override
-                    public boolean apply(ShardRouting input) {
-                        return true;
-                    }
-                }).size(),
+                rn.shards(input -> true).size(),
                 rn.shardsWithState(UNASSIGNED).size(),
                 rn.shardsWithState(INITIALIZING).size(),
                 rn.shardsWithState(RELOCATING).size(),

@@ -19,22 +19,21 @@

 package org.elasticsearch.index.codec.postingformat;

-import com.google.common.base.Predicates;
 import com.google.common.collect.Iterators;

 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.FilterLeafReader;
 import org.apache.lucene.index.SegmentWriteState;
 import org.elasticsearch.common.util.BloomFilter;
-import org.elasticsearch.index.codec.postingsformat.BloomFilterPostingsFormat.BloomFilteredFieldsConsumer;
 import org.elasticsearch.index.codec.postingsformat.BloomFilterPostingsFormat;
+import org.elasticsearch.index.codec.postingsformat.BloomFilterPostingsFormat.BloomFilteredFieldsConsumer;
 import org.elasticsearch.index.codec.postingsformat.Elasticsearch090PostingsFormat;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;

 import java.io.IOException;
 import java.util.Iterator;
+import java.util.stream.StreamSupport;

 /** read-write version with blooms for testing */
 public class Elasticsearch090RWPostingsFormat extends Elasticsearch090PostingsFormat {
@@ -55,7 +54,7 @@ public class Elasticsearch090RWPostingsFormat extends Elasticsearch090PostingsFo
         Fields maskedFields = new FilterLeafReader.FilterFields(fields) {
             @Override
             public Iterator<String> iterator() {
-                return Iterators.filter(this.in.iterator(), Predicates.not(UID_FIELD_FILTER));
+                return StreamSupport.stream(this.in.spliterator(), false).filter(UID_FIELD_FILTER.negate()).iterator();
             }
         };
         fieldsConsumer.getDelegate().write(maskedFields);

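For context only (not part of the commit): the postings-format hunk above also drops Guava's Iterators.filter/Predicates.not in favor of streaming the Iterable and negating a java.util.function.Predicate. A small stand-alone sketch of that idiom; the field names are made up and a plain List stands in for Lucene's Fields (which is itself an Iterable<String>):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.StreamSupport;

public class FilterIteratorSketch {
    public static void main(String[] args) {
        List<String> fields = Arrays.asList("_uid", "title", "body");
        // Stand-in for UID_FIELD_FILTER: matches the field the iterator should hide.
        Predicate<String> uidFieldFilter = "_uid"::equals;

        // Old style (Guava): Iterators.filter(fields.iterator(), Predicates.not(uidFieldFilter))
        // New style (JDK 8): stream the Iterable and negate the predicate.
        Iterator<String> visible = StreamSupport.stream(fields.spliterator(), false)
                .filter(uidFieldFilter.negate())
                .iterator();

        visible.forEachRemaining(System.out::println); // prints "title" and "body"
    }
}
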
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.index.engine;

-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
@@ -65,15 +64,12 @@ public class InternalEngineMergeIT extends ESIntegTestCase {
             logger.info("index round [{}] - segments {}, total merges {}, current merge {}", i, stats.getPrimaries().getSegments().getCount(), stats.getPrimaries().getMerge().getTotal(), stats.getPrimaries().getMerge().getCurrent());
         }
         final long upperNumberSegments = 2 * numOfShards * 10;
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        awaitBusy(() -> {
                 IndicesStatsResponse stats = client().admin().indices().prepareStats().setSegments(true).setMerge(true).get();
                 logger.info("numshards {}, segments {}, total merges {}, current merge {}", numOfShards, stats.getPrimaries().getSegments().getCount(), stats.getPrimaries().getMerge().getTotal(), stats.getPrimaries().getMerge().getCurrent());
                 long current = stats.getPrimaries().getMerge().getCurrent();
                 long count = stats.getPrimaries().getSegments().getCount();
                 return count < upperNumberSegments && current == 0;
-            }
         });
         IndicesStatsResponse stats = client().admin().indices().prepareStats().setSegments(true).setMerge(true).get();
         logger.info("numshards {}, segments {}, total merges {}, current merge {}", numOfShards, stats.getPrimaries().getSegments().getCount(), stats.getPrimaries().getMerge().getTotal(), stats.getPrimaries().getMerge().getCurrent());

@@ -20,7 +20,6 @@ package org.elasticsearch.index.store;

 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.google.common.base.Charsets;
-import com.google.common.base.Predicate;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.IndexFileNames;
@@ -107,8 +106,16 @@ import java.util.concurrent.atomic.AtomicBoolean;

 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;

 @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
 public class CorruptedFileIT extends ESIntegTestCase {
@@ -279,13 +286,10 @@ public class CorruptedFileIT extends ESIntegTestCase {
         client().admin().indices().prepareUpdateSettings("test").setSettings(build).get();
         client().admin().cluster().prepareReroute().get();

-        boolean didClusterTurnRed = awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        boolean didClusterTurnRed = awaitBusy(() -> {
                 ClusterHealthStatus test = client().admin().cluster()
                         .health(Requests.clusterHealthRequest("test")).actionGet().getStatus();
                 return test == ClusterHealthStatus.RED;
-            }
         }, 5, TimeUnit.MINUTES);// sometimes on slow nodes the replication / recovery is just dead slow
         final ClusterHealthResponse response = client().admin().cluster()
                 .health(Requests.clusterHealthRequest("test")).get();

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.indices;

-import com.google.common.base.Predicate;
 import com.google.common.collect.Maps;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
@@ -45,12 +44,17 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.BooleanSupplier;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.cluster.routing.allocation.decider.DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION;
 import static org.elasticsearch.common.settings.Settings.builder;
-import static org.elasticsearch.index.shard.IndexShardState.*;
+import static org.elasticsearch.index.shard.IndexShardState.CLOSED;
+import static org.elasticsearch.index.shard.IndexShardState.CREATED;
+import static org.elasticsearch.index.shard.IndexShardState.POST_RECOVERY;
+import static org.elasticsearch.index.shard.IndexShardState.RECOVERING;
+import static org.elasticsearch.index.shard.IndexShardState.STARTED;
 import static org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import static org.elasticsearch.test.ESIntegTestCase.Scope;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -203,9 +207,7 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {
     private static void assertShardStatesMatch(final IndexShardStateChangeListener stateChangeListener, final int numShards, final IndexShardState... shardStates)
             throws InterruptedException {

-        Predicate<Object> waitPredicate = new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        BooleanSupplier waitPredicate = () -> {
                 if (stateChangeListener.shardStates.size() != numShards) {
                     return false;
                 }
@@ -220,7 +222,6 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {
                     }
                 }
                 return true;
-            }
         };
         if (!awaitBusy(waitPredicate, 1, TimeUnit.MINUTES)) {
             fail("failed to observe expect shard states\n" +

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.indices.memory;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
@@ -54,13 +53,9 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
         final long expected1ShardSize = internalCluster().getInstance(IndexingMemoryController.class).indexingBufferSize().bytes();
         final long expected2ShardsSize = expected1ShardSize / 2;
 
-        boolean success = awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                return shard1.engine().config().getIndexingBufferSize().bytes() <= expected2ShardsSize &&
-                        shard2.engine().config().getIndexingBufferSize().bytes() <= expected2ShardsSize;
-            }
-        });
+        boolean success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() <= expected2ShardsSize &&
+                shard2.engine().config().getIndexingBufferSize().bytes() <= expected2ShardsSize
+        );
 
         if (!success) {
             fail("failed to update shard indexing buffer size. expected [" + expected2ShardsSize + "] shard1 [" +
@@ -70,12 +65,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
         }
 
         client().admin().indices().prepareDelete("test2").get();
-        success = awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                return shard1.engine().config().getIndexingBufferSize().bytes() >= expected1ShardSize;
-            }
-        });
+        success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() >= expected1ShardSize);
 
         if (!success) {
             fail("failed to update shard indexing buffer size after deleting shards. expected [" + expected1ShardSize + "] got [" +
@@ -95,12 +85,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
         ensureGreen();
 
         final IndexShard shard1 = internalCluster().getInstance(IndicesService.class).indexService("test1").shard(0);
-        boolean success = awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                return shard1.engine().config().getIndexingBufferSize().bytes() == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes();
-            }
-        });
+        boolean success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes());
         if (!success) {
             fail("failed to update shard indexing buffer size due to inactive state. expected [" + EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER + "] got [" +
                     shard1.engine().config().getIndexingBufferSize().bytes() + "]"
@@ -109,12 +94,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
 
         index("test1", "type", "1", "f", 1);
 
-        success = awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                return shard1.engine().config().getIndexingBufferSize().bytes() > EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes();
-            }
-        });
+        success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() > EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes());
         if (!success) {
             fail("failed to update shard indexing buffer size due to inactive state. expected something larger then [" + EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER + "] got [" +
                     shard1.engine().config().getIndexingBufferSize().bytes() + "]"
@@ -123,12 +103,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase {
 
         flush(); // clean translogs
 
-        success = awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                return shard1.engine().config().getIndexingBufferSize().bytes() == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes();
-            }
-        });
+        success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes());
         if (!success) {
             fail("failed to update shard indexing buffer size due to inactive state. expected [" + EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER + "] got [" +
                     shard1.engine().config().getIndexingBufferSize().bytes() + "]"

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.indices.store;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
@@ -55,7 +54,6 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing;
 import org.elasticsearch.test.disruption.SingleNodeDisruption;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.test.transport.MockTransportService;
-import org.elasticsearch.transport.TransportModule;
 import org.elasticsearch.transport.TransportRequestOptions;
 import org.elasticsearch.transport.TransportService;
 import org.junit.Test;
@@ -400,22 +398,12 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
     }
 
     private boolean waitForShardDeletion(final String server, final String index, final int shard) throws InterruptedException {
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
-                return !Files.exists(shardDirectory(server, index, shard));
-            }
-        });
+        awaitBusy(() -> !Files.exists(shardDirectory(server, index, shard)));
         return Files.exists(shardDirectory(server, index, shard));
     }
 
     private boolean waitForIndexDeletion(final String server, final String index) throws InterruptedException {
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
-                return !Files.exists(indexDirectory(server, index));
-            }
-        });
+        awaitBusy(() -> !Files.exists(indexDirectory(server, index)));
         return Files.exists(indexDirectory(server, index));
     }
 

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.percolator;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
 import org.elasticsearch.action.count.CountResponse;
@@ -38,17 +37,27 @@ import org.junit.Test;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder;
 import static org.elasticsearch.client.Requests.clusterHealthRequest;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.elasticsearch.percolator.PercolatorIT.convertFromTextArray;
 import static org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import static org.elasticsearch.test.ESIntegTestCase.Scope;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.emptyArray;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
 
 @ClusterScope(scope = Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0)
 public class RecoveryPercolatorIT extends ESIntegTestCase {
@@ -239,12 +248,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
                 .get();
         ensureGreen();
 
-        final Client client = internalCluster().client(new Predicate<Settings>() {
-            @Override
-            public boolean apply(Settings input) {
-                return input.getAsBoolean("node.stay", true);
-            }
-        });
+        final Client client = internalCluster().client(input -> input.getAsBoolean("node.stay", true));
         final int numQueries = randomIntBetween(50, 100);
         logger.info("--> register a queries");
         for (int i = 0; i < numQueries; i++) {
@@ -314,12 +318,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
         };
         Thread t = new Thread(r);
         t.start();
-        Predicate<Settings> nodePredicate = new Predicate<Settings>() {
-            @Override
-            public boolean apply(Settings input) {
-                return !input.getAsBoolean("node.stay", false);
-            }
-        };
+        Predicate<Settings> nodePredicate = input -> !input.getAsBoolean("node.stay", false);
         try {
             // 1 index, 2 primaries, 2 replicas per primary
             for (int i = 0; i < 4; i++) {

@@ -19,8 +19,6 @@
 
 package org.elasticsearch.percolator;
 
-import com.google.common.base.Predicate;
-
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.percolate.PercolateResponse;
@@ -42,7 +40,9 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.percolator.PercolatorIT.convertFromTextArray;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.emptyArray;
+import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
@@ -136,14 +136,11 @@ public class TTLPercolatorIT extends ESIntegTestCase {
 
         // See comment in SimpleTTLTests
        logger.info("Checking if the ttl purger has run");
-        assertThat(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        assertTrue(awaitBusy(() -> {
             IndicesStatsResponse indicesStatsResponse = client.admin().indices().prepareStats("test").clear().setIndexing(true).get();
             // TTL deletes one doc, but it is indexed in the primary shard and replica shards
             return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() == test.dataCopies;
-            }
-        }, 5, TimeUnit.SECONDS), equalTo(true));
+        }, 5, TimeUnit.SECONDS));
 
         percolateResponse = client.preparePercolate()
                 .setIndices("test").setDocumentType("type1")
@@ -200,15 +197,12 @@ public class TTLPercolatorIT extends ESIntegTestCase {
 
         }
         refresh();
-        assertThat(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        assertTrue(awaitBusy(() -> {
             IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setIndexing(true).get();
             logger.debug("delete count [{}]", indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount());
             // TTL deletes one doc, but it is indexed in the primary shard and replica shards
             return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() != 0;
-            }
-        }, 5, TimeUnit.SECONDS), equalTo(true));
+        }, 5, TimeUnit.SECONDS));
         internalCluster().wipeIndices("test");
         client().admin().indices().prepareCreate("test")
                 .addMapping("type1", typeMapping)

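The percolator hunks above also swap assertThat(awaitBusy(...), equalTo(true)) for assertTrue(awaitBusy(...)), since awaitBusy already yields a boolean. A minimal sketch of that assertion change, assuming JUnit 4 on the classpath; AssertAwaitSketch and conditionHolds() are hypothetical names and the helper is a trivial stand-in:

    // Illustrative sketch only; not code from the commit.
    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    import static org.junit.Assert.assertTrue;

    class AssertAwaitSketch {
        // Trivial stand-in: evaluates the condition once instead of polling.
        static boolean awaitBusy(BooleanSupplier condition, long maxWait, TimeUnit unit) {
            return condition.getAsBoolean();
        }

        static boolean conditionHolds() {
            return true; // placeholder for the per-test check
        }

        static void example() {
            // Before: assertThat(awaitBusy(new Predicate<Object>() { ... }, 5, TimeUnit.SECONDS), equalTo(true));
            // After: the boolean result is asserted directly.
            assertTrue(awaitBusy(AssertAwaitSketch::conditionHolds, 5, TimeUnit.SECONDS));
        }
    }
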
@@ -18,8 +18,6 @@
  */
 package org.elasticsearch.recovery;
 
-import com.google.common.base.Predicate;
-import com.google.common.base.Predicates;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -29,7 +27,11 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.indices.recovery.*;
+import org.elasticsearch.indices.recovery.RecoveriesCollection;
+import org.elasticsearch.indices.recovery.RecoveryFailedException;
+import org.elasticsearch.indices.recovery.RecoveryState;
+import org.elasticsearch.indices.recovery.RecoveryStatus;
+import org.elasticsearch.indices.recovery.RecoveryTarget;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.junit.Test;
@@ -38,6 +40,7 @@ import java.util.ArrayList;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Predicate;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.lessThan;
@@ -130,13 +133,8 @@ public class RecoveriesCollectionTests extends ESSingleNodeTestCase {
             RecoveriesCollection.StatusRef statusRef = collection.getStatus(recoveryId);
             toClose.add(statusRef);
             ShardId shardId = statusRef.status().shardId();
-            assertFalse("should not have cancelled recoveries", collection.cancelRecoveriesForShard(shardId, "test", Predicates.<RecoveryStatus>alwaysFalse()));
-            final Predicate<RecoveryStatus> shouldCancel = new Predicate<RecoveryStatus>() {
-                @Override
-                public boolean apply(RecoveryStatus status) {
-                    return status.recoveryId() == recoveryId;
-                }
-            };
+            assertFalse("should not have cancelled recoveries", collection.cancelRecoveriesForShard(shardId, "test", status -> false));
+            final Predicate<RecoveryStatus> shouldCancel = status -> status.recoveryId() == recoveryId;
             assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test", shouldCancel));
             assertThat("we should still have on recovery", collection.size(), equalTo(1));
             statusRef = collection.getStatus(recoveryId);

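The recovery test above moves from Guava's Predicate to java.util.function.Predicate: apply(...) becomes test(...), Predicates.alwaysFalse() becomes a constant lambda, and Predicates.not(...) becomes the built-in negate(). A self-contained sketch of those standard-library equivalents; RecoveryStatusLike and the sample id are hypothetical stand-ins for the types used in the test:

    // Illustrative sketch only; not code from the commit.
    import java.util.function.Predicate;

    class PredicateMigrationSketch {
        static class RecoveryStatusLike {
            private final long recoveryId;
            RecoveryStatusLike(long recoveryId) {
                this.recoveryId = recoveryId;
            }
            long recoveryId() {
                return recoveryId;
            }
        }

        public static void main(String[] args) {
            long recoveryId = 42L;
            // Guava's Predicates.alwaysFalse() becomes a constant lambda...
            Predicate<RecoveryStatusLike> none = status -> false;
            // ...and an anonymous class with apply(...) becomes a lambda evaluated via test(...).
            Predicate<RecoveryStatusLike> shouldCancel = status -> status.recoveryId() == recoveryId;
            // Guava's Predicates.not(p) is replaced by the default method negate().
            Predicate<RecoveryStatusLike> shouldKeep = shouldCancel.negate();

            RecoveryStatusLike status = new RecoveryStatusLike(42L);
            System.out.println(none.test(status));         // false
            System.out.println(shouldCancel.test(status)); // true
            System.out.println(shouldKeep.test(status));   // false
        }
    }
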
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.recovery;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.admin.indices.stats.ShardStats;
@@ -46,7 +45,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
-import static org.hamcrest.Matchers.equalTo;
 
 public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
 
@@ -289,19 +287,20 @@
 
             //if there was an error we try to wait and see if at some point it'll get fixed
             logger.info("--> trying to wait");
-            assertThat(awaitBusy(new Predicate<Object>() {
-                @Override
-                public boolean apply(Object o) {
-                    boolean error = false;
+            assertTrue(awaitBusy(() -> {
+                        boolean errorOccurred = false;
                         for (int i = 0; i < iterations; i++) {
                             SearchResponse searchResponse = client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get();
                             if (searchResponse.getHits().totalHits() != numberOfDocs) {
-                                error = true;
+                                errorOccurred = true;
                             }
                         }
-                    return !error;
-                }
-            }, 5, TimeUnit.MINUTES), equalTo(true));
+                        return !errorOccurred;
+                    },
+                    5,
+                    TimeUnit.MINUTES
+            )
+            );
         }
 
         //lets now make the test fail if it was supposed to fail

@@ -21,7 +21,6 @@ package org.elasticsearch.recovery;
 
 import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.procedures.IntProcedure;
-import com.google.common.base.Predicate;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.lucene.index.IndexFileNames;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@@ -416,14 +415,11 @@ public class RelocationIT extends ESIntegTestCase {
 
         // Lets wait a bit and then move again to hopefully trigger recovery cancellations.
         boolean applied = awaitBusy(
-                new Predicate<Object>() {
-                    @Override
-                    public boolean apply(Object input) {
-                        RecoveryResponse recoveryResponse = internalCluster().client(redNodeName).admin().indices().prepareRecoveries(indexName)
-                                .get();
+                () -> {
+                    RecoveryResponse recoveryResponse =
+                            internalCluster().client(redNodeName).admin().indices().prepareRecoveries(indexName).get();
                     return !recoveryResponse.shardRecoveryStates().get(indexName).isEmpty();
                 }
-                }
         );
         assertTrue(applied);
         client().admin().indices().prepareUpdateSettings(indexName).setSettings(

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
@@ -73,14 +72,7 @@ public class SearchWithRejectionsIT extends ESIntegTestCase {
             } catch (Throwable t) {
             }
         }
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
-                // we must wait here because the requests to release search contexts might still be in flight
-                // although the search request has already returned
-                return client().admin().indices().prepareStats().execute().actionGet().getTotal().getSearch().getOpenContexts() == 0;
-            }
-        }, 1, TimeUnit.SECONDS);
+        awaitBusy(() -> client().admin().indices().prepareStats().execute().actionGet().getTotal().getSearch().getOpenContexts() == 0, 1, TimeUnit.SECONDS);
         indicesStats = client().admin().indices().prepareStats().execute().actionGet();
         assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0l));
     }

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.snapshots;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
 import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -49,6 +48,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.hamcrest.Matchers.equalTo;
@@ -92,12 +92,7 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase {
     }
 
     public static void stopNode(final String node) throws IOException {
-        internalCluster().stopRandomNode(new Predicate<Settings>() {
-            @Override
-            public boolean apply(Settings settings) {
-                return settings.get("name").equals(node);
-            }
-        });
+        internalCluster().stopRandomNode(settings -> settings.get("name").equals(node));
     }
 
     public void waitForBlock(String node, String repository, TimeValue timeout) throws InterruptedException {
@@ -186,18 +181,8 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase {
 
         public BlockingClusterStateListener(ClusterService clusterService, final String blockOn, final String countOn, Priority passThroughPriority, TimeValue timeout) {
             this.clusterService = clusterService;
-            this.blockOn = new Predicate<ClusterChangedEvent>() {
-                @Override
-                public boolean apply(ClusterChangedEvent clusterChangedEvent) {
-                    return clusterChangedEvent.source().startsWith(blockOn);
-                }
-            };
-            this.countOn = new Predicate<ClusterChangedEvent>() {
-                @Override
-                public boolean apply(ClusterChangedEvent clusterChangedEvent) {
-                    return clusterChangedEvent.source().startsWith(countOn);
-                }
-            };
+            this.blockOn = clusterChangedEvent -> clusterChangedEvent.source().startsWith(blockOn);
+            this.countOn = clusterChangedEvent -> clusterChangedEvent.source().startsWith(countOn);
             this.latch = new CountDownLatch(1);
             this.passThroughPriority = passThroughPriority;
             this.timeout = timeout;
@@ -210,13 +195,13 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase {
 
         @Override
         public void clusterChanged(ClusterChangedEvent event) {
-            if (blockOn.apply(event)) {
+            if (blockOn.test(event)) {
                 logger.info("blocking cluster state tasks on [{}]", event.source());
                 assert stopWaitingAt < 0; // Make sure we are the first time here
                 stopWaitingAt = System.currentTimeMillis() + timeout.getMillis();
                 addBlock();
             }
-            if (countOn.apply(event)) {
+            if (countOn.test(event)) {
                 count++;
             }
         }

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.snapshots;
 
-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ExceptionsHelper;
@@ -84,8 +83,24 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
 import static org.elasticsearch.index.shard.IndexShard.INDEX_REFRESH_INTERVAL;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesExist;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesMissing;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateExists;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateMissing;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.lessThan;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.startsWith;
 
 public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCase {
 
@@ -1857,21 +1872,11 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
     }
 
     private boolean waitForIndex(final String index, TimeValue timeout) throws InterruptedException {
-        return awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
-                return client().admin().indices().prepareExists(index).execute().actionGet().isExists();
-            }
-        }, timeout.millis(), TimeUnit.MILLISECONDS);
+        return awaitBusy(() -> client().admin().indices().prepareExists(index).execute().actionGet().isExists(), timeout.millis(), TimeUnit.MILLISECONDS);
     }
 
     private boolean waitForRelocationsToStart(final String index, TimeValue timeout) throws InterruptedException {
-        return awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
-                return client().admin().cluster().prepareHealth(index).execute().actionGet().getRelocatingShards() > 0;
-            }
-        }, timeout.millis(), TimeUnit.MILLISECONDS);
+        return awaitBusy(() -> client().admin().cluster().prepareHealth(index).execute().actionGet().getRelocatingShards() > 0, timeout.millis(), TimeUnit.MILLISECONDS);
     }
 
     @Test

@@ -19,8 +19,6 @@
 package org.elasticsearch.test;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Collections2;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
@@ -33,10 +31,12 @@ import org.elasticsearch.common.transport.TransportAddress;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.Random;
+import java.util.stream.Collectors;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
 import static org.hamcrest.Matchers.equalTo;
@@ -90,12 +90,10 @@ public class CompositeTestCluster extends TestCluster {
     }
 
     private Collection<ExternalNode> runningNodes() {
-        return Collections2.filter(Arrays.asList(externalNodes), new Predicate<ExternalNode>() {
-            @Override
-            public boolean apply(ExternalNode input) {
-                return input.running();
-            }
-        });
+        return Arrays
+                .stream(externalNodes)
+                .filter(input -> input.running())
+                .collect(Collectors.toCollection(ArrayList::new));
     }
 
     /**

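runningNodes() above trades Guava's Collections2.filter for a stream pipeline. One behavioral note: the Guava call returned a live filtered view, while collect(...) copies the matches eagerly into a new ArrayList, so later changes to the source are not reflected in the result. Illustrative sketch only; NodeLike is a hypothetical stand-in for ExternalNode:

    // Illustrative sketch only; not code from the commit.
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collection;
    import java.util.stream.Collectors;

    class StreamFilterSketch {
        static class NodeLike {
            private final boolean running;
            NodeLike(boolean running) {
                this.running = running;
            }
            boolean running() {
                return running;
            }
        }

        public static void main(String[] args) {
            NodeLike[] externalNodes = { new NodeLike(true), new NodeLike(false), new NodeLike(true) };

            // Filter once and copy the matches into a new, independent ArrayList.
            Collection<NodeLike> running = Arrays.stream(externalNodes)
                    .filter(NodeLike::running)
                    .collect(Collectors.toCollection(ArrayList::new));

            System.out.println(running.size()); // 2
        }
    }
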
@@ -25,15 +25,7 @@ import com.carrotsearch.randomizedtesting.annotations.TestGroup;
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.google.common.base.Joiner;
-import com.google.common.base.Predicate;
 import org.apache.http.impl.client.HttpClients;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.elasticsearch.common.network.NetworkAddress;
-import org.elasticsearch.index.shard.MergeSchedulerConfig;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -63,6 +55,7 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.search.ClearScrollResponse;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.AdminClient;
 import org.elasticsearch.client.Client;
@@ -164,6 +157,7 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.BooleanSupplier;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
@@ -172,8 +166,14 @@ import static org.elasticsearch.common.util.CollectionUtils.eagerPartition;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.test.XContentTestUtils.convertToMap;
 import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.Matchers.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
+import static org.hamcrest.Matchers.emptyIterable;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;
 
 /**
  * {@link ESIntegTestCase} is an abstract base class to run integration
@@ -1000,9 +1000,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
             throws InterruptedException {
         final AtomicLong lastKnownCount = new AtomicLong(-1);
         long lastStartCount = -1;
-        Predicate<Object> testDocs = new Predicate<Object>() {
-            @Override
-            public boolean apply(Object o) {
+        BooleanSupplier testDocs = () -> {
             if (indexer != null) {
                 lastKnownCount.set(indexer.totalIndexedDocs());
             }
@@ -1023,7 +1021,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
                 logger.debug("[{}] docs indexed. waiting for [{}]", lastKnownCount.get(), numDocs);
             }
             return lastKnownCount.get() >= numDocs;
-            }
         };
 
         while (!awaitBusy(testDocs, maxWaitTime, maxWaitTimeUnit)) {

@@ -29,7 +29,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
-import com.google.common.base.Predicate;
 import org.apache.lucene.uninverting.UninvertingReader;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
@@ -79,6 +78,8 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.function.BooleanSupplier;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
 import static org.hamcrest.Matchers.equalTo;
@@ -438,19 +439,19 @@ public abstract class ESTestCase extends LuceneTestCase {
         }
     }
 
-    public static boolean awaitBusy(Predicate<?> breakPredicate) throws InterruptedException {
-        return awaitBusy(breakPredicate, 10, TimeUnit.SECONDS);
+    public static boolean awaitBusy(BooleanSupplier breakSupplier) throws InterruptedException {
+        return awaitBusy(breakSupplier, 10, TimeUnit.SECONDS);
     }
 
     // After 1s, we stop growing the sleep interval exponentially and just sleep 1s until maxWaitTime
     private static final long AWAIT_BUSY_THRESHOLD = 1000L;
 
-    public static boolean awaitBusy(Predicate<?> breakPredicate, long maxWaitTime, TimeUnit unit) throws InterruptedException {
+    public static boolean awaitBusy(BooleanSupplier breakSupplier, long maxWaitTime, TimeUnit unit) throws InterruptedException {
         long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit);
         long timeInMillis = 1;
         long sum = 0;
         while (sum + timeInMillis < maxTimeInMillis) {
-            if (breakPredicate.apply(null)) {
+            if (breakSupplier.getAsBoolean()) {
                 return true;
             }
             Thread.sleep(timeInMillis);
@@ -459,7 +460,7 @@ public abstract class ESTestCase extends LuceneTestCase {
         }
         timeInMillis = maxTimeInMillis - sum;
         Thread.sleep(Math.max(timeInMillis, 0));
-        return breakPredicate.apply(null);
+        return breakSupplier.getAsBoolean();
     }
 
     public static boolean terminate(ExecutorService... services) throws InterruptedException {

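The awaitBusy change above is the API the rest of this commit builds on: the wait condition is now a BooleanSupplier polled via getAsBoolean() instead of a Guava Predicate invoked with apply(null). The sketch below mirrors the visible parts of that loop for illustration only; the backoff update marked as assumed is not shown in the hunk and is just one plausible reading of the 1s-threshold comment. AwaitBusyContractSketch is a hypothetical name.

    // Illustrative sketch only; not the actual ESTestCase code.
    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    class AwaitBusyContractSketch {
        static boolean awaitBusy(BooleanSupplier breakSupplier, long maxWaitTime, TimeUnit unit) throws InterruptedException {
            long maxTimeInMillis = unit.toMillis(maxWaitTime);
            long timeInMillis = 1;
            long sum = 0;
            while (sum + timeInMillis < maxTimeInMillis) {
                if (breakSupplier.getAsBoolean()) {
                    return true;
                }
                Thread.sleep(timeInMillis);
                sum += timeInMillis;
                timeInMillis = Math.min(1000L, timeInMillis * 2); // assumed growth, capped at 1s
            }
            timeInMillis = maxTimeInMillis - sum;
            Thread.sleep(Math.max(timeInMillis, 0));
            return breakSupplier.getAsBoolean();
        }

        public static void main(String[] args) throws InterruptedException {
            long start = System.currentTimeMillis();
            // The supplier is re-evaluated on every poll, so any side effects inside it run repeatedly.
            boolean done = awaitBusy(() -> System.currentTimeMillis() - start > 50, 10, TimeUnit.SECONDS);
            System.out.println(done); // true
        }
    }
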
@@ -18,8 +18,6 @@
  */
 package org.elasticsearch.test;
 
-import com.google.common.base.Predicate;
-
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
@@ -155,9 +153,7 @@ final class ExternalNode implements Closeable {
     }
 
     static boolean waitForNode(final Client client, final String name) throws InterruptedException {
-        return ESTestCase.awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(java.lang.Object input) {
+        return ESTestCase.awaitBusy(() -> {
             final NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().get();
             final NodeInfo[] nodes = nodeInfos.getNodes();
             for (NodeInfo info : nodes) {
@@ -166,7 +162,6 @@ final class ExternalNode implements Closeable {
                 }
             }
             return false;
-            }
         }, 30, TimeUnit.SECONDS);
     }
 

@@ -24,12 +24,7 @@ import com.carrotsearch.randomizedtesting.SysGlobals;
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-import com.google.common.base.Predicate;
-import com.google.common.base.Predicates;
-import com.google.common.collect.Collections2;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -127,13 +122,20 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static junit.framework.Assert.fail;
-import static org.apache.lucene.util.LuceneTestCase.*;
+import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY;
+import static org.apache.lucene.util.LuceneTestCase.rarely;
+import static org.apache.lucene.util.LuceneTestCase.usually;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.ESTestCase.assertBusy;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.junit.Assert.assertThat;
 
 /**
@@ -532,14 +534,13 @@ public final class InternalTestCluster extends TestCluster {
     }
 
     private synchronized NodeAndClient getRandomNodeAndClient() {
-        Predicate<NodeAndClient> all = Predicates.alwaysTrue();
-        return getRandomNodeAndClient(all);
+        return getRandomNodeAndClient(nc -> true);
     }
 
 
     private synchronized NodeAndClient getRandomNodeAndClient(Predicate<NodeAndClient> predicate) {
         ensureOpen();
-        Collection<NodeAndClient> values = Collections2.filter(nodes.values(), predicate);
+        Collection<NodeAndClient> values = nodes.values().stream().filter(predicate).collect(Collectors.toCollection(ArrayList::new));
         if (!values.isEmpty()) {
             int whichOne = random.nextInt(values.size());
             for (NodeAndClient nodeAndClient : values) {
@@ -588,8 +589,9 @@ public final class InternalTestCluster extends TestCluster {
             return;
         }
         // prevent killing the master if possible and client nodes
-        final Iterator<NodeAndClient> values = n == 0 ? nodes.values().iterator() : Iterators.filter(nodes.values().iterator(),
-                Predicates.and(new DataNodePredicate(), Predicates.not(new MasterNodePredicate(getMasterName()))));
+        final Stream<NodeAndClient> collection =
+                n == 0 ? nodes.values().stream() : nodes.values().stream().filter(new DataNodePredicate().and(new MasterNodePredicate(getMasterName()).negate()));
+        final Iterator<NodeAndClient> values = collection.iterator();
 
         final Iterator<NodeAndClient> limit = Iterators.limit(values, size - n);
         logger.info("changing cluster size from {} to {}, {} data nodes", size(), n + numSharedClientNodes, n);
@@ -682,7 +684,7 @@ public final class InternalTestCluster extends TestCluster {
      */
     public synchronized Client nonMasterClient() {
         ensureOpen();
-        NodeAndClient randomNodeAndClient = getRandomNodeAndClient(Predicates.not(new MasterNodePredicate(getMasterName())));
+        NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new MasterNodePredicate(getMasterName()).negate());
         if (randomNodeAndClient != null) {
             return randomNodeAndClient.nodeClient(); // ensure node client non-master is requested
         }
@@ -758,12 +760,7 @@ public final class InternalTestCluster extends TestCluster {
      */
     public synchronized Client client(final Predicate<Settings> filterPredicate) {
         ensureOpen();
-        final NodeAndClient randomNodeAndClient = getRandomNodeAndClient(new Predicate<NodeAndClient>() {
-            @Override
-            public boolean apply(NodeAndClient nodeAndClient) {
-                return filterPredicate.apply(nodeAndClient.node.settings());
-            }
-        });
+        final NodeAndClient randomNodeAndClient = getRandomNodeAndClient(nodeAndClient -> filterPredicate.test(nodeAndClient.node.settings()));
         if (randomNodeAndClient != null) {
             return randomNodeAndClient.client(random);
         }
@@ -1145,7 +1142,7 @@ public final class InternalTestCluster extends TestCluster {
     }
 
     private synchronized <T> Iterable<T> getInstances(Class<T> clazz, Predicate<NodeAndClient> predicate) {
-        Iterable<NodeAndClient> filteredNodes = Iterables.filter(nodes.values(), predicate);
+        Iterable<NodeAndClient> filteredNodes = nodes.values().stream().filter(predicate)::iterator;
         List<T> instances = new ArrayList<>();
         for (NodeAndClient nodeAndClient : filteredNodes) {
             instances.add(getInstanceFromNode(clazz, nodeAndClient.node));
@@ -1157,18 +1154,7 @@ public final class InternalTestCluster extends TestCluster {
      * Returns a reference to the given nodes instances of the given class >T<
      */
     public synchronized <T> T getInstance(Class<T> clazz, final String node) {
-        final Predicate<InternalTestCluster.NodeAndClient> predicate;
-        if (node != null) {
-            predicate = new Predicate<InternalTestCluster.NodeAndClient>() {
-                @Override
-                public boolean apply(NodeAndClient nodeAndClient) {
-                    return node.equals(nodeAndClient.name);
-                }
-            };
-        } else {
-            predicate = Predicates.alwaysTrue();
-        }
-        return getInstance(clazz, predicate);
+        return getInstance(clazz, nc -> node == null || node.equals(nc.name));
     }
 
     public synchronized <T> T getDataNodeInstance(Class<T> clazz) {
@@ -1185,7 +1171,7 @@ public final class InternalTestCluster extends TestCluster {
      * Returns a reference to a random nodes instances of the given class >T<
      */
     public synchronized <T> T getInstance(Class<T> clazz) {
-        return getInstance(clazz, Predicates.<NodeAndClient>alwaysTrue());
+        return getInstance(clazz, nc -> true);
     }
 
     private synchronized <T> T getInstanceFromNode(Class<T> clazz, Node node) {
@@ -1228,12 +1214,7 @@ public final class InternalTestCluster extends TestCluster {
      */
     public synchronized void stopRandomNode(final Predicate<Settings> filter) throws IOException {
         ensureOpen();
-        NodeAndClient nodeAndClient = getRandomNodeAndClient(new Predicate<InternalTestCluster.NodeAndClient>() {
-            @Override
-            public boolean apply(NodeAndClient nodeAndClient) {
-                return filter.apply(nodeAndClient.node.settings());
-            }
-        });
+        NodeAndClient nodeAndClient = getRandomNodeAndClient(nc -> filter.test(nc.node.settings()));
         if (nodeAndClient != null) {
             logger.info("Closing filtered random node [{}] ", nodeAndClient.name);
             removeDisruptionSchemeFromNode(nodeAndClient);
@@ -1260,7 +1241,7 @@ public final class InternalTestCluster extends TestCluster {
      * Stops the any of the current nodes but not the master node.
      */
     public void stopRandomNonMasterNode() throws IOException {
-        NodeAndClient nodeAndClient = getRandomNodeAndClient(Predicates.not(new MasterNodePredicate(getMasterName())));
+        NodeAndClient nodeAndClient = getRandomNodeAndClient(new MasterNodePredicate(getMasterName()).negate());
         if (nodeAndClient != null) {
             logger.info("Closing random non master node [{}] current master [{}] ", nodeAndClient.name, getMasterName());
             removeDisruptionSchemeFromNode(nodeAndClient);
@@ -1280,7 +1261,7 @@ public final class InternalTestCluster extends TestCluster {
      * Restarts a random node in the cluster and calls the callback during restart.
      */
     public void restartRandomNode(RestartCallback callback) throws Exception {
-        restartRandomNode(Predicates.<NodeAndClient>alwaysTrue(), callback);
+        restartRandomNode(nc -> true, callback);
     }
 
     /**
@@ -1442,7 +1423,12 @@ public final class InternalTestCluster extends TestCluster {
 
     private synchronized Set<String> nRandomDataNodes(int numNodes) {
         assert size() >= numNodes;
-        NavigableMap<String, NodeAndClient> dataNodes = Maps.filterEntries(nodes, new EntryNodePredicate(new DataNodePredicate()));
+        Map<String, NodeAndClient> dataNodes =
+                nodes
+                        .entrySet()
+                        .stream()
+                        .filter(new EntryNodePredicate(new DataNodePredicate()))
+                        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
         return Sets.newHashSet(Iterators.limit(dataNodes.keySet().iterator(), numNodes));
     }
 
@@ -1676,23 +1662,31 @@ public final class InternalTestCluster extends TestCluster {
     }
 
     private synchronized Collection<NodeAndClient> dataNodeAndClients() {
-        return Collections2.filter(nodes.values(), new DataNodePredicate());
+        return filterNodes(nodes, new DataNodePredicate());
     }
 
     private synchronized Collection<NodeAndClient> dataAndMasterNodes() {
-        return Collections2.filter(nodes.values(), new DataOrMasterNodePredicate());
+        return filterNodes(nodes, new DataOrMasterNodePredicate());
+    }
+
+    private synchronized Collection<NodeAndClient> filterNodes(Map<String, InternalTestCluster.NodeAndClient> map, Predicate<NodeAndClient> predicate) {
+        return map
+                .values()
+                .stream()
+                .filter(predicate)
+                .collect(Collectors.toCollection(ArrayList::new));
     }
 
     private static final class DataNodePredicate implements Predicate<NodeAndClient> {
         @Override
-        public boolean apply(NodeAndClient nodeAndClient) {
+        public boolean test(NodeAndClient nodeAndClient) {
             return DiscoveryNode.dataNode(nodeAndClient.node.settings());
         }
     }
 
     private static final class DataOrMasterNodePredicate implements Predicate<NodeAndClient> {
         @Override
-        public boolean apply(NodeAndClient nodeAndClient) {
+        public boolean test(NodeAndClient nodeAndClient) {
             return DiscoveryNode.dataNode(nodeAndClient.node.settings()) ||
                     DiscoveryNode.masterNode(nodeAndClient.node.settings());
         }
@@ -1706,14 +1700,14 @@ public final class InternalTestCluster extends TestCluster {
         }
 
         @Override
-        public boolean apply(NodeAndClient nodeAndClient) {
+        public boolean test(NodeAndClient nodeAndClient) {
             return masterNodeName.equals(nodeAndClient.name);
         }
     }
 
     private static final class ClientNodePredicate implements Predicate<NodeAndClient> {
         @Override
-        public boolean apply(NodeAndClient nodeAndClient) {
+        public boolean test(NodeAndClient nodeAndClient) {
             return DiscoveryNode.clientNode(nodeAndClient.node.settings());
         }
     }
@@ -1726,8 +1720,8 @@ public final class InternalTestCluster extends TestCluster {
         }
 
         @Override
-        public boolean apply(Map.Entry<String, NodeAndClient> entry) {
-            return delegateNodePredicate.apply(entry.getValue());
+        public boolean test(Map.Entry<String, NodeAndClient> entry) {
+            return delegateNodePredicate.test(entry.getValue());
         }
     }
 
@@ -1795,7 +1789,7 @@ public final class InternalTestCluster extends TestCluster {
         }
 
         @Override
-        public boolean apply(Settings settings) {
+        public boolean test(Settings settings) {
             return nodeNames.contains(settings.get("name"));
 
         }
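Across InternalTestCluster the Guava combinators (Predicates.and, Predicates.not, Predicates.alwaysTrue) become the default methods on java.util.function.Predicate (and(), negate()) or trivial lambdas such as nc -> true, and eager Collections2.filter/Iterables.filter calls become Stream pipelines. A small stand-alone sketch of those two idioms follows; the node names and the String-based predicates are made-up illustrations, not the real NodeAndClient type.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collection;
    import java.util.List;
    import java.util.function.Predicate;
    import java.util.stream.Collectors;

    class PredicateCompositionSketch {
        public static void main(String[] args) {
            List<String> nodes = Arrays.asList("node_d0", "node_m1", "node_c2");

            // Guava equivalent: Predicates.and(isData, Predicates.not(isMaster))
            Predicate<String> isData = name -> name.contains("_d");
            Predicate<String> isMaster = name -> name.contains("_m");
            Predicate<String> dataButNotMaster = isData.and(isMaster.negate());

            // Guava equivalent: Collections2.filter(nodes, predicate) -- here an eager copy via a collector
            Collection<String> filtered = nodes.stream()
                    .filter(dataButNotMaster)
                    .collect(Collectors.toCollection(ArrayList::new));

            System.out.println(filtered); // prints [node_d0]
        }
    }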
@@ -18,11 +18,6 @@
  */
 package org.elasticsearch.test.hamcrest;
 
-import com.google.common.base.Function;
-import com.google.common.base.Predicate;
-import com.google.common.base.Predicates;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchException;
@@ -35,7 +30,6 @@ import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
-import org.elasticsearch.plugins.PluginInfo;
 import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
 import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
@@ -62,6 +56,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.plugins.PluginInfo;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.suggest.Suggest;
@@ -77,16 +72,36 @@ import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
 
-import static com.google.common.base.Predicates.isNull;
-import static com.google.common.base.Predicates.notNull;
-import static org.elasticsearch.test.ESTestCase.*;
+import static org.elasticsearch.test.ESTestCase.assertArrayEquals;
+import static org.elasticsearch.test.ESTestCase.assertEquals;
+import static org.elasticsearch.test.ESTestCase.assertFalse;
+import static org.elasticsearch.test.ESTestCase.assertNotNull;
+import static org.elasticsearch.test.ESTestCase.assertTrue;
+import static org.elasticsearch.test.ESTestCase.fail;
+import static org.elasticsearch.test.ESTestCase.random;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  *
@@ -739,91 +754,68 @@ public class ElasticsearchAssertions {
         PluginsInfo plugins = response.getNodesMap().get(nodeId).getPlugins();
         Assert.assertThat(plugins, notNullValue());
 
-        List<String> pluginNames = FluentIterable.from(plugins.getInfos()).filter(jvmPluginPredicate).transform(nameFunction).toList();
+        List<String> pluginNames = filterAndMap(plugins, jvmPluginPredicate, nameFunction);
         for (String expectedJvmPluginName : expectedJvmPluginNames) {
             Assert.assertThat(pluginNames, hasItem(expectedJvmPluginName));
         }
 
-        List<String> pluginDescriptions = FluentIterable.from(plugins.getInfos()).filter(jvmPluginPredicate).transform(descriptionFunction).toList();
+        List<String> pluginDescriptions = filterAndMap(plugins, jvmPluginPredicate, descriptionFunction);
         for (String expectedJvmPluginDescription : expectedJvmPluginDescriptions) {
             Assert.assertThat(pluginDescriptions, hasItem(expectedJvmPluginDescription));
         }
 
-        List<String> jvmPluginVersions = FluentIterable.from(plugins.getInfos()).filter(jvmPluginPredicate).transform(versionFunction).toList();
+        List<String> jvmPluginVersions = filterAndMap(plugins, jvmPluginPredicate, versionFunction);
         for (String pluginVersion : expectedJvmVersions) {
             Assert.assertThat(jvmPluginVersions, hasItem(pluginVersion));
         }
 
-        FluentIterable<String> jvmUrls = FluentIterable.from(plugins.getInfos())
-                .filter(Predicates.and(jvmPluginPredicate, Predicates.not(sitePluginPredicate)))
-                .transform(urlFunction)
-                .filter(notNull());
-        Assert.assertThat(Iterables.size(jvmUrls), is(0));
+        boolean anyHaveUrls =
+                plugins
+                        .getInfos()
+                        .stream()
+                        .filter(jvmPluginPredicate.and(sitePluginPredicate.negate()))
+                        .map(urlFunction)
+                        .anyMatch(p -> p != null);
+        assertFalse(anyHaveUrls);
 
-        List<String> sitePluginNames = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate).transform(nameFunction).toList();
+        List<String> sitePluginNames = filterAndMap(plugins, sitePluginPredicate, nameFunction);
         Assert.assertThat(sitePluginNames.isEmpty(), is(expectedSitePluginNames.isEmpty()));
         for (String expectedSitePluginName : expectedSitePluginNames) {
             Assert.assertThat(sitePluginNames, hasItem(expectedSitePluginName));
         }
 
-        List<String> sitePluginDescriptions = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate).transform(descriptionFunction).toList();
+        List<String> sitePluginDescriptions = filterAndMap(plugins, sitePluginPredicate, descriptionFunction);
         Assert.assertThat(sitePluginDescriptions.isEmpty(), is(expectedSitePluginDescriptions.isEmpty()));
         for (String sitePluginDescription : expectedSitePluginDescriptions) {
             Assert.assertThat(sitePluginDescriptions, hasItem(sitePluginDescription));
         }
 
-        List<String> sitePluginUrls = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate).transform(urlFunction).toList();
+        List<String> sitePluginUrls = filterAndMap(plugins, sitePluginPredicate, urlFunction);
         Assert.assertThat(sitePluginUrls, not(contains(nullValue())));
 
-        List<String> sitePluginVersions = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate).transform(versionFunction).toList();
+        List<String> sitePluginVersions = filterAndMap(plugins, sitePluginPredicate, versionFunction);
         Assert.assertThat(sitePluginVersions.isEmpty(), is(expectedSiteVersions.isEmpty()));
         for (String pluginVersion : expectedSiteVersions) {
             Assert.assertThat(sitePluginVersions, hasItem(pluginVersion));
         }
     }
 
-    private static Predicate<PluginInfo> jvmPluginPredicate = new Predicate<PluginInfo>() {
-        @Override
-        public boolean apply(PluginInfo pluginInfo) {
-            return pluginInfo.isJvm();
+    private static List<String> filterAndMap(PluginsInfo pluginsInfo, Predicate<PluginInfo> predicate, Function<PluginInfo, String> function) {
+        return pluginsInfo.getInfos().stream().filter(predicate).map(function).collect(Collectors.toList());
         }
-    };
 
-    private static Predicate<PluginInfo> sitePluginPredicate = new Predicate<PluginInfo>() {
-        @Override
-        public boolean apply(PluginInfo pluginInfo) {
-            return pluginInfo.isSite();
-        }
-    };
+    private static Predicate<PluginInfo> jvmPluginPredicate = p -> p.isJvm();
 
-    private static Function<PluginInfo, String> nameFunction = new Function<PluginInfo, String>() {
-        @Override
-        public String apply(PluginInfo pluginInfo) {
-            return pluginInfo.getName();
-        }
-    };
+    private static Predicate<PluginInfo> sitePluginPredicate = p -> p.isSite();
 
-    private static Function<PluginInfo, String> descriptionFunction = new Function<PluginInfo, String>() {
-        @Override
-        public String apply(PluginInfo pluginInfo) {
-            return pluginInfo.getDescription();
-        }
-    };
+    private static Function<PluginInfo, String> nameFunction = p -> p.getName();
 
-    private static Function<PluginInfo, String> urlFunction = new Function<PluginInfo, String>() {
-        @Override
-        public String apply(PluginInfo pluginInfo) {
-            return pluginInfo.getUrl();
-        }
-    };
+    private static Function<PluginInfo, String> descriptionFunction = p -> p.getDescription();
 
-    private static Function<PluginInfo, String> versionFunction = new Function<PluginInfo, String>() {
-        @Override
-        public String apply(PluginInfo pluginInfo) {
-            return pluginInfo.getVersion();
-        }
-    };
+    private static Function<PluginInfo, String> urlFunction = p -> p.getUrl();
+
+    private static Function<PluginInfo, String> versionFunction = p -> p.getVersion();
 
     /**
      * Check if a file exists
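In ElasticsearchAssertions the repeated FluentIterable.from(list).filter(p).transform(f).toList() chains collapse into one private filterAndMap helper built on a stream pipeline (filter, map, collect). The sketch below shows that same shape in isolation; the Plugin class, its getters, and the sample plugin names are placeholders for illustration, not the Elasticsearch PluginInfo/PluginsInfo classes.

    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Function;
    import java.util.function.Predicate;
    import java.util.stream.Collectors;

    class FilterAndMapSketch {
        // Hypothetical stand-in for PluginInfo: just a name and a "jvm plugin" flag.
        static class Plugin {
            final String name;
            final boolean jvm;
            Plugin(String name, boolean jvm) { this.name = name; this.jvm = jvm; }
            String getName() { return name; }
            boolean isJvm() { return jvm; }
        }

        // Same shape as the new filterAndMap(...) helper in the diff: filter, then map, then collect to a List.
        static List<String> filterAndMap(List<Plugin> plugins, Predicate<Plugin> predicate, Function<Plugin, String> function) {
            return plugins.stream().filter(predicate).map(function).collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<Plugin> plugins = Arrays.asList(new Plugin("analysis-icu", true), new Plugin("site-dashboard", false));
            // Guava equivalent: FluentIterable.from(plugins).filter(jvmPredicate).transform(nameFunction).toList()
            System.out.println(filterAndMap(plugins, Plugin::isJvm, Plugin::getName)); // prints [analysis-icu]
        }
    }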
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.ttl;
 
-import com.google.common.base.Predicate;
-
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
@@ -31,7 +29,6 @@ import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.get.GetField;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -44,7 +41,6 @@ import java.util.concurrent.TimeUnit;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.both;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
@@ -170,19 +166,18 @@ public class SimpleTTLIT extends ESIntegTestCase {
         // But we can use index statistics' delete count to be sure that deletes have been executed, that must be incremented before
         // ttl purging has finished.
         logger.info("--> checking purger");
-        assertThat(awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
+        assertTrue(awaitBusy(() -> {
                     if (rarely()) {
                         client().admin().indices().prepareFlush("test").get();
                     } else if (rarely()) {
                         client().admin().indices().prepareOptimize("test").setMaxNumSegments(1).get();
                     }
-                IndicesStatsResponse response = client().admin().indices().prepareStats("test").clear().setIndexing(true).get();
+                    IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setIndexing(true).get();
                     // TTL deletes two docs, but it is indexed in the primary shard and replica shard.
-                return response.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() == 2L * test.dataCopies;
-            }
-        }, 5, TimeUnit.SECONDS), equalTo(true));
+                    return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() == 2L * test.dataCopies;
+                },
+                5, TimeUnit.SECONDS
+        ));
 
         // realtime get check
         getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(true).execute().actionGet();
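Besides swapping the anonymous Predicate for a lambda, the SimpleTTLIT hunk also changes the assertion style: assertThat(awaitBusy(...), equalTo(true)) becomes assertTrue(awaitBusy(...)). Both fail when the condition never becomes true within the timeout; the second reads more directly. A toy illustration of the two forms, assuming JUnit 4 and Hamcrest are on the classpath:

    import static org.hamcrest.CoreMatchers.equalTo;
    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.junit.Assert.assertTrue;

    class AssertionStyleSketch {
        public static void main(String[] args) {
            boolean conditionMet = true; // stand-in for awaitBusy(() -> ..., 5, TimeUnit.SECONDS)

            // Old style: wrap the boolean in a matcher assertion.
            assertThat(conditionMet, equalTo(true));

            // New style: assert the boolean directly; same intent, simpler failure message.
            assertTrue(conditionMet);

            System.out.println("both assertions passed");
        }
    }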
@@ -88,3 +88,5 @@ org.elasticsearch.common.io.PathUtils#get(java.net.URI)
 com.google.common.collect.Lists
 com.google.common.collect.ImmutableList
 com.google.common.base.Objects
+com.google.common.base.Predicate
+com.google.common.base.Predicates
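The hunk above adds the two Guava types to what appears to be the project's forbidden-apis signatures list, so any future reference to them fails the build's forbidden-API check instead of relying on review. For example, code like the following hypothetical snippet would now be rejected by that check (it still compiles against Guava; only the build-time check flags it):

    // With com.google.common.base.Predicate(s) listed in the forbidden signatures,
    // the forbidden-apis check fails the build for any class referencing them.
    import com.google.common.base.Predicate;
    import com.google.common.base.Predicates;

    class ForbiddenUsageExample {
        static final Predicate<String> NON_EMPTY = new Predicate<String>() {
            @Override
            public boolean apply(String input) {
                return input != null && !input.isEmpty();
            }
        };

        static final Predicate<String> ANYTHING = Predicates.alwaysTrue();

        // The replacement the rest of this commit applies:
        static final java.util.function.Predicate<String> NON_EMPTY_JDK = s -> s != null && !s.isEmpty();
    }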
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.action.deletebyquery;
 
-import com.google.common.base.Predicate;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
@@ -164,13 +163,8 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase {
         TestActionListener listener = new TestActionListener();
 
         final TransportDeleteByQueryAction.AsyncDeleteByQueryAction async = newAsyncAction(delete, listener);
-        awaitBusy(new Predicate<Object>() {
-            @Override
-            public boolean apply(Object input) {
                 // Wait until the action timed out
-                return async.hasTimedOut();
-            }
-        });
+        awaitBusy(() -> async.hasTimedOut());
 
         async.executeScroll(searchResponse.getScrollId());
         waitForCompletion("scroll request returns zero documents on expired scroll id", listener);
@@ -419,12 +413,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase {
     private void waitForCompletion(String testName, final TestActionListener listener) {
         logger.info(" --> waiting for delete-by-query [{}] to complete", testName);
         try {
-            awaitBusy(new Predicate<Object>() {
-                @Override
-                public boolean apply(Object input) {
-                    return listener.isTerminated();
-                }
-            });
+            awaitBusy(() -> listener.isTerminated());
         } catch (InterruptedException e) {
             fail("exception when waiting for delete-by-query [" + testName + "] to complete: " + e.getMessage());
             logger.error("exception when waiting for delete-by-query [{}] to complete", e, testName);