Merge pull request #13497 from rjernst/test_jar_cleanup

Reorganize sharing of constants for mock cluster info service
Ryan Ernst 2015-09-10 21:10:29 -07:00
commit f3142e0c93
4 changed files with 73 additions and 62 deletions
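In short: the makeStats helper and the DEV_NULL_MAP constant (with its StaticValueMap backing class) move out of MockDiskUsagesIT and DiskThresholdDeciderTests into MockInternalClusterInfoService, and the tests now reference them from there. A minimal sketch of the resulting usage follows; the wrapper class name SharedMockClusterInfoExample is hypothetical, and only the MockInternalClusterInfoService members shown come from this change.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.MockInternalClusterInfoService;

// Hypothetical example class; only the MockInternalClusterInfoService members are from this change.
public class SharedMockClusterInfoExample {

    /** Fake per-node stats, previously built via MockDiskUsagesIT.makeStats. */
    public static NodeStats fakeNodeStats() {
        return MockInternalClusterInfoService.makeStats(
                "node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100));
    }

    /** A ClusterInfo whose shard-to-data-path map is the shared DEV_NULL_MAP,
     *  previously defined on DiskThresholdDeciderTests. */
    public static ClusterInfo fakeClusterInfo() {
        Map<String, DiskUsage> usages = new HashMap<>();
        usages.put("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 10)); // 90% used
        Map<String, Long> shardSizes = new HashMap<>();
        shardSizes.put("[test][0][p]", 10L); // 10 bytes
        return new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages),
                Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
    }
}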

org/elasticsearch/cluster/MockInternalClusterInfoService.java

@@ -18,20 +18,27 @@
*/
package org.elasticsearch.cluster;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDeciderTests;
import org.elasticsearch.cluster.routing.allocation.decider.MockDiskUsagesIT;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
/**
@@ -57,6 +64,21 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService {
private final ClusterName clusterName;
private volatile NodeStats[] stats = new NodeStats[3];
/** Create a fake NodeStats for the given node and usage */
public static NodeStats makeStats(String nodeName, DiskUsage usage) {
FsInfo.Path[] paths = new FsInfo.Path[1];
FsInfo.Path path = new FsInfo.Path("/dev/null", null,
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
paths[0] = path;
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT),
System.currentTimeMillis(),
null, null, null, null, null,
fsInfo,
null, null, null,
null);
}
@Inject
public MockInternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
TransportNodesStatsAction transportNodesStatsAction,
@@ -64,21 +86,21 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService {
ClusterService clusterService, ThreadPool threadPool) {
super(settings, nodeSettingsService, transportNodesStatsAction, transportIndicesStatsAction, clusterService, threadPool);
this.clusterName = ClusterName.clusterNameFromSettings(settings);
stats[0] = MockDiskUsagesIT.makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100));
stats[1] = MockDiskUsagesIT.makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100));
stats[2] = MockDiskUsagesIT.makeStats("node_t3", new DiskUsage("node_t3", "n3", "/dev/null", 100, 100));
stats[0] = makeStats("node_t1", new DiskUsage("node_t1", "n1", "/dev/null", 100, 100));
stats[1] = makeStats("node_t2", new DiskUsage("node_t2", "n2", "/dev/null", 100, 100));
stats[2] = makeStats("node_t3", new DiskUsage("node_t3", "n3", "/dev/null", 100, 100));
}
public void setN1Usage(String nodeName, DiskUsage newUsage) {
stats[0] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
stats[0] = makeStats(nodeName, newUsage);
}
public void setN2Usage(String nodeName, DiskUsage newUsage) {
stats[1] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
stats[1] = makeStats(nodeName, newUsage);
}
public void setN3Usage(String nodeName, DiskUsage newUsage) {
stats[2] = MockDiskUsagesIT.makeStats(nodeName, newUsage);
stats[2] = makeStats(nodeName, newUsage);
}
@Override
@@ -96,6 +118,28 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService {
public ClusterInfo getClusterInfo() {
ClusterInfo clusterInfo = super.getClusterInfo();
return new ClusterInfo(clusterInfo.getNodeLeastAvailableDiskUsages(), clusterInfo.getNodeMostAvailableDiskUsages(), clusterInfo.shardSizes, DiskThresholdDeciderTests.DEV_NULL_MAP);
return new ClusterInfo(clusterInfo.getNodeLeastAvailableDiskUsages(), clusterInfo.getNodeMostAvailableDiskUsages(), clusterInfo.shardSizes, DEV_NULL_MAP);
}
public static final Map<ShardRouting, String> DEV_NULL_MAP = Collections.unmodifiableMap(new StaticValueMap("/dev/null"));
// a test only map that always returns the same value no matter what key is passed
private static final class StaticValueMap extends AbstractMap<ShardRouting, String> {
private final String value;
private StaticValueMap(String value) {
this.value = value;
}
@Override
public String get(Object key) {
return value;
}
@Override
public Set<Entry<ShardRouting, String>> entrySet() {
throw new UnsupportedOperationException("this is a test-only map that only supports #get(Object key)");
}
}
}

org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java

@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterInfoService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.MockInternalClusterInfoService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -88,7 +89,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
Map<String, Long> shardSizes = new HashMap<>();
shardSizes.put("[test][0][p]", 10L); // 10 bytes
shardSizes.put("[test][0][r]", 10L);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY,
new HashSet<>(Arrays.asList(
@@ -282,7 +283,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
Map<String, Long> shardSizes = new HashMap<>();
shardSizes.put("[test][0][p]", 10L); // 10 bytes
shardSizes.put("[test][0][r]", 10L);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY,
new HashSet<>(Arrays.asList(
@@ -344,7 +345,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
// Make node without the primary now habitable to replicas
usages.put(nodeWithoutPrimary, new DiskUsage(nodeWithoutPrimary, "", "/dev/null", 100, 35)); // 65% used
final ClusterInfo clusterInfo2 = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo2 = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
cis = new ClusterInfoService() {
@Override
public ClusterInfo getClusterInfo() {
@@ -543,7 +544,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
Map<String, Long> shardSizes = new HashMap<>();
shardSizes.put("[test][0][p]", 10L); // 10 bytes
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY,
new HashSet<>(Arrays.asList(
@@ -610,7 +611,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
Map<String, Long> shardSizes = new HashMap<>();
shardSizes.put("[test][0][p]", 10L); // 10 bytes
shardSizes.put("[test][0][r]", 10L); // 10 bytes
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY,
new HashSet<>(Arrays.asList(
@@ -714,7 +715,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
shardSizes.put("[test][0][r]", 14L);
shardSizes.put("[test2][0][p]", 1L); // 1 bytes
shardSizes.put("[test2][0][r]", 1L);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY,
new HashSet<>(Arrays.asList(
@@ -817,7 +818,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
Map<String, Long> shardSizes = new HashMap<>();
shardSizes.put("[test][0][p]", 40L);
shardSizes.put("[test][1][p]", 40L);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), DEV_NULL_MAP);
final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP);
DiskThresholdDecider diskThresholdDecider = new DiskThresholdDecider(diskSettings);
MetaData metaData = MetaData.builder()
@@ -925,26 +926,4 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
rn.shardsWithState(RELOCATING),
rn.shardsWithState(STARTED));
}
public static final Map<ShardRouting, String> DEV_NULL_MAP = Collections.unmodifiableMap(new StaticValueMap("/dev/null"));
// a test only map that always returns the same value no matter what key is passed
private static final class StaticValueMap extends AbstractMap<ShardRouting, String> {
private final String value;
private StaticValueMap(String value) {
this.value = value;
}
@Override
public String get(Object key) {
return value;
}
@Override
public Set<Entry<ShardRouting, String>> entrySet() {
throw new UnsupportedOperationException("this is a test-only map that only supports #get(Object key)");
}
}
}

org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java

@@ -20,12 +20,21 @@
package org.elasticsearch.cluster.routing.allocation.decider;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterInfoService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.EmptyClusterInfoService;
import org.elasticsearch.cluster.MockInternalClusterInfoService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingHelper;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
@@ -215,7 +224,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase {
shardSizes.put("[test][1][r]", 100L);
shardSizes.put("[test][2][r]", 1000L);
shardSizes.put("[other][0][p]", 10000L);
ClusterInfo info = new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP, shardSizes, DiskThresholdDeciderTests.DEV_NULL_MAP);
ClusterInfo info = new ClusterInfo(Collections.EMPTY_MAP, Collections.EMPTY_MAP, shardSizes, MockInternalClusterInfoService.DEV_NULL_MAP);
ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_0, "node1");
ShardRoutingHelper.moveToStarted(test_0);

org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java

@@ -19,8 +19,6 @@
package org.elasticsearch.cluster.routing.allocation.decider;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.cluster.ClusterInfo;
@@ -28,11 +26,8 @@ import org.elasticsearch.cluster.ClusterInfoService;
import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.InternalClusterInfoService;
import org.elasticsearch.cluster.MockInternalClusterInfoService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Test;
@@ -170,20 +165,4 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
}
});
}
/** Create a fake NodeStats for the given node and usage */
public static NodeStats makeStats(String nodeName, DiskUsage usage) {
FsInfo.Path[] paths = new FsInfo.Path[1];
FsInfo.Path path = new FsInfo.Path("/dev/null", null,
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
paths[0] = path;
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT),
System.currentTimeMillis(),
null, null, null, null, null,
fsInfo,
null, null, null,
null);
}
}