HBASE-7513 HDFSBlocksDistribution shouldn't send NPEs when something goes wrong
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1430560 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
f77e5b5bff
commit
087f1df0e2
|
@ -30,7 +30,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Data structure to describe the distribution of HDFS blocks amount hosts
|
 * Data structure to describe the distribution of HDFS blocks among hosts.
|
||||||
|
*
|
||||||
|
* Adding erroneous data will be ignored silently.
|
||||||
*/
|
*/
|
||||||
@InterfaceAudience.Private
|
@InterfaceAudience.Private
|
||||||
public class HDFSBlocksDistribution {
|
public class HDFSBlocksDistribution {
|
||||||
|
@ -122,8 +124,10 @@ public class HDFSBlocksDistribution {
|
||||||
*/
|
*/
|
||||||
public void addHostsAndBlockWeight(String[] hosts, long weight) {
|
public void addHostsAndBlockWeight(String[] hosts, long weight) {
|
||||||
if (hosts == null || hosts.length == 0) {
|
if (hosts == null || hosts.length == 0) {
|
||||||
throw new NullPointerException("empty hosts");
|
// erroneous data
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
addUniqueWeight(weight);
|
addUniqueWeight(weight);
|
||||||
for (String hostname : hosts) {
|
for (String hostname : hosts) {
|
||||||
addHostAndBlockWeight(hostname, weight);
|
addHostAndBlockWeight(hostname, weight);
|
||||||
|
@ -146,7 +150,8 @@ public class HDFSBlocksDistribution {
|
||||||
*/
|
*/
|
||||||
private void addHostAndBlockWeight(String host, long weight) {
|
private void addHostAndBlockWeight(String host, long weight) {
|
||||||
if (host == null) {
|
if (host == null) {
|
||||||
throw new NullPointerException("Passed hostname is null");
|
// erroneous data
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
|
HostAndWeight hostAndWeight = this.hostAndWeights.get(host);
|
||||||
|
|
|
@ -0,0 +1,69 @@
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.apache.hadoop.hbase;
|
||||||
|
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.experimental.categories.Category;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static junit.framework.Assert.assertEquals;
|
||||||
|
|
||||||
|
@Category(SmallTests.class)
|
||||||
|
public class TestHDFSBlocksDistribution {
|
||||||
|
@Test
|
||||||
|
public void testAddHostsAndBlockWeight() throws Exception {
|
||||||
|
HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
|
||||||
|
distribution.addHostsAndBlockWeight(null, 100);
|
||||||
|
assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
|
||||||
|
distribution.addHostsAndBlockWeight(new String[0], 100);
|
||||||
|
assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
|
||||||
|
distribution.addHostsAndBlockWeight(new String[] {"test"}, 101);
|
||||||
|
assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
|
||||||
|
distribution.addHostsAndBlockWeight(new String[] {"test"}, 202);
|
||||||
|
assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
|
||||||
|
assertEquals("test host should have weight 303", 303,
|
||||||
|
distribution.getHostAndWeights().get("test").getWeight());
|
||||||
|
distribution.addHostsAndBlockWeight(new String[] {"testTwo"}, 222);
|
||||||
|
assertEquals("Should be two hosts", 2, distribution.getHostAndWeights().size());
|
||||||
|
assertEquals("Total weight should be 525", 525, distribution.getUniqueBlocksTotalWeight());
|
||||||
|
}
|
||||||
|
|
||||||
|
public class MockHDFSBlocksDistribution extends HDFSBlocksDistribution {
|
||||||
|
public Map<String,HostAndWeight> getHostAndWeights() {
|
||||||
|
HashMap<String, HostAndWeight> map = new HashMap<String, HostAndWeight>();
|
||||||
|
map.put("test", new HostAndWeight(null, 100));
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testAdd() throws Exception {
|
||||||
|
HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
|
||||||
|
distribution.add(new MockHDFSBlocksDistribution());
|
||||||
|
assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size());
|
||||||
|
distribution.addHostsAndBlockWeight(new String[]{"test"}, 10);
|
||||||
|
assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
|
||||||
|
distribution.add(new MockHDFSBlocksDistribution());
|
||||||
|
assertEquals("Should be one host", 1, distribution.getHostAndWeights().size());
|
||||||
|
assertEquals("Total weight should be 10", 10, distribution.getUniqueBlocksTotalWeight());
|
||||||
|
}
|
||||||
|
}
|
Loading…
Reference in New Issue