From 087f1df0e2a538f7627aaaa94e6e92215545dce4 Mon Sep 17 00:00:00 2001 From: eclark Date: Tue, 8 Jan 2013 21:50:38 +0000 Subject: [PATCH] HBASE-7513 HDFSBlocksDistribution shouldn't send NPEs when something goes wrong git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1430560 13f79535-47bb-0310-9956-ffa450edef68 --- .../hadoop/hbase/HDFSBlocksDistribution.java | 11 ++- .../hbase/TestHDFSBlocksDistribution.java | 69 +++++++++++++++++++ 2 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java index f9be7685e23..11619d3683e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java @@ -30,7 +30,9 @@ import org.apache.hadoop.classification.InterfaceAudience; /** - * Data structure to describe the distribution of HDFS blocks amount hosts + * Data structure to describe the distribution of HDFS blocks among hosts. + * + * Adding erroneous data will be ignored silently. 
*/ @InterfaceAudience.Private public class HDFSBlocksDistribution { @@ -122,8 +124,10 @@ public class HDFSBlocksDistribution { */ public void addHostsAndBlockWeight(String[] hosts, long weight) { if (hosts == null || hosts.length == 0) { - throw new NullPointerException("empty hosts"); + // erroneous data + return; } + addUniqueWeight(weight); for (String hostname : hosts) { addHostAndBlockWeight(hostname, weight); @@ -146,7 +150,8 @@ public class HDFSBlocksDistribution { */ private void addHostAndBlockWeight(String host, long weight) { if (host == null) { - throw new NullPointerException("Passed hostname is null"); + // erroneous data + return; } HostAndWeight hostAndWeight = this.hostAndWeights.get(host); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java new file mode 100644 index 00000000000..ea694067c78 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java @@ -0,0 +1,69 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.util.HashMap; +import java.util.Map; + +import static junit.framework.Assert.assertEquals; + +@Category(SmallTests.class) +public class TestHDFSBlocksDistribution { + @Test + public void testAddHostsAndBlockWeight() throws Exception { + HDFSBlocksDistribution distribution = new HDFSBlocksDistribution(); + distribution.addHostsAndBlockWeight(null, 100); + assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size()); + distribution.addHostsAndBlockWeight(new String[0], 100); + assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size()); + distribution.addHostsAndBlockWeight(new String[] {"test"}, 101); + assertEquals("Should be one host", 1, distribution.getHostAndWeights().size()); + distribution.addHostsAndBlockWeight(new String[] {"test"}, 202); + assertEquals("Should be one host", 1, distribution.getHostAndWeights().size()); + assertEquals("test host should have weight 303", 303, + distribution.getHostAndWeights().get("test").getWeight()); + distribution.addHostsAndBlockWeight(new String[] {"testTwo"}, 222); + assertEquals("Should be two hosts", 2, distribution.getHostAndWeights().size()); + assertEquals("Total weight should be 525", 525, distribution.getUniqueBlocksTotalWeight()); + } + + public class MockHDFSBlocksDistribution extends HDFSBlocksDistribution { + public Map getHostAndWeights() { + HashMap map = new HashMap(); + map.put("test", new HostAndWeight(null, 100)); + return map; + } + + } + + @Test + public void testAdd() throws Exception { + HDFSBlocksDistribution distribution = new HDFSBlocksDistribution(); + distribution.add(new MockHDFSBlocksDistribution()); + assertEquals("Expecting no hosts weights", 0, distribution.getHostAndWeights().size()); + distribution.addHostsAndBlockWeight(new String[]{"test"}, 10); + assertEquals("Should be one host", 1, 
distribution.getHostAndWeights().size()); + distribution.add(new MockHDFSBlocksDistribution()); + assertEquals("Should be one host", 1, distribution.getHostAndWeights().size()); + assertEquals("Total weight should be 10", 10, distribution.getUniqueBlocksTotalWeight()); + } +}