diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index d361e558562..5d21ea3204c 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -34,6 +34,25 @@
   <build>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index f365db33119..50eda7a8a7b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -379,47 +379,37 @@ public class ClusterStatus extends VersionedWritable {
 
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) {
     Map<ServerName, ServerLoad> servers = null;
-    if (proto.getLiveServersList() != null) {
-      servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
-      for (LiveServerInfo lsi : proto.getLiveServersList()) {
-        servers.put(ProtobufUtil.toServerName(
-            lsi.getServer()), new ServerLoad(lsi.getServerLoad()));
-      }
+    servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
+    for (LiveServerInfo lsi : proto.getLiveServersList()) {
+      servers.put(ProtobufUtil.toServerName(
+        lsi.getServer()), new ServerLoad(lsi.getServerLoad()));
     }
 
     Collection<ServerName> deadServers = null;
-    if (proto.getDeadServersList() != null) {
-      deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
-      for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
-        deadServers.add(ProtobufUtil.toServerName(sn));
-      }
+    deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
+    for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
+      deadServers.add(ProtobufUtil.toServerName(sn));
     }
 
     Collection<ServerName> backupMasters = null;
-    if (proto.getBackupMastersList() != null) {
-      backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
-      for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
-        backupMasters.add(ProtobufUtil.toServerName(sn));
-      }
+    backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
+    for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
+      backupMasters.add(ProtobufUtil.toServerName(sn));
     }
 
     Map<String, RegionState> rit = null;
-    if (proto.getRegionsInTransitionList() != null) {
-      rit = new HashMap<String, RegionState>(proto.getRegionsInTransitionList().size());
-      for (RegionInTransition region : proto.getRegionsInTransitionList()) {
-        String key = new String(region.getSpec().getValue().toByteArray());
-        RegionState value = RegionState.convert(region.getRegionState());
-        rit.put(key, value);
-      }
+    rit = new HashMap<String, RegionState>(proto.getRegionsInTransitionList().size());
+    for (RegionInTransition region : proto.getRegionsInTransitionList()) {
+      String key = new String(region.getSpec().getValue().toByteArray());
+      RegionState value = RegionState.convert(region.getRegionState());
+      rit.put(key, value);
     }
 
     String[] masterCoprocessors = null;
-    if (proto.getMasterCoprocessorsList() != null) {
-      final int numMasterCoprocessors = proto.getMasterCoprocessorsCount();
-      masterCoprocessors = new String[numMasterCoprocessors];
-      for (int i = 0; i < numMasterCoprocessors; i++) {
-        masterCoprocessors[i] = proto.getMasterCoprocessors(i).getName();
-      }
+    final int numMasterCoprocessors = proto.getMasterCoprocessorsCount();
+    masterCoprocessors = new String[numMasterCoprocessors];
+    for (int i = 0; i < numMasterCoprocessors; i++) {
+      masterCoprocessors[i] = proto.getMasterCoprocessors(i).getName();
     }
 
     return new ClusterStatus(proto.getHbaseVersion().getVersion(),
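
Note on the ClusterStatus.convert() change: protobuf-generated getters for repeated fields return an empty list, never null, so the deleted null guards were dead code -- the sort of always-true condition the compile-time checks introduced by this patch complain about. A minimal, self-contained sketch of that invariant; FakeMessage is a hand-written stand-in for a generated message, not HBase or protobuf code:

    import java.util.Collections;
    import java.util.List;

    // Stand-in for a protobuf-generated message: like generated code, the
    // repeated-field getter returns a (possibly empty) list, never null.
    class FakeMessage {
      private final List<String> names;

      FakeMessage(List<String> names) {
        this.names = (names == null) ? Collections.<String>emptyList() : names;
      }

      List<String> getNamesList() {
        return names; // never null by construction
      }
    }

    public class RepeatedFieldDemo {
      public static void main(String[] args) {
        FakeMessage msg = new FakeMessage(null);
        // Mirrors the guards deleted above: this condition is always true.
        if (msg.getNamesList() != null) {
          System.out.println("names: " + msg.getNamesList().size());
        }
      }
    }
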
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 2fd0958d6c7..31f273e59c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -801,6 +801,7 @@ public class ZKUtil {
    * @throws KeeperException if unexpected zookeeper exception
    * @deprecated Unused
    */
+  @Deprecated
   public static List<NodeAndData> getChildDataAndWatchForNewChildren(
       ZooKeeperWatcher zkw, String baseNode) throws KeeperException {
     List<String> nodes =
@@ -833,6 +834,7 @@ public class ZKUtil {
    * @throws KeeperException.BadVersionException if version mismatch
    * @deprecated Unused
    */
+  @Deprecated
   public static void updateExistingNodeData(ZooKeeperWatcher zkw, String znode,
       byte [] data, int expectedVersion)
   throws KeeperException {
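
The @Deprecated annotations added here (and to HTableWrapper, RegionObserver, HBaseTestCase and HTablePool further down) pair the compiler-visible annotation with the pre-existing @deprecated javadoc tag; error-prone includes a check for this mismatch (DepAnn), which reports javadoc-only deprecation. A small sketch of the pattern -- the class and method names are illustrative, not from the patch:

    public class DeprecationDemo {
      /**
       * The javadoc tag alone is invisible to callers' compilers; the
       * annotation below is what actually triggers deprecation warnings.
       * @deprecated Unused; use {@link #newHelper()} instead.
       */
      @Deprecated
      public static void oldHelper() {
        newHelper();
      }

      public static void newHelper() {
        System.out.println("use me instead");
      }
    }
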
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 2c51cd4ef53..8a25cd440c2 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -41,13 +41,32 @@
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 52c3163f5a8..781f5befba1 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -31,13 +31,32 @@
   <description>Examples of HBase usage</description>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index ce59c6845c3..a7ad0abf187 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -35,7 +35,26 @@
-
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml
index ec0ab21e0cd..e845520399b 100644
--- a/hbase-hadoop2-compat/pom.xml
+++ b/hbase-hadoop2-compat/pom.xml
@@ -34,18 +34,37 @@ limitations under the License.
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <!-- Make a jar and put the sources in the jar -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <!-- Make a jar and put the sources in the jar -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index f4b6b2efea0..96aedc96926 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -117,6 +117,25 @@
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
index 2c0dd365d65..665480635f3 100644
--- a/hbase-prefix-tree/pom.xml
+++ b/hbase-prefix-tree/pom.xml
@@ -33,18 +33,37 @@
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <!-- Make a jar and put the sources in the jar -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <!-- Make a jar and put the sources in the jar -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
index 5bd44947e2f..9e27942cf2a 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
@@ -43,7 +43,7 @@ public class TestKeyValueTool {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    return new TestRowData.InMemory().getAllAsObjectArray();
+    return TestRowData.InMemory.getAllAsObjectArray();
   }
 
   private TestRowData rows;
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
index 20303faa6f1..55d3d221c7e 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
@@ -52,7 +52,7 @@ public class TestPrefixTreeSearcher {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    return new TestRowData.InMemory().getAllAsObjectArray();
+    return TestRowData.InMemory.getAllAsObjectArray();
   }
 
   protected TestRowData rows;
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
index 2eb897f9d4b..4bf60e03df3 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
@@ -57,7 +57,7 @@ public interface TestRowData {
 
   void individualSearcherAssertions(CellSearcher searcher);
 
-  class InMemory {
+  static class InMemory {
 
     /*
     * The following are different styles of data that the codec may encounter.  Having these small
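
Making InMemory an explicitly static nested class -- and switching the two test call sites above from new TestRowData.InMemory().getAllAsObjectArray() to TestRowData.InMemory.getAllAsObjectArray() -- records that the helper needs no enclosing instance. A compact sketch of the same before/after, with made-up names:

    import java.util.Arrays;
    import java.util.Collection;

    public class StaticNestedDemo {
      // The nested helper touches no state of StaticNestedDemo, so it is
      // declared static and its members are reached through the class name.
      static class InMemory {
        static Collection<String> getAll() {
          return Arrays.asList("row-a", "row-b");
        }
      }

      public static void main(String[] args) {
        // Preferred: static access through the class, as in the tests above.
        System.out.println(InMemory.getAll());
        // The old call sites did the equivalent of:
        //   new InMemory().getAll();
        // allocating an instance only to reach static behaviour.
      }
    }
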
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 33883c25319..72aadb1715d 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -53,13 +53,27 @@
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
index afc8a090cfa..660733d6225 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
@@ -238,6 +238,7 @@ public class HTableWrapper implements HTableInterface {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    * retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
+  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions)
       throws IOException, InterruptedException {
@@ -257,6 +258,7 @@ public class HTableWrapper implements HTableInterface {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
+  @Deprecated
   @Override
  public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback)
       throws IOException, InterruptedException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index 474e3981ef2..e526d63278c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -118,6 +118,7 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead
    */
+  @Deprecated
   void preFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
 
   /**
@@ -138,6 +139,7 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead.
   */
+  @Deprecated
   void postFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
 
   /**
@@ -339,6 +341,7 @@ public interface RegionObserver extends Coprocessor {
    * @deprecated Use preSplit(
    *    final ObserverContext<RegionCoprocessorEnvironment> c, byte[] splitRow)
    */
+  @Deprecated
   void preSplit(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
 
   /**
@@ -359,6 +362,7 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated Use postCompleteSplit() instead
    */
+  @Deprecated
   void postSplit(final ObserverContext<RegionCoprocessorEnvironment> c, final HRegion l,
     final HRegion r) throws IOException;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 5763cff48d3..9373130c983 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -3743,7 +3743,7 @@ public class HBaseFsck extends Configured {
    * Display the full report from fsck. This displays all live and dead region
    * servers, and all known regions.
    */
-  public static void setDisplayFullReport() {
+  public void setDisplayFullReport() {
     details = true;
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 18bc731fe6f..37563435923 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
  * like an HBaseConfiguration and filesystem.
  * @deprecated Write junit4 unit tests using {@link HBaseTestingUtility}
  */
+@Deprecated
 public abstract class HBaseTestCase extends TestCase {
   private static final Log LOG = LogFactory.getLog(HBaseTestCase.class);
 
@@ -111,12 +112,12 @@ public abstract class HBaseTestCase extends TestCase {
     }
     try {
       if (localfs) {
-        this.testDir = getUnitTestdir(getName());
+        testDir = getUnitTestdir(getName());
         if (fs.exists(testDir)) {
           fs.delete(testDir, true);
         }
       } else {
-        this.testDir = FSUtils.getRootDir(conf);
+        testDir = FSUtils.getRootDir(conf);
       }
     } catch (Exception e) {
       LOG.fatal("error during setup", e);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
index 81db6b40e96..061068cbaa9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
@@ -198,7 +198,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
     // Test (part of the) output that should have be printed by master when it aborts:
     // (namely the part that shows the set of loaded coprocessors).
     // In this test, there is only a single coprocessor (BuggyMasterObserver).
-    assertTrue(master.getLoadedCoprocessors().
+    assertTrue(HMaster.getLoadedCoprocessors().
       contains(TestMasterCoprocessorExceptionWithAbort.BuggyMasterObserver.class.getName()));
 
     CreateTableThread createTableThread = new CreateTableThread(UTIL);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 9672764b5e0..de842926670 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -39,6 +39,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
@@ -372,7 +373,7 @@ public class TestHeapSize {
     byte[] row = new byte[] { 0 };
 
     cl = Put.class;
-    actual = new Put(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
@@ -382,7 +383,7 @@ public class TestHeapSize {
     }
 
     cl = Delete.class;
-    actual = new Delete(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index e31ebb90daa..c1a506199e9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -60,8 +60,7 @@ public class TestScannerSelectionUsingTTL {
   private static final Log LOG =
       LogFactory.getLog(TestScannerSelectionUsingTTL.class);
 
-  private static final HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility().createLocalHTU();
+  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
   private static TableName TABLE = TableName.valueOf("myTable");
   private static String FAMILY = "myCF";
   private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index b7be1bb1878..63055aa22e1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -71,7 +71,7 @@ public class TestSeekTo extends HBaseTestCase {
   }
 
   Path makeNewFile(TagUsage tagUsage) throws IOException {
-    Path ncTFile = new Path(this.testDir, "basic.hfile");
+    Path ncTFile = new Path(testDir, "basic.hfile");
     if (tagUsage != TagUsage.NO_TAG) {
       conf.setInt("hfile.format.version", 3);
     } else {
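
The last few hunks -- HMaster.getLoadedCoprocessors(), Mutation.MUTATION_OVERHEAD, HBaseTestingUtility.createLocalHTU() -- are all the same mechanical fix: a static member that was reached through an instance is now reached through the declaring class, the pattern error-prone's static-access checks and most IDE inspections flag. (The this.testDir -> testDir rewrites here and in the store-file tests below read the same way, assuming the inherited field became static; its declaration is not part of these hunks.) A minimal illustration with invented names:

    public class StaticAccessDemo {
      static final long OVERHEAD = 64L;

      public static void main(String[] args) {
        // Discouraged: reads like an instance access, but resolves statically
        // against the compile-time type -- exactly the pattern fixed above.
        long viaInstance = new StaticAccessDemo().OVERHEAD;

        // Preferred: name the declaring class; no instance needed.
        long viaClass = StaticAccessDemo.OVERHEAD;

        System.out.println(viaInstance == viaClass); // true
      }
    }
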
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index 9bdebe6fa90..bf4eb0997b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -795,7 +795,7 @@ public class TestHFileOutputFormat {
     HTable table = Mockito.mock(HTable.class);
     HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
     Mockito.doReturn(htd).when(table).getTableDescriptor();
-    for (HColumnDescriptor hcd: this.util.generateColumnDescriptors()) {
+    for (HColumnDescriptor hcd: HBaseTestingUtility.generateColumnDescriptors()) {
       htd.addFamily(hcd);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
index 903e0c0667d..8d7e2d38e10 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
@@ -77,37 +77,42 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase
     Configuration conf = UTIL.getConfiguration();
 
     HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution();
-    Assert.assertEquals(Lists.newArrayList(), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList(),
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution = new HDFSBlocksDistribution();
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 10);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 7);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 5);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 2);
     Assert.assertEquals(Lists.newArrayList("h1", "h2"),
-      tsif.getBestLocations(conf, blockDistribution));
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 3);
     Assert.assertEquals(Lists.newArrayList("h2", "h1"),
-      tsif.getBestLocations(conf, blockDistribution));
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
 
     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 6);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 9);
 
     Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4", "h1"),
-      tsif.getBestLocations(conf, blockDistribution));
+      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
   }
 
   public static enum TestTableSnapshotCounters {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index 04e35165c43..b81c4f9f38b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -97,7 +97,7 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
@@ -148,7 +148,7 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReference() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 
     // Make a store file and write data to it.
@@ -192,9 +192,9 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, this.testDir);
+    FSUtils.setRootDir(testConf, testDir);
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 
     // Make a store file and write data to it.
@@ -233,12 +233,12 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReferenceToHFileLink() throws IOException {
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, this.testDir);
+    FSUtils.setRootDir(testConf, testDir);
 
     // adding legal table name chars to verify regex handles it.
     HRegionInfo hri = new HRegionInfo(TableName.valueOf("_original-evil-name"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
 
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.  <root>/<tablename>/<region>/<cf>/<file>
@@ -252,7 +252,7 @@ public class TestStoreFile extends HBaseTestCase {
     // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
     HRegionInfo hriClone = new HRegionInfo(TableName.valueOf("clone"));
     HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()),
+      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()),
       hriClone);
     Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
     HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
@@ -269,7 +269,7 @@ public class TestStoreFile extends HBaseTestCase {
     Path pathB = splitStoreFile(cloneRegionFs, splitHriB, TEST_FAMILY, f, SPLITKEY, false);// bottom
 
     // OK test the thing
-    FSUtils.logFileSystemState(fs, this.testDir, LOG);
+    FSUtils.logFileSystemState(fs, testDir, LOG);
 
     // There is a case where a file with the hfilelink pattern is actually a daughter
     // reference to a hfile link.  This code in StoreFile that handles this case.
@@ -774,7 +774,7 @@ public class TestStoreFile extends HBaseTestCase {
     Scan scan = new Scan();
 
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
+    Path storedir = new Path(new Path(testDir, "7e0102"), "familyname");
     Path dir = new Path(storedir, "1234567890");
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -820,7 +820,7 @@ public class TestStoreFile extends HBaseTestCase {
     Configuration conf = this.conf;
 
     // Find a home for our files (regiondir ("7e0102") and familyname).
-    Path baseDir = new Path(new Path(this.testDir, "7e0102"),"twoCOWEOC");
+    Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");
 
     // Grab the block cache and get the initial hit/miss counts
     BlockCache bc = new CacheConfig(conf).getBlockCache();
@@ -990,7 +990,7 @@ public class TestStoreFile extends HBaseTestCase {
    */
   public void testDataBlockEncodingMetaData() throws IOException {
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
+    Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
     Path path = new Path(dir, "1234567890");
 
     DataBlockEncoding dataBlockEncoderAlgo =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index 10d7f0c6352..8657c06d0a8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -147,15 +147,14 @@ public class TestMergeTool extends HBaseTestCase {
     try {
       // Create meta region
       createMetaRegion();
-      new FSTableDescriptors(this.conf, this.fs, this.testDir).createTableDescriptor(
+      new FSTableDescriptors(this.conf, this.fs, testDir).createTableDescriptor(
           new TableDescriptor(this.desc));
       /*
        * Create the regions we will merge
        */
       for (int i = 0; i < sourceRegions.length; i++) {
         regions[i] =
-          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
-            this.desc);
+          HRegion.createHRegion(this.sourceRegions[i], testDir, this.conf, this.desc);
         /*
          * Insert data
          */
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 79ebdca6cd6..75722137e7e 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -50,13 +50,32 @@
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 0815f3ac913..0f64d002b48 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -46,6 +46,25 @@
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java
index 45578c83f8d..400f10f0013 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java
@@ -235,6 +235,7 @@ public class HTablePool implements Closeable {
    *          the proxy table user got from pool
    * @deprecated
    */
+  @Deprecated
   public void putTable(HTableInterface table) throws IOException {
     // we need to be sure nobody puts a proxy implementation in the pool
     // but if the client code is not updated
@@ -395,6 +396,7 @@ public class HTablePool implements Closeable {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    * retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
+  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions) throws IOException,
       InterruptedException {
@@ -588,6 +590,7 @@ public class HTablePool implements Closeable {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
+  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions, Callback<R> callback)
       throws IOException, InterruptedException {
diff --git a/pom.xml b/pom.xml
index 1b97e380a0c..c4e5b663b61 100644
--- a/pom.xml
+++ b/pom.xml
@@ -441,7 +441,7 @@
         <plugin>
           <artifactId>maven-compiler-plugin</artifactId>
-          <version>2.5.1</version>
+          <version>3.2</version>
           <configuration>
             <source>${compileSource}</source>
             <target>${compileSource}</target>
@@ -449,6 +449,23 @@
             <showDeprecation>false</showDeprecation>
             <compilerArgument>-Xlint:-options</compilerArgument>
           </configuration>
+          <dependencies>
+            <dependency>
+              <groupId>com.google.errorprone</groupId>
+              <artifactId>error_prone_core</artifactId>
+              <version>1.1.1</version>
+            </dependency>
+            <dependency>
+              <groupId>org.codehaus.plexus</groupId>
+              <artifactId>plexus-compiler-javac</artifactId>
+              <version>2.3</version>
+            </dependency>
+            <dependency>
+              <groupId>org.codehaus.plexus</groupId>
+              <artifactId>plexus-compiler-javac-errorprone</artifactId>
+              <version>2.3</version>
+            </dependency>
+          </dependencies>
         </plugin>
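
Taken together: the root pom upgrades maven-compiler-plugin to 3.2 and puts error_prone_core plus the two plexus compiler adapters on the plugin's classpath, and each opted-in module switches its default-compile (and usually default-testCompile; hbase-server notably opts in only its main sources) execution to the javac-with-errorprone compiler id. After that, an ordinary mvn clean install runs error-prone's checks during compilation, and a finding fails the build like any other compile error. As a rough illustration -- assuming the stock SelfEquals check -- code like the following would now be rejected at compile time:

    public class SelfEqualsDemo {
      public static void main(String[] args) {
        String name = "hbase";
        // error-prone (SelfEquals): an object is tested for equality to
        // itself; plain javac accepts this line without complaint.
        System.out.println(name.equals(name));
      }
    }
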