diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index 5d21ea3204c..d361e558562 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -34,25 +34,6 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index ca4bf60fcf2..f365db33119 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -379,7 +379,7 @@ public class ClusterStatus extends VersionedWritable {
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) {
     Map<ServerName, ServerLoad> servers = null;
-    if (!proto.getLiveServersList().isEmpty()) {
+    if (proto.getLiveServersList() != null) {
       servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
       for (LiveServerInfo lsi : proto.getLiveServersList()) {
         servers.put(ProtobufUtil.toServerName(
@@ -388,7 +388,7 @@
            lsi.getServer()), new ServerLoad(lsi.getServerLoad()));
     }
     Collection<ServerName> deadServers = null;
-    if (!proto.getDeadServersList().isEmpty()) {
+    if (proto.getDeadServersList() != null) {
       deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
       for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
         deadServers.add(ProtobufUtil.toServerName(sn));
@@ -396,7 +396,7 @@
     }
     Collection<ServerName> backupMasters = null;
-    if (!proto.getBackupMastersList().isEmpty()) {
+    if (proto.getBackupMastersList() != null) {
       backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
       for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
         backupMasters.add(ProtobufUtil.toServerName(sn));
@@ -404,7 +404,7 @@
     }
     Map<String, RegionState> rit = null;
-    if (!proto.getRegionsInTransitionList().isEmpty()) {
+    if (proto.getRegionsInTransitionList() != null) {
       rit = new HashMap<String, RegionState>(proto.getRegionsInTransitionList().size());
       for (RegionInTransition region : proto.getRegionsInTransitionList()) {
         String key = new String(region.getSpec().getValue().toByteArray());
@@ -414,7 +414,7 @@
     }
     String[] masterCoprocessors = null;
-    if (!proto.getMasterCoprocessorsList().isEmpty()) {
+    if (proto.getMasterCoprocessorsList() != null) {
       final int numMasterCoprocessors = proto.getMasterCoprocessorsCount();
       masterCoprocessors = new String[numMasterCoprocessors];
       for (int i = 0; i < numMasterCoprocessors; i++) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 31f273e59c9..2fd0958d6c7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -801,7 +801,6 @@ public class ZKUtil {
    * @throws KeeperException if unexpected zookeeper exception
    * @deprecated Unused
    */
-  @Deprecated
   public static List<NodeAndData> getChildDataAndWatchForNewChildren(
       ZooKeeperWatcher zkw, String baseNode) throws KeeperException {
     List<String> nodes =
@@ -834,7 +833,6 @@ public class ZKUtil {
    * @throws KeeperException.BadVersionException if version mismatch
    * @deprecated Unused
    */
-  @Deprecated
   public static void updateExistingNodeData(ZooKeeperWatcher zkw, String znode, byte [] data, int
       expectedVersion) throws KeeperException {
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 8a25cd440c2..2c51cd4ef53 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -41,32 +41,13 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 781f5befba1..52c3163f5a8 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -31,32 +31,13 @@
   <description>Examples of HBase usage</description>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index a7ad0abf187..ce59c6845c3 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -35,26 +35,7 @@
-    <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
+    <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml
index e845520399b..ec0ab21e0cd 100644
--- a/hbase-hadoop2-compat/pom.xml
+++ b/hbase-hadoop2-compat/pom.xml
@@ -34,37 +34,18 @@ limitations under the License.
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 96aedc96926..f4b6b2efea0 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -117,25 +117,6 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
index 665480635f3..2c0dd365d65 100644
--- a/hbase-prefix-tree/pom.xml
+++ b/hbase-prefix-tree/pom.xml
@@ -33,37 +33,18 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+      </plugin>
       <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
index 9e27942cf2a..5bd44947e2f 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
@@ -43,7 +43,7 @@ public class TestKeyValueTool {

   @Parameters
   public static Collection<Object[]> parameters() {
-    return TestRowData.InMemory.getAllAsObjectArray();
+    return new TestRowData.InMemory().getAllAsObjectArray();
   }

   private TestRowData rows;
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
index 55d3d221c7e..20303faa6f1 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
@@ -52,7 +52,7 @@ public class TestPrefixTreeSearcher {

   @Parameters
   public static Collection<Object[]> parameters() {
-    return TestRowData.InMemory.getAllAsObjectArray();
+    return new TestRowData.InMemory().getAllAsObjectArray();
   }

   protected TestRowData rows;
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
index 4bf60e03df3..2eb897f9d4b 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
@@ -57,7 +57,7 @@ public interface TestRowData {

   void individualSearcherAssertions(CellSearcher searcher);

-  static class InMemory {
+  class InMemory {

     /*
      * The following are different styles of data that the codec may encounter. Having these small
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 72aadb1715d..33883c25319 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -53,27 +53,13 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
index 660733d6225..afc8a090cfa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java
@@ -238,7 +238,6 @@ public class HTableWrapper implements HTableInterface {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    *   retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
-  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions)
       throws IOException, InterruptedException {
@@ -258,7 +257,6 @@ public class HTableWrapper implements HTableInterface {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
-  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback)
       throws IOException, InterruptedException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index e526d63278c..474e3981ef2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -118,7 +118,6 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead
    */
-  @Deprecated
   void preFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;

   /**
@@ -139,7 +138,6 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead.
    */
-  @Deprecated
   void postFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;

   /**
@@ -341,7 +339,6 @@ public interface RegionObserver extends Coprocessor {
    * @deprecated Use preSplit(
    *     final ObserverContext<RegionCoprocessorEnvironment> c, byte[] splitRow)
    */
-  @Deprecated
   void preSplit(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;

   /**
@@ -362,7 +359,6 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated Use postCompleteSplit() instead
    */
-  @Deprecated
   void postSplit(final ObserverContext<RegionCoprocessorEnvironment> c, final HRegion l,
       final HRegion r) throws IOException;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
index aa54a56744d..1eee2c4000e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
@@ -80,7 +80,7 @@ public class OfflineMetaRepair {
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
       if (cmd.equals("-details")) {
-        HBaseFsck.setDisplayFullReport();
+        fsck.setDisplayFullReport();
       } else if (cmd.equals("-base")) {
         if (i == args.length - 1) {
           System.err.println("OfflineMetaRepair: -base needs an HDFS path.");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 37563435923..18bc731fe6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -52,7 +52,6 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 * like an HBaseConfiguration and filesystem.
 * @deprecated Write junit4 unit tests using {@link HBaseTestingUtility}
 */
-@Deprecated
 public abstract class HBaseTestCase extends TestCase {
   private static final Log LOG = LogFactory.getLog(HBaseTestCase.class);
@@ -112,12 +111,12 @@ public abstract class HBaseTestCase extends TestCase {
     }
     try {
       if (localfs) {
-        testDir = getUnitTestdir(getName());
+        this.testDir = getUnitTestdir(getName());
         if (fs.exists(testDir)) {
           fs.delete(testDir, true);
         }
       } else {
-        testDir = FSUtils.getRootDir(conf);
+        this.testDir = FSUtils.getRootDir(conf);
       }
     } catch (Exception e) {
       LOG.fatal("error during setup", e);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
index 061068cbaa9..81db6b40e96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
@@ -198,7 +198,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
     // Test (part of the) output that should have be printed by master when it aborts:
     // (namely the part that shows the set of loaded coprocessors).
     // In this test, there is only a single coprocessor (BuggyMasterObserver).
-    assertTrue(HMaster.getLoadedCoprocessors().
+    assertTrue(master.getLoadedCoprocessors().
       contains(TestMasterCoprocessorExceptionWithAbort.BuggyMasterObserver.class.getName()));

     CreateTableThread createTableThread = new CreateTableThread(UTIL);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index de842926670..9672764b5e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -39,7 +39,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
@@ -373,7 +372,7 @@ public class TestHeapSize {
     byte[] row = new byte[] { 0 };

     cl = Put.class;
-    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = new Put(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
@@ -383,7 +382,7 @@ public class TestHeapSize {
     }

     cl = Delete.class;
-    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = new Delete(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index c1a506199e9..e31ebb90daa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -60,7 +60,8 @@ public class TestScannerSelectionUsingTTL {
   private static final Log LOG = LogFactory.getLog(TestScannerSelectionUsingTTL.class);

-  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
+  private static final HBaseTestingUtility TEST_UTIL =
+      new HBaseTestingUtility().createLocalHTU();
   private static TableName TABLE = TableName.valueOf("myTable");
   private static String FAMILY = "myCF";
   private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index 63055aa22e1..b7be1bb1878 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -71,7 +71,7 @@ public class TestSeekTo extends HBaseTestCase {
   }

   Path makeNewFile(TagUsage tagUsage) throws IOException {
-    Path ncTFile = new Path(testDir, "basic.hfile");
+    Path ncTFile = new Path(this.testDir, "basic.hfile");
     if (tagUsage != TagUsage.NO_TAG) {
       conf.setInt("hfile.format.version", 3);
     } else {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index bf4eb0997b1..9bdebe6fa90 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -795,7 +795,7 @@ public class TestHFileOutputFormat {
     HTable table = Mockito.mock(HTable.class);
     HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
     Mockito.doReturn(htd).when(table).getTableDescriptor();
-    for (HColumnDescriptor hcd: HBaseTestingUtility.generateColumnDescriptors()) {
+    for (HColumnDescriptor hcd: this.util.generateColumnDescriptors()) {
       htd.addFamily(hcd);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
index 8d7e2d38e10..903e0c0667d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
@@ -77,42 +77,37 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
     Configuration conf = UTIL.getConfiguration();
     HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution();

-    Assert.assertEquals(Lists.newArrayList(),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList(), tsif.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));

     blockDistribution = new HDFSBlocksDistribution();
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 10);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 7);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 5);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 2);
     Assert.assertEquals(Lists.newArrayList("h1", "h2"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+      tsif.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 3);
     Assert.assertEquals(Lists.newArrayList("h2", "h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+      tsif.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 6);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 9);

     Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4", "h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+      tsif.getBestLocations(conf, blockDistribution));
   }

   public static enum TestTableSnapshotCounters {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index b81c4f9f38b..04e35165c43 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -97,7 +97,7 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);

     HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
@@ -148,7 +148,7 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReference() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);

     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -192,9 +192,9 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, testDir);
+    FSUtils.setRootDir(testConf, this.testDir);
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();

     // Make a store file and write data to it.
@@ -233,12 +233,12 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReferenceToHFileLink() throws IOException {
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, testDir);
+    FSUtils.setRootDir(testConf, this.testDir);

     // adding legal table name chars to verify regex handles it.
     HRegionInfo hri = new HRegionInfo(TableName.valueOf("_original-evil-name"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);

     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it. <root>/<tablename>/<region>/<cf>/<file>
@@ -252,7 +252,7 @@ public class TestStoreFile extends HBaseTestCase {
     // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
     HRegionInfo hriClone = new HRegionInfo(TableName.valueOf("clone"));
     HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()),
+      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()),
       hriClone);
     Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
     HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
@@ -269,7 +269,7 @@ public class TestStoreFile extends HBaseTestCase {
     Path pathB = splitStoreFile(cloneRegionFs, splitHriB, TEST_FAMILY, f, SPLITKEY, false);// bottom

     // OK test the thing
-    FSUtils.logFileSystemState(fs, testDir, LOG);
+    FSUtils.logFileSystemState(fs, this.testDir, LOG);

     // There is a case where a file with the hfilelink pattern is actually a daughter
     // reference to a hfile link. This code in StoreFile that handles this case.
@@ -774,7 +774,7 @@ public class TestStoreFile extends HBaseTestCase {
     Scan scan = new Scan();

     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path storedir = new Path(new Path(testDir, "7e0102"), "familyname");
+    Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path dir = new Path(storedir, "1234567890");
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -820,7 +820,7 @@ public class TestStoreFile extends HBaseTestCase {
     Configuration conf = this.conf;

     // Find a home for our files (regiondir ("7e0102") and familyname).
-    Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");
+    Path baseDir = new Path(new Path(this.testDir, "7e0102"),"twoCOWEOC");

     // Grab the block cache and get the initial hit/miss counts
     BlockCache bc = new CacheConfig(conf).getBlockCache();
@@ -990,7 +990,7 @@ public class TestStoreFile extends HBaseTestCase {
    */
   public void testDataBlockEncodingMetaData() throws IOException {
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
+    Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path path = new Path(dir, "1234567890");

     DataBlockEncoding dataBlockEncoderAlgo =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
index 21c8a75c27b..2f7051ebe1b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
@@ -155,8 +155,6 @@ public class TestHBaseFsck {
         TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager();
     regionStates = assignmentManager.getRegionStates();
     TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, true);
-
-    HBaseFsck.setDisplayFullReport();
   }

   @AfterClass
@@ -968,6 +966,7 @@ public class TestHBaseFsck {
     // fix the problem.
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setFixAssignments(true);
     fsck.setFixMeta(true);
@@ -1559,6 +1558,7 @@ public class TestHBaseFsck {
     // fix lingering split parent
     hbck = new HBaseFsck(conf);
     hbck.connect();
+    hbck.setDisplayFullReport(); // i.e. -details
     hbck.setTimeLag(0);
     hbck.setFixSplitParents(true);
     hbck.onlineHbck();
@@ -1813,6 +1813,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking report the same errors
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1822,6 +1823,7 @@ public class TestHBaseFsck {
     // verify that fixAssignments works fine with noHdfsChecking
     fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixAssignments(true);
@@ -1861,6 +1863,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking report the same errors
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1870,6 +1873,7 @@ public class TestHBaseFsck {
     // verify that fixMeta doesn't work with noHdfsChecking
     fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixAssignments(true);
@@ -1923,6 +1927,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking can't detect ORPHAN_HDFS_REGION
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1932,6 +1937,7 @@ public class TestHBaseFsck {
     // verify that fixHdfsHoles doesn't work with noHdfsChecking
     fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixHdfsHoles(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index 8657c06d0a8..10d7f0c6352 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -147,14 +147,15 @@ public class TestMergeTool extends HBaseTestCase {
     try {
       // Create meta region
       createMetaRegion();
-      new FSTableDescriptors(this.conf, this.fs, testDir).createTableDescriptor(
+      new FSTableDescriptors(this.conf, this.fs, this.testDir).createTableDescriptor(
          new TableDescriptor(this.desc));
      /*
       * Create the regions we will merge
       */
      for (int i = 0; i < sourceRegions.length; i++) {
        regions[i] =
-          HRegion.createHRegion(this.sourceRegions[i], testDir, this.conf, this.desc);
+          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
+            this.desc);
        /*
         * Insert data
         */
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
index 3b0f459d8ca..1f6ec70d585 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
@@ -50,7 +50,7 @@ public class HbckTestingUtil {
       TableName table) throws Exception {
     HBaseFsck fsck = new HBaseFsck(conf, exec);
     fsck.connect();
-    HBaseFsck.setDisplayFullReport(); // i.e. -details
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setFixAssignments(fixAssignments);
     fsck.setFixMeta(fixMeta);
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 75722137e7e..79ebdca6cd6 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -50,32 +50,13 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 0f64d002b48..0815f3ac913 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -46,25 +46,6 @@
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java
index 400f10f0013..45578c83f8d 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/HTablePool.java
@@ -235,7 +235,6 @@ public class HTablePool implements Closeable {
    *          the proxy table user got from pool
    * @deprecated
    */
-  @Deprecated
   public void putTable(HTableInterface table) throws IOException {
     // we need to be sure nobody puts a proxy implementation in the pool
     // but if the client code is not updated
@@ -396,7 +395,6 @@ public class HTablePool implements Closeable {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    *   retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
-  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions) throws IOException,
       InterruptedException {
@@ -590,7 +588,6 @@ public class HTablePool implements Closeable {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
-  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions, Callback<R> callback)
       throws IOException, InterruptedException {
diff --git a/pom.xml b/pom.xml
index c4e5b663b61..1b97e380a0c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -441,7 +441,7 @@
       <plugin>
         <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.2</version>
+        <version>2.5.1</version>
         <configuration>
           <source>${compileSource}</source>
           <target>${compileSource}</target>
@@ -449,23 +449,6 @@
           <showDeprecation>false</showDeprecation>
           <compilerArgument>-Xlint:-options</compilerArgument>
         </configuration>
-        <dependencies>
-          <dependency>
-            <groupId>com.google.errorprone</groupId>
-            <artifactId>error_prone_core</artifactId>
-            <version>1.1.1</version>
-          </dependency>
-          <dependency>
-            <groupId>org.codehaus.plexus</groupId>
-            <artifactId>plexus-compiler-javac</artifactId>
-            <version>2.3</version>
-          </dependency>
-          <dependency>
-            <groupId>org.codehaus.plexus</groupId>
-            <artifactId>plexus-compiler-javac-errorprone</artifactId>
-            <version>2.3</version>
-          </dependency>
-        </dependencies>
       </plugin>