Revert "HBASE-11912 Catch some bad practices at compile time with error-prone"

This reverts commit 7ed0260eff.
Andrew Purtell 2014-10-25 08:33:43 -07:00
parent 7ed0260eff
commit ff5bc351b2
31 changed files with 103 additions and 315 deletions


@@ -34,25 +34,6 @@
   <build>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -379,7 +379,7 @@ public class ClusterStatus extends VersionedWritable {
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) {
     Map<ServerName, ServerLoad> servers = null;
-    if (!proto.getLiveServersList().isEmpty()) {
+    if (proto.getLiveServersList() != null) {
       servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
       for (LiveServerInfo lsi : proto.getLiveServersList()) {
         servers.put(ProtobufUtil.toServerName(
@@ -388,7 +388,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     Collection<ServerName> deadServers = null;
-    if (!proto.getDeadServersList().isEmpty()) {
+    if (proto.getDeadServersList() != null) {
       deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
       for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
         deadServers.add(ProtobufUtil.toServerName(sn));
@@ -396,7 +396,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     Collection<ServerName> backupMasters = null;
-    if (!proto.getBackupMastersList().isEmpty()) {
+    if (proto.getBackupMastersList() != null) {
       backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
       for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
         backupMasters.add(ProtobufUtil.toServerName(sn));
@@ -404,7 +404,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     Map<String, RegionState> rit = null;
-    if (!proto.getRegionsInTransitionList().isEmpty()) {
+    if (proto.getRegionsInTransitionList() != null) {
       rit = new HashMap<String, RegionState>(proto.getRegionsInTransitionList().size());
       for (RegionInTransition region : proto.getRegionsInTransitionList()) {
         String key = new String(region.getSpec().getValue().toByteArray());
@@ -414,7 +414,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     String[] masterCoprocessors = null;
-    if (!proto.getMasterCoprocessorsList().isEmpty()) {
+    if (proto.getMasterCoprocessorsList() != null) {
       final int numMasterCoprocessors = proto.getMasterCoprocessorsCount();
       masterCoprocessors = new String[numMasterCoprocessors];
       for (int i = 0; i < numMasterCoprocessors; i++) {
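All five hunks in ClusterStatus.convert() undo the same error-prone driven change: HBASE-11912 had replaced null checks on protobuf repeated-field getters with isEmpty() checks, since generated getFooList() accessors return a (possibly empty) list and never null, which makes the restored "!= null" guards always true. A minimal, self-contained sketch (plain Java, not HBase code) of that distinction:

    import java.util.Collections;
    import java.util.List;

    public class RepeatedFieldCheckDemo {
      // Stand-in for a protobuf-generated accessor: repeated-field getters
      // return a (possibly empty) list, never null.
      static List<String> getLiveServersList() {
        return Collections.emptyList();
      }

      public static void main(String[] args) {
        List<String> servers = getLiveServersList();
        // The restored null check is always true; only the emptiness check
        // distinguishes "no servers reported" from "servers present".
        System.out.println("servers != null   -> " + (servers != null));
        System.out.println("servers.isEmpty() -> " + servers.isEmpty());
      }
    }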


@@ -801,7 +801,6 @@ public class ZKUtil {
    * @throws KeeperException if unexpected zookeeper exception
    * @deprecated Unused
    */
-  @Deprecated
   public static List<NodeAndData> getChildDataAndWatchForNewChildren(
       ZooKeeperWatcher zkw, String baseNode) throws KeeperException {
     List<String> nodes =
@@ -834,7 +833,6 @@ public class ZKUtil {
    * @throws KeeperException.BadVersionException if version mismatch
    * @deprecated Unused
    */
-  @Deprecated
   public static void updateExistingNodeData(ZooKeeperWatcher zkw, String znode,
       byte [] data, int expectedVersion)
   throws KeeperException {
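This ZKUtil hunk, like the similar ones in HTableWrapper, RegionObserver, HBaseTestCase, and HTablePool below, drops the @Deprecated annotations that HBASE-11912 had added to members already carrying a @deprecated Javadoc tag; pairing the annotation with the tag is what checks such as javac's -Xlint:dep-ann and error-prone's DepAnn look for. A minimal sketch (not HBase code) of the pattern being reverted:

    public interface LegacyApi {
      /**
       * @deprecated Unused; callers should move to {@link #replacement()}.
       */
      @Deprecated            // annotation paired with the @deprecated Javadoc tag
      void legacyMethod();

      void replacement();
    }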


@@ -41,25 +41,6 @@
       </resource>
     </resources>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -31,25 +31,6 @@
   <description>Examples of HBase usage</description>
   <build>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -36,25 +36,6 @@
   <build>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -34,25 +34,6 @@ limitations under the License.
   <build>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -117,25 +117,6 @@
     </pluginManagement>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <!-- Run integration tests with mvn verify -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>


@@ -33,25 +33,6 @@
   <build>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -43,7 +43,7 @@ public class TestKeyValueTool {
   @Parameters
   public static Collection<Object[]> parameters() {
-    return TestRowData.InMemory.getAllAsObjectArray();
+    return new TestRowData.InMemory().getAllAsObjectArray();
   }
   private TestRowData rows;


@@ -52,7 +52,7 @@ public class TestPrefixTreeSearcher {
   @Parameters
   public static Collection<Object[]> parameters() {
-    return TestRowData.InMemory.getAllAsObjectArray();
+    return new TestRowData.InMemory().getAllAsObjectArray();
   }
   protected TestRowData rows;


@@ -57,7 +57,7 @@ public interface TestRowData {
   void individualSearcherAssertions(CellSearcher searcher);
-  static class InMemory {
+  class InMemory {
     /*
      * The following are different styles of data that the codec may encounter. Having these small


@@ -53,20 +53,6 @@
       </testResource>
     </testResources>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <!-- We can't check tests with default-testCompile because of generated
-               protobuf code (IncrementCounterProcessorTestProtos) -->
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -238,7 +238,6 @@ public class HTableWrapper implements HTableInterface {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    * retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
-  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions)
       throws IOException, InterruptedException {
@@ -258,7 +257,6 @@ public class HTableWrapper implements HTableInterface {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
-  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions,
       Batch.Callback<R> callback) throws IOException, InterruptedException {


@@ -118,7 +118,6 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead
    */
-  @Deprecated
   void preFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
   /**
@@ -139,7 +138,6 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead.
    */
-  @Deprecated
   void postFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
   /**
@@ -341,7 +339,6 @@ public interface RegionObserver extends Coprocessor {
    * @deprecated Use preSplit(
    *    final ObserverContext<RegionCoprocessorEnvironment> c, byte[] splitRow)
    */
-  @Deprecated
   void preSplit(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
   /**
@@ -362,7 +359,6 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated Use postCompleteSplit() instead
    */
-  @Deprecated
   void postSplit(final ObserverContext<RegionCoprocessorEnvironment> c, final HRegion l,
       final HRegion r) throws IOException;


@@ -80,7 +80,7 @@ public class OfflineMetaRepair {
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
       if (cmd.equals("-details")) {
-        HBaseFsck.setDisplayFullReport();
+        fsck.setDisplayFullReport();
       } else if (cmd.equals("-base")) {
         if (i == args.length - 1) {
           System.err.println("OfflineMetaRepair: -base needs an HDFS path.");


@@ -52,7 +52,6 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
  * like an HBaseConfiguration and filesystem.
  * @deprecated Write junit4 unit tests using {@link HBaseTestingUtility}
  */
-@Deprecated
 public abstract class HBaseTestCase extends TestCase {
   private static final Log LOG = LogFactory.getLog(HBaseTestCase.class);
@@ -112,12 +111,12 @@ public abstract class HBaseTestCase extends TestCase {
     }
     try {
       if (localfs) {
-        testDir = getUnitTestdir(getName());
+        this.testDir = getUnitTestdir(getName());
         if (fs.exists(testDir)) {
           fs.delete(testDir, true);
         }
       } else {
-        testDir = FSUtils.getRootDir(conf);
+        this.testDir = FSUtils.getRootDir(conf);
       }
     } catch (Exception e) {
       LOG.fatal("error during setup", e);


@@ -198,7 +198,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
     // Test (part of the) output that should have be printed by master when it aborts:
     // (namely the part that shows the set of loaded coprocessors).
     // In this test, there is only a single coprocessor (BuggyMasterObserver).
-    assertTrue(HMaster.getLoadedCoprocessors().
+    assertTrue(master.getLoadedCoprocessors().
       contains(TestMasterCoprocessorExceptionWithAbort.BuggyMasterObserver.class.getName()));
     CreateTableThread createTableThread = new CreateTableThread(UTIL);


@@ -39,7 +39,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
@@ -373,7 +372,7 @@ public class TestHeapSize {
     byte[] row = new byte[] { 0 };
     cl = Put.class;
-    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = new Put(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
@@ -383,7 +382,7 @@ public class TestHeapSize {
     }
     cl = Delete.class;
-    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = new Delete(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
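The TestHeapSize hunks go back to reading the static MUTATION_OVERHEAD field through a newly constructed Put or Delete instance instead of through the Mutation class, and drop the import that the class-qualified form needed. Both spellings read the same static field; qualifying a static member through an instance is simply one of the patterns static-analysis tools such as error-prone warn about. A small sketch (not HBase code, illustrative field value only):

    public class StaticViaInstanceDemo {
      static class Mutation {
        static final long MUTATION_OVERHEAD = 64L; // made-up value for illustration
      }

      static class Put extends Mutation {
      }

      public static void main(String[] args) {
        // Both expressions read the same static field; the instance is irrelevant.
        long viaInstance = new Put().MUTATION_OVERHEAD;
        long viaClass = Mutation.MUTATION_OVERHEAD;
        System.out.println(viaInstance == viaClass); // prints true
      }
    }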


@@ -60,7 +60,8 @@ public class TestScannerSelectionUsingTTL {
   private static final Log LOG =
       LogFactory.getLog(TestScannerSelectionUsingTTL.class);
-  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
+  private static final HBaseTestingUtility TEST_UTIL =
+      new HBaseTestingUtility().createLocalHTU();
   private static TableName TABLE = TableName.valueOf("myTable");
   private static String FAMILY = "myCF";
   private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);


@@ -71,7 +71,7 @@ public class TestSeekTo extends HBaseTestCase {
   }
   Path makeNewFile(TagUsage tagUsage) throws IOException {
-    Path ncTFile = new Path(testDir, "basic.hfile");
+    Path ncTFile = new Path(this.testDir, "basic.hfile");
     if (tagUsage != TagUsage.NO_TAG) {
       conf.setInt("hfile.format.version", 3);
     } else {


@@ -795,7 +795,7 @@ public class TestHFileOutputFormat {
     HTable table = Mockito.mock(HTable.class);
     HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
     Mockito.doReturn(htd).when(table).getTableDescriptor();
-    for (HColumnDescriptor hcd: HBaseTestingUtility.generateColumnDescriptors()) {
+    for (HColumnDescriptor hcd: this.util.generateColumnDescriptors()) {
       htd.addFamily(hcd);
     }


@@ -77,42 +77,37 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
     Configuration conf = UTIL.getConfiguration();
     HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution();
-    Assert.assertEquals(Lists.newArrayList(),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList(), tsif.getBestLocations(conf, blockDistribution));
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
     blockDistribution = new HDFSBlocksDistribution();
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 10);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 7);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 5);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 2);
     Assert.assertEquals(Lists.newArrayList("h1", "h2"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+      tsif.getBestLocations(conf, blockDistribution));
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 3);
     Assert.assertEquals(Lists.newArrayList("h2", "h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+      tsif.getBestLocations(conf, blockDistribution));
     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 6);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 9);
     Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4", "h1"),
-      TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
+      tsif.getBestLocations(conf, blockDistribution));
   }
   public static enum TestTableSnapshotCounters {


@@ -97,7 +97,7 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri =
         new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
@@ -148,7 +148,7 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReference() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -192,9 +192,9 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, testDir);
+    FSUtils.setRootDir(testConf, this.testDir);
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -233,12 +233,12 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReferenceToHFileLink() throws IOException {
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, testDir);
+    FSUtils.setRootDir(testConf, this.testDir);
     // adding legal table name chars to verify regex handles it.
     HRegionInfo hri = new HRegionInfo(TableName.valueOf("_original-evil-name"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
@@ -252,7 +252,7 @@ public class TestStoreFile extends HBaseTestCase {
     // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
     HRegionInfo hriClone = new HRegionInfo(TableName.valueOf("clone"));
     HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()),
+      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()),
       hriClone);
     Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
     HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
@@ -269,7 +269,7 @@ public class TestStoreFile extends HBaseTestCase {
     Path pathB = splitStoreFile(cloneRegionFs, splitHriB, TEST_FAMILY, f, SPLITKEY, false);// bottom
     // OK test the thing
-    FSUtils.logFileSystemState(fs, testDir, LOG);
+    FSUtils.logFileSystemState(fs, this.testDir, LOG);
     // There is a case where a file with the hfilelink pattern is actually a daughter
     // reference to a hfile link. This code in StoreFile that handles this case.
@@ -774,7 +774,7 @@ public class TestStoreFile extends HBaseTestCase {
     Scan scan = new Scan();
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path storedir = new Path(new Path(testDir, "7e0102"), "familyname");
+    Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path dir = new Path(storedir, "1234567890");
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -820,7 +820,7 @@ public class TestStoreFile extends HBaseTestCase {
     Configuration conf = this.conf;
     // Find a home for our files (regiondir ("7e0102") and familyname).
-    Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");
+    Path baseDir = new Path(new Path(this.testDir, "7e0102"),"twoCOWEOC");
     // Grab the block cache and get the initial hit/miss counts
     BlockCache bc = new CacheConfig(conf).getBlockCache();
@@ -990,7 +990,7 @@ public class TestStoreFile extends HBaseTestCase {
    */
   public void testDataBlockEncodingMetaData() throws IOException {
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
+    Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
     Path path = new Path(dir, "1234567890");
     DataBlockEncoding dataBlockEncoderAlgo =


@@ -155,8 +155,6 @@ public class TestHBaseFsck {
         TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager();
     regionStates = assignmentManager.getRegionStates();
     TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, true);
-    HBaseFsck.setDisplayFullReport();
   }
   @AfterClass
@@ -968,6 +966,7 @@ public class TestHBaseFsck {
     // fix the problem.
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setFixAssignments(true);
     fsck.setFixMeta(true);
@@ -1559,6 +1558,7 @@ public class TestHBaseFsck {
     // fix lingering split parent
     hbck = new HBaseFsck(conf);
     hbck.connect();
+    hbck.setDisplayFullReport(); // i.e. -details
     hbck.setTimeLag(0);
     hbck.setFixSplitParents(true);
     hbck.onlineHbck();
@@ -1813,6 +1813,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking report the same errors
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1822,6 +1823,7 @@ public class TestHBaseFsck {
     // verify that fixAssignments works fine with noHdfsChecking
     fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixAssignments(true);
@@ -1861,6 +1863,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking report the same errors
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1870,6 +1873,7 @@ public class TestHBaseFsck {
     // verify that fixMeta doesn't work with noHdfsChecking
     fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixAssignments(true);
@@ -1923,6 +1927,7 @@ public class TestHBaseFsck {
     // verify that noHdfsChecking can't detect ORPHAN_HDFS_REGION
     HBaseFsck fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.onlineHbck();
@@ -1932,6 +1937,7 @@ public class TestHBaseFsck {
     // verify that fixHdfsHoles doesn't work with noHdfsChecking
     fsck = new HBaseFsck(conf);
     fsck.connect();
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setCheckHdfs(false);
     fsck.setFixHdfsHoles(true);


@@ -147,14 +147,15 @@ public class TestMergeTool extends HBaseTestCase {
     try {
       // Create meta region
       createMetaRegion();
-      new FSTableDescriptors(this.conf, this.fs, testDir).createTableDescriptor(
+      new FSTableDescriptors(this.conf, this.fs, this.testDir).createTableDescriptor(
           new TableDescriptor(this.desc));
       /*
        * Create the regions we will merge
        */
       for (int i = 0; i < sourceRegions.length; i++) {
         regions[i] =
-          HRegion.createHRegion(this.sourceRegions[i], testDir, this.conf, this.desc);
+          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
+            this.desc);
         /*
          * Insert data
          */


@@ -50,7 +50,7 @@ public class HbckTestingUtil {
       TableName table) throws Exception {
     HBaseFsck fsck = new HBaseFsck(conf, exec);
     fsck.connect();
-    HBaseFsck.setDisplayFullReport(); // i.e. -details
+    fsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setFixAssignments(fixAssignments);
     fsck.setFixMeta(fixMeta);


@@ -50,25 +50,6 @@
       </testResource>
     </testResources>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -46,25 +46,6 @@
     </resources>
     <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-compile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-          <execution>
-            <id>default-testCompile</id>
-            <configuration>
-              <compilerId>javac-with-errorprone</compilerId>
-              <forceJavacCompilerUse>true</forceJavacCompilerUse>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>


@@ -235,7 +235,6 @@ public class HTablePool implements Closeable {
    *          the proxy table user got from pool
    * @deprecated
    */
-  @Deprecated
   public void putTable(HTableInterface table) throws IOException {
     // we need to be sure nobody puts a proxy implementation in the pool
     // but if the client code is not updated
@@ -396,7 +395,6 @@ public class HTablePool implements Closeable {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    * retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
-  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions) throws IOException,
       InterruptedException {
@@ -590,7 +588,6 @@ public class HTablePool implements Closeable {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
-  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions,
       Callback<R> callback) throws IOException, InterruptedException {

pom.xml

@@ -441,7 +441,7 @@
       </plugin>
       <plugin>
         <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.2</version>
+        <version>2.5.1</version>
         <configuration>
           <source>${compileSource}</source>
           <target>${compileSource}</target>
@@ -449,23 +449,6 @@
           <showDeprecation>false</showDeprecation>
           <compilerArgument>-Xlint:-options</compilerArgument>
         </configuration>
-        <dependencies>
-          <dependency>
-            <groupId>com.google.errorprone</groupId>
-            <artifactId>error_prone_core</artifactId>
-            <version>1.1.1</version>
-          </dependency>
-          <dependency>
-            <groupId>org.codehaus.plexus</groupId>
-            <artifactId>plexus-compiler-javac</artifactId>
-            <version>2.3</version>
-          </dependency>
-          <dependency>
-            <groupId>org.codehaus.plexus</groupId>
-            <artifactId>plexus-compiler-javac-errorprone</artifactId>
-            <version>2.3</version>
-          </dependency>
-        </dependencies>
       </plugin>
       <!-- Test oriented plugins -->
       <plugin>