HBASE-11912 Catch some bad practices at compile time with error-prone

Andrew Purtell 2014-10-19 16:53:18 -04:00
parent 6c7543c9c7
commit 7ed0260eff
31 changed files with 315 additions and 103 deletions

pom.xml

@@ -34,6 +34,25 @@
   <build>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>

ClusterStatus.java

@@ -379,7 +379,7 @@ public class ClusterStatus extends VersionedWritable {
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) {
     Map<ServerName, ServerLoad> servers = null;
-    if (proto.getLiveServersList() != null) {
+    if (!proto.getLiveServersList().isEmpty()) {
       servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
       for (LiveServerInfo lsi : proto.getLiveServersList()) {
         servers.put(ProtobufUtil.toServerName(
@@ -388,7 +388,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     Collection<ServerName> deadServers = null;
-    if (proto.getDeadServersList() != null) {
+    if (!proto.getDeadServersList().isEmpty()) {
       deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
       for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
         deadServers.add(ProtobufUtil.toServerName(sn));
@@ -396,7 +396,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     Collection<ServerName> backupMasters = null;
-    if (proto.getBackupMastersList() != null) {
+    if (!proto.getBackupMastersList().isEmpty()) {
       backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
       for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
         backupMasters.add(ProtobufUtil.toServerName(sn));
@@ -404,7 +404,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     Map<String, RegionState> rit = null;
-    if (proto.getRegionsInTransitionList() != null) {
+    if (!proto.getRegionsInTransitionList().isEmpty()) {
       rit = new HashMap<String, RegionState>(proto.getRegionsInTransitionList().size());
       for (RegionInTransition region : proto.getRegionsInTransitionList()) {
         String key = new String(region.getSpec().getValue().toByteArray());
@@ -414,7 +414,7 @@ public class ClusterStatus extends VersionedWritable {
     }
     String[] masterCoprocessors = null;
-    if (proto.getMasterCoprocessorsList() != null) {
+    if (!proto.getMasterCoprocessorsList().isEmpty()) {
       final int numMasterCoprocessors = proto.getMasterCoprocessorsCount();
       masterCoprocessors = new String[numMasterCoprocessors];
       for (int i = 0; i < numMasterCoprocessors; i++) {

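Note on the ClusterStatus hunks above: protobuf-java guarantees that repeated-field getters such as getLiveServersList() return an empty list, never null, so the old null checks could never be false; the emptiness test is the meaningful guard, and error-prone can reject the always-true comparison. A minimal, self-contained sketch of the pattern (the getter below is a stand-in for generated protobuf code, not the real API):

    import java.util.Collections;
    import java.util.List;

    public class EmptyVsNull {
      // Stand-in for a protobuf accessor: repeated-field getters in
      // protobuf-java return an empty list rather than null.
      static List<String> getLiveServersList() {
        return Collections.emptyList();
      }

      public static void main(String[] args) {
        // 'getLiveServersList() != null' would always be true; check emptiness instead.
        if (!getLiveServersList().isEmpty()) {
          System.out.println("have live servers");
        } else {
          System.out.println("no live servers");
        }
      }
    }
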
ZKUtil.java

@@ -801,6 +801,7 @@ public class ZKUtil {
    * @throws KeeperException if unexpected zookeeper exception
    * @deprecated Unused
    */
+  @Deprecated
   public static List<NodeAndData> getChildDataAndWatchForNewChildren(
       ZooKeeperWatcher zkw, String baseNode) throws KeeperException {
     List<String> nodes =
@@ -833,6 +834,7 @@ public class ZKUtil {
    * @throws KeeperException.BadVersionException if version mismatch
    * @deprecated Unused
    */
+  @Deprecated
   public static void updateExistingNodeData(ZooKeeperWatcher zkw, String znode,
       byte [] data, int expectedVersion)
   throws KeeperException {

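Note on the @Deprecated hunks (here and in HTableWrapper, RegionObserver, HBaseTestCase, and HTablePool below): error-prone flags javadoc-only deprecation, a @deprecated tag without the matching @Deprecated annotation; I believe the bug pattern was named DepAnn in error-prone of this era, so treat that name as an assumption. A minimal sketch of the fixed shape, with a made-up method:

    public class DeprecationExample {
      /**
       * @deprecated Unused; kept only for source compatibility.
       */
      @Deprecated // the annotation must accompany the javadoc tag
      public static void legacyHelper() {
      }

      public static void main(String[] args) {
        legacyHelper(); // still callable, but callers now get a compiler warning
      }
    }
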
pom.xml

@@ -41,13 +41,32 @@
       </resource>
     </resources>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>

pom.xml

@@ -31,13 +31,32 @@
   <description>Examples of HBase usage</description>
   <build>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>

pom.xml

@@ -35,7 +35,26 @@
   </description>
   <build>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>

pom.xml

@@ -34,18 +34,37 @@ limitations under the License.
   <build>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <!-- Make a jar and put the sources in the jar -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>

pom.xml

@@ -117,6 +117,25 @@
     </pluginManagement>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <!-- Run integration tests with mvn verify -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>

pom.xml

@@ -33,18 +33,37 @@
   <build>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <!-- Make a jar and put the sources in the jar -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>

TestKeyValueTool.java

@@ -43,7 +43,7 @@ public class TestKeyValueTool {
   @Parameters
   public static Collection<Object[]> parameters() {
-    return new TestRowData.InMemory().getAllAsObjectArray();
+    return TestRowData.InMemory.getAllAsObjectArray();
   }

   private TestRowData rows;

TestPrefixTreeSearcher.java

@@ -52,7 +52,7 @@ public class TestPrefixTreeSearcher {
   @Parameters
   public static Collection<Object[]> parameters() {
-    return new TestRowData.InMemory().getAllAsObjectArray();
+    return TestRowData.InMemory.getAllAsObjectArray();
   }

   protected TestRowData rows;

TestRowData.java

@@ -57,7 +57,7 @@ public interface TestRowData {
   void individualSearcherAssertions(CellSearcher searcher);

-  class InMemory {
+  static class InMemory {

     /*
      * The following are different styles of data that the codec may encounter. Having these small

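Note on the three prefix-tree hunks above: getAllAsObjectArray() is a static utility, so the call sites stop instantiating InMemory and access it through the class name, and the nested class is marked static explicitly (member classes of an interface are already implicitly static, so the keyword only documents intent). A minimal sketch with made-up names:

    import java.util.Arrays;
    import java.util.Collection;

    interface RowFixtures {
      // Explicit 'static' on a class nested in an interface is redundant but clear.
      static class InMemory {
        static Collection<Object[]> getAllAsObjectArray() {
          return Arrays.asList(new Object[][] { { 1 }, { 2 } });
        }
      }
    }

    public class StaticAccessExample {
      public static void main(String[] args) {
        // Class-qualified access, not 'new RowFixtures.InMemory().getAllAsObjectArray()'.
        System.out.println(RowFixtures.InMemory.getAllAsObjectArray().size());
      }
    }
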
pom.xml

@@ -53,13 +53,27 @@
       </testResource>
     </testResources>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <!-- We can't check tests with default-testCompile because of generated
+               protobuf code (IncrementCounterProcessorTestProtos) -->
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running
         tests (this is needed for upstream projects whose tests need this jar simply for
         compilation) -->

HTableWrapper.java

@@ -238,6 +238,7 @@ public class HTableWrapper implements HTableInterface {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    * retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
+  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions)
       throws IOException, InterruptedException {
@@ -257,6 +258,7 @@ public class HTableWrapper implements HTableInterface {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
+  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions,
       Batch.Callback<R> callback) throws IOException, InterruptedException {

RegionObserver.java

@@ -118,6 +118,7 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead
    */
+  @Deprecated
   void preFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;

   /**
@@ -138,6 +139,7 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead.
    */
+  @Deprecated
   void postFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;

   /**
@@ -339,6 +341,7 @@ public interface RegionObserver extends Coprocessor {
    * @deprecated Use preSplit(
    *    final ObserverContext<RegionCoprocessorEnvironment> c, byte[] splitRow)
    */
+  @Deprecated
   void preSplit(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;

   /**
@@ -359,6 +362,7 @@ public interface RegionObserver extends Coprocessor {
    * @throws IOException if an error occurred on the coprocessor
    * @deprecated Use postCompleteSplit() instead
    */
+  @Deprecated
   void postSplit(final ObserverContext<RegionCoprocessorEnvironment> c, final HRegion l,
       final HRegion r) throws IOException;

OfflineMetaRepair.java

@@ -80,7 +80,7 @@ public class OfflineMetaRepair {
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
       if (cmd.equals("-details")) {
-        fsck.setDisplayFullReport();
+        HBaseFsck.setDisplayFullReport();
       } else if (cmd.equals("-base")) {
         if (i == args.length - 1) {
           System.err.println("OfflineMetaRepair: -base needs an HDFS path.");

HBaseTestCase.java

@@ -52,6 +52,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
  * like an HBaseConfiguration and filesystem.
  * @deprecated Write junit4 unit tests using {@link HBaseTestingUtility}
  */
+@Deprecated
 public abstract class HBaseTestCase extends TestCase {

   private static final Log LOG = LogFactory.getLog(HBaseTestCase.class);
@@ -111,12 +112,12 @@ public abstract class HBaseTestCase extends TestCase {
     }
     try {
       if (localfs) {
-        this.testDir = getUnitTestdir(getName());
+        testDir = getUnitTestdir(getName());
         if (fs.exists(testDir)) {
           fs.delete(testDir, true);
         }
       } else {
-        this.testDir = FSUtils.getRootDir(conf);
+        testDir = FSUtils.getRootDir(conf);
       }
     } catch (Exception e) {
       LOG.fatal("error during setup", e);

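Note on the testDir edits (also in TestSeekTo, TestStoreFile, and TestMergeTool below): testDir here is, or with this commit becomes, a static field, and qualifying it as this.testDir reaches a static member through an instance reference, which error-prone and javac's lint both report. The HBaseFsck.setDisplayFullReport() and HMaster.getLoadedCoprocessors() call-site changes are the same fix applied to static methods. A minimal sketch, names invented:

    public class StaticFieldExample {
      static String testDir = "/tmp/test-data";

      void setUp() {
        // Flagged form: 'this.testDir = ...' qualifies a static field with an instance.
        testDir = "/tmp/other"; // unqualified (or StaticFieldExample.testDir) is the fix
      }

      public static void main(String[] args) {
        new StaticFieldExample().setUp();
        System.out.println(testDir);
      }
    }
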
TestMasterCoprocessorExceptionWithAbort.java

@@ -198,7 +198,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
     // Test (part of the) output that should have be printed by master when it aborts:
     // (namely the part that shows the set of loaded coprocessors).
     // In this test, there is only a single coprocessor (BuggyMasterObserver).
-    assertTrue(master.getLoadedCoprocessors().
+    assertTrue(HMaster.getLoadedCoprocessors().
       contains(TestMasterCoprocessorExceptionWithAbort.BuggyMasterObserver.class.getName()));
     CreateTableThread createTableThread = new CreateTableThread(UTIL);

TestHeapSize.java

@@ -39,6 +39,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
@@ -372,7 +373,7 @@ public class TestHeapSize {
     byte[] row = new byte[] { 0 };
     cl = Put.class;
-    actual = new Put(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);
@@ -382,7 +383,7 @@ public class TestHeapSize {
     }
     cl = Delete.class;
-    actual = new Delete(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
+    actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
     expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
     expected += ClassSize.align(ClassSize.TREEMAP);

TestScannerSelectionUsingTTL.java

@@ -60,8 +60,7 @@ public class TestScannerSelectionUsingTTL {
   private static final Log LOG =
       LogFactory.getLog(TestScannerSelectionUsingTTL.class);
-  private static final HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility().createLocalHTU();
+  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();

   private static TableName TABLE = TableName.valueOf("myTable");
   private static String FAMILY = "myCF";
   private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);

TestSeekTo.java

@@ -71,7 +71,7 @@ public class TestSeekTo extends HBaseTestCase {
   }

   Path makeNewFile(TagUsage tagUsage) throws IOException {
-    Path ncTFile = new Path(this.testDir, "basic.hfile");
+    Path ncTFile = new Path(testDir, "basic.hfile");
     if (tagUsage != TagUsage.NO_TAG) {
       conf.setInt("hfile.format.version", 3);
     } else {

TestHFileOutputFormat.java

@@ -795,7 +795,7 @@ public class TestHFileOutputFormat {
     HTable table = Mockito.mock(HTable.class);
     HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
     Mockito.doReturn(htd).when(table).getTableDescriptor();
-    for (HColumnDescriptor hcd: this.util.generateColumnDescriptors()) {
+    for (HColumnDescriptor hcd: HBaseTestingUtility.generateColumnDescriptors()) {
       htd.addFamily(hcd);
     }

TestTableSnapshotInputFormat.java

@@ -77,37 +77,42 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
     Configuration conf = UTIL.getConfiguration();
     HDFSBlocksDistribution blockDistribution = new HDFSBlocksDistribution();
-    Assert.assertEquals(Lists.newArrayList(), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList(),
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution = new HDFSBlocksDistribution();
     blockDistribution.addHostsAndBlockWeight(new String[] {"h1"}, 10);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 7);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 5);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 1);
-    Assert.assertEquals(Lists.newArrayList("h1"), tsif.getBestLocations(conf, blockDistribution));
+    Assert.assertEquals(Lists.newArrayList("h1"),
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 2);
     Assert.assertEquals(Lists.newArrayList("h1", "h2"),
-        tsif.getBestLocations(conf, blockDistribution));
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h2"}, 3);
     Assert.assertEquals(Lists.newArrayList("h2", "h1"),
-        tsif.getBestLocations(conf, blockDistribution));
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));

     blockDistribution.addHostsAndBlockWeight(new String[] {"h3"}, 6);
     blockDistribution.addHostsAndBlockWeight(new String[] {"h4"}, 9);
     Assert.assertEquals(Lists.newArrayList("h2", "h3", "h4", "h1"),
-        tsif.getBestLocations(conf, blockDistribution));
+        TableSnapshotInputFormatImpl.getBestLocations(conf, blockDistribution));
   }

   public static enum TestTableSnapshotCounters {

TestStoreFile.java

@@ -97,7 +97,7 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri =
         new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();

     StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
@@ -148,7 +148,7 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReference() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      conf, fs, new Path(this.testDir, hri.getTable().getNameAsString()), hri);
+      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();

     // Make a store file and write data to it.
@@ -192,9 +192,9 @@ public class TestStoreFile extends HBaseTestCase {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, this.testDir);
+    FSUtils.setRootDir(testConf, testDir);
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();

     // Make a store file and write data to it.
@@ -233,12 +233,12 @@ public class TestStoreFile extends HBaseTestCase {
   public void testReferenceToHFileLink() throws IOException {
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
-    FSUtils.setRootDir(testConf, this.testDir);
+    FSUtils.setRootDir(testConf, testDir);

     // adding legal table name chars to verify regex handles it.
     HRegionInfo hri = new HRegionInfo(TableName.valueOf("_original-evil-name"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()), hri);
+      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);

     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
@@ -252,7 +252,7 @@ public class TestStoreFile extends HBaseTestCase {
     // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
     HRegionInfo hriClone = new HRegionInfo(TableName.valueOf("clone"));
     HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
-      testConf, fs, FSUtils.getTableDir(this.testDir, hri.getTable()),
+      testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()),
       hriClone);
     Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
     HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
@@ -269,7 +269,7 @@ public class TestStoreFile extends HBaseTestCase {
     Path pathB = splitStoreFile(cloneRegionFs, splitHriB, TEST_FAMILY, f, SPLITKEY, false);// bottom

     // OK test the thing
-    FSUtils.logFileSystemState(fs, this.testDir, LOG);
+    FSUtils.logFileSystemState(fs, testDir, LOG);

     // There is a case where a file with the hfilelink pattern is actually a daughter
     // reference to a hfile link. This code in StoreFile that handles this case.
@@ -774,7 +774,7 @@ public class TestStoreFile extends HBaseTestCase {
     Scan scan = new Scan();

     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path storedir = new Path(new Path(this.testDir, "7e0102"), "familyname");
+    Path storedir = new Path(new Path(testDir, "7e0102"), "familyname");
     Path dir = new Path(storedir, "1234567890");
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -820,7 +820,7 @@ public class TestStoreFile extends HBaseTestCase {
     Configuration conf = this.conf;

     // Find a home for our files (regiondir ("7e0102") and familyname).
-    Path baseDir = new Path(new Path(this.testDir, "7e0102"),"twoCOWEOC");
+    Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");

     // Grab the block cache and get the initial hit/miss counts
     BlockCache bc = new CacheConfig(conf).getBlockCache();
@@ -990,7 +990,7 @@ public class TestStoreFile extends HBaseTestCase {
    */
   public void testDataBlockEncodingMetaData() throws IOException {
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
-    Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
+    Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
     Path path = new Path(dir, "1234567890");
     DataBlockEncoding dataBlockEncoderAlgo =

TestHBaseFsck.java

@@ -155,6 +155,8 @@ public class TestHBaseFsck {
         TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager();
     regionStates = assignmentManager.getRegionStates();
     TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, true);
+    HBaseFsck.setDisplayFullReport();
   }

   @AfterClass
@@ -966,7 +968,6 @@ public class TestHBaseFsck {
       // fix the problem.
       HBaseFsck fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setFixAssignments(true);
       fsck.setFixMeta(true);
@@ -1558,7 +1559,6 @@ public class TestHBaseFsck {
       // fix lingering split parent
       hbck = new HBaseFsck(conf);
       hbck.connect();
-      hbck.setDisplayFullReport(); // i.e. -details
       hbck.setTimeLag(0);
       hbck.setFixSplitParents(true);
       hbck.onlineHbck();
@@ -1813,7 +1813,6 @@ public class TestHBaseFsck {
       // verify that noHdfsChecking report the same errors
       HBaseFsck fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.onlineHbck();
@@ -1823,7 +1822,6 @@ public class TestHBaseFsck {
      // verify that fixAssignments works fine with noHdfsChecking
       fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.setFixAssignments(true);
@@ -1863,7 +1861,6 @@ public class TestHBaseFsck {
       // verify that noHdfsChecking report the same errors
       HBaseFsck fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.onlineHbck();
@@ -1873,7 +1870,6 @@ public class TestHBaseFsck {
       // verify that fixMeta doesn't work with noHdfsChecking
       fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.setFixAssignments(true);
@@ -1927,7 +1923,6 @@ public class TestHBaseFsck {
       // verify that noHdfsChecking can't detect ORPHAN_HDFS_REGION
       HBaseFsck fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.onlineHbck();
@@ -1937,7 +1932,6 @@ public class TestHBaseFsck {
       // verify that fixHdfsHoles doesn't work with noHdfsChecking
       fsck = new HBaseFsck(conf);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.setFixHdfsHoles(true);

TestMergeTool.java

@@ -147,15 +147,14 @@ public class TestMergeTool extends HBaseTestCase {
     try {
       // Create meta region
       createMetaRegion();
-      new FSTableDescriptors(this.conf, this.fs, this.testDir).createTableDescriptor(
+      new FSTableDescriptors(this.conf, this.fs, testDir).createTableDescriptor(
           new TableDescriptor(this.desc));
       /*
        * Create the regions we will merge
        */
       for (int i = 0; i < sourceRegions.length; i++) {
         regions[i] =
-          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
-            this.desc);
+          HRegion.createHRegion(this.sourceRegions[i], testDir, this.conf, this.desc);
         /*
          * Insert data
          */

HbckTestingUtil.java

@@ -50,7 +50,7 @@ public class HbckTestingUtil {
       TableName table) throws Exception {
     HBaseFsck fsck = new HBaseFsck(conf, exec);
     fsck.connect();
-    fsck.setDisplayFullReport(); // i.e. -details
+    HBaseFsck.setDisplayFullReport(); // i.e. -details
     fsck.setTimeLag(0);
     fsck.setFixAssignments(fixAssignments);
     fsck.setFixMeta(fixMeta);

pom.xml

@@ -50,13 +50,32 @@
       </testResource>
     </testResources>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running
         tests (this is needed for upstream projects whose tests need this jar simply for
         compilation) -->

pom.xml

@@ -46,6 +46,25 @@
     </resources>
     <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>default-compile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+          <execution>
+            <id>default-testCompile</id>
+            <configuration>
+              <compilerId>javac-with-errorprone</compilerId>
+              <forceJavacCompilerUse>true</forceJavacCompilerUse>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>

HTablePool.java

@@ -235,6 +235,7 @@ public class HTablePool implements Closeable {
    *          the proxy table user got from pool
    * @deprecated
    */
+  @Deprecated
   public void putTable(HTableInterface table) throws IOException {
     // we need to be sure nobody puts a proxy implementation in the pool
     // but if the client code is not updated
@@ -395,6 +396,7 @@ public class HTablePool implements Closeable {
    * @deprecated If any exception is thrown by one of the actions, there is no way to
    * retrieve the partially executed results. Use {@link #batch(List, Object[])} instead.
    */
+  @Deprecated
   @Override
   public Object[] batch(List<? extends Row> actions) throws IOException,
       InterruptedException {
@@ -588,6 +590,7 @@ public class HTablePool implements Closeable {
    * {@link #batchCallback(List, Object[], org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
    * instead.
    */
+  @Deprecated
   @Override
   public <R> Object[] batchCallback(List<? extends Row> actions,
       Callback<R> callback) throws IOException, InterruptedException {

pom.xml

@@ -441,7 +441,7 @@
       </plugin>
       <plugin>
         <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.5.1</version>
+        <version>3.2</version>
         <configuration>
           <source>${compileSource}</source>
           <target>${compileSource}</target>
@@ -449,6 +449,23 @@
           <showDeprecation>false</showDeprecation>
           <compilerArgument>-Xlint:-options</compilerArgument>
         </configuration>
+        <dependencies>
+          <dependency>
+            <groupId>com.google.errorprone</groupId>
+            <artifactId>error_prone_core</artifactId>
+            <version>1.1.1</version>
+          </dependency>
+          <dependency>
+            <groupId>org.codehaus.plexus</groupId>
+            <artifactId>plexus-compiler-javac</artifactId>
+            <version>2.3</version>
+          </dependency>
+          <dependency>
+            <groupId>org.codehaus.plexus</groupId>
+            <artifactId>plexus-compiler-javac-errorprone</artifactId>
+            <version>2.3</version>
+          </dependency>
+        </dependencies>
       </plugin>
       <!-- Test oriented plugins -->
       <plugin>
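
Taken together: the root pom.xml upgrades maven-compiler-plugin to 3.2 and makes Google's error-prone front end available through the javac-with-errorprone compiler id (provided by plexus-compiler-javac-errorprone), and each module opts in by redefining the default-compile and default-testCompile execution ids; because Maven merges plugin executions by id, this reconfigures the lifecycle-bound compilations rather than adding a second compile. Error-prone then fails the build on known bug patterns that plain javac accepts. A minimal sketch of the kind of code it rejects; the class and values are invented, and the check name (SelfEquals) is my best recollection for error-prone of this era:

    public class SelfEqualsExample {
      public static void main(String[] args) {
        String host = "h1";
        // error-prone's SelfEquals pattern fails compilation here:
        // comparing a value to itself is always true and is almost
        // certainly a typo for a comparison against another value.
        System.out.println(host.equals(host));
      }
    }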