HBASE-19597 Fixed Checkstyle errors in hbase-spark and enabled Checkstyle to fail on violations

Jan Hentschel 2017-12-22 13:03:48 +01:00
parent c0c802f62b
commit 6007725521
6 changed files with 96 additions and 63 deletions

hbase-spark/pom.xml

@@ -713,6 +713,22 @@
         </execution>
       </executions>
     </plugin>
+    <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-checkstyle-plugin</artifactId>
+      <executions>
+        <execution>
+          <id>checkstyle</id>
+          <phase>validate</phase>
+          <goals>
+            <goal>check</goal>
+          </goals>
+          <configuration>
+            <failOnViolation>true</failOnViolation>
+          </configuration>
+        </execution>
+      </executions>
+    </plugin>
   </plugins>
 </build>
 <profiles>
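Bound to the validate phase with failOnViolation set to true, this execution makes any Checkstyle violation fail the hbase-spark build instead of merely being reported. Assuming the standard module layout, the failures this commit fixes can be reproduced locally with

    mvn checkstyle:check

run from the hbase-spark module directory (or with -pl hbase-spark from the project root).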

SparkSQLPushDownFilter.java

@@ -17,10 +17,15 @@
 package org.apache.hadoop.hbase.spark;
+import com.google.protobuf.ByteString;
+import com.google.protobuf.InvalidProtocolBufferException;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.spark.datasources.BytesEncoder;
@@ -29,16 +34,12 @@ import org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.spark.sql.datasources.hbase.Field;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.collection.mutable.MutableList;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.ByteString;
 /**
  * This filter will push down all qualifier logic given to us
  * by SparkSQL so that we have make the filters at the region server level
@@ -179,7 +180,7 @@ public class SparkSQLPushDownFilter extends FilterBase{
   /**
    * @param pbBytes A pb serialized instance
    * @return An instance of SparkSQLPushDownFilter
-   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+   * @throws DeserializationException if the filter cannot be parsed from the given bytes
    */
   @SuppressWarnings("unused")
   public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes)
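The parseFrom contract documented above follows the usual HBase filter deserialization pattern: decode the protobuf payload and rethrow any InvalidProtocolBufferException as the checked DeserializationException (both types are among the imports reordered above). The sketch below only illustrates that pattern and is not the method body from this commit; the SQLPredicatePushDownFilter message name and the buildFromProto helper are assumptions.

public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes)
    throws DeserializationException {
  SparkFilterProtos.SQLPredicatePushDownFilter proto;
  try {
    // Decode the serialized filter sent from the client side of the scan.
    proto = SparkFilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException e) {
    // Surface protobuf parse failures through HBase's checked deserialization exception.
    throw new DeserializationException(e);
  }
  // Hypothetical helper that rebuilds the filter state from the decoded message.
  return buildFromProto(proto);
}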

JavaHBaseBulkDeleteExample.java

@@ -16,6 +16,9 @@
  */
 package org.apache.hadoop.hbase.spark.example.hbasecontext;
+import java.util.ArrayList;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -27,9 +30,6 @@ import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
-import java.util.ArrayList;
-import java.util.List;
 /**
  * This is a simple example of deleting records in HBase
  * with the bulkDelete function.

JavaHBaseBulkLoadExample.java

@@ -16,6 +16,10 @@
  */
 package org.apache.hadoop.hbase.spark.example.hbasecontext;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -25,23 +29,21 @@ import org.apache.hadoop.hbase.spark.JavaHBaseContext;
 import org.apache.hadoop.hbase.spark.KeyFamilyQualifier;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.Function;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
 /**
  * Run this example using command below:
  *
- * SPARK_HOME/bin/spark-submit --master local[2] --class org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkLoadExample
+ * SPARK_HOME/bin/spark-submit --master local[2]
+ * --class org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkLoadExample
  * path/to/hbase-spark.jar {path/to/output/HFiles}
  *
  * This example will output put hfiles in {path/to/output/HFiles}, and user can run
- * 'hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles' to load the HFiles into table to verify this example.
+ * 'hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles' to load the HFiles into table to
+ * verify this example.
  */
 final public class JavaHBaseBulkLoadExample {
   private JavaHBaseBulkLoadExample() {}
@@ -85,17 +87,21 @@ final public class JavaHBaseBulkLoadExample {
     }
   }

-  public static class BulkLoadFunction implements Function<String, Pair<KeyFamilyQualifier, byte[]>> {
+  public static class BulkLoadFunction
+      implements Function<String, Pair<KeyFamilyQualifier, byte[]>> {
     @Override
     public Pair<KeyFamilyQualifier, byte[]> call(String v1) throws Exception {
-      if (v1 == null)
+      if (v1 == null) {
         return null;
+      }
       String[] strs = v1.split(",");
-      if(strs.length != 4)
+      if(strs.length != 4) {
         return null;
-      KeyFamilyQualifier kfq = new KeyFamilyQualifier(Bytes.toBytes(strs[0]), Bytes.toBytes(strs[1]),
-          Bytes.toBytes(strs[2]));
+      }
+      KeyFamilyQualifier kfq = new KeyFamilyQualifier(Bytes.toBytes(strs[0]),
+          Bytes.toBytes(strs[1]), Bytes.toBytes(strs[2]));
       return new Pair(kfq, Bytes.toBytes(strs[3]));
     }
   }
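The reworked BulkLoadFunction above is the record-to-KeyFamilyQualifier mapper that JavaHBaseContext.bulkLoad expects. A minimal usage sketch follows, based on the call shape visible in TestJavaHBaseContext further down in this diff; the table name, output path, and input records are illustrative, and the org.apache.hadoop.hbase.spark package for FamilyHFileWriteOptions is assumed.

import java.util.Arrays;
import java.util.HashMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.spark.FamilyHFileWriteOptions;
import org.apache.hadoop.hbase.spark.JavaHBaseContext;
import org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkLoadExample;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public final class BulkLoadFunctionUsageSketch {
  private BulkLoadFunctionUsageSketch() {}

  public static void main(String[] args) {
    SparkConf sparkConf = new SparkConf()
        .setAppName("BulkLoadFunctionUsageSketch").setMaster("local[2]");
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);
    try {
      // Each record is "rowKey,family,qualifier,value", the format BulkLoadFunction parses.
      JavaRDD<String> rdd = jsc.parallelize(Arrays.asList("1,f1,c1,v1", "2,f1,c1,v2"));

      Configuration conf = HBaseConfiguration.create();
      JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);

      // Same call shape as in TestJavaHBaseContext: write HFiles under /tmp/hfiles
      // for table "bulkLoadTable" (both values are illustrative).
      hbaseContext.bulkLoad(rdd, TableName.valueOf("bulkLoadTable"),
          new JavaHBaseBulkLoadExample.BulkLoadFunction(), "/tmp/hfiles",
          new HashMap<byte[], FamilyHFileWriteOptions>(), false,
          HConstants.DEFAULT_MAX_FILE_SIZE);
    } finally {
      jsc.stop();
    }
  }
}

As in the test further down, the generated HFiles can then be loaded into the table with LoadIncrementalHFiles, which is also what the 'hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles' command in the class javadoc does.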

JavaHBaseMapGetPutExample.java (whitespace-only changes)

@@ -73,24 +73,24 @@ final public class JavaHBaseMapGetPutExample {
       hbaseContext.foreachPartition(rdd,
           new VoidFunction<Tuple2<Iterator<byte[]>, Connection>>() {
             public void call(Tuple2<Iterator<byte[]>, Connection> t)
                 throws Exception {
               Table table = t._2().getTable(TableName.valueOf(tableName));
               BufferedMutator mutator = t._2().getBufferedMutator(TableName.valueOf(tableName));
               while (t._1().hasNext()) {
                 byte[] b = t._1().next();
                 Result r = table.get(new Get(b));
                 if (r.getExists()) {
                   mutator.mutate(new Put(b));
                 }
               }
               mutator.flush();
               mutator.close();
               table.close();
             }
           });
     } finally {
       jsc.stop();
     }

TestJavaHBaseContext.java

@@ -20,16 +20,16 @@ import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
@@ -41,12 +41,11 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkDeleteExample;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -61,6 +60,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.Files;

 @Category({MiscTests.class, MediumTests.class})
@@ -317,10 +317,12 @@
-    hbaseContext.bulkLoad(rdd, TableName.valueOf(tableName), new BulkLoadFunction(), output.toUri().getPath(),
-        new HashMap<byte[], FamilyHFileWriteOptions>(), false, HConstants.DEFAULT_MAX_FILE_SIZE);
+    hbaseContext.bulkLoad(rdd, TableName.valueOf(tableName), new BulkLoadFunction(),
+        output.toUri().getPath(), new HashMap<byte[], FamilyHFileWriteOptions>(), false,
+        HConstants.DEFAULT_MAX_FILE_SIZE);
-    try (Connection conn = ConnectionFactory.createConnection(conf); Admin admin = conn.getAdmin()) {
+    try (Connection conn = ConnectionFactory.createConnection(conf);
+        Admin admin = conn.getAdmin()) {
       Table table = conn.getTable(TableName.valueOf(tableName));
       // Do bulk load
       LoadIncrementalHFiles load = new LoadIncrementalHFiles(conf);
@@ -387,11 +389,13 @@
     Configuration conf = htu.getConfiguration();
     JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
-    hbaseContext.bulkLoadThinRows(rdd, TableName.valueOf(tableName), new BulkLoadThinRowsFunction(), output.toString(),
-        new HashMap<byte[], FamilyHFileWriteOptions>(), false, HConstants.DEFAULT_MAX_FILE_SIZE);
+    hbaseContext.bulkLoadThinRows(rdd, TableName.valueOf(tableName), new BulkLoadThinRowsFunction(),
+        output.toString(), new HashMap<byte[], FamilyHFileWriteOptions>(), false,
+        HConstants.DEFAULT_MAX_FILE_SIZE);
-    try (Connection conn = ConnectionFactory.createConnection(conf); Admin admin = conn.getAdmin()) {
+    try (Connection conn = ConnectionFactory.createConnection(conf);
+        Admin admin = conn.getAdmin()) {
       Table table = conn.getTable(TableName.valueOf(tableName));
       // Do bulk load
       LoadIncrementalHFiles load = new LoadIncrementalHFiles(conf);
@@ -429,25 +433,31 @@
     }
   }

-  public static class BulkLoadFunction implements Function<String, Pair<KeyFamilyQualifier, byte[]>> {
+  public static class BulkLoadFunction
+      implements Function<String, Pair<KeyFamilyQualifier, byte[]>> {
     @Override public Pair<KeyFamilyQualifier, byte[]> call(String v1) throws Exception {
-      if (v1 == null)
+      if (v1 == null) {
         return null;
+      }
       String[] strs = v1.split(",");
-      if(strs.length != 4)
+      if(strs.length != 4) {
         return null;
-      KeyFamilyQualifier kfq = new KeyFamilyQualifier(Bytes.toBytes(strs[0]), Bytes.toBytes(strs[1]),
-          Bytes.toBytes(strs[2]));
+      }
+      KeyFamilyQualifier kfq = new KeyFamilyQualifier(Bytes.toBytes(strs[0]),
+          Bytes.toBytes(strs[1]), Bytes.toBytes(strs[2]));
       return new Pair(kfq, Bytes.toBytes(strs[3]));
     }
   }

-  public static class BulkLoadThinRowsFunction implements Function<List<String>, Pair<ByteArrayWrapper, FamiliesQualifiersValues>> {
-    @Override public Pair<ByteArrayWrapper, FamiliesQualifiersValues> call(List<String> list) throws Exception {
-      if (list == null)
+  public static class BulkLoadThinRowsFunction
+      implements Function<List<String>, Pair<ByteArrayWrapper, FamiliesQualifiersValues>> {
+    @Override public Pair<ByteArrayWrapper, FamiliesQualifiersValues> call(List<String> list) {
+      if (list == null) {
         return null;
+      }
       ByteArrayWrapper rowKey = null;
       FamiliesQualifiersValues fqv = new FamiliesQualifiersValues();
       for (String cell : list) {