HBASE-17549 HBase-Spark Module: Correct incorrect println log messages and remove unwanted commented-out code

commit 92fc4c0cc8 (parent fb2c89b1b3)
Author: chetkhatri, 2017-01-27 11:13:15 +05:30
Committed by: Michael Stack
Signed-off-by: Michael Stack <stack@apache.org>
12 changed files with 24 additions and 26 deletions
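
All 12 files get the same class of fix: the usage message printed when main() is invoked with too few arguments now names the correct example and states what is missing, and stale commented-out code is removed. For orientation, a minimal sketch of the skeleton these examples share, assuming a single required argument; the object name and message below are illustrative placeholders, not code from this commit:

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical skeleton in the style of the patched examples.
object ExampleSkeleton {
  def main(args: Array[String]): Unit = {
    if (args.length < 1) {
      // The fix in this commit: name the actual example in the message.
      println("ExampleSkeleton {tableName} is missing an argument")
      return
    }
    val tableName = args(0)
    val sparkConf = new SparkConf().setAppName("ExampleSkeleton " + tableName)
    val sc = new SparkContext(sparkConf)
    try {
      // example-specific work against tableName goes here
    } finally {
      sc.stop()
    }
  }
}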


@@ -31,7 +31,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkDeleteExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkDeletesExample {tableName} ")
+      println("HBaseBulkDeleteExample {tableName} missing an argument")
       return
     }
@@ -60,4 +60,4 @@ object HBaseBulkDeleteExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}


@@ -26,13 +26,13 @@ import org.apache.hadoop.hbase.client.Result
 import org.apache.spark.SparkConf
 /**
- * This is a simple example of getting records in HBase
+ * This is a simple example of getting records from HBase
  * with the bulkGet function.
  */
 object HBaseBulkGetExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkGetExample {tableName}")
+      println("HBaseBulkGetExample {tableName} missing an argument")
       return
     }


@@ -31,7 +31,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkPutExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      println("HBaseBulkPutExample {tableName} {columnFamily}")
+      println("HBaseBulkPutExample {tableName} {columnFamily} are missing arguments")
       return
     }
@@ -72,4 +72,4 @@ object HBaseBulkPutExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}
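
For context on what this example exercises: HBaseBulkPutExample drives HBaseContext.bulkPut, which maps each RDD element to a Put on the executors. A condensed sketch of that call, assuming a table "t" with column family "cf" already exists; the sample rows are placeholders, not data from the patch:

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.spark.HBaseContext
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

object BulkPutSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("BulkPutSketch"))
    try {
      val hbaseContext = new HBaseContext(sc, HBaseConfiguration.create())
      // (rowKey, qualifier, value) triples; values are made up for the sketch
      val rdd = sc.parallelize(Seq(
        (Bytes.toBytes("row1"), Bytes.toBytes("q"), Bytes.toBytes("v1")),
        (Bytes.toBytes("row2"), Bytes.toBytes("q"), Bytes.toBytes("v2"))))
      // bulkPut builds one Put per RDD element and writes them in bulk
      hbaseContext.bulkPut[(Array[Byte], Array[Byte], Array[Byte])](
        rdd,
        TableName.valueOf("t"),
        r => {
          val put = new Put(r._1)
          put.addColumn(Bytes.toBytes("cf"), r._2, r._3)
          put
        })
    } finally {
      sc.stop()
    }
  }
}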


@@ -35,7 +35,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkPutExampleFromFile {
   def main(args: Array[String]) {
     if (args.length < 3) {
-      println("HBaseBulkPutExampleFromFile {tableName} {columnFamily} {inputFile}")
+      println("HBaseBulkPutExampleFromFile {tableName} {columnFamily} {inputFile} are missing arguments")
       return
     }


@@ -32,7 +32,7 @@ import org.apache.spark.SparkConf
 object HBaseBulkPutTimestampExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      System.out.println("HBaseBulkPutTimestampExample {tableName} {columnFamily}")
+      System.out.println("HBaseBulkPutTimestampExample {tableName} {columnFamily} are missing arguments")
       return
     }


@@ -24,12 +24,12 @@ import org.apache.hadoop.hbase.client.Scan
 import org.apache.spark.SparkConf
 /**
  * This is a simple example of scanning records from HBase
- * with the hbaseRDD function.
+ * with the hbaseRDD function in a distributed fashion.
  */
 object HBaseDistributedScanExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("GenerateGraphs {tableName}")
+      println("HBaseDistributedScanExample {tableName} missing an argument")
       return
     }
@@ -51,11 +51,9 @@ object HBaseDistributedScanExample {
       getRdd.foreach(v => println(Bytes.toString(v._1.get())))
       println("Length: " + getRdd.map(r => r._1.copyBytes()).collect().length);
-        //.collect().foreach(v => println(Bytes.toString(v._1.get())))
     } finally {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}
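
The getRdd above comes from HBaseContext.hbaseRDD, which runs the Scan as a distributed Spark job and yields (rowKey, Result) pairs. A minimal sketch of how such an RDD is built and consumed, assuming a table "t"; the caching value is likewise an assumption:

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.spark.HBaseContext
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

object DistributedScanSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("DistributedScanSketch"))
    try {
      val hbaseContext = new HBaseContext(sc, HBaseConfiguration.create())
      val scan = new Scan()
      scan.setCaching(100) // rows fetched per RPC; tune for the workload
      // Scan runs across the cluster; elements are (ImmutableBytesWritable, Result)
      val getRdd = hbaseContext.hbaseRDD(TableName.valueOf("t"), scan)
      getRdd.foreach(v => println(Bytes.toString(v._1.get()))) // prints on executors
    } finally {
      sc.stop()
    }
  }
}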


@@ -33,7 +33,7 @@ object HBaseStreamingBulkPutExample {
   def main(args: Array[String]) {
     if (args.length < 4) {
       println("HBaseStreamingBulkPutExample " +
-        "{host} {port} {tableName} {columnFamily}")
+        "{host} {port} {tableName} {columnFamily} are missing arguments")
       return
     }
@@ -42,7 +42,7 @@ object HBaseStreamingBulkPutExample {
     val tableName = args(2)
     val columnFamily = args(3)
-    val sparkConf = new SparkConf().setAppName("HBaseBulkPutTimestampExample " +
+    val sparkConf = new SparkConf().setAppName("HBaseStreamingBulkPutExample " +
       tableName + " " + columnFamily)
     val sc = new SparkContext(sparkConf)
     try {
@@ -71,4 +71,4 @@ object HBaseStreamingBulkPutExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}


@@ -31,7 +31,7 @@ import org.apache.spark.{SparkContext, SparkConf}
 object HBaseBulkDeleteExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkDeletesExample {tableName} ")
+      println("HBaseBulkDeleteExample {tableName} is missing an argument")
       return
     }
@@ -61,4 +61,4 @@ object HBaseBulkDeleteExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}


@@ -24,13 +24,13 @@ import org.apache.hadoop.hbase.spark.HBaseRDDFunctions._
 import org.apache.spark.{SparkContext, SparkConf}
 /**
- * This is a simple example of getting records in HBase
+ * This is a simple example of getting records from HBase
  * with the bulkGet function.
  */
 object HBaseBulkGetExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkGetExample {tableName}")
+      println("HBaseBulkGetExample {tableName} is missing an argument")
       return
     }


@@ -31,7 +31,7 @@ import org.apache.spark.{SparkConf, SparkContext}
 object HBaseBulkPutExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      println("HBaseBulkPutExample {tableName} {columnFamily}")
+      println("HBaseBulkPutExample {tableName} {columnFamily} are missing arguments")
       return
     }
@@ -73,4 +73,4 @@ object HBaseBulkPutExample {
       sc.stop()
     }
   }
-}
\ No newline at end of file
+}


@@ -31,14 +31,14 @@ import org.apache.spark.{SparkContext, SparkConf}
 object HBaseForeachPartitionExample {
   def main(args: Array[String]) {
     if (args.length < 2) {
-      println("HBaseBulkPutExample {tableName} {columnFamily}")
+      println("HBaseForeachPartitionExample {tableName} {columnFamily} are missing arguments")
       return
     }
     val tableName = args(0)
     val columnFamily = args(1)
-    val sparkConf = new SparkConf().setAppName("HBaseBulkPutExample " +
+    val sparkConf = new SparkConf().setAppName("HBaseForeachPartitionExample " +
       tableName + " " + columnFamily)
     val sc = new SparkContext(sparkConf)


@@ -31,13 +31,13 @@ import org.apache.spark.{SparkContext, SparkConf}
 object HBaseMapPartitionExample {
   def main(args: Array[String]) {
     if (args.length < 1) {
-      println("HBaseBulkGetExample {tableName}")
+      println("HBaseMapPartitionExample {tableName} is missing an argument")
       return
     }
     val tableName = args(0)
-    val sparkConf = new SparkConf().setAppName("HBaseBulkGetExample " + tableName)
+    val sparkConf = new SparkConf().setAppName("HBaseMapPartitionExample " + tableName)
     val sc = new SparkContext(sparkConf)
     try {