diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/AvroSource.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/AvroSource.scala
index 2880c5d06c8..c09e99d906d 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/AvroSource.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/AvroSource.scala
@@ -119,14 +119,14 @@ object AvroSource {
       .save()
 
     val df = withCatalog(catalog)
-    df.show
+    df.show()
     df.printSchema()
     df.registerTempTable("ExampleAvrotable")
     val c = sqlContext.sql("select count(1) from ExampleAvrotable")
-    c.show
+    c.show()
 
     val filtered = df.select($"col0", $"col1.favorite_array").where($"col0" === "name001")
-    filtered.show
+    filtered.show()
     val collected = filtered.collect()
     if (collected(0).getSeq[String](1)(0) != "number1") {
       throw new UserCustomizedSampleException("value invalid")
@@ -141,7 +141,7 @@ object AvroSource {
       .format("org.apache.hadoop.hbase.spark")
       .save()
     val newDF = withCatalog(avroCatalogInsert)
-    newDF.show
+    newDF.show()
     newDF.printSchema()
     if(newDF.count() != 256) {
       throw new UserCustomizedSampleException("value invalid")
@@ -149,10 +149,10 @@ object AvroSource {
 
     df.filter($"col1.name" === "name005" || $"col1.name" <= "name005")
      .select("col0", "col1.favorite_color", "col1.favorite_number")
-      .show
+      .show()
 
    df.filter($"col1.name" <= "name005" || $"col1.name".contains("name007"))
      .select("col0", "col1.favorite_color", "col1.favorite_number")
-      .show
+      .show()
   }
 }
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala
index 5839bf75823..96c6d6e4f92 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/DataType.scala
@@ -100,56 +100,56 @@ object DataType {
     // test less than 0
     val df = withCatalog(cat)
     val s = df.filter($"col0" < 0)
-    s.show
+    s.show()
     if(s.count() != 16){
       throw new UserCustomizedSampleException("value invalid")
     }
 
     //test less or equal than -10. The number of results is 11
     val num1 = df.filter($"col0" <= -10)
-    num1.show
+    num1.show()
     val c1 = num1.count()
     println(s"test result count should be 11: $c1")
 
     //test less or equal than -9. The number of results is 12
     val num2 = df.filter($"col0" <= -9)
-    num2.show
+    num2.show()
     val c2 = num2.count()
     println(s"test result count should be 12: $c2")
 
     //test greater or equal than -9". The number of results is 21
     val num3 = df.filter($"col0" >= -9)
-    num3.show
+    num3.show()
     val c3 = num3.count()
     println(s"test result count should be 21: $c3")
 
     //test greater or equal than 0. The number of results is 16
     val num4 = df.filter($"col0" >= 0)
-    num4.show
+    num4.show()
     val c4 = num4.count()
     println(s"test result count should be 16: $c4")
 
     //test greater than 10. The number of results is 10
     val num5 = df.filter($"col0" > 10)
-    num5.show
+    num5.show()
     val c5 = num5.count()
     println(s"test result count should be 10: $c5")
 
     // test "and". The number of results is 11
     val num6 = df.filter($"col0" > -10 && $"col0" <= 10)
-    num6.show
+    num6.show()
     val c6 = num6.count()
     println(s"test result count should be 11: $c6")
 
     //test "or". The number of results is 21
     val num7 = df.filter($"col0" <= -10 || $"col0" > 10)
-    num7.show
+    num7.show()
     val c7 = num7.count()
     println(s"test result count should be 21: $c7")
 
     //test "all". The number of results is 32
     val num8 = df.filter($"col0" >= -100)
-    num8.show
+    num8.show()
     val c8 = num8.count()
     println(s"test result count should be 32: $c8")
 
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/HBaseSource.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/HBaseSource.scala
index ed239900fa2..056c071d5dd 100644
--- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/HBaseSource.scala
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/example/datasources/HBaseSource.scala
@@ -89,7 +89,7 @@ object HBaseSource {
       .save()
 
     val df = withCatalog(cat)
-    df.show
+    df.show()
     df.filter($"col0" <= "row005")
       .select($"col0", $"col1").show
     df.filter($"col0" === "row005" || $"col0" <= "row005")
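
Reviewer note, for context rather than as part of the patch: the change follows the Scala convention that a method performing side effects, such as DataFrame.show(), which prints rows to stdout, is declared and invoked with empty parentheses, while a parameterless pure accessor omits them. Scala 2.13 deprecates, and Scala 3 rejects, calling a ()-declared method without parentheses, so the explicit form also future-proofs these examples. A minimal sketch of the convention, using hypothetical Row and MiniFrame types rather than Spark's API:

object ShowConvention {
  final case class Row(col0: String)

  // Hypothetical stand-in for a DataFrame; not Spark's API.
  class MiniFrame(rows: Seq[Row]) {
    def show(): Unit = rows.foreach(r => println(r.col0)) // side effect: declared with ()
    def count: Long = rows.size                           // pure accessor: declared without ()
  }

  def main(args: Array[String]): Unit = {
    val df = new MiniFrame(Seq(Row("row001"), Row("row002")))
    df.show()         // explicit () signals the side effect, as in the patch
    println(df.count) // accessor read without ()
  }
}

The same reasoning is why calls such as printSchema() and count() in the touched files already carry parentheses.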