HBASE-17547 HBase-Spark Module: TableCatalog doesn't support multiple columns from a single column family - revert due to not using git am

tedyu 2017-01-26 11:57:04 -08:00
parent 07c175b10a
commit 81d3e25a75
2 changed files with 9 additions and 9 deletions


@@ -150,7 +150,7 @@ case class HBaseTableCatalog(
   def getRowKey: Seq[Field] = row.fields
   def getPrimaryKey= row.keys(0)
   def getColumnFamilies = {
-    sMap.fields.map(_.cf).filter(_ != HBaseTableCatalog.rowKey).toSeq.distinct
+    sMap.fields.map(_.cf).filter(_ != HBaseTableCatalog.rowKey)
   }
   def get(key: String) = params.get(key)
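
The line removed above was the substance of the original HBASE-17547 patch: when several DataFrame columns map into one column family, collecting the families without deduplication returns repeated entries. A minimal self-contained sketch of the difference (hypothetical Field shape, not the real connector classes):

// Hypothetical stand-in for the connector's field metadata.
case class Field(col: String, cf: String)

object ColumnFamilySketch extends App {
  val rowKey = "rowkey"
  val fields = Seq(
    Field("key",  rowKey),
    Field("col1", "cf1"),
    Field("col2", "cf1")  // a second column in the same family
  )

  // Behavior this revert restores: one entry per mapped column.
  println(fields.map(_.cf).filter(_ != rowKey))           // List(cf1, cf1)

  // Behavior the reverted patch had introduced: one entry per family.
  println(fields.map(_.cf).filter(_ != rowKey).distinct)  // List(cf1)
}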


@@ -812,9 +812,9 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
      |"columns":{
      |"col0":{"cf":"rowkey", "col":"key", "type":"string"},
      |"col1":{"cf":"cf1", "col":"col1", "type":"boolean"},
-     |"col2":{"cf":"cf1", "col":"col2", "type":"double"},
+     |"col2":{"cf":"cf2", "col":"col2", "type":"double"},
      |"col3":{"cf":"cf3", "col":"col3", "type":"float"},
-     |"col4":{"cf":"cf3", "col":"col4", "type":"int"},
+     |"col4":{"cf":"cf4", "col":"col4", "type":"int"},
      |"col5":{"cf":"cf5", "col":"col5", "type":"bigint"},
      |"col6":{"cf":"cf6", "col":"col6", "type":"smallint"},
      |"col7":{"cf":"cf7", "col":"col7", "type":"string"},
@@ -851,7 +851,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
   test("full query") {
     val df = withCatalog(writeCatalog)
-    df.show()
+    df.show
     assert(df.count() == 256)
   }
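
The remaining test hunks only toggle `show()` back to `show`, which is behavior-neutral: in Scala 2, a method declared with an empty parameter list may also be invoked without parentheses. A tiny runnable sketch (hypothetical Frame class, not Spark's DataFrame):

object ParenCallSketch extends App {
  class Frame { def show(): Unit = println("rendered") }
  val df = new Frame
  df.show()  // style the reverted patch used
  df.show    // style this revert restores; the same call
}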
@@ -861,7 +861,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
     val df = withCatalog(writeCatalog)
     val s = df.filter($"col0" <= "row005")
       .select("col0", "col1")
-    s.show()
+    s.show
     assert(s.count() == 6)
   }
@@ -999,7 +999,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
   test("avro full query") {
     val df = withAvroCatalog(avroCatalog)
-    df.show()
+    df.show
     df.printSchema()
     assert(df.count() == 256)
   }
@@ -1013,7 +1013,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
       .format("org.apache.hadoop.hbase.spark")
       .save()
     val newDF = withAvroCatalog(avroCatalogInsert)
-    newDF.show()
+    newDF.show
     newDF.printSchema()
     assert(newDF.count() == 256)
   }
@@ -1024,7 +1024,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
     val df = withAvroCatalog(avroCatalog)
     val r = df.filter($"col1.name" === "name005" || $"col1.name" <= "name005")
       .select("col0", "col1.favorite_color", "col1.favorite_number")
-    r.show()
+    r.show
     assert(r.count() == 6)
   }
@@ -1034,7 +1034,7 @@ BeforeAndAfterEach with BeforeAndAfterAll with Logging {
     val df = withAvroCatalog(avroCatalog)
     val s = df.filter($"col1.name" <= "name005" || $"col1.name".contains("name007"))
       .select("col0", "col1.favorite_color", "col1.favorite_number")
-    s.show()
+    s.show
     assert(s.count() == 7)
   }
 }