HBASE-20176 Fix warnings about Logging import in hbase-spark test code

Signed-off-by: tedyu <yuzhihong@gmail.com>
This commit is contained in:
Artem Ervits 2018-03-12 12:28:13 -04:00 committed by tedyu
parent f30dfc69bb
commit c8fba7071e
9 changed files with 0 additions and 9 deletions

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.spark
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.hbase.client.{Get, ConnectionFactory}
 import org.apache.hadoop.hbase.io.hfile.{CacheConfig, HFile}
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles
 import org.apache.hadoop.hbase.{HConstants, CellUtil, HBaseTestingUtility, TableName}
 import org.apache.hadoop.hbase.util.Bytes

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.spark
 import org.apache.avro.Schema
 import org.apache.avro.generic.GenericData
 import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.spark.datasources.HBaseSparkConf
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.hadoop.hbase.{HBaseTestingUtility, TableName}

View File

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.spark
 import java.util
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.spark.datasources.{HBaseSparkConf, JavaBytesEncoder}
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.spark.sql.types._

View File

@@ -17,7 +17,6 @@
 package org.apache.hadoop.hbase.spark
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.spark.datasources.{DoubleSerDes, SerDes}
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.spark.sql.datasources.hbase.{DataTypeParserWrapper, HBaseTableCatalog}

View File

@@ -22,7 +22,6 @@ import scala.util.Random
 import org.apache.hadoop.hbase.client.{BufferedMutator, Table, RegionLocator,
   Connection, BufferedMutatorParams, Admin, TableBuilder}
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hbase.TableName
 import org.scalatest.FunSuite

View File

@@ -20,7 +20,6 @@ import org.apache.hadoop.hbase.client._
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.hadoop.hbase.{ CellUtil, TableName, HBaseTestingUtility}
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.spark.{SparkException, SparkContext}
 import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

View File

@@ -17,7 +17,6 @@
 package org.apache.hadoop.hbase.spark
 import org.apache.hadoop.hbase.client._
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.hadoop.hbase.{CellUtil, TableName, HBaseTestingUtility}
 import org.apache.spark.rdd.RDD

View File

@@ -17,7 +17,6 @@
 package org.apache.hadoop.hbase.spark
 import org.apache.hadoop.hbase.client._
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.util.Bytes
 import org.apache.hadoop.hbase.{CellUtil, TableName, HBaseTestingUtility}
 import org.apache.hadoop.hbase.spark.HBaseRDDFunctions._

View File

@@ -18,7 +18,6 @@
 package org.apache.hadoop.hbase.spark
 import org.apache.hadoop.hbase.spark.datasources.HBaseSparkConf
-import org.apache.hadoop.hbase.spark.Logging
 import org.apache.hadoop.hbase.{TableName, HBaseTestingUtility}
 import org.apache.spark.sql.datasources.hbase.HBaseTableCatalog
 import org.apache.spark.sql.{DataFrame, SQLContext}