import org.apache.hadoop.hbase.{CellUtil, HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{Result, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.spark.HBaseContext
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

object SparkHbaseScan {
  val spark = SparkSession.builder().appName("SparkHbaseScan").master("local[*]").getOrCreate()
  val sc = spark.sparkContext
  val conf = HBaseConfiguration.create()
  val hbaseContext = new HBaseContext(sc, conf)
  // Scan a table, optionally bounded by a start/stop row key
  def scanHbaseTB(tableName: String, startKey: Option[String] = None, endKey: Option[String] = None): RDD[(ImmutableBytesWritable, Result)] = {
    startKey match {
      // A start row key was provided: scan from it up to the stop key (or the start key itself)
      case Some(x) =>
        val scan = new Scan()
        scan.setStartRow(Bytes.toBytes(x)) // deprecated in HBase 2.x in favor of withStartRow
        scan.setStopRow(Bytes.toBytes(endKey.getOrElse(x)))
        hbaseContext.hbaseRDD(TableName.valueOf(tableName), scan)
      // No start row key: full-table scan
      case None =>
        hbaseContext.hbaseRDD(TableName.valueOf(tableName), new Scan())
    }
  }
  def main(args: Array[String]): Unit = {
    // Traditional approach: drive the scan through TableInputFormat
    conf.set(TableInputFormat.SCAN_ROW_START, "startrowkey")
    conf.set(TableInputFormat.SCAN_ROW_STOP, "stoprowkey")
    conf.set(TableInputFormat.INPUT_TABLE, "SparkHbase")
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
    // Same scan expressed through HBaseContext
    val sparkHbaseRDD = scanHbaseTB("SparkHbase")
    sparkHbaseRDD.foreach { case (key, rs) =>
      val rowKey = Bytes.toString(key.copyBytes())
      // "cf" / "col" are placeholder family and qualifier names; substitute your schema's
      val cell = rs.getColumnLatestCell(Bytes.toBytes("cf"), Bytes.toBytes("col"))
      val value = if (cell != null) Bytes.toString(CellUtil.cloneValue(cell)) else null
      // note: foreach runs on the executors, so this prints in executor logs
      println(s"the rowKey is $rowKey the value is $value")
    }
  }
}
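
HBaseContext also covers the write path. Below is a minimal sketch of its bulkPut helper going the other way, writing an RDD of key/value pairs back into the table. It reuses the sc and hbaseContext defined above; the "SparkHbase" table name matches the read example, while the "cf" family and "col" qualifier are illustrative assumptions.

import org.apache.hadoop.hbase.client.Put

val rows = sc.parallelize(Seq(("row1", "value1"), ("row2", "value2")))
hbaseContext.bulkPut[(String, String)](
  rows,
  TableName.valueOf("SparkHbase"),
  { case (rowKey, value) =>
    // build one Put per RDD element; "cf"/"col" are placeholder column names
    new Put(Bytes.toBytes(rowKey))
      .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("col"), Bytes.toBytes(value))
  }
)

The appeal over a hand-rolled foreachPartition is that HBaseContext manages the HBase connection per executor, so each partition streams its puts through a shared connection instead of opening its own.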