There are generally two ways to access HBase from Scala: read the HBase data through Spark and turn it directly into an RDD, or take the Java-style approach of going through HTable and processing the returned data yourself. The practical difference is that the RDD read runs in parallel across the cluster nodes when fetching from HBase, while the HTable approach processes rows serially on the client; only the HBase side itself is distributed.
1. Converting to an RDD
package com.isesol.spark
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.spark._
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.filter._
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
object hbasescan {
  def main(args: Array[String]) {
    val conf = new SparkConf()
    conf.setMaster("local").setAppName("this is for spark SQL")
    //conf.setSparkHome("d:\\spark_home")

    // HBase / ZooKeeper connection settings
    val hbaseconf = HBaseConfiguration.create()
    hbaseconf.set("hbase.zookeeper.quorum", "datanode01.isesol.com,datanode02.isesol.com,datanode03.isesol.com,datanode04.isesol.com,cmserver.isesol.com")
    hbaseconf.set("hbase.zookeeper.property.clientPort", "2181")
    hbaseconf.set("maxSessionTimeout", "6")

    val sc = new SparkContext(conf)
    try {
      println("start to read from hbase")
      // HBaseContext (from the hbase-spark module) distributes the scan across the executors
      val hbaseContext = new HBaseContext(sc, hbaseconf)
      val scan = new Scan()
      scan.setMaxVersions()
      //scan.setRowPrefixFilter(Bytes.toBytes("i51530048-1007-9223370552914159518"))
      scan.setCaching(100)
      // Keep only rows whose cf:age value compares as less than "1" (byte-wise comparison)
      val filter = new SingleColumnValueFilter(Bytes.toBytes("cf"), Bytes.toBytes("age"), CompareOp.LESS, Bytes.toBytes("1"))
      scan.setFilter(filter)
      // hbaseRDD returns an RDD of (ImmutableBytesWritable, Result) pairs
      val hbaserdd = hbaseContext.hbaseRDD(TableName.valueOf("bank"), scan)
      hbaserdd.cache()
      println(hbaserdd.count())
    } catch {
      case ex: Exception => println("cannot connect to HBase")
    } finally {
      sc.stop()
    }
  }
}
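The example above only prints a row count. As a minimal sketch of extracting actual values from the RDD (to be placed inside the try block above, and assuming, as with the hbase-spark HBaseContext used here, that hbaseRDD yields (ImmutableBytesWritable, Result) pairs), the cf:age column checked by the filter can be read back like this:

// Map each (rowkey, Result) pair to a printable string; this runs on the executors
val rows = hbaserdd.map { case (_, result) =>
  val rowKey = Bytes.toString(result.getRow)
  val age = Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("age")))
  rowKey + "\t" + age
}
rows.take(10).foreach(println)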
2. Processing with HTable (this snippet reuses the hbaseconf built in the example above)
// Scan the table with a plain HTable; all row processing happens serially in the client
val htable = new HTable(hbaseconf, "t_device_fault_statistics")
val scan1 = new Scan()
// Note: setCaching takes a row count (rows fetched per RPC), not a byte size
scan1.setCaching(3 * 1024 * 1024)
val scanner = htable.getScanner(scan1)
// ResultScanner.next() returns null once the scan is exhausted
var result = scanner.next()
while (result != null) {
  println(Bytes.toString(result.getRow) + "\t" +
    Bytes.toString(result.getValue("cf".getBytes, "fault_level2_name".getBytes)))
  result = scanner.next()
}
scanner.close()
htable.close()
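For a more Scala-idiomatic loop, ResultScanner is a java.lang.Iterable[Result], so it can also be traversed with JavaConverters. A minimal alternative sketch, assuming the same htable and scan1 as above:

import scala.collection.JavaConverters._

// asScala turns the Java Iterable into a Scala collection; close everything in finally
val scanner = htable.getScanner(scan1)
try {
  for (result <- scanner.asScala) {
    println(Bytes.toString(result.getRow) + "\t" +
      Bytes.toString(result.getValue("cf".getBytes, "fault_level2_name".getBytes)))
  }
} finally {
  scanner.close()
  htable.close()
}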