import org.apache.spark._
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.util.Bytes

object Hbase_obj {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local").setAppName("HBaseTest")
    val sc = new SparkContext(sparkConf)

    val table_name = "news"
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "192.168.110.233,192.168.110.234,192.168.110.235")
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set(TableInputFormat.INPUT_TABLE, table_name)

    // Scan the whole table as an RDD of (row key, row) pairs and count the rows.
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])
    println(hBaseRDD.count())

    insert_hbase(100002, 3)
    sc.stop()
  }

  // Write to HBase: store the type id of one news item under content:typeid,
  // keyed by the news id.
  def insert_hbase(news_id: Int, type_id: Int): Unit = {
    val table_name = "news"
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "192.168.110.233,192.168.110.234,192.168.110.235")
    conf.set("hbase.zookeeper.property.clientPort", "2181")

    val table = new HTable(conf, table_name)
    val p = new Put(Bytes.toBytes(news_id.toString))
    p.add(Bytes.toBytes("content"), Bytes.toBytes("typeid"), Bytes.toBytes(type_id.toString))
    table.put(p)
    table.close()
  }
}
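
// Note: new HTable(conf, ...) and Put.add(family, qualifier, value) are deprecated
// in HBase 1.x and removed in 2.x. A minimal sketch of the same write with the
// Connection-based client API (assuming an HBase 1.0+ client on the classpath;
// the object name here is illustrative, not part of the original code):
object Hbase_obj_connection_sketch {
  import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
  import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
  import org.apache.hadoop.hbase.util.Bytes

  def insert_hbase(news_id: Int, type_id: Int): Unit = {
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "192.168.110.233,192.168.110.234,192.168.110.235")
    conf.set("hbase.zookeeper.property.clientPort", "2181")

    // Connections are heavyweight; real code would share one per JVM rather than
    // opening one per call.
    val connection = ConnectionFactory.createConnection(conf)
    try {
      val table = connection.getTable(TableName.valueOf("news"))
      try {
        val p = new Put(Bytes.toBytes(news_id.toString))
        // addColumn is the non-deprecated replacement for Put.add.
        p.addColumn(Bytes.toBytes("content"), Bytes.toBytes("typeid"),
          Bytes.toBytes(type_id.toString))
        table.put(p)
      } finally table.close()
    } finally connection.close()
  }
}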
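
// For completeness, a sketch of pulling the stored typeid back out of the scan
// RDD built in main, assuming the same "news" table and content:typeid column.
// typeIds is a hypothetical helper name, not from the original code:
object Hbase_obj_read_sketch {
  import org.apache.hadoop.hbase.client.Result
  import org.apache.hadoop.hbase.io.ImmutableBytesWritable
  import org.apache.hadoop.hbase.util.Bytes
  import org.apache.spark.rdd.RDD

  def typeIds(hBaseRDD: RDD[(ImmutableBytesWritable, Result)]): RDD[(String, String)] =
    hBaseRDD.map { case (rowKey, result) =>
      // getValue returns null when the cell is absent, so guard before decoding.
      val raw = result.getValue(Bytes.toBytes("content"), Bytes.toBytes("typeid"))
      val typeId = if (raw == null) "" else Bytes.toString(raw)
      (Bytes.toString(rowKey.get()), typeId)
    }
}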