Spark读写MySQL与HBase

1. MySQL 读取

// 1) Read rows from MySQL with JdbcRDD.
val sparkConf = new SparkConf().setMaster("local[2]").setAppName("JdbcApp")
val sc = new SparkContext(sparkConf)
// JdbcRDD opens one connection per partition via the supplied factory and
// splits the key range [1, 10] into 1 partition, binding the lower/upper
// bounds to the two '?' placeholders of the query.
val rdd = new org.apache.spark.rdd.JdbcRDD(sc, () => {
  // Loading the driver class is enough to register it with DriverManager;
  // the deprecated newInstance() call the original used is unnecessary.
  Class.forName("com.mysql.jdbc.Driver")
  java.sql.DriverManager.getConnection("jdbc:mysql://master01:3306/hive", "root", "123456")
},
  "select * from tableName where id >= ? and id <= ?;",
  1, 10, 1, r => (r.getInt(1), r.getString(2)))
println(rdd.count())
rdd.foreach(println(_))
sc.stop()

2. MySQL 写入

 /** Driver: parallelizes three sample values and writes each partition to MySQL. */
 def main(args: Array[String]): Unit = {
  val conf = new SparkConf().setMaster("local[2]").setAppName("HAppName")
  val context = new SparkContext(conf)
  val samples = context.parallelize(List("Female", "Male", "Female"))
  // One insertData call per partition, so each partition shares a connection.
  samples.foreachPartition(insertData)
}

 /**
  * Inserts every element of one partition into tableName(name) over a single
  * JDBC connection.
  *
  * Fixes vs. original: the connection and statement are now closed in
  * finally blocks (the original leaked both), and one PreparedStatement is
  * prepared once and reused for every row instead of re-created per row.
  */
 def insertData(iterator: Iterator[String]): Unit = {
  // Loading the driver class registers it; deprecated newInstance() dropped.
  Class.forName("com.mysql.jdbc.Driver")
  val conn = java.sql.DriverManager.getConnection("jdbc:mysql://master01:3306/hive", "root", "123456")
  try {
    val ps = conn.prepareStatement("insert into tableName(name) values (?)")
    try {
      iterator.foreach { data =>
        ps.setString(1, data)
        ps.executeUpdate()
      }
    } finally ps.close()
  } finally conn.close()
}

3. HBase 读取

// 3) Read an HBase table through TableInputFormat.
val sparkConf = new SparkConf().setMaster("local[2]").setAppName("HBaseApp")
val sc = new SparkContext(sparkConf)
val conf = HBaseConfiguration.create()
// Name of the HBase table to scan.
conf.set(TableInputFormat.INPUT_TABLE, "fruit")
val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
  classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
  classOf[org.apache.hadoop.hbase.client.Result])
// Cache BEFORE the first action so both count() and the foreach below reuse
// the scanned data; the original cached after count(), wasting one full scan.
hBaseRDD.cache()
val count = hBaseRDD.count()
println("hBaseRDD RDD Count:" + count)
hBaseRDD.foreach {
  case (_, result) =>
    val key = Bytes.toString(result.getRow)
    // Bytes.toBytes is the HBase-idiomatic conversion; String.getBytes uses
    // the platform default charset, which can differ between JVMs.
    val name = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("name")))
    val color = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("color")))
    println("Row key:" + key + " Name:" + name + " Color:" + color)
}
sc.stop()

4. HBase 写入

// 4) Write to HBase through the old-API TableOutputFormat.
val sparkConf = new SparkConf().setMaster("local[2]").setAppName("HBaseApp")
val sc = new SparkContext(sparkConf)
val conf = HBaseConfiguration.create()
val jobConf = new JobConf(conf)
jobConf.setOutputFormat(classOf[TableOutputFormat])
jobConf.set(TableOutputFormat.OUTPUT_TABLE, "fruit_spark")

// Recreate the target table from scratch so the write starts clean.
val fruitTable = TableName.valueOf("fruit_spark")
val tableDescr = new HTableDescriptor(fruitTable)
tableDescr.addFamily(new HColumnDescriptor(Bytes.toBytes("info")))
val admin = new HBaseAdmin(conf)
try {
  if (admin.tableExists(fruitTable)) {
    admin.disableTable(fruitTable)
    admin.deleteTable(fruitTable)
  }
  admin.createTable(tableDescr)
} finally admin.close() // the original leaked the admin connection

/** Converts (rowKey, name, price) into the (key, Put) pair TableOutputFormat expects. */
def convert(triple: (Int, String, Int)) = {
  val put = new Put(Bytes.toBytes(triple._1))
  put.addImmutable(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(triple._2))
  put.addImmutable(Bytes.toBytes("info"), Bytes.toBytes("price"), Bytes.toBytes(triple._3))
  (new ImmutableBytesWritable, put)
}
val initialRDD = sc.parallelize(List((1, "apple", 11), (2, "banana", 12), (3, "pear", 13)))
val localData = initialRDD.map(convert)
localData.saveAsHadoopDataset(jobConf)
sc.stop() // the original never stopped the SparkContext

 

  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值