1. Reading from MySQL
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.JdbcRDD

val sparkConf = new SparkConf().setMaster("local[2]").setAppName("JdbcApp")
val sc = new SparkContext(sparkConf)
val rdd = new JdbcRDD(
  sc,
  () => {
    // Connection factory: register the driver and open a connection.
    // Each partition invokes this on its executor.
    Class.forName("com.mysql.jdbc.Driver")
    java.sql.DriverManager.getConnection("jdbc:mysql://master01:3306/hive", "root", "123456")
  },
  // The two ? placeholders are bound to each partition's slice of the id range.
  "select * from tableName where id >= ? and id <= ?",
  1, 10, 1, // lowerBound, upperBound, numPartitions
  r => (r.getInt(1), r.getString(2)))
println(rdd.count())
rdd.foreach(println)
sc.stop()
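For comparison, the same read can go through the DataFrame API, which infers the schema from the table. A minimal sketch, assuming Spark 2.x with spark-sql on the classpath; the connection details mirror the RDD example and "tableName" remains a placeholder:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[2]")
  .appName("JdbcDataFrameApp")
  .getOrCreate()

// spark.read with the jdbc source issues the query through JDBC
// and maps the result set to a DataFrame with an inferred schema.
val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:mysql://master01:3306/hive")
  .option("dbtable", "tableName")
  .option("user", "root")
  .option("password", "123456")
  .load()

df.show()
spark.stop()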
2. Writing to MySQL
import org.apache.spark.{SparkConf, SparkContext}

def main(args: Array[String]): Unit = {
  val sparkConf = new SparkConf().setMaster("local[2]").setAppName("HAppName")
  val sc = new SparkContext(sparkConf)
  val data = sc.parallelize(List("Female", "Male", "Female"))
  // Open one connection per partition, not per record.
  data.foreachPartition(insertData)
  sc.stop()
}

def insertData(iterator: Iterator[String]): Unit = {
  Class.forName("com.mysql.jdbc.Driver")
  val conn = java.sql.DriverManager.getConnection("jdbc:mysql://master01:3306/hive", "root", "123456")
  try {
    // Reuse a single PreparedStatement for every row in the partition.
    val ps = conn.prepareStatement("insert into tableName(name) values (?)")
    iterator.foreach { data =>
      ps.setString(1, data)
      ps.executeUpdate()
    }
    ps.close()
  } finally {
    conn.close() // always release the connection, even on failure
  }
}
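When a partition holds many rows, one network round trip per insert dominates the cost. A sketch of insertData rewritten with standard JDBC batching (the batch size of 500 is an arbitrary choice, not from the original):

def insertDataBatched(iterator: Iterator[String]): Unit = {
  Class.forName("com.mysql.jdbc.Driver")
  val conn = java.sql.DriverManager.getConnection("jdbc:mysql://master01:3306/hive", "root", "123456")
  try {
    val ps = conn.prepareStatement("insert into tableName(name) values (?)")
    var pending = 0
    iterator.foreach { data =>
      ps.setString(1, data)
      ps.addBatch()          // queue the row instead of sending it immediately
      pending += 1
      if (pending >= 500) {  // flush every 500 rows; size is an arbitrary choice
        ps.executeBatch()
        pending = 0
      }
    }
    if (pending > 0) ps.executeBatch() // flush the remainder
    ps.close()
  } finally {
    conn.close()
  }
}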
3. Reading from HBase
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes

val sparkConf = new SparkConf().setMaster("local[2]").setAppName("HBaseApp")
val sc = new SparkContext(sparkConf)
val conf = HBaseConfiguration.create()
// Name of the HBase table to read
conf.set(TableInputFormat.INPUT_TABLE, "fruit")
val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
  classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
  classOf[org.apache.hadoop.hbase.client.Result])
// Cache before the first action so the table is scanned only once.
hBaseRDD.cache()
val count = hBaseRDD.count()
println("hBaseRDD RDD Count:" + count)
hBaseRDD.foreach {
  case (_, result) =>
    val key = Bytes.toString(result.getRow)
    val name = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("name")))
    val color = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("color")))
    println("Row key:" + key + " Name:" + name + " Color:" + color)
}
sc.stop()
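The scan above pulls every column of every row. TableInputFormat also honors scan-narrowing properties, set on conf before newAPIHadoopRDD is called. A sketch restricting the read to the two columns actually used; the "a"/"z" row-key bounds are placeholder values for illustration:

// Fetch only the columns the job consumes: space-separated family:qualifier pairs.
conf.set(TableInputFormat.SCAN_COLUMNS, "info:name info:color")
// Optionally bound the row-key range as well (start inclusive, stop exclusive).
conf.set(TableInputFormat.SCAN_ROW_START, "a")
conf.set(TableInputFormat.SCAN_ROW_STOP, "z")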
4. Writing to HBase
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.client.{HBaseAdmin, Put}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes

val sparkConf = new SparkConf().setMaster("local[2]").setAppName("HBaseApp")
val sc = new SparkContext(sparkConf)
val conf = HBaseConfiguration.create()

// Configure the old-API (mapred) output format to target the destination table.
val jobConf = new JobConf(conf)
jobConf.setOutputFormat(classOf[TableOutputFormat])
jobConf.set(TableOutputFormat.OUTPUT_TABLE, "fruit_spark")

// Recreate the target table with a single column family "info".
val fruitTable = TableName.valueOf("fruit_spark")
val tableDescr = new HTableDescriptor(fruitTable)
tableDescr.addFamily(new HColumnDescriptor(Bytes.toBytes("info")))
val admin = new HBaseAdmin(conf)
if (admin.tableExists(fruitTable)) {
  admin.disableTable(fruitTable)
  admin.deleteTable(fruitTable)
}
admin.createTable(tableDescr)

// Turn each (id, name, price) triple into a Put keyed by id.
def convert(triple: (Int, String, Int)) = {
  val put = new Put(Bytes.toBytes(triple._1))
  put.addImmutable(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(triple._2))
  put.addImmutable(Bytes.toBytes("info"), Bytes.toBytes("price"), Bytes.toBytes(triple._3))
  (new ImmutableBytesWritable, put)
}

val initialRDD = sc.parallelize(List((1, "apple", 11), (2, "banana", 12), (3, "pear", 13)))
val localData = initialRDD.map(convert)
localData.saveAsHadoopDataset(jobConf)
sc.stop()
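The same write can also go through the newer Hadoop API via saveAsNewAPIHadoopDataset. A minimal sketch, assuming HBase 1.x and reusing conf and localData from above; note this TableOutputFormat lives in the mapreduce package, unlike the mapred one used earlier:

import org.apache.hadoop.hbase.mapreduce.{TableOutputFormat => NewTableOutputFormat}
import org.apache.hadoop.mapreduce.Job

val job = Job.getInstance(conf)
job.setOutputFormatClass(classOf[NewTableOutputFormat[ImmutableBytesWritable]])
job.getConfiguration.set(NewTableOutputFormat.OUTPUT_TABLE, "fruit_spark")

// Same (ImmutableBytesWritable, Put) pairs as above, written via the new Hadoop API.
localData.saveAsNewAPIHadoopDataset(job.getConfiguration)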