// 1. Sink data to HBase via Flink
import org.apache.commons.lang.StringUtils
import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.scala.ExecutionEnvironment
import scala.collection.mutable.ListBuffer
import org.apache.flink.api.scala._
import org.apache.flink.api.scala.hadoop.mapreduce.HadoopOutputFormat
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Mutation, Put}
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Job
object HBaseSinkApp {
/**
 * Converts the DataSet into the (Text, Mutation) pair type that the
 * HBase TableOutputFormat expects.
 * @param input dataset of (rowKey-ish String, String, Int, String) records to write
 */
def convertToHBase(input: DataSet[(String, String, Int, String)]) = {
input.map{
new RichMapFunction[(String, String, Int, String),(Text, Mutation)] {
override def map(value: (String, String, Int, String)): (Text, Mutation) = {
//可以使用Put,但面向