// Step 1: Initialize Kafka-related parameters — fetch the Kafka topic configuration when the App class is constructed.
/**
 * Initializes the Kafka consumer configuration: fills in the broker list and
 * consumer group id, converts the parameter map to the immutable Scala map
 * required by {@code KafkaCluster}, and loads the set of topics to consume
 * for the configured database.
 *
 * <p>NOTE(review): assumes the fields {@code kafkaParams}, {@code kafkaCluster}
 * and {@code topics} are declared on this class — their declarations are not
 * visible in this chunk; confirm their types are
 * {@code Map<String, String>}, {@code KafkaCluster} and {@code Set<String>}.
 *
 * @throws IllegalStateException if the Kafka configuration or the topic list
 *         cannot be loaded; the original cause is preserved. Failing fast here
 *         avoids a half-constructed App whose null {@code kafkaCluster} would
 *         only NPE later.
 */
public App() {
    try {
        kafkaParams.put("metadata.broker.list",
                ConfigUtil.getInstance().getKafkaConf().get("brokerlist"));
        kafkaParams.put("group.id", Constant.groupId);

        // KafkaCluster's constructor takes a scala.collection.immutable.Map,
        // so convert the Java map via a mutable Scala view first. The
        // anonymous $less$colon$less (Scala's <:< evidence) is the standard
        // Java-side trick to satisfy toMap's implicit parameter.
        scala.collection.mutable.Map<String, String> mutableKafkaParam =
                JavaConversions.mapAsScalaMap(kafkaParams);
        scala.collection.immutable.Map<String, String> immutableKafkaParam =
                mutableKafkaParam.toMap(
                        new Predef.$less$colon$less<Tuple2<String, String>, Tuple2<String, String>>() {
                            public Tuple2<String, String> apply(Tuple2<String, String> v1) {
                                return v1;
                            }
                        });
        this.kafkaCluster = new KafkaCluster(immutableKafkaParam);

        // Load every analyzer topic configured for this database.
        this.topics = new HashSet<String>();
        List<String> topicList =
                DatasourceUtil.getInstance().getAnalyzerTopicNamesByDb(Constant.DBNAME);
        for (String topic : topicList) {
            this.topics.add(topic);
        }
    } catch (Exception e) {
        // Rethrow instead of swallowing: a partially initialized App is unusable.
        throw new IllegalStateException("Failed to initialize Kafka parameters/topics", e);
    }
}
// Step 2: Set Spark conf parameters and define the SQLContext.
final SparkConf sparkConf = new SparkConf();
//使用Kryo序列化
sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");