12 Spark + Kafka + MySQL Integration

If the program is missing dependency jars, they need to be made available to the cluster. One way is to collect every jar under /data/spark-workspace/lib into a comma-separated list:

for i in /data/spark-workspace/lib/*.jar
do
    LIBJAR=$i,$LIBJAR
done
export LIBJARS=${LIBJAR%?}   # ${LIBJAR%?} strips the trailing comma
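
A comma-separated list is exactly the format spark-submit expects for its --jars option, so the collected jars can be shipped with the job. A usage sketch, reusing the master and jar path from the submit command shown below:

./spark-submit --master spark://kolla:7077 --jars $LIBJARS --class org.apache.spark.examples.streaming.JavaKafkaWordCount /data/example/pro-spark-example-1.6.2.jar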


/*  mvn dependency:copy-dependencies https://blog.csdn.net/u013514928/article/details/77930183
    File location:
    .//spark/examples/src/main/java/org/apache/spark/examples/streaming/JavaKafkaWordCount.java
    To run the job: org.apache.spark.examples.streaming is the package name, JavaKafkaWordCount is the class name,
    and /data/example/pro-spark-example-1.6.2.jar is the path of the jar built into the target directory:
    ./spark-submit --master spark://kolla:7077 --name Spark-kafka-wordcount --class org.apache.spark.examples.streaming.JavaKafkaWordCount /data/example/pro-spark-example-1.6.2.jar
*/
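
Before submitting, you also need messages flowing into the "test" topic that the example consumes. One way is Kafka's console producer; the broker address below is an assumption, since the section only records the ZooKeeper address 192.168.10.141:2181/kafka:

bin/kafka-console-producer.sh --broker-list 192.168.10.141:9092 --topic test

Each line typed into the producer is consumed in the next 2-second batch, and its word counts are both printed to stdout and written to MySQL.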
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.examples.streaming;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.MysqlUtil; // the author's own JDBC helper, not part of Spark (sketched after the listing)
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import scala.Tuple2;

import com.google.common.collect.Lists;

/**
 * Consumes messages from one or more topics in Kafka and does wordcount.
 *
 * Usage: JavaKafkaWordCount <zkQuorum> <group> <topics> <numThreads>
 *   <zkQuorum> is a list of one or more zookeeper servers that make quorum
 *   <group> is the name of kafka consumer group
 *   <topics> is a list of one or more kafka topics to consume from
 *   <numThreads> is the number of threads the kafka consumer should use
 *
 * To run this example:
 *   `$ bin/run-example org.apache.spark.examples.streaming.JavaKafkaWordCount zoo01,zoo02, \
 *    zoo03 my-consumer-group topic1,topic2 1`
 */

public final class JavaKafkaWordCount {
  private static final Pattern SPACE = Pattern.compile(" ");

  private JavaKafkaWordCount() {
  }

  public static void main(String[] args) {
//    if (args.length < 4) {
//      System.err.println("Usage: JavaKafkaWordCount <zkQuorum> <group> <topics> <numThreads>");
//      System.exit(1);
//    }

//    StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaKafkaWordCount");
    // Create the context with a 2-second batch interval:
    // a new batch of data is pulled every 2 seconds and then processed
    JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(2000));

    // There is only one topic, "test"
    int numThreads = 1;
    Map<String, Integer> topicMap = new HashMap<String, Integer>();
    String[] topics = {"test"};
    for (String topic: topics) {
      topicMap.put(topic, numThreads);
    }
    /*
      static JavaPairReceiverInputDStream<java.lang.String,java.lang.String>
      createStream(
                  JavaStreamingContext jssc, 
                  java.lang.String zkQuorum, 
                  java.lang.String groupId, 
                  java.util.Map<java.lang.String,java.lang.Integer> topics
                  )
      Create an input stream that pulls messages from Kafka Brokers.
    */
    String zk = "192.168.10.141:2181/kafka";  // ZooKeeper quorum (with Kafka chroot)
    String groupId = "spark";                 // Kafka consumer group id

    
    JavaPairReceiverInputDStream<String, String> messages =
            KafkaUtils.createStream(jssc, zk, groupId, topicMap);

    JavaDStream<String> lines = messages.map(new Function<Tuple2<String, String>, String>() {
      @Override
      public String call(Tuple2<String, String> tuple2) {
        return tuple2._2();
      }
    });

    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
      @Override
      public Iterable<String> call(String x) {
        return Lists.newArrayList(SPACE.split(x));
      }
    });
    

    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(
      new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
          return new Tuple2<String, Integer>(s, 1);
        }
      }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
          return i1 + i2;
        }
      });
    
    
    // For every micro-batch RDD, write the word counts to MySQL.
    // foreachPartition is used so that MysqlUtil opens one database
    // connection per partition instead of one per record.
    VoidFunction<JavaPairRDD<String, Integer>> foreachFunc =
        new VoidFunction<JavaPairRDD<String, Integer>>() {
      private static final long serialVersionUID = 1L;

      @Override
      public void call(JavaPairRDD<String, Integer> results) throws Exception {
        results.foreachPartition(new VoidFunction<Iterator<Tuple2<String, Integer>>>() {
          @Override
          public void call(Iterator<Tuple2<String, Integer>> r) throws Exception {
            System.out.println("Starting MySQL write");
            MysqlUtil.saveWC(r);
          }
        });
      }
    };
    wordCounts.foreachRDD(foreachFunc);

    wordCounts.print();
    jssc.start();
    jssc.awaitTermination();
  }
}
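
org.apache.spark.sql.MysqlUtil is not a Spark class; it is the author's own helper, whose source is not included in this section. Below is a minimal sketch of what saveWC could look like, assuming MySQL Connector/J 5.x, a database named spark on the ZooKeeper host, root/root credentials, and a wordcount(word, cnt) table with word as the primary key; every one of those names is an assumption, not taken from the original.

package org.apache.spark.sql;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Iterator;

import scala.Tuple2;

public class MysqlUtil {
  // Assumed connection settings -- adjust to your environment.
  // Assumed table: CREATE TABLE wordcount (word VARCHAR(128) PRIMARY KEY, cnt INT);
  private static final String URL =
      "jdbc:mysql://192.168.10.141:3306/spark?useUnicode=true&characterEncoding=UTF-8";
  private static final String USER = "root";
  private static final String PASSWORD = "root";

  // Called once per partition (see foreachPartition above), so only one
  // connection is opened per partition rather than one per record.
  public static void saveWC(Iterator<Tuple2<String, Integer>> records) throws Exception {
    Class.forName("com.mysql.jdbc.Driver");
    Connection conn = DriverManager.getConnection(URL, USER, PASSWORD);
    try {
      // Upsert: each 2-second batch adds its counts to the running total.
      PreparedStatement ps = conn.prepareStatement(
          "INSERT INTO wordcount (word, cnt) VALUES (?, ?) " +
          "ON DUPLICATE KEY UPDATE cnt = cnt + VALUES(cnt)");
      while (records.hasNext()) {
        Tuple2<String, Integer> record = records.next();
        ps.setString(1, record._1());
        ps.setInt(2, record._2());
        ps.addBatch();
      }
      ps.executeBatch();
      ps.close();
    } finally {
      conn.close();
    }
  }
}

With the upsert, counts accumulate across micro-batches, so the table holds running totals rather than just the counts of the last 2-second window.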
 
