Kafka Streams

This article shows how to use Apache Kafka Streams, through the UserFriendsStream and EventAttendStream applications, to consume and transform data from Kafka for real-time collection and analysis of user friend relationships and event attendance status. It covers record parsing, stream processing, and Kafka topic operations.

UserFriendsStream

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.kstream.KStream;

import java.util.ArrayList;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;

/**
 * 2021.12.28
 * Input topic: user_friends_rows    (user, friends)
 *   3197468391,1346449342 387324416 4226080662
 *
 * Output topic: user_friends        (user, friend)
 *   3197468391,1346449342
 *   3197468391,387324416
 *   3197468391,4226080662
 */
public class UserFriendsStream {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG,"userFriend");
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,"192.168.111.131:9092");
        prop.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG,3000);
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,"false");
        //earliest latest none
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"earliest");
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG,Serdes.String().getClass());


        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> userFriends = builder.stream("user_friends_rows")
                .flatMap((key, value) -> { // e.g. 3197468391,1346449342 387324416 4226080662
                    ArrayList<KeyValue<String, String>> list = new ArrayList<>();
                    String[] fields = value.toString().split(","); // ["3197468391", "1346449342 387324416 4226080662"]
                    if (fields.length == 2) {
                        String user = fields[0];                    // 3197468391
                        String[] friends = fields[1].split("\\s+"); // \s+ matches any run of whitespace
                        if (user.trim().length() > 0) {
                            for (String friend : friends) {
                                System.out.println(user + "," + friend);
                                list.add(new KeyValue<>(null, user + "," + friend));
                            }
                        }
                    }
                    return list;
                });

        userFriends.to("user_friends");

        // Build the topology
        Topology topo = builder.build();
        final KafkaStreams streams = new KafkaStreams(topo, prop);

        final CountDownLatch latch = new CountDownLatch(1);

        Runtime.getRuntime().addShutdownHook(new Thread("stream"){
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });



        try {
            streams.start();
            latch.await();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        } catch (StreamsException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        System.exit(0);


    }
}
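The flattening logic above can be verified without a running broker by driving the same topology with TopologyTestDriver from kafka-streams-test-utils. The sketch below is an illustration rather than part of the original code: the class name UserFriendsStreamTest is made up, the test-utils dependency is assumed to be on the classpath, and the topic names are reused from the code above.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;

import java.util.ArrayList;
import java.util.Properties;

public class UserFriendsStreamTest {
    public static void main(String[] args) {
        // Rebuild the same user_friends_rows -> user_friends topology as above
        StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream("user_friends_rows")
                .flatMap((key, value) -> {
                    ArrayList<KeyValue<String, String>> list = new ArrayList<>();
                    String[] fields = value.split(",");
                    if (fields.length == 2 && fields[0].trim().length() > 0) {
                        for (String friend : fields[1].split("\\s+")) {
                            list.add(new KeyValue<>(null, fields[0] + "," + friend));
                        }
                    }
                    return list;
                })
                .to("user_friends");

        Properties prop = new Properties();
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG, "userFriendTest");
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // never contacted by the test driver
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), prop)) {
            TestInputTopic<String, String> in = driver.createInputTopic(
                    "user_friends_rows", Serdes.String().serializer(), Serdes.String().serializer());
            TestOutputTopic<String, String> out = driver.createOutputTopic(
                    "user_friends", Serdes.String().deserializer(), Serdes.String().deserializer());

            in.pipeInput(null, "3197468391,1346449342 387324416 4226080662");
            // One input row should fan out into three user,friend records
            System.out.println(out.readValuesToList());
            // [3197468391,1346449342, 3197468391,387324416, 3197468391,4226080662]
        }
    }
}

Note that the flattened records are written with a null key, so they are spread across the partitions of user_friends rather than grouped by user; if downstream consumers need per-user ordering, emit the user id as the record key instead.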

EventAttendStream

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.kstream.KStream;

import java.util.ArrayList;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
//2021.12.28
public class EventAttendStream {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG,"eventattend");
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,"192.168.111.131:9092");
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG,Serdes.String().getClass());
        prop.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG,3000);
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,"false"); // disable auto commit
        //earliest latest none
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"earliest");


        StreamsBuilder builder = new StreamsBuilder();
        KStream<Object, Object> ear = builder.stream("event_attendess_row"); // source stream
        ear.flatMap((key, value) -> {
            ArrayList<KeyValue<String, String>> list = new ArrayList<>();
            String[] fields = value.toString().split(",");
            String eventid = fields[0];

            // yes
            if (fields.length >= 2 && fields[1].trim().length() > 0) {
                String[] yes = fields[1].trim().split("\\s+");
                for (String y : yes) {
                    System.out.println(eventid + "," + y + ",yes");
                    list.add(new KeyValue<>(null, eventid + "," + y + ",yes"));
                }
            }

            // maybe
            if (fields.length >= 3 && fields[2].trim().length() > 0) {
                String[] maybe = fields[2].trim().split("\\s+");
                for (String mb : maybe) {
                    System.out.println(eventid + "," + mb + ",maybe");
                    list.add(new KeyValue<>(null, eventid + "," + mb + ",maybe"));
                }
            }

            // invited
            if (fields.length >= 4 && fields[3].trim().length() > 0) {
                String[] invited = fields[3].trim().split("\\s+");
                for (String i : invited) {
                    System.out.println(eventid + "," + i + ",invited");
                    list.add(new KeyValue<>(null, eventid + "," + i + ",invited"));
                }
            }

            // no
            if (fields.length >= 5 && fields[4].trim().length() > 0) {
                String[] nos = fields[4].trim().split("\\s+");
                for (String no : nos) {
                    System.out.println(eventid + "," + no + ",no");
                    list.add(new KeyValue<>(null, eventid + "," + no + ",no"));
                }
            }

            return list;
        }).to("event_attendess");

        Topology topo = builder.build();
        final KafkaStreams streams = new KafkaStreams(topo, prop);
        final CountDownLatch latch = new CountDownLatch(1);

        Runtime.getRuntime().addShutdownHook(new Thread("stream"){
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });

        try {
            streams.start();
            latch.await();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        } catch (StreamsException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        System.exit(0);


    }

}
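The four yes/maybe/invited/no branches differ only in the column index and the status label, so they can be collapsed into one loop. The helper below is a sketch of that refactor (the class and method names AttendanceParser/parseStatuses are made up; the column order eventid,yes,maybe,invited,no follows the record format used above):

import org.apache.kafka.streams.KeyValue;

import java.util.ArrayList;
import java.util.List;

public class AttendanceParser {
    // Status columns in the order they appear after the event id
    private static final String[] STATUSES = {"yes", "maybe", "invited", "no"};

    public static List<KeyValue<String, String>> parseStatuses(String value) {
        List<KeyValue<String, String>> list = new ArrayList<>();
        String[] fields = value.split(",");
        if (fields[0].trim().length() == 0) {
            return list;
        }
        String eventid = fields[0];
        for (int i = 0; i < STATUSES.length; i++) {
            int col = i + 1; // status columns start at index 1
            if (fields.length > col && fields[col].trim().length() > 0) {
                for (String user : fields[col].trim().split("\\s+")) {
                    list.add(new KeyValue<>(null, eventid + "," + user + "," + STATUSES[i]));
                }
            }
        }
        return list;
    }
}

With this helper the stream body reduces to ear.flatMap((key, value) -> AttendanceParser.parseStatuses(value.toString())).to("event_attendess"); the output records match the original version, aside from the System.out.println debug output and an extra guard against an empty event id.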

MyStreamTest

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.kstream.KStream;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;

public class MyStreamTest {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG, "myStream");
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.111.131:9092");
        prop.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 3000);
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        //earliest latest none
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        KStream<Object, Object> in = builder.stream("mystreamin");
        in.to("mystreamout"); // pass-through: copy every record from mystreamin to mystreamout


        Topology topo = builder.build();
        final KafkaStreams streams = new KafkaStreams(topo, prop);
        final CountDownLatch latch = new CountDownLatch(1);

        Runtime.getRuntime().addShutdownHook(new Thread("stream"){
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });

        try {
            streams.start();
            latch.await();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        } catch (StreamsException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        System.exit(0);


    }

}
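To run these jobs against a real broker you also need some data in the input topics. kafka-console-producer works fine for this; the sketch below does the same thing in Java to keep everything in one language. The broker address is the one used in the configs above, and the sample record values are made up to match the formats described earlier.

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class SampleDataProducer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.111.131:9092");
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(prop)) {
            // user_friends_rows: user,friend1 friend2 friend3 ...
            producer.send(new ProducerRecord<>("user_friends_rows",
                    "3197468391,1346449342 387324416 4226080662"));
            // event_attendess_row: eventid,yes,maybe,invited,no (each column is a space-separated id list; made-up ids)
            producer.send(new ProducerRecord<>("event_attendess_row",
                    "100001,201 202,203,204,205"));
            // mystreamin: any text, copied verbatim to mystreamout by MyStreamTest
            producer.send(new ProducerRecord<>("mystreamin", "hello kafka streams"));
            producer.flush();
        }
    }
}

After this producer and the three stream jobs have run, kafka-console-consumer on user_friends, event_attendess, and mystreamout should show the flattened and copied records.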


