Flink Project: E-commerce Real-Time Data Analysis (Part 2)

This article continues Part 1 of the series: https://blog.csdn.net/weixin_38255444/article/details/104818776

4: Writing the user browse record reporting service

  1. Write the entity class for the user browse record

    package com.ityouxin.report.bean.user;

    /**
     * Created by ityouxin
     */
    public class UserBrowse {
        private Long channelId;     // channel id
        private Long categoryId;    // product category id
        private Long productId;     // product id
        private String country;     // country
        private String province;    // province
        private String city;        // city
        private String network;     // network provider
        private String sources;     // traffic source
        private String browserType; // browser type
        private Long entryTime;     // entry (page open) time
        private Long leaveTime;     // leave time
        private Long userId;        // user id
    }
    // Afterwards generate the POJO boilerplate: all-args and no-args constructors,
    // getters/setters, toString(), etc. (a sketch follows below)
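
    A minimal sketch of that generated boilerplate for a single field (channelId); the remaining
    fields follow the same pattern, whether written by hand or generated by the IDE:

    // inside the UserBrowse class body
    public UserBrowse() {
    }

    public Long getChannelId() {
        return channelId;
    }

    public void setChannelId(Long channelId) {
        this.channelId = channelId;
    }

    @Override
    public String toString() {
        // the full version lists every field
        return "UserBrowse{" + "channelId=" + channelId + ", userId=" + userId + "}";
    }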
    
  2. Write the App startup class, which bootstraps the application via SpringApplication.run

    package com.ityouxin.report.Controller;

    import org.springframework.boot.SpringApplication;
    import org.springframework.boot.autoconfigure.SpringBootApplication;

    /**
     * Created by ityouxin
     */
    @SpringBootApplication
    public class App {
        public static void main(String[] args) {
            SpringApplication.run(App.class, args);
        }
    }

  3. Write the reporting service controller, which wraps the reported JSON into a Message and sends it to Kafka

    package com.ityouxin.report.Controller;

    import com.alibaba.fastjson.JSON;
    import com.ityouxin.report.msg.Message;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.http.HttpStatus;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.stereotype.Controller;
    import org.springframework.web.bind.annotation.RequestBody;
    import org.springframework.web.bind.annotation.RequestMapping;
    import org.springframework.web.bind.annotation.RequestMethod;

    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.io.PrintWriter;
    import java.util.Date;

    @Controller
    @RequestMapping("ReportApplication")
    public class ReportApplication {

        @Autowired
        private KafkaTemplate<String, String> kafkaTemplate;

        /**
         * @param json     the reported data (JSON string)
         * @param request  the HTTP request
         * @param response the HTTP response
         */
        @RequestMapping(value = "retrieveData", method = RequestMethod.POST)
        public void retrieveData(@RequestBody String json, HttpServletRequest request,
                                 HttpServletResponse response) {
            // Wrap the raw JSON in a Message carrying a count and a timestamp
            Message msg = new Message();
            msg.setMessage(json);
            msg.setCount(1);
            msg.setTimestamp(new Date().getTime());
            json = JSON.toJSONString(msg);
            System.out.println(json);
            // Business logic: publish the wrapped message to the "test" topic
            kafkaTemplate.send("test", "key", json);
            // Business logic ends; acknowledge the caller
            PrintWriter printWriter = getWriter(response);
            response.setStatus(HttpStatus.OK.value());
            printWriter.write("success");
            close(printWriter);
        }

        private PrintWriter getWriter(HttpServletResponse response) {
            response.setCharacterEncoding("utf-8");
            response.setContentType("application/json");
            OutputStream out = null;
            PrintWriter printWriter = null;
            try {
                out = response.getOutputStream();
                printWriter = new PrintWriter(out);
            } catch (IOException e) {
                e.printStackTrace();
            }
            return printWriter;
        }

        private void close(PrintWriter printWriter) {
            printWriter.flush();
            printWriter.close();
        }
    }
    
  4. Write the Kafka producer configuration class (a sample application.properties sketch follows the class)

    package com.ityouxin.report.Controller;

    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.kafka.annotation.EnableKafka;
    import org.springframework.kafka.core.DefaultKafkaProducerFactory;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.kafka.core.ProducerFactory;

    import java.util.HashMap;
    import java.util.Map;

    @Configuration
    @EnableKafka
    public class KafkaProducerConfig {

        @Value("${kafka.producer.servers}")
        private String servers;
        @Value("${kafka.producer.retries}")
        private int retries;
        @Value("${kafka.producer.batch.size}")
        private int batchSize;
        @Value("${kafka.producer.linger}")
        private int linger;
        @Value("${kafka.producer.buffer.memory}")
        private int bufferMemory;

        // Producer settings resolved from the Spring Boot configuration
        public Map<String, Object> producerConfigs() {
            Map<String, Object> props = new HashMap<>();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
            props.put(ProducerConfig.RETRIES_CONFIG, retries);
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
            props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            return props;
        }

        public ProducerFactory<String, String> producerFactory() {
            return new DefaultKafkaProducerFactory<String, String>(producerConfigs());
        }

        @Bean
        public KafkaTemplate<String, String> kafkaTemplate() {
            return new KafkaTemplate<String, String>(producerFactory());
        }
    }
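
    The @Value placeholders above must be backed by entries in the Spring Boot configuration.
    A minimal application.properties sketch, assuming Kafka runs on localhost:9092 and the
    service listens on port 8000 (the port used by the generator in step 8); the broker address
    and the tuning values below are assumptions to adapt to your environment:

    # assumed example values; adjust to your environment
    server.port=8000
    kafka.producer.servers=localhost:9092
    kafka.producer.retries=0
    kafka.producer.batch.size=4096
    kafka.producer.linger=1
    kafka.producer.buffer.memory=40960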
    
  5. Write the message body sent to Kafka (a sample of the wrapped JSON follows the class)

    package com.ityouxin.report.msg;

    /**
     * Created by ityouxin
     */
    public class Message {
        private String message;  // message content in JSON format
        private int count;       // number of messages
        private Long timestamp;  // message timestamp

        public String getMessage() {
            return message;
        }

        public void setMessage(String message) {
            this.message = message;
        }

        public int getCount() {
            return count;
        }

        public void setCount(int count) {
            this.count = count;
        }

        public Long getTimestamp() {
            return timestamp;
        }

        public void setTimestamp(Long timestamp) {
            this.timestamp = timestamp;
        }

        @Override
        public String toString() {
            return "Message{" +
                    "message='" + message + '\'' +
                    ", count=" + count +
                    ", timestamp=" + timestamp +
                    '}';
        }
    }
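
    For reference, one wrapped record as published to the "test" topic looks roughly like this
    (values and field order are illustrative; the inner message string is the UserBrowse JSON
    produced by the generator in step 8):

    {"count":1,"message":"{\"channelId\":3,\"userId\":7,...}","timestamp":1552546800000}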
    
  6. Write the message sender used to POST data to the reporting service (a quick usage sketch follows)

    package com.ityouxin.report.msg;

    import java.io.BufferedOutputStream;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    /**
     * Created by ityouxin
     */
    public class SendMsg {

        public static void send(String address, String message) {
            try {
                URL url = new URL(address);
                HttpURLConnection conn = (HttpURLConnection) url.openConnection();
                conn.setRequestMethod("POST");
                conn.setDoInput(true);
                conn.setDoOutput(true);
                conn.setAllowUserInteraction(true);
                conn.setUseCaches(false);
                conn.setReadTimeout(6 * 1000);
                conn.setRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) "
                        + "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36");
                conn.setRequestProperty("Content-Type", "application/json");
                conn.connect();

                // Write the JSON body
                OutputStream outputStream = conn.getOutputStream();
                BufferedOutputStream out = new BufferedOutputStream(outputStream);
                out.write(message.getBytes());
                out.flush();

                // Read the response, appending only the bytes actually read
                String temp = "";
                InputStream in = conn.getInputStream();
                byte[] tempbytes = new byte[1024];
                int len;
                while ((len = in.read(tempbytes, 0, 1024)) != -1) {
                    temp += new String(tempbytes, 0, len);
                }
                System.out.println(conn.getResponseCode());
                System.out.println(temp);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
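
    A quick smoke test of the helper, assuming the reporting service is already running locally
    on port 8000 (the same URL the mock data generator in step 8 uses); the body here is just a
    placeholder JSON string:

    SendMsg.send("http://localhost:8000/ReportApplication/retrieveData", "{\"userId\":1}");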
    
  7. Verify the code

    7.1 Start Kafka:

    bin/kafka-server-start.sh /export/servers/kafka_2.10-0.9.0.1/config/server.properties

    7.2 Start KafkaOffsetMonitor to watch Kafka: sh start.sh

    7.3 Start the reporting service.

    7.4 Write the mock data generator (step 8 below) and run it; an optional console-consumer check is sketched after this list.
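
    As an extra check that messages reach the "test" topic, the console consumer bundled with
    this Kafka version can be used; a sketch assuming Zookeeper runs on localhost:2181 (adjust
    the address to your cluster):

    bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning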

  8. Write the mock data generator

    package com.ityouxin.report.GeneratorData;

    import com.alibaba.fastjson.JSONObject;
    import com.ityouxin.report.bean.user.UserBrowse;
    import com.ityouxin.report.msg.SendMsg;

    import java.text.DateFormat;
    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.ArrayList;
    import java.util.Date;
    import java.util.List;
    import java.util.Random;

    /**
     * Created by ityouxin
     */
    public class UserBrowseRecord {

        private static Long[] pindaoids = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L};  // channel ids
        private static Long[] leibieids = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L};  // product category ids
        private static Long[] chanpinids = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L}; // product ids
        private static Long[] yonghuids = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L};  // user ids

        /**
         * Regions
         */
        private static String[] contrys = new String[]{"America", "china"};   // countries
        private static String[] provinces = new String[]{"America", "china"}; // provinces
        private static String[] citys = new String[]{"America", "china"};     // cities

        /**
         * Network providers
         */
        private static String[] networks = new String[]{"电信", "移动", "联通"};

        /**
         * Traffic sources
         */
        private static String[] sources = new String[]{"直接输入", "百度跳转", "360搜索跳转", "必应跳转"};

        /**
         * Browsers
         */
        private static String[] liulanqis = new String[]{"火狐", "qq浏览器", "360浏览器", "谷歌浏览器"};

        /**
         * Entry time / leave time pairs
         */
        private static List<Long[]> usetimelog = producetimes();

        public static List<Long[]> producetimes() {
            List<Long[]> usetimelog = new ArrayList<Long[]>();
            for (int i = 0; i < 100; i++) {
                Long[] timesarray = gettimes("2018-12-12 24:60:60:000");
                usetimelog.add(timesarray);
            }
            return usetimelog;
        }

        private static Long[] gettimes(String time) {
            DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss:SSS");
            try {
                Date date = dateFormat.parse(time);
                long timetemp = date.getTime();
                Random random = new Random();
                int randomint = random.nextInt(10);
                long starttime = timetemp - randomint * 3600 * 1000;
                long endtime = starttime + randomint * 3600 * 1000;
                return new Long[]{starttime, endtime};
            } catch (ParseException e) {
                e.printStackTrace();
            }
            return new Long[]{0L, 0L};
        }

        public static void main(String[] args) {
            Random random = new Random();
            for (int i = 0; i < 2; i++) {
                // channel id, category id, product id, user id, entry/leave time,
                // region, network provider, traffic source, browser
                UserBrowse userBrowse = new UserBrowse();
                userBrowse.setChannelId(pindaoids[random.nextInt(pindaoids.length)]);
                userBrowse.setCategoryId(leibieids[random.nextInt(leibieids.length)]);
                userBrowse.setProductId(chanpinids[random.nextInt(chanpinids.length)]);
                userBrowse.setUserId(yonghuids[random.nextInt(yonghuids.length)]);
                userBrowse.setCountry(contrys[random.nextInt(contrys.length)]);
                userBrowse.setProvince(provinces[random.nextInt(provinces.length)]);
                userBrowse.setCity(citys[random.nextInt(citys.length)]);
                userBrowse.setNetwork(networks[random.nextInt(networks.length)]);
                userBrowse.setSources(sources[random.nextInt(sources.length)]);
                userBrowse.setBrowserType(liulanqis[random.nextInt(liulanqis.length)]);
                Long[] times = usetimelog.get(random.nextInt(usetimelog.size()));
                userBrowse.setEntryTime(times[0]);
                userBrowse.setLeaveTime(times[1]);
                String jonstr = JSONObject.toJSONString(userBrowse);
                System.out.println(jonstr);
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                SendMsg.send("http://localhost:8000/ReportApplication/retrieveData", jonstr);
            }
        }
    }

    After running the generator, observe KafkaOffsetMonitor: the data has been successfully sent to Kafka.
    