Fetching API data on a schedule with Kafka and storing it in MySQL

1. Set up the Kafka cluster

A running Kafka cluster is assumed throughout; the brokers are 192.168.65.130/131/132:9092 (see the YAML config in section 4), and both the producer and the consumer use a topic named test.
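If the test topic does not already exist, a minimal creation sketch (assuming Kafka 2.2+, where kafka-topics.sh accepts --bootstrap-server; older releases use --zookeeper instead):

bin/kafka-topics.sh --create \
  --bootstrap-server 192.168.65.130:9092 \
  --replication-factor 3 \
  --partitions 3 \
  --topic test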

2. Add the Maven dependencies

Add the following to the <dependencies> section of the project's pom.xml:

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.2</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.9</version>
        </dependency>
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>2.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
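Several entries above omit a <version> because they rely on a Spring Boot parent for dependency management. A sketch of that parent block (the exact Boot version is an assumption; mybatis-spring-boot-starter 2.0.0 pairs with the Boot 2.1 line):

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.1.3.RELEASE</version>
    </parent>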

3. Application entry point

@EnableScheduling activates the @Scheduled cron task defined in section 10; without it the job never fires.

package com.example.demo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;


@SpringBootApplication
@EnableScheduling
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

}

4. application.yml

The producer serializes keys and values as plain strings (the payload is JSON text); the consumer joins group consumer and, on first start, reads from the earliest offset:

server:
  port: 10005

spring:
  datasource:
    url: jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
    username: root
    password: root
    driver-class-name: com.mysql.cj.jdbc.Driver # the legacy com.mysql.jdbc.Driver is deprecated in Connector/J 8.x
  # Kafka settings
  kafka:
    bootstrap-servers: 192.168.65.130:9092,192.168.65.131:9092,192.168.65.132:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      acks: all
      retries: 1

    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: consumer
      auto-offset-reset: earliest


5. Entity class

KafkaMessage mirrors one entry of the Baidu forecast array, plus an id and a send timestamp:

package com.example.demo.kafka;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.Date;


@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class KafkaMessage {

    private Long id;
    private String wdNight;
    private String date;
    private String high;
    private String week;
    private String textNight;
    private String wdDay;
    private String low;
    private String wcNight;
    private String textDay;
    private String wcDay;
    private Date sendTime;
}
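Serialized with Gson, the message placed on the topic looks like this (values are illustrative; note the camelCase field names, which is why the consumer deserializes with Gson as well instead of parsing the snake_case API response directly):

{"id":1234567890,"date":"2021-06-01","week":"Tuesday","high":"25","low":"13","textDay":"Sunny","textNight":"Clear","wcDay":"3~4","wdDay":"NW","wcNight":"<3","wdNight":"NW","sendTime":"Jun 1, 2021 10:00:00 AM"}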

6. Mapper interface

A single annotated insert writes one consumed message into the weather table:

package com.example.demo.mapper;

import com.example.demo.kafka.KafkaMessage;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;

@Mapper
public interface WeatherMapper {

    @Insert("INSERT into weather(id,text_day,text_night,high,low,wc_day,wd_day,wc_night,wd_night,date,week,send_time)" +
            " VALUES(#{id},#{textDay},#{textNight},#{high},#{low},#{wcDay},#{wdDay},#{wcNight},#{wdNight},#{date},#{week},#{sendTime});")
    public int insert(KafkaMessage kafkaMessage);
}
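The original article does not show the target table. A minimal DDL sketch matching the columns in the insert statement (the column types are assumptions; adjust as needed):

CREATE TABLE weather (
    id         BIGINT PRIMARY KEY,
    text_day   VARCHAR(32),
    text_night VARCHAR(32),
    high       VARCHAR(16),
    low        VARCHAR(16),
    wc_day     VARCHAR(16),
    wd_day     VARCHAR(16),
    wc_night   VARCHAR(16),
    wd_night   VARCHAR(16),
    date       VARCHAR(16),
    week       VARCHAR(16),
    send_time  DATETIME
);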

7. HTTP helper class

A plain HttpURLConnection helper that issues a GET request and returns the response body as a string:

package com.example.demo.kafka;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class RestTest {
    /**
     * Fetches the given URL over HTTP and returns the response body as a
     * string (JSON, for the API used in this article).
     *
     * @param url the endpoint to call
     * @return the response body, or null if the request failed
     */

    public static String getData2(String url) throws IOException {
        HttpURLConnection conn = null;
        BufferedReader reader = null;
        String rs = null;
        try {
            URL url1 = new URL(url);
            conn = (HttpURLConnection) url1.openConnection();
            conn.setUseCaches(false);
            conn.setConnectTimeout(30000);
            conn.setReadTimeout(30000);
            conn.setInstanceFollowRedirects(false);
            conn.connect();
            // read the response body
            InputStream is = conn.getInputStream();
            reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
            StringBuilder sb = new StringBuilder();
            String strRead = null;
            while ((strRead = reader.readLine()) != null) {
                sb.append(strRead);
            }
            rs = sb.toString();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (reader != null) {
                reader.close();
            }
            if (conn != null) {
                conn.disconnect();
            }
        }
        return rs;
    }

}
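A quick manual check of the helper (district_id and the ak placeholder are the same ones used by the scheduler in section 10):

package com.example.demo.kafka;

import java.io.IOException;

public class RestTestMain {

    public static void main(String[] args) throws IOException {
        // Prints the raw JSON returned by the Baidu weather endpoint.
        String json = RestTest.getData2(
                "http://api.map.baidu.com/weather/v1/?district_id=222405&data_type=all&ak=<your-ak>");
        System.out.println(json);
    }
}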

8. Producer component

A thin wrapper around KafkaTemplate that serializes the message to JSON before sending:

package com.example.demo.kafka;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;


@Component
@Slf4j
public class KafkaSender {
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    private Gson gson = new GsonBuilder().create();

    /**
     * Serializes the message to JSON and sends it to the given topic.
     *
     * @param topic        target topic
     * @param kafkaMessage message body
     */
    public void send(String topic, KafkaMessage kafkaMessage) {
        if (kafkaMessage == null) {
            return;
        }
        log.info("Producer->send kafka topic {}, message = {}", topic, gson.toJson(kafkaMessage));
        kafkaTemplate.send(topic, gson.toJson(kafkaMessage));
    }

}
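send() here is fire-and-forget. If you want delivery confirmation, a sketch of a variant that could be added to this class (assuming spring-kafka 2.x, where send() returns a ListenableFuture):

    public void sendWithCallback(String topic, KafkaMessage kafkaMessage) {
        if (kafkaMessage == null) {
            return;
        }
        // The callback logs success or failure without blocking the caller.
        kafkaTemplate.send(topic, gson.toJson(kafkaMessage)).addCallback(
                result -> log.info("Delivered to {}@{}", topic,
                        result.getRecordMetadata().offset()),
                ex -> log.error("Delivery to topic {} failed", topic, ex));
    }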

9. Consumer component

Listens on the test topic, deserializes each JSON payload back into a KafkaMessage, and inserts it into MySQL:

package com.example.demo.kafka;

import com.example.demo.mapper.WeatherMapper;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
@Slf4j
public class KafkaConsumer {
    @Autowired
    private WeatherMapper weatherMapper;

    private Gson gson = new GsonBuilder().create();

    @KafkaListener(topics = "test")
    public void receive2(ConsumerRecord<String, String> consumerRecord) {
        // The value arrives as a JSON string (StringDeserializer in the YAML config),
        // so deserialize it back into a KafkaMessage here.
        KafkaMessage kafkaMessage = gson.fromJson(consumerRecord.value(), KafkaMessage.class);
        log.info("Consumer->received message id {}", kafkaMessage.getId());
        int res = weatherMapper.insert(kafkaMessage);
        if (res > 0) {
            log.info("Insert succeeded");
        } else {
            log.warn("Insert failed");
        }
    }
}
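If a message is redelivered (for example after a consumer rebalance), the insert can hit a duplicate key and the exception will propagate out of the listener. A sketch of a more defensive variant of receive2 (DataAccessException is Spring's umbrella exception for persistence failures):

    @KafkaListener(topics = "test")
    public void receiveSafely(ConsumerRecord<String, String> consumerRecord) {
        KafkaMessage kafkaMessage = gson.fromJson(consumerRecord.value(), KafkaMessage.class);
        try {
            weatherMapper.insert(kafkaMessage);
        } catch (org.springframework.dao.DataAccessException e) {
            // e.g. a duplicate primary key when the same message is consumed twice
            log.error("Insert failed for id {}", kafkaMessage.getId(), e);
        }
    }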

10. Scheduled publisher

Every 30 seconds the task calls the Baidu weather API, builds one KafkaMessage per forecast entry, and publishes it to the test topic:

package com.example.demo.kafka;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.Date;

import static com.example.demo.kafka.RestTest.getData2;

@Component
public class KafkaController {
    @Autowired
    private KafkaSender kafkaSender;

    @Scheduled(cron="0/30 * * * * ? ")
    public void sendMessage() throws IOException {
        System.out.println("Kafka scheduled task fired");
        String data = getData2("http://api.map.baidu.com/weather/v1/?district_id=222405&data_type=all&ak=<your-ak>");
        JSONObject jsonObject = JSONObject.parseObject(data);
        JSONObject result = jsonObject.getJSONObject("result");
        JSONArray forecasts = result.getJSONArray("forecasts");
        // iterate over the forecasts array
        for (int i = 0; i < forecasts.size(); i++) {
            JSONObject forecastsResult = forecasts.getJSONObject(i);
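            // System.nanoTime() serves as a quick, roughly unique primary key for the row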
            kafkaSender.send("test", KafkaMessage.builder().id(System.nanoTime())
                    .date(forecastsResult.getString("date"))
                    .high(forecastsResult.getString("high"))
                    .low(forecastsResult.getString("low"))
                    .textDay(forecastsResult.getString("text_day"))
                    .textNight(forecastsResult.getString("text_night"))
                    .wcDay(forecastsResult.getString("wc_day"))
                    .wcNight(forecastsResult.getString("wc_night"))
                    .wdDay(forecastsResult.getString("wd_day"))
                    .wdNight(forecastsResult.getString("wd_night"))
                    .week(forecastsResult.getString("week"))
                    .sendTime(new Date()).build());
            System.out.println(forecasts.get(i));
        }
    }

}
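For reference, the response shape this parsing code expects from the Baidu weather API (field names as used above; the values are illustrative):

{
  "status": 0,
  "result": {
    "forecasts": [
      {
        "date": "2021-06-01",
        "week": "Tuesday",
        "high": "25",
        "low": "13",
        "text_day": "Sunny",
        "text_night": "Clear",
        "wc_day": "3~4",
        "wd_day": "NW",
        "wc_night": "<3",
        "wd_night": "NW"
      }
    ]
  }
}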

11. Check the database

(Screenshot: the weather table populated with the inserted forecast rows.)
