二、导入pom包
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <!-- 2.8.9+ fixes CVE-2022-25647 (deserialization DoS in older 2.8.x) -->
    <version>2.8.9</version>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <scope>runtime</scope>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <!-- fastjson < 1.2.25 is vulnerable to remote code execution via autoType
         deserialization (CVE-2017-18349); 1.2.83 is the patched 1.2.x line -->
    <version>1.2.83</version>
</dependency>
<dependency>
    <groupId>org.mybatis.spring.boot</groupId>
    <artifactId>mybatis-spring-boot-starter</artifactId>
    <version>2.0.0</version>
</dependency>
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <optional>true</optional>
</dependency>
三、启动类
package com.example.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
 * Application entry point. {@code @EnableScheduling} activates the
 * {@code @Scheduled} weather-publishing task defined in KafkaController.
 */
@SpringBootApplication
@EnableScheduling
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}
四、yml配置文件
server:
  port: 10005

spring:
  datasource:
    url: jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
    username: root
    password: root
    # Connector/J 8.x driver class. The URL's serverTimezone parameter is an
    # 8.x option, so the legacy com.mysql.jdbc.Driver (which only logs a
    # deprecation warning and delegates to this class) should not be used.
    driver-class-name: com.mysql.cj.jdbc.Driver
  # kafka相关配置 (spring.kafka.*)
  kafka:
    bootstrap-servers: 192.168.65.130:9092,192.168.65.131:9092,192.168.65.132:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      # wait for all in-sync replicas to acknowledge each record
      acks: all
      retries: 1
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: consumer
      # start from the earliest offset when the group has no committed offset
      auto-offset-reset: earliest
五、实体类
package com.example.demo.kafka;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class KafkaMessage {
    // Weather forecast payload sent through Kafka as JSON and persisted by
    // WeatherMapper. Field names mirror the Baidu weather API's "forecasts"
    // entries (snake_case keys mapped to camelCase).
    // NOTE: do not reorder fields — @AllArgsConstructor and @Builder derive
    // their parameter order from declaration order.
    private Long id;
    // presumably wind direction at night ("wd" = wind direction) — TODO confirm against API docs
    private String wdNight;
    private String date;
    // daily high temperature (kept as String, as returned by the API)
    private String high;
    private String week;
    // weather description at night ("text")
    private String textNight;
    private String wdDay;
    // daily low temperature
    private String low;
    // presumably wind class/strength at night ("wc") — TODO confirm against API docs
    private String wcNight;
    // weather description during the day
    private String textDay;
    private String wcDay;
    // timestamp set by the producer when the message is published
    private Date sendTime;
}
六、mapper类
package com.example.demo.mapper;
import com.example.demo.kafka.KafkaMessage;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis mapper that persists weather forecast messages consumed from Kafka
 * into the {@code weather} table.
 */
@Mapper
public interface WeatherMapper {

    /**
     * Inserts a single forecast row.
     *
     * @param kafkaMessage the deserialized forecast message
     * @return number of affected rows (1 on success)
     */
    @Insert("INSERT into weather(id,text_day,text_night,high,low,wc_day,wd_day,wc_night,wd_night,date,week,send_time)" +
            " VALUES(#{id},#{textDay},#{textNight},#{high},#{low},#{wcDay},#{wdDay},#{wcNight},#{wdNight},#{date},#{week},#{sendTime});")
    int insert(KafkaMessage kafkaMessage);
}
七、解析接口工具类
package com.example.demo.kafka;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
public class RestTest {
    /**
     * Fetches the body of the given URL via HTTP GET and returns it as a
     * single UTF-8 string (line breaks removed).
     *
     * <p>Unlike the previous version, an I/O failure now propagates as the
     * declared {@link IOException} instead of being swallowed and returning
     * {@code null}, which caused an NPE later in the caller's JSON parsing.
     *
     * @param url the full URL to request
     * @return the response body, never {@code null}
     * @throws IOException if the connection fails or the body cannot be read
     */
    public static String getData2(String url) throws IOException {
        HttpURLConnection conn = null;
        try {
            conn = (HttpURLConnection) new URL(url).openConnection();
            conn.setUseCaches(false);
            // generous timeouts so a slow upstream cannot hang the scheduler thread forever
            conn.setConnectTimeout(30000);
            conn.setReadTimeout(30000);
            conn.setInstanceFollowRedirects(false);
            conn.connect();
            // try-with-resources closes the reader (and the underlying stream)
            // even when reading fails; previously a failing close() in the
            // finally block could also skip conn.disconnect().
            try (BufferedReader reader =
                         new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                StringBuilder sb = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null) {
                    sb.append(line);
                }
                return sb.toString();
            }
        } finally {
            if (conn != null) {
                conn.disconnect();
            }
        }
    }
}
八、发送消息组件
package com.example.demo.kafka;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class KafkaSender {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    private Gson gson = new GsonBuilder().create();

    /**
     * Serializes the message to JSON and publishes it to the given topic.
     * A {@code null} message is silently ignored.
     *
     * @param topic        target Kafka topic
     * @param kafkaMessage message body; may be {@code null}
     */
    public void send(String topic, KafkaMessage kafkaMessage) {
        if (kafkaMessage == null) {
            return;
        }
        // serialize once and reuse for both the log line and the record value
        String payload = gson.toJson(kafkaMessage);
        log.info("Producer->send kafka topic {}, message = {}", topic, payload);
        kafkaTemplate.send(topic, payload);
    }
}
九、消费消息组件类
package com.example.demo.kafka;
import com.example.demo.mapper.WeatherMapper;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.io.IOException;
@Component
@Slf4j
public class KafkaConsumer {

    @Autowired
    private WeatherMapper weatherMapper;

    private Gson gson = new GsonBuilder().create();

    /**
     * Consumes JSON weather messages from the {@code test} topic, deserializes
     * them into {@link KafkaMessage} and persists them via {@link WeatherMapper}.
     *
     * <p>The configured value deserializer (application.yml) is
     * {@code StringDeserializer}, so the record value is declared as
     * {@code String} here; the previous {@code ConsumerRecord<String, KafkaMessage>}
     * declaration only worked because {@code String.valueOf} papered over it.
     *
     * @param consumerRecord record whose value is the JSON-encoded message
     */
    @KafkaListener(topics = "test")
    public void receive2(ConsumerRecord<String, String> consumerRecord) throws IOException {
        KafkaMessage kafkaMessage = gson.fromJson(consumerRecord.value(), KafkaMessage.class);
        if (kafkaMessage == null) {
            // gson.fromJson(null/"null", ...) returns null — skip rather than NPE
            log.warn("Consumer->received empty message on topic test, skipping");
            return;
        }
        log.info("Consumer->received message id = {}", kafkaMessage.getId());
        int res = weatherMapper.insert(kafkaMessage);
        if (res > 0) {
            log.info("添加成功");
        } else {
            log.warn("添加失败");
        }
    }
}
十、定时发布消息类
package com.example.demo.kafka;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.Date;
import static com.example.demo.kafka.RestTest.getData2;
@Component
public class KafkaController {

    @Autowired
    private KafkaSender kafkaSender;

    /**
     * Every 30 seconds: fetches the Baidu weather forecast and publishes one
     * Kafka message per daily forecast entry to the {@code test} topic.
     *
     * @throws IOException if the weather API cannot be reached
     */
    @Scheduled(cron = "0/30 * * * * ? ")
    public void sendMessage() throws IOException {
        System.out.println("测试kafka");
        String data = getData2("http://api.map.baidu.com/weather/v1/?district_id=222405&data_type=all&ak=自己的ak");
        // Guard against an error-shaped response (bad ak, quota exceeded, ...):
        // previously a missing "result"/"forecasts" field threw an NPE on every
        // 30-second run.
        JSONObject jsonObject = JSONObject.parseObject(data);
        JSONObject result = jsonObject == null ? null : jsonObject.getJSONObject("result");
        JSONArray forecasts = result == null ? null : result.getJSONArray("forecasts");
        if (forecasts == null) {
            System.out.println("weather API returned no forecasts: " + data);
            return;
        }
        for (int i = 0; i < forecasts.size(); i++) {
            JSONObject forecastsResult = forecasts.getJSONObject(i);
            kafkaSender.send("test", KafkaMessage.builder()
                    // NOTE(review): nanoTime is not a reliable unique key —
                    // consider a DB auto-increment id instead
                    .id(System.nanoTime())
                    .date(forecastsResult.getString("date"))
                    .high(forecastsResult.getString("high"))
                    .low(forecastsResult.getString("low"))
                    .textDay(forecastsResult.getString("text_day"))
                    .textNight(forecastsResult.getString("text_night"))
                    .wcDay(forecastsResult.getString("wc_day"))
                    .wcNight(forecastsResult.getString("wc_night"))
                    .wdDay(forecastsResult.getString("wd_day"))
                    .wdNight(forecastsResult.getString("wd_night"))
                    .week(forecastsResult.getString("week"))
                    .sendTime(new Date())
                    .build());
            System.out.println(forecastsResult);
        }
    }
}
十一、查看数据库
![在这里插入图片描述](https://img-blog.csdnimg.cn/20200821161809135.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L3FxXzM4NjUwODA4,size_16,color_FFFFFF,t_70#pic_center)