import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.fs.SequenceFileWriter;
import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink;
import org.apache.flink.streaming.connectors.fs.bucketing.DateTimeBucketer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.util.Collector;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import java.util.Properties;
public class ReadingToKafka {
public static void main(String[] args) throws Ex
Flink 消费 Kafka 数据并直接写入 HDFS
最新推荐文章于 2023-05-22 14:44:49 发布
本文详细介绍了如何使用Apache Flink从Kafka topic实时消费数据,并将这些数据无缝写入HDFS文件系统的过程。通过实例演示配置Flink连接器,实现数据流处理与存储的高效集成。
摘要由CSDN通过智能技术生成