Java~No data sinks have been created yet. A program needs at least one sink that consumes data

1. Error screenshot

(Error screenshot: the exception message reads "No data sinks have been created yet. A program needs at least one sink that consumes data".)

2. Incorrect code

package myflinkDemo;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class BatchWordCount{
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> text = env.fromElements(
                "Flink batch demo",
                "batch demo",
                "demo"
        );
        DataSet<Tuple2<String, Integer>> ds = text.flatMap(new LineSplitter())
                .groupBy(0)
                .sum(1);
        // Problem: env.execute() is called before any sink (e.g. print()) has been registered
        env.execute();
        ds.print();
    }
    static class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>>{
        @Override
        public void flatMap(String line, Collector<Tuple2<String, Integer>> collector){
            for (String word : line.split(" ")){
                collector.collect(new Tuple2<>(word, 1));
            }
        }
    }

}

3. Cause of the error

When print() is called on a DataSet, Flink already triggers job execution internally, so an explicit env.execute() must not be issued for this program. In the code above, env.execute() is even called before ds.print(); at that point no sink has been registered on the execution plan, which is exactly why Flink throws "No data sinks have been created yet. A program needs at least one sink that consumes data".
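
By contrast, an explicit env.execute() is required when the job only uses a lazy sink such as writeAsText(), because that sink merely registers an output and does not run the job by itself. Below is a minimal sketch of that variant, reusing env, text and LineSplitter from the program above; the output path /tmp/wordcount-output is only a placeholder.

DataSet<Tuple2<String, Integer>> counts = text.flatMap(new LineSplitter())
        .groupBy(0)
        .sum(1);
// writeAsText() only registers a file sink; nothing runs yet
counts.writeAsText("/tmp/wordcount-output");
// so the job has to be submitted explicitly here
env.execute("BatchWordCount");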

4. Correct code

package myflinkDemo;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class BatchWordCount{
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> text = env.fromElements(
                "Flink batch demo",
                "batch demo",
                "demo"
        );
        DataSet<Tuple2<String, Integer>> ds = text.flatMap(new LineSplitter())
                .groupBy(0)
                .sum(1);
        // env.execute() removed: ds.print() triggers job execution itself
        // env.execute();
        ds.print();
    }
    static class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>>{
        @Override
        public void flatMap(String line, Collector<Tuple2<String, Integer>> collector){
            for (String word : line.split(" ")){
                collector.collect(new Tuple2<>(word, 1));
            }
        }
    }

}
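With the corrected code, ds.print() submits the job and writes the word counts to stdout. For the three input lines above the output should contain the tuples (Flink,1), (batch,2) and (demo,3), although the order in which they are printed may vary.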