Flink学习之路(三)—— Transform

Transform是Flink的转换算子,可以将数据流转换成不同的形式,是Flink DataStream API的基础操作。以下为常用算子,其他算子可以参照官网文档去测试

官网:https://flink.apache.org/

Transform

Map

package com.dahuan.transform;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the {@code map} operator: transforms each element of the
 * stream one-to-one (here, each text line is mapped to its length).
 */
public class Transform_Map {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStreamSource<String> sourceStream = env.readTextFile( path );

        // map: emit the character length of every input line.
        sourceStream.map( new MapFunction<String, Integer>() {
            @Override
            public Integer map(String line) throws Exception {
                return line.length();
            }
        } ).print();

        // Submit the job; nothing runs until execute() is called.
        env.execute("Transform_Map");
    }
}

FlatMap

package com.dahuan.transform;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Demonstrates the {@code flatMap} operator: each input element may produce
 * zero or more output elements (here, one output per comma-separated field).
 */
public class Transform_FlatMap {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStreamSource<String> stringDataStreamSource = env.readTextFile( path );

        // Split each line on commas and emit every field separately.
        stringDataStreamSource.flatMap( new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String value, Collector<String> out) throws Exception {
                // Fields within a line are separated by commas.
                String[] fields = value.split( "," );
                for (String field : fields) {
                    // Each field becomes its own downstream element.
                    out.collect( field );
                }
            }
        } ).print();

        // Fixed typo in the job name: was "Transform_FlapMap".
        env.execute( "Transform_FlatMap" );
    }

}

KeyBy

package com.dahuan.transform;

import com.dahuan.bean.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates {@code keyBy} + rolling aggregation: partitions the stream by
 * sensor id and keeps a running maximum of the temperature per key.
 */
public class Transform_KeyBy {
    public static void main(String[] args) throws Exception{
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStreamSource<String> stringDataStreamSource = env.readTextFile( path );

        /**
         * DataStream -> KeyedStream: logically splits the stream into disjoint
         * partitions, each containing elements with the same key; internally
         * implemented via hash partitioning.
         */

        // Parse each CSV line "id,timestamp,temperature" into a SensorReading.
        DataStream<SensorReading> dataStream = stringDataStreamSource.map( new MapFunction<String, SensorReading>() {
            @Override
            public SensorReading map(String value) throws Exception {
                String[] split = value.split( "," );
                // parseLong/parseDouble instead of the deprecated boxing
                // constructors new Long(...)/new Double(...).
                return new SensorReading( split[0], Long.parseLong( split[1] ), Double.parseDouble( split[2] ) );
            }
        } );
        // Partition the stream by the "id" field.
        KeyedStream<SensorReading, Tuple> keyedStream = dataStream.keyBy( "id" );
        // Rolling max of the temperature field per key.
        // NOTE: max() updates only the aggregated field; maxBy() would keep the
        // whole record (including the timestamp) of the maximum element.
        SingleOutputStreamOperator<SensorReading> temperature = keyedStream.max( "temperature" );

        temperature.print();
        env.execute("Transform_KeyBy");
    }
}

Filter

package com.dahuan.transform;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the {@code filter} operator: keeps only the elements for
 * which the predicate returns {@code true}.
 */
public class Transform_Filter {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStreamSource<String> sourceStream = env.readTextFile( path );

        // Keep only the lines whose id starts with "sensor_1"
        // (this also matches sensor_10, sensor_11, ...).
        sourceStream.filter( new FilterFunction<String>() {
            @Override
            public boolean filter(String line) throws Exception {
                return line.startsWith( "sensor_1" );
            }
        } ).print();

        env.execute( "Transform_Filter" );
    }
}

Reduce

package com.dahuan.transform;

import com.dahuan.bean.SensorReading;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the {@code reduce} operator on a keyed stream: incrementally
 * combines the current aggregate with each new element, here keeping the
 * maximum temperature seen so far together with the latest timestamp.
 */
public class Transform_Reduce {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStream<String> inputStream = env.readTextFile( path );

        // Parse each CSV line "id,timestamp,temperature" into a SensorReading
        // using a Java 8 lambda. parseLong/parseDouble replace the deprecated
        // boxing constructors new Long(...)/new Double(...).
        DataStream<SensorReading> mapStream = inputStream.map( data -> {
            String[] fields = data.split( "," );
            return new SensorReading( fields[0], Long.parseLong( fields[1] ), Double.parseDouble( fields[2] ) );
        } );

        // Partition the stream by the "id" field.
        KeyedStream<SensorReading, Tuple> keyedStream = mapStream.keyBy( "id" );

        // reduce: for each key, keep the max temperature seen so far and the
        // timestamp of the newest element. curData is the running aggregate,
        // newData is the incoming element.
        SingleOutputStreamOperator<SensorReading> reduce = keyedStream.reduce( (curData, newData) ->
                new SensorReading( curData.getId(), newData.getTimestamp(),
                        // Running maximum of the two temperatures.
                        Math.max( curData.getTemperature(), newData.getTemperature() ) ) );

        reduce.print();
        env.execute( "Transform_Reduce" );
    }
}

MultipleStreams(Split,Select)

package com.dahuan.transform;

import com.dahuan.bean.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.collector.selector.OutputSelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SplitStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Collection;
import java.util.Collections;

/**
 * Demonstrates {@code split}/{@code select}: tags each element with one or
 * more names and later selects sub-streams by tag (here: "high"/"low"
 * temperature streams, split at 30 degrees).
 */
public class Transform_MultipleStreams {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStreamSource<String> stringDataStreamSource = env.readTextFile( path );

        // Parse each CSV line "id,timestamp,temperature" into a SensorReading.
        DataStream<SensorReading> dataStream = stringDataStreamSource.map( new MapFunction<String, SensorReading>() {
            @Override
            public SensorReading map(String value) throws Exception {
                String[] split = value.split( "," );
                // parseLong/parseDouble instead of the deprecated boxing
                // constructors new Long(...)/new Double(...).
                return new SensorReading( split[0], Long.parseLong( split[1] ), Double.parseDouble( split[2] ) );
            }
        } );

        // Split the stream into two tagged sub-streams at the 30-degree mark.
        SplitStream<SensorReading> splitStream = dataStream.split( new OutputSelector<SensorReading>() {
            @Override
            public Iterable<String> select(SensorReading sensorReading) {
                // Tag readings above 30 degrees "high", the rest "low".
                // singletonList returns an immutable one-element list; the
                // ternary picks the tag so the call appears only once.
                return Collections.singletonList( sensorReading.getTemperature() > 30 ? "high" : "low" );
            }
        } );

        // Select sub-streams by tag; selecting both tags yields all elements.
        DataStream<SensorReading> highTempStream = splitStream.select( "high" );
        DataStream<SensorReading> lowTempStream = splitStream.select( "low" );
        DataStream<SensorReading> allTempStream = splitStream.select( "high", "low" );

        highTempStream.print("high");
        lowTempStream.print("low");
        allTempStream.print("all");

        env.execute("Transform_MultipleStreams");
    }
}

Connect

package com.dahuan.transform;

import com.dahuan.bean.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.collector.selector.OutputSelector;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

import java.util.Collections;

/**
 * Demonstrates {@code connect}/{@code CoMapFunction} (joining two streams of
 * different element types) and {@code union} (merging streams of the same
 * type). High-temperature readings become warning tuples; low-temperature
 * readings become "normal" status tuples.
 */
public class Transform_Connect {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so the printed output is in order.
        env.setParallelism( 1 );

        // Read the sensor data from a local text file.
        String path = "E:\\Project\\FlinkTutorials\\Flink-Scala\\src\\main\\resources\\sensor.txt";
        DataStreamSource<String> stringDataStreamSource = env.readTextFile( path );

        // Parse each CSV line "id,timestamp,temperature" into a SensorReading.
        DataStream<SensorReading> dataStream = stringDataStreamSource.map( new MapFunction<String, SensorReading>() {
            @Override
            public SensorReading map(String value) throws Exception {
                String[] split = value.split( "," );
                // parseLong/parseDouble instead of the deprecated boxing
                // constructors new Long(...)/new Double(...).
                return new SensorReading( split[0], Long.parseLong( split[1] ), Double.parseDouble( split[2] ) );
            }
        } );

        // Split the stream into two tagged sub-streams at the 30-degree mark.
        SplitStream<SensorReading> splitStream = dataStream.split( new OutputSelector<SensorReading>() {
            @Override
            public Iterable<String> select(SensorReading sensorReading) {
                // Tag readings above 30 degrees "high", the rest "low";
                // singletonList returns an immutable one-element list.
                return Collections.singletonList( sensorReading.getTemperature() > 30 ? "high" : "low" );
            }
        } );

        DataStream<SensorReading> highTempStream = splitStream.select( "high" );
        DataStream<SensorReading> lowTempStream = splitStream.select( "low" );
        DataStream<SensorReading> allTempStream = splitStream.select( "high", "low" );

        // connect: convert the high-temperature stream to (id, temperature)
        // tuples, then pair it with the low-temperature stream so both can be
        // processed together while keeping their distinct element types.
        SingleOutputStreamOperator<Tuple2<String, Double>> warningStream = highTempStream.map( new MapFunction<SensorReading, Tuple2<String, Double>>() {
            @Override
            public Tuple2<String, Double> map(SensorReading value) throws Exception {
                return new Tuple2<>( value.getId(), value.getTemperature() );
            }
        } );

        ConnectedStreams<Tuple2<String, Double>, SensorReading> connectedStreams = warningStream.connect( lowTempStream );

        // CoMapFunction applies map1 to elements of the first stream and map2
        // to elements of the second; Object is the common output type here.
        DataStream<Object> resultStream = connectedStreams.map( new CoMapFunction<Tuple2<String, Double>, SensorReading, Object>() {
            @Override
            public Object map1(Tuple2<String, Double> value) throws Exception {
                return new Tuple3<>( value.f0, value.f1, "high temp warning" );
            }

            @Override
            public Object map2(SensorReading value) throws Exception {
                return new Tuple2<>( value.getId(), "normal" );
            }
        } );

        resultStream.print();

        // union: merges any number of streams of the SAME element type;
        // duplicate elements are allowed.
        highTempStream.union( lowTempStream, allTempStream ).print("union");

        env.execute( "Transform_Connect" );
    }
}

END

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值