大数据之flink中join用法

本文详细介绍了Apache Flink中流处理的四种Join操作:内连接、左外连接、右外连接以及Interval Join,通过实例展示了如何设置窗口和时间区间来实现不同类型的连接。此外,还展示了如何使用Flink从MySQL数据库中查询维度数据,以增强事件处理的上下文信息。

一、join用法

1、将两个流中的数据进行join处理

package cn._51doit.flink.day05;


import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class TumblingWindowJoinDemo {

    /**
     * Demonstrates an inner join of two socket text streams inside a 5-second
     * tumbling event-time window. Both inputs use the format "timestamp,key,value".
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Use the timestamp carried inside each record rather than processing time.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // e.g. 1000,A,1
        DataStreamSource<String> firstSource = env.socketTextStream("localhost", 8888);
        // e.g. 2000,A,2
        DataStreamSource<String> secondSource = env.socketTextStream("localhost", 9999);

        // Assign event time (first CSV field) to the first stream; zero allowed out-of-orderness.
        DataStream<String> firstWithTs = firstSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });
        // Assign event time to the second stream the same way.
        DataStream<String> secondWithTs = secondSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });

        // Split each line of the first stream into (timestamp, key, value).
        DataStream<Tuple3<Long, String, String>> first = firstWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });
        // Split each line of the second stream into (timestamp, key, value).
        DataStream<Tuple3<Long, String, String>> second = secondWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });

        // Join on the middle field (f1): only records that fall into the same
        // 5-second tumbling window AND share the same key reach the join function.
        DataStream<Tuple6<Long, String, String, Long, String, String>> joined = first.join(second)
                .where(new KeySelector<Tuple3<Long, String, String>, String>() {
                    @Override
                    public String getKey(Tuple3<Long, String, String> value) throws Exception {
                        return value.f1; // join key of the first (left) stream
                    }
                })
                .equalTo(new KeySelector<Tuple3<Long, String, String>, String>() {
                    @Override
                    public String getKey(Tuple3<Long, String, String> value) throws Exception {
                        return value.f1; // join key of the second (right) stream
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .apply(new MyInnerJoinFunction());

        joined.print(); // print sink for the joined result
        env.execute("TumblingWindowJoinDemo");
    }

}
package cn._51doit.flink.day05;

import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;

public class MyInnerJoinFunction implements JoinFunction<
        Tuple3<Long, String, String>, // element type of the first (left) stream
        Tuple3<Long, String, String>, // element type of the second (right) stream
        Tuple6<Long, String, String, Long, String, String>> { // joined output type

    /**
     * Called once for every pair of elements that landed in the same window
     * and share the same join key; flattens both sides into one six-field tuple.
     */
    @Override
    public Tuple6<Long, String, String, Long, String, String> join(
            Tuple3<Long, String, String> leftRecord,
            Tuple3<Long, String, String> rightRecord) throws Exception {
        return Tuple6.of(leftRecord.f0, leftRecord.f1, leftRecord.f2,
                rightRecord.f0, rightRecord.f1, rightRecord.f2);
    }
}

2、左外连接

package cn._51doit.flink.day05;


import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class TumblingWindowLeftOuterJoinDemo {

    /**
     * Left outer join of two socket streams implemented with coGroup on a
     * 5-second tumbling event-time window: every left record is emitted,
     * with nulls on the right side when no match exists in the window.
     * Input format on both sockets: "timestamp,key,value".
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // e.g. 1000,A,1
        DataStreamSource<String> leftSource = env.socketTextStream("localhost", 8888);
        // e.g. 2000,A,2
        DataStreamSource<String> rightSource = env.socketTextStream("localhost", 9999);

        // Event time is the first CSV field; no out-of-orderness allowed.
        DataStream<String> leftWithTs = leftSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });
        DataStream<String> rightWithTs = rightSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });

        // Parse both streams into (timestamp, key, value) tuples.
        DataStream<Tuple3<Long, String, String>> leftParsed = leftWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });
        DataStream<Tuple3<Long, String, String>> rightParsed = rightWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });

        // coGroup (unlike join) also hands over groups with an empty side,
        // which is what makes the outer-join semantics possible.
        DataStream<Tuple6<Long, String, String, Long, String, String>> joined = leftParsed.coGroup(rightParsed)
                .where(new KeySelector<Tuple3<Long, String, String>, String>() {
                    @Override
                    public String getKey(Tuple3<Long, String, String> value) throws Exception {
                        return value.f1;
                    }
                })
                .equalTo(new KeySelector<Tuple3<Long, String, String>, String>() {
                    @Override
                    public String getKey(Tuple3<Long, String, String> value) throws Exception {
                        return value.f1;
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .apply(new MyLeftOuterJoinFunction());

        joined.print();

        env.execute();
    }
}
package cn._51doit.flink.day05;

import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.util.Collector;

public class MyLeftOuterJoinFunction implements CoGroupFunction<
        Tuple3<Long, String, String>, // element type of the left stream
        Tuple3<Long, String, String>, // element type of the right stream
        Tuple6<Long, String, String, Long, String, String>> { // output type

    /**
     * Left-outer-join semantics on top of coGroup: every left element is
     * emitted; matching right elements are appended, and a left element
     * without any match is padded with nulls on the right side.
     */
    @Override
    public void coGroup(Iterable<Tuple3<Long, String, String>> first,
                        Iterable<Tuple3<Long, String, String>> second,
                        Collector<Tuple6<Long, String, String, Long, String, String>> out) throws Exception {
        for (Tuple3<Long, String, String> leftRecord : first) {
            // Pair this left record with every right record of the same key/window.
            java.util.Iterator<Tuple3<Long, String, String>> rightIter = second.iterator();
            if (rightIter.hasNext()) {
                while (rightIter.hasNext()) {
                    Tuple3<Long, String, String> rightRecord = rightIter.next();
                    out.collect(Tuple6.of(leftRecord.f0, leftRecord.f1, leftRecord.f2,
                            rightRecord.f0, rightRecord.f1, rightRecord.f2));
                }
            } else {
                // No right-side match in this window: emit the left side alone.
                out.collect(Tuple6.of(leftRecord.f0, leftRecord.f1, leftRecord.f2, null, null, null));
            }
        }
    }
}

3、右外连接

package cn._51doit.flink.day05;


import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class TumblingWindowRightOuterJoinDemo {

    /**
     * Right outer join of two socket streams implemented with coGroup on a
     * 5-second tumbling event-time window: every right record is emitted,
     * with nulls on the left side when no match exists in the window.
     * Input format on both sockets: "timestamp,key,value".
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // e.g. 1000,A,1
        DataStreamSource<String> leftSource = env.socketTextStream("localhost", 8888);
        // e.g. 2000,A,2
        DataStreamSource<String> rightSource = env.socketTextStream("localhost", 9999);

        // Event time is the first CSV field; no out-of-orderness allowed.
        DataStream<String> leftWithTs = leftSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });
        DataStream<String> rightWithTs = rightSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });

        // Parse both streams into (timestamp, key, value) tuples.
        DataStream<Tuple3<Long, String, String>> leftParsed = leftWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });
        DataStream<Tuple3<Long, String, String>> rightParsed = rightWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });

        // Use coGroup to implement the RIGHT outer join (join() alone can only
        // express inner-join semantics, since it never sees empty groups).
        DataStream<Tuple6<Long, String, String, Long, String, String>> joined = leftParsed.coGroup(rightParsed)
                .where(new KeySelector<Tuple3<Long, String, String>, String>() {
                    @Override
                    public String getKey(Tuple3<Long, String, String> value) throws Exception {
                        return value.f1;
                    }
                })
                .equalTo(new KeySelector<Tuple3<Long, String, String>, String>() {
                    @Override
                    public String getKey(Tuple3<Long, String, String> value) throws Exception {
                        return value.f1;
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .apply(new MyRightOuterJoinFunction());

        joined.print();

        env.execute();
    }
}
package cn._51doit.flink.day05;

import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.util.Collector;

public class MyRightOuterJoinFunction implements CoGroupFunction<
        Tuple3<Long, String, String>, // element type of the left stream
        Tuple3<Long, String, String>, // element type of the right stream
        Tuple6<Long, String, String, Long, String, String>> { // output type

    /**
     * Right-outer-join semantics on top of coGroup: every right element is
     * emitted; matching left elements are prepended, and a right element
     * without any match is padded with nulls on the left side.
     */
    @Override
    public void coGroup(Iterable<Tuple3<Long, String, String>> first,
                        Iterable<Tuple3<Long, String, String>> second,
                        Collector<Tuple6<Long, String, String, Long, String, String>> out) throws Exception {
        for (Tuple3<Long, String, String> rightRecord : second) {
            // Pair this right record with every left record of the same key/window.
            java.util.Iterator<Tuple3<Long, String, String>> leftIter = first.iterator();
            if (leftIter.hasNext()) {
                while (leftIter.hasNext()) {
                    Tuple3<Long, String, String> leftRecord = leftIter.next();
                    out.collect(Tuple6.of(leftRecord.f0, leftRecord.f1, leftRecord.f2,
                            rightRecord.f0, rightRecord.f1, rightRecord.f2));
                }
            } else {
                // No left-side match in this window: emit the right side alone.
                out.collect(Tuple6.of(null, null, null, rightRecord.f0, rightRecord.f1, rightRecord.f2));
            }
        }
    }
}

4、interval Join

Interval Join:两个流按相同的key关联,并为每条数据设置一个相对的时间区间(数据的存活范围),只有key相等且事件时间落在该区间内的两条数据才会被join。

package cn._51doit.flink.day05;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;

public class IntervalJoinDemo {

    /**
     * Interval join of two keyed socket streams: for each left element with
     * timestamp t, matching right elements must have the same key and a
     * timestamp in [t - 1s, t + 1s) — the upper bound is made exclusive below.
     * Input format on both sockets: "timestamp,key,value".
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // e.g. 1000,A,1
        DataStreamSource<String> leftSource = env.socketTextStream("localhost", 8888);
        // e.g. 2000,A,2
        DataStreamSource<String> rightSource = env.socketTextStream("localhost", 9999);

        // Event time is the first CSV field; no out-of-orderness allowed.
        DataStream<String> leftWithTs = leftSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });
        DataStream<String> rightWithTs = rightSource.assignTimestampsAndWatermarks(
                new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                    @Override
                    public long extractTimestamp(String line) {
                        return Long.parseLong(line.split(",")[0]);
                    }
                });

        // Parse both streams into (timestamp, key, value) tuples.
        DataStream<Tuple3<Long, String, String>> leftParsed = leftWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });
        DataStream<Tuple3<Long, String, String>> rightParsed = rightWithTs.map(
                new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String value) throws Exception {
                        String[] parts = value.split(",");
                        return Tuple3.of(Long.parseLong(parts[0]), parts[1], parts[2]);
                    }
                });

        DataStream<Tuple6<Long, String, String, Long, String, String>> joined = leftParsed
                .keyBy(t -> t.f1)                          // key the left stream by f1
                .intervalJoin(rightParsed.keyBy(t -> t.f1)) // key the right stream by f1
                .between(Time.seconds(-1), Time.seconds(1)) // join window of +/- 1 second around each element
                .upperBoundExclusive()                      // default is closed on both ends; make the upper bound open
                .process(new MyProcessJoinFunction());      // user-defined pairing logic

        joined.print(); // print sink for the joined result
        env.execute("IntervalJoinDemo");
    }
}

二、关联mysql查询维度数据

地理位置信息:使用httpClient查询高德地图

DimDemo

package cn._51doit.flink.day05;

import cn._51doit.flink.day05.func.GeoRichMapFunction;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;


public class DimDemo {

    /**
     * Enriches a stream of JSON order events with two dimensions: the
     * category name (looked up from MySQL per record) and the province/city
     * (reverse-geocoded from the event's coordinates via the AMap API).
     */
    public static void main(String[] args) throws Exception{

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        // Parse each JSON line into a LogBean; malformed records become null.
        SingleOutputStreamOperator<LogBean> parsed = lines.map(new MapFunction<String, LogBean>() {
            @Override
            public LogBean map(String value) throws Exception {
                try {
                    return JSON.parseObject(value, LogBean.class);
                } catch (Exception e) {
                    e.printStackTrace();
                    return null;
                }
            }
        });

        // Drop records that failed to parse.
        SingleOutputStreamOperator<LogBean> valid = parsed.filter(bean -> bean != null);

        // Dimension lookup: resolve the category name from MySQL by cid.
        SingleOutputStreamOperator<LogBean> withCategory = valid.map(new RichMapFunction<LogBean, LogBean>() {

            private transient Connection connection;
            private transient PreparedStatement prepareStatement;

            @Override
            public void open(Configuration parameters) throws Exception {
                // One connection + prepared statement per parallel subtask, created at task start.
                connection = DriverManager.getConnection("jdbc:mysql://localhost:3306/bigdata?characterEncoding=utf-8", "root", "123456");
                prepareStatement = connection.prepareStatement("select id, name from tb_category where id = ?");
            }

            @Override
            public LogBean map(LogBean value) throws Exception {
                // Synchronous point query per record — acceptable for a demo;
                // production code would cache or use async I/O.
                prepareStatement.setInt(1, value.cid);
                ResultSet resultSet = prepareStatement.executeQuery();
                String categoryName = null;
                if (resultSet.next()) {
                    categoryName = resultSet.getString(2);
                }
                resultSet.close();
                value.name = categoryName;
                return value;
            }

            @Override
            public void close() throws Exception {
                // Release JDBC resources when the task shuts down.
                if (prepareStatement != null) {
                    prepareStatement.close();
                }
                if (connection != null) {
                    connection.close();
                }
            }
        });

        // Dimension lookup: resolve province/city from the coordinates.
        SingleOutputStreamOperator<LogBean> result = withCategory.map(new GeoRichMapFunction("4924f7ef5c86a278f5500851541cdcff"));

        result.print();

        env.execute();
    }
}

LogBean

package cn._51doit.flink.day05;

public class LogBean {

    // Order id.
    public String oid;

    // Category id, used as the MySQL dimension lookup key.
    public Integer cid;

    public Double money;

    // Event coordinates used for reverse geocoding.
    public Double longitude;

    public Double latitude;

    // Category name, filled in from the MySQL dimension table.
    public String name;

    // Province/city, filled in from the AMap reverse-geocoding API.
    public String province;

    public String city;

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("LogBean{");
        sb.append("oid='").append(oid).append('\'');
        sb.append(", cid=").append(cid);
        sb.append(", money=").append(money);
        sb.append(", longitude=").append(longitude);
        sb.append(", latitude=").append(latitude);
        sb.append(", name='").append(name).append('\'');
        sb.append(", province='").append(province).append('\'');
        sb.append(", city='").append(city).append('\'');
        sb.append('}');
        return sb.toString();
    }
}

GeoRichMapFunction

package cn._51doit.flink.day05.func;

import cn._51doit.flink.day05.LogBean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

/**
 * Enriches a {@link LogBean} with province/city by calling the AMap
 * reverse-geocoding REST API with the bean's longitude/latitude.
 * The HTTP client is created once per subtask in {@link #open} and
 * released in {@link #close}.
 */
public class GeoRichMapFunction extends RichMapFunction<LogBean, LogBean> {

    // AMap API key, supplied by the caller.
    private String key;

    public GeoRichMapFunction(String key) {
        this.key = key;
    }

    // transient: the HTTP client is not serializable and is rebuilt per subtask.
    private transient CloseableHttpClient httpclient;

    @Override
    public void open(Configuration parameters) throws Exception {
        httpclient = HttpClients.createDefault();
    }

    /**
     * Looks up the province/city for the bean's coordinates and writes them
     * back into the bean. Beans without coordinates are returned unchanged.
     *
     * @param bean the event to enrich (mutated in place)
     * @return the same bean, with province/city set when the lookup succeeded
     */
    @Override
    public LogBean map(LogBean bean) throws Exception {
        // Guard against missing coordinates: the fields are boxed Doubles and
        // unboxing null would throw a NullPointerException. Nothing to look up.
        if (bean.longitude == null || bean.latitude == null) {
            return bean;
        }
        double longitude = bean.longitude;
        double latitude = bean.latitude;
        HttpGet httpGet = new HttpGet("https://restapi.amap.com/v3/geocode/regeo?&location="+ longitude+"," +latitude+ "&key=" + key);
        CloseableHttpResponse response = httpclient.execute(httpGet);
        try {
            HttpEntity entity = response.getEntity();
            // Only parse the body on HTTP 200; response.close() in the finally
            // block releases the connection in every case.
            if (response.getStatusLine().getStatusCode() == 200) {
                // Read the response body as a JSON string.
                String result = EntityUtils.toString(entity);
                JSONObject jsonObj = JSON.parseObject(result);
                // "regeocode" holds the reverse-geocoding result.
                JSONObject regeocode = jsonObj.getJSONObject("regeocode");
                if (regeocode != null && !regeocode.isEmpty()) {
                    JSONObject address = regeocode.getJSONObject("addressComponent");
                    // Copy the administrative region onto the bean.
                    bean.province = address.getString("province");
                    bean.city = address.getString("city");
                }
            }
        } finally {
            response.close();
        }
        return bean;
    }

    @Override
    public void close() throws Exception {
        if (httpclient != null) {
            httpclient.close();
        }
    }
}
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

大数据同盟会

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值