Developing a Simple Flink Program

This post walks through a simple Flink program that processes JSON-formatted log data. It simulates JSON log records, then performs filtering, HBase lookups, and state management, and applies expression-based rule matching; the processed records are finally written to Kafka.
The main focus is on using Flink State to implement a rule-matching scenario. If you want the complete code, leave a comment.

package com.coder.flink.core.stormToFlink;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.coder.flink.core.base.Constaints;
import com.coder.flink.core.base.Resources;
import com.coder.flink.core.pojo.TopicInfo;
import com.coder.flink.core.quartz.QuartzRedis;
import com.coder.flink.core.redis.RedisTableData;
import com.coder.flink.core.utils.DateUtil;
import com.coder.flink.core.utils.GsonUtil;
import com.coder.flink.core.utils.HbaseRowKey;
import com.coder.flink.core.utils.HbaseUtil;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.googlecode.aviator.Expression;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;


/**
 * TODO: the logic in main() should eventually be refactored into separate methods.
 */
public class StormToFlink_new {

    private static String columnFamily = Resources.hbasePro.getProperty("column_family");

    public static void main(String[] args) throws Exception {

        // TODO 1: start the scheduled (Quartz) job
        new QuartzRedis();
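        // (Inferred from the class name: QuartzRedis appears to schedule a
        // periodic refresh of rule/dimension data from Redis; its source is
        // not included in this excerpt.)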

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // TODO 2: simulate JSON log records in code

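        // Each record is a single JSON object (fastjson tolerates the
        // single-quoted style used below). Fields this demo cares about:
        // appKey (grouping key), battery / cpuUseRatio (rule inputs),
        // rowKey (HBase row key), urlTime (event time in access-log format).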
        String log1 = "{'rowKey':'002|AA|4BCED55E25123F5A','phoneType':'1','city':'深圳市','urlTime':'[21/Jun/2019:21:23:42 +0800]','latitude':'22.495816','battery':'34','deviceId':'E4A787938C2EBFBD0','memoryUseRatio':'41','localTime':'1558337193164','areaCode':'42287','baseStationCode':'23082003','province':'广东省','interfaceKey':'Tub','currentIPAddress':'172.10.13.55','fieldUrl':'/publicLog/domainName','appPackageName':'com.liumeng.demo','cpuUseRatio':'69','appKey':'432b6d2931efd34037fe282d'}";
        // NOTE: log2 was cut off in the original post (it ended mid-string with
        // "&#..."); the remaining fields are reconstructed by analogy with log1.
        String log2 = "{'rowKey':'002|AA|4BCED55E25123F5A','phoneType':'1','city':'深圳市','urlTime':'[21/Jun/2019:21:23:48 +0800]','latitude':'22.495816','battery':'34','deviceId':'E4A787938C2EBFBD0','memoryUseRatio':'41','localTime':'1558337193164','areaCode':'42287','baseStationCode':'23082003','province':'广东省','interfaceKey':'Tub','currentIPAddress':'172.10.13.55','fieldUrl':'/publicLog/domainName','appPackageName':'com.liumeng.demo','cpuUseRatio':'69','appKey':'432b6d2931efd34037fe282d'}";
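
        // ------------------------------------------------------------------
        // NOTE: the original post is truncated at this point. Everything below
        // is a minimal sketch reconstructed from the post's summary
        // (filter -> keyBy -> MapState + Aviator rule match -> Kafka sink),
        // not the author's original code. The Aviator rule, broker address and
        // topic name are made-up placeholders, and the HBase dimension lookup
        // (HbaseUtil) is omitted because its API is not shown in this excerpt.
        // ------------------------------------------------------------------

        DataStream<String> source = env.fromElements(log1, log2);

        // drop records that cannot be parsed or that carry no appKey
        DataStream<String> valid = source.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String value) {
                try {
                    return StringUtils.isNotBlank(JSON.parseObject(value).getString("appKey"));
                } catch (Exception e) {
                    return false;
                }
            }
        });

        // key the stream by appKey so every app gets its own state
        KeyedStream<String, String> keyed = valid.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) throws Exception {
                return JSON.parseObject(value).getString("appKey");
            }
        });

        // per-key MapState counts how many records matched the Aviator rule;
        // matching records are enriched with the running count and forwarded
        SingleOutputStreamOperator<String> matched = keyed.flatMap(new RichFlatMapFunction<String, String>() {

            private transient MapState<String, Long> matchState;
            private transient Expression rule;

            @Override
            public void open(Configuration parameters) {
                matchState = getRuntimeContext().getMapState(
                        new MapStateDescriptor<>("matchState", String.class, Long.class));
                // a hard-coded sample rule; in the full project the rules are
                // presumably loaded from Redis/HBase by the Quartz job above
                rule = com.googlecode.aviator.AviatorEvaluator.compile("battery < 50 && cpuUseRatio > 60");
            }

            @Override
            public void flatMap(String value, Collector<String> out) throws Exception {
                JSONObject json = JSON.parseObject(value);
                Map<String, Object> ruleEnv = new HashMap<>();
                ruleEnv.put("battery", json.getIntValue("battery"));
                ruleEnv.put("cpuUseRatio", json.getIntValue("cpuUseRatio"));
                if (Boolean.TRUE.equals(rule.execute(ruleEnv))) {
                    String appKey = json.getString("appKey");
                    Long hits = matchState.get(appKey);
                    matchState.put(appKey, hits == null ? 1L : hits + 1L);
                    json.put("matchCount", matchState.get(appKey));
                    out.collect(json.toJSONString());
                }
            }
        });

        // placeholder broker address and topic name
        matched.addSink(new FlinkKafkaProducer010<>(
                "localhost:9092", "flink_match_result", new SimpleStringSchema()));

        env.execute("StormToFlink_new");
    }
}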