代码（Java 源码：Flink 从 Kafka 读取 CSV 数据，经 SQL 窗口聚合后写入 Redis）
import com.pojo.TestParameters;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.scala.typeutils.Types;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.util.Collector;
import java.util.Calendar;
import java.util.Date;
public class ResisSink {
static String topic = "test_csv_source";
static String groupId = "topic";
public static void main(String[] args) {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
env.setParallelism(1);
env.enableCheckpointing(60000);
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
String create = "CREATE TABLE table_source (\n" +
" serial_no STRING,\n" +
" category_name STRING,\n" +
" asset_id STRING,\n" +
" times INT,\n" +
" susp_updt_phone STRING,\n" +
" susp_phone STRING,\n" +
" susp_idcard STRING,\n" +
" susp_updt_idcard STRING,\n" +
" proctime as PROCTIME()" +
") WITH (\n" +
" 'connector.type' = 'kafka',\n" +
" 'connector.version' = '0.10',\n" +
" 'update-mode' = 'append',\n" +
" 'connector.topic' = '" + topic + "',\n" +
" 'connector.properties.group.id' = '" + groupId + "',\n" +
" 'connector.startup-mode' = 'earliest-offset',\n" +
" 'connector.properties.bootstrap.servers' = '" + TestParameters.bsv() + "',\n" +
" 'connector.properties.zookeeper.connect' = '127.0.0.1:2181',\n" +
" 'format.type' = 'csv'" +
")";
tEnv.registerFunction("time_interval", new TimeInterval());
tEnv.sqlUpdate(create);
String sql = "select asset_id,cast(count(*) as string)" +
"from table_source " +
"where time_interval(susp_updt_phone) <= 365 and times > 6" +
"group by TUMBLE(proctime,INTERVAL '5' SECOND),asset_id ";
Table table = tEnv.sqlQuery(sql);
TupleTypeInfo<Tuple2<String, String>> tupleType = new TupleTypeInfo<>(
Types.STRING(),
Types.STRING());
DataStream<Tuple2<String, String>> tmp = tEnv.toAppendStream(table, tupleType);
FlinkJedisPoolConfig fjpc = new FlinkJedisPoolConfig.Builder()
.setHost("127.0.0.1")
.setPort(6379)
.build();
tmp.flatMap(
new FlatMapFunction<Tuple2<String, String>, Tuple2<String, String>>() {
@Override
public void flatMap(Tuple2<String, String> value, Collector<Tuple2<String, String>> out) throws Exception {
out.collect(
new Tuple2<String, String>((String) value.f0, (String) value.f1)
);
}
}
)
.addSink(new RedisSink<Tuple2<String, String>>(fjpc, new RedisExampleMapper()));
try {
env.execute();
} catch (Exception e) {
e.printStackTrace();
}
}
public static class TimeInterval extends ScalarFunction {
public int eval(String str) {
Calendar cal1 = Calendar.getInstance();
try {
cal1.setTime(new Date(str));
} catch (Exception e) {
return 0;
}
Calendar cal2 = Calendar.getInstance();
cal2.setTime(new Date());
int day1 = cal1.get(Calendar.DAY_OF_YEAR);
int day2 = cal2.get(Calendar.DAY_OF_YEAR);
int year1 = cal1.get(Calendar.YEAR);
int year2 = cal2.get(Calendar.YEAR);
if (year1 != year2)
{
int timeDistance = 0;
for (int i = year1; i < year2; i++) {
if (i % 4 == 0 && i % 100 != 0 || i % 400 == 0)
{
timeDistance += 366;
} else
{
timeDistance += 365;
}
}
return timeDistance + (day2 - day1);
} else
{
return day2 - day1;
}
}
}
public static class RedisExampleMapper implements RedisMapper<Tuple2<String, String>> {
@Override
public RedisCommandDescription getCommandDescription() {
return new RedisCommandDescription(RedisCommand.HSET, "HASH_NAME");
}
@Override
public String getKeyFromData(Tuple2<String, String> data) {
return data.f0;
}
@Override
public String getValueFromData(Tuple2<String, String> data) {
return data.f1;
}
}
}
依赖（Maven pom.xml 片段：上述作业所需的属性与依赖声明）
<!-- Build settings: Java 8 sources, Flink 1.10.0, Kafka 0.10 connector,
     Scala 2.11 binary artifacts. -->
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<flink.version>1.10.0</flink.version>
<kafka.version>0.10.1.0</kafka.version>
<scala.binary.version>2.11</scala.binary.version>
</properties>
<dependencies>
<!-- Shaded Jackson, used internally by Flink formats. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-jackson</artifactId>
<version>2.9.8-7.0</version>
</dependency>
<!-- SLF4J -> log4j 1.2 binding (pairs with the log4j dependency below). -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.21</version>
</dependency>
<!-- Bahir Redis connector (provides RedisSink / RedisMapper).
     flink-core and flink-streaming-java are excluded because the connector was
     built against an older Flink; the planner dependency below supplies them
     at the project's Flink version instead. -->
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- Blink planner: Table API / SQL runtime used by the job. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>
<!-- Kafka 0.10 table/stream connector (matches 'connector.version' = '0.10' in the DDL). -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.10_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>
<!-- CSV format factory (matches 'format.type' = 'csv' in the DDL). -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-csv</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
</dependencies>