在 Redis 中批量构造测试数据，用于辅助测试 Impala 中 UDF 的性能
pom.xml
<!-- redis -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>2.9.0</version>
</dependency>
BeidouUdfToRedis.java
import org.apache.commons.io.FileUtils;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
public class BeidouUdfToRedis {

    /** Flush the pipeline to the cluster after this many loop iterations. */
    private static final int BATCH_SIZE = 500;
    /** Source file holding one "id,value" pair per line. */
    private static final String DATA_FILE = "/Users/yiqin/Desktop/crm.txt";
    /** Hash TTL: two days, in seconds. */
    private static final int EXPIRE_SECONDS = 60 * 60 * 24 * 2;

    /**
     * Bulk-writes the same hash {@code count} times through a cluster pipeline,
     * generating test data in Redis to benchmark an Impala UDF.
     *
     * @param count number of pipelined HMSET/EXPIRE pairs to issue
     * @param key   Redis hash key to write to
     */
    public void beidouUDF(int count, String key) {
        LinkedHashSet<HostAndPort> redisClusterNodes = RedisUtil.getRedisClusterNodes();
        JedisCluster jedisCluster = new JedisCluster(redisClusterNodes, 30 * 100, 10);
        JedisClusterPipeline pipelined = JedisClusterPipeline.pipelined(jedisCluster);

        HashMap<String, String> hashMap = loadMapping();
        if (hashMap.isEmpty()) {
            // Nothing to write (file missing, unreadable, or empty) — clean up and bail out.
            closeQuietly(jedisCluster, pipelined);
            return;
        }

        // finally-block guarantees the cluster/pipeline are closed even if a
        // write fails part-way through (the original leaked them on exception).
        try {
            int pending = 0;
            for (int i = 0; i < count; i++) { // issue `count` HMSET/EXPIRE pairs
                pipelined.hmset(key, hashMap);
                pipelined.expire(key, EXPIRE_SECONDS);
                pending++;
                if (pending >= BATCH_SIZE) {
                    pipelined.sync();
                    // BUG FIX: the original never reset the counter, so once it
                    // passed 500 it synced on every single iteration.
                    pending = 0;
                }
            }
            // BUG FIX: flush commands still buffered after the loop — the
            // original dropped them whenever count was not past the threshold.
            if (pending > 0) {
                pipelined.sync();
            }
        } finally {
            closeQuietly(jedisCluster, pipelined);
        }
    }

    /**
     * Reads "key,value" lines from {@link #DATA_FILE} into a map.
     * Returns an empty map on I/O failure instead of null.
     */
    private HashMap<String, String> loadMapping() {
        HashMap<String, String> hashMap = new HashMap<String, String>();
        List<String> lines;
        try {
            // Explicit charset: the charset-less overload is deprecated and
            // platform-dependent.
            lines = FileUtils.readLines(new File(DATA_FILE), "UTF-8");
        } catch (IOException e) {
            // BUG FIX: the original left the list null here and then iterated
            // it, throwing a NullPointerException.
            e.printStackTrace();
            return hashMap;
        }
        for (String line : lines) {
            String[] parts = line.split(",");
            // Guard against malformed lines — the original threw
            // ArrayIndexOutOfBoundsException when a line had no comma.
            if (parts.length >= 2) {
                hashMap.put(parts[0], parts[1]);
            }
        }
        System.out.println(hashMap);
        return hashMap;
    }

    /** Closes both resources, logging rather than propagating close failures. */
    private static void closeQuietly(JedisCluster cluster, JedisClusterPipeline pipeline) {
        try {
            cluster.close();
            pipeline.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        BeidouUdfToRedis beidouUdfToRedis = new BeidouUdfToRedis();
        beidouUdfToRedis.beidouUDF(1000, "P227:dict_property_$crm_id_mapping_value");
    }
}