// Scenario: when the payload is large, uploading to and downloading from Redis takes too long;
// compressing the data before upload reduces the transfer time.
/** 使用GZIP压缩算法对Redis上传JSON进行压缩上传和解压下载 */
@Component
@Slf4j
public class RedisZipJsonUtil {
/**
* 使用GZIP压缩算法对字节数组进行压缩
*
* @param jsonString
* @return
* @throws IOException
*/
public byte[] compressWithGzip(String jsonString) throws IOException {
GZIPOutputStream gzipOutputStream = null;
ByteArrayOutputStream outputStream = null;
try {
// 使用GZIP压缩算法对JSON字符串进行压缩
outputStream = new ByteArrayOutputStream();
gzipOutputStream = new GZIPOutputStream(outputStream);
gzipOutputStream.write(jsonString.getBytes());
// gzipOutputStream.close();里面有finish操作,因此要在toByteArray前面,也可以单独执行
gzipOutputStream.finish(); // 不进行finish操作数据会丢失或为空,因此要放在toByteArray前面
return outputStream.toByteArray();
} catch (IOException e) {
log.error("redis数据压缩异常:{}", e.getMessage());
e.printStackTrace();
} finally {
if (outputStream != null) {
outputStream.close();
}
if (gzipOutputStream != null) {
gzipOutputStream.close();
}
}
return null;
}
/**
* 使用GZIP解压缩算法对字节数组进行解压缩
*
* @param input
* @return
* @throws IOException
*/
public String decompressWithGzip(byte[] input) throws IOException {
GZIPInputStream gzipInputStream = null;
ByteArrayInputStream inputStream = null;
ByteArrayOutputStream outputStream = null;
try {
if (input != null) {
int bufferSize = 1024;
inputStream = new ByteArrayInputStream(input);
gzipInputStream = new GZIPInputStream(inputStream, bufferSize);
byte[] buffer = new byte[bufferSize];
outputStream = new ByteArrayOutputStream();
int len;
while ((len = gzipInputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, len);
}
outputStream.flush(); // 刷新清空
// 将解压缩后的JSON字节数组转换为字符串
return new String(outputStream.toByteArray());
}
} catch (IOException e) {
log.error("redis数据解压缩异常:{}", e.getMessage());
e.printStackTrace();
} finally {
if (outputStream != null) {
outputStream.close();
}
if (inputStream != null) {
inputStream.close();
}
if (gzipInputStream != null) {
gzipInputStream.close();
}
}
return null;
}
}