记录一次在 Flink 1.17 中使用 Java 代码生成保存点并关闭任务的操作。
不废话,直接上代码:
package com.bringspring.database;
import cn.hutool.http.HttpUtil;
import org.apache.flink.api.common.JobID;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.rest.RestClusterClient;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.util.ExceptionUtils;
import java.util.concurrent.CompletableFuture;
public class FlinksavePoint {

    /** Flink REST endpoint host — adjust to your cluster. */
    private static final String FLINK_HOST = "10.70.10.83";
    /** Flink REST endpoint port — adjust to your cluster. */
    private static final int FLINK_PORT = 8082;

    /**
     * Triggers a savepoint for a known Flink (1.17) job, prints the resulting
     * savepoint path, then cancels the job via the REST API.
     *
     * @param args unused
     * @throws Exception if the savepoint trigger, the REST call, or closing the
     *                   client fails (exceptions are rethrown as-is)
     */
    public static void main(String[] args) throws Exception {
        // Known Flink job id (hex string as shown in the Flink web UI).
        JobID jobId = JobID.fromHexString("b741bb109c7826b14ddb768ffe877548");

        // Cluster connection info for the REST client.
        Configuration config = new Configuration();
        config.setString(RestOptions.ADDRESS, FLINK_HOST);
        config.setInteger(RestOptions.PORT, FLINK_PORT);

        // Create the REST client; the second argument is an arbitrary cluster id label.
        ClusterClient<String> clusterClient = new RestClusterClient<>(config, "TestClusterClient");
        try {
            // Trigger the savepoint, then block until the savepoint path is available.
            // Fix: the original printed an undeclared variable `savepointPath` and
            // never awaited the future returned by triggerSavepoint.
            CompletableFuture<String> savepointFuture = clusterClient.triggerSavepoint(
                    jobId, "file:///opt/flink/savepoint", SavepointFormatType.CANONICAL);
            String savepointPath = savepointFuture.get();
            System.out.println("Savepoint path: " + savepointPath);

            // NOTE(review): clusterClient.cancel(jobId) did not stop the job when called
            // after the savepoint, and calling it before the savepoint fails with
            // "job not found" — so the job is cancelled through the REST endpoint instead.
            // HttpUtil comes from hutool (cn.hutool:hutool-all:5.7.18):
            // <dependency>
            //   <groupId>cn.hutool</groupId>
            //   <artifactId>hutool-all</artifactId>
            //   <version>5.7.18</version>
            // </dependency>
            String cancelUrl = "http://" + FLINK_HOST + ":" + FLINK_PORT
                    + "/jobs/" + jobId + "/yarn-cancel";
            HttpUtil.get(cancelUrl);
        } catch (Exception e) {
            // Preserve the original exception for the caller (JVM) to report.
            ExceptionUtils.rethrowException(e);
        } finally {
            // Always release the REST client's resources.
            clusterClient.close();
        }
    }
}