flink 本地从 savepoint/checkpoint 启动(本地调试用):修改 org.apache.flink.client.deployment.executors.LocalExecutor 中的 execute 方法,如下所示
@Override
public CompletableFuture<JobClient> execute(
Pipeline pipeline, Configuration configuration, ClassLoader userCodeClassloader)
throws Exception {
// Add this line so a local run restores state from the given checkpoint path.
// NOTE(review): truncated snippet — the rest of LocalExecutor.execute(), which
// builds `jobGraph` and submits it, is omitted here; insert this call where
// jobGraph is in scope, before the job is submitted.
jobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("D:\\temp\\xxx\\xxx\\chk-6"));
}
用 Python 调用 Flink REST API,在本地为运行中的作业触发 savepoint
import json
import requests

# Trigger a savepoint for a running job via the Flink REST API.
# POST /jobs/:jobid/savepoints returns a trigger id immediately; the savepoint
# itself completes asynchronously (poll /jobs/:jobid/savepoints/:triggerid).
url = "http://localhost:63684" + "/jobs/" + "7140b62d19174a6bc1a57f51db9330d9" + "/savepoints"
myheader = {'content-type': 'application/json'}
mydata = {
    # keep the job running after the savepoint is taken
    "cancel-job": False,
    # file: URIs use forward slashes even on Windows — backslashes are not
    # valid URI path separators and Flink may reject or mangle them
    "target-directory": "file:///E:/flinkstatecheckpoint",
}
x = requests.post(url, data=json.dumps(mydata), headers=myheader)
# fail loudly on a non-2xx response instead of silently printing the Response repr
x.raise_for_status()
# the body carries the savepoint trigger id ("request-id"), not just the status
print(x.status_code, x.text)
import org.apache.flink.configuration.{Configuration, RestOptions}
import org.apache.flink.runtime.minicluster.{MiniCluster, MiniClusterConfiguration}
flink 本地启动带 Web UI 的 MiniCluster
object TT {
  /**
   * Starts a local Flink MiniCluster with the web UI bound to port 9999
   * (http://localhost:9999). Binding RestOptions.PORT to 0 instead would
   * let the cluster pick an arbitrary free port.
   */
  def main(args: Array[String]): Unit = {
    // Cluster-wide settings: REST port, TaskManager count, slots per TaskManager.
    val flinkConfig = new Configuration()
    // Fixed port so the UI address is predictable (the original comment
    // described port 0 / random-port binding, which did not match the code).
    flinkConfig.setInteger(RestOptions.PORT, 9999)

    val miniClusterConfig: MiniClusterConfiguration =
      new MiniClusterConfiguration.Builder()
        .setConfiguration(flinkConfig)
        .setNumTaskManagers(3)
        .setNumSlotsPerTaskManager(1)
        .build()

    val cluster = new MiniCluster(miniClusterConfig)
    cluster.start()
    // NOTE(review): nothing here blocks main or closes the cluster; if the
    // JVM exits before you can open the UI, add a blocking wait (and
    // cluster.close() on shutdown) — confirm against how this snippet is run.
  }
}