本文实现功能点:
1、连接yarn集群
2、获取yarn资源池各节点信息
3、在root节点下面新增一个二级节点
4、根据applicationId获取任务的运行时间、资源池等信息
5、kill掉yarn任务进程
先把集群的core-site.xml、yarn-site.xml、hdfs-site.xml、mapred-site.xml这些文件放到本地目录。
如果集群集成了 Kerberos,还需要把 krb5.conf 和用于登录认证的 xxx.keytab 文件放到本地目录。
代码实现如下:
package com.example.demo.yarn;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.*;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.Records;
import org.apache.http.HttpEntity;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.FileEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.text.SimpleDateFormat;
import java.util.Base64;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import static javax.management.timer.Timer.ONE_MINUTE;
public class YarnUtil {
    private final static Logger log = LogManager.getLogger(YarnUtil.class);

    // Shared Hadoop configuration; re-initialised from the site files inside getYarnClient().
    public static Configuration conf = new Configuration();

    // Local copies of the cluster client configuration files (copied from the cluster).
    private final static String CORE_SITE = "src/main/resources/core-site.xml";
    private final static String YARN_SITE = "src/main/resources/yarn-site.xml";
    private final static String HDFS_SITE = "src/main/resources/hdfs-site.xml";
    private final static String MAPRED_SITE = "src/main/resources/mapred-site.xml";

    public static void main(String[] args) {
        //getYarnNodeReports();   // list all nodes in the YARN resource pool
        //addYarnResources();     // add a second-level queue under root
        // Equivalent to "application_1684390205540_4858": clusterTimestamp + sequence id.
        ApplicationId appId = ApplicationId.newInstance(1684390205540L, 4858);
        //getYarnData(appId);     // query run time / queue info by applicationId
        killYarn(appId);          // kill the YARN application
    }

    /**
     * Builds a Configuration from the local copies of the cluster site files.
     *
     * @return a fresh Configuration with core/yarn/hdfs/mapred site files loaded
     */
    public static Configuration load() {
        Configuration conf = new Configuration();
        conf.addResource(CORE_SITE);
        conf.addResource(YARN_SITE);
        conf.addResource(HDFS_SITE);
        conf.addResource(MAPRED_SITE);
        return conf;
    }

    /**
     * Performs a Kerberos keytab login and returns a started YarnClient.
     * Callers are responsible for calling {@code stop()} on the returned client.
     *
     * @return a started, kerberized YarnClient
     * @throws RuntimeException if the login or client start fails
     */
    public static YarnClient getYarnClient() {
        // krb5.conf must be visible to the JVM before any Kerberos call is made.
        System.setProperty("java.security.krb5.conf", "src/main/resources/krb5.conf");
        System.setProperty("sun.security.krb5.debug", "false");
        // FIX: the original set the kerberos flag on the static `conf` but then
        // initialised the client with a *fresh* Configuration from load(), so the
        // "hadoop.security.authentication" setting never reached the client.
        // Load once and use the same object for both UGI and the client.
        conf = load();
        conf.set("hadoop.security.authentication", "kerberos");
        String principal = "hive"; // Kerberos principal; without an explicit REALM the krb5.conf default realm is used
        String keytabFile = "src/main/resources/hive.keytab"; // keytab file generated by the KDC
        try {
            // FIX: UGI must see the kerberos-enabled configuration before the login call.
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(principal, keytabFile);
            UserGroupInformation.getLoginUser();
            YarnClient yarnClient = YarnClient.createYarnClient();
            yarnClient.init(conf);
            yarnClient.start();
            return yarnClient;
        } catch (Exception e) {
            throw new RuntimeException("Failed to create kerberized YarnClient", e);
        }
    }

    /***
     * Prints a report (id, http address, health, labels, state, capacity)
     * for every node in the resource pool.
     */
    public static void getYarnNodeReports() {
        YarnClient yarnClient = getYarnClient();
        try {
            List<NodeReport> nodeReports = yarnClient.getNodeReports();
            for (NodeReport nodeReport : nodeReports) {
                System.out.println("节点 ID: " + nodeReport.getNodeId().toString());
                System.out.println("节点 HTTP Address: " + nodeReport.getHttpAddress());
                System.out.println("节点 Health Report: " + nodeReport.getHealthReport());
                System.out.println("节点 Labels: " + nodeReport.getNodeLabels());
                System.out.println("节点 State: " + nodeReport.getNodeState().toString());
                System.out.println("节点 Total Resource: " + nodeReport.getCapability().toString());
            }
        } catch (YarnException | IOException e) {
            throw new RuntimeException(e);
        } finally {
            // FIX: always release the client, even when getNodeReports() fails.
            yarnClient.stop();
        }
    }

    /**
     * Builds the JSON payload describing the new queue for the
     * Queue Manager API (see addYarnResources()).
     *
     * @return the queue definition as a JSON string
     */
    public static String jsonData() {
        JSONObject yarnjson = new JSONObject();
        yarnjson.put("queuePath", "root.poolname_test"); // full queue path
        yarnjson.put("queueName", "poolname_test");      // queue name
        yarnjson.put("capacity", YarnHttpUtils.getCapacity(1024, 2));          // minimum memory (MB) / minimum vcores
        yarnjson.put("maximum-capacity", YarnHttpUtils.getCapacity(10240, 2)); // maximum memory (MB) / maximum vcores
        yarnjson.put("maximum-applications", "10");          // max number of running applications
        yarnjson.put("maximum-am-resource-percent", "2");    // max ApplicationMaster resource share
        /**
         * Scheduling policy options:
         * DRF:  Dominant Resource Fairness — schedules fairly on both CPU and memory (recommended).
         * FAIR: fair scheduling on memory only.
         * FIFO: a pool with child pools cannot be FIFO.
         */
        yarnjson.put("ordering-policy", "fifo");
        yarnjson.put("siblingCapacities", new JSONArray());
        yarnjson.put("message", "Added root.poolname_test");
        System.out.println("==================================================" + yarnjson.toString());
        return yarnjson.toString();
    }

    /***
     * Adds a second-level queue "poolname_test" under the root node via the
     * Cloudera Queue Manager HTTP API, configuring min/max cpu+memory,
     * queue weight, application limits, AM share and scheduling policy.
     */
    public static void addYarnResources() {
        // Queue Manager endpoint for creating queues under root (see the screenshot in the article).
        String postUrl = "http://ip:7180/cmf/clusters/Cluster%201/queue-manager-api/api/v1/environments/support/clusters/Cluster%201/resources/scheduler/partitions/default/queues/root";
        String username = "admin";
        String password = "admin";
        HashMap<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/json");
        headers.put("Accept-Encoding", "gzip, deflate");
        headers.put("Referer", "http://ip:7180/cmf/clusters/Cluster%201/queue-manager/");
        headers.put("Cookie", "xxxx");
        // Submit the queue definition to the resource pool.
        String result = YarnHttpUtils.postAccessByAuth(postUrl, headers, jsonData(), username, password);
        System.out.println("资源池配置:" + result);
    }

    /***
     * Looks up an application by id and prints its start time,
     * resource usage, final status, host and state.
     *
     * @param appId the YARN application id to query
     */
    public static void getYarnData(ApplicationId appId) {
        YarnClient yarnClient = getYarnClient();
        try {
            ApplicationReport report = yarnClient.getApplicationReport(appId);
            System.out.println("根据applicationId获取任务信息:=============================");
            System.out.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(report.getStartTime())));
            System.out.println(report.getApplicationResourceUsageReport().getUsedResources().getVirtualCores());
            // NOTE: getMemory() is deprecated in newer YARN APIs in favour of getMemorySize().
            System.out.println(report.getApplicationResourceUsageReport().getUsedResources().getMemory());
            System.out.println(report.getFinalApplicationStatus());
            System.out.println(report.getHost());
            System.out.println(report.getYarnApplicationState());
        } catch (YarnException | IOException e) {
            // FIX: use the declared logger instead of printStackTrace().
            log.error("Failed to fetch application report for " + appId, e);
        } finally {
            // FIX: the original never stopped the client in this method.
            yarnClient.stop();
        }
    }

    /**
     * Kills the YARN application with the given id.
     *
     * @param appId the YARN application id to kill
     * @throws RuntimeException if the kill request fails
     */
    public static void killYarn(ApplicationId appId) {
        YarnClient yarnClient = getYarnClient();
        try {
            yarnClient.killApplication(appId);
        } catch (YarnException | IOException e) {
            throw new RuntimeException(e);
        } finally {
            // FIX: previously stop() was skipped whenever killApplication threw.
            yarnClient.stop();
        }
        System.out.println("kill成功===============================");
    }
}
package com.example.demo.yarn;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
* describe: Http请求工具类
*/
public class YarnHttpUtils {

    /**
     * Builds a credentials provider scoped to the target host/port
     * for HTTP Basic authentication.
     */
    private static CredentialsProvider basicCredentials(URI uri, String username, String password) {
        CredentialsProvider credsProvider = new BasicCredentialsProvider();
        credsProvider.setCredentials(new AuthScope(uri.getHost(), uri.getPort()),
                new UsernamePasswordCredentials(username, password));
        return credsProvider;
    }

    /**
     * Executes an entity-enclosing request (POST/PUT) with Basic auth and an
     * optional JSON body; shared by postAccessByAuth and putAccessByAuth.
     *
     * @return the response body, or null on failure (best-effort, matching the
     *         original behaviour of logging and returning null)
     */
    private static String sendWithBody(HttpEntityEnclosingRequestBase request, Map<String, String> headers,
                                       String data, String username, String password) {
        URI uri = request.getURI();
        // FIX: the client was never closed before — try-with-resources releases
        // the connection pool even on failure.
        try (CloseableHttpClient httpClient = HttpClients.custom()
                .setDefaultCredentialsProvider(basicCredentials(uri, username, password))
                .build()) {
            if (headers != null && headers.size() > 0) {
                headers.forEach(request::addHeader);
            }
            if (data != null) {
                // FIX: encode the body explicitly as UTF-8. The old code used
                // the StringEntity(String) ctor (ISO-8859-1/platform default) and
                // set "Content-Encoding: UTF-8", which is invalid — Content-Encoding
                // is for compression codings (gzip/deflate), not charsets.
                StringEntity entity = new StringEntity(data, StandardCharsets.UTF_8);
                entity.setContentType("application/json");
                request.setEntity(entity);
            }
            HttpResponse response = httpClient.execute(request);
            HttpEntity resultEntity = response.getEntity();
            // FIX: decode with an explicit charset instead of the entity/platform fallback.
            return EntityUtils.toString(resultEntity, StandardCharsets.UTF_8);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * GET with HTTP Basic (username/password) authentication.
     *
     * @param url      the request url
     * @param headers  optional extra request headers (may be null)
     * @param username basic-auth user
     * @param password basic-auth password
     * @return the response body, or null on failure
     */
    public static String getAccessByAuth(String url, Map<String, String> headers, String username, String password) {
        URI uri = URI.create(url);
        HttpGet httpGet = new HttpGet(uri);
        if (headers != null && headers.size() > 0) {
            headers.forEach(httpGet::addHeader);
        }
        // FIX: close the client; it leaked on every call in the original.
        try (CloseableHttpClient httpClient = HttpClients.custom()
                .setDefaultCredentialsProvider(basicCredentials(uri, username, password))
                .build()) {
            HttpResponse response = httpClient.execute(httpGet);
            HttpEntity resultEntity = response.getEntity();
            return EntityUtils.toString(resultEntity, StandardCharsets.UTF_8);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * POST with HTTP Basic authentication and an optional JSON body.
     *
     * @param url      the request url
     * @param headers  optional extra request headers (may be null)
     * @param data     JSON body to send (may be null)
     * @param username basic-auth user
     * @param password basic-auth password
     * @return the response body, or null on failure
     */
    public static String postAccessByAuth(String url, Map<String, String> headers, String data, String username, String password) {
        return sendWithBody(new HttpPost(URI.create(url)), headers, data, username, password);
    }

    /**
     * PUT with HTTP Basic authentication and an optional JSON body.
     *
     * @param url      the request url
     * @param headers  optional extra request headers (may be null)
     * @param data     JSON body to send (may be null)
     * @param username basic-auth user
     * @param password basic-auth password
     * @return the response body, or null on failure
     */
    public static String putAccessByAuth(String url, Map<String, String> headers, String data, String username, String password) {
        return sendWithBody(new HttpPut(URI.create(url)), headers, data, username, password);
    }

    /**
     * Formats a capacity string in the form the Queue Manager API expects,
     * e.g. "[memory=1024,vcores=2]". Null arguments are treated as 0.
     *
     * @param memory memory amount (MB); null means 0
     * @param vcores number of virtual cores; null means 0
     * @return the formatted capacity string
     */
    public static String getCapacity(Integer memory, Integer vcores) {
        int mem = (memory == null) ? 0 : memory;
        int cores = (vcores == null) ? 0 : vcores;
        // No shared state here, so a plain concatenation replaces the StringBuffer.
        return "[memory=" + mem + ",vcores=" + cores + "]";
    }
}