package com.cloudera.utils;
import net.sourceforge.spnego.SpnegoHttpURLConnection;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
* package: com.cloudera.utils
* describe: 访问Kerberos环境的Http工具类
* creat_user: Fayson
* email: htechinfo@163.com
* creat_date: 2018/2/12
* creat_time: 下午4:57
* 公众号:Hadoop实操
*/
/**
 * HTTP utility for calling endpoints protected by Kerberos/SPNEGO
 * authentication (e.g. the Livy RESTful API on a secured cluster).
 *
 * <p>All three public methods share one private implementation; each
 * returns the response body as a UTF-8 string, or an empty string if the
 * request fails (failures are logged, matching the original behavior).
 */
public class KBHttpUtils {

    /** Utility class — not instantiable. */
    private KBHttpUtils() {
    }

    /**
     * Performs an HTTP GET request.
     *
     * @param url     target URL
     * @param headers optional request headers; may be {@code null} or empty
     * @return the response body, or an empty string on failure
     */
    public static String getAccess(String url, Map<String, String> headers) {
        return access("GET", url, headers, null);
    }

    /**
     * Performs an HTTP DELETE request.
     *
     * @param url     target URL
     * @param headers optional request headers; may be {@code null} or empty
     * @return the response body, or an empty string on failure
     */
    public static String deleteAccess(String url, Map<String, String> headers) {
        return access("DELETE", url, headers, null);
    }

    /**
     * Performs an HTTP POST request.
     *
     * @param url     target URL
     * @param headers optional request headers; may be {@code null} or empty
     * @param data    request body to send; may be {@code null}
     * @return the response body, or an empty string on failure
     */
    public static String postAccess(String url, Map<String, String> headers, String data) {
        return access("POST", url, headers, data);
    }

    /**
     * Shared implementation: opens a SPNEGO-authenticated connection with
     * the "Client" JAAS login entry, optionally sends a request body, and
     * reads the full response as UTF-8 text.
     *
     * @param method  HTTP verb ("GET", "DELETE", "POST", ...)
     * @param url     target URL
     * @param headers optional request headers; may be {@code null}
     * @param data    optional request body; may be {@code null}
     * @return the response body, or an empty string on failure
     */
    private static String access(String method, String url, Map<String, String> headers, String data) {
        StringBuilder sb = new StringBuilder();
        // SpnegoHttpURLConnection.connect(URL, ByteArrayOutputStream) sends
        // the contents of this buffer as the request body.
        try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
            final SpnegoHttpURLConnection spnego = new SpnegoHttpURLConnection("Client");
            spnego.setRequestMethod(method);
            if (headers != null) {
                headers.forEach(spnego::setRequestProperty);
            }
            if (data != null) {
                // Explicit charset: the no-arg getBytes() depends on the
                // platform-default encoding and is unreliable for JSON payloads.
                bos.write(data.getBytes(StandardCharsets.UTF_8));
                System.out.println("Kerberos data:" + data);
            }
            spnego.connect(new URL(url), bos);
            System.out.println("HTTP Status Code: " + spnego.getResponseCode());
            System.out.println("HTTP Status Message: " + spnego.getResponseMessage());
            // Accumulate all response bytes before decoding: decoding each
            // 1024-byte chunk separately (as the original did) corrupts
            // multi-byte UTF-8 characters split across buffer boundaries.
            try (InputStream in = spnego.getInputStream();
                 ByteArrayOutputStream body = new ByteArrayOutputStream()) {
                byte[] buf = new byte[1024];
                int len;
                while ((len = in.read(buf)) > 0) {
                    body.write(buf, 0, len);
                }
                sb.append(body.toString("UTF-8"));
            }
        } catch (Exception e) {
            // Preserve original best-effort behavior: log and fall through,
            // returning whatever (possibly empty) content was collected.
            e.printStackTrace();
        }
        System.out.println("Result:" + sb.toString());
        return sb.toString();
    }
}
(可左右滑动)
2.Livy RESTful API调用示例代码
package com.cloudera.kerberos;
import com.cloudera.utils.KBHttpUtils;
import java.util.HashMap;
/**
* package: com.cloudera
* describe: Kerberos环境下Livy RESTful API接口调用
* creat_user: Fayson
* email: htechinfo@163.com
* creat_date: 2018/2/11
* creat_time: 上午10:50
* 公众号:Hadoop实操
*/
/**
 * Example driver that calls the Livy RESTful API on a Kerberos-secured
 * cluster via {@link com.cloudera.utils.KBHttpUtils}.
 *
 * <p>Requires a valid krb5.conf and a JAAS login configuration declaring
 * the "Client" entry used by the SPNEGO connection.
 */
public class AppLivy {

    /** Livy server endpoint (host:port of the Livy REST service). */
    private static final String LIVY_HOST = "http://ip-172-31-21-83.ap-southeast-1.compute.internal:8998";

    public static void main(String[] args) {
        // Kerberos client setup: point the JVM at krb5.conf and the JAAS
        // login configuration holding the keytab-based "Client" entry.
        System.setProperty("java.security.krb5.conf", "/Volumes/Transcend/keytab/krb5.conf");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        // System.setProperty("sun.security.krb5.debug", "true"); // Kerberos debug mode
        System.setProperty("java.security.auth.login.config", "/Volumes/Transcend/keytab/login-yarn.conf");

        // Parameterized map (the original raw HashMap loses type safety).
        HashMap<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/json");
        headers.put("Accept", "application/json");
        headers.put("X-Requested-By", "fayson");

        // Create an interactive session.
        String kindJson = "{\"kind\": \"spark\", \"proxyUser\":\"fayson\"}";
        // KBHttpUtils.postAccess(LIVY_HOST + "/sessions", headers, kindJson);

        // Run a code snippet in an existing session.
        String code = "{\"code\":\"sc.parallelize(1 to 2).count()\"}";
        // KBHttpUtils.postAccess(LIVY_HOST + "/sessions/2/statements", headers, code);

        // Delete a session.
        // KBHttpUtils.deleteAccess(LIVY_HOST + "/sessions/3", headers);

        // JSON payload describing the Spark batch job to submit.
        String submitJob = "{\"className\": \"org.apache.spark.examples.SparkPi\",\"executorMemory\": \"1g\",\"args\": [200],\"file\": \"/fayson-yarn/jars/spark-examples-1.6.0-cdh5.14.0-hadoop2.6.0-cdh5.14.0.jar\"}";

        // Submit the Spark job to the cluster.
        KBHttpUtils.postAccess(LIVY_HOST + "/batches", headers, submitJob);

        // Poll job state / application id using the session id returned above.
        // KBHttpUtils.getAccess(LIVY_HOST + "/batches/4", headers);
    }
}
(可左右滑动)
4.示例代码运行
1.运行AppLivy代码,向集群提交Spark作业
响应结果:
{
"id": 14,
"state": "starting",
"appId": null,
"appInfo": {
"driverLogUrl": null,
"sparkUiUrl": null
},
"log": ["stdout: ", "\nstderr: ", "\nYARN Diagnostics: "]
}
(可左右滑动)
2.获取作业运行状态,将上一步获取到的id传入到如下请求
响应结果:
{
"id": 14,
"state": "success",
"appId": "application_1518423607906_0006",
"appInfo": {
"driverLogUrl": null,
"sparkUiUrl": "http://ip-172-31-16-68.ap-southeast-1.compute.internal:8088/proxy/application_1518423607906_0006/"
},
"log": ["\t ApplicationMaster RPC port: -1", "\t queue: root.users.fayson", "\t start time: 1518428233280", "\t final status: UNDEFINED", "\t tracking URL: http://ip-172-31-16-68.ap-southeast-1.compute.internal:8088/proxy/application_1518423607906_0006/", "\t user: fayson", "18/02/12 04:37:13 INFO util.ShutdownHookManager: Shutdown hook called", "18/02/12 04:37:13 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-6a934286-5db7-4c6a-aa72-eefc294086a4", "\nstderr: ", "\nYARN Diagnostics: "]
}
(可左右滑动)
通过如上返回的结果,我们可以看到作业的APPID。
3.通过CM和Yarn的8088界面查看作业执行结果
CM上Yarn的应用程序界面显示
Yarn的8088界面显示
Spark的History界面显示
显示作业运行成功
5.总结
在Java访问Kerberos环境的Livy API接口时需要在代码中加载krb5.conf、login-yarn.conf配置文件到环境变量中,实现fayson@FAYSON.COM用户登录
访问Kerberos环境下的RESTful API接口需要使用HttpClient提供的SPNEGO方式访问,这里Fayson使用第三方封装好的spnego包
GitHub地址:
https://github.com/fayson/cdhproject/blob/master/livydemo/src/main/java/com/cloudera/utils/KBHttpUtils.java
https://github.com/fayson/cdhproject/blob/master/livydemo/src/main/java/com/cloudera/kerberos/AppLivy.java
提示:代码块部分可以左右滑动查看噢
为天地立心,为生民立命,为往圣继绝学,为万世开太平。
温馨提示:要看高清无码套图,请使用手机打开并单击图片放大查看。
推荐关注Hadoop实操,第一时间,分享更多Hadoop干货,欢迎转发和分享。
原创文章,欢迎转载,转载请注明:转载自微信公众号Hadoop实操