This project wraps the Spark REST API, using HttpClient and YarnClient. The pom is as follows:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.2.2.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.wisetv</groupId>
<artifactId>sparkprojectmanager</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>sparkprojectmanager</name>
<description>Demo project for Spring Boot</description>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
<hadoop.version>2.9.0</hadoop.version>
<org.apache.hive.version>1.2.1</org.apache.hive.version>
<swagger.version>2.9.2</swagger.version>
<ojdbc6.version>11.2.0.1.0</ojdbc6.version>
<scala.version>2.11.12</scala.version>
<spark.version>2.3.0</spark.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.2</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.47</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-client</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- spark -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${swagger.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-aop</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-oxm</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-yarn_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.11</artifactId>
<version>${spark.version}</version>
<exclusions>
<exclusion>
<groupId>com.twitter</groupId>
<artifactId>parquet-hadoop-bundle</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-common</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.bundles.repackaged</groupId>
<artifactId>jersey-guava</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-jaxb</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<version>1.19.4</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>1.19.4</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
<version>1.19.4</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<version>1.19.4</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${org.apache.hive.version}</version>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>jetty-all</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop-bundle</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
YarnClientTools.java
package com.wisetv.sparkprojectmanager.util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.spark.sql.SparkSession;
import java.io.IOException;
/**
* @ClassName YarnClientTools
* @Description:
* Utility class for YARN-related operations
* @Author
* @Date 20200213
* @Version V1.0
**/
public class YarnClientTools {
/**
* Obtains the tracking URL from a SparkSession. The Spark REST API must be called through this URL, so it has to be retrieved first.
* @param sparkSession SparkSession
* @param conf Hadoop configuration used to connect to YARN
* @return the tracking URL
*/
public String getTrackingUrl(SparkSession sparkSession, Configuration conf){
String appId = sparkSession.sparkContext().applicationId();
YarnClient yarnClient = YarnClient.createYarnClient();
yarnClient.init(conf);
yarnClient.start();
try {
ApplicationReport report = yarnClient.getApplicationReport(ApplicationId.fromString(appId));
return report.getTrackingUrl();
} catch (YarnException | IOException e) {
e.printStackTrace();
return null;
} finally {
//always release the YarnClient, even if the report lookup fails
yarnClient.stop();
}
}
}
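For reference, here is a minimal usage sketch (not part of the original code) showing how the tracking URL obtained above feeds the SparkRestApiTools calls shown below. The SparkSession, the Hadoop Configuration (which must be able to reach the ResourceManager, e.g. via yarn-site.xml on the classpath), and all variable names are assumptions:

package com.wisetv.sparkprojectmanager.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.spark.sql.SparkSession;
import java.util.HashMap;

public class TrackingUrlDemo {
    public static void main(String[] args) {
        // Assumes yarn-site.xml is on the classpath so the Configuration can reach YARN
        SparkSession spark = SparkSession.builder().appName("demo").getOrCreate();
        Configuration hadoopConf = new Configuration();
        String trackingUrl = new YarnClientTools().getTrackingUrl(spark, hadoopConf);
        // The tracking URL (ending with "/") is the base URL for all SparkRestApiTools calls
        String json = new SparkRestApiTools().getApplications(trackingUrl, new HashMap<>(), 5000);
        System.out.println(json);
    }
}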
SparkRestApiTools.java
package com.wisetv.sparkprojectmanager.util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @ClassName SparkRestApiTools
* @Description:
* Utility class for calling the Spark REST API from Java
* @Author 张年
* @Date 2020/2/13
* @Version V1.0
**/
public class SparkRestApiTools {
/**
* A list of all applications.
* ?status=[completed|running] list only applications in the chosen state.
* ?minDate=[date] earliest start date/time to list.
* ?maxDate=[date] latest start date/time to list.
* ?minEndDate=[date] earliest end date/time to list.
* ?maxEndDate=[date] latest end date/time to list.
* ?limit=[limit] limits the number of applications listed.
* Examples:
* ?minDate=2015-02-10
* ?minDate=2015-02-03T16:42:40.000GMT
* ?maxDate=2015-02-11T20:41:30.000GMT
* ?minEndDate=2015-02-12
* ?minEndDate=2015-02-12T09:15:10.000GMT
* ?maxEndDate=2015-02-14T16:30:45.000GMT
* ?limit=10
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required (it must end with "/"); if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param paramMap supported query parameters, listed above from the official API
* @param time timeout in milliseconds; the request takes some time, so do not set it too short
* @return the JSON returned by the API
*/
public String getApplications(String trackingURL, Map<String,String> paramMap,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications";
if(paramMap.size()>0){
List<String> paramList = new ArrayList<>();
for (Map.Entry<String, String> entry : paramMap.entrySet()) {
paramList.add(entry.getKey()+"="+entry.getValue());
}
url = url + "?"+String.join("&",paramList);
}
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of all jobs for a given application.
* ?status=[running|succeeded|failed|unknown] list only jobs in the specific state.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param paramMap supported query parameters, listed above from the official API
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationJobs(String trackingURL,String appId, Map<String,String> paramMap,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/jobs";
if(paramMap.size()>0){
List<String> paramList = new ArrayList<>();
for (Map.Entry<String, String> entry : paramMap.entrySet()) {
paramList.add(entry.getKey()+"="+entry.getValue());
}
url = url + "?"+String.join("&",paramList);
}
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of all stages for a given application.
* ?status=[active|complete|pending|failed] list only stages in the state.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param paramMap supported query parameters, listed above from the official API
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationStages(String trackingURL,String appId, Map<String,String> paramMap,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/stages";
if(paramMap.size()>0){
List<String> paramList = new ArrayList<>();
for (Map.Entry<String, String> entry : paramMap.entrySet()) {
paramList.add(entry.getKey()+"="+entry.getValue());
}
url = url + "?"+String.join("&",paramList);
}
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of all attempts for the given stage.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param stageId the stage ID within the application
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationStage(String trackingURL,String appId,String stageId,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/stages/"+stageId;
String json = httpUtils.doGet(url,time);
return json;
}
/**
* Details for the given stage attempt.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param stageId the stage ID within the application
* @param stageAttemptId the attempt ID within the stage; shown in the Attempt column of the Stages page in the UI
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationStageAttempt(String trackingURL,String appId,String stageId,String stageAttemptId,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/stages/"+stageId+"/"+stageAttemptId;
String json = httpUtils.doGet(url,time);
return json;
}
/**
* Summary metrics of all tasks in the given stage attempt.
* ?quantiles summarize the metrics with the given quantiles.
* Example: ?quantiles=0.01,0.5,0.99
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param stageId the stage ID within the application
* @param stageAttemptId the attempt ID within the stage
* @param paramMap supported query parameters, listed above from the official API
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationTaskSummary(String trackingURL,String appId,String stageId,String stageAttemptId, Map<String,String> paramMap,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/stages/"+stageId+"/"+stageAttemptId+"/taskSummary";
if(paramMap.size()>0){
List<String> paramList = new ArrayList<>();
for (Map.Entry<String, String> entry : paramMap.entrySet()) {
paramList.add(entry.getKey()+"="+entry.getValue());
}
url = url + "?"+String.join("&",paramList);
}
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of all tasks for the given stage attempt.
* ?offset=[offset]&length=[len] list tasks in the given range.
* ?sortBy=[runtime|-runtime] sort the tasks.
* Example: ?offset=10&length=50&sortBy=runtime
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param stageId the stage ID within the application
* @param stageAttemptId the attempt ID within the stage
* @param paramMap supported query parameters, listed above from the official API
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationTaskList(String trackingURL,String appId,String stageId,String stageAttemptId, Map<String,String> paramMap,int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/stages/"+stageId+"/"+stageAttemptId+"/taskList";
if(paramMap.size()>0){
List<String> paramList = new ArrayList<>();
for (Map.Entry<String, String> entry : paramMap.entrySet()) {
paramList.add(entry.getKey()+"="+entry.getValue());
}
url = url + "?"+String.join("&",paramList);
}
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of all active executors for the given application.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationExecutors(String trackingURL,String appId, int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/executors";
String json = httpUtils.doGet(url,time);
return json;
}
/**
* Stack traces of all the threads running within the given active executor. Not available via the history server.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param executorId the executor ID
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationExecutorThreads(String trackingURL,String appId,String executorId, int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/executors/"+executorId+"/threads";
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of all(active and dead) executors for the given application.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationAllexecutors(String trackingURL,String appId, int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/allexecutors";
String json = httpUtils.doGet(url,time);
return json;
}
/**
* A list of stored RDDs for the given application.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationStorageRdds(String trackingURL,String appId, int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/storage/rdd";
String json = httpUtils.doGet(url,time);
return json;
}
/**
* Details for the storage status of a given RDD.
* @param trackingURL
* Since the job is submitted through YARN, this proxy URL is required; if Spark is launched directly, use http://ip:4040/api/v1/applications
* @param appId the applicationId from the SparkSession
* @param rddId the RDD ID
* @param time timeout in milliseconds
* @return the JSON returned by the API
*/
public String getApplicationStorageRdd(String trackingURL,String appId,String rddId, int time){
HttpUtils httpUtils = new HttpUtils();
String url = trackingURL+ "api/v1/applications/"+appId+"/storage/rdd/"+rddId;
String json = httpUtils.doGet(url,time);
return json;
}
public static void main(String[] args) {
SparkRestApiTools sparkRestApiTools = new SparkRestApiTools();
String trackingURL = "http://sparkdis1:8088/proxy/application_1579400438868_14544/";
Map<String,String> map = new HashMap<>();
String s = "";
// s = sparkRestApiTools.getApplications(trackingURL, map, 5000);
// System.out.println(s);
// map.clear();
// map.put("status","running");
// s = sparkRestApiTools.getApplicationJobs(trackingURL, "application_1579400438868_14544", map, 5000);
// System.out.println(s);
// map.clear();
// map.put("status","pending");
// s = sparkRestApiTools.getApplicationStages(trackingURL, "application_1579400438868_14544", map, 5000);
// System.out.println(s);
// map.clear();
// map.put("status","pending");
// s = sparkRestApiTools.getApplicationStage(trackingURL, "application_1579400438868_14544", "3216", 5000);
// System.out.println(s);
// map.clear();
// map.put("status","pending");
// s = sparkRestApiTools.getApplicationStageAttempt(trackingURL, "application_1579400438868_14544", "3216","0", 5000);
// System.out.println(s);
// map.clear();
// map.put("quantiles","0.5");
// s = sparkRestApiTools.getApplicationTaskSummary(trackingURL, "application_1579400438868_14544", "3216","0",map, 5000);
// System.out.println(s);
// map.clear();
// s = sparkRestApiTools.getApplicationTaskList(trackingURL, "application_1579400438868_14544", "3216","0",map, 5000);
// System.out.println(s);
// map.clear();
// s = sparkRestApiTools.getApplicationExecutors(trackingURL, "application_1579400438868_14544", 5000);
// System.out.println(s);
/**
* Does not work
*/
// map.clear();
// s = sparkRestApiTools.getApplicationExecutorThreads(trackingURL, "application_1579400438868_14544", "13",5000);
// System.out.println(s);
// map.clear();
// s = sparkRestApiTools.getApplicationAllexecutors(trackingURL, "application_1579400438868_14544",5000);
// System.out.println(s);
map.clear();
s = sparkRestApiTools.getApplicationStorageRdds(trackingURL, "application_1579400438868_14544",5000);
System.out.println(s);
}
}
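All of these methods return raw JSON strings. As a small, hypothetical follow-up (not in the original code), the response of getApplications can be parsed with fastjson, which is already declared in the pom; the "id" and "name" fields follow the Spark REST API response format, and the class and variable names below are illustrative:

package com.wisetv.sparkprojectmanager.util;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import java.util.HashMap;

public class ParseApplicationsDemo {
    public static void main(String[] args) {
        String trackingURL = "http://sparkdis1:8088/proxy/application_1579400438868_14544/";
        String json = new SparkRestApiTools().getApplications(trackingURL, new HashMap<>(), 5000);
        // The /applications endpoint returns a JSON array; each element has "id" and "name" fields
        JSONArray apps = JSON.parseArray(json);
        for (int i = 0; i < apps.size(); i++) {
            JSONObject app = apps.getJSONObject(i);
            System.out.println(app.getString("id") + " -> " + app.getString("name"));
        }
    }
}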
HttpUtils.java
package com.wisetv.sparkprojectmanager.util;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @ClassName HttpUtils
* @Description: HTTP utility class
* @Author 张年
* @Date 2020/2/13
* @Version V1.0
**/
public class HttpUtils {
/**
* HTTP GET helper
* @param url url
* @param timeout timeout in milliseconds
* @return the response body on success, or an empty string on failure
*/
public String doGet(String url, int timeout) {
CloseableHttpClient httpClient = HttpClients.createDefault();
//this configuration is optional; adjust it to your own needs
RequestConfig requestConfig = RequestConfig.custom()
.setConnectTimeout(timeout) //connect timeout
.setConnectionRequestTimeout(timeout) //timeout for obtaining a connection from the pool
.setSocketTimeout(timeout) //socket read timeout
.setRedirectsEnabled(true) //follow redirects automatically
.build();
HttpGet httpGet = new HttpGet(url);
httpGet.setConfig(requestConfig);
String result = "";
try {
HttpResponse httpResponse = httpClient.execute(httpGet);
if (httpResponse.getStatusLine().getStatusCode() == 200) {
result = EntityUtils.toString(httpResponse.getEntity());
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
httpClient.close();
} catch (Exception e) {
e.printStackTrace();
}
}
return result;
}
/**
* HTTP POST helper
* @param url url
* @param timeout timeout in milliseconds
* @param paramMap form parameters to send in the request body
* @return the response body on success, or an empty string on failure
*/
public String doPost(String url, int timeout, Map<String, String> paramMap){
CloseableHttpClient httpClient = HttpClients.custom()
.build();
//this configuration is optional; adjust it to your own needs
RequestConfig requestConfig = RequestConfig.custom()
.setConnectTimeout(timeout) //connect timeout
.setConnectionRequestTimeout(timeout) //timeout for obtaining a connection from the pool
.setSocketTimeout(timeout) //socket read timeout
.setRedirectsEnabled(true) //follow redirects automatically
.build();
HttpPost httpPost = new HttpPost(url);
httpPost.setConfig(requestConfig);
List<NameValuePair> formParams = new ArrayList<>();
//form parameters
for (Map.Entry<String, String> entry:paramMap.entrySet()) {
formParams.add(new BasicNameValuePair(entry.getKey(), entry.getValue()));
}
String result = "";
try {
//the entity must be attached before the request is executed
UrlEncodedFormEntity entity = new UrlEncodedFormEntity(formParams, "utf-8");
httpPost.setEntity(entity);
CloseableHttpResponse response = httpClient.execute(httpPost);
result = EntityUtils.toString(response.getEntity());
response.close();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (ClientProtocolException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
httpClient.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return result;
}
}
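Since the project is a Spring Boot web application (spring-boot-starter-web and springfox are in the pom), the wrapper can be exposed over HTTP. The controller below is only an illustrative sketch; the class name, request mapping, and parameters are assumptions and not part of the original project:

package com.wisetv.sparkprojectmanager.controller;

import com.wisetv.sparkprojectmanager.util.SparkRestApiTools;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.HashMap;

/**
 * Hypothetical controller sketch: exposes the wrapped Spark REST API
 * through the Spring Boot web layer.
 */
@RestController
public class SparkRestApiController {
    private final SparkRestApiTools tools = new SparkRestApiTools();

    @GetMapping("/spark/jobs")
    public String jobs(@RequestParam String trackingUrl,
                       @RequestParam String appId,
                       @RequestParam(defaultValue = "5000") int timeout) {
        // Delegates to the wrapper and returns the raw JSON from the Spark REST API
        return tools.getApplicationJobs(trackingUrl, appId, new HashMap<>(), timeout);
    }
}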