/*
 * 在互联网应用的开发中，为了提高应用的响应速度，可以尝试将一个任务拆分为多个子任务并发执行，
 * 然后将子任务的处理结果进行合并（有点类似 MapReduce 的意思）。
 * Java 1.5 之后的 concurrent 包中提供了 Executor 和 Future，可以实现这样的功能。
 * 比如把一个大文件拆分成 10 个小文件（拆分过程省略），然后使用 10 个线程把文件写入 HDFS。
 *
 * (To speed up response times, split a task into concurrent subtasks and merge their
 * results — MapReduce-style. Executor/Future from java.util.concurrent implement this;
 * here a large file is pre-split into 10 parts uploaded by 10 threads.)
 */
package example.flume.client;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;
import org.apache.http.params.SyncBasicHttpParams;

import com.common.StopWatch;
/**
 * Uploads one pre-split log file line-by-line to an HTTP endpoint (a Flume HTTP
 * source, judging by the package name — TODO confirm) as a {@link Callable} task.
 * {@link #main(String[])} fans out {@code TASK_COUNT} of these tasks on a fixed
 * thread pool and waits for each result via its {@link Future}.
 */
public class MultiBlobTask implements Callable<String> {

    /** Target HTTP endpoint every log line is POSTed to. */
    private static final String ENDPOINT_URL = "http://xmdevhdp01:5240/";

    /** Number of split files / worker threads (files are named thread-log-00..09). */
    private static final int TASK_COUNT = 10;

    /** Zero-based index of the split file this task is responsible for. */
    final int idname;

    /**
     * @param id index of the split log file to upload; selects the input file name.
     */
    public MultiBlobTask(int id) {
        this.idname = id;
    }

    /**
     * Reads the task's log file and POSTs each line to {@code ENDPOINT_URL}.
     * Stops early on the first non-200 response.
     *
     * @return a summary message including the elapsed time
     * @throws IOException if the file cannot be read or a request fails
     */
    public String testBlob() throws UnsupportedEncodingException, ClientProtocolException, IOException {
        System.out.println("BLOB TEST:" + this.idname + " start...");
        StopWatch stopWatch = new StopWatch();
        stopWatch.reset();
        File logFile = new File(getLogFileName());
        System.out.println(this.idname + " read " + logFile.getName());
        // Create the client first so the reader cannot leak if client setup throws.
        DefaultHttpClient httpClient = createHttpClient();
        try {
            // Read explicitly as UTF-8; the platform-default charset would corrupt
            // non-ASCII log lines on machines with a different default encoding.
            BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(logFile), StandardCharsets.UTF_8));
            try {
                String log;
                while ((log = br.readLine()) != null) {
                    if (!postLine(httpClient, log)) {
                        break; // server rejected a line; abort this file
                    }
                }
            } finally {
                br.close();
            }
        } finally {
            HttpClientUtils.closeQuietly(httpClient);
        }
        return "BLOB TEST:" + this.idname + " END. time=" + stopWatch.elapsedTime();
    }

    /**
     * POSTs a single log line as a JSON/UTF-8 entity.
     *
     * @return {@code true} if the server answered 200, {@code false} otherwise
     * @throws IOException if the request itself fails
     */
    private boolean postLine(DefaultHttpClient httpClient, String log) throws IOException {
        HttpResponse response = null;
        try {
            HttpPost httpPost = new HttpPost(ENDPOINT_URL);
            // Encode the body as UTF-8; the no-charset StringEntity constructor
            // defaults to ISO-8859-1, contradicting the Content-Type declared below.
            StringEntity strEntity = new StringEntity(log, "UTF-8");
            strEntity.setContentType("application/json; charset=UTF-8");
            httpPost.setEntity(strEntity);
            response = httpClient.execute(httpPost);
            int status = response.getStatusLine().getStatusCode();
            if (status != 200) {
                // Don't fail silently: record why the upload stopped.
                System.err.println("BLOB TEST:" + this.idname
                        + " aborted, unexpected HTTP status " + status);
                return false;
            }
            return true;
        } finally {
            // Releases the connection back to the client even on failure.
            HttpClientUtils.closeQuietly(response);
        }
    }

    /** Builds the input path for this task's split file, e.g. D:\log\thread-log-03.txt. */
    private String getLogFileName() {
        return "D:\\log\\thread-log-" + getNoCode(this.idname) + ".txt";
    }

    /** Zero-pads the index to two digits to match the split-file naming scheme. */
    private static String getNoCode(int no) {
        return String.format("%02d", no);
    }

    /** Creates an HTTP client with 2s connect/read timeouts and keep-alive disabled. */
    private DefaultHttpClient createHttpClient() {
        HttpParams params = new SyncBasicHttpParams();
        // set default time out
        params.setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 2000);
        params.setIntParameter(CoreConnectionPNames.SO_TIMEOUT, 2000);
        params.setBooleanParameter(CoreConnectionPNames.SO_KEEPALIVE, false);
        return new DefaultHttpClient(params);
    }

    /** {@inheritDoc} Delegates to {@link #testBlob()}. */
    public String call() throws Exception {
        return testBlob();
    }

    /**
     * Entry point: submits {@code TASK_COUNT} upload tasks to a fixed thread pool,
     * prints each task's result, then shuts the pool down and reports total time.
     */
    public static void main(String[] args) {
        StopWatch stopWatch = new StopWatch();
        stopWatch.reset();
        // create service
        ExecutorService service = Executors.newFixedThreadPool(TASK_COUNT);
        try {
            // submit tasks and collect their Futures
            List<Future<String>> futures = new ArrayList<Future<String>>(TASK_COUNT);
            for (int x = 0; x < TASK_COUNT; x++) {
                futures.add(service.submit(new MultiBlobTask(x)));
            }
            // wait for and print each task result
            for (Future<String> future : futures) {
                try {
                    System.out.println("time=" + future.get());
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe it,
                    // and stop waiting on the remaining futures.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                    break;
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
        } finally {
            // Graceful shutdown: all futures are already drained, no need to interrupt.
            service.shutdown();
        }
        System.out.println("total time=" + stopWatch.elapsedTime());
    }
}