package org.example.util;
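// Requires the Salesforce WSC library (force-wsc) plus the generated partner API stubs, which provide PartnerConnection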
import java.io.*;
import java.util.*;
import com.sforce.async.*;
import com.sforce.soap.partner.PartnerConnection;
import com.sforce.ws.ConnectionException;
import com.sforce.ws.ConnectorConfig;
/**
* Bulk-inserts CSV data with the batch-based Bulk API (com.sforce.async).
* Note: this is the original Bulk API, not Bulk API 2.0; 2.0 jobs accept the
* CSV directly and have no client-managed batches.
*/
public class BulkApiC {
// API version; should match the WSC version you build against
public static final String API_VERSION = "62.0";
// SOAP login endpoint used to authenticate before opening the Bulk connection
public static final String AUTH_ENDPOINT = "https://login.salesforce.com/services/Soap/u/" + API_VERSION;
public static void main(String[] args)
throws AsyncApiException, ConnectionException, IOException {
BulkApiC example = new BulkApiC();
// Replace the arguments below with your credentials and test file name
// The first parameter indicates the type of object we are loading
example.runSample("Account", "myUser@myOrg.com", "myPassword", "myToken", "mySampleData.csv");
}
/**
* Creates a Bulk API job and uploads the CSV file as batches.
*/
public void runSample(String sobjectType, String userName, String password, String token, String sampleFileName) throws AsyncApiException, ConnectionException, IOException {
// Connect to Salesforce
BulkConnection connection = getBulkConnection(userName, password, token);
// Create the job
JobInfo job = createJob(sobjectType, connection);
// Upload the CSV data as batches
List<BatchInfo> batchInfoList = createBatchesFromCSVFile(connection, job, sampleFileName);
// Close the job (no more batches can be added)
closeJob(connection, job.getId());
// Wait for the job to finish
awaitCompletion(connection, job, batchInfoList);
// Check the results
checkResults(connection, job, batchInfoList);
}
/**
* Gets the results of the operation and checks for errors.
*/
private void checkResults(BulkConnection connection, JobInfo job, List<BatchInfo> batchInfoList) throws AsyncApiException, IOException {
// batchInfoList was populated when the batches were created and submitted
for (BatchInfo b : batchInfoList) {
CSVReader rdr = new CSVReader(connection.getBatchResultStream(job.getId(), b.getId()));
List<String> resultHeader = rdr.nextRecord();
int resultCols = resultHeader.size();
List<String> row;
while ((row = rdr.nextRecord()) != null) {
Map<String, String> resultInfo = new HashMap<String, String>();
for (int i = 0; i < resultCols; i++) {
resultInfo.put(resultHeader.get(i), row.get(i));
}
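// Each result row carries Id, Success, Created, and Error columns for the matching input row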
boolean success = Boolean.valueOf(resultInfo.get("Success"));
boolean created = Boolean.valueOf(resultInfo.get("Created"));
String id = resultInfo.get("Id");
String error = resultInfo.get("Error");
if (success && created) {
System.out.println("创建成功,ID:" + id);
} else if (!success) {
System.out.println("失败,错误信息:" + error);
}
}
}
}
/**
* Closes the job.
*/
private void closeJob(BulkConnection connection, String jobId) throws AsyncApiException {
JobInfo job = new JobInfo();
job.setId(jobId);
job.setState(JobStateEnum.Closed);
connection.updateJob(job);
}
/**
* Waits for a job to complete by polling the Bulk API.
*/
private void awaitCompletion(BulkConnection connection, JobInfo job, List<BatchInfo> batchInfoList) throws AsyncApiException {
long sleepTime = 0L;
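// sleepTime starts at 0 so the first status check happens immediately; later iterations wait 10 seconds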
Set<String> incomplete = new HashSet<String>();
for (BatchInfo bi : batchInfoList) {
incomplete.add(bi.getId());
}
while (!incomplete.isEmpty()) {
try {
Thread.sleep(sleepTime);
} catch (InterruptedException e) {
// Polling loop; ignore interruptions between status checks
}
System.out.println("等待结果中..." + incomplete.size());
sleepTime = 10000L;
BatchInfo[] statusList = connection.getBatchInfoList(job.getId()).getBatchInfo();
for (BatchInfo b : statusList) {
if (b.getState() == BatchStateEnum.Completed || b.getState() == BatchStateEnum.Failed) {
if (incomplete.remove(b.getId())) {
System.out.println("批次状态:\n" + b);
}
}
}
}
}
/**
* Creates a new job using the Bulk API.
*/
private JobInfo createJob(String sobjectType, BulkConnection connection) throws AsyncApiException {
JobInfo job = new JobInfo();
job.setObject(sobjectType);
job.setOperation(OperationEnum.insert);
job.setContentType(ContentType.CSV);
job = connection.createJob(job);
System.out.println(job);
return job;
}
/**
* Creates the BulkConnection used to call Bulk API operations.
*/
private BulkConnection getBulkConnection(String userName, String password, String token) throws ConnectionException, AsyncApiException {
// BulkConnection does not authenticate by itself: it needs a session ID and a REST endpoint,
// so log in through the SOAP partner API first
ConnectorConfig partnerConfig = new ConnectorConfig();
partnerConfig.setUsername(userName);
// Password + security token authentication
partnerConfig.setPassword(password + token);
// Endpoint address for authentication and authorization
partnerConfig.setAuthEndpoint(AUTH_ENDPOINT);
// Creating the connection performs the login and populates the session ID
new PartnerConnection(partnerConfig);
ConnectorConfig config = new ConnectorConfig();
config.setSessionId(partnerConfig.getSessionId());
// Derive the Bulk API REST endpoint from the SOAP service endpoint,
// e.g. https://yourInstance.salesforce.com/services/async/62.0
String soapEndpoint = partnerConfig.getServiceEndpoint();
String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/")) + "async/" + API_VERSION;
config.setRestEndpoint(restEndpoint);
// Enable compression; set this to false when debugging
config.setCompression(true);
// Set this to true to see HTTP requests and responses on stdout
config.setTraceMessage(false);
return new BulkConnection(config);
}
/**
* Creates and uploads batches from a CSV file.
*/
private List<BatchInfo> createBatchesFromCSVFile(BulkConnection connection, JobInfo jobInfo, String csvFileName) throws IOException, AsyncApiException {
List<BatchInfo> batchInfos = new ArrayList<BatchInfo>();
// Wrap the CSV file's byte stream in a character reader, buffered for efficient line-by-line reading
BufferedReader rdr = new BufferedReader(
new InputStreamReader(new FileInputStream(csvFileName), "UTF-8")
);
// Read the CSV header row
byte[] headerBytes = (rdr.readLine() + "\n").getBytes("UTF-8");
int headerBytesLength = headerBytes.length;
File tmpFile = File.createTempFile("bulkAPIInsert", ".csv");
// Split the CSV file into multiple batches
try {
FileOutputStream tmpOut = new FileOutputStream(tmpFile);
int maxBytesPerBatch = 10000000; // Maximum of 10 million bytes per batch
int maxRowsPerBatch = 10000; // Maximum of 10,000 rows per batch
int currentBytes = 0;
int currentLines = 0;
String nextLine;
while ((nextLine = rdr.readLine()) != null) {
byte[] bytes = (nextLine + "\n").getBytes("UTF-8");
// When a batch size limit is reached, submit the current batch and start a new one
if (currentBytes + bytes.length > maxBytesPerBatch || currentLines > maxRowsPerBatch) {
createBatch(tmpOut, tmpFile, batchInfos, connection, jobInfo);
currentBytes = 0;
currentLines = 0;
}
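// A zero byte count means a fresh batch: reopen the temp file and write the header row first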
if (currentBytes == 0) {
tmpOut = new FileOutputStream(tmpFile);
tmpOut.write(headerBytes);
currentBytes = headerBytesLength;
currentLines = 1;
}
tmpOut.write(bytes);
currentBytes += bytes.length;
currentLines++;
}
// Finished processing all rows
// Create a final batch for any leftover data
if (currentLines > 1) {
createBatch(tmpOut, tmpFile, batchInfos, connection, jobInfo);
}
} finally {
rdr.close();
tmpFile.delete();
}
return batchInfos;
}
/**
* Creates a batch by uploading the contents of the file.
* This closes the output stream.
*/
private void createBatch(FileOutputStream tmpOut, File tmpFile, List<BatchInfo> batchInfos, BulkConnection connection, JobInfo jobInfo) throws IOException, AsyncApiException {
tmpOut.flush();
tmpOut.close();
FileInputStream tmpInputStream = new FileInputStream(tmpFile);
try {
BatchInfo batchInfo = connection.createBatchFromStream(jobInfo, tmpInputStream);
System.out.println(batchInfo);
batchInfos.add(batchInfo);
} finally {
tmpInputStream.close();
}
}
}
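For reference, the uploaded CSV needs a header row of field API names for the target sObject, and each generated batch file repeats that header, which is why createBatchesFromCSVFile writes headerBytes at the top of every temp file. A minimal sketch of what mySampleData.csv could contain for an Account insert; the specific fields are an illustrative assumption, not part of the original sample:

Name,Description,NumberOfEmployees
"Acme Ltd","Created via the Bulk API sample",100
"Globex Corp","Created via the Bulk API sample",250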