import com.alibaba.fastjson.JSON;
import com.**.**.commonutil.util.StrUtil;
import com.**.**.**.dao.doris.dto.DorisReqDTO;
import org.apache.commons.codec.binary.Base64;
import org.apache.http.HttpHeaders;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultRedirectStrategy;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static java.util.jar.Pack200.Unpacker.TRUE;
/*
这是一个 Doris Stream Load 示例,需要依赖
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
*/
public class DorisStreamLoader {
    // 1. For public-cloud users: use a Compute Node address and its HTTP port (8040).
    // 2. For open-source users: the FE address and FE http_port also work, but the
    //    client must be able to reach the BE nodes (stream load redirects to a BE).
    private final static String HOST = "127.1.1.77";
    private final static int PORT = 8030;
    // FIX: the original read `"t_" + 公司`, where 公司 is an undefined identifier and
    // does not compile; the database name is now a plain literal.
    private final static String DATABASE = "t_company";               // target database
    private final static String TABLE = "user";                       // target table
    private final static String USER = "root";                        // Doris user name
    private final static String PASSWD = "";                          // Doris password
    private final static String LOAD_FILE_NAME = "/data/user_4.json"; // local file to import

    private final static String loadUrl = String.format("http://%s:%s/api/%s/%s/_stream_load",
            HOST, PORT, DATABASE, TABLE);

    private final static HttpClientBuilder httpClientBuilder = HttpClients
            .custom()
            .setRedirectStrategy(new DefaultRedirectStrategy() {
                @Override
                protected boolean isRedirectable(String method) {
                    // When the target is an FE node, stream load answers with a 307
                    // redirect to a BE. PUT is not redirectable by default, so allow
                    // every method here. (Removed a stray debug print.)
                    return true;
                }
            });

    /**
     * Streams {@code file} into Doris via the HTTP stream-load API and returns the
     * parsed load result.
     *
     * @param file local JSON file (a JSON array of rows) to import
     * @return the stream-load response deserialized into a {@code DorisReqDTO}
     * @throws IOException if the HTTP status is not 200
     * @throws Exception   if the request or response handling fails
     */
    public DorisReqDTO load(File file) throws Exception {
        try (CloseableHttpClient client = httpClientBuilder.build()) {
            HttpPut put = new HttpPut(loadUrl);
            put.setHeader(HttpHeaders.EXPECT, "100-continue");
            put.setHeader(HttpHeaders.AUTHORIZATION, basicAuthHeader(USER, PASSWD));
            // Stream-load properties travel as headers; the label must be unique
            // per load job, hence the random suffix.
            put.setHeader("label", "label_" + StrUtil.uuid());
            put.setHeader("format", "json");
            put.setHeader("Content-Type", ContentType.APPLICATION_JSON.toString());
            // FIX: the original used Pack200.Unpacker.TRUE (Pack200 was removed in
            // JDK 14); the intended header value is simply the literal "true".
            put.setHeader("strip_outer_array", "true");
            // fuzzy_parse: all rows must share the same field order; Doris parses
            // the first row and accesses later rows by index, a 3-5x speedup.
            put.setHeader("fuzzy_parse", "true");
            put.setHeader("jsonpaths", "[\"$.siteid\",\"$.username\"]");
            put.setHeader("columns", "siteid,username,doris_update_time=current_timestamp()");
            put.setEntity(new FileEntity(file));
            try (CloseableHttpResponse response = client.execute(put)) {
                // FIX: removed response.setHeader(AUTHORIZATION, ...) — mutating a
                // *received* response has no effect; auth goes on the request only.
                String loadResult = "";
                if (response.getEntity() != null) {
                    loadResult = EntityUtils.toString(response.getEntity());
                }
                final int statusCode = response.getStatusLine().getStatusCode();
                if (statusCode != 200) {
                    throw new IOException(
                            String.format("Stream load failed. status: %s load result: %s", statusCode, loadResult));
                }
                System.out.println("Get load result: " + loadResult);
                return JSON.toJavaObject(JSON.parseObject(loadResult), DorisReqDTO.class);
            }
        }
    }

    /** Builds an HTTP Basic authentication header value for the given credentials. */
    private String basicAuthHeader(String username, String password) {
        final String tobeEncode = username + ":" + password;
        byte[] encoded = Base64.encodeBase64(tobeEncode.getBytes(StandardCharsets.UTF_8));
        // FIX: decode with an explicit charset; the no-arg String(byte[]) ctor uses
        // the platform default and could mangle the header on exotic locales.
        return "Basic " + new String(encoded, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) throws Exception {
        DorisStreamLoader loader = new DorisStreamLoader();
        loader.load(new File(LOAD_FILE_NAME));
        // loader.loadDemo(new File(LOAD_FILE_NAME));
    }

    /**
     * Minimal stream-load variant: same request flow as {@link #load(File)} but
     * without jsonpaths/columns mapping and without parsing the response body.
     *
     * @param file local JSON file to import
     * @throws IOException if the HTTP status is not 200
     * @throws Exception   if the request or response handling fails
     */
    public void loadDemo(File file) throws Exception {
        try (CloseableHttpClient client = httpClientBuilder.build()) {
            HttpPut put = new HttpPut(loadUrl);
            put.setHeader(HttpHeaders.EXPECT, "100-continue");
            put.setHeader(HttpHeaders.AUTHORIZATION, basicAuthHeader(USER, PASSWD));
            // FIX: label prefix unified with load() ("label_" + uuid) for consistent
            // job naming across both entry points.
            put.setHeader("label", "label_" + StrUtil.uuid());
            put.setHeader("format", "json");
            // Other optional stream-load headers, for reference:
            //   columns        - column mapping expression
            //   exec_mem_limit - per-load memory limit in bytes (default 2GB)
            //   merge_type     - APPEND (default): append all rows;
            //                    DELETE: delete rows matching these keys;
            //                    MERGE: DELETE semantics where the delete condition
            //                    holds, APPEND semantics for the rest.
            // A StringEntity could be used here instead to send arbitrary data.
            put.setEntity(new FileEntity(file));
            try (CloseableHttpResponse response = client.execute(put)) {
                String loadResult = "";
                if (response.getEntity() != null) {
                    loadResult = EntityUtils.toString(response.getEntity());
                }
                final int statusCode = response.getStatusLine().getStatusCode();
                if (statusCode != 200) {
                    throw new IOException(
                            String.format("Stream load failed. status: %s load result: %s", statusCode, loadResult));
                }
                System.out.println("Get load result: " + loadResult);
            }
        }
    }
}
// Doris Java Stream Load import example.
// (Blog-footer text removed — originally published 2024-05-15 09:08:01.)