Maven 依赖
<dependency>
<groupId>com.aliyun.openservices</groupId>
<artifactId>aliyun-log</artifactId>
<version>0.6.0</version>
</dependency>
实现代码
package sdksample;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import java.util.Date;
import com.aliyun.openservices.log.Client;
import com.aliyun.openservices.log.common.*;
import com.aliyun.openservices.log.exception.*;
import com.aliyun.openservices.log.request.*;
import com.aliyun.openservices.log.response.*;
import com.aliyun.openservices.log.common.LogContent;
import com.aliyun.openservices.log.common.LogGroupData;
import com.aliyun.openservices.log.common.LogItem;
import com.aliyun.openservices.log.common.Consts.CursorMode;
public class sdksample {
/**
 * End-to-end demo of the Aliyun Log Service Java SDK: lists the logstores
 * under a project, writes sample logs, reads them back by shard cursor, then
 * (index feature required) queries histograms and pages through log data.
 *
 * @param args unused
 * @throws LogException         if any Log Service API call fails
 * @throws InterruptedException if the thread is interrupted while sleeping
 */
public static void main(String args[]) throws LogException,
InterruptedException {
String endpoint = "<log_service_endpoint>"; // endpoint matching the region
// of the project created earlier
String accessKeyId = "<your_access_key_id>"; // your Aliyun AccessKeyId
String accessKeySecret = "<your_access_key_secret>"; // your Aliyun AccessKeySecret
String project = "<project_name>"; // project name created earlier
String logstore = "<logstore_name>"; // logstore name created earlier
// Build a client instance.
Client client = new Client(endpoint, accessKeyId, accessKeySecret);
// List all logstore names under the project.
int offset = 0;
int size = 100;
String logStoreSubName = "";
ListLogStoresRequest req1 = new ListLogStoresRequest(project, offset,
size, logStoreSubName);
ArrayList<String> logStores = client.ListLogStores(req1).GetLogStores();
System.out.println("ListLogs:" + logStores.toString() + "\n");
// Write logs: send 10 batches, 10 log lines per batch.
String topic = "";
String source = "";
for (int i = 0; i < 10; i++) {
Vector<LogItem> logGroup = new Vector<LogItem>();
for (int j = 0; j < 10; j++) {
// LogItem timestamps are Unix seconds.
LogItem logItem = new LogItem(
(int) (System.currentTimeMillis() / 1000));
logItem.PushBack("index", String.valueOf(i * 10 + j));
logGroup.add(logItem);
}
PutLogsRequest req2 = new PutLogsRequest(project, logstore, topic,
source, logGroup);
client.PutLogs(req2);
}
// Read back everything written to shard 0 in the last minute.
int shard_id = 0;
long curTimeInSec = System.currentTimeMillis() / 1000;
GetCursorResponse cursorRes = client.GetCursor(project, logstore,
shard_id, curTimeInSec - 60);
String beginCursor = cursorRes.GetCursor();
cursorRes = client.GetCursor(project, logstore, shard_id,
CursorMode.END);
String endCursor = cursorRes.GetCursor();
String curCursor = beginCursor;
while (!curCursor.equals(endCursor)) {
int loggroup_count = 2; // read two log groups per call
BatchGetLogResponse logDataRes = client.BatchGetLog(project,
logstore, shard_id, loggroup_count, curCursor);
List<LogGroupData> logGroups = logDataRes.GetLogGroups();
for (LogGroupData logGroup : logGroups) {
System.out.println("Source:" + logGroup.GetSource());
System.out.println("Topic:" + logGroup.GetTopic());
for (LogItem log : logGroup.GetAllLogs()) {
System.out.println("LogTime:" + log.GetTime());
List<LogContent> contents = log.GetLogContents();
for (LogContent content : contents) {
System.out.println(content.GetKey() + ":"
+ content.GetValue());
}
}
}
String next_cursor = logDataRes.GetNextCursor();
System.out.println("The Next cursor:" + next_cursor);
curCursor = next_cursor;
}
// !!! IMPORTANT: the query APIs below require the index feature to be enabled !!!
// Wait one minute so the logs become queryable. main() already declares
// InterruptedException, so we must not swallow it (the original caught it
// and only printed the stack trace, losing the interrupt).
Thread.sleep(60 * 1000);
// Query the log distribution (histograms). IsCompleted() == true means the
// result is accurate; otherwise retry after a short pause.
String query = "index";
int from = (int) (System.currentTimeMillis() / 1000 - 300);
int to = (int) (System.currentTimeMillis() / 1000);
GetHistogramsResponse res3 = null;
while (true) {
GetHistogramsRequest req3 = new GetHistogramsRequest(project,
logstore, topic, query, from, to);
res3 = client.GetHistograms(req3);
if (res3 != null && res3.IsCompleted()) {
break;
}
Thread.sleep(200);
}
System.out.println("Total count of logs is " + res3.GetTotalCount());
for (Histogram ht : res3.GetHistograms()) {
System.out.printf("from %d, to %d, count %d.\n", ht.GetFrom(),
ht.GetTo(), ht.GetCount());
}
// Page through the matching log data, 10 lines per offset; retry each
// page up to 3 times on incomplete results.
long total_log_lines = res3.GetTotalCount();
int log_offset = 0;
int log_line = 10;
while (log_offset <= total_log_lines) {
GetLogsResponse res4 = null;
for (int retry_time = 0; retry_time < 3; retry_time++) {
GetLogsRequest req4 = new GetLogsRequest(project, logstore,
from, to, topic, query, log_offset, log_line, false);
res4 = client.GetLogs(req4);
if (res4 != null && res4.IsCompleted()) {
break;
}
Thread.sleep(200);
}
// Guard against all retries failing: the original dereferenced res4
// unconditionally and could throw a NullPointerException here.
if (res4 != null) {
System.out.println("Read log count:"
+ String.valueOf(res4.GetCount()));
} else {
System.out.println("Failed to read logs at offset " + log_offset
+ " after 3 retries.");
}
log_offset += log_line;
}
}
}
Util工具类
package cn.dhc.ezs.util;
import com.aliyun.openservices.log.Client;
import com.aliyun.openservices.log.common.*;
import com.aliyun.openservices.log.exception.LogException;
import com.aliyun.openservices.log.request.GetHistogramsRequest;
import com.aliyun.openservices.log.request.GetLogsRequest;
import com.aliyun.openservices.log.request.ListLogStoresRequest;
import com.aliyun.openservices.log.request.PutLogsRequest;
import com.aliyun.openservices.log.response.BatchGetLogResponse;
import com.aliyun.openservices.log.response.GetCursorResponse;
import com.aliyun.openservices.log.response.GetHistogramsResponse;
import com.aliyun.openservices.log.response.GetLogsResponse;
import java.util.ArrayList;
import java.util.Date;
import java.util.Vector;
/**
 * Utility for sending log lines to Aliyun Log Service.
 *
 * Not thread-safe: the shared static {@link Client} is created without
 * synchronization.
 */
public class SendLogUtil {
private static Client client;
// Endpoint matching the region where the project was created.
private static String endpoint = "cn-beijing.log.aliyuncs.com";
private static String accessKeyId = ""; // your Aliyun AccessKeyId
private static String accessKeySecret = ""; // your Aliyun AccessKeySecret
private static String project = "nhbroject"; // project name created earlier
private static String logstore = "nhbstore"; // logstore name created earlier
private static String topic = "";
private static String source = "";
/**
 * Lists the logstores under the configured project, then sends one sample
 * log line via {@link #sendLog()}.
 *
 * @param args unused
 * @throws LogException         if any Log Service API call fails
 * @throws InterruptedException kept for backward compatibility with callers
 */
public static void main(String args[]) throws LogException,
InterruptedException {
// Build the shared client instance.
client = new Client(endpoint, accessKeyId, accessKeySecret);
// List all logstore names under the project.
int offset = 0;
int size = 100;
String logStoreSubName = "";
ListLogStoresRequest req1 = new ListLogStoresRequest(project, offset,
size, logStoreSubName);
ArrayList<String> logStores = client.ListLogStores(req1).GetLogStores();
System.out.println("ListLogs:" + logStores.toString() + "\n");
sendLog();
}
/**
 * Sends a single log line (key "action") to the configured logstore.
 *
 * Lazily creates the shared client if {@code main} has not run, so calling
 * this method directly no longer throws a NullPointerException (the
 * original only initialized {@code client} inside {@code main}).
 *
 * @throws LogException if the PutLogs call fails
 */
public static void sendLog() throws LogException {
if (client == null) {
client = new Client(endpoint, accessKeyId, accessKeySecret);
}
// Build the log group locally instead of via a shared static field,
// avoiding unnecessary mutable class-level state.
Vector<LogItem> logGroup = new Vector<>();
LogItem logItem = new LogItem((int) (System.currentTimeMillis() / 1000));
logItem.PushBack("action", "我发了一个日志");
logGroup.add(logItem);
PutLogsRequest req2 = new PutLogsRequest(project, logstore, topic,
source, logGroup);
client.PutLogs(req2);
}
}