@Override
public Event intercept(Event event) {
    logger.info("----------intercept(Event event)方法执行,处理单个event");
    logger.info("----------接收到的自定义拦截器参数值param值为:" + param);
    // Decode the raw event body with an explicit charset; the same charset is
    // used again below when re-encoding, so the round trip is lossless.
    String body = new String(event.getBody(), Charsets.UTF_8);
    logger.info("----------行记录" + body);
    // NOTE(review): a fresh parser is created for every event, so any
    // cross-line collection state kept inside the parser (e.g. a multi-line
    // statement being accumulated) is lost between events. Consider holding a
    // single parser instance as a field of the interceptor — TODO confirm.
    AbstractOptionLog log = new WMPPOptionLog();
    String sqlStatement = log.parseLog(body);
    if (sqlStatement == null) {
        // Returning null tells Flume to drop this event (the parser has not
        // produced a complete record for this line).
        return null;
    }
    // Fix: encode with UTF-8 explicitly. The original called getBytes() with
    // no charset, which uses the platform default and can corrupt non-ASCII
    // text (the body was decoded as UTF-8 above).
    event.setBody(sqlStatement.getBytes(Charsets.UTF_8));
    return event;
}
package cn.com.lgh.mppdb;
import cn.com.lgh.operation.AbstractOptionLog;
import com.alibaba.fastjson.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Parses database log lines (Postgres/MPP-style output containing
 * "STATEMENT:" / "CONTEXT:" markers) into JSON records.
 *
 * <p>This parser is STATEFUL: {@code sqlStatement}, {@code multipleRows} and
 * {@code isEnd} carry partial results between successive {@link #parseLog}
 * calls, so one instance must be fed consecutive log lines in order.
 * Not thread-safe.
 */
public class WMPPOptionLog extends AbstractOptionLog {
// Logger used to trace the execution order of methods while testing.
private static final Logger logger = LoggerFactory.getLogger(WMPPOptionLog.class);
// JSON record being built; accumulates text across calls when collecting a
// multi-line statement.
private String sqlStatement=null ;
private boolean multipleRows =false;// by default, do not collect log text across multiple lines
private boolean isEnd=false;// whether collection of the current record has finished
public String getSqlStatement() {
return sqlStatement;
}
public void setSqlStatement(String sqlStatement) {
this.sqlStatement = sqlStatement;
}
public boolean isMultipleRows() {
return multipleRows;
}
public void setMultipleRows(boolean multipleRows) {
this.multipleRows = multipleRows;
}
public boolean isEnd() {
return isEnd;
}
public void setEnd(boolean end) {
isEnd = end;
}
/**
 * Parses one log line, updating internal collection state.
 *
 * @param begainString one raw log line
 * @return the completed JSON record when this line finishes a record,
 *         otherwise {@code null} (collection still in progress)
 */
@Override
public String parseLog(String begainString) {
// Split logs according to their format: some records need multi-line
// collection, some are split out of a single line, and some only need
// the current line appended to an in-progress record.
if (begainString.indexOf("STATEMENT:")>0){
// Single-line record: everything needed is on this one line.
Record record= pu(begainString);
record.setSql(sql1(begainString));
sqlStatement= JSONObject.toJSONString(record);
isEnd=true;// mark single-line collection as finished
logger.info("----------单行记录"+sqlStatement);
}else if (begainString.indexOf("CONTEXT:")>0){
// "CONTEXT:" marks the start of a record that spans multiple lines.
multipleRows=true;// begin cross-line collection
Record record= pu(begainString);
record.setSql(sql4(begainString));
sqlStatement=JSONObject.toJSONString(record);
logger.info("----------跨行开始拼装行记录"+sqlStatement);
}else if (begainString.indexOf("RETURN QUERY")>0){
// "RETURN QUERY" marks the end of a multi-line record.
logger.info("----------跨行结束拼装行记录"+sqlStatement);
multipleRows=false;// stop cross-line collection
isEnd=true;// mark collection as finished
// NOTE(review): sqlStatement is cleared BEFORE the isEnd check below,
// so this branch returns null and the accumulated multi-line record is
// logged but never emitted downstream — confirm this is intentional.
sqlStatement=null;// clear the accumulated cross-line text
}else{
// Neither a marker line: append to the in-progress record if (and only
// if) we are currently collecting across lines.
sqlStatement=sql5(sqlStatement,begainString,multipleRows);
}
if (isEnd){
isEnd=false;// collection done; reset so the next line starts fresh
return sqlStatement;
}else{
return null;
}
}
// Plain data holder serialized to JSON via fastjson (needs the public
// getters/setters and no-arg constructor below).
static class Record{
private String data;
private String session;
private String db;
private String pid;
// presumably the application/user name from the log prefix — TODO confirm
private String a_name;
private String transactionId;
private String n_name;
private String e_name;
private String sql;
public String getSql() {
return sql;
}
public void setSql(String sql) {
this.sql = sql;
}
public Record() {
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public String getSession() {
return session;
}
public void setSession(String session) {
this.session = session;
}
public String getDb() {
return db;
}
public void setDb(String db) {
this.db = db;
}
public String getPid() {
return pid;
}
public void setPid(String pid) {
this.pid = pid;
}
public String getA_name() {
return a_name;
}
public void setA_name(String a_name) {
this.a_name = a_name;
}
public String getTransactionId() {
return transactionId;
}
public void setTransactionId(String transactionId) {
this.transactionId = transactionId;
}
public String getN_name() {
return n_name;
}
public void setN_name(String n_name) {
this.n_name = n_name;
}
public String getE_name() {
return e_name;
}
public void setE_name(String e_name) {
this.e_name = e_name;
}
}
/**
 * Extracts user-operation metadata from the log-line prefix (the text before
 * "STATEMENT:", or the whole line if that marker is absent).
 *
 * The prefix is split on single spaces and mapped positionally: tokens 0-2
 * form the timestamp ({@code data}), then session, db, pid, a_name,
 * transactionId, n_name, e_name. Missing positions stay as empty strings.
 *
 * @param content one raw log line
 * @Author: LGH
 * @CreateDate: 2019/3/20 11:07
 */
public static Record pu(String content){
Record record =new Record();
String s1=content.split("STATEMENT:")[0];
String[] group=s1.split(" ");
String data=null;
String session="";
String db="";
String pid="";
String a_name="";
String transactionId="";
String n_name="";
String e_name="";
for (int i=0;i<group.length;i++){
if (i<3){
// First three tokens are joined with spaces to rebuild the timestamp.
if (data==null){
data=group[i];
}else {
data=data+" "+group[i];
}
}
if (i==3){
session=group[i] ;
}else if (i==4){
db=group[i] ;
}else if (i==5){
pid=group[i] ;
}else if (i==6){
a_name=group[i] ;
}else if (i==7){
transactionId=group[i] ;
}else if (i==8){
n_name=group[i] ;
}else if (i==9){
e_name=group[i] ;
}
}
record.setData(data);
record.setSession(session);
record.setDb(db);
record.setPid(pid);
record.setA_name(a_name);
record.setN_name(n_name);
record.setTransactionId(transactionId);
record.setE_name(e_name);
return record;
}
// Returns the text after "STATEMENT:".
// NOTE(review): throws ArrayIndexOutOfBoundsException if nothing follows the
// marker ("....STATEMENT:" at end of line) — callers only invoke this after
// indexOf("STATEMENT:")>0, which does not rule that case out.
public static String sql1(String content){
return content.split("STATEMENT:")[1];
}
// Returns the text after "SQL statement" when present, otherwise the whole line.
public static String sql4(String content){
if (content.indexOf("SQL statement")>0){
content=content.split("SQL statement")[1];
}
return content;
}
// When collecting across lines, appends content to the accumulated text;
// otherwise replaces it with the current line.
public static String sql5(String oldmes,String content,boolean multipleRows){
if (multipleRows){
return oldmes+" "+content;
}else{
return content;
}
}
}
#agent
customInterceptor.sources=r1
customInterceptor.channels=c1
customInterceptor.sinks=s1
#source
customInterceptor.sources.r1.type=spooldir
customInterceptor.sources.r1.spoolDir=/opt/module/logs/log
customInterceptor.sources.r1.consumeOrder=youngest
customInterceptor.sources.r1.recursiveDirectorySearch=false
customInterceptor.sources.r1.deletePolicy=immediate
customInterceptor.sources.r1.pollDelay=500
#source1-interceptor
customInterceptor.sources.r1.interceptors=i1
customInterceptor.sources.r1.interceptors.i1.type=cn.com.lgh.interceptor.CustomInterceptor$Builder
customInterceptor.sources.r1.interceptors.i1.param=parameter
#channel
customInterceptor.channels.c1.type=memory
customInterceptor.channels.c1.capacity=1000
customInterceptor.channels.c1.transactionCapacity=100
#sink
#设置Kafka接收器
customInterceptor.sinks.s1.type= org.apache.flume.sink.kafka.KafkaSink
#设置Kafka的broker地址和端口号
customInterceptor.sinks.s1.brokerList=192.168.0.20:9092
#设置Kafka的Topic
customInterceptor.sinks.s1.topic=first5
#设置序列化方式
customInterceptor.sinks.s1.serializer.class=kafka.serializer.StringEncoder
#bind the source and sink to the channel
customInterceptor.sources.r1.channels=c1
customInterceptor.sinks.s1.channel=c1
上传自己写的拦截器jar包到flume下面的lib目录,按照自己的配置启动flume
./flume-ng agent -c ./ -f ../conf/customInterceptor2.conf -n customInterceptor | grep INFO