Apache Flume
Flume开发者指南
Flume日志整合
引入依赖
<dependencies>
<!-- Flume RPC client SDK: provides RpcClient / RpcClientFactory / EventBuilder used below -->
<dependency>
<groupId>org.apache.flume</groupId>
<artifactId>flume-ng-sdk</artifactId>
<version>1.9.0</version>
</dependency>
<!-- JUnit 4 for the @Before/@Test/@After test classes -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- log4j appender that ships log events directly to Flume Avro sources -->
<dependency>
<groupId>org.apache.flume.flume-ng-clients</groupId>
<artifactId>flume-ng-log4jappender</artifactId>
<version>1.9.0</version>
</dependency>
<!-- SLF4J binding to log4j 1.2, required by the log4j integration demo -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.5</version>
</dependency>
</dependencies>
启动flume
[root@Centos ~]# cd /usr/apache-flume-1.9.0-bin/
[root@Centos apache-flume-1.9.0-bin]# ./bin/flume-ng version
Flume 1.9.0
Source code repository: https://git-wip-us.apache.org/repos/asf/flume.git
Revision: d4fcab4f501d41597bc616921329a4339f73585e
Compiled by fszabo on Mon Dec 17 20:45:25 CET 2018
From source with checksum 35db629a3bda49d23e9b3690c80737f9
启动avro Source
创建 example2.properties 配置文件
[root@Centos conf]# vim example2.properties
# Declare the basic components: Source, Channel, Sink (example2.properties)
a1.sources = s1
a1.sinks = sk1
a1.channels = c1
# Source: Avro RPC server that receives events from RpcClient senders
# (type is avro, not a raw socket/netcat source)
a1.sources.s1.type = avro
a1.sources.s1.bind = Centos
a1.sources.s1.port = 44444
# Sink: print received events to the log console
a1.sinks.sk1.type = logger
# Channel: in-memory buffer between source and sink
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Wire the components together
a1.sources.s1.channels = c1
a1.sinks.sk1.channel = c1
运行flume,监听配置的44444端口并将事件输出到控制台
[root@Centos apache-flume-1.9.0-bin]# ./bin/flume-ng agent --conf conf/ --name a1 --conf-file conf/example2.properties -Dflume.root.logger=INFO,console
单机连接测试类
package com.baizhi.jsy;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;

/**
 * Single-host Flume Avro RPC client test: sends one event to the agent
 * configured in example2.properties (avro source on Centos:44444).
 */
public class RpcClientTests {
    private RpcClient client;

    @Before
    public void before() {
        // Default NettyAvroRpcClient bound to a single host/port.
        client = RpcClientFactory.getDefaultInstance("Centos", 44444);
    }

    @Test
    public void testSend() throws EventDeliveryException {
        // Use an explicit charset: getBytes() without one falls back to the
        // platform default and can garble the Chinese body on non-UTF-8 JVMs.
        Event event = EventBuilder.withBody(
                "this is a beautiful girl,很漂亮的姑娘!".getBytes(StandardCharsets.UTF_8));
        HashMap<String, String> header = new HashMap<>();
        header.put("name", "张佳洋");
        event.setHeaders(header);
        client.append(event);
    }

    @After
    public void after() {
        // Guard against NPE when before() failed before the client was created.
        if (client != null) {
            client.close();
        }
    }
}
结果
集群连接测试类
Failover
package com.baizhi.jsy;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Properties;

/**
 * Failover Flume RPC client test: the client sends to the first reachable
 * host alias and transparently fails over to the next one on error.
 */
public class RpcClientTests02Failover {
    private RpcClient client;

    @Before
    public void before() {
        Properties props = new Properties();
        props.put("client.type", "default_failover");
        // Space-separated list of user-chosen host aliases.
        props.put("hosts", "h1 h2 h3");
        // host:port pair for each alias (all point at the same agent in this demo).
        props.put("hosts.h1", "Centos:44444");
        props.put("hosts.h2", "Centos:44444");
        props.put("hosts.h3", "Centos:44444");
        client = RpcClientFactory.getInstance(props);
    }

    @Test
    public void testSend() throws EventDeliveryException {
        // Explicit UTF-8: getBytes() without a charset uses the platform
        // default and can garble the Chinese body on non-UTF-8 JVMs.
        Event event = EventBuilder.withBody(
                "this is a beautiful girl,很漂亮的姑娘!".getBytes(StandardCharsets.UTF_8));
        HashMap<String, String> header = new HashMap<>();
        header.put("name", "张佳洋Failover");
        event.setHeaders(header);
        client.append(event);
    }

    @After
    public void after() {
        // Guard against NPE when before() failed before the client was created.
        if (client != null) {
            client.close();
        }
    }
}
结果
LoadBalancing
package com.baizhi.jsy;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Properties;

/**
 * Load-balancing Flume RPC client test: each append() is routed to one of
 * the configured host aliases according to the chosen selector.
 */
public class RpcClientTests02LoadBalancing {
    private RpcClient client;

    @Before
    public void before() {
        Properties props = new Properties();
        props.put("client.type", "default_loadbalance");
        // Space-separated list of user-chosen host aliases.
        props.put("hosts", "h1 h2 h3");
        // host:port pair for each alias (all point at the same agent in this demo).
        props.put("hosts.h1", "Centos:44444");
        props.put("hosts.h2", "Centos:44444");
        props.put("hosts.h3", "Centos:44444");
        // Selector: "random" or "round_robin".
        props.put("host-selector", "random");
        // Back off from failed hosts for up to maxBackoff ms (disabled by default;
        // a maxBackoff of 0 effectively means 30000 ms).
        props.put("backoff", "true");
        props.put("maxBackoff", "10000");
        client = RpcClientFactory.getInstance(props);
    }

    @Test
    public void testSend() throws EventDeliveryException {
        // Explicit UTF-8: getBytes() without a charset uses the platform
        // default and can garble the Chinese body on non-UTF-8 JVMs.
        Event event = EventBuilder.withBody(
                "this is a beautiful girl,很漂亮的姑娘!".getBytes(StandardCharsets.UTF_8));
        HashMap<String, String> header = new HashMap<>();
        header.put("name", "张佳洋default_loadbalance");
        event.setHeaders(header);
        client.append(event);
    }

    @After
    public void after() {
        // Guard against NPE when before() failed before the client was created.
        if (client != null) {
            client.close();
        }
    }
}
结果
log4j集成
log4j.properties
# Appender that ships log4j events straight to Flume Avro sources, load-balanced.
log4j.appender.flume=org.apache.flume.clients.log4jappender.LoadBalancingLog4jAppender
# Space-separated host:port list of Avro sources (all the same agent in this demo).
log4j.appender.flume.Hosts = Centos:44444 Centos:44444 Centos:44444
# Host selection strategy: RANDOM or ROUND_ROBIN.
log4j.appender.flume.Selector = RANDOM
# Route loggers under com.baizhi at DEBUG and above to the flume appender.
log4j.logger.com.baizhi = DEBUG,flume
log4j.appender.flume.layout=org.apache.log4j.PatternLayout
log4j.appender.flume.layout.ConversionPattern=%p %d %c %m %n
测试类
package com.baizhi;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Emits one message at each log level so the log4j-to-Flume appender
 * integration can be verified on the agent's console.
 */
public class TestLog {
    private static final Log LOGGER = LogFactory.getLog(TestLog.class);

    public static void main(String[] args) {
        LOGGER.debug("你好!_debug");
        LOGGER.info("你好!_info");
        LOGGER.warn("你好!_warn");
        LOGGER.error("你好!_error");
    }
}
结果