Fetching data from a Kafka topic
import java.util.Arrays;

import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.topology.TopologyBuilder;
import storm.kafka.BrokerHosts;
import storm.kafka.KafkaSpout;
import storm.kafka.SpoutConfig;
import storm.kafka.StringScheme;
import storm.kafka.ZkHosts;

// ZooKeeper ensemble that the Kafka brokers are registered with
String zks = "x.x.x.x:2181,x.x.x.x:2181,x.x.x.x:2181";
String topic = "test";
// root path in ZooKeeper under which the spout stores its offsets
String zkRoot = "/storm";
// consumer id of this spout (offsets are kept under zkRoot + "/" + id)
String id = "word";
BrokerHosts brokerHosts = new ZkHosts(zks);
/**
 * 1. Configure the Spout
 */
SpoutConfig spoutConf = new SpoutConfig(brokerHosts, topic, zkRoot, id);
// deserialize each Kafka message as a plain string (emitted as a single field named "str")
spoutConf.scheme = new SchemeAsMultiScheme(new StringScheme());
// false: resume from the offset recorded in ZooKeeper instead of re-reading the whole topic
spoutConf.forceFromStart = false;
// ZooKeeper nodes where the spout stores its consumed offsets
spoutConf.zkServers = Arrays.asList(new String[] {"x.x.x.x", "x.x.x.x", "x.x.x.x"});
spoutConf.zkPort = 2181;
// if no offset has been stored yet, start reading from the latest offset
spoutConf.startOffsetTime = kafka.api.OffsetRequest.LatestTime();
// maximum time (ms) a Kafka fetch request will wait for data
spoutConf.fetchMaxWait = 10000000;
/**
 * 2. Create the TopologyBuilder and register the Spout
 * (KafkaSpout comes from storm-kafka-0.9.2-incubating.jar:
 *  public class storm.kafka.KafkaSpout extends backtype.storm.topology.base.BaseRichSpout)
 */
TopologyBuilder builder = new TopologyBuilder();
builder.setSpout("KafkaSpout-reader", new KafkaSpout(spoutConf), 1);
// downstream processing bolt ("xxx" is a placeholder component name; see the PrintBolt sketch below)
builder.setBolt("xxx", new KafkaBolt(), 3).shuffleGrouping("KafkaSpout-reader");
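
The builder above only wires the components together; nothing runs until the topology is handed to Storm. A minimal sketch of a local-mode test run follows (the imports backtype.storm.Config, backtype.storm.LocalCluster and backtype.storm.utils.Utils are assumed, and the topology name "kafka-reader" and the 60-second run time are arbitrary choices, not from the original); on a real cluster you would use StormSubmitter.submitTopology instead:

Config conf = new Config();
conf.setDebug(false);
LocalCluster cluster = new LocalCluster();
cluster.submitTopology("kafka-reader", conf, builder.createTopology());
// let the topology consume messages for a while, then shut it down
Utils.sleep(60000);
cluster.killTopology("kafka-reader");
cluster.shutdown();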
Handing the fetched data to Storm for processing
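
As one concrete illustration of this processing step, here is a minimal sketch of a bolt that could sit behind the KafkaSpout (the class name PrintBolt, the log-and-forward behaviour, and the output field name "line" are assumptions for illustration, not part of the original code; StringScheme emits each Kafka message as a single field named "str"):

import java.util.Map;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class PrintBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // StringScheme emits each Kafka message as a single field named "str"
        String line = input.getStringByField("str");
        System.out.println("received from kafka: " + line);
        // forward the message and ack the tuple so the spout can commit its offset
        collector.emit(input, new Values(line));
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("line"));
    }
}

Such a bolt would take the place of the placeholder bolt above, e.g. builder.setBolt("print-bolt", new PrintBolt(), 3).shuffleGrouping("KafkaSpout-reader");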