创建工程,导入相应的jar包
拓扑主类
package cn.itcast.randomword.topo;
import cn.itcast.randomword.bolt.TransferBolt;
import cn.itcast.randomword.bolt.WritableBolt;
import cn.itcast.randomword.spout.RandomSpout;
import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.AlreadyAliveException;
import backtype.storm.generated.InvalidTopologyException;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
/**
 * Topology driver: wires the spout and the two bolts together and
 * submits the assembled topology to the Storm cluster.
 * @author Administrator
 */
public class TopoMain {

    public static void main(String[] args) throws Exception {
        // Builder that assembles the component graph.
        TopologyBuilder builder = new TopologyBuilder();

        // Data source: 4 executors producing random words.
        builder.setSpout("random", new RandomSpout(), 4);

        // Intermediate bolt: 4 executors / 8 tasks, randomly distributed input.
        builder.setBolt("transfer", new TransferBolt(), 4).shuffleGrouping("random").setNumTasks(8);

        // Sink bolt: 8 executors; same "word" value always routed to the same task.
        builder.setBolt("writer", new WritableBolt(), 8).fieldsGrouping("transfer", new Fields("word"));

        // Runtime configuration for the topology.
        Config conf = new Config();
        conf.setNumWorkers(4);  // number of worker processes
        conf.setNumAckers(0);   // acking disabled: no tuple tracking/reliability
        conf.setDebug(false);

        // Submit to the cluster under a fixed topology name.
        StormSubmitter.submitTopology("comp-test-1", conf, builder.createTopology());
    }
}
spouts(数据源)
package cn.itcast.randomword.spout;
import java.util.Map;
import java.util.Random;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import backtype.storm.utils.Utils;
/**
 * Spout that periodically emits one randomly chosen word to the
 * downstream bolt.
 * @author Administrator
 */
public class RandomSpout extends BaseRichSpout {

    private static final long serialVersionUID = -4287209449750623371L;

    // Collector used to emit tuples (comparable to Hadoop's context);
    // outgoing data is wrapped in a Values instance.
    private SpoutOutputCollector collector;
    private String[] words = {"storm","hadoop","flume","hive"};
    private Random random = new Random();

    /**
     * Called in a loop by Storm for the lifetime of the topology:
     * sleeps 500 ms, then emits one word picked uniformly at random.
     */
    @Override
    public void nextTuple() {
        Utils.sleep(500);
        int index = random.nextInt(words.length);
        collector.emit(new Values(words[index]));
    }

    /**
     * One-time initialization hook, invoked after the spout instance is
     * constructed; stores the collector for later use in nextTuple().
     */
    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    /**
     * Declares the field names of emitted tuples — the "schema header"
     * consumed by the downstream component.
     */
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declare) {
        declare.declare(new Fields("str"));
    }
}
bolt 处理逻辑
package cn.itcast.randomword.bolt;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
/**
 * Pass-through bolt: reads the "str" field from each incoming tuple and
 * re-emits the value unchanged under the output field name "word".
 * @author Administrator
 */
public class TransferBolt extends BaseBasicBolt {

    private static final long serialVersionUID = 4223708336037089125L;

    /**
     * Invoked once per incoming tuple for as long as upstream keeps
     * sending data.
     */
    @Override
    public void execute(Tuple tuple, BasicOutputCollector collector) {
        // The tuple wraps the data received from the upstream spout.
        String value = tuple.getStringByField("str");
        collector.emit(new Values(value));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declare) {
        declare.declare(new Fields("word"));
    }
}
输出bolt
package cn.itcast.randomword.bolt;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import java.util.UUID;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;
/**
 * Sink bolt that appends every received word, one per line, to a
 * uniquely named file under /home/itcast/.
 */
public class WritableBolt extends BaseBasicBolt {

    private static final long serialVersionUID = -6586283337287975719L;

    // Opened in prepare(); null until then (the bolt is serialized and
    // shipped to workers before prepare() runs, so the field is never
    // serialized in a non-null state).
    private FileWriter writer = null;

    /**
     * Opens the output file once per task. The random UUID in the file
     * name keeps parallel task instances from clobbering each other.
     */
    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        try {
            writer = new FileWriter("/home/itcast/" + UUID.randomUUID().toString());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Writes each incoming word on its own line and flushes immediately
     * so output is visible while the topology is still running.
     */
    @Override
    public void execute(Tuple tuple, BasicOutputCollector collector) {
        String s = tuple.getString(0);
        try {
            writer.write(s);
            writer.write("\n");
            writer.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Fix: release the file handle on bolt shutdown. The original never
     * overrode cleanup(), so the FileWriter opened in prepare() leaked.
     */
    @Override
    public void cleanup() {
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Terminal bolt: emits nothing, so no output fields are declared.
     */
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // intentionally empty — this bolt is a pure sink
    }
}