Storm 1.2.1 wordcount: reliable word count

Project source code download:

https://download.csdn.net/download/adam_zs/10294019


In testing, the program was run 5 times; each time, the failed messages were re-sent.



SentenceSpout->SplitSentenceBolt->WordCountBolt->ReportBolt
The TopologyBuilder wires the components in the order above.
SentenceSpout keeps a ConcurrentHashMap<UUID, Values> pending that maps each emitted message's msgId to its values, so a failed message can be re-emitted.
In SplitSentenceBolt, a tuple that is received and processed successfully is acknowledged with this.outputCollector.ack(tuple); on failure, this.outputCollector.fail(tuple) is called.
The printed output shows that the msgId is empty in WordCountBolt and ReportBolt, and in testing, calling this.outputCollector.fail(tuple) in WordCountBolt or ReportBolt still makes the spout print #####[ack]######.
So the ack/fail calls in WordCountBolt and ReportBolt have no effect. The reason is that SplitSentenceBolt emits its word tuples without anchoring them to the incoming sentence tuple, so those tuples are not part of the spout tuple's tree and their ack/fail never reaches the spout (see the anchored-emit sketch after the SplitSentenceBolt code).
Summary: as written, this program only guarantees reliable message delivery from SentenceSpout to SplitSentenceBolt.

package com.wangzs.chapter1.wordcountreliable;

import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

/**
 * @title: Data source spout <br/>
 * @author: wangzs <br/>
 * @date: 2018-03-18
 */
public class SentenceSpout extends BaseRichSpout {

	private ConcurrentHashMap<UUID, Values> pending;
	private SpoutOutputCollector spoutOutputCollector;
	private String[] sentences = { "a b c d", "a b c ", "a b", "a" };

	@Override
	public void open(Map map, TopologyContext topologycontext, SpoutOutputCollector spoutoutputcollector) {
		this.spoutOutputCollector = spoutoutputcollector;
		this.pending = new ConcurrentHashMap<UUID, Values>();
	}

	@Override
	public void nextTuple() {
		// nextTuple() is called repeatedly, so each call re-emits all four
		// sentences, each with a fresh msgId that is cached in pending so the
		// tuple can be re-emitted from fail().
		for (String sentence : sentences) {
			Values values = new Values(sentence);
			UUID msgId = UUID.randomUUID();
			this.spoutOutputCollector.emit(values, msgId);
			this.pending.put(msgId, values);
			System.out.println("SentenceSpout==> " + values + " msgId=" + msgId);
		}
		Utils.sleep(1000);
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer outputfieldsdeclarer) {
		outputfieldsdeclarer.declare(new Fields("sentence"));
	}

	@Override
	public void ack(Object msgId) {
		// Fully processed: drop the cached values for this msgId.
		System.out.println("#####[ack]###### msgId=" + msgId + " values=" + this.pending.get(msgId));
		this.pending.remove(msgId);
	}

	@Override
	public void fail(Object msgId) {
		// Failed (or timed out): re-emit the cached values with the same msgId.
		System.out.println("#####[fail]###### msgId=" + msgId + " values=" + this.pending.get(msgId));
		this.spoutOutputCollector.emit(this.pending.get(msgId), msgId);
	}

}
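As written, fail() re-emits the cached values with the same msgId indefinitely. If bounded retries were wanted, the spout could additionally track an attempt count per msgId; a hypothetical sketch (retryCounts and MAX_RETRIES are not in the original code):

	private ConcurrentHashMap<UUID, Integer> retryCounts = new ConcurrentHashMap<UUID, Integer>();
	private static final int MAX_RETRIES = 3;

	@Override
	public void fail(Object msgId) {
		int attempts = retryCounts.getOrDefault(msgId, 0);
		if (attempts < MAX_RETRIES) {
			// Re-emit with the same msgId and count the attempt.
			retryCounts.put((UUID) msgId, attempts + 1);
			this.spoutOutputCollector.emit(this.pending.get(msgId), msgId);
		} else {
			// Give up on this message after MAX_RETRIES failures.
			this.pending.remove(msgId);
			retryCounts.remove(msgId);
		}
	}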
package com.wangzs.chapter1.wordcountreliable;

import java.util.Map;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

/**
 * @title: Split sentences into words <br/>
 * @author: wangzs <br/>
 * @date: 2018-03-18
 */
public class SplitSentenceBolt extends BaseRichBolt {
	private OutputCollector outputCollector;

	@Override
	public void execute(Tuple tuple) {
		String sentence = tuple.getStringByField("sentence");
		String[] words = sentence.split(" ");
		for (String word : words) {
			// Note: emitted without an anchor, so downstream ack/fail cannot
			// reach the spout (see the summary above).
			this.outputCollector.emit(new Values(word));
		}
		System.out.println("SplitSentenceBolt==> " + sentence + " msgId=" + tuple.getMessageId());
		if (sentence.equals("a b c d")) {
			// Deliberately fail one sentence to exercise SentenceSpout.fail().
			this.outputCollector.fail(tuple);
		} else {
			this.outputCollector.ack(tuple);
		}
	}

	@Override
	public void prepare(Map map, TopologyContext topologycontext, OutputCollector outputcollector) {
		this.outputCollector = outputcollector;
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer outputfieldsdeclarer) {
		outputfieldsdeclarer.declare(new Fields("word"));
	}

}
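The word tuples above are emitted without an anchor, which is why the downstream ack/fail calls never reach the spout. For the whole chain to be reliable, each word tuple would have to be anchored to the incoming sentence tuple. A minimal sketch of what an anchored execute() could look like (an alternative, not what the code above does):

	@Override
	public void execute(Tuple tuple) {
		String sentence = tuple.getStringByField("sentence");
		for (String word : sentence.split(" ")) {
			// Anchoring: passing the input tuple makes each word tuple part of
			// the spout tuple's tree, so a downstream fail() fails the whole tree.
			this.outputCollector.emit(tuple, new Values(word));
		}
		this.outputCollector.ack(tuple);
	}

With anchoring in place here (and again in WordCountBolt), a fail() in WordCountBolt or ReportBolt would propagate back to SentenceSpout.fail().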

package com.wangzs.chapter1.wordcountreliable;

import java.util.HashMap;
import java.util.Map;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

/**
 * @title: Word count <br/>
 * @author: wangzs <br/>
 * @date: 2018-03-18
 */
public class WordCountBolt extends BaseRichBolt {
	private OutputCollector outputCollector;
	private HashMap<String, Integer> counts = null;

	@Override
	public void prepare(Map map, TopologyContext topologycontext, OutputCollector outputcollector) {
		this.outputCollector = outputcollector;
		this.counts = new HashMap<String, Integer>();
	}

	@Override
	public void execute(Tuple tuple) {
		String word = tuple.getStringByField("word");
		Integer count = counts.get(word);
		if (count == null) {
			count = 0;
		}
		count++;
		this.counts.put(word, count);
		this.outputCollector.emit(new Values(word, count));
		this.outputCollector.ack(tuple);
		System.out.println("WordCountBolt==> " + word + " msgId=" + tuple.getMessageId());
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer outputfieldsdeclarer) {
		outputfieldsdeclarer.declare(new Fields("word", "count"));
	}

}
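The ReportBolt source is not included in this post. A minimal sketch consistent with how the topology wires it (a terminal bolt that records and prints the latest count per word) might look like this; the original implementation may differ:

package com.wangzs.chapter1.wordcountreliable;

import java.util.HashMap;
import java.util.Map;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

public class ReportBolt extends BaseRichBolt {
	private OutputCollector outputCollector;
	private HashMap<String, Integer> counts;

	@Override
	public void prepare(Map map, TopologyContext topologycontext, OutputCollector outputcollector) {
		this.outputCollector = outputcollector;
		this.counts = new HashMap<String, Integer>();
	}

	@Override
	public void execute(Tuple tuple) {
		// Record the latest count for each word, then ack the tuple.
		counts.put(tuple.getStringByField("word"), tuple.getIntegerByField("count"));
		this.outputCollector.ack(tuple);
		System.out.println("ReportBolt==> " + counts + " msgId=" + tuple.getMessageId());
	}

	@Override
	public void cleanup() {
		// Print the final counts when the local topology is killed.
		System.out.println("ReportBolt==> final counts: " + counts);
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer outputfieldsdeclarer) {
		// Terminal bolt: nothing to declare.
	}
}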

package com.wangzs.chapter1.wordcountreliable;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;
import org.apache.storm.utils.Utils;

/**
 * @title: Reliable word count topology <br/>
 * @author: wangzs <br/>
 * @date: 2018-03-18
 */
public class WordCountTopology {
	public static void main(String[] args) {
		SentenceSpout sentenceSpout = new SentenceSpout();
		SplitSentenceBolt splitSentenceBolt = new SplitSentenceBolt();
		WordCountBolt wordCountBolt = new WordCountBolt();
		ReportBolt reportBolt = new ReportBolt();

		TopologyBuilder builder = new TopologyBuilder();
		builder.setSpout("sentenceSpout-1", sentenceSpout);
		builder.setBolt("splitSentenceBolt-1", splitSentenceBolt).shuffleGrouping("sentenceSpout-1");
		builder.setBolt("wordCountBolt-1", wordCountBolt).fieldsGrouping("splitSentenceBolt-1", new Fields("word"));
		builder.setBolt("reportBolt-1", reportBolt).globalGrouping("wordCountBolt-1");

		Config config = new Config();
		LocalCluster cluster = new LocalCluster();
		// Submit to the in-process local cluster
		cluster.submitTopology("wordCountTopology-1", config, builder.createTopology());
		Utils.sleep(10000);
		cluster.killTopology("wordCountTopology-1");
		cluster.shutdown();
	}
}
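The topology runs with an empty Config. Storm also exposes settings that directly affect the reliability behaviour discussed above; a sketch of how they could be applied here (the values are arbitrary):

		Config config = new Config();
		// Tuples not fully acked within 10 seconds are failed back to the spout.
		config.setMessageTimeoutSecs(10);
		// Limit the number of un-acked spout tuples in flight per spout task.
		config.setMaxSpoutPending(100);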

Run output:


