任务:将得到的字符串更改为大写,添加后缀后写入文件
1:spout类
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;
import java.util.Map;
import java.util.Random;
//tuple类,拿数据
// Spout component: repeatedly emits a tuple built from a random element of a
// fixed string array (in real use the data could come from e.g. a Kafka queue).
public class MySpout extends BaseRichSpout {
    private SpoutOutputCollector collector;
    // Reuse one Random instead of allocating a new one on every nextTuple() call.
    private final Random random = new Random();
    String string[] = {"a", "b", "c"};

    // Initialization: called once when this spout task starts.
    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        // BUG FIX: the original wrote "this.collector = collector" — a
        // self-assignment (the parameter is named spoutOutputCollector) that
        // left the field null and caused an NPE in nextTuple().
        this.collector = spoutOutputCollector;
    }

    // Called in a loop by Storm; emits one tuple downstream per call.
    @Override
    public void nextTuple() {
        // Pick a random element from the local array.
        int index = random.nextInt(string.length);
        String who = string[index];
        // Wrap the values in a tuple and send it to the next component.
        // NOTE: all three values are Strings; downstream bolts must read
        // them with getString(), not getInteger().
        collector.emit(new Values(who, "001", "12"));
        Utils.sleep(200);
    }

    // Declare the field names of the tuples this spout emits (default stream).
    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("name", "id", "age"));
    }
}
2:一级bolt
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
// First-level bolt: upper-cases the "name" field of each incoming tuple and
// forwards the result.
public class MyBolt extends BaseBasicBolt {
    // Business logic; invoked once per incoming tuple.
    @Override
    public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
        // Read the incoming name field.
        // BUG FIX: the original also called tuple.getInteger(1) and
        // getInteger(2), but MySpout emits those fields as Strings
        // ("001", "12"), so getInteger() throws ClassCastException at
        // runtime. The values were unused, so the broken reads are removed.
        String name = tuple.getString(0);
        // Convert to upper case.
        String upString = name.toUpperCase();
        // Emit the transformed value downstream.
        basicOutputCollector.emit(new Values(upString));
    }

    // Declare the output field name for the emitted tuple.
    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("upname"));
    }
}
3:二级bolt
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Tuple;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import java.util.UUID;
// Second-level bolt: appends a "_ok" suffix to each incoming value and writes
// it as one line to a per-task output file.
public class SuffixBolt extends BaseBasicBolt {
    FileWriter fileWriter = null;

    // Called once when the bolt task starts: open the output file.
    // The UUID suffix gives each task its own file.
    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        try {
            fileWriter = new FileWriter("/home/hadoop/text" + UUID.randomUUID());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Append the suffix to the value sent by the upstream component and
    // write it to the file.
    @Override
    public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
        // BUG FIX: the original declared "StringBuffer name = null" and then
        // called name.append(...) — a guaranteed NullPointerException.
        // Allocate the builder before appending (StringBuilder suffices:
        // no cross-thread sharing within one execute call).
        StringBuilder name = new StringBuilder();
        name.append(tuple.getString(0));
        name.append("_ok");
        try {
            fileWriter.write(name.toString());
            fileWriter.write("\n");
            fileWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Resource cleanup: close the writer when the topology shuts down
    // (the original leaked the FileWriter).
    @Override
    public void cleanup() {
        if (fileWriter != null) {
            try {
                fileWriter.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    // Terminal bolt: emits nothing, so there are no fields to declare.
    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
    }
}
4:主类
import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;
/**
 * Wires the components into a complete processing topology and submits it to
 * the Storm cluster; once submitted, the topology runs until explicitly killed.
 */
public class Main {
    public static void main(String[] args) throws InvalidTopologyException, AuthorizationException, AlreadyAliveException {
        TopologyBuilder builder = new TopologyBuilder();
        // Parallelism 4 with 8 tasks: two tasks per executor thread.
        // BUG FIX: the original called strNumTask(8), which does not exist in
        // the Storm API (the code would not compile); the correct method on
        // the returned declarer is setNumTasks(int).
        builder.setSpout("MySpout", new MySpout(), 4).setNumTasks(8);
        // Receives tuples from MySpout, distributed randomly (shuffle grouping).
        builder.setBolt("upbolt", new MyBolt(), 4).shuffleGrouping("MySpout");
        // Receives tuples from upbolt.
        builder.setBolt("suffixbole", new SuffixBolt(), 4).shuffleGrouping("upbolt");
        // Build the topology from the declared components.
        StormTopology topology = builder.createTopology();
        // Topology configuration.
        Config config = new Config();
        // Number of worker processes to run the topology on.
        config.setNumWorkers(4);
        config.setDebug(true);
        // Zero ackers: disables tuple tracking (at-most-once processing).
        config.setNumAckers(0);
        // Submit to the cluster for execution.
        StormSubmitter.submitTopology("task1", config, topology);
    }
}
maven依赖
<!-- https://mvnrepository.com/artifact/org.apache.storm/storm-core -->
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-core</artifactId>
<version>1.2.2</version>
<scope>provided</scope>
</dependency>
提交集群
storm jar /home/hadoop/ftpDocuments/stormdemo.jar storm.demo.Main