1. On zjgm01, zjgm02, and zjgm03, start ZooKeeper: “./zkServer.sh start”
Press “alt+p” and upload the “apache-storm-0.9.2-incubating.tar.gz” package by dragging it into the session
Extract the “apache-storm-0.9.2-incubating.tar.gz” package into the app directory
“tar -zxvf apache-storm-0.9.2-incubating.tar.gz -C app/”
Change into “app/apache-storm-0.9.2-incubating/conf/”
Edit “storm.yaml” and add:
storm.zookeeper.servers:
    - "zjgm01"
    - "zjgm02"
    - "zjgm03"
nimbus.host: "zjgm01"
From the app directory, copy “apache-storm-0.9.2-incubating” to the app directory on “zjgm02” and “zjgm03”:
“scp -r apache-storm-0.9.2-incubating zjgm02:/home/hadoop/app/”
“scp -r apache-storm-0.9.2-incubating zjgm03:/home/hadoop/app/”
Change into “app/apache-storm-0.9.2-incubating/bin”
On “192.168.2.100” (zjgm01), start “./storm nimbus” and “./storm ui”
On “192.168.2.102” and “192.168.2.103”, start “./storm supervisor”
Open “192.168.2.100:8080” in a browser to see the Storm UI page shown below
Open IDEA and create a new project named “storm” with the structure shown below
Add the following to “pom.xml” (the provided scope keeps storm-core out of the packaged jar, since the cluster supplies it at runtime):
<dependencies>
    <!-- https://mvnrepository.com/artifact/org.apache.storm/storm-core -->
    <dependency>
        <groupId>org.apache.storm</groupId>
        <artifactId>storm-core</artifactId>
        <!-- keep this in line with the 0.9.2-incubating cluster installed above -->
        <version>0.9.2-incubating</version>
        <scope>provided</scope>
    </dependency>
</dependencies>
“RandomSpount.java”
package com.zhongruan.storm;

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

import java.util.Map;
import java.util.Random;

public class RandomSpount extends BaseRichSpout {
    // Candidate phone names that the spout emits at random.
    String[] phones = {"iphone", "huawei", "xiaomi", "xiaolajiao", "meizu"};
    private SpoutOutputCollector collector;

    @Override
    public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
        this.collector = spoutOutputCollector;
    }

    @Override
    public void nextTuple() {
        // Pick a random phone name and emit it as a one-field tuple.
        Random random = new Random();
        int index = random.nextInt(phones.length);
        String phonename = phones[index];
        collector.emit(new Values(phonename));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // The single output field is named "pn" (phone name).
        outputFieldsDeclarer.declare(new Fields("pn"));
    }
}
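The worker calls nextTuple() in a tight loop, so this spout emits tuples as fast as it can. If you want to slow the flow down while watching the output files, one optional tweak (not part of the original code) is to pause briefly between emits with Storm's Utils helper:
// Optional variant of nextTuple(); needs one extra import:
// import backtype.storm.utils.Utils;
@Override
public void nextTuple() {
    Random random = new Random();
    String phonename = phones[random.nextInt(phones.length)];
    collector.emit(new Values(phonename));
    Utils.sleep(500); // illustrative pause: emit roughly twice per second
}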
“UpperBolt.java”
package com.zhongruan.storm;

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class UpperBolt extends BaseBasicBolt {
    @Override
    public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
        // Read the phone name emitted by the spout and upper-case it.
        String phonename = tuple.getString(0);
        String upName = phonename.toUpperCase();
        basicOutputCollector.emit(new Values(upName));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("uppername"));
    }
}
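Because the spout declared its output field as “pn”, the bolt could also read the value by field name rather than by position; the first line of execute() could equally be written as:
// Equivalent to tuple.getString(0): look the value up by the declared field name.
String phonename = tuple.getStringByField("pn");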
“OkBolt.java”
package com.zhongruan.storm;

import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import java.util.UUID;

public class OkBolt extends BaseBasicBolt {
    FileWriter fileWriter = null;

    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        // Each bolt task opens its own output file, named with a random UUID,
        // under /home/hadoop/stormdata on the supervisor node it runs on.
        try {
            fileWriter = new FileWriter("/home/hadoop/stormdata/" + UUID.randomUUID());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
        // Append "_ok" to the upper-cased name and write it out, one record per line.
        String name = tuple.getString(0);
        String fName = name + "_ok";
        try {
            fileWriter.write(fName);
            fileWriter.write("\n");
            fileWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        // Last bolt in the topology: it declares no output fields.
    }
}
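The FileWriter above is never closed explicitly, which is acceptable for this exercise. If you want the file handle released when the bolt shuts down, BaseBasicBolt also lets you override cleanup(); a minimal sketch (an illustrative addition, and note that Storm does not guarantee cleanup() is called on a real cluster):
// Optional: close the output file when the bolt is shut down.
// Storm only calls cleanup() reliably in local mode / on a clean shutdown.
@Override
public void cleanup() {
    try {
        if (fileWriter != null) {
            fileWriter.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}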
“ToMain.java”
package com.zhongruan.storm;

import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.AlreadyAliveException;
import backtype.storm.generated.InvalidTopologyException;
import backtype.storm.generated.StormTopology;
import backtype.storm.topology.TopologyBuilder;

public class ToMain {
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
        // Wire up the topology: spout -> UpperBolt -> OkBolt, shuffle-grouped.
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("randomSpount", new RandomSpount());
        builder.setBolt("upperBolt", new UpperBolt(), 4).shuffleGrouping("randomSpount");
        builder.setBolt("okBolt", new OkBolt(), 4).shuffleGrouping("upperBolt");
        StormTopology topology = builder.createTopology();

        Config conf = new Config();
        conf.setNumWorkers(4);

        // Submit to the cluster under the name "ramtopology".
        StormSubmitter.submitTopology("ramtopology", conf, topology);
    }
}
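Before packaging and submitting to the cluster, you can also run the same topology in-process with Storm's LocalCluster. A minimal sketch is shown below; the class name TestMain and the 20-second run time are illustrative, it assumes the path /home/hadoop/stormdata used by OkBolt exists on the machine running it, and with <scope>provided</scope> you may need to put provided-scope dependencies on the run classpath in IDEA:
package com.zhongruan.storm;

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.utils.Utils;

// Hypothetical local-mode runner for quick testing; not part of the original project.
public class TestMain {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("randomSpount", new RandomSpount());
        builder.setBolt("upperBolt", new UpperBolt(), 4).shuffleGrouping("randomSpount");
        builder.setBolt("okBolt", new OkBolt(), 4).shuffleGrouping("upperBolt");

        Config conf = new Config();
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("ramtopology-local", conf, builder.createTopology());

        Utils.sleep(20000);                         // let the topology run for 20 seconds
        cluster.killTopology("ramtopology-local");
        cluster.shutdown();
    }
}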
Build “storm.jar”:
“Build”—“Build Artifacts”
Go to “E:\storm\out\artifacts\storm_jar”
Upload “storm.jar” to the home directory (/home/hadoop)
On “zjgm02” and “zjgm03”, create a “stormdata” directory in the home directory (this is where OkBolt writes its output): “mkdir stormdata”
Change into “app/apache-storm-0.9.2-incubating/”
“bin/storm jar /home/hadoop/storm.jar com.zhongruan.storm.ToMain”
Look at the files generated under stormdata on “zjgm02” and “zjgm03”:
tail -n 100 -f <filename>
Shows the last 100 lines of the file and keeps following it as new records are written
The Storm UI page now lists the new “ramtopology” topology
When finished, kill the topology: “./storm kill ramtopology”