/**
 * Initializes the shared JDBC connection ({@code con}) and the RDBMS
 * communicator ({@code communicator}) used by every bolt in the topology.
 * Must be called once before the topology is constructed (see construct()).
 *
 * @throws IllegalStateException if the database connection cannot be established
 */
public static void initConnectDB(){
    primaryKey = "id";
    rdbmsUrl = "jdbc:mysql://hadoop/DB";
    // NOTE(review): empty credentials look like a placeholder — confirm before deploying.
    rdbmsUserName = "";
    rdbmsPassword = "";
    connector = new RDBMSConnector();
    try {
        con = connector.getConnection(rdbmsUrl, rdbmsUserName, rdbmsPassword);
        communicator = new RDBMSCommunicator2UFN(con);
    } catch (Exception e){
        // Fail fast: the original code swallowed this failure, which left
        // `communicator` null and made every bolt crash later with an
        // uninformative NullPointerException far from the real cause.
        throw new IllegalStateException("connect to db exception in initConnectDB()", e);
    }
}
/**
 * First DRPC stage: resolves a user name to its userID.
 * Input tuple: (request-id, userName). Emits: (request-id, userID) — userID is
 * null when no matching row exists, so downstream bolts must handle null.
 */
public static class GetUserID extends BaseBasicBolt{
    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        // `con`/`communicator` are static fields set up by initConnectDB();
        // logged here to verify they are visible after topology deployment.
        System.out.println("in prepare con : " + con);
        System.out.println("in prepare communicator :" + communicator);
    }
    public void execute(Tuple input, BasicOutputCollector collector) {
        Object id = input.getValue(0);
        String userName = input.getString(1);
        // SECURITY(review): string-built SQL is injection-prone; the communicator
        // API only accepts a raw SQL string — prefer a PreparedStatement-based API.
        String sql = String.format("select userID from usersinfo where username='%s'", userName);
        System.out.println("sql in get-user-id: "+sql);
        String userID = null;
        // Local variable instead of a field: bolt state per-call, and the
        // ResultSet is now reliably closed (the original leaked it).
        ResultSet rs = communicator.selecteExec(sql);
        if (rs != null){
            try {
                // Guard on next(): the original called getString() unconditionally
                // and threw SQLException on an empty result set.
                if (rs.next()) {
                    userID = rs.getString("userID");
                }
            } catch (Exception e){
                e.printStackTrace();
            } finally {
                try { rs.close(); } catch (Exception ignored) { /* best-effort close */ }
            }
            collector.emit(new Values(id, userID));
        }
    }
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("id", "userID"));
    }
}
/**
 * Second DRPC stage: fans out a userID into one tuple per function the user owns.
 * Input tuple: (request-id, userID). Emits: (request-id, functionID) for each row.
 * Drops the tuple silently when userID is null/blank (upstream found no user).
 */
public static class GetUserFunctionsID extends BaseBasicBolt{
    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        // Uses the shared static `communicator` (set up by initConnectDB());
        // nothing to initialize per task.
    }
    public void execute(Tuple input, BasicOutputCollector collector) {
        Object id = input.getValue(0);
        String userID = input.getString(1);
        if (userID == null || userID.trim().length() == 0){
            return; // no user matched upstream — nothing to fan out
        }
        // SECURITY(review): string-built SQL is injection-prone; the communicator
        // API only accepts a raw SQL string — prefer a PreparedStatement-based API.
        String sql = String.format("select functionID from userfunctions where userID='%s'", userID);
        System.out.println("sql in get-user-functionid : "+sql);
        // Local variable instead of a field, and the ResultSet is now closed
        // (the original leaked it on every call).
        ResultSet rs = communicator.selecteExec(sql);
        if (rs != null){
            try {
                while(rs.next()){
                    collector.emit(new Values(id, rs.getString("functionID")));
                }
            } catch(Exception e){
                e.printStackTrace();
            } finally {
                try { rs.close(); } catch (Exception ignored) { /* best-effort close */ }
            }
        }
    }
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("id","functionID"));
    }
}
/**
 * Final DRPC stage (batch bolt): resolves each functionID to its name and,
 * when the batch completes, emits the collected names as a single string.
 * Input tuple: (request-id, functionID). Emits once per batch:
 * (request-id, "[name1, name2, ...]") via List.toString().
 */
public static class GetUserFunctionsName extends BaseBatchBolt{
    // Names accumulated across the batch; emitted once in finishBatch().
    private final List<String> functionsName = new ArrayList<String>();
    BatchOutputCollector _collector;
    Object _id; // the DRPC request id this batch belongs to
    public void prepare(Map conf, TopologyContext context,
            BatchOutputCollector collector, Object id) {
        _collector = collector;
        _id = id;
    }
    public void execute(Tuple tuple) {
        String functionID = tuple.getString(1);
        if (functionID == null || functionID.trim().length() == 0){
            return; // nothing to look up for this tuple
        }
        // SECURITY(review): string-built SQL is injection-prone; the communicator
        // API only accepts a raw SQL string — prefer a PreparedStatement-based API.
        String sql = String.format("select functionName from functionsinfo where functionID='%s'",functionID);
        System.out.println("sql in get-user-functionname : "+sql);
        // Local variable instead of a field, and the ResultSet is now closed
        // (the original leaked it on every call).
        ResultSet rs = communicator.selecteExec(sql);
        if(rs != null){
            try {
                // Guard on next(): the original called getString() unconditionally
                // and threw SQLException on an empty result set.
                if (rs.next()) {
                    functionsName.add(rs.getString("functionName"));
                }
            } catch (Exception e){
                e.printStackTrace();
            } finally {
                try { rs.close(); } catch (Exception ignored) { /* best-effort close */ }
            }
        }
    }
    public void finishBatch() {
        _collector.emit(new Values(_id, functionsName.toString()));
    }
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("id", "user-funcions-name"));
    }
}
/**
 * Builds the linear DRPC topology "user-functions-name":
 * GetUserID -> GetUserFunctionsID (shuffle) -> GetUserFunctionsName
 * (fields-grouped on ("id","functionID") so one batch bolt sees a request's rows).
 * Opens the shared DB connection first via initConnectDB().
 *
 * @return the configured topology builder, ready for local or remote submission
 */
public static LinearDRPCTopologyBuilder construct(){
    initConnectDB();
    LinearDRPCTopologyBuilder topology = new LinearDRPCTopologyBuilder("user-functions-name");
    topology.addBolt(new GetUserID(), 2);
    topology.addBolt(new GetUserFunctionsID(), 2).shuffleGrouping();
    topology.addBolt(new GetUserFunctionsName(), 2).fieldsGrouping(new Fields("id", "functionID"));
    return topology;
}
/**
 * Entry point. With no arguments, runs the topology on an in-process
 * LocalCluster/LocalDRPC, issues one sample DRPC request, and shuts down.
 * With arguments, submits the topology to a real cluster under args[0].
 *
 * @param args optional; args[0] is the remote topology name
 * @throws Exception if topology submission fails
 */
public static void main(String[] args) throws Exception{
    LinearDRPCTopologyBuilder topology = construct();
    Config config = new Config();
    boolean localMode = (args == null || args.length == 0);
    if (localMode) {
        config.setMaxTaskParallelism(3);
        LocalDRPC drpc = new LocalDRPC();
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("user-fn-drpc", config, topology.createLocalTopology(drpc));
        // Exercise the DRPC endpoint with a sample user name.
        String[] sampleUsers = { "qingwu.fu" };
        for (String userName : sampleUsers) {
            String result = drpc.execute("user-functions-name", userName);
            System.out.println("Functions name of : " + userName + ": " + result);
        }
        cluster.shutdown();
        drpc.shutdown();
    } else {
        config.setNumWorkers(6);
        StormSubmitter.submitTopology(args[0], config, topology.createRemoteTopology());
    }
}
storm 的一个drpc例子
最新推荐文章于 2019-10-17 16:44:47 发布
该博客展示了如何使用storm构建一个DRPC(Distributed RPC,分布式远程过程调用)应用,通过多个Bolt步骤从数据库中获取并处理用户的功能ID和名称。首先初始化数据库连接,接着定义了三个Bolt类:GetUserID、GetUserFunctionsID和GetUserFunctionsName,分别用于获取用户ID、根据用户ID获取功能ID和根据功能ID获取功能名称。最后通过LinearDRPCTopologyBuilder构建并运行DRPC拓扑。
摘要由CSDN通过智能技术生成