A Storm DRPC example (worth a look)
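This reposted example wires up a LinearDRPCTopologyBuilder topology that answers "which function names does a given user have?" against a MySQL database. The DRPC spout hands each request to GetUserID as a (request-id, userName) tuple; GetUserID resolves the userID, GetUserFunctionsID fans it out into one tuple per functionID, and GetUserFunctionsName, a batch bolt, collects the matching function names and emits them as a single list once the batch for that request finishes. Note that the JDBC connection is opened once in initConnectDB() on the JVM that builds the topology, so the example runs as written only in local mode; on a real cluster each bolt would need to open its own connection in prepare(). The full listing follows.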


// Imports assume a Storm 0.8.x-era classpath (backtype.storm packages);
// adjust the package prefix for newer Storm releases.
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.StormSubmitter;
import backtype.storm.coordination.BatchOutputCollector;
import backtype.storm.drpc.LinearDRPCTopologyBuilder;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.topology.base.BaseBatchBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

// The enclosing class declaration is missing from the original post; the name below is assumed.
// RDBMSConnector and RDBMSCommunicator2UFN are the author's own JDBC helper classes (not shown).
public class UserFunctionsTopology {

	static String primaryKey;
	static String rdbmsUrl;
	static String rdbmsUserName;
	static String rdbmsPassword;
	static Connection con;
	static RDBMSConnector connector;
	static RDBMSCommunicator2UFN communicator;

	// Open the JDBC connection and build the communicator shared by all bolts.
	public static void initConnectDB(){
		primaryKey = "id";
		rdbmsUrl = "jdbc:mysql://hadoop/DB" ;
		rdbmsUserName = ""; 
		rdbmsPassword = "";
		
		connector = new RDBMSConnector();
		try {
			con = connector.getConnection(rdbmsUrl, rdbmsUserName, rdbmsPassword);
			communicator = new RDBMSCommunicator2UFN(con);
		} catch (Exception e){
			System.out.println("connect to db exception in initConnectDB()");
			e.printStackTrace();
		}
	}
	
	// First bolt: maps the incoming userName (tuple field 1) to its userID.
	public static class GetUserID extends BaseBasicBolt{
		//private RDBMSCommunicator communicator = null;
		private ResultSet rs = null;
		
		@Override
	    public void prepare(Map stormConf, TopologyContext context) {
			System.out.println("in prepare con : "+con);
			//this.communicator = new RDBMSCommunicator(con); 
			System.out.println("in pretpare communicator :"+communicator);
	    }

		public void execute(Tuple input, BasicOutputCollector collector) {
			Object id = input.getValue(0);
			String userName = input.getString(1);
			
			String sql = String.format("select userID from usersinfo where username='%s'", userName);
			System.out.println("sql in get-user-id: "+sql);
			rs = communicator.selecteExec(sql);
			String userID = null;
			
			if (rs != null){
				try {
					rs.next();
					userID = rs.getString("userID");
				} catch (Exception e){
					e.printStackTrace();
				}
				collector.emit(new Values(id, userID));
			}
			
		}

		public void declareOutputFields(OutputFieldsDeclarer declarer) {
			declarer.declare(new Fields("id", "userID"));
		}
		
	}
	
	// Second bolt: fans out one (id, functionID) tuple per function the user owns.
	public static class GetUserFunctionsID extends BaseBasicBolt{
		//private RDBMSCommunicator communicator = null;
		private ResultSet rs = null;
		
		@Override
	    public void prepare(Map stormConf, TopologyContext context) {
			//communicator = new  RDBMSCommunicator(con); 
	    }

		public void execute(Tuple input, BasicOutputCollector collector) {
			Object id = input.getValue(0);
			String userID = input.getString(1);
			
			if (userID == null || userID.trim().length() == 0){
				return;
			}
			
			String sql = String.format("select functionID from userfunctions where userID='%s'", userID);
			System.out.println("sql in get-user-functionid : "+sql);
			rs = communicator.selecteExec(sql);
			String functionID = null;
			
			if (rs != null){
				try {
					while(rs.next()){
						functionID = rs.getString("functionID");
						collector.emit(new Values(id,functionID));
					}
				} catch(Exception e){
					e.printStackTrace();
				}
			}
		}

		public void declareOutputFields(OutputFieldsDeclarer declarer) {
			declarer.declare(new Fields("id","functionID"));
		}
	}
	
	// Final bolt: a batch bolt that collects all function names for one DRPC request
	// and emits them as a single result when the batch finishes.
	public static class GetUserFunctionsName extends BaseBatchBolt{
		//private RDBMSCommunicator communicator = null;
		private ResultSet rs = null;
		
		List<String> functionsName = new ArrayList<String>();
		BatchOutputCollector _collector;
	    Object _id;
	    
		public void execute(Tuple tuple) {
			String functionID = tuple.getString(1);
			
			if (functionID == null || functionID.trim().length() == 0){
				return ;
			}
			
			String sql = String.format("select functionName from functionsinfo where functionID='%s'",functionID);
			System.out.println("sql in get-user-functionname : "+sql );
			rs = communicator.selecteExec(sql);
			String functionName = null;
			
			if(rs != null){
				try {
					rs.next();
					functionName = rs.getString("functionName");
					functionsName.add(functionName);
				} catch (Exception e){
					e.printStackTrace();
				}
			}
		}

		public void finishBatch() {
			_collector.emit(new Values(_id,functionsName.toString()));
		}

		public void prepare(Map conf, TopologyContext context,
				BatchOutputCollector collector, Object id) {
			_collector = collector;
            _id = id;
		}

		public void declareOutputFields(OutputFieldsDeclarer declarer) {
			declarer.declare(new Fields("id", "user-funcions-name"));
		}
		
	}
	
	// Wire the three bolts into a linear DRPC topology exposed as "user-functions-name".
	public static LinearDRPCTopologyBuilder construct(){
		 initConnectDB();
		 LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("user-functions-name");
		 
		 builder.addBolt(new GetUserID(), 2);
		 builder.addBolt(new GetUserFunctionsID(),2).shuffleGrouping();
		 // fieldsGrouping on ("id","functionID") can split one request's tuples across tasks;
		 // grouping on "id" alone would keep each request's whole batch on a single task.
		 builder.addBolt(new GetUserFunctionsName(),2).fieldsGrouping(new Fields("id","functionID"));
		 return builder;
		
	}
	// Local mode when run without arguments; otherwise submit to the cluster under the name in args[0].
	public static void main(String[] args) throws Exception{
		LinearDRPCTopologyBuilder builder = construct();
        
        
        Config conf = new Config();
        
        if(args==null || args.length==0) {
            conf.setMaxTaskParallelism(3);
            LocalDRPC drpc = new LocalDRPC();
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("user-fn-drpc", conf, builder.createLocalTopology(drpc));
            
            String[] userNames = new String[] { "qingwu.fu"};
            for(String un: userNames) {
                System.out.println("Functions name of : " + un + ": " + drpc.execute("user-functions-name", un));
            }
            cluster.shutdown();
            drpc.shutdown();
        } else {
            conf.setNumWorkers(6);
            StormSubmitter.submitTopology(args[0], conf, builder.createRemoteTopology());
        }
		
	}
}
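When the topology has been submitted to a real cluster (the else branch of main above), the function can be called from any JVM through a DRPC client. Below is a minimal sketch, assuming the Storm 0.8.x-era backtype.storm.utils.DRPCClient API; the host name "drpc-host" and port 3772 are placeholders for whatever drpc.servers and drpc.port are set to in storm.yaml.

import backtype.storm.utils.DRPCClient;

public class UserFunctionsDrpcCall {
	public static void main(String[] args) throws Exception {
		// Connect to a DRPC server; host and port here are assumptions, not values from the post.
		DRPCClient client = new DRPCClient("drpc-host", 3772);

		// The argument string arrives at GetUserID as tuple field 1
		// (field 0 is the DRPC request id added by the DRPC spout).
		String result = client.execute("user-functions-name", "qingwu.fu");
		System.out.println("Functions name of qingwu.fu: " + result);
	}
}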

 
Reposted from: http://blog.csdn.net/baiyangfu_love/article/details/8227085

 
