Storm案例实战

  • 需求:实时获取上网用户数据(APP的IP地址),并在地图展示出来用户的位置信息。
  • 实时计算的解决方案:Storm0.8.2+Hadoop2.2.0+JDK1.7+ZooKeeper3.4.6
  • 架构设计

数据文件A:用户,手机号,手机MAC地址,上网地址,访问网站,网址类型,上行数据包数量,下行数据包数量,上行总量,下行总量,HTTP访问返回值

1413276006        18540852316      71-77-16-4c-41-b4:CMCC     10.116.136.202    alipay.com  支付   15     9       7161 4269 200

1413272713        15568385737      76-12-7d-b4-9e-6c:CMCC-EASY     158.219.67.93      iteye.com   门户   2       12     4613 1074 200

数据文件B:国家,省份,ISP提供商,最大IP,最小IP

中国   内蒙古                  联通   1039179776        1039187967

中国   福建省                  电信   3405938176        3405938687

数据文件C:国家,纬度,经度

安道尔         42.5   1.5

阿拉伯联合酋长国 24     54

  • 第一步,创建Spout和Bolt,提交area-topology任务

package cyb.storm;

import backtype.storm.Config;

import backtype.storm.StormSubmitter;

import backtype.storm.topology.TopologyBuilder;

public class Main {      

         public static void main(String[] args) throws Exception {

                   TopologyBuilder builder = new TopologyBuilder();

                   builder.setSpout("spout", new ReaderSpout(), 4);

                   //4 the number of tasks that should be assigned to execute this spout

                   builder.setBolt("area-bolt", new GetAreaBolt()).shuffleGrouping("spout");

                   builder.setBolt("longitude-bolt", new GetLongitudeBolt()).shuffleGrouping("area-bolt");

                   Config config = new Config();

                   config.setNumWorkers(4);

                   config.setMaxSpoutPending(1000);

                   StormSubmitter.submitTopology("area-topology", config,

                                     builder.createTopology());

         }

}

  • 第二步,创建Spout读取HDFS文件,截取出IP地址,发射给下一个Bolt

package cyb.storm;

import java.io.BufferedReader;

import java.io.IOException;

import java.io.InputStream;

import java.io.InputStreamReader;

import java.net.URI;

import java.util.Map;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.IOUtils;

import backtype.storm.spout.SpoutOutputCollector;

import backtype.storm.task.TopologyContext;

import backtype.storm.topology.OutputFieldsDeclarer;

import backtype.storm.topology.base.BaseRichSpout;

import backtype.storm.tuple.Fields;

import backtype.storm.tuple.Values;

import backtype.storm.utils.Utils;

@SuppressWarnings("serial")

public class ReaderSpout extends BaseRichSpout {

         SpoutOutputCollector _collector;

         @SuppressWarnings("rawtypes")

         @Override

         public void open(Map conf, TopologyContext context,

                            SpoutOutputCollector collector) {

                   _collector = collector;

         }

         @Override

         public void nextTuple() {

                   String uri = "hdfs://master:9000/storm/m.txt";

                   InputStream in = null;

                   try {

                            Configuration conf = new Configuration();

                            FileSystem fs = FileSystem.get(URI.create(uri), conf);

                            in = fs.open(new Path(uri));

                            BufferedReader br = new BufferedReader(new InputStreamReader(in));

                            String line = null;

                            while (null != (line = br.readLine())) {

                                     _collector.emit(new Values(line));

                                     Utils.sleep(100);

                            }

                   } catch (IOException e) {

                            e.printStackTrace();

                   } finally {

                            IOUtils.closeStream(in);

                   }

         }

         @Override

         public void ack(Object id) {

         }

         @Override

         public void fail(Object id) {

         }

         @Override

         public void declareOutputFields(OutputFieldsDeclarer declarer) {

                   declarer.declare(new Fields("ip"));

         }

}

  • 第三步,IP地址转换,比如127.0.0.1转换为2130706433,据此查询出归属地,并将归属地发射到下一个Bolt

package cyb.storm;

import java.sql.Connection;

import java.sql.DriverManager;

import java.sql.ResultSet;

import java.sql.SQLException;

import java.sql.Statement;

import java.util.Map;

import backtype.storm.task.TopologyContext;

import backtype.storm.topology.BasicOutputCollector;

import backtype.storm.topology.IBasicBolt;

import backtype.storm.topology.OutputFieldsDeclarer;

import backtype.storm.tuple.Fields;

import backtype.storm.tuple.Tuple;

import backtype.storm.tuple.Values;

@SuppressWarnings("serial")

public class GetAreaBolt implements IBasicBolt {

         static Connection conn;

         static Statement st;

         public static Connection getConnection() {

                   Connection con = null; // define Connection

                   try {

                            Class.forName("com.mysql.jdbc.Driver");// load Mysql driver

                            con = DriverManager.getConnection(

                                               "jdbc:mysql://192.168.32.72:3306/test", "hadoop", "hadoop");

                   } catch (Exception e) {

                            System.out.println("Connection failed! " + e.getMessage());

                   }

                   return con;

         }

         public static String select(long ipp) {

                   conn = getConnection(); // get connection

                   try {

                            String sql = "select area from ip where '" + ipp

                                               + "' between minip and maxip";

                            st = conn.createStatement();

                            ResultSet rs = st.executeQuery(sql);

                            String name = rs.getString("area");

                            return name;

                            // conn.close(); //close connection

                   } catch (SQLException e) {

                            System.out.println("failed! " + e.getMessage());

                            return null;

                   }

         }

         @Override

         public void execute(Tuple tuple, BasicOutputCollector collector) {

                   String line = tuple.toString();

                   String all[] = line.split("\t", -1);

                   long longIp = GetAreaBolt.ipToLong(all[3]);

                   collector.emit(new Values(select(longIp)));

         }

         public static long ipToLong(String strIp) {

                   // transfer ip like 127.0.0.1 to decimal integer

                   long[] ip = new long[4];

                   // find the position of dot

                   int position1 = strIp.indexOf(".");

                   int position2 = strIp.indexOf(".", position1 + 1);

                   int position3 = strIp.indexOf(".", position2 + 1);

                   // transfer string to integer

                   ip[0] = Long.parseLong(strIp.substring(0, position1));

                   ip[1] = Long.parseLong(strIp.substring(position1 + 1, position2));

                   ip[2] = Long.parseLong(strIp.substring(position2 + 1, position3));

                   ip[3] = Long.parseLong(strIp.substring(position3 + 1));

                   return (ip[0] << 24) + (ip[1] << 16) + (ip[2] << 8) + ip[3];

         }

         @Override

         public void declareOutputFields(OutputFieldsDeclarer declarer) {

                   declarer.declare(new Fields("area"));

         }

         @Override

         public Map<String, Object> getComponentConfiguration() {

                   return null;

         }

         @SuppressWarnings("rawtypes")

         @Override

         public void prepare(Map stormConf, TopologyContext context) {

                   conn = getConnection();

                   try {

                            st = conn.createStatement();

                   } catch (SQLException e) {

                            e.printStackTrace();

                   }

         }

         @Override

         public void cleanup() {

         }

}

  • 第四步,地址和经纬度转换,从文件中根据地址,获取出经纬度

package chapter04.storm;

import java.io.BufferedReader;

import java.io.IOException;

import java.io.InputStream;

import java.io.InputStreamReader;

import java.net.URI;

import java.sql.Connection;

import java.sql.DriverManager;

import java.sql.SQLException;

import java.sql.Statement;

import java.util.HashMap;

import java.util.Map;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

import backtype.storm.task.TopologyContext;

import backtype.storm.topology.BasicOutputCollector;

import backtype.storm.topology.IBasicBolt;

import backtype.storm.topology.OutputFieldsDeclarer;

import backtype.storm.tuple.Fields;

import backtype.storm.tuple.Tuple;

public class GetLongitudeBolt implements IBasicBolt {

         private static final long serialVersionUID = 1L;

         private HashMap<String, String> longitude = new HashMap<String, String>();

         static Connection conn;

         static Statement st;

         private String uri = "hdfs://master:9000/storm/lng-lat-mapping.txt";

         public static Connection getConnection() {

                   Connection con = null; // get connection

                   try {

                            Class.forName("com.mysql.jdbc.Driver");// load Mysql driver

                            con = DriverManager.getConnection(

                                               "jdbc:mysql://192.168.32.72:3306/test", "hadoop", "hadoop");

                   } catch (Exception e) {

                            System.out.println("connect mysql failed! " + e.getMessage());

                   }

                   return con; // return connection

         }

         public static void insert(String area, String jing, String wei) {

                   conn = getConnection(); // get connection

                   try {

                            String sql = "INSERT INTO position(area,lng,lat)" + " VALUES ('"

                                               + area + "','" + jing + "','" + wei + "')";

                            st = (Statement) conn.createStatement(); // create static sql statement

                            st.executeUpdate(sql); // exec sql

                            conn.close(); // close connection

                   } catch (SQLException e) {

                            System.out.println("insert failed! " + e.getMessage());

                   }

         }

         public void execute(Tuple tuple, BasicOutputCollector collector) {

                   String word = tuple.toString();

                   if (longitude.get(word) != null) {

                            insert(longitude.get(word).split("\t", -1)[0], longitude.get(word)

                                               .split("\t", -1)[1], longitude.get(word).split("\t", -1)[2]);

                   }

         }

         public void declareOutputFields(OutputFieldsDeclarer declarer) {

                   declarer.declare(new Fields("area", "lng", "lat"));

         }

         @Override

         public Map<String, Object> getComponentConfiguration() {

                   return null;

         }

         @SuppressWarnings("rawtypes")

         @Override

         public void prepare(Map stormConf, TopologyContext context) {

                   conn = getConnection();

                   try {

                            st = (Statement) conn.createStatement();

                   } catch (SQLException e1) {

                            e1.printStackTrace();

                   }

                   InputStream in = null;

                   FileSystem fs = null;

                   try {

                            fs = FileSystem.get(URI.create(uri), new Configuration());

                            in = fs.open(new Path(uri));

                   } catch (IOException e) {

                            e.printStackTrace();

                   }

                   BufferedReader br = new BufferedReader(new InputStreamReader(in));

                   String line = null;

                   try {

                            while (null != (line = br.readLine())) {

                                     longitude.put(line.split("\t", -1)[0], line);

                            }

                   } catch (IOException e) {

                            e.printStackTrace();

                   }

         }

         @Override

         public void cleanup() {

         }

}

后续就是入库和显示JSP,不是核心技术,就不再赘述了。

  • 1
    点赞
  • 6
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值