本地 Eclipse 下开发 Storm 的 Topology

这个 Topology 的功能是从 MySQL 数据库读取数据,然后将数据写入到本地文件里。

使用maven创建项目

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<groupId>scc</groupId>
	<artifactId>stu-storm</artifactId>
	<version>0.0.1-SNAPSHOT</version>
	<name>MyTopology</name>
	<properties>
		<!-- Compile sources as UTF-8 so the Chinese comments/strings survive. -->
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
	</properties>

	<dependencies>
		<!-- Storm runtime API (spout/bolt/topology builder).
		     NOTE(review): for cluster submission storm-core is normally
		     <scope>provided</scope>; compile scope is kept here so the
		     topology can run in LocalCluster mode from Eclipse — confirm
		     before deploying the fat jar to a real cluster. -->
		<dependency>
			<groupId>org.apache.storm</groupId>
			<artifactId>storm-core</artifactId>
			<version>1.0.2</version>
		</dependency>
		<!-- MySQL JDBC driver used by JdbcUtils. -->
		<dependency>
			<groupId>mysql</groupId>
			<artifactId>mysql-connector-java</artifactId>
			<version>5.1.31</version>
		</dependency>
	</dependencies>

	<build>
		<plugins>
			<!-- Builds a fat jar (classes + all dependencies) during `mvn package`,
			     suitable for `storm jar` submission. -->
			<plugin>
				<artifactId>maven-assembly-plugin</artifactId>
				<version>2.4</version>
				<configuration>
					<descriptorRefs>
						<descriptorRef>jar-with-dependencies</descriptorRef>
					</descriptorRefs>
				</configuration>
				<executions>
					<execution>
						<id>make-assembly</id>
						<phase>package</phase>
						<goals>
							<goal>single</goal>
						</goals>
					</execution>
				</executions>
			</plugin>
		</plugins>
	</build>
</project>

编写spout

public class FromMysqlSpout extends BaseRichSpout {
	private Map conf;
	private TopologyContext context;
	private SpoutOutputCollector collector;

	/**
	 * Stores the Storm-provided handles for later use by {@link #nextTuple()}.
	 *
	 * @param conf      topology configuration
	 * @param context   this task's topology context
	 * @param collector collector used to emit tuples and report errors
	 * @see org.apache.storm.spout.ISpout#open(java.util.Map,
	 *      org.apache.storm.task.TopologyContext,
	 *      org.apache.storm.spout.SpoutOutputCollector)
	 */
	public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
		this.conf = conf;
		this.collector = collector;
		this.context = context;
	}

	/**
	 * Reads one row from MySQL and emits it on the "data" field.
	 *
	 * <p>FIXES over the original: the JDBC resources opened by the query are
	 * now released in a {@code finally} block, the error is reported once
	 * (via the collector) instead of printed AND reported, and a short sleep
	 * keeps Storm from spinning this method in a tight loop — the original
	 * opened a brand-new database connection and re-emitted the same row as
	 * fast as the CPU allowed.
	 *
	 * @see org.apache.storm.spout.ISpout#nextTuple()
	 */
	public void nextTuple() {
		JdbcUtils jdbcUtils = new JdbcUtils();
		try {
			List<Map<String, Object>> data = jdbcUtils.findModeResult("select * from sds limit 1",
					new ArrayList<Object>());
			this.collector.emit(new Values(data));
		} catch (SQLException e) {
			// Surface the failure in the Storm UI; no stack-trace spam on stdout.
			this.collector.reportError(e);
		} finally {
			// Close the ResultSet (and any other JDBC resources) from the query.
			jdbcUtils.releaseConn();
			// Throttle: Storm calls nextTuple continuously when no backpressure
			// applies; without a pause this spout floods the topology.
			Utils.sleep(1000);
		}
	}

	/**
	 * Declares the single output field "data" consumed by downstream bolts.
	 *
	 * @param declarer output field declarer
	 * @see org.apache.storm.topology.IComponent#declareOutputFields(org.apache.storm.topology.OutputFieldsDeclarer)
	 */
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		declarer.declare(new Fields("data"));
	}

}

编写bolt

public class ToFileBolt extends BaseRichBolt {
	private Map conf;
	private TopologyContext context;
	private OutputCollector collector;

	/**
	 * Stores the Storm-provided handles for later use by {@link #execute(Tuple)}.
	 *
	 * @param stormConf topology configuration
	 * @param context   this task's topology context
	 * @param collector collector used to ack/fail tuples
	 * @see org.apache.storm.task.IBolt#prepare(java.util.Map,
	 *      org.apache.storm.task.TopologyContext,
	 *      org.apache.storm.task.OutputCollector)
	 */
	@Override
	public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
		// BUG FIX: the original read `this.conf = this.conf;` (self-assignment),
		// which left the conf field permanently null.
		this.conf = stormConf;
		this.collector = collector;
		this.context = context;
	}

	/**
	 * Appends the tuple's "data" payload as one line to D:\stormtest.txt.
	 *
	 * <p>FIXES over the original: try-with-resources guarantees the stream is
	 * closed even when the write throws, and the tuple is explicitly
	 * acked/failed so Storm can track processing.
	 *
	 * @param input tuple carrying a "data" field (a List of row maps)
	 * @see org.apache.storm.task.IBolt#execute(org.apache.storm.tuple.Tuple)
	 */
	@Override
	public void execute(Tuple input) {
		List<Map<String, Object>> data = (List<Map<String, Object>>) input.getValueByField("data");
		String outdata = data.toString() + "\r\n";
		File file = new File("D:\\stormtest.txt");
		// Open in append mode; try-with-resources closes it on every path.
		try (FileOutputStream fos = new FileOutputStream(file, true)) {
			fos.write(outdata.getBytes());
			fos.flush();
			this.collector.ack(input);
		} catch (IOException e) {
			e.printStackTrace();
			// Tell Storm the tuple failed so it can be replayed if anchored.
			this.collector.fail(input);
		}
	}

	/**
	 * Terminal bolt: nothing is emitted downstream, so no fields are declared.
	 *
	 * @param declarer output field declarer (unused)
	 * @see org.apache.storm.topology.IComponent#declareOutputFields(org.apache.storm.topology.OutputFieldsDeclarer)
	 */
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		// Intentionally empty — this bolt only writes to the local file.
	}

}

编写topology

public class MyTopology {
	/**
	 * Wires the MySQL spout to the file bolt and submits the topology:
	 * to a real cluster when a topology name is given as {@code args[0]},
	 * otherwise to an in-process {@code LocalCluster} for ~100 seconds.
	 */
	public static void main(String[] args)
			throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {
		TopologyBuilder topologyBuilder = new TopologyBuilder();
		topologyBuilder.setSpout("frommysql", new FromMysqlSpout());
		topologyBuilder.setBolt("tofile", new ToFileBolt()).shuffleGrouping("frommysql");

		Config config = new Config();
		config.setDebug(true);

		boolean submitToCluster = args != null && args.length > 0;
		if (submitToCluster) {
			// Cluster mode: the topology name comes from the command line.
			config.setNumWorkers(3);
			StormSubmitter.submitTopology(args[0], config, topologyBuilder.createTopology());
		} else {
			// Local debug mode: run in-process, then tear everything down.
			LocalCluster localCluster = new LocalCluster();
			localCluster.submitTopology("firstTopo", config, topologyBuilder.createTopology());
			Utils.sleep(100000);
			localCluster.killTopology("firstTopo");
			localCluster.shutdown();
		}
	}
}

用到的工具类

public class JdbcUtils {
	// Database user name
	private static final String USERNAME = "root";
	// Database password. NOTE(review): hard-coded credentials should be
	// externalized to configuration outside of a demo.
	private static final String PASSWORD = "1234";
	// Fully-qualified JDBC driver class
	private static final String DRIVER = "com.mysql.jdbc.Driver";
	// JDBC URL of the target database
	private static final String URL = "jdbc:mysql://localhost:3306/hive";
	private Connection connection;
	private PreparedStatement pstmt;
	private ResultSet resultSet;

	/**
	 * Loads the JDBC driver and attempts to open a connection.
	 *
	 * <p>FIXES over the original: exceptions are no longer swallowed by an
	 * empty catch block, and the success message is printed only after a
	 * connection actually exists (the original printed it right after
	 * {@code Class.forName}, before connecting).
	 */
	public JdbcUtils() {
		try {
			Class.forName(DRIVER);
			this.getConnection();
			if (this.connection != null) {
				System.out.println("数据库连接成功!");
			}
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Opens (or re-opens) the database connection.
	 *
	 * @return the live connection, or {@code null} if connecting failed
	 */
	public Connection getConnection() {
		try {
			this.connection = DriverManager.getConnection(URL, USERNAME, PASSWORD);
		} catch (SQLException e) {
			e.printStackTrace();
		}
		return this.connection;
	}

	/**
	 * Binds positional parameters onto {@link #pstmt} (JDBC is 1-based).
	 * Extracted because the identical loop appeared in all five query methods.
	 */
	private void bindParams(List<Object> params) throws SQLException {
		if (params != null) {
			for (int i = 0; i < params.size(); i++) {
				this.pstmt.setObject(i + 1, params.get(i));
			}
		}
	}

	/**
	 * Executes an INSERT/UPDATE/DELETE statement.
	 *
	 * @param sql    statement with {@code ?} placeholders
	 * @param params positional parameter values (may be null or empty)
	 * @return true if at least one row was affected
	 * @throws SQLException on database error
	 */
	public boolean updateByPreparedStatement(String sql, List<Object> params) throws SQLException {
		this.pstmt = this.connection.prepareStatement(sql);
		bindParams(params);
		return this.pstmt.executeUpdate() > 0;
	}

	/**
	 * Queries a single record; if the query matches several rows, the last
	 * one wins (the loop keeps overwriting the same map, as in the original).
	 *
	 * @param sql    query with {@code ?} placeholders
	 * @param params positional parameter values (may be null or empty)
	 * @return column-name → value map; empty map when nothing matched.
	 *         NULL column values are mapped to {@code ""}.
	 * @throws SQLException on database error
	 */
	public Map<String, Object> findSimpleResult(String sql, List<Object> params) throws SQLException {
		Map<String, Object> map = new HashMap<String, Object>();
		this.pstmt = this.connection.prepareStatement(sql);
		bindParams(params);
		this.resultSet = this.pstmt.executeQuery();
		ResultSetMetaData metaData = this.resultSet.getMetaData();
		int colCount = metaData.getColumnCount();
		while (this.resultSet.next()) {
			for (int i = 0; i < colCount; i++) {
				String colName = metaData.getColumnName(i + 1);
				Object colValue = this.resultSet.getObject(colName);
				// Normalize SQL NULL to empty string so callers need no null checks.
				map.put(colName, colValue == null ? "" : colValue);
			}
		}
		return map;
	}

	/**
	 * Queries multiple records.
	 *
	 * <p>NOTE(review): this method closes the connection after the query,
	 * making the instance single-use — kept as-is because existing callers
	 * (e.g. the spout) rely on the connection being closed here.
	 *
	 * @param sql    query with {@code ?} placeholders
	 * @param params positional parameter values (may be null or empty)
	 * @return one column-name → value map per row; NULLs mapped to {@code ""}
	 * @throws SQLException on database error
	 */
	public List<Map<String, Object>> findModeResult(String sql, List<Object> params) throws SQLException {
		List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
		this.pstmt = this.connection.prepareStatement(sql);
		bindParams(params);
		this.resultSet = this.pstmt.executeQuery();
		ResultSetMetaData metaData = this.resultSet.getMetaData();
		int colCount = metaData.getColumnCount();
		while (this.resultSet.next()) {
			Map<String, Object> map = new HashMap<String, Object>();
			for (int i = 0; i < colCount; i++) {
				String colName = metaData.getColumnName(i + 1);
				Object colValue = this.resultSet.getObject(colName);
				map.put(colName, colValue == null ? "" : colValue);
			}
			list.add(map);
		}
		this.connection.close();
		return list;
	}

	/**
	 * Queries a single record and maps it onto an instance of {@code cls}
	 * via reflection; field names must match column names exactly.
	 *
	 * @param sql    query with {@code ?} placeholders
	 * @param params positional parameter values (may be null or empty)
	 * @param cls    bean class with a no-arg constructor
	 * @return populated instance, or {@code null} when nothing matched
	 * @throws Exception on database or reflection error
	 */
	public <T> T findSimpleRefResult(String sql, List<Object> params, Class<T> cls) throws Exception {
		T resultObject = null;
		this.pstmt = this.connection.prepareStatement(sql);
		bindParams(params);
		this.resultSet = this.pstmt.executeQuery();
		ResultSetMetaData metaData = this.resultSet.getMetaData();
		int colCount = metaData.getColumnCount();
		while (this.resultSet.next()) {
			resultObject = cls.newInstance();
			for (int i = 0; i < colCount; i++) {
				String colName = metaData.getColumnName(i + 1);
				Object colValue = this.resultSet.getObject(colName);
				if (colValue == null) {
					colValue = "";
				}
				Field field = cls.getDeclaredField(colName);
				field.setAccessible(true); // allow writing private bean fields
				field.set(resultObject, colValue);
			}
		}
		return resultObject;
	}

	/**
	 * Queries multiple records and maps each row onto an instance of
	 * {@code cls} via reflection; field names must match column names.
	 *
	 * @param sql    query with {@code ?} placeholders
	 * @param params positional parameter values (may be null or empty)
	 * @param cls    bean class with a no-arg constructor
	 * @return one populated instance per row (possibly empty list)
	 * @throws Exception on database or reflection error
	 */
	public <T> List<T> findMoreRefResult(String sql, List<Object> params, Class<T> cls) throws Exception {
		List<T> list = new ArrayList<T>();
		this.pstmt = this.connection.prepareStatement(sql);
		bindParams(params);
		this.resultSet = this.pstmt.executeQuery();
		ResultSetMetaData metaData = this.resultSet.getMetaData();
		int colCount = metaData.getColumnCount();
		while (this.resultSet.next()) {
			T resultObject = cls.newInstance();
			for (int i = 0; i < colCount; i++) {
				String colName = metaData.getColumnName(i + 1);
				Object colValue = this.resultSet.getObject(colName);
				if (colValue == null) {
					colValue = "";
				}
				Field field = cls.getDeclaredField(colName);
				field.setAccessible(true); // allow writing private bean fields
				field.set(resultObject, colValue);
			}
			list.add(resultObject);
		}
		return list;
	}

	/**
	 * Releases all JDBC resources held by this instance.
	 *
	 * <p>FIX: the original closed only the ResultSet, leaking the
	 * PreparedStatement and the Connection. Close is idempotent per the
	 * JDBC spec, so calling this after {@link #findModeResult} is safe.
	 */
	public void releaseConn() {
		if (this.resultSet != null) {
			try {
				this.resultSet.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}
		if (this.pstmt != null) {
			try {
				this.pstmt.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}
		if (this.connection != null) {
			try {
				this.connection.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}
	}

	public static void main(String[] args) {
		System.out.println(new JdbcUtils().getConnection());
	}
}

 

转载于:https://my.oschina.net/shyloveliyi/blog/785812

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值