import com.alibaba.fastjson.JSONObject;
import com.mx.config.mysql.MysqlConfig;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.rdd.JdbcRDD;
import scala.reflect.ClassManifestFactory$;
import scala.runtime.AbstractFunction0;
import scala.runtime.AbstractFunction1;
import java.io.Serializable;
import java.sql.*;
import java.util.Properties;
/**
 * MySQL utility class: reads MySQL tables into Spark RDDs and runs
 * insert/update statements over JDBC.
 * 2021/10/8 13:16
 * @author lidd
 */
public class MysqlUtils implements Serializable {

    private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(MysqlUtils.class);

    // NOTE(review): mutable static configuration written from the constructor —
    // the last-constructed instance wins. Kept as-is for interface compatibility.
    static String MYSQL_DRIVER;
    static String MYSQL_CONNECTION_URL;
    static String MYSQL_USERNAME;
    static String MYSQL_PWD;
    private static JavaSparkContext sc = null;

    /**
     * Captures the Spark context and copies connection settings from {@link MysqlConfig}
     * into the static fields used by every query method.
     *
     * @param sc the shared JavaSparkContext used by {@link #getData(String)}
     */
    public MysqlUtils(JavaSparkContext sc) {
        MysqlUtils.sc = sc;
        MYSQL_DRIVER = MysqlConfig.MYSQL_DRIVER;
        MYSQL_CONNECTION_URL = MysqlConfig.MYSQL_CONNECTION_URL;
        MYSQL_USERNAME = MysqlConfig.MYSQL_USERNAME;
        MYSQL_PWD = MysqlConfig.MYSQL_PWD;
    }

    /**
     * Reads rows from MySQL into a Spark RDD of JSON objects via {@link JdbcRDD}.
     *
     * <p>NOTE(review): the hard-coded bounds (0, 10) and 2 partitions mean the SQL
     * must contain two '?' placeholders for JdbcRDD's partition range — confirm
     * callers pass such a query.
     *
     * @param sql partitionable SELECT statement (with two '?' range placeholders)
     * @return RDD of one JSONObject per row, keyed by column label
     */
    public JavaRDD<JSONObject> getData(String sql) {
        DbConnection dbConnection = new DbConnection(MYSQL_DRIVER, MYSQL_CONNECTION_URL, MYSQL_USERNAME, MYSQL_PWD);
        JdbcRDD<JSONObject> jdbcRDD = new JdbcRDD<>(sc.sc(), dbConnection, sql, 0, 10, 2, new MapResult(), ClassManifestFactory$.MODULE$.fromClass(JSONObject.class));
        return JavaRDD.fromRDD(jdbcRDD, ClassManifestFactory$.MODULE$.fromClass(JSONObject.class));
    }

    /**
     * Executes an INSERT statement and returns the generated auto-increment key.
     *
     * <p>Fix: the original leaked the ResultSet/PreparedStatement/Connection on the
     * no-generated-key early return and on any exception; try-with-resources now
     * closes them on every path.
     *
     * <p>SECURITY(review): callers pass fully-built SQL strings — if any part comes
     * from untrusted input this is injectable; prefer parameterized statements.
     *
     * @param sql complete INSERT statement
     * @return the generated key, or -1 when none was produced or the insert failed
     */
    public Integer insert(String sql) {
        int autoIncKey = -1;
        DbConnection dbConnection = new DbConnection(MYSQL_DRIVER, MYSQL_CONNECTION_URL, MYSQL_USERNAME, MYSQL_PWD);
        try (Connection conn = dbConnection.apply();
             PreparedStatement pstmt = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            pstmt.executeUpdate();
            try (ResultSet rs = pstmt.getGeneratedKeys()) {
                if (rs.next()) {
                    autoIncKey = rs.getInt(1);
                }
            }
        } catch (Exception e) {
            LOGGER.error("insert failed: " + sql, e);
        }
        return autoIncKey;
    }

    /**
     * Executes an INSERT statement without returning the generated key.
     *
     * <p>Fix: the original returned {@code Statement.execute(sql)}, which is
     * {@code false} for DML (it is {@code true} only when the first result is a
     * ResultSet), so success was reported as {@code false}. Now uses
     * {@code executeUpdate} and sets the flag explicitly. Resources are closed via
     * try-with-resources (the original leaked the Statement, and the Connection on
     * exception).
     *
     * @param sql complete INSERT statement
     * @return true when the statement executed without error
     */
    public Boolean insertBak(String sql) {
        boolean isSuccess = false;
        DbConnection dbConnection = new DbConnection(MYSQL_DRIVER, MYSQL_CONNECTION_URL, MYSQL_USERNAME, MYSQL_PWD);
        try (Connection conn = dbConnection.apply();
             Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(sql);
            isSuccess = true;
        } catch (Exception e) {
            LOGGER.error("insertBak failed: " + sql, e);
        }
        return isSuccess;
    }

    /**
     * Executes an UPDATE statement.
     *
     * <p>Fix: same two defects as {@link #insertBak(String)} — {@code execute()}
     * returns {@code false} for DML, and the Statement/Connection leaked. Now uses
     * {@code executeUpdate} plus try-with-resources.
     *
     * @param updateRunningInfoSql complete UPDATE statement
     * @return true when the statement executed without error
     */
    public Boolean update(String updateRunningInfoSql) {
        boolean isSuccess = false;
        DbConnection dbConnection = new DbConnection(MYSQL_DRIVER, MYSQL_CONNECTION_URL, MYSQL_USERNAME, MYSQL_PWD);
        try (Connection conn = dbConnection.apply();
             Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(updateRunningInfoSql);
            isSuccess = true;
        } catch (Exception e) {
            LOGGER.error("update failed: " + updateRunningInfoSql, e);
        }
        return isSuccess;
    }

    /**
     * Serializable connection factory handed to JdbcRDD; opens a new JDBC
     * connection on each {@link #apply()} call (runs on Spark executors, hence
     * Serializable and no shared Connection field).
     */
    static class DbConnection extends AbstractFunction0<Connection> implements Serializable {
        private final String driverClassName;
        private final String connectionUrl;
        private final String userName;
        private final String password;

        public DbConnection(String driverClassName, String connectionUrl, String userName, String password) {
            this.driverClassName = driverClassName;
            this.connectionUrl = connectionUrl;
            this.userName = userName;
            this.password = password;
        }

        /**
         * Opens a new connection. Returns {@code null} when the driver cannot be
         * loaded or the connection fails (preserved original behavior; callers'
         * broad catch blocks absorb the resulting NPE).
         */
        @Override
        public Connection apply() {
            try {
                Class.forName(driverClassName);
            } catch (ClassNotFoundException e) {
                LOGGER.error("Failed to load driver class", e);
            }
            Properties properties = new Properties();
            properties.setProperty("user", userName);
            properties.setProperty("password", password);
            Connection connection = null;
            try {
                connection = DriverManager.getConnection(connectionUrl, properties);
            } catch (SQLException e) {
                LOGGER.error("Connection failed", e);
            }
            return connection;
        }
    }

    /**
     * Row mapper for JdbcRDD: converts one ResultSet row into a JSONObject of
     * column-label -> string-value pairs.
     */
    static class MapResult extends AbstractFunction1<ResultSet, JSONObject> implements Serializable {
        @Override
        public JSONObject apply(ResultSet resultSet) {
            JSONObject jsonObj = new JSONObject();
            try {
                ResultSetMetaData metaData = resultSet.getMetaData();
                int columnCount = metaData.getColumnCount();
                for (int i = 1; i <= columnCount; i++) {
                    String columnName = metaData.getColumnLabel(i);
                    // Read by index: correct even when two columns share a label.
                    jsonObj.put(columnName, resultSet.getString(i));
                }
            } catch (SQLException e) {
                LOGGER.error("Failed to map result row", e);
            }
            return jsonObj;
        }
    }
}
// Spark reads MySQL — implementation notes. First published 2022-01-04 09:45:34.