Preface: why this program exists. In production we had a table that needed to be joined against data sitting on HDFS (10 years of it), so I created an external table. Adding the partitions by hand would have been far too tedious, hence this program. Before writing it I searched Baidu hoping to borrow an existing one, found nothing, and so here it is.
1. First, a look at the pom
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.dtstack</groupId>
    <artifactId>hive_add_partition</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>1.1.0</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Leftover from a Scala project template; this project has no
                 Scala sources, so this plugin can likely be removed -->
            <plugin>
                <groupId>org.scala-tools</groupId>
                <artifactId>maven-scala-plugin</artifactId>
                <version>2.15.2</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.6.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.3</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.19</version>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
            <!-- Builds the runnable fat jar and sets the Main-Class -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.4.3</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <mainClass>com.dtstack.app.AddHivePartitons</mainClass>
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
        <defaultGoal>compile</defaultGoal>
    </build>
</project>
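With the shade plugin bound to the package phase and the manifest transformer setting the main class, a plain Maven build should spit out a directly runnable fat jar. A hedged sketch of the build-and-run commands, assuming the default target/ layout (the angle-bracket arguments are placeholders):

mvn clean package
java -jar target/hive_add_partition-1.0-SNAPSHOT.jar <tableName> <ip:port/database> <startTime> <endTime>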
2. Getting a Hive connection
package com.dtstack.utils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

/**
 * @Author :mitu
 * @Date :2022/06/01 9:50
 * @Version :1.0
 */
public class GetHiveConn {

    private static final Logger LOG = LoggerFactory.getLogger(GetHiveConn.class);

    public static Connection getHiveConn(String url, String userName, String passWord) {
        // HiveServer2 JDBC driver
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        Connection conn = null;
        try {
            // Load the driver class
            Class.forName(driverName);
            // Open the connection to HiveServer2; userName and passWord are
            // reserved parameters for clusters that need them
            conn = DriverManager.getConnection("jdbc:hive2://" + url, userName, passWord);
        } catch (ClassNotFoundException | SQLException e) {
            LOG.error("Failed to open Hive connection", e);
        }
        return conn;
    }
}
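If you want to sanity-check this helper before wiring it into the main program, here is a minimal, hypothetical smoke test; GetHiveConnDemo and the host:10000/default URL are my placeholders, not part of the original project:

package com.dtstack.utils;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class GetHiveConnDemo {
    public static void main(String[] args) throws Exception {
        // "host:10000/default" is a placeholder; getHiveConn returns null on failure
        Connection conn = GetHiveConn.getHiveConn("host:10000/default", "", "");
        if (conn == null) {
            System.err.println("Connection failed, see the log for details");
            return;
        }
        // List databases as a simple smoke test, then close everything
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show databases")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } finally {
            conn.close();
        }
    }
}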
3. Generating the date partitions
package com.dtstack.utils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

/**
 * @Author :mitu
 * @Date :2022/06/01 10:30
 * @Version :1.0
 */
public class GetHivePartitionsList {

    private static final Logger LOG = LoggerFactory.getLogger(GetHivePartitionsList.class);

    public static void main(String[] args) {
        String startTime = "20220601";
        String endTime = "20220701";
        List<String> times = getPartitionsList(startTime, endTime);
        for (String time : times) {
            System.out.println(time);
        }
    }

    /**
     * Returns every day from startTime to endTime (inclusive), formatted yyyyMMdd.
     */
    public static List<String> getPartitionsList(String startTime, String endTime) {
        // One formatter is enough for both parsing and output
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");
        List<String> list = new ArrayList<String>();
        try {
            // Parse the start and end dates
            Date startTimeParse = dateFormat.parse(startTime);
            Date endTimeParse = dateFormat.parse(endTime);
            // Wrap them in Calendar instances so we can step day by day
            Calendar tempStart = Calendar.getInstance();
            tempStart.setTime(startTimeParse);
            Calendar tempEnd = Calendar.getInstance();
            tempEnd.setTime(endTimeParse);
            // Walk forward one day at a time until we reach the end date
            while (tempStart.before(tempEnd)) {
                list.add(dateFormat.format(tempStart.getTime()));
                tempStart.add(Calendar.DAY_OF_YEAR, 1);
            }
            // The loop excludes the end date itself, so append it here
            list.add(dateFormat.format(tempEnd.getTime()));
        } catch (ParseException e) {
            LOG.error("Failed to parse date range", e);
        }
        return list;
    }
}
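As a side note, the same inclusive date-range expansion is shorter with java.time, which is available on the Java 8 target the pom already declares. A sketch of an equivalent, not code from the original program:

package com.dtstack.utils;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;

public class GetHivePartitionsListJavaTime {
    private static final DateTimeFormatter FMT = DateTimeFormatter.ofPattern("yyyyMMdd");

    // Returns every day from startTime to endTime inclusive, formatted yyyyMMdd
    public static List<String> getPartitionsList(String startTime, String endTime) {
        List<String> list = new ArrayList<>();
        LocalDate start = LocalDate.parse(startTime, FMT);
        LocalDate end = LocalDate.parse(endTime, FMT);
        for (LocalDate d = start; !d.isAfter(end); d = d.plusDays(1)) {
            list.add(d.format(FMT));
        }
        return list;
    }
}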
4. The main class
package com.dtstack.app;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import static com.dtstack.utils.GetHiveConn.getHiveConn;
import static com.dtstack.utils.GetHivePartitionsList.getPartitionsList;

/**
 * @Author :mitu
 * @Date :2022/06/01 9:47
 * @Version :1.0
 */
public class AddHivePartitons {

    private static final Logger LOG = LoggerFactory.getLogger(AddHivePartitons.class);

    public static void main(String[] args) {
        // Table to add partitions to, plus connection and date-range arguments
        String tableName = args[0]; // xxx
        String url = args[1];       // "xxx:8191/xxx"
        String startTime = args[2]; // 20220601
        String endTime = args[3];   // 20220701
        // Open the Hive connection; bail out if it could not be opened
        Connection hiveConn = getHiveConn(url, "", "");
        if (hiveConn == null) {
            LOG.error("Could not connect to Hive, aborting");
            return;
        }
        // Build the list of partition dates to create
        List<String> times = getPartitionsList(startTime, endTime);
        // Loop over the list and build one ADD PARTITION statement per date
        for (String time : times) {
            String sql = "alter table " + tableName + " add if not exists PARTITION (pt=" + time + ") " +
                    "LOCATION 'hdfs://xxx/user/hive/warehouse/xxx.db/" + tableName + "/pt=" + time + "'";
            // String delSql = "alter table xxx drop partition(pt=" + time + ")";
            // System.out.println(time);
            // System.out.println(sql);
            try (PreparedStatement preparedStatement = hiveConn.prepareStatement(sql)) {
                // execute() returns false for DDL (there is no ResultSet), so
                // its return value is not a success flag; if no exception is
                // thrown, the partition was added
                preparedStatement.execute();
                System.out.println("Added partition " + time + " successfully");
            } catch (SQLException e) {
                LOG.error("Failed to add partition " + time, e);
            }
        }
        // Close the connection once, after all partitions are processed
        // (closing it inside the loop would break every iteration after the first)
        try {
            hiveConn.close();
        } catch (SQLException e) {
            LOG.error("Failed to close Hive connection", e);
        }
    }
}
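One possible optimization, not in the original program: Hive's ALTER TABLE ... ADD IF NOT EXISTS accepts several PARTITION clauses in one statement, so for very large ranges (10 years is roughly 3,650 days) the clauses can be batched instead of issuing one statement per day. A minimal sketch, keeping the hard-coded hdfs://xxx path placeholder from above (BatchedPartitionSql is a hypothetical helper):

package com.dtstack.app;

import java.util.List;

public class BatchedPartitionSql {
    // Builds a single ALTER TABLE ... ADD IF NOT EXISTS statement that adds
    // every partition in the list at once (Hive separates the specs by whitespace)
    public static String build(String tableName, List<String> times) {
        StringBuilder sql = new StringBuilder("alter table ").append(tableName).append(" add if not exists");
        for (String pt : times) {
            sql.append(" PARTITION (pt=").append(pt).append(")")
               .append(" LOCATION 'hdfs://xxx/user/hive/warehouse/xxx.db/")
               .append(tableName).append("/pt=").append(pt).append("'");
        }
        return sql.toString();
    }
}

Batching in chunks of, say, a few hundred partitions per statement keeps each statement at a manageable size while cutting the number of round trips from thousands to a handful.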
5. Notes
1. Adjust the SQL to your environment, in particular the hard-coded LOCATION path;
2. The arguments are, in order: tableName, url, startTime, endTime;
3. The url only needs ip:port/database (the jdbc:hive2:// prefix is added inside GetHiveConn);
4. startTime is expected as yyyyMMdd; if you want to pass another format, modify the GetHivePartitionsList class;
5. About GetHivePartitionsList: pass 20220501 and 20220505 and it returns (20220501, 20220502, 20220503, ...). I used it to add 2014-2022 in one go and it held up fine.
6. For anything else, feel free to contact me.