Integrating Hive into a Maven Project

This article walks through configuring a Hive data source in a Spring Boot project: add the hive-jdbc dependency to pom.xml and exclude the transitive packages that are not needed, set the connection-pool parameters in application.yml, and create a @Configuration Java class that builds a Druid-backed Hive JdbcTemplate, integrating Hive with the Spring Boot data-source setup.

 

Step 1

Add the hive-jdbc dependency to pom.xml

        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.8</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.logging.log4j</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.antlr</groupId>
                    <artifactId>antlr-runtime</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.ant</groupId>
                    <artifactId>ant</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-shims</artifactId>
                </exclusion>

                <exclusion>
                    <groupId>io.netty</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.codehaus.jackson</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>commons-httpclient</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>commons-lang</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hbase</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.tdunning</groupId>
                    <artifactId>json</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>jline</groupId>
                    <artifactId>jline</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.datanucleus</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>io.dropwizard.metrics</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.github.joshelser</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.google.inject</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.google.inject.extensions</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-llap-server</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-metastore</artifactId>
                </exclusion>
                <exclusion>
                    <artifactId>jasper-compiler</artifactId>
                    <groupId>tomcat</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>jasper-runtime</artifactId>
                    <groupId>tomcat</groupId>
                </exclusion>
                <exclusion>
                    <groupId>javax.servlet.jsp</groupId>
                    <artifactId>jsp-api</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>net.sf.jpam</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.jamon</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>

                <exclusion>
                    <groupId>org.eclipse.jetty.orbit</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.mortbay.jetty</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>tomcat</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.sun.jersey</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.parquet</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.derby</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.orc</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
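
The configuration class in step 3 uses Druid's DruidDataSource, Spring's JdbcTemplate, and Lombok's @Slf4j, so those libraries must also be on the classpath. If your project does not already declare them, a sketch of the extra dependencies is shown below; the Druid version is only an example, and spring-boot-starter-jdbc assumes its version is managed by the Spring Boot parent.

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid</artifactId>
            <!-- Example version; use the one your project standardizes on -->
            <version>1.2.8</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <scope>provided</scope>
        </dependency>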

 

Step 2

Add the configuration to the application.yml file

hive:
  url: jdbc:hive2://192.168.0.215:10000/default
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  type: com.alibaba.druid.pool.DruidDataSource
  user:
  password:
  # The settings below are additional connection-pool options applied to the data source above
  # Initial, minimum, and maximum pool size
  initialSize: 1
  minIdle: 3
  maxActive: 20
  # Maximum time to wait when acquiring a connection from the pool
  maxWait: 60000
  # How often to run the eviction check that closes idle connections, in milliseconds
  timeBetweenEvictionRunsMillis: 60000
  # Minimum time a connection must stay idle in the pool before it can be evicted, in milliseconds
  minEvictableIdleTimeMillis: 30000
  validationQuery: select 1
  testWhileIdle: true
  testOnBorrow: false
  testOnReturn: false
  # Enable PSCache and set its size per connection
  poolPreparedStatements: true
  maxPoolPreparedStatementPerConnectionSize: 20

Step 3

Create the Java configuration class that reads the hive.* properties above and builds the Druid-backed JdbcTemplate


import com.alibaba.druid.pool.DruidDataSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

@Configuration
@Slf4j
public class HiveDruidConfig {
    @Value("${hive.url}")
    private String url;
    @Value("${hive.user}")
    private String user;
    @Value("${hive.password}")
    private String password;
    @Value("${hive.driver-class-name}")
    private String driverClassName;
    @Value("${hive.initialSize}")
    private int initialSize;
    @Value("${hive.minIdle}")
    private int minIdle;
    @Value("${hive.maxActive}")
    private int maxActive;
    @Value("${hive.maxWait}")
    private int maxWait;
    @Value("${hive.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;
    @Value("${hive.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;
    @Value("${hive.validationQuery}")
    private String validationQuery;
    @Value("${hive.testWhileIdle}")
    private boolean testWhileIdle;
    @Value("${hive.testOnBorrow}")
    private boolean testOnBorrow;
    @Value("${hive.testOnReturn}")
    private boolean testOnReturn;
    @Value("${hive.poolPreparedStatements}")
    private boolean poolPreparedStatements;
    @Value("${hive.maxPoolPreparedStatementPerConnectionSize}")
    private int maxPoolPreparedStatementPerConnectionSize;

    // Getters and setters for the properties are omitted here
    @Bean(name = "hiveDruidTemplate")
    public JdbcTemplate hiveDruidTemplate() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl(url);
        dataSource.setUsername(user);
        dataSource.setPassword(password);
        dataSource.setDriverClassName(driverClassName);

        // pool configuration
        dataSource.setInitialSize(initialSize);
        dataSource.setMinIdle(minIdle);
        dataSource.setMaxActive(maxActive);
        dataSource.setMaxWait(maxWait);
        dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        dataSource.setValidationQuery(validationQuery);
        dataSource.setTestWhileIdle(testWhileIdle);
        dataSource.setTestOnBorrow(testOnBorrow);
        dataSource.setTestOnReturn(testOnReturn);
        dataSource.setPoolPreparedStatements(poolPreparedStatements);
        dataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        log.info("-------------------Hive data source created-------------------");
        return new JdbcTemplate(dataSource);
    }

}
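
With the hiveDruidTemplate bean in place, it can be injected anywhere Hive needs to be queried. Below is a minimal usage sketch, not part of the original configuration: the HiveQueryService class, its method names, and the queries are hypothetical and only illustrate calling the template.

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

// Hypothetical service that queries Hive through the hiveDruidTemplate bean defined above
@Service
public class HiveQueryService {

    private final JdbcTemplate hiveDruidTemplate;

    // Inject by bean name so it does not clash with any default JdbcTemplate in the context
    public HiveQueryService(@Qualifier("hiveDruidTemplate") JdbcTemplate hiveDruidTemplate) {
        this.hiveDruidTemplate = hiveDruidTemplate;
    }

    // List the tables in the configured database (default)
    public List<String> listTables() {
        return hiveDruidTemplate.queryForList("show tables", String.class);
    }

    // Run a HiveQL query and return each row as a column-name/value map
    public List<Map<String, Object>> select(String hiveQl) {
        return hiveDruidTemplate.queryForList(hiveQl);
    }
}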

 
