Connecting a Spring Boot project to a Hive database and querying data

Step 1: Add the Hadoop and Hive dependencies to pom.xml

        <!-- Hadoop dependencies -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>2.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-common</artifactId>
            <version>2.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-configuration-processor</artifactId>
            <optional>true</optional>
        </dependency>

        <!-- Hive dependencies -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.1.1</version>
            <exclusions>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid</artifactId>
            <version>1.1.22</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
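Two notes on this list: the Jetty exclusion keeps the old org.eclipse.jetty.aggregate artifacts that hive-jdbc drags in from clashing with Spring Boot's own servlet stack, and the hive-jdbc version should track the HiveServer2 instance you connect to as closely as possible.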

Step 2: Add the Hive driver parameters to application.yml

# Configure the data sources (here we only test the Hive source)
spring:
  datasource:
    hive: # Hive data source
      url: jdbc:hive2://10.10.0.17:10000/bks_bigdata
      type: com.alibaba.druid.pool.DruidDataSource
      username: root
      password: 123123
      driver-class-name: org.apache.hive.jdbc.HiveDriver
    commonConfig: # shared connection-pool settings, applied to all data sources
      initialSize: 1
      minIdle: 1
      maxIdle: 5 # deprecated in Druid; kept here but not read by the config class below
      maxActive: 50
      maxWait: 10000
      timeBetweenEvictionRunsMillis: 10000
      minEvictableIdleTimeMillis: 300000
      validationQuery: select 'x'
      testWhileIdle: true
      testOnBorrow: false
      testOnReturn: false
      poolPreparedStatements: true
      maxOpenPreparedStatements: 20
      filters: stat
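The URL follows the HiveServer2 pattern jdbc:hive2://<host>:<port>/<database>, with 10000 being HiveServer2's default port. Note that spring.datasource.hive and spring.datasource.commonConfig are custom keys: Spring Boot will not auto-configure a DataSource from them, which is exactly what the configuration class in the next step is for.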

Step 3: Write a configuration class that builds the data source

package com.bigdata.hive.config;

import com.alibaba.druid.pool.DruidDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.SQLException;

/**
 * @author liuxingying
 * @description Hive data source configuration
 * @date 2020/9/8
 */
@Configuration
public class HiveJdbcConfig {

    private static final Logger logger = LoggerFactory.getLogger(HiveJdbcConfig.class);

    @Value("${spring.datasource.hive.url}")
    private String url;

    @Value("${spring.datasource.hive.driver-class-name}")
    private String driver;

    @Value("${spring.datasource.hive.username}")
    private String user;

    @Value("${spring.datasource.hive.password}")
    private String password;

    @Value("${spring.datasource.commonConfig.initialSize}")
    private int initialSize;

    @Value("${spring.datasource.commonConfig.minIdle}")
    private int minIdle;

    @Value("${spring.datasource.commonConfig.maxActive}")
    private int maxActive;

    @Value("${spring.datasource.commonConfig.maxWait}")
    private int maxWait;

    @Value("${spring.datasource.commonConfig.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;

    @Value("${spring.datasource.commonConfig.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;

    @Value("${spring.datasource.commonConfig.validationQuery}")
    private String validationQuery;

    @Value("${spring.datasource.commonConfig.testWhileIdle}")
    private boolean testWhileIdle;

    @Value("${spring.datasource.commonConfig.testOnBorrow}")
    private boolean testOnBorrow;

    @Value("${spring.datasource.commonConfig.testOnReturn}")
    private boolean testOnReturn;

    @Value("${spring.datasource.commonConfig.poolPreparedStatements}")
    private boolean poolPreparedStatements;

    @Value("${spring.datasource.commonConfig.maxOpenPreparedStatements}")
    private int maxOpenPreparedStatements;

    @Value("${spring.datasource.commonConfig.filters}")
    private String filters;

    @Bean
    public DataSource dataSource(){
        DruidDataSource datasource = new DruidDataSource();
        // Data source connection properties
        datasource.setUrl(url);
        datasource.setDriverClassName(driver);
        datasource.setUsername(user);
        datasource.setPassword(password);
        // Shared pool properties
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxOpenPreparedStatements(maxOpenPreparedStatements);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            logger.error("Druid configuration initialization filter error.", e);
        }
        return datasource;
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource){
        return new JdbcTemplate(dataSource);
    }
}
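As an aside, the spring-boot-configuration-processor dependency added in Step 1 only earns its keep when settings are bound through @ConfigurationProperties instead of one @Value per key. A minimal sketch of that alternative, assuming the same application.yml as above (the class name PoolCommonProperties and the reduced field set are illustrative; add the remaining keys the same way):

package com.bigdata.hive.config;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * Binds the spring.datasource.commonConfig block from application.yml
 * in one place instead of one @Value annotation per key.
 */
@Component
@ConfigurationProperties(prefix = "spring.datasource.common-config")
public class PoolCommonProperties {

    private int initialSize;
    private int maxActive;
    private long maxWait;

    public int getInitialSize() { return initialSize; }
    public void setInitialSize(int initialSize) { this.initialSize = initialSize; }

    public int getMaxActive() { return maxActive; }
    public void setMaxActive(int maxActive) { this.maxActive = maxActive; }

    public long getMaxWait() { return maxWait; }
    public void setMaxWait(long maxWait) { this.maxWait = maxWait; }
}

Relaxed binding maps the camelCase keys under spring.datasource.commonConfig onto these fields, and the processor generates the metadata that gives you IDE completion for them.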

Step 4: Connect from the DAO layer

package com.bigdata.hive.dao;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;

import java.util.List;
import java.util.Map;

/**
 * @author liuxingying
 * @description DAO for querying Hive through JdbcTemplate
 * @date 2020/9/8
 */
@Repository
public class TestHiveDao {
    @Autowired
    @Qualifier("jdbcTemplate")
    private JdbcTemplate jdbcTemplate;
    /**
     * Fetch test data from the Hive database.
     * @return every row of ads_city_exam_rate as a column-name -> value map
     */
    public List<Map<String, Object>> test() {
        String sql = "SELECT * FROM ads_city_exam_rate";
        return jdbcTemplate.queryForList(sql);
    }

}
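The Hive driver qualifies every column label with the table name, which is why the keys in the Step 5 output look like ads_city_exam_rate.id. One common workaround is to alias the columns explicitly; a hypothetical extra method for TestHiveDao (the method name, parameter, and column choice are mine):

    /**
     * Example: fetch the gear rates for one exam, with plain column labels.
     * Aliasing each column strips the table-name prefix from the map keys.
     */
    public List<Map<String, Object>> findByExamId(String examId) {
        String sql = "SELECT exam_id AS exam_id, subject_name AS subject_name, "
                   + "gear_name AS gear_name, gear_rate AS gear_rate "
                   + "FROM ads_city_exam_rate WHERE exam_id = ?";
        return jdbcTemplate.queryForList(sql, examId);
    }

Alternatively, the HiveServer2 setting hive.resultset.use.unique.column.names=false should disable the prefixing globally.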

Step 5: Test

package com.bigdata.hive;

import com.bigdata.hive.dao.TestHiveDao;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;

import java.util.List;
import java.util.Map;

@SpringBootTest
class HiveApplicationTests {
    @Autowired
    private TestHiveDao hiveDao;

    @Test
    void hiveTest() {
        List<Map<String, Object>> hiveMaps = hiveDao.test();
        for (Map<String, Object> hiveMap : hiveMaps) {
            System.out.println(hiveMap);
        }
    }

}
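Keep in mind that Druid builds its pool lazily (on the first getConnection(), unless you call init() up front), so a wrong URL or an unreachable HiveServer2 usually only surfaces when hiveTest() fires its query rather than at context startup.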

PS: errors you may hit while testing

A successful request returns data like this:
[
    {
        ads_city_exam_rate.id=fcfe368d8f2845608201d9cf7bd32550,
        ads_city_exam_rate.exam_id=KS201809010000,
        ads_city_exam_rate.exam_name=重庆市一诊,
        ads_city_exam_rate.city_id=500100,
        ads_city_exam_rate.city_name=重庆市,
        ads_city_exam_rate.subject_id=XK0004,
        ads_city_exam_rate.subject_name=物理,
        ads_city_exam_rate.exam_people_num=94964,
        ads_city_exam_rate.gear_id=DW0002,
        ads_city_exam_rate.gear_name=良好,
        ads_city_exam_rate.gear_num=11681,
        ads_city_exam_rate.gear_rate=0.1230,
        ads_city_exam_rate.gmt_create=2020-09-08 15:19:01.284
    }
]

If instead the request fails with the following error:
The dir: /tmp/hive on HDFS should be writable. Current permissions are: rwxr-xr-x

You need to grant write permission on /tmp/hive. Go to the bin directory on the Hadoop server and run:
./hdfs dfs -chmod -R 777 /tmp
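Note that chmod -R 777 is fine on a throwaway test cluster; on anything shared, grant the narrowest permission that still makes /tmp/hive writable for the user running Hive queries.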