Spring Boot integration with Hive

1 Add the dependencies (pom.xml)

        <dependency>
            <groupId>org.apache.avro</groupId>
            <artifactId>avro</artifactId>
            <version>1.8.2</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>25.1-jre</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.4</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop</artifactId>
            <version>2.5.0.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>1.2.1</version>
            <exclusions>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <version>1.1.10</version>
        </dependency>
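
Before adding the Spring wiring, it can help to confirm that the hive-jdbc driver above can actually reach HiveServer2 with plain JDBC. The following is a minimal sketch, not part of the original article: the class name is arbitrary, and it assumes HiveServer2 listens on 127.0.0.1:10000 with a test02 database and a passwordless hive user, matching the configuration in the next step.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcSmokeTest {
    public static void main(String[] args) throws Exception {
        // Loads the driver shipped with the hive-jdbc dependency above
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://127.0.0.1:10000/test02", "hive", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}

If this prints the table list, the driver and the server side are working, and any later failure is in the Spring Boot wiring.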

2 Add the configuration

Add a custom hive section to application.yml. These properties are not picked up by Spring Boot's spring.datasource auto-configuration; they are bound by the @ConfigurationProperties class in step 3.

hive:
  url: jdbc:hive2://127.0.0.1:10000/test02
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  type: com.alibaba.druid.pool.DruidDataSource
  user: hive
  password:
  # Additional connection pool settings below; they apply to all of the data sources above
  # Initial size, minimum idle, maximum active
  initialSize: 1
  minIdle: 3
  maxActive: 20
  # Maximum time to wait for a connection from the pool
  maxWait: 60000
  # How often the eviction thread runs to detect idle connections that should be closed, in milliseconds
  timeBetweenEvictionRunsMillis: 60000
  # Minimum time a connection must sit idle in the pool before it can be evicted, in milliseconds
  minEvictableIdleTimeMillis: 30000
  validationQuery: select 1
  testWhileIdle: true
  testOnBorrow: false
  testOnReturn: false
  # Enable PSCache and set the PSCache size per connection
  poolPreparedStatements: true
  maxPoolPreparedStatementPerConnectionSize: 20

3 Data source connection pool configuration

Bind the hive.* properties from step 2 to a configuration class and expose a DruidDataSource plus a JdbcTemplate built on top of it.

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import com.alibaba.druid.pool.DruidDataSource;

@Configuration
@ConfigurationProperties(prefix = "hive")
public class HiveDruidConfig {

    private String url;
    private String user;
    private String password;
    private String driverClassName;
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private int maxWait;
    private int timeBetweenEvictionRunsMillis;
    private int minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxPoolPreparedStatementPerConnectionSize;

    @Bean(name = "hiveDruidDataSource")
    @Qualifier("hiveDruidDataSource")
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(user);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);

        // pool configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(
            maxPoolPreparedStatementPerConnectionSize);
        return datasource;
    }

    // Getters and setters for the fields above are omitted here; @ConfigurationProperties needs them to bind the hive.* values

    @Bean(name = "hiveDruidTemplate")
    public JdbcTemplate hiveDruidTemplate(
        @Qualifier("hiveDruidDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

}
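
The dependency list also pulls in mysql-connector-java and spring-boot-starter-jdbc, and the pool comment in step 2 refers to "all of the data sources above", so the application presumably has a regular relational data source next to Hive. The sketch below is only one hedged way such a primary MySQL pool might be declared; the class name, bean names and the spring.datasource.* properties are assumptions, not part of the original article.

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.core.JdbcTemplate;

import com.alibaba.druid.pool.DruidDataSource;

@Configuration
public class MysqlDruidConfig {

    // Binds spring.datasource.* (url, username, password, driver-class-name)
    // onto a Druid pool; marked @Primary so unqualified DataSource injection
    // points get MySQL rather than Hive.
    @Primary
    @Bean(name = "mysqlDruidDataSource")
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource mysqlDruidDataSource() {
        return new DruidDataSource();
    }

    @Primary
    @Bean(name = "mysqlJdbcTemplate")
    public JdbcTemplate mysqlJdbcTemplate(
        @Qualifier("mysqlDruidDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}

With two DataSource beans in the context, anything that injects a DataSource or JdbcTemplate without a qualifier gets the @Primary MySQL one, while the Hive template always has to be requested by its bean name, as in step 4.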

4 Invocation

Inject the JdbcTemplate defined in step 3 and query Hive with it, for example from a test:

 @Autowired
 @Qualifier("hiveDruidTemplate")
 private JdbcTemplate hiveJdbcTemplate;

 @Test
 public void fangfa1() {
       // Query the deliver table through the Hive template defined in step 3
       String sql = "select * from deliver";
       List<Map<String, Object>> maps = hiveJdbcTemplate.queryForList(sql);
       System.out.println(maps);
 }
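
Outside of a test, the same template would usually be wrapped in a service or DAO. The sketch below is illustrative only (the class and method names are made up); it relies solely on the hiveDruidTemplate bean from step 3.

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

@Service
public class HiveQueryService {

    private final JdbcTemplate hiveJdbcTemplate;

    public HiveQueryService(@Qualifier("hiveDruidTemplate") JdbcTemplate hiveJdbcTemplate) {
        this.hiveJdbcTemplate = hiveJdbcTemplate;
    }

    // Lists the tables in the Hive database the connection URL points at
    public List<String> listTables() {
        return hiveJdbcTemplate.queryForList("show tables", String.class);
    }

    // Runs an arbitrary SELECT and returns each row as a column-name/value map
    public List<Map<String, Object>> select(String sql) {
        return hiveJdbcTemplate.queryForList(sql);
    }
}

Keep in mind that HiveServer2 typically executes queries as batch jobs, so responses are much slower than from a regular RDBMS.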

 

 
