Spring Boot integration with Hive, using the Druid connection pool

  • Hive basics

# Start HiveServer2 in the background
>bin/hiveserver2 &
# Enter beeline
>bin/beeline
# The IP is the server where Hive is deployed; the default port is 10000. It can be changed via the hive.server2.thrift.port property in conf/hive-site.xml (see the fragment just after this block)
!connect jdbc:hive2://ip:port
>Enter username (empty by default)
>Enter password (empty by default)
# On a successful connection you will see the current version, e.g. "Beeline version 2.3.4 by Apache Hive". This should match the version of the hive-jdbc jar you import
# List databases
>show databases;
# Use the default database
>use default;
# You can now list tables, run queries, etc.
>show tables;
>select * from table_name limit 1,10;
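For reference, a minimal hive-site.xml fragment for the port property mentioned above (the value shown is just the default; adjust as needed):

<!-- conf/hive-site.xml -->
<property>
    <name>hive.server2.thrift.port</name>
    <value>10000</value>
</property>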


  • Add hive-jdbc to the pom
 <!-- Hive JDBC dependency -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.4</version>
            <exclusions>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-shims</artifactId>
                </exclusion>
                <exclusion>
                    <artifactId>jasper-compiler</artifactId>
                    <groupId>tomcat</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>jasper-runtime</artifactId>
                    <groupId>tomcat</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-slf4j-impl</artifactId>
                    <groupId>org.apache.logging.log4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <groupId>tomcat</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>ch.qos.logback</groupId>
                    <artifactId>logback-classic</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.eclipse.jetty.orbit</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.mortbay.jetty</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
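        <!-- tools.jar from the local JDK; typically needed because Hadoop artifacts pulled in by hive-jdbc declare a jdk.tools dependency -->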
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
  • Configuration file
# Hive data source
spring.datasource.druid.hive.name=hive
spring.datasource.druid.hive.url=jdbc:hive2://192.168.1.12:10000/default
spring.datasource.druid.hive.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.druid.hive.username=
spring.datasource.druid.hive.password=
spring.datasource.druid.hive.driver-class-name=org.apache.hive.jdbc.HiveDriver
spring.datasource.druid.hive.initialSize=3
spring.datasource.druid.hive.minIdle=1
spring.datasource.druid.hive.maxActive=20
spring.datasource.druid.hive.maxWait=60000
spring.datasource.druid.hive.timeBetweenEvictionRunsMillis=60000
spring.datasource.druid.hive.minEvictableIdleTimeMillis=30000
spring.datasource.druid.hive.validationQuery=select 1
spring.datasource.druid.hive.testWhileIdle=true
spring.datasource.druid.hive.testOnBorrow=false
spring.datasource.druid.hive.testOnReturn=false
spring.datasource.druid.hive.poolPreparedStatements=true
spring.datasource.druid.hive.maxOpenPreparedStatement=20
# Important: filters must not include wall, otherwise you get: Failed to obtain JDBC Connection: dbType not support
spring.datasource.druid.hive.filters=stat
  • Add the data source in Spring Boot

import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.SQLException;

@Configuration
@ConfigurationProperties(prefix = "spring.datasource.druid.hive")
@Data
public class HiveJdbcConfiguration {
    private static final Logger logger = LoggerFactory.getLogger(HiveJdbcConfiguration.class);
    private String url;
    // must be named "username" so spring.datasource.druid.hive.username binds correctly
    private String username;
    private String password;
    private String driverClassName;
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private int maxWait;
    private int timeBetweenEvictionRunsMillis;
    private int minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxOpenPreparedStatement;
    private String filters;

    /**
     * Hive data source configuration.
     *
     * @return the Hive DataSource
     */
    @Bean(name = "hiveDataSource")
    @Qualifier("hiveDataSource")
    public DataSource hiveDataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);

        // pool configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxOpenPreparedStatements(maxOpenPreparedStatement);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            logger.error("Druid configuration initialization filter error.", e);
        }
        return datasource;
    }

    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
  • Test

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.List;
import java.util.Map;

@Service
@Slf4j
public class HiveTestService {
    @Autowired
    @Qualifier("hiveJdbcTemplate")
    private JdbcTemplate jdbcTemplate;

    public void test2() {
        List<Map<String, Object>> datalist = jdbcTemplate.queryForList("select * from t_user limit 1,10");
        // log.info expects a String; use a format placeholder for the int size
        log.info("query returned {} rows", datalist.size());
    }
    
    // Raw JDBC test
    public void test1() {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // try-with-resources so the connection, statement and result set are closed
            try (Connection conn = DriverManager.getConnection("jdbc:hive2://192.168.1.12:10000/default", "", "");
                 Statement stmt = conn.createStatement();
                 ResultSet resultSet = stmt.executeQuery("select * from t_user limit 1,10")) {
                int columns = resultSet.getMetaData().getColumnCount();
                int rowIndex = 1;
                while (resultSet.next()) {
                    for (int i = 1; i <= columns; i++) {
                        System.out.println("RowIndex: " + rowIndex + ", ColumnIndex: " + i + ", ColumnValue: " + resultSet.getString(i));
                    }
                    rowIndex++;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

@EnableTransactionManagement
@SpringBootApplication
public class HiveApplication {
    public static void main(String[] args) {
        SpringApplicationBuilder builder = new SpringApplicationBuilder(HiveApplication.class);
        // addCommandLineProperties must be set before run() to take effect
        builder.addCommandLineProperties(false);
        builder.bannerMode(Banner.Mode.LOG).run(args);
        HiveTestService hiveTestService = (HiveTestService) SpringContextHolder.getBean("hiveTestService");
        //hiveTestService.test1();
        hiveTestService.test2();
    }
}

Note: for Hive SQL queries, do not pass parameters via a parameter map; concatenate the values into the SQL string directly. Otherwise you will get: ERROR c.a.d.p.DruidPooledPreparedStatement - getMaxFieldSize error
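Since bind parameters are off the table, any value concatenated into the SQL should at least be sanitized. A minimal sketch follows; the escapeHiveString helper and the query are illustrative examples, not part of the original setup:

public class HiveSqlUtil {

    // Hypothetical helper: escape backslashes and single quotes so a concatenated
    // value cannot break out of a HiveQL string literal. This is a mitigation,
    // not a full defense against SQL injection; prefer whitelisting values.
    public static String escapeHiveString(String value) {
        return value.replace("\\", "\\\\").replace("'", "\\'");
    }
}

// Example usage with the hiveJdbcTemplate from above:
// String name = HiveSqlUtil.escapeHiveString(userInput);
// List<Map<String, Object>> rows =
//         jdbcTemplate.queryForList("select * from t_user where name = '" + name + "' limit 10");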
