Connecting to MySQL, Hive, and HBase from Java: The Complete Guide

Creating the project

Maven -> check Create from archetype -> maven-archetype-quickstart -> NEXT
New Project: GroupId: cn.kgc.mysql, ArtifactId: java_mysql -> NEXT -> NEXT -> Finish

Importing dependencies (pom)

Click Enable Auto Import so dependencies are imported automatically.

In the properties section of the pom file, set the Java version to 1.8.

Adjusting IDE settings

Project Structure -> Modules -> set the Language level to 8 (right-hand panel)
Settings -> Build, Execution, Deployment -> Compiler -> Java Compiler -> set the target bytecode version to 1.8

Configuring the resources folder

Under src -> main, create a folder named resources and mark it as a resources folder. From then on, any configuration file placed in this folder will be picked up when the Java code runs.
Project Structure -> Project Settings -> Modules -> select the resources folder, click Resources -> Apply -> OK

Connecting to MySQL from Java

Adding the dependency to the pom

Just add the mysql-connector-java dependency:

<dependency>
  <groupId>mysql</groupId>
  <artifactId>mysql-connector-java</artifactId>
  <version>5.1.38</version>
</dependency>
Full pom.xml:
<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>cn.wxj.mysql.jdbc</groupId>
  <artifactId>java_mysql</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>java_mysql</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.38</version>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
      <plugins>
        <!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
Creating the configuration file datasource.properties

Create a new file named datasource.properties under the resources folder.
Fill in the following four configuration entries:

driver=com.mysql.jdbc.Driver
url=jdbc:mysql://192.168.182.131:3306/kb06mysqltestdb?useUnicode=true&characterEncoding=utf8&useSSL=true
username=root
password=javakb10
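
Before wiring up the DAO classes below, it can be worth verifying connectivity with a few lines of plain JDBC. A minimal sketch using the values above (the class name ConnectionSmokeTest is made up for illustration; the driver class and URL are the ones from datasource.properties):

import java.sql.Connection;
import java.sql.DriverManager;

public class ConnectionSmokeTest {
    public static void main(String[] args) throws Exception {
        // load the driver and open a connection with the values from datasource.properties
        Class.forName("com.mysql.jdbc.Driver");
        try (Connection con = DriverManager.getConnection(
                "jdbc:mysql://192.168.182.131:3306/kb06mysqltestdb?useUnicode=true&characterEncoding=utf8&useSSL=true",
                "root", "javakb10")) {
            System.out.println("connected: " + !con.isClosed());
        }
    }
}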

Code implementation

BaseConfig class

package cn.wxj.mysql.jdbc;

import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BaseConfig {
    // Inner class Config; an instance holds the driver, url, and credentials loaded from the properties file
    private class Config{
        String driver;
        String url;
        String username;
        String password;
    }
    private Config config;

    // valid() checks whether the JDBC url is well formed
    private boolean valid(String url)  {
        Pattern p = Pattern.compile("jdbc:\\w+://((\\d{1,3}\\.){3}\\d{1,3}|\\w+):\\d{1,5}/\\w+");
        Matcher m = p.matcher(url);
        return m.matches();
    }

    // Instance initializer block: call init() to load the database configuration, then register the driver
    {
        try {
            init();
            Class.forName(config.driver);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Obtain a database Connection
    protected Connection getCon() throws SQLException {
        return DriverManager.getConnection(config.url,config.username,config.password);
    }

    // Generic close method for any AutoCloseable resources
    protected void close(AutoCloseable...acs){
        if(acs!=null){
            for (AutoCloseable ac : acs) {
                try {
                    ac.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    // init() loads the configuration into the config member of BaseConfig
    private void init() throws Exception {

        // Resolve the path of datasource.properties via the context class loader
        String path=Thread.currentThread().getContextClassLoader()
                .getResource("datasource.properties").getPath();

        // Parse the properties file with java.util.Properties
        Properties pro=new Properties();
        pro.load(new FileReader(path));
        String url=pro.getProperty("url");
        if(url==null|| !valid(url)){
            throw new Exception("no or invalid url exception");
        }
        config=new Config();
        config.url=url;
        config.driver = pro.getProperty("driver","com.mysql.jdbc.Driver");
        config.username=pro.getProperty("username","root");
        config.password=pro.getProperty("password","javakb10");
        pro.clear();

    }
}

BaseDao class

package cn.wxj.mysql.jdbc;

import java.io.BufferedReader;
import java.io.FileReader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class BaseDao extends BaseConfig {

    // Build a PreparedStatement and bind positional parameters (prevents SQL injection)
    private PreparedStatement getPst(Connection con, String sql, Object...params) throws SQLException {
        PreparedStatement pst=con.prepareStatement(sql);
        if(params.length>0){
            for (int i = 0; i < params.length; i++) {
                pst.setObject(i+1,params[i]);
            }
        }
        return  pst;
    }

    // Execute non-query statements (INSERT/UPDATE/DELETE/DDL)
    public Result exeNonQuery(String sql,Object...params){
        Connection con=null;
        PreparedStatement pst=null;
        try {
            con=getCon();
            pst=getPst(con,sql,params);
            return Result.Succeed(pst.executeUpdate());
        } catch (SQLException e) {
            e.printStackTrace();
            return Result.Fail();
        }finally {
            close(pst,con);
        }
    }

    // Execute a query and collect the result set into a list of rows
    public Result exeQuery(String sql,Object...params){
        Connection con=null;
        PreparedStatement pst=null;
        ResultSet rst=null;
        List<List<String>> data=new ArrayList<>();
        try {
            con=getCon();
            pst=getPst(con,sql,params);
            rst=pst.executeQuery();
            final int COUNT=rst.getMetaData().getColumnCount();
            while (rst.next()){
                List<String> row=new ArrayList<>(COUNT);
                for (int i = 1; i <= COUNT; i++) {
                    row.add(rst.getObject(i).toString());
                }
                data.add(row);
            }
            return Result.Succeed(data);
        } catch (SQLException e) {
            e.printStackTrace();
            return Result.Fail();
        }finally {
            close(rst,pst,con);
        }
    }

    // Read a SQL statement from a file (default path sql/sql.sql)
    public String readSql(String...paths) throws Exception {
        String path=paths.length==0?"sql/sql.sql":paths[0];
        StringBuilder builder=new StringBuilder();
        BufferedReader read=new BufferedReader(new FileReader(path));
        String line=null;
        while (null!=(line=read.readLine())){
            builder.append(line.trim()+" ");
        }
        return builder.toString();
    }

}
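
readSql() defaults to the relative path sql/sql.sql, resolved against the project working directory. A minimal sketch of what that file might contain; the sql folder and the student table used here are hypothetical and only for illustration:

-- sql/sql.sql (example content, assumed)
select * from student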

Result class

package cn.wxj.mysql.jdbc;

public class Result<T> {
    private T data;
    private boolean isErr;

    public Result( boolean isErr,T data) {
        this.data = data;
        this.isErr = isErr;
    }

    public T getData() {
        return data;
    }

    public boolean isErr() {
        return isErr;
    }

    public static <T> Result<T> Succeed(T data){
        return new Result<>(false, data);
    }

    public static <T> Result<T> Fail(){
        return new Result<>(true, null);
    }
}

Test class

package cn.wxj.mysql.jdbc;

import java.util.List;

public class Test {
    public static void main(String[] args) throws Exception{

        BaseDao dao=new BaseDao();
        Result<List<List<String>>> result = dao.exeQuery(dao.readSql());

        List<List<String>>  table=result.getData();
        table.forEach(row -> {
            row.forEach(cell -> {
                System.out.print(cell + "\t");
            });
            System.out.println();
        });

        Result result1 = dao.exeNonQuery(dao.readSql());
        System.out.println(result1.isErr());
    }
}
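
Because getPst() binds parameters positionally, statements can also be run with '?' placeholders instead of string concatenation. A minimal usage sketch; the student table and its id/name columns are hypothetical:

        // hypothetical parameterized calls against the DAO above
        Result<List<List<String>>> one = dao.exeQuery("select * from student where id = ?", 1);
        Result insert = dao.exeNonQuery("insert into student(id, name) values(?, ?)", 2, "lisi");
        System.out.println(insert.isErr());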

Connecting to Hive from Java

Project creation is the same as above.

pom dependencies

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <hadoop.version>2.6.0</hadoop.version>
    <hive.version>1.1.0</hive.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>${hive.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
  </dependencies>

Full pom.xml

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>cn.wxj.hive.jdbc</groupId>
  <artifactId>java2hive</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>java2hive</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <hadoop.version>2.6.0</hadoop.version>
    <hive.version>1.1.0</hive.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>${hive.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jdk.tools</artifactId>
          <groupId>jdk.tools</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
      <plugins>
        <!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>

Creating the configuration files

Create datasource.properties under the resources folder:

driver=org.apache.hive.jdbc.HiveDriver
url=jdbc:hive2://192.168.182.131:10000/default
username=root
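
No password entry is needed here; the BaseConfig below falls back to an empty string. Before moving on, a quick connectivity check against HiveServer2 can save debugging time; a minimal sketch using the values above (the class name HiveSmokeTest is made up for illustration, and it assumes HiveServer2 is already listening on port 10000):

import java.sql.Connection;
import java.sql.DriverManager;

public class HiveSmokeTest {
    public static void main(String[] args) throws Exception {
        // assumes HiveServer2 is running on port 10000 and accepts user root with an empty password
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection con = DriverManager.getConnection(
                "jdbc:hive2://192.168.182.131:10000/default", "root", "")) {
            System.out.println("connected: " + !con.isClosed());
        }
    }
}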

Also create a log4j.properties file under resources with the following content:

log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/hadoop.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n

Code implementation

BaseConfig class
package cn.kgc.hive.jdbc;

import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BaseConfig {
    private class Config{
       private String driver;
       private String url;
       private String username;
       private String password;

    }
    private Config config;

    private boolean valid(String url){
        Pattern p = Pattern.compile("jdbc:\\w+://((\\d{1,3}\\.){3}\\d{1,3}|\\w+):\\d{1,5}/\\w+");
        Matcher m=p.matcher(url);
        return m.matches();
    }

    private void init() throws Exception {
        String path=Thread.currentThread().getContextClassLoader().getResource("datasource.properties").getPath();
        Properties pro=new Properties();
        pro.load(new FileReader(path));
        String url=pro.getProperty("url");
        if(null==url || !valid(url)){
            throw new Exception("invalid url exception");
        }
        config = new Config();
        this.config.url=url;
        this.config.driver = pro.getProperty("driver","org.apache.hive.jdbc.HiveDriver");
        this.config.username=pro.getProperty("username","root");
        this.config.password=pro.getProperty("password","");

    }

    {
        try {
            init();
           Class.forName(config.driver);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }

    Connection getCon() throws SQLException {
        return DriverManager.getConnection(config.url,config.username,config.password);
    }

    void close(AutoCloseable...acs){
        for (AutoCloseable ac : acs) {
            if(null!=ac){
                try {
                    ac.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}

BaseDao class
package cn.kgc.hive.jdbc;

import java.io.BufferedReader;
import java.io.FileReader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class BaseDao extends BaseConfig{
    private PreparedStatement getPst(Connection con,String sql,Object...params) throws SQLException {
        PreparedStatement pst = con.prepareStatement(sql);
        if(params.length>0){
            for (int i = 0; i < params.length; i++) {
                pst.setObject(i+1,params[i]);
            }
        }
        return pst;
    }

    public Result exeNonQuery(String sql,Object...params){
        Connection con=null;
        PreparedStatement pst=null;
        try {
            con=getCon();
            pst=getPst(con,sql,params);
            pst.execute();
            return Result.succeed();
        } catch (SQLException e) {
            e.printStackTrace();
            return Result.failed();
        }finally {
            close(pst,con);
        }

    }

    public Result exeQuery (String sql, Object...params){
        Connection con=null;
        PreparedStatement pst=null;
        ResultSet rst=null;
        try{
            con=getCon();
            pst=getPst(con,sql,params);
            rst=pst.executeQuery();
            List<List<String>> table=null;
            if(null != rst && rst.next()){
                table=new ArrayList<>();
                final int COL=rst.getMetaData().getColumnCount();
                do{
                    List<String> row=new ArrayList<>(COL);
                    for (int i = 1; i <= COL; i++) {
                        row.add(rst.getObject(i).toString());
                    }
                    table.add(row);
                }while(rst.next());
            }
            return Result.succeed(table);
        }catch (Exception e){
            return Result.failed();
        }finally {
            close(rst,pst,con);
        }
    }

    public String readSql(String...paths) throws Exception {
        String path = paths.length==0 ? "sql/sql.sql" : paths[0];
        StringBuilder builder = new StringBuilder();
        BufferedReader reader = new BufferedReader(new FileReader(path));
        String line = null;
        while (null != (line=reader.readLine())){
            builder.append(line.trim()+" ");
        }
        return builder.toString();
    }

}

Result class
package cn.kgc.hive.jdbc;

public class Result <T>{
    private boolean err;
    private T data;

    public static <T> Result succeed(T...data){
        return new Result(false,data){};
    }

    public static Result failed(){
        return new Result(true){};
    }
    private Result(boolean err, T... data) {
        this.err = err;
        this.data = data.length>0?data[0]:null;
    }

    public boolean isErr() {
        return err;
    }

    public T getData() {
        return data;
    }
}
Test class (App)
package cn.kgc.hive.jdbc;


import java.util.List;

public class App {
    public static void main(String[] args) throws Exception {
        BaseDao dao = new BaseDao();
        // System.out.println(dao.exeNonQuery("create table hdba1(id int,name string)").isErr());

        Result<List<List<String>>> tables = dao.exeQuery("show tables");
        // Result<List<List<String>>> tables = dao.exeQuery(dao.readSql());

        tables.getData().forEach(row -> {
            row.forEach(cell -> {
                System.out.print(cell + "\t");
            });
            System.out.println();
        });
    }
}

Connecting to HBase from Java

Host name mapping must be configured on the Windows machine first, as shown in the sketch below.
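
The HBase client looks up region servers by hostname (it gets them from ZooKeeper), so the cluster hostname must resolve on Windows. A sketch of the entry to add to C:\Windows\System32\drivers\etc\hosts; the hostname hadoop01 is hypothetical, use the actual hostname of your server:

192.168.182.131  hadoop01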

Full pom.xml

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>cn.kgc.kb10.wxj.hbase</groupId>
  <artifactId>hbasejdbc</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>hbasejdbc</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <hadoop.version>2.6.0-cdh5.14.2</hadoop.version>
    <hive.version>1.1.0-cdh5.14.2</hive.version>
    <hbase.version>1.2.0-cdh5.14.2</hbase.version>
  </properties>

  <repositories>
    <repository>
      <id>cloudera</id>
      <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
  </repositories>

  <dependencies>
    <!--hadoop-->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <!--日志-->
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <version>1.2</version>
    </dependency>

    <!--MapReduce-->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper -->
    <!--zookeeper-->
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <version>3.4.5</version>
      <type>pom</type>
    </dependency>

    <!--hbase-->
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-client</artifactId>
      <version>${hbase.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-common</artifactId>
      <version>${hbase.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>${hbase.version}</version>
    </dependency>

    <!--log4j-->
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.17</version>
    </dependency>

    <!--测试-->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <!--<scope>test</scope>-->
    </dependency>
  </dependencies>
</project>

Code implementation

package cn.kgc.kb10.wxj.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import java.io.IOException;

public class HBaseClientDemo {
    // Create a table
    @Test
    public void createTable() throws IOException {
        // 1. Build the HBase configuration
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","192.168.182.131");
        conf.set("hbase.zookeeper.property.clientPort","2181");
        // 2. Create the connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // 3. Get the Admin
        Admin admin=conn.getAdmin();
        // 4. Describe the table to create (table name)
        HTableDescriptor student = new HTableDescriptor(TableName.valueOf("student100"));
        // 5. Add the column families
        student.addFamily(new HColumnDescriptor("info"));
        student.addFamily(new HColumnDescriptor("score"));
        // 6. Create the table
        admin.createTable(student);
        // 7. Close the connection
        conn.close();
    }
    @Test
    public void putData2Table() throws IOException {
        // 1. Build the HBase configuration
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","192.168.182.131");
        conf.set("hbase.zookeeper.property.clientPort","2181");
        // 2. Create the connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // 3. Get the Table
        Table student =conn.getTable(TableName.valueOf("student100"));
        // 4. Create a Put with the rowkey
        Put put=new Put(Bytes.toBytes("1001"));
        // 5. Add columns, e.g. info:name = zhangsan
        put.addColumn(Bytes.toBytes("info"),Bytes.toBytes("name"),Bytes.toBytes("zhangsan"));
        put.addColumn(Bytes.toBytes("info"),Bytes.toBytes("gender"),Bytes.toBytes("male"));
        put.addColumn(Bytes.toBytes("info"),Bytes.toBytes("age"),Bytes.toBytes("11"));
        // 6. Write the data
        student.put(put);
        // 7. Close the connection
        conn.close();
    }

    // Read data
    @Test
    public void getDataFromTable() throws IOException {
        // 1. Build the HBase configuration
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","192.168.182.131");
        conf.set("hbase.zookeeper.property.clientPort","2181");
        // 2. Create the connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // 3. Get the Table
        Table student =conn.getTable(TableName.valueOf("student100"));
        // 4. Build a Get for the rowkey
        Get get = new Get(Bytes.toBytes("1001"));
        // 5. Fetch the result
        Result result = student.get(get);
        
        // 6. Iterate over the cells
        Cell[] cells = result.rawCells();
        for (Cell cell : cells) {
            System.out.println("rowkey:"+Bytes.toString(CellUtil.cloneRow(cell)));
            System.out.println("列族:"+Bytes.toString(CellUtil.cloneFamily(cell)));
            System.out.println("列名:"+Bytes.toString(CellUtil.cloneQualifier(cell)));
            System.out.println("value:"+Bytes.toString(CellUtil.cloneValue(cell)));
            System.out.println("-----------------");
        }
        conn.close();
    }

    // Drop the table
    @Test
    public void dropTable() throws IOException {
        // 1. Build the HBase configuration
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","192.168.182.131");
        conf.set("hbase.zookeeper.property.clientPort","2181");
        // 2. Create the connection
        Connection conn = ConnectionFactory.createConnection(conf);
        // 3. Get the Admin
        Admin admin=conn.getAdmin();
        // 4. Disable the table
        admin.disableTable(TableName.valueOf("student100"));
        // 5. Delete the table
        admin.deleteTable(TableName.valueOf("student100"));
        conn.close();

    }
}
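
The demo covers create, put, get, and drop. For completeness, scanning all rows of the table would look roughly like the sketch below; it is a method that could be added to HBaseClientDemo (Scan and ResultScanner come from the same org.apache.hadoop.hbase.client package already imported above):

    // Scan every row of student100 (sketch)
    @Test
    public void scanTable() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","192.168.182.131");
        conf.set("hbase.zookeeper.property.clientPort","2181");
        Connection conn = ConnectionFactory.createConnection(conf);
        Table student = conn.getTable(TableName.valueOf("student100"));
        ResultScanner scanner = student.getScanner(new Scan());
        for (Result result : scanner) {
            for (Cell cell : result.rawCells()) {
                System.out.println(Bytes.toString(CellUtil.cloneRow(cell)) + " "
                        + Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                        + Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                        + Bytes.toString(CellUtil.cloneValue(cell)));
            }
        }
        scanner.close();
        conn.close();
    }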