Java操作外部数据源(RDBMS,Hive,HBase)


一、RDBMS

1.Maven工程添加依赖

<!-- https://mvnrepository.com/artifact/mysql/mysql-connector-java -->
<dependency>
  <groupId>mysql</groupId>
  <artifactId>mysql-connector-java</artifactId>
  <version>5.1.38</version>
</dependency>

2.配置properties文件

driver=com.mysql.jdbc.Driver
url=jdbc:mysql://192.168.72.130:3306/mysqltest?useUnicode=true&characterEncoding=utf8&useSSL=true
username=root
password=javakb10

3.BaseConfig类

package cn.kb10.jdbc2;

import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

class BaseConfig {
    /**
     * Immutable holder for the four JDBC connection settings read from
     * datasource.properties.
     */
    private static class Config {
        final String driver;
        final String url;
        final String username;
        final String password;

        Config(String driver, String url, String username, String password) {
            this.driver = driver;
            this.url = url;
            this.username = username;
            this.password = password;
        }
    }

    private Config config;

    // Instance initializer: runs before any subclass (e.g. BaseDao)
    // constructor body, loading the config and registering the driver.
    // Fail fast instead of swallowing the error: the original printed the
    // message and left `config` null, deferring the failure to an opaque
    // NullPointerException inside getCon().
    {
        try {
            init();
            Class.forName(config.driver);
        } catch (Exception e) {
            throw new IllegalStateException("datasource initialization failed", e);
        }
    }

    /**
     * Loads datasource.properties from the classpath.
     * Uses getResourceAsStream() instead of getResource().getPath() +
     * FileReader so the code also works when the resource is packaged in a
     * jar or the path contains URL-encoded characters, and the stream is
     * closed via try-with-resources (the original leaked the FileReader).
     *
     * @throws Exception if the resource is missing or has no url entry
     */
    private void init() throws Exception {
        try (java.io.InputStream in = Thread.currentThread()
                .getContextClassLoader()
                .getResourceAsStream("datasource.properties")) {
            if (null == in) {
                throw new Exception("datasource.properties not found on classpath");
            }
            Properties pro = new Properties();
            pro.load(in);
            String url = pro.getProperty("url");
            if (null == url) {
                throw new Exception("缺少url配置项异常");
            }
            String driver = pro.getProperty("driver", "com.mysql.jdbc.Driver");
            String username = pro.getProperty("username", "root");
            String password = pro.getProperty("password", "root");
            this.config = new Config(driver, url, username, password);
        }
    }

    /**
     * Opens a new connection using the loaded settings.
     *
     * @throws SQLException if the connection cannot be established
     */
    protected Connection getCon() throws SQLException {
        return DriverManager.getConnection(config.url, config.username, config.password);
    }

    /**
     * Closes any number of JDBC resources, ignoring nulls. A failed close is
     * logged and does not prevent closing the remaining resources.
     */
    protected void close(AutoCloseable... acs) {
        for (AutoCloseable ac : acs) {
            if (null != ac) {
                try {
                    ac.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}

4.BaseDao类

package cn.kb10.jdbc2;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public final class BaseDao extends BaseConfig {

    /**
     * Prepares a statement for the given SQL and binds the positional
     * parameters (JDBC parameter indexes are 1-based).
     */
    private PreparedStatement getPst(Connection con, String sql, Object... params) throws SQLException {
        PreparedStatement pst = con.prepareStatement(sql);
        for (int i = 0; i < params.length; i++) {
            pst.setObject(i + 1, params[i]);
        }
        return pst;
    }

    /**
     * Executes an INSERT/UPDATE/DELETE statement.
     *
     * @return a successful Result carrying the affected row count, or a
     *         failed Result if a SQLException occurred
     */
    public Result exeNonQuery(String sql, Object... params) {
        Connection con = null;
        PreparedStatement pst = null;
        try {
            con = getCon();
            pst = getPst(con, sql, params);
            return Result.succeed(pst.executeUpdate());
        } catch (SQLException e) {
            // log the cause instead of silently dropping it (the original
            // returned fail() with no trace of what went wrong)
            e.printStackTrace();
            return Result.fail();
        } finally {
            close(pst, con);
        }
    }

    /**
     * Executes a query and materializes the whole ResultSet as a list of
     * string rows (empty list when the query yields no rows).
     *
     * @return a successful Result carrying the rows, or a failed Result on error
     */
    public Result exeQuery(String sql, Object... params) {
        Connection con = null;
        PreparedStatement pst = null;
        ResultSet rst = null;
        try {
            con = getCon();
            pst = getPst(con, sql, params);
            rst = pst.executeQuery();
            List<List<String>> data = new ArrayList<>();
            final int cc = rst.getMetaData().getColumnCount();
            while (rst.next()) {
                List<String> row = new ArrayList<>(cc);
                for (int i = 1; i <= cc; i++) {
                    // String.valueOf tolerates SQL NULL — the original's
                    // getObject(i).toString() threw a NullPointerException
                    // on any nullable column
                    row.add(String.valueOf(rst.getObject(i)));
                }
                data.add(row);
            }
            return Result.succeed(data);
        } catch (Exception e) {
            e.printStackTrace();
            return Result.fail();
        } finally {
            close(rst, pst, con);
        }
    }
}

5.Result类

package cn.kb10.jdbc2;

public final class Result<T> {
    // failure flag: true means the operation did not complete
    private final boolean err;
    // payload of a successful operation; null for failures
    private final T data;

    private Result(boolean err, T data) {
        this.err = err;
        this.data = data;
    }

    /**
     * Creates a successful result carrying {@code data}.
     * Generic on the method (not raw, as the original was) so callers get a
     * typed {@code Result<T>} without unchecked-assignment warnings.
     */
    public static <T> Result<T> succeed(T data) {
        return new Result<>(false, data);
    }

    /** Creates a failed result with no payload. */
    public static <T> Result<T> fail() {
        return new Result<>(true, null);
    }

    /** @return true when the operation failed */
    public boolean isErr() {
        return err;
    }

    /** @return the payload, or null for a failed result */
    public T getData() {
        return data;
    }
}

6.Test类

package cn.kb10.jdbc2;

import java.sql.Types;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class Test {
    /**
     * Demo entry point: queries every row of the student table and prints
     * each row with tab-separated columns, or a message on failure.
     */
    public static void main(String[] args) {
        Result<List<List<String>>> result = new BaseDao().exeQuery("select * from student");

        // guard clause: bail out early on a failed query
        if (result.isErr()) {
            System.out.println("查询异常");
            return;
        }

        for (List<String> record : result.getData()) {
            StringBuilder line = new StringBuilder();
            for (String field : record) {
                line.append(field).append('\t');
            }
            System.out.println(line);
        }
    }
}

二、Hive

1.Maven工程添加依赖

<dependency>
  <groupId>org.apache.hive</groupId>
  <artifactId>hive-jdbc</artifactId>
  <version>${hive.version}</version>
</dependency>
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-auth</artifactId>
  <version>${hadoop.version}</version>
  <exclusions>
    <exclusion>
      <artifactId>jdk.tools</artifactId>
      <groupId>jdk.tools</groupId>
    </exclusion>
  </exclusions>
</dependency>
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-common</artifactId>
  <version>${hadoop.version}</version>
  <exclusions>
    <exclusion>
      <artifactId>jdk.tools</artifactId>
      <groupId>jdk.tools</groupId>
    </exclusion>
  </exclusions>
</dependency>
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-hdfs</artifactId>
  <version>${hadoop.version}</version>
  <exclusions>
    <exclusion>
      <artifactId>jdk.tools</artifactId>
      <groupId>jdk.tools</groupId>
    </exclusion>
  </exclusions>
</dependency>
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-client</artifactId>
  <version>${hadoop.version}</version>
  <exclusions>
    <exclusion>
      <artifactId>jdk.tools</artifactId>
      <groupId>jdk.tools</groupId>
    </exclusion>
  </exclusions>
</dependency>
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-mapreduce-client-core</artifactId>
  <version>${hadoop.version}</version>
</dependency>

2.配置properties文件

  • datasource.properties
driver=org.apache.hive.jdbc.HiveDriver
url=jdbc:hive2://192.168.72.130:10000/default
username=root
  • log4j.properties
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/hadoop.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n

3.BaseConfig类

package cn.kgc.hive.jdbc.hdbc;

import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BaseConfig {
    /** Holder for the four JDBC connection settings. */
    private static class Config {
        private String driver;
        private String url;
        private String username;
        private String password;
    }

    private Config config;

    // Compiled once instead of on every valid() call.
    private static final Pattern URL_PATTERN =
            Pattern.compile("jdbc:\\w+://((\\d{1,3}\\.){3}\\d{1,3}|\\w+):\\d{1,5}/\\w+");

    /**
     * Minimal sanity check of a JDBC url: scheme://host:port/db where host
     * is a dotted IPv4 address or a single word.
     * NOTE(review): urls carrying query parameters (e.g. ?useSSL=true) are
     * rejected by this pattern — confirm that is intended.
     */
    private boolean valid(String url) {
        return URL_PATTERN.matcher(url).matches();
    }

    /**
     * Loads datasource.properties from the classpath.
     * Uses getResourceAsStream() instead of getResource().getPath() +
     * FileReader so the code also works when the resource is packaged in a
     * jar, and the stream is closed via try-with-resources (the original
     * leaked the FileReader).
     *
     * @throws Exception if the resource is missing or the url is absent/invalid
     */
    private void init() throws Exception {
        try (java.io.InputStream in = Thread.currentThread()
                .getContextClassLoader()
                .getResourceAsStream("datasource.properties")) {
            if (null == in) {
                throw new Exception("datasource.properties not found on classpath");
            }
            Properties pro = new Properties();
            pro.load(in);
            String url = pro.getProperty("url");
            if (null == url || !valid(url)) {
                throw new Exception("no or invalid url exception");
            }
            config = new Config();
            config.url = url;
            // NOTE(review): the fallback driver is the MySQL one even though
            // this class is used for Hive — the properties file is expected
            // to always supply org.apache.hive.jdbc.HiveDriver explicitly
            config.driver = pro.getProperty("driver", "com.mysql.jdbc.Driver");
            config.username = pro.getProperty("username", "root");
            config.password = pro.getProperty("password", "");
        }
    }

    // Instance initializer: runs before any subclass constructor body.
    // Fail fast: the original printed the stack trace and left `config`
    // null, deferring the failure to an opaque NullPointerException in
    // getCon().
    {
        try {
            init();
            Class.forName(config.driver);
        } catch (Exception e) {
            throw new IllegalStateException("datasource initialization failed", e);
        }
    }

    /** Opens a new connection using the loaded settings. */
    Connection getCon() throws SQLException {
        return DriverManager.getConnection(
                config.url,
                config.username,
                config.password
        );
    }

    /**
     * Closes resources in the given order, ignoring nulls. A failed close is
     * logged and does not prevent closing the remaining resources.
     */
    void close(AutoCloseable... closeables) {
        for (AutoCloseable closeable : closeables) {
            if (closeable != null) {
                try {
                    closeable.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}

4.BaseDao类

package cn.kgc.hive.jdbc.hdbc;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class BaseDao extends BaseConfig {

    /**
     * Prepares a statement for the given SQL and binds the positional
     * parameters (JDBC parameter indexes are 1-based).
     */
    private PreparedStatement getPst(Connection con, String sql, Object... params) throws SQLException {
        PreparedStatement pst = con.prepareStatement(sql);
        for (int i = 0; i < params.length; i++) {
            pst.setObject(i + 1, params[i]);
        }
        return pst;
    }

    /**
     * Executes a DDL/DML statement (Hive statements often return no update
     * count, hence execute() rather than executeUpdate()).
     *
     * @return a successful Result with no payload, or a failed Result on error
     */
    public Result exeNonQuery(String sql, Object... params) {
        Connection con = null;
        PreparedStatement pst = null;
        try {
            con = getCon();
            pst = getPst(con, sql, params);
            pst.execute();
            return Result.succeed();
        } catch (SQLException e) {
            e.printStackTrace();
            return Result.fail();
        } finally {
            close(pst, con);
        }
    }

    /**
     * Executes a query and materializes the whole ResultSet as a list of
     * string rows. Returns an empty list — not null, as the original did —
     * when the query yields no rows, so callers can iterate without a null
     * check.
     *
     * @return a successful Result carrying the rows, or a failed Result on error
     */
    public Result exeQuery(String sql, Object... params) {
        Connection con = null;
        PreparedStatement pst = null;
        ResultSet rst = null;
        try {
            con = getCon();
            pst = getPst(con, sql, params);
            rst = pst.executeQuery();
            List<List<String>> table = new ArrayList<>();
            final int COL = rst.getMetaData().getColumnCount();
            while (rst.next()) {
                List<String> row = new ArrayList<>(COL);
                for (int i = 1; i <= COL; i++) {
                    // String.valueOf tolerates SQL NULL — the original's
                    // getObject(i).toString() threw a NullPointerException
                    // on nullable columns, escaping the SQLException catch
                    row.add(String.valueOf(rst.getObject(i)));
                }
                table.add(row);
            }
            return Result.succeed(table);
        } catch (SQLException e) {
            e.printStackTrace();
            return Result.fail();
        } finally {
            close(rst, pst, con);
        }
    }

    /**
     * Reads a SQL script file and flattens it to one line (each source line
     * trimmed and joined with a space) so it can be passed to exeQuery.
     * The reader is closed via try-with-resources — the original leaked it.
     *
     * @param paths optional file path; defaults to "sql/sql.sql"
     * @throws IOException if the file cannot be read
     */
    public String readSql(String... paths) throws IOException {
        String path = paths.length == 0 ? "sql/sql.sql" : paths[0];
        StringBuilder builder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
            String line;
            while (null != (line = reader.readLine())) {
                builder.append(line.trim()).append(' ');
            }
        }
        return builder.toString();
    }
}

5.Result类

package cn.kgc.hive.jdbc.hdbc;

public abstract class Result<T> {
    // failure flag: true means the operation did not complete
    private final boolean err;
    // payload of a successful operation; null for failures or empty succeed()
    private final T data;

    /**
     * @param err  failure flag
     * @param data optional payload; only the first element (if any) is kept.
     *             Safe varargs: the array is only read, never stored or
     *             written through.
     */
    @SafeVarargs
    public Result(boolean err, T... data) {
        this.err = err;
        this.data = data.length > 0 ? data[0] : null;
    }

    /**
     * Creates a failed result with no payload.
     * Generic on the method (not raw, as the original was) so callers get a
     * typed {@code Result<T>} without unchecked warnings.
     */
    public static <T> Result<T> fail() {
        return new Result<T>(true) {};
    }

    /** Creates a successful result, optionally carrying the first value given. */
    @SafeVarargs
    public static <T> Result<T> succeed(T... t) {
        return new Result<T>(false, t) {};
    }

    /** @return true when the operation failed */
    public boolean isErr() {
        return err;
    }

    /** @return the payload, or null when absent */
    public T getData() {
        return data;
    }
}

6.App类

import java.io.IOException;
import java.util.List;

public class App
{
    /**
     * Reads the default SQL script (sql/sql.sql), runs it through BaseDao
     * and prints the result table with tab-separated columns.
     */
    public static void main( String[] args ) throws IOException{
        BaseDao dao = new BaseDao();
        Result<List<List<String>>> tables = dao.exeQuery(dao.readSql());
        // Check the error flag before touching the payload: a failed or
        // empty query carries null data and the original code crashed here
        // with a NullPointerException.
        if (tables.isErr() || tables.getData() == null) {
            System.out.println("query failed or returned no rows");
            return;
        }
        tables.getData().forEach(row->{
            row.forEach(cell->{
                System.out.print(cell+"\t");
            });
            System.out.println();
        });
    }
}

7.Sql语句文件

with score as(
select
    stuname,classname,subname,score,
    last_value(subname) over(distribute by stuname sort by score rows between unbounded preceding and unbounded following) subname2,
    max(score) over(distribute by stuname sort by score rows between unbounded preceding and unbounded following) maxscore,
    row_number() over(distribute by stuname sort by score rows between unbounded preceding and unbounded following) rownum
from
    mysql_school
),
diff as(
select
    stuname,
    classname,
    subname subname1,
    score minScore,
    subname2,
    maxscore,
    (maxscore-score) diff
from
    score
where
    rownum=1
),
level as(
select
    stuname,classname,subname1,minscore,subname2,maxscore,diff,
    case when diff>=30 then 'A' when diff>=20 then 'B' when diff>=10 then 'C' else 'D' end rank
from
    diff
)

select
    stuname,classname,subname1,minscore,subname2,maxscore,diff,rank
from
    level
where
    rank like '%A%'
or
    rank like '%B%'

8.查询结果

丁聪	KB08	KB JavaOOP	60	KB MySql	100	40	A	
丰笛	KW06	EasyUI	46	KW JQuery	90	44	A	
何健	KB07	KB HTML	48	KB CSS	100	52	A	
余杰	KW05	KW HTML	48	ElementUI	88	40	A	
关敬元	KB05	Scala	49	Hive	100	51	A	
凌宇翔	KB07	KB JSP	53	KB MySql	92	39	A	
刘用兵	KB08	KB HTML	56	KB Java逻辑	100	44	A	
刘青青	KW06	KW JS	53	KW JQuery	94	41	A	
吴凡	KW05	KW JS	59	KW HTML	94	35	A	
吴凯	KW06	KW CSS+BootStrap	48	EasyUI	75	27	B	
吴彦祥	KW05	KW CSS+BootStrap	47	ElementUI	96	49	A	
吴志远	KW05	KW JS	62	KW JQuery	96	34	A	
吴悠	KB05	KB JavaOOP	46	HBase	100	54	A	
吴涛	KW03	KB JDBC	48	KB JQuery	97	49	A	
吴衍文	KB07	KB JS	48	KB JQuery	96	48	A	
周炜	KW06	KW JS	45	KW JQuery	84	39	A	
周璟	KW06	KW CSS+BootStrap	57	KW JQuery	100	43	A	
周继伟	KW03	KB MySql	51	KB JavaOOP	94	43	A	
唐宇飞	KB08	KB Java逻辑	51	KB JQuery	80	29	B	
唐明汉	KB07	KB MySql	47	KB CSS	96	49	A	
姚成耀	KW05	KW CSS+BootStrap	52	ElementUI	100	48	A	
孙亚洲	KB05	KB JDBC	45	KB MySql	100	55	A	
孟涛涛	KW06	EasyUI	48	KW JS	96	48	A	
庄俊伟	KB07	KB JavaOOP	50	KB Java逻辑	100	50	A	
应春雨	KW05	KW CSS+BootStrap	52	EasyUI	88	36	A	
张全根	KB05	KB MySql	50	Hive	100	50	A	
张刘留	KW06	KW CSS+BootStrap	54	KW JQuery	96	42	A	
张啸尘	KB07	KB Java高级特性	51	KB JDBC	100	49	A	
张小港	KW06	EasyUI	45	KW HTML	92	47	A	
张志强	KW05	KW JS	49	KW JQuery	93	44	A	
张梓尧	KW05	KW CSS+BootStrap	60	EasyUI	100	40	A	
张琪	KB07	KB CSS	49	KB MySql	100	51	A	
张超	KW05	ElementUI	52	KW CSS+BootStrap	93	41	A	
徐凯强	KW05	KW HTML	45	KW CSS+BootStrap	87	42	A	
徐磊	KW03	KB HTML	46	KB JS	100	54	A	
戴媛媛	KB05	KB CSS	46	KB MySql	97	51	A	
戴彬	KB07	KB JS	61	KB MySql	97	36	A	
李佛光	KB05	KB JQuery	45	KB Spring	100	55	A	
李刚	KW05	KW JS	45	KW CSS+BootStrap	99	54	A	
李国旗	KB05	Spark+SparkStreaming	45	Hadoop+MapReduce	91	46	A	
李志杰	KW06	KW CSS+BootStrap	61	KW JS	99	38	A	
李楚鸿	KB08	KB JQuery	48	KB JavaOOP	99	51	A	
殷狄	KB07	KB JavaOOP	45	KB JDBC	92	47	A	
江文涛	KB08	KB JavaOOP	54	KB HTML	97	43	A	
江玉	KB05	KB Java高级特性	46	KB MySql	100	54	A	
汤伟杰	KB05	KB JavaOOP	45	KB HTML	100	55	A	
汪小天	KW06	KW JS	58	KW CSS+BootStrap	84	26	B	
王亮	KB08	KB MySql	49	KB Java逻辑	87	38	A	
王伟	KW03	KB JDBC	54	KB HTML	100	46	A	
王家能	KB05	KB JDBC	46	KB Spring	100	54	A	
王志远	KW05	ElementUI	50	KW JQuery	92	42	A	
王春雷	KB08	KB JS	47	KB HTML	91	44	A	
王波	KB08	KB JavaOOP	47	KB JS	84	37	A	
王纪文	KW06	KW JQuery	55	EasyUI	88	33	A	
王跃	KW03	KB CSS	57	KB JDBC	98	41	A	
相羽	KW05	KW HTML	49	KW JS	80	31	A	
秦超	KW03	KB MySql	51	KB Java逻辑	100	49	A	
管若涵	KB08	KB HTML	45	KB JS	89	44	A	
胡妙妙	KB07	KB JS	47	KB JSP	97	50	A	
胡正双	KB08	KB Java高级特性	45	KB JQuery	99	54	A	
花磊	KW06	KW JQuery	56	KW HTML	100	44	A	
苏郑梓梵	KB08	KB JS	46	KB JQuery	95	49	A	
苏金峰	KW06	KW CSS+BootStrap	63	EasyUI	94	31	A	
范新宇	KB07	KB CSS	67	KB Java高级特性	98	31	A	
葛优	KB05	Hive	52	KB SpringMVC	100	48	A	
蔡志远	KW03	KB HTML	46	KB CSS	83	37	A	
袁潇凯	KW03	KB CSS	51	KB Java逻辑	88	37	A	
许梦娟	KB07	KB JS	49	KB JavaOOP	77	28	B	
谢武阳	KW03	KB JQuery	45	KB JS	80	35	A	
贾仲羽	KB07	KB MySql	45	KB Java逻辑	95	50	A	
赵伟伟	KB05	KB SpringMVC	47	Hadoop+MapReduce	93	46	A	
赵杰	KB05	ELK	45	KB SpringBoot	96	51	A	
邢慧斌	KW03	KB CSS	48	KB JS	98	50	A	
邵娅	KW03	KB JDBC	54	KB Java高级特性	100	46	A	
郑朋	KB07	KB HTML	51	KB JavaOOP	93	42	A	
钱一鸣	KB08	KB CSS	45	KB Java逻辑	100	55	A	
陈元生	KW06	KW CSS+BootStrap	46	KW HTML	97	51	A	
陈军华	KW03	KB Java高级特性	48	KB JS	100	52	A	
陈明秋	KB05	KB CSS	49	Scala	98	49	A	
陈涛	KB08	KB MySql	45	KB HTML	99	54	A	
陈玉莹	KB08	KB CSS	52	KB JQuery	94	42	A	
陈真	KB08	KB CSS	53	KB Java逻辑	99	46	A	
陈稼轩	KW03	KB JQuery	46	KB Java高级特性	90	44	A	
陈立志	KB07	KB HTML	50	KB Java逻辑	98	48	A	
雷兵	KB08	KB MySql	45	KB HTML	89	44	A	
韩睿刘洋	KB05	KB JSP	45	KB JavaOOP	100	55	A	
黄超杰	KW05	KW JS	47	ElementUI	98	51	A	
黄鑫	KB07	KB JS	51	KB CSS	100	49	A	
黄雄	KW03	KB MySql	48	KB JDBC	94	46	A	

三、HBase

1.配置Windows主机映射

  • 进入以下路径

      C:\Windows\System32\drivers\etc
    
  • 修改hosts文件,添加需要映射的虚拟机地址和主机名

      192.168.72.130 single
      192.168.72.140 master
    

2.pom.xml文件

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>cn.kgc.kb10.wxj.hbase</groupId>
  <artifactId>hbasejdbc</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>hbasejdbc</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <hadoop.version>2.6.0-cdh5.14.2</hadoop.version>
    <hive.version>1.1.0-cdh5.14.2</hive.version>
    <hbase.version>1.2.0-cdh5.14.2</hbase.version>
  </properties>

  <repositories>
    <repository>
      <id>cloudera</id>
      <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
  </repositories>

  <dependencies>
    <!--hadoop-->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <!--日志-->
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <version>1.2</version>
    </dependency>

    <!--MapReduce-->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper -->
    <!--zookeeper-->
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <version>3.4.5</version>
      <type>pom</type>
    </dependency>

    <!--hbase-->
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-client</artifactId>
      <version>${hbase.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-common</artifactId>
      <version>${hbase.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>${hbase.version}</version>
    </dependency>

    <!--log4j-->
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.17</version>
    </dependency>

    <!--测试-->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <!--<scope>test</scope>-->
    </dependency>
  </dependencies>
</project>

3.代码实现

package cn.kgc.kb10.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import java.io.IOException;

public class HBaseClientDemo {

    // ZooKeeper quorum settings shared by every test; hoisted so the four
    // copy-pasted configuration blocks collapse into one helper.
    private static final String ZK_QUORUM = "192.168.182.131";
    private static final String ZK_CLIENT_PORT = "2181";

    /**
     * Builds a connection to the HBase cluster via its ZooKeeper quorum.
     * Callers are responsible for closing it (use try-with-resources).
     */
    private Connection connect() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", ZK_QUORUM);
        conf.set("hbase.zookeeper.property.clientPort", ZK_CLIENT_PORT);
        return ConnectionFactory.createConnection(conf);
    }

    /**
     * Creates table student100 with the column families info and score.
     * try-with-resources closes Connection and Admin even on failure — the
     * original leaked both whenever an exception occurred before conn.close().
     */
    @Test
    public void createTable() throws IOException {
        try (Connection conn = connect();
             Admin admin = conn.getAdmin()) {
            HTableDescriptor student = new HTableDescriptor(TableName.valueOf("student100"));
            student.addFamily(new HColumnDescriptor("info"));
            student.addFamily(new HColumnDescriptor("score"));
            admin.createTable(student);
        }
    }

    /** Inserts one row (rowkey 1001) with three columns in the info family. */
    @Test
    public void putData2Table() throws IOException {
        try (Connection conn = connect();
             Table student = conn.getTable(TableName.valueOf("student100"))) {
            Put put = new Put(Bytes.toBytes("1001"));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("zhangsan"));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("gender"), Bytes.toBytes("male"));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("11"));
            student.put(put);
        }
    }

    /** Reads row 1001 and prints every cell: rowkey, family, qualifier, value. */
    @Test
    public void getDataFromTable() throws IOException {
        try (Connection conn = connect();
             Table student = conn.getTable(TableName.valueOf("student100"))) {
            Get get = new Get(Bytes.toBytes("1001"));
            Result result = student.get(get);
            for (Cell cell : result.rawCells()) {
                System.out.println("rowkey:" + Bytes.toString(CellUtil.cloneRow(cell)));
                System.out.println("列族:" + Bytes.toString(CellUtil.cloneFamily(cell)));
                System.out.println("列名:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
                System.out.println("-----------------");
            }
        }
    }

    /** Disables then deletes table student100 (HBase requires disable first). */
    @Test
    public void dropTable() throws IOException {
        try (Connection conn = connect();
             Admin admin = conn.getAdmin()) {
            TableName name = TableName.valueOf("student100");
            admin.disableTable(name);
            admin.deleteTable(name);
        }
    }
}
  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值