Adding Data
To add data to a table we need a Put object, and before we can create the Put object we must first obtain a Table object; that is what lets us operate on the specified table:
Table table = connection.getTable(tableName); // obtain the Table object
try {
    byte[] row = Bytes.toBytes("row1");                  // row key
    Put put = new Put(row);                              // create the Put object
    byte[] columnFamily = Bytes.toBytes("data");         // column family
    byte[] qualifier = Bytes.toBytes(String.valueOf(1)); // column qualifier
    byte[] value = Bytes.toBytes("张三丰");               // value
    put.addColumn(columnFamily, qualifier, value);
    table.put(put);                                      // write the data to the table
} finally {
    // release the resource when done
    table.close();
}
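When several rows need to be written, Table also accepts a whole list of Put objects in one call, which saves a round trip per row. A minimal sketch, assuming the same connection and tableName as above (the loop bounds and "value" + i payloads are placeholders for illustration):

// extra imports needed: java.util.ArrayList, java.util.List
List<Put> puts = new ArrayList<>();
for (int i = 1; i <= 2; i++) {
    Put p = new Put(Bytes.toBytes("row" + i));        // row keys: row1, row2, ...
    p.addColumn(Bytes.toBytes("data"),                // column family
            Bytes.toBytes(String.valueOf(i)),         // column qualifier
            Bytes.toBytes("value" + i));              // placeholder value
    puts.add(p);
}
Table table = connection.getTable(tableName);
try {
    table.put(puts); // one call writes every Put in the list
} finally {
    table.close();
}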
Programming Requirements
Now it is your turn. Using what you have learned in this section, complete the code between the begin-end markers in the editor on the right. Write a Java program that creates a table named tb_step2 in HBase, with the column family data, and adds the following data:
- row keys: row1 and row2;
- column qualifiers: 1 and 2;
- values: 张三丰 and 张无忌.
package step2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {
    public void insertInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("tb_step2");
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
        tableDescriptor.setColumnFamily(family); // attach the column family
        admin.createTable(tableDescriptor.build()); // create the table
        // Add the data
        byte[] row1 = Bytes.toBytes("row1");
        Put put1 = new Put(row1);
        byte[] columnFamily1 = Bytes.toBytes("data");         // column family
        byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // column qualifier
        byte[] value1 = Bytes.toBytes("张三丰");               // value
        put1.addColumn(columnFamily1, qualifier1, value1);
        byte[] row2 = Bytes.toBytes("row2");
        Put put2 = new Put(row2);
        byte[] columnFamily2 = Bytes.toBytes("data");         // column family
        byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // column qualifier
        byte[] value2 = Bytes.toBytes("张无忌");               // value
        put2.addColumn(columnFamily2, qualifier2, value2);
        Table table = connection.getTable(tableName);
        try {
            table.put(put1);
            table.put(put2);
        } finally {
            // release resources when done
            table.close();
            admin.close();
            connection.close();
        }
        /********* End *********/
    }
}
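To check that both rows actually landed in tb_step2, you can scan the table and print what comes back. A minimal verification sketch, assuming the same default configuration as above; it is not part of the graded Begin/End block and additionally needs the Scan, Result and ResultScanner imports from org.apache.hadoop.hbase.client:

Configuration config = HBaseConfiguration.create();
try (Connection connection = ConnectionFactory.createConnection(config);
     Table table = connection.getTable(TableName.valueOf("tb_step2"));
     ResultScanner scanner = table.getScanner(new Scan())) {
    for (Result result : scanner) {
        byte[] v1 = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1"));
        byte[] v2 = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("2"));
        // each row carries only one of the two qualifiers, so print whichever is present
        System.out.println(Bytes.toString(result.getRow()) + " -> "
                + Bytes.toString(v1 != null ? v1 : v2));
    }
}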
Before the evaluation runs, start HDFS and HBase, then confirm that the /hbase directory exists on HDFS:

start-dfs.sh
start-hbase.sh
hadoop fs -ls /hbase