使用JavaAPI连接
更新时间:2022-07-25
HBase Java API是HBase最常规和最高效的访问方式,Java客户端其实是shell客户端的一种实现,操作命令基本上也是shell客户端命令的一个映射,适合Hadoop MapReduce Job并行批处理 HBase表数据等场景。
建立完连接后,即可使用Java API访问HBase集群,下面提供一些简单的Java 示例。
表操作常见方法
获取Table
Plain Text
1import org.apache.hadoop.hbase.TableName;
2import org.apache.hadoop.hbase.client.Connection;
3import org.apache.hadoop.hbase.client.Table;
4public Table getTable(String myTableName) {
5 TableName tableName = TableName.valueOf(myTableName);
6 Table table = conn.getTable(tableName);
7 return table;
8}
列举Table
Plain Text
1import org.apache.hadoop.hbase.TableName;
2import org.apache.hadoop.hbase.client.Connection;
3import org.apache.hadoop.hbase.client.ConnectionFactory;
4import org.apache.hadoop.hbase.HBaseConfiguration;
5import org.apache.hadoop.security.UserGroupInformation;
6public class Test {
7 public static void main(String[] args) {
8 try {
9 TableName[] names = conn.getAdmin().listTableNames();
10 for (TableName name : names) {
11 System.out.println(name.getNameAsString());
12 }
13 } catch (Exception e) {
14 e.printStackTrace();
15 }
16 }
17}
Scan表数据
Plain Text
1import org.apache.hadoop.hbase.client.Result;
2import org.apache.hadoop.hbase.client.ResultScanner;
3import org.apache.hadoop.hbase.client.Scan;
4import org.apache.hadoop.hbase.client.Table;
5public void scanTable(Table table, byte[] startRow, byte[] stopRow) throws IOException {
6 Scan scan = new Scan();
7 scan.withStartRow(startRow);
8 scan.withStartRow(stopRow);
9 ResultScanner rs = table.getScanner(scan);
10 for (Result r = rs.next(); r != null; r = rs.next()) {
11 for (Cell kv : r.rawCells()) {
12 //scan cell
13 }
14 }
15 rs.close();
16}
17//Get表数据
18public void get(Table table,byte[] rowkey) throws IOException {
19 final byte[] CF = Bytes.toBytes("f");
20 final byte[] QUALIFIER = Bytes.toBytes("c1");
21 Get get = new Get(rowkey);
22 Result result = table.get(get);
23 byte[] cValue = result.getValue(CF,QUALIFIER);
24}
Get表数据
Plain Text
1public void get(Table table,byte[] rowkey) throws IOException {
2 final byte[] CF = Bytes.toBytes("f");
3 final byte[] QUALIFIER = Bytes.toBytes("c1");
4 Get get = new Get(rowkey);
5 Result result = table.get(get);
6 byte[] cValue = result.getValue(CF,QUALIFIER);
7}
创建Table(基于HBase2.2版本)
Plain Text
1import org.apache.hadoop.hbase.TableName;
2import org.apache.hadoop.hbase.client.Admin;
3import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
4import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
5import org.apache.hadoop.hbase.client.Connection;
6import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
7import org.apache.hadoop.hbase.util.Bytes;
8public void createTable(String myTableName, String myColumnFamily) throws IOException {
9 Admin admin = getConnection().getAdmin();
10 TableName tableName = TableName.valueOf(myTableName);
11 TableDescriptorBuilder htd = TableDescriptorBuilder.newBuilder(tableName);
12 byte[] cf = Bytes.toBytes(myColumnFamily);
13 ColumnFamilyDescriptorBuilder hcd = ColumnFamilyDescriptorBuilder.newBuilder(cf);
14 ColumnFamilyDescriptor columnFamilyDescriptor = hcd.build();
15 htd.setColumnFamily(columnFamilyDescriptor);
16 admin.createTable(htd.build());
17}
删除Table
Plain Text
1import org.apache.hadoop.hbase.TableName;
2import org.apache.hadoop.hbase.client.Admin;
3public void deleteTable(String myTableName) {
4 Admin admin = getAdmin();
5 TableName tableName = TableName.valueOf(myTableName);
6 if (!admin.isTableDisabled(tableName)) {
7 admin.disableTable(tableName);
8 }
9 admin.deleteTable(tableName);
10 admin.close();
11}
行操作常用方法
获取Row
Plain Text
1import org.apache.hadoop.hbase.client.Get;
2import org.apache.hadoop.hbase.client.Result;
3import org.apache.hadoop.hbase.client.Table;
4public void getRow(Table table, byte[] rowKey) {
5 Get get = new Get(rowKey);
6 Result result = table.get(get);
7 // do something
8}
插入Row
Plain Text
1import org.apache.hadoop.hbase.client.Put;
2import org.apache.hadoop.hbase.client.Table;
3public void putRow(Table table, byte[] rowKey, byte[] family,byte[] qualifier, byte[] value) {
4 Put put = new Put(rowKey);
5 put.addColumn(family, qualifier, value);
6 table.put(put);
7}
8//插入多条
9public void put(Table table,List<Put> puts) {
10 table.put(puts);
11}
删除Row
Plain Text
1import org.apache.hadoop.hbase.client.Delete;
2import org.apache.hadoop.hbase.client.Table;
3public void deleteRow(Table table, byte[] rowKey) {
4 Delete delete = new Delete(rowKey);
5 table.delete(delete);
6}
7
8Admin admin = conn.getAdmin();
9TableName tableName = TableName.valueOf(tableNameStr);
10HTableDescriptor htd = new HTableDescriptor(tableName);
11byte[] cf = Bytes.toBytes(columnFamily);
12HColumnDescriptor hcd = new HColumnDescriptor(cf);
13htd.addFamily(hcd);
14RegionSplitter.HexStringSplit hexStringSplit = new RegionSplitter.HexStringSplit();
15hexStringSplit.setFirstRow(Bytes.toBytes("00000000"));
16hexStringSplit.setLastRow(Bytes.toBytes("ffffffff"));
17byte[][] splitKeys = hexStringSplit.split(100);
18admin.createTable(htd,splitKeys);
19admin.close();