Connect using the Java API
Last updated: 2022-07-25
The HBase Java API is the most common and most efficient way to access HBase. The HBase shell is itself implemented on top of the Java client, and its commands largely map to Java API operations. The Java API is well suited to scenarios such as Hadoop MapReduce jobs that process HBase table data in parallel batches.
After the connection is established, you can use the Java API to access the HBase cluster. The following sections provide some simple Java examples.
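The snippets below reuse a Connection object named conn that is created when you connect to the cluster. As a minimal sketch of obtaining it with a standard HBase client configuration (the ZooKeeper quorum address below is a placeholder; replace it with your cluster's actual connection address):
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
public Connection createConnection() throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Placeholder endpoint: set this to your cluster's ZooKeeper quorum / connection address.
    conf.set("hbase.zookeeper.quorum", "host1,host2,host3");
    return ConnectionFactory.createConnection(conf);
}
A Connection is heavyweight and thread-safe; it is normally created once, shared across the application, and closed only when the application exits.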
Common table operations
Get a Table
import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
public Table getTable(String myTableName) throws IOException {
    // conn is the Connection created when connecting to the cluster.
    TableName tableName = TableName.valueOf(myTableName);
    Table table = conn.getTable(tableName);
    return table;
}
List tables
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
public class Test {
    public static void main(String[] args) {
        try {
            // conn is the Connection created when connecting to the cluster.
            TableName[] names = conn.getAdmin().listTableNames();
            for (TableName name : names) {
                System.out.println(name.getNameAsString());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Scan table data
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
public void scanTable(Table table, byte[] startRow, byte[] stopRow) throws IOException {
    Scan scan = new Scan();
    scan.withStartRow(startRow);
    scan.withStopRow(stopRow);
    ResultScanner rs = table.getScanner(scan);
    for (Result r = rs.next(); r != null; r = rs.next()) {
        for (Cell kv : r.rawCells()) {
            // process each cell
        }
    }
    rs.close();
}
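The inner loop above leaves cell handling as a placeholder. One possible way to read each cell is sketched below using the standard CellUtil helpers (printCell is a hypothetical helper, and it assumes the values are UTF-8 strings):
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;
// Illustrative cell handling for the placeholder in scanTable above:
// copy out the row, qualifier, and value and print them.
private void printCell(Cell kv) {
    String row = Bytes.toString(CellUtil.cloneRow(kv));
    String qualifier = Bytes.toString(CellUtil.cloneQualifier(kv));
    String value = Bytes.toString(CellUtil.cloneValue(kv));
    System.out.println(row + ":" + qualifier + "=" + value);
}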
Get table data
import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
public void get(Table table, byte[] rowkey) throws IOException {
    final byte[] CF = Bytes.toBytes("f");
    final byte[] QUALIFIER = Bytes.toBytes("c1");
    Get get = new Get(rowkey);
    Result result = table.get(get);
    byte[] cValue = result.getValue(CF, QUALIFIER);
}
Create a table (based on HBase 2.2)
import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public void createTable(String myTableName, String myColumnFamily) throws IOException {
    Admin admin = getConnection().getAdmin();
    TableName tableName = TableName.valueOf(myTableName);
    TableDescriptorBuilder htd = TableDescriptorBuilder.newBuilder(tableName);
    byte[] cf = Bytes.toBytes(myColumnFamily);
    ColumnFamilyDescriptorBuilder hcd = ColumnFamilyDescriptorBuilder.newBuilder(cf);
    ColumnFamilyDescriptor columnFamilyDescriptor = hcd.build();
    htd.setColumnFamily(columnFamilyDescriptor);
    admin.createTable(htd.build());
    admin.close();
}
Delete a table
import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
public void deleteTable(String myTableName) throws IOException {
    Admin admin = getAdmin();
    TableName tableName = TableName.valueOf(myTableName);
    // A table must be disabled before it can be deleted.
    if (!admin.isTableDisabled(tableName)) {
        admin.disableTable(tableName);
    }
    admin.deleteTable(tableName);
    admin.close();
}
Common row operations
Get a row
import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
public void getRow(Table table, byte[] rowKey) throws IOException {
    Get get = new Get(rowKey);
    Result result = table.get(get);
    // process the result
}
Put a row
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
public void putRow(Table table, byte[] rowKey, byte[] family, byte[] qualifier, byte[] value) throws IOException {
    Put put = new Put(rowKey);
    put.addColumn(family, qualifier, value);
    table.put(put);
}
// Put multiple rows in one call
public void put(Table table, List<Put> puts) throws IOException {
    table.put(puts);
}
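As a usage sketch for the multi-row put above (putBatchExample is a hypothetical helper; the row keys, column family f, and qualifier c1 are placeholder values):
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
public void putBatchExample(Table table) throws IOException {
    // Build a batch of Puts and submit them in a single client call
    // instead of issuing one put per row.
    List<Put> puts = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        Put put = new Put(Bytes.toBytes("row-" + i));
        put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("c1"), Bytes.toBytes("value-" + i));
        puts.add(put);
    }
    table.put(puts);
}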
Delete a row
import java.io.IOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
public void deleteRow(Table table, byte[] rowKey) throws IOException {
    Delete delete = new Delete(rowKey);
    table.delete(delete);
}
Create a pre-split table
import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.RegionSplitter;
public void createPreSplitTable(String tableNameStr, String columnFamily) throws IOException {
    Admin admin = conn.getAdmin();
    TableName tableName = TableName.valueOf(tableNameStr);
    HTableDescriptor htd = new HTableDescriptor(tableName);
    byte[] cf = Bytes.toBytes(columnFamily);
    HColumnDescriptor hcd = new HColumnDescriptor(cf);
    htd.addFamily(hcd);
    // Pre-split the table into 100 regions over the hex key range 00000000 - ffffffff.
    RegionSplitter.HexStringSplit hexStringSplit = new RegionSplitter.HexStringSplit();
    hexStringSplit.setFirstRow(Bytes.toBytes("00000000"));
    hexStringSplit.setLastRow(Bytes.toBytes("ffffffff"));
    byte[][] splitKeys = hexStringSplit.split(100);
    admin.createTable(htd, splitKeys);
    admin.close();
}