HBase provides a rich Java API, including pooled table access. Below I use a table pool (HTablePool) to demonstrate operating HBase through the Java API.

The project structure is as follows:

The HBase version I am using is:

hbase-0.98.9-hadoop2-bin.tar.gz

After downloading it, you can take the jar files from its lib directory; these make up the hbase-lib resources in the project structure above.

The interface:

/hbase-util/src/com/b510/hbase/util/dao/HbaseDao.java

 package com.b510.hbase.util.dao;

 import java.util.List;

 import org.apache.hadoop.hbase.client.HTableInterface;

/**
 * @author Hongten
 * @created 7 Nov 2018
 */
public interface HbaseDao {

    // get a table handle from the pool
    public HTableInterface getHTableFromPool(String tableName);

    // check whether the table exists
    public boolean isHTableExist(String tableName);

    // create a table
    public void createHTable(String tableName, String[] columnFamilys);

    // insert a new row
    public void addRow(String tableName, String rowKey, String columnFamily, String column, String value);

    // get a row by row key
    public void getRow(String tableName, String rowKey);

    // get all rows
    public void getAllRows(String tableName);

    // get rows within a given row-key range
    public void getRowsByRange(String tableName, String startRowKey, String endRowKey);

    // delete a row
    public void delRow(String tableName, String rowKey);

    // delete rows by row keys
    public void delRowsByRowKeys(String tableName, List<String> rowKeys);

    // turn off auto-flush; buffered data is flushed when the table is closed
    public void closeAutoFlush(HTableInterface table);

    // close a table
    public void closeTable(HTableInterface table);

    // close the pool connection
    public void closePoolConnection();

    // delete a table
    public void deleteHTable(String tableName);
}
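Before looking at the implementation, here is a minimal sketch of how the DAO is intended to be driven end to end. The HbaseDaoQuickStart class name and the t_demo table are mine, for illustration only; the full test class at the end of this post exercises every method:

 package com.b510.hbase.util.dao.test;

 import com.b510.hbase.util.dao.HbaseDao;
import com.b510.hbase.util.dao.impl.HbaseDaoImpl;

public class HbaseDaoQuickStart {

    public static void main(String[] args) {
        // pool of 4 table handles
        HbaseDao dao = new HbaseDaoImpl(4);
        dao.createHTable("t_demo", new String[] { "cf1" });
        dao.addRow("t_demo", "row1", "cf1", "name", "Hongten");
        dao.getRow("t_demo", "row1");
        dao.deleteHTable("t_demo");
        // release all pooled connections at shutdown
        dao.closePoolConnection();
    }
}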

The implementation class:

/hbase-util/src/com/b510/hbase/util/dao/impl/HbaseDaoImpl.java

 package com.b510.hbase.util.dao.impl;

 import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

import com.b510.hbase.util.dao.HbaseDao;

/**
 * @author Hongten
 * @created 7 Nov 2018
 */
@SuppressWarnings("deprecation")
public class HbaseDaoImpl implements HbaseDao {

    private static Configuration conf = null;
    private static HBaseAdmin hAdmin;
    private static HTablePool pool;
    private static int defaultPoolSize = 5;

    public HbaseDaoImpl(int poolSize) {
        conf = HBaseConfiguration.create();
        // ZooKeeper quorum of the cluster; note that 2888 is ZooKeeper's peer port,
        // while clients normally connect on the client port (2181 by default).
        conf.set("hbase.zookeeper.quorum", "node1:2888,node2:2888,node3:2888");
        try {
            hAdmin = new HBaseAdmin(conf);
            // the default pool size is 5.
            pool = new HTablePool(conf, poolSize <= 0 ? defaultPoolSize : poolSize);
        } catch (MasterNotRunningException e) {
            e.printStackTrace();
        } catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public HTableInterface getHTableFromPool(String tableName) {
        HTableInterface table = pool.getTable(tableName);
        return table;
    }

    @Override
    public boolean isHTableExist(String tableName) {
        try {
            return hAdmin.tableExists(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    @Override
    public void createHTable(String tableName, String[] columnFamilys) {
        if (!isHTableExist(tableName)) {
            HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
            // HBase recommends keeping the number of column families below 3;
            // normally a table has only one column family.
            for (String cfName : columnFamilys) {
                HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(cfName);
                tableDescriptor.addFamily(hColumnDescriptor);
            }
            try {
                hAdmin.createTable(tableDescriptor);
            } catch (IOException e) {
                e.printStackTrace();
            }
            System.out.println("The table [" + tableName + "] is created.");
        } else {
            System.out.println("The table [" + tableName + "] already exists.");
        }
    }

    @Override
    public void addRow(String tableName, String rowKey, String columnFamily, String column, String value) {
        if (isHTableExist(tableName)) {
            HTableInterface table = getHTableFromPool(tableName);
            Put put = new Put(rowKey.getBytes());
            put.add(columnFamily.getBytes(), column.getBytes(), value.getBytes());
            try {
                table.put(put);
            } catch (IOException e) {
                e.printStackTrace();
            }
            System.out.println("Insert into table [" + tableName + "], Rowkey=[" + rowKey + "], Column=[" + columnFamily + ":" + column + "], Value=[" + value + "].");
            closeTable(table);
        } else {
            System.out.println("The table [" + tableName + "] does not exist.");
        }
    }

    @Override
    public void getRow(String tableName, String rowKey) {
        if (isHTableExist(tableName)) {
            HTableInterface table = getHTableFromPool(tableName);
            Get get = new Get(rowKey.getBytes());
            Result result;
            try {
                result = table.get(get);
                String columnName = "";
                String timeStamp = "";
                String columnFamily = "";
                String value = "";
                for (Cell cell : result.rawCells()) {
                    timeStamp = String.valueOf(cell.getTimestamp());
                    columnFamily = new String(CellUtil.cloneFamily(cell));
                    columnName = new String(CellUtil.cloneQualifier(cell));
                    value = new String(CellUtil.cloneValue(cell));
                    System.out.println("Get from table [" + tableName + "], Rowkey=[" + rowKey + "], Column=[" + columnFamily + ":" + columnName + "], Timestamp=[" + timeStamp + "], Value=[" + value + "].");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            closeTable(table);
        } else {
            System.out.println("The table [" + tableName + "] does not exist.");
        }
    }

    @Override
    public void getAllRows(String tableName) {
        if (isHTableExist(tableName)) {
            Scan scan = new Scan();
            scanHTable(tableName, scan);
        } else {
            System.out.println("The table [" + tableName + "] does not exist.");
        }
    }

    private void scanHTable(String tableName, Scan scan) {
        try {
            HTableInterface table = getHTableFromPool(tableName);
            ResultScanner results = table.getScanner(scan);
            for (Result result : results) {
                String rowKey = "";
                String columnName = "";
                String timeStamp = "";
                String columnFamily = "";
                String value = "";
                for (Cell cell : result.rawCells()) {
                    rowKey = new String(CellUtil.cloneRow(cell));
                    timeStamp = String.valueOf(cell.getTimestamp());
                    columnFamily = new String(CellUtil.cloneFamily(cell));
                    columnName = new String(CellUtil.cloneQualifier(cell));
                    value = new String(CellUtil.cloneValue(cell));
                    System.out.println("Get from table [" + tableName + "], Rowkey=[" + rowKey + "], Column=[" + columnFamily + ":" + columnName + "], Timestamp=[" + timeStamp + "], Value=[" + value + "].");
                }
            }
            closeTable(table);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void getRowsByRange(String tableName, String startRowKey, String endRowKey) {
        if (isHTableExist(tableName)) {
            Scan scan = new Scan();
            scan.setStartRow(startRowKey.getBytes());
            // the stop row key is exclusive: the result does not include
            // the stop row record (HBase 0.98.9 behaviour).
            scan.setStopRow(endRowKey.getBytes());
            scanHTable(tableName, scan);
        } else {
            System.out.println("The table [" + tableName + "] does not exist.");
        }
    }

    @Override
    public void delRow(String tableName, String rowKey) {
        if (isHTableExist(tableName)) {
            HTableInterface table = getHTableFromPool(tableName);
            deleteRow(table, rowKey);
        } else {
            System.out.println("The table [" + tableName + "] does not exist.");
        }
    }

    private void deleteRow(HTableInterface table, String rowKey) {
        Delete del = new Delete(rowKey.getBytes());
        try {
            table.delete(del);
            System.out.println("Delete from table [" + new String(table.getTableName()) + "], Rowkey=[" + rowKey + "].");
            closeTable(table);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void delRowsByRowKeys(String tableName, List<String> rowKeys) {
        if (rowKeys != null && rowKeys.size() > 0) {
            for (String rowKey : rowKeys) {
                delRow(tableName, rowKey);
            }
        }
    }

    @Override
    public void deleteHTable(String tableName) {
        if (isHTableExist(tableName)) {
            try {
                hAdmin.disableTable(tableName.getBytes());
                hAdmin.deleteTable(tableName.getBytes());
                System.out.println("The table [" + tableName + "] is deleted.");
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            System.out.println("The table [" + tableName + "] does not exist.");
        }
    }

    @Override
    public void closeAutoFlush(HTableInterface table) {
        // turn off auto-flush so puts are buffered client-side
        // until the write buffer fills up or the table is closed
        table.setAutoFlush(false, false);
    }

    @Override
    public void closeTable(HTableInterface table) {
        try {
            table.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void closePoolConnection() {
        try {
            pool.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
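One detail in getRowsByRange is worth dwelling on: in HBase 0.98 the start row of a Scan is inclusive while the stop row is exclusive. A minimal sketch against the raw client API makes the point (the ScanRangeDemo class is mine, and the connection settings are assumed to come from hbase-site.xml on the classpath rather than the hard-coded quorum above):

 package com.b510.hbase.util.dao.test;

 import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

@SuppressWarnings("deprecation")
public class ScanRangeDemo {

    public static void main(String[] args) throws Exception {
        HTable table = new HTable(HBaseConfiguration.create(), "t_test");
        Scan scan = new Scan();
        scan.setStartRow("12345567".getBytes()); // inclusive: returned
        scan.setStopRow("12345569".getBytes());  // exclusive: NOT returned
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            // prints 12345567 and 12345568 only
            System.out.println(new String(result.getRow()));
        }
        scanner.close();
        table.close();
    }
}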

The test class:

/hbase-util/src/com/b510/hbase/util/dao/test/HbaseDaoTest.java

 package com.b510.hbase.util.dao.test;

 import java.util.ArrayList;
import java.util.List;

import org.junit.Test;

import com.b510.hbase.util.dao.HbaseDao;
import com.b510.hbase.util.dao.impl.HbaseDaoImpl;

/**
 * @author Hongten
 * @created 7 Nov 2018
 */
public class HbaseDaoTest {

    HbaseDao dao = new HbaseDaoImpl(4);

    public static final String tableName = "t_test";
    public static final String columnFamilyName = "cf1";
    public static final String[] CFs = { columnFamilyName };
    public static final String COLUMN_NAME_NAME = "name";
    public static final String COLUMN_NAME_AGE = "age";

    @Test
    public void main() {
        createTable();
        addRow();
        getRow();
        getAllRows();
        getRowsByRange();
        delRow();
        delRowsByRowKeys();
        deleteHTable();
    }

    public void createTable() {
        System.out.println("=== create table ====");
        dao.createHTable(tableName, CFs);
    }

    public void addRow() {
        System.out.println("=== insert record ====");
        dao.addRow(tableName, "12345566", columnFamilyName, COLUMN_NAME_NAME, "Hongten");
        dao.addRow(tableName, "12345566", columnFamilyName, COLUMN_NAME_AGE, "22");
        dao.addRow(tableName, "12345567", columnFamilyName, COLUMN_NAME_NAME, "Tom");
        dao.addRow(tableName, "12345567", columnFamilyName, COLUMN_NAME_AGE, "25");
        dao.addRow(tableName, "12345568", columnFamilyName, COLUMN_NAME_NAME, "Jone");
        dao.addRow(tableName, "12345568", columnFamilyName, COLUMN_NAME_AGE, "30");
        dao.addRow(tableName, "12345569", columnFamilyName, COLUMN_NAME_NAME, "Jobs");
        dao.addRow(tableName, "12345569", columnFamilyName, COLUMN_NAME_AGE, "24");
    }

    public void getRow() {
        System.out.println("=== get record ====");
        dao.getRow(tableName, "12345566");
    }

    public void getAllRows() {
        System.out.println("=== scan table ====");
        dao.getAllRows(tableName);
    }

    public void getRowsByRange() {
        System.out.println("=== scan record by giving range ====");
        // the stop row is exclusive, so this returns only the '12345567' and '12345568' rows.
        dao.getRowsByRange(tableName, "12345567", "12345569");
    }

    public void delRow() {
        System.out.println("=== delete record ====");
        dao.delRow(tableName, "12345568");
        // only the '12345567' row is left in the range.
        getRowsByRange();
    }

    public void delRowsByRowKeys() {
        System.out.println("=== delete batch records ====");
        List<String> rowKeys = new ArrayList<String>();
        rowKeys.add("12345566");
        rowKeys.add("12345569");
        dao.delRowsByRowKeys(tableName, rowKeys);
        // '12345566' and '12345569' can no longer be found.
        getAllRows();
    }

    public void deleteHTable() {
        System.out.println("=== delete table ====");
        dao.deleteHTable(tableName);
    }
}

Test output:

log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
=== create table ====
The table [t_test] is created.
=== insert record ====
Insert into table [t_test], Rowkey=[12345566], Column=[cf1:name], Value=[Hongten].
Insert into table [t_test], Rowkey=[12345566], Column=[cf1:age], Value=[22].
Insert into table [t_test], Rowkey=[12345567], Column=[cf1:name], Value=[Tom].
Insert into table [t_test], Rowkey=[12345567], Column=[cf1:age], Value=[25].
Insert into table [t_test], Rowkey=[12345568], Column=[cf1:name], Value=[Jone].
Insert into table [t_test], Rowkey=[12345568], Column=[cf1:age], Value=[30].
Insert into table [t_test], Rowkey=[12345569], Column=[cf1:name], Value=[Jobs].
Insert into table [t_test], Rowkey=[12345569], Column=[cf1:age], Value=[24].
=== get record ====
Get from table [t_test], Rowkey=[12345566], Column=[cf1:age], Timestamp=[1541652952697], Value=[22].
Get from table [t_test], Rowkey=[12345566], Column=[cf1:name], Timestamp=[1541652952626], Value=[Hongten].
=== scan table ====
Get from table [t_test], Rowkey=[12345566], Column=[cf1:age], Timestamp=[1541652952697], Value=[22].
Get from table [t_test], Rowkey=[12345566], Column=[cf1:name], Timestamp=[1541652952626], Value=[Hongten].
Get from table [t_test], Rowkey=[12345567], Column=[cf1:age], Timestamp=[1541652952779], Value=[25].
Get from table [t_test], Rowkey=[12345567], Column=[cf1:name], Timestamp=[1541652952743], Value=[Tom].
Get from table [t_test], Rowkey=[12345568], Column=[cf1:age], Timestamp=[1541652952834], Value=[30].
Get from table [t_test], Rowkey=[12345568], Column=[cf1:name], Timestamp=[1541652952807], Value=[Jone].
Get from table [t_test], Rowkey=[12345569], Column=[cf1:age], Timestamp=[1541652952928], Value=[24].
Get from table [t_test], Rowkey=[12345569], Column=[cf1:name], Timestamp=[1541652952869], Value=[Jobs].
=== scan record by giving range ====
Get from table [t_test], Rowkey=[12345567], Column=[cf1:age], Timestamp=[1541652952779], Value=[25].
Get from table [t_test], Rowkey=[12345567], Column=[cf1:name], Timestamp=[1541652952743], Value=[Tom].
Get from table [t_test], Rowkey=[12345568], Column=[cf1:age], Timestamp=[1541652952834], Value=[30].
Get from table [t_test], Rowkey=[12345568], Column=[cf1:name], Timestamp=[1541652952807], Value=[Jone].
=== delete record ====
Delete from table [t_test], Rowkey=[12345568].
=== scan record by giving range ====
Get from table [t_test], Rowkey=[12345567], Column=[cf1:age], Timestamp=[1541652952779], Value=[25].
Get from table [t_test], Rowkey=[12345567], Column=[cf1:name], Timestamp=[1541652952743], Value=[Tom].
=== delete batch records ====
Delete from table [t_test], Rowkey=[12345566].
Delete from table [t_test], Rowkey=[12345569].
=== scan table ====
Get from table [t_test], Rowkey=[12345567], Column=[cf1:age], Timestamp=[1541652952779], Value=[25].
Get from table [t_test], Rowkey=[12345567], Column=[cf1:name], Timestamp=[1541652952743], Value=[Tom].
=== delete table ====
The table [t_test] is deleted.
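One method the test never exercises is closeAutoFlush. With auto-flush off, Puts are buffered on the client and only shipped to the region servers when the write buffer fills up or the table is closed, which speeds up bulk inserts considerably. A minimal sketch (the BatchInsertDemo class name and the 1,000-row loop are mine, for illustration only):

 package com.b510.hbase.util.dao.test;

 import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;

import com.b510.hbase.util.dao.impl.HbaseDaoImpl;

public class BatchInsertDemo {

    public static void main(String[] args) throws Exception {
        HbaseDaoImpl dao = new HbaseDaoImpl(4);
        HTableInterface table = dao.getHTableFromPool("t_test");
        // buffer puts client-side instead of issuing one RPC per put
        dao.closeAutoFlush(table);
        for (int i = 0; i < 1000; i++) {
            Put put = new Put(("row" + i).getBytes());
            put.add("cf1".getBytes(), "name".getBytes(), ("value" + i).getBytes());
            table.put(put);
        }
        // close() flushes whatever is still buffered
        dao.closeTable(table);
        dao.closePoolConnection();
    }
}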

Source code download:

hbase-util.zip

