Start HBase: start-hbase.sh

Stop HBase: stop-hbase.sh

Enter the shell: hbase shell

  Cluster status: status

  Create a table: create 'tableName', 'colFam1'

  List tables: list 'tableName' (to count the rows in a table, use count 'tableName')

  Scan all rows: scan 'tableName'

  Insert a cell: put 'tableName', 'rowName', 'colFam1:qualify', 'value'

  Get a row: get 'tableName', 'rowName' (a Java sketch of get and scan follows this list)

  Delete a cell: delete 'tableName', 'rowName', 'colFam1:qualify' (use deleteall 'tableName', 'rowName' to remove a whole row)

  Disable a table: disable 'tableName'

  Drop a table: drop 'tableName'

  Exit the shell: exit
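
The get and scan commands above have straightforward Java equivalents. Below is a minimal read sketch using the same 0.9x-era client API as the code later in this post; the class name ReadExample and the 'tableName'/'rowName'/'colFam1:qualify' names are just the placeholders from the shell examples, not anything defined elsewhere.

package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ReadExample {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "tableName");

    // get 'tableName', 'rowName'
    Get get = new Get(Bytes.toBytes("rowName"));
    Result result = table.get(get);
    byte[] value = result.getValue(Bytes.toBytes("colFam1"), Bytes.toBytes("qualify"));
    System.out.println("get: " + Bytes.toString(value));

    // scan 'tableName'
    Scan scan = new Scan();
    ResultScanner scanner = table.getScanner(scan);
    for (Result row : scanner) {
      System.out.println("scan: " + row);
    }
    scanner.close();
    table.close();
  }
}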


Add a column family (a Java API sketch follows the shell commands below):

disable 'tableName'

alter 'tableName', NAME=>'fam2'

enable 'tableName'

Delete a column family: alter 'tableName', {NAME=>'fam2', METHOD=>'delete'}
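
A rough sketch of the Java implementation mentioned above, covering both adding and deleting a column family through HBaseAdmin (the same 0.9x-era API as the HBaseHelper class below); the class name AlterFamilyExample and the 'tableName'/'fam2' names are just placeholders taken from the shell commands.

package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class AlterFamilyExample {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);

    // disable 'tableName'
    admin.disableTable("tableName");

    // alter 'tableName', NAME=>'fam2'   -- add the new family
    admin.addColumn("tableName", new HColumnDescriptor("fam2"));

    // alter 'tableName', {NAME=>'fam2', METHOD=>'delete'}   -- drop it again
    // admin.deleteColumn("tableName", "fam2");

    // enable 'tableName'
    admin.enableTable("tableName");
  }
}
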
Get a cell at a specific timestamp: get 'my','id1',{COLUMN=>'fam1:q1',TIMESTAMP=>1408261511642}

Increment a counter: incr 'my','id2','fam1:age'

Read a counter value: get_counter 'my','id1','fam1:age'
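
In the old Java client the counter commands map to HTable.incrementColumnValue. A minimal sketch, with the table/row/column names taken from the shell examples above and a placeholder class name:

package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class CounterExample {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "my");

    // incr 'my','id2','fam1:age'  -- add 1 and return the new value
    long current = table.incrementColumnValue(Bytes.toBytes("id2"),
        Bytes.toBytes("fam1"), Bytes.toBytes("age"), 1);
    System.out.println("counter is now: " + current);

    table.close();
  }
}

Counters are stored as 8-byte longs, so from Java you can also read one back with a normal Get and decode the cell value via Bytes.toLong.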


HBaseHelper

package util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Used by the book examples to generate tables and fill them with test data.
 */
public class HBaseHelper {

  private Configuration conf = null;
  private HBaseAdmin admin = null;

  protected HBaseHelper(Configuration conf) throws IOException {
    this.conf = conf;
    this.admin = new HBaseAdmin(conf);
  }

  public static HBaseHelper getHelper(Configuration conf) throws IOException {
    return new HBaseHelper(conf);
  }

  public boolean existsTable(String table) throws IOException {
    return admin.tableExists(table);
  }

  public void createTable(String table, String... colfams) throws IOException {
    createTable(table, null, colfams);
  }

  public void createTable(String table, byte[][] splitKeys, String... colfams)
      throws IOException {
    HTableDescriptor desc = new HTableDescriptor(table);
    for (String cf : colfams) {
      HColumnDescriptor coldef = new HColumnDescriptor(cf);
      desc.addFamily(coldef);
    }
    if (splitKeys != null) {
      admin.createTable(desc, splitKeys);
    } else {
      admin.createTable(desc);
    }
  }

  public void disableTable(String table) throws IOException {
    admin.disableTable(table);
  }

  public void dropTable(String table) throws IOException {
    if (existsTable(table)) {
      disableTable(table);
      admin.deleteTable(table);
    }
  }

  public void fillTable(String table, int startRow, int endRow, int numCols,
      String... colfams) throws IOException {
    fillTable(table, startRow, endRow, numCols, -1, false, colfams);
  }

  public void fillTable(String table, int startRow, int endRow, int numCols,
      boolean setTimestamp, String... colfams) throws IOException {
    fillTable(table, startRow, endRow, numCols, -1, setTimestamp, colfams);
  }

  public void fillTable(String table, int startRow, int endRow, int numCols,
      int pad, boolean setTimestamp, String... colfams) throws IOException {
    fillTable(table, startRow, endRow, numCols, pad, setTimestamp, false, colfams);
  }

  public void fillTable(String table, int startRow, int endRow, int numCols,
      int pad, boolean setTimestamp, boolean random, String... colfams)
      throws IOException {
    HTable tbl = new HTable(conf, table);
    Random rnd = new Random();
    for (int row = startRow; row <= endRow; row++) {
      for (int col = 0; col < numCols; col++) {
        Put put = new Put(Bytes.toBytes("row-" + padNum(row, pad)));
        for (String cf : colfams) {
          String colName = "col-" + padNum(col, pad);
          String val = "val-" + (random ?
              Integer.toString(rnd.nextInt(numCols)) :
              padNum(row, pad) + "." + padNum(col, pad));
          if (setTimestamp) {
            put.add(Bytes.toBytes(cf), Bytes.toBytes(colName),
                col, Bytes.toBytes(val));
          } else {
            put.add(Bytes.toBytes(cf), Bytes.toBytes(colName),
                Bytes.toBytes(val));
          }
        }
        tbl.put(put);
      }
    }
    tbl.close();
  }

  public String padNum(int num, int pad) {
    String res = Integer.toString(num);
    if (pad > 0) {
      while (res.length() < pad) {
        res = "0" + res;
      }
    }
    return res;
  }

  public void put(String table, String row, String fam, String qual,
      String val) throws IOException {
    HTable tbl = new HTable(conf, table);
    Put put = new Put(Bytes.toBytes(row));
    put.add(Bytes.toBytes(fam), Bytes.toBytes(qual), Bytes.toBytes(val));
    tbl.put(put);
    tbl.close();
  }

  public void put(String table, String row, String fam, String qual, long ts,
      String val) throws IOException {
    HTable tbl = new HTable(conf, table);
    Put put = new Put(Bytes.toBytes(row));
    put.add(Bytes.toBytes(fam), Bytes.toBytes(qual), ts, Bytes.toBytes(val));
    tbl.put(put);
    tbl.close();
  }

  public void put(String table, String[] rows, String[] fams, String[] quals,
      long[] ts, String[] vals) throws IOException {
    HTable tbl = new HTable(conf, table);
    for (String row : rows) {
      Put put = new Put(Bytes.toBytes(row));
      for (String fam : fams) {
        int v = 0;
        for (String qual : quals) {
          String val = vals[v < vals.length ? v : vals.length - 1];
          long t = ts[v < ts.length ? v : ts.length - 1];
          put.add(Bytes.toBytes(fam), Bytes.toBytes(qual), t, Bytes.toBytes(val));
          v++;
        }
      }
      tbl.put(put);
    }
    tbl.close();
  }

  public void dump(String table, String[] rows, String[] fams, String[] quals)
      throws IOException {
    HTable tbl = new HTable(conf, table);
    List<Get> gets = new ArrayList<Get>();
    for (String row : rows) {
      Get get = new Get(Bytes.toBytes(row));
      get.setMaxVersions();
      if (fams != null) {
        for (String fam : fams) {
          for (String qual : quals) {
            get.addColumn(Bytes.toBytes(fam), Bytes.toBytes(qual));
          }
        }
      }
      gets.add(get);
    }
    Result[] results = tbl.get(gets);
    for (Result result : results) {
      for (KeyValue kv : result.raw()) {
        System.out.println("KV: " + kv +
            ", Value: " + Bytes.toString(kv.getValue()));
      }
    }
  }
}
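
A short usage sketch of the helper, assuming it is available on the classpath as util.HBaseHelper: fillTable writes a small grid of test cells ('row-1'/'col-0' = 'val-1.0', and so on) and dump prints them back. The table name 'demotable' and the class name HelperDemo are just placeholders.

package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

import util.HBaseHelper;

public class HelperDemo {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HBaseHelper helper = HBaseHelper.getHelper(conf);

    if (helper.existsTable("demotable")) {
      helper.dropTable("demotable");            // clean up earlier runs
    }
    helper.createTable("demotable", "fam1");    // one column family

    // Fill rows 1..3 with 2 columns each: row-1/col-0 = val-1.0, ...
    helper.fillTable("demotable", 1, 3, 2, "fam1");

    // Print the cells back.
    helper.dump("demotable",
        new String[] { "row-1", "row-2", "row-3" },
        new String[] { "fam1" },
        new String[] { "col-0", "col-1" });
  }
}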

Main

package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

import util.HBaseHelper;

public class Main {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HBaseHelper helper = HBaseHelper.getHelper(conf);

    // Recreate the test table from scratch.
    String tableName = "testtable";
    if (helper.existsTable(tableName)) {
      helper.dropTable(tableName);
    }
    helper.createTable(tableName, "colfam1");

    // Write two cells into one row.
    HTable table = new HTable(conf, tableName);
    Put put = new Put(Bytes.toBytes("row1"));
    put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), Bytes.toBytes("val1"));
    put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual2"), Bytes.toBytes("val2"));
    table.put(put);
    table.close();
  }
}
