HBase API operations
Create a Maven project and set its JDK version.
Add the required dependencies to the pom.xml file:
<dependencies>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.8</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-it</artifactId>
<version>1.2.5</version>
<type>pom</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.1</version>
</dependency>
</dependencies>
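The client reads its connection settings from an hbase-site.xml file on the classpath. If that file is not available, the ZooKeeper quorum can also be set on the Configuration object directly. Below is a minimal sketch, assuming a local single-node cluster; the class name HBaseConfSketch, the host name and the port are placeholders, not values from this setup.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

//Sketch: point the client at the cluster when hbase-site.xml is not on the classpath
public class HBaseConfSketch {
    public static Configuration clientConf() {
        // start from hbase-default.xml plus any hbase-site.xml found on the classpath
        Configuration conf = HBaseConfiguration.create();
        // placeholder values; replace with the actual ZooKeeper quorum of the cluster
        conf.set("hbase.zookeeper.quorum", "localhost");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        return conf;
    }
}

The first example class, CreateTableTest, covers creating, listing, altering and deleting tables, as well as basic put, get and scan operations: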
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableTest {
static Configuration conf = HBaseConfiguration.create();//load the HBase configuration
static HBaseAdmin admin = null;//admin handle used for table management
public static void main(String[] args) throws Exception{
admin=new HBaseAdmin(conf);
// createTable();
// listTable();
// deleteTable();
// listTable();
// putData();
// scanTable();
getData();
// putaLots();
}
//create a table
public static void createTable() throws IOException{
//table descriptor
HTableDescriptor table=new HTableDescriptor(TableName.valueOf("javaemp1"));
//column families
table.addFamily(new HColumnDescriptor("personal"));
table.addFamily(new HColumnDescriptor("professional"));
admin.createTable(table);
System.out.println("create table finished");
}
//list all tables
public static void listTable() throws IOException{
HTableDescriptor[] Tablelist = admin.listTables();
for(int i=0;i<Tablelist.length;i++)
{
System.out.println(Tablelist[i].getNameAsString());
}
}
//delete a table (and delete/add a column family)
public static void deleteTable() throws Exception{
//a table must be disabled before it can be deleted
// admin.disableTable("javaemp1");
// admin.deleteTable("javaemp1");
// System.out.println("delete finished");
//delete a column family from a table
// admin.deleteColumn("javaemp","professional");
// System.out.println("delete column");
//add a column family
admin.addColumn("javaemp",new HColumnDescriptor("professional"));
System.out.println("add column");
}
//----------------------------------------------------------------------------------------------------------
//insert data
public static void putData() throws Exception{
//HTable operates on a single table; arguments: configuration object, table name
HTable table = new HTable(conf,"javaemp");
Put p = new Put(Bytes.toBytes("1001"));//a Put targets a single row key
Put p1=new Put(Bytes.toBytes("1002"));
//arguments: column family, qualifier, value
p.add(Bytes.toBytes("personal"),Bytes.toBytes("name"),Bytes.toBytes("lalisa"));
p.add(Bytes.toBytes("personal"),Bytes.toBytes("city"),Bytes.toBytes("beijing"));
table.put(p);
p1.add(Bytes.toBytes("professional"),Bytes.toBytes("designation"),Bytes.toBytes("it"));
p1.add(Bytes.toBytes("professional"),Bytes.toBytes("salary"),Bytes.toBytes("16010"));
table.put(p1);
System.out.println("put data finished");
table.close();//release the HTable resources
}
//batch insert
public static void putaLots() throws IOException{
HTable table = new HTable(conf,"javaemp");
ArrayList<Put> list = new ArrayList<Put>(10);
for (int i=0;i<10;i++)
{
Put put = new Put(Bytes.toBytes("row"+i));
put.add(Bytes.toBytes("personal"),Bytes.toBytes("name"),Bytes.toBytes("people"+i));
list.add(put);
}
table.put(list);
System.out.println("put list finished");
}
//get one row and read a single column from it
public static void getData() throws IOException{
HTable table = new HTable(conf, "javaemp");
Get get = new Get(Bytes.toBytes("1001"));//a Get targets a single row key
Result result = table.get(get);//fetch the row
//print one field of the row
byte[] value = result.getValue(Bytes.toBytes("personal"),Bytes.toBytes("name"));
String name=Bytes.toString(value);
System.out.println("Name:"+name);
//print every cell of the row
Cell[] cells = result.rawCells();
for(Cell cell:cells)
{
System.out.print(Bytes.toString(CellUtil.cloneRow(cell))+"--");
System.out.print(Bytes.toString(CellUtil.cloneFamily(cell))+":");
System.out.print(Bytes.toString(CellUtil.cloneQualifier(cell))+"->");
System.out.println(Bytes.toString(CellUtil.cloneValue(cell)));
}
table.close();//release the HTable resources
}
//scan a single column and a whole column family
public static void scanTable() throws IOException{
HTable table = new HTable(conf, "javaemp");
Scan scan = new Scan();
scan.addColumn(Bytes.toBytes("personal"), Bytes.toBytes("name"));//scan one column of a family
scan.addFamily(Bytes.toBytes("professional"));//scan a whole column family
ResultScanner scanner = table.getScanner(scan);
for(Result res=scanner.next();res!=null;res=scanner.next())
{
System.out.println(res);
}
scanner.close();
table.close();//release the HTable resources
}
//count the number of row keys
public static void count(){ }
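//Illustrative sketch: one way to implement the row count is to scan the whole table
//and tally the Results returned; the table name "javaemp" is taken from the methods
//above, and countrow() in the second example below uses the same approach.
public static void countSketch() throws IOException{
HTable table = new HTable(conf,"javaemp");
ResultScanner scanner = table.getScanner(new Scan());
int rows = 0;
while(scanner.next()!=null)
{
rows++;
}
scanner.close();
table.close();
System.out.println("row count:"+rows);
}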
//shut down HBase (shutdown() stops the cluster itself; use admin.close() to only release the client connection)
public static void close() throws IOException{
admin.shutdown();
}
}
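Both example classes use HTable and HBaseAdmin, the older entry points of the 1.x client; the commented-out ConnectionFactory line in the second class points at the newer Connection-based API. Below is a minimal sketch of the same put-and-read-back flow through that API; the class name ConnectionApiSketch, the row key "1003" and the value "rose" are illustrative and not taken from the examples above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

//Sketch: write one cell and read it back through the Connection-based client API
public class ConnectionApiSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Connection and Table are Closeable, so try-with-resources releases them
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("javaemp"))) {
            Put put = new Put(Bytes.toBytes("1003")); // "1003" is an illustrative row key
            put.addColumn(Bytes.toBytes("personal"), Bytes.toBytes("name"), Bytes.toBytes("rose"));
            table.put(put);
            Result result = table.get(new Get(Bytes.toBytes("1003")));
            System.out.println("Name:" + Bytes.toString(
                    result.getValue(Bytes.toBytes("personal"), Bytes.toBytes("name"))));
        }
    }
}

The second example class, HbaseWork, runs the same kind of operations (create table, delete a column family, batch put, get, scan, row count, delete table) against a member table: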
package com.neworigin.Work;

import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class HbaseWork {
static Configuration conf=HBaseConfiguration.create();
// static Connection conn= ConnectionFactory.createConnection(conf);
static HBaseAdmin admin=null;
public static void createTable() throws IOException{
HTableDescriptor table=new HTableDescriptor(TableName.valueOf("member"));
table.addFamily(new HColumnDescriptor("member_id"));
table.addFamily(new HColumnDescriptor("address"));
table.addFamily(new HColumnDescriptor("info"));
admin.createTable(table);
System.out.println("create table finished");
}
public static void deletefamily() throws IOException{
// HTableDescriptor table = new HTableDescriptor(TableName.valueOf("member"));
admin.deleteColumn("member", "member_id");
System.out.println("delete");
}
public static void insertdata() throws IOException{
HTable table = new HTable(conf,"member");
ArrayList<Put> list =new ArrayList<Put>(25);
for(int i=0;i<5;i++)
{
Put put = new Put(Bytes.toBytes("row"+i));
put.add(Bytes.toBytes("address"), Bytes.toBytes("province"), Bytes.toBytes("pr"+i));
put.add(Bytes.toBytes("address"), Bytes.toBytes("city"), Bytes.toBytes("ct"+i));
put.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("2"+i));
put.add(Bytes.toBytes("info"), Bytes.toBytes("birthday"), Bytes.toBytes("data"+i));
put.add(Bytes.toBytes("info"), Bytes.toBytes("company"), Bytes.toBytes("com"+i));
list.add(put);
}
table.put(list);
}
public static void getinfo() throws IOException{
HTable table = new HTable(conf, "member");
// for(int i=0;i<5;i++)
// {
//
// }
Get get = new Get(Bytes.toBytes(("row0")));
Result result = table.get(get);
for(Cell cell: result.rawCells())
{
System.out.print(Bytes.toString(CellUtil.cloneRow(cell))+"--");
System.out.print(Bytes.toString(CellUtil.cloneFamily(cell))+":");
System.out.print(Bytes.toString(CellUtil.cloneQualifier(cell))+"->");
System.out.println(Bytes.toString(CellUtil.cloneValue(cell)));
}
}
public static void alterfirstrow() throws IOException{
HTable table = new HTable(conf, "member");
Put put = new Put(Bytes.toBytes("row0"));
put.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("30"));
table.put(put);
}
public static void getage() throws IOException{
HTable table = new HTable(conf,"member");
Get get = new Get(Bytes.toBytes("row0"));
Result result = table.get(get);
byte[] bs = result.getValue(Bytes.toBytes("info"), Bytes.toBytes("age"));
String age=Bytes.toString(bs);
System.out.println("age:"+age);
}
public static void scanTable() throws IOException{
Scan scan = new Scan();
HTable table = new HTable(conf,"member");
scan.addColumn(Bytes.toBytes("address"), Bytes.toBytes("province"));
scan.addColumn(Bytes.toBytes("address"), Bytes.toBytes("city"));
scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"));
scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("birthday"));
scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("company"));
ResultScanner scanner = table.getScanner(scan);
for(Result res=scanner.next();res!=null;res=scanner.next())
{
System.out.println(res);
}
}
public static void countrow() throws IOException{
HTable table = new HTable(conf,"member");
Scan scan = new Scan();
ResultScanner scanner = table.getScanner(scan);
int i=0;
while(scanner.next()!=null)
{
i++;
// System.out.println(scanner.next());
}
System.out.println(i);
}
public static void delTable() throws IOException{
boolean b = admin.isTableEnabled("member");
if(b)
{
admin.disableTable("member");
}
admin.deleteTable("member");
}
public static void main(String[] args) throws IOException {
admin=new HBaseAdmin(conf);
// createTable();
// deletefamily();
// insertdata();
// alterfirstrow();
// getinfo();
// getage();
// scanTable();
// countrow();
delTable();
}
}