Hive工具类
Hive2.x的工具类,对常用方法进行了封装,其中设置了kerberos认证。
package com.ideal.template.openbigdata.util;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

/**
 * Utility wrapper around Hive 2.x JDBC (HiveServer2) that performs a Kerberos
 * keytab login before opening the connection.
 *
 * <p>NOT thread-safe: a single shared {@link Connection} is lazily opened and
 * torn down by every public method, so instances must not be shared between
 * threads.
 */
public class HiveOper
{
    /** HiveServer2 JDBC driver class name. */
    private static final String driverClass = "org.apache.hive.jdbc.HiveDriver";

    /**
     * Kerberos principal, e.g. "user@REALM".
     * FIX: the original comment called this the "keytab path", but it is passed
     * as the first argument of UserGroupInformation.loginUserFromKeytab(), i.e.
     * the principal (see main(), which passes "openbigdata@DRAGON.COM").
     */
    private final String key;

    /** Absolute path of the keytab file for the principal above. */
    private final String tab;

    /** JDBC URL of the Hive warehouse. */
    private final String url;

    /** Hive user name (may be null when Kerberos is used). */
    private final String user;

    /** Password for the Hive user (may be null when Kerberos is used). */
    private final String pwd;

    /** Shared JDBC connection; lazily opened, reset to null by close(). */
    private Connection conn = null;

    /**
     * @param key  Kerberos principal ("user@REALM")
     * @param tab  keytab file path
     * @param url  HiveServer2 JDBC URL
     * @param user Hive user (nullable)
     * @param pwd  password (nullable)
     */
    public HiveOper(String key, String tab, String url, String user, String pwd)
    {
        this.key = key;
        this.tab = tab;
        this.url = url;
        this.user = user;
        this.pwd = pwd;
    }

    /**
* 获取hive连接
*
* @return
*/
private Connection getConnection()
{
if (conn == null)
{
try
{
/**
* 加入Kerberos认证
*/
Configuration conf = new Configuration();
conf.set("hadoop.security.authentication", "Kerberos"); UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab(key, tab);
Class.forName(driverClass);
conn = DriverManager.getConnection(url, user, pwd);
}
catch (ClassNotFoundException e)
{
throw new HiveDBException(e);
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
catch (Exception e)
{
throw new HiveDBException(e);
}
}
return conn;
} /**
* 关闭连接
*/
public void close()
{
try
{
if (conn != null && !conn.isClosed())
{
conn.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
conn = null;
}
} /**
* 关闭Statement
*
* @param stmt
*/
public void close(Statement stmt)
{
try
{
if (stmt != null)
{
stmt.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
stmt = null;
}
} /**
* 关闭PreparedStatement
*
* @param pst
*/
public void close(PreparedStatement pst)
{
try
{
if (pst != null)
{
pst.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
pst = null;
}
} /**
* 关闭ResultSet
*
* @param rs
*/
public void close(ResultSet rs)
{
try
{
if (rs != null)
{
rs.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
rs = null;
}
} /**
* 列出指定数据库下的所有表
*
* @param dataBaseName
* @return
*/
public List<String> listTables(String dbName)
{
Statement stmt = null;
ResultSet res = null;
List<String> tables = new LinkedList<String>();
try
{
stmt = getConnection().createStatement();
if (dbName != null && dbName.trim().length() > 0)
{
stmt.execute("USE " + dbName);
}
res = stmt.executeQuery("SHOW TABLES");
while (res.next())
{
tables.add(res.getString(1));
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(stmt);
close();
}
return tables;
} /*
* 获取数据库
*/
public List<String> showdatabases()
{
Statement stmt = null;
ResultSet res = null;
List<String> tables = new LinkedList<String>();
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("SHOW DATABASES");
while (res.next())
{
tables.add(res.getString(1));
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(stmt);
close();
}
return tables;
} /**
* 执行非查询的sql语句,比如创建表,加载数据等等
*
* @param sql
* @return
*/
public boolean executeNonQuery(String sql)
{
Statement stmt = null;
boolean result = true;
try
{
stmt = getConnection().createStatement();
stmt.execute(sql);
}
catch (SQLException e)
{
result = false;
throw new HiveDBException(e);
}
finally
{
close(stmt);
close();
}
return result;
} /**
* 使用Statement查询数据,返回ResultSet
*
* @param sql
* @return
*/
public ResultSet queryForResultSet(String sql)
{
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery(sql);
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(stmt);
}
return res;
} /**
* 使用Statement查询数据,返回List集合,数据量比较小的时候用
*
* @param sql
* @return
*/
public List<Map<String, Object>> queryForList(String sql)
{
Statement stmt = null;
ResultSet res = null;
List<Map<String, Object>> list = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery(sql);
Map<String, Object> map = null;
ResultSetMetaData rsmd = res.getMetaData();
int rowCnt = rsmd.getColumnCount();
list = new LinkedList<Map<String, Object>>();
while (res.next())
{
map = new LinkedHashMap<String, Object>(rowCnt);
for (int i = 1; i <= rowCnt; i++)
{
map.put(rsmd.getColumnName(i), res.getObject(i));
}
list.add(map);
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(stmt);
close();
}
return list;
} /**
* 使用PreparedStatement查询数据,返回ResultSet
*
* @param sql
* @param values
* @return
*/
public ResultSet queryForResultSet(String sql, String[] values)
{
PreparedStatement pst = null;
ResultSet res = null;
try
{
pst = getConnection().prepareStatement(sql);
setValue(pst, values);
res = pst.executeQuery();
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(pst);
}
return res;
} /**
* 使用PreparedStatement查询数据,返回List集合,数据量比较小的时候用
*
* @param sql
* @param values
* @return
*/
public List<Map<String, Object>> queryForList(String sql, String[] values)
{
PreparedStatement pst = null;
ResultSet res = null;
List<Map<String, Object>> list = null;
try
{
pst = getConnection().prepareStatement(sql);
setValue(pst, values);
res = pst.executeQuery();
Map<String, Object> map = null;
ResultSetMetaData rsmd = res.getMetaData();
int rowCnt = rsmd.getColumnCount();
list = new LinkedList<Map<String, Object>>();
while (res.next())
{
map = new LinkedHashMap<String, Object>(rowCnt);
for (int i = 1; i <= rowCnt; i++)
{
map.put(rsmd.getColumnName(i), res.getObject(i));
}
list.add(map);
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(pst);
close();
}
return list;
} /**
* 执行数据文件导入
*
* @param sql
* @param values
* @return
*/
public boolean impBySql(String sql)
{
PreparedStatement pst = null;
boolean flag = false;
try
{
pst = getConnection().prepareStatement(sql);
flag = pst.execute(); }
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(pst);
close();
}
return flag;
} private void setValue(PreparedStatement pst, String[] values)
{
try
{
if(values == null || values.length == 0)
{
return;
}
for (int i = 0; i < values.length; i++)
{
pst.setString(i + 1, values[i]);
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
} /**
* 获取表所在的路径
* @param tblName
* @return
* @throws Exception
*/
public String getHiveTblPath(String tblName) throws Exception
{
String result = "";
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
int start = content.indexOf("location:");
if (start == -1)
{
continue;
} String sub = content.substring(start);
int end = sub.indexOf(",");
if (end == -1)
{
continue;
} result = sub.substring("location:".length(), end);
}
else
{
continue;
} // String content = res.getString(1).trim(); }
}
catch (SQLException e)
{
throw new Exception(e);
}
finally
{
close(res);
close(stmt);
close();
}
return result;
}
/**
* 获取表所在的路径
* @param tblName
* @return
* @throws Exception
*/
public Map<String,String> getNewHiveTblPath(String tblName) throws Exception
{
String result = "";
String field = "";
Statement stmt = null;
ResultSet res = null;
Map<String,String> map = new HashMap<String,String>();
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
field=getField(content);
int start = content.indexOf("dragoncluster");
if (start == -1)
{
continue;
} String sub = content.substring(start);
int end = sub.indexOf(",");
if (end == -1)
{
continue;
} result = sub.substring("dragoncluster".length(), end);
}
else
{
continue;
} // String content = res.getString(1).trim(); }
}
catch (SQLException e)
{
throw new Exception(e);
}
finally
{
close(res);
close(stmt);
close();
}
map.put("field", field);
map.put("hdfsPath", result);
return map;
} public String getField(String content) throws Exception{
int start = content.indexOf("field.delim=");
if(start==-1){
return "\\\\001";
}else{
String sub = content.substring(start);
// int end = sub.indexOf("}");
return sub.substring("field.delim=".length(), "field.delim=".length()+1); }
} public String getTblComment(String tblName)
{
String result = null;
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
int start = content.lastIndexOf("comment");
if (start == -1)
{
continue;
} String sub = content.substring(start); int endBracket = sub.indexOf("}");
int endDot = sub.indexOf(",");
int end = endBracket < endDot ? endBracket : endDot;
if (end == -1)
{
continue;
} result = sub.substring("comment=".length(), end);
if(result != null && result.startsWith("null"))
{
result = null;
}
}
else
{
continue;
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return result;
} /**
* desc:获取hive表文件的类型
* @param tblName
* @return
* date:2017年1月4日
* author:Tonny Chien
*/
public String getTblFileType(String tblName)
{
String result = null;
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
if(content.toUpperCase().contains("TEXTINPUTFORMAT"))
{
result = "TEXTFILE";
}
else if(content.toUpperCase().contains("SEQUENCEFILEINPUTFORMAT"))
{
result = "SEQUENCEFILE";
}
else
{
result = "SEQUENCEFILE";
}
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return result;
} /**
* desc:查看表是否为外部表
* @param tblName
* @return
* date:2017年1月4日
* author:Tonny Chien
*/
public boolean isExternalTbl(String tblName)
{
boolean result = false;
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
if(content.toUpperCase().contains("EXTERNAL_TABLE"))
{
result = true;
}
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return result;
} public List<String[]> getColumAndType(String tblName)
{
Statement stmt = null;
ResultSet res = null;
List<String[]> list = null;
String[] item = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc formatted " + tblName);
list = new LinkedList<String[]>(); while (res.next())
{
if (res.getString(1).trim().equals("# col_name"))
{
continue;
} if (res.getString(1).equals("# Detailed Table Information") || res.getString(1).equals("# Partition Information"))
{
break;
} if (res.getString(1).trim().equals(""))
{
continue;
}
String column = res.getString(1).trim().toUpperCase();
String type = res.getString(2).trim().toUpperCase();
String comment = "";
if (res.getString(3) != null && res.getString(3).trim().length() > 0)
{
comment = res.getString(3).trim().toUpperCase();
if ("NONE".equals(comment))
{
comment = "";
}
}
item = new String[]{column, type, comment}; list.add(item);
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return list; } public List<String[]> getHiveTblPartitions(String tblName)
{
Statement stmt = null;
ResultSet res = null;
List<String[]> list = null;
String[] item = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc " + tblName);
list = new LinkedList<String[]>(); while (res.next())
{
if (res.getString(1).equals("# Partition Information"))
{
while (res.next())
{
if (res.getString(1).trim().equals("# col_name"))
{
continue;
}
if (res.getString(1).trim().equals(""))
{
continue;
}
String column = res.getString(1).trim().toUpperCase();
String type = res.getString(2).trim().toUpperCase();
String comment = "";
if (res.getString(3) != null && res.getString(3).trim().length() > 0)
{
comment = res.getString(3).trim().toUpperCase();
if ("NONE".equals(comment))
{
comment = "";
}
}
item = new String[] { column, type, comment };
list.add(item);
}
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return list; } public List<String> getHiveTblColumns(String tblName)
{
Statement stmt = null;
ResultSet res = null;
List<String> list = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc formatted " + tblName);
list = new LinkedList<String>();
while (res.next())
{
if (res.getString(1).trim().equals("# col_name"))
{
continue;
} if (res.getString(1).equals("# Detailed Table Information") || res.getString(1).equals("# Partition Information"))
{
break;
} if (res.getString(1).trim().equals(""))
{
continue;
}
System.out.println(res.getString(1).trim());
list.add(res.getString(1).trim().toUpperCase());
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return list;
} /**
* desc:查看某张表是否存在
* @param database
* @param tableName
* @return
* date:2016年10月25日
* author:Tonny Chien
*/
public boolean existTable(String database, String tableName)
{
boolean result = false;
Statement stmt = null;
ResultSet res = null;
try
{
String hql = "SHOW TABLES IN " + database;
stmt = getConnection().createStatement();
res = stmt.executeQuery(hql);
while (res.next())
{
if (res.getString(1).trim().toUpperCase().equals(tableName.toUpperCase()))
{
result = true;
break;
}
}
}
catch (Exception e)
{
result = false;
}
finally
{
close(res);
close(stmt);
close();
}
return result;
} /**
*
* @param cmdType grant/revoke
* @param privs 权限
* @param tgtType database/table
* @param tgt 数据库名/表名
* @param recvType group/user
* @param recv
* @return
* @author Tonny Chien
* @date 207-5-21 20:13
*/
public boolean auth(AUTH cmdType, String privs, AUTH tgtType, String tgt, AUTH recvType, String recv)
{
// 拼接命令
StringBuilder sb = new StringBuilder();
switch (cmdType)
{
case grant:// 如果是权限
sb.append("GRANT ");
break;
case revoke:// 如果是回收
sb.append("REVOKE ");
break;
default:
break;
} sb.append(privs);
sb.append(" ON "); switch (tgtType)
{
case database:// 如果是数据库
sb.append("DATABASE ");
break;
case table:// 如果是表
sb.append("TABLE ");
break;
default:
break;
} sb.append(tgt); switch (cmdType)
{
case grant:// 如果是权限
sb.append(" TO ");
break;
case revoke:// 如果是回收
sb.append(" FROM ");
break;
default:
break;
} switch (recvType)
{
case user:// 如果是用户
sb.append(" USER ");
break;
case group:// 如果是组
sb.append(" GROUP ");
break;
default:
break;
}
sb.append(recv); String hql = sb.toString(); boolean result = false;
Statement stmt = null;
try
{
stmt = getConnection().createStatement();
stmt.execute("set role admin");
stmt.execute(hql);
result = true;
}
catch (Exception e)
{
result = false;
e.printStackTrace();
}
finally
{
close(stmt);
close();
}
return result;
} public static void main(String[] args)
{
String url = "jdbc:hive2://d004.dragon.com:2181,d002.dragon.com:2181,d005.dragon.com:2181/;serviceDiscoveryMode=zookeeper;zookeeperNamespace=hiveserver2";
String key = "openbigdata@DRAGON.COM";//域名不变,用户名变化
String tab = "/etc/security/keytabs/openbigdata.keytab";//keytab文件根据具体的用户进行
String huser = null;
String password = null; HiveOper oper = new HiveOper(key, tab, url, huser, password);
List<String > dbs = oper.showdatabases();
for(String db:dbs)
{
System.out.println(db);
} List<String > tbls = oper.listTables("default");
for(String tbl:tbls)
{
System.out.println(tbl);
}
}
}

/**
 * Unchecked exception wrapping any failure raised inside {@link HiveOper}.
 */
class HiveDBException extends RuntimeException
{
    private static final long serialVersionUID = 2637639405785985892L;

    /**
     * FIX: the original called {@code super(e.getMessage())}, which discarded
     * the cause and its stack trace; chaining via {@code super(e)} preserves
     * them (getMessage() then reports the cause's toString()).
     */
    public HiveDBException(Exception e)
    {
        super(e);
    }

    public HiveDBException(String message, Throwable cause)
    {
        super(message, cause);
    }

    public HiveDBException(String message)
    {
        super(message);
    }

    public HiveDBException(Throwable cause)
    {
        super(cause);
    }
}
Hive工具类的更多相关文章
- HDFS 工具类
读取HDFS上文件数据 import java.io.File; import java.io.FileInputStream; import java.io.IOException; import ...
- Java基础Map接口+Collections工具类
1.Map中我们主要讲两个接口 HashMap 与 LinkedHashMap (1)其中LinkedHashMap是有序的 怎么存怎么取出来 我们讲一下Map的增删改查功能: /* * Ma ...
- Android—关于自定义对话框的工具类
开发中有很多地方会用到自定义对话框,为了避免不必要的城府代码,在此总结出一个工具类. 弹出对话框的地方很多,但是都大同小异,不同无非就是提示内容或者图片不同,下面这个类是将提示内容和图片放到了自定义函 ...
- [转]Java常用工具类集合
转自:http://blog.csdn.net/justdb/article/details/8653166 数据库连接工具类——仅仅获得连接对象 ConnDB.java package com.ut ...
- js常用工具类.
一些js的工具类 复制代码 /** * Created by sevennight on 15-1-31. * js常用工具类 */ /** * 方法作用:[格式化时间] * 使用方法 * 示例: * ...
- Guava库介绍之实用工具类
作者:Jack47 转载请保留作者和原文出处 欢迎关注我的微信公众账号程序员杰克,两边的文章会同步,也可以添加我的RSS订阅源. 本文是我写的Google开源的Java编程库Guava系列之一,主要介 ...
- Java程序员的日常—— Arrays工具类的使用
这个类在日常的开发中,还是非常常用的.今天就总结一下Arrays工具类的常用方法.最常用的就是asList,sort,toStream,equals,copyOf了.另外可以深入学习下Arrays的排 ...
- .net使用正则表达式校验、匹配字符工具类
开发程序离不开数据的校验,这里整理了一些数据的校验.匹配的方法: /// <summary> /// 字符(串)验证.匹配工具类 /// </summary> public c ...
- WebUtils-网络请求工具类
网络请求工具类,大幅代码借鉴aplipay. using System; using System.Collections.Generic; using System.IO; using System ...
随机推荐
- MYSQL初级学习笔记一:MYSQL常用命令和数据库操作(DDL)!(视频序号:初级_3,4)
知识点一:MYSQL常用命令(3) 登入方法:一,mysql –u 账号 –p 密码 退出方法:一,EXIT,QUIT 修改MYSQL命令提示符: 连接上客户机之后,通常使用prompt命令修改: 连 ...
- ES6 数组的解构赋值
数组的解构赋值 ES6 允许按照一定模式,从数组和对象中提取值,对变量进行赋值,这被称为解构(Destructuring). 以前,为变量赋值,只能直接指定值. let a = 1; let b = ...
- 一步一步学Silverlight 2系列(11):数据绑定
概念 Silverlight 2 Beta 1版本发布了,无论从Runtime还是Tools都给我们带来了很多的惊喜,如支持框架语言Visual Basic, Visual C#, IronRuby, ...
- hihocoder-1347 小h的树上的朋友(lca+线段树)
题目链接: 小h的树上的朋友 时间限制:18000ms 单点时限:2000ms 内存限制:512MB 描述 小h拥有n位朋友.每位朋友拥有一个数值Vi代表他与小h的亲密度.亲密度有可能发生变化. 岁月 ...
- HihoCoder1705: 座位问题(STL)
描述 HIHO银行等待区有一排N个座位,从左到右依次编号1~N.现在有M位顾客坐在座位上,其中第i位坐在编号Ai的座位上. 之后又陆续来了K位顾客,(K + M ≤ N) 他们都会选择坐在最" ...
- Python测试框架doctest
doctest是python自带的一个模块.本博客将介绍doctest的两种使用方式:一种是嵌入到python源码中,另外一种是放到一个独立文件. doctest 的概念模型 在python的官方文档 ...
- MyBatis缓存设计
和大多数ORM框架一样,为了尽可能减少数据库的访问,MyBatis设计支持缓存功能.设计上通过Cache接口提供SPI(服务提供接口),可以让第三方缓存提供具体的缓存实现,比如使用ehcache.Re ...
- bzoj4557 [JLoi2016]侦察守卫——DP
题目:https://www.lydsy.com/JudgeOnline/problem.php?id=4557 见这位的博客:https://www.cnblogs.com/Narh/p/91403 ...
- 在linux下用tomcat部署java web项目的过程与注意事项(转)
在linux下用tomcat部署java web项目的过程与注意事项一.安装JDK到http://www.oracle.com/technetwork/java/javase/downloads/in ...
- Android之APP模块编译
一,如何把app编进系统 a.源码编译,在packages/apps目录下有安卓原生的app,以Bluetooth为例,源码根目录下有Android.mk文件: packages\apps\Bluet ...