A utility class for Hive 2.x that wraps the common operations and performs Kerberos authentication.
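Apart from the JDK, the class only needs the Hive JDBC driver and the Hadoop client classes on the classpath (the hive-jdbc and hadoop-common artifacts). The Kerberos login in getConnection() additionally assumes a krb5.conf the JVM can see (the system default, or one supplied via -Djava.security.krb5.conf).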

package com.ideal.template.openbigdata.util;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class HiveOper
{
private static String driverClass = "org.apache.hive.jdbc.HiveDriver";

/**
 * Kerberos principal of the user (e.g. user@REALM).
 */
private String key;

/**
 * Path to the user's keytab file.
 */
private String tab;

/**
 * JDBC URL of the Hive warehouse.
 */
private String url;

/**
 * Hive user name.
 */
private String user;

/**
 * Password of the Hive user.
 */
private String pwd;

/**
 * The shared Hive connection.
 */
private Connection conn = null;

public HiveOper(String key, String tab, String url, String user, String pwd)
{
this.key = key;
this.tab = tab;
this.url = url;
this.user = user;
this.pwd = pwd;
}

/**
 * Get the Hive connection, performing the Kerberos login on first use.
 *
 * @return
 */
private Connection getConnection()
{
if (conn == null)
{
try
{
/*
 * Perform the Kerberos login before opening the JDBC connection.
 */
Configuration conf = new Configuration();
conf.set("hadoop.security.authentication", "Kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab(key, tab);
Class.forName(driverClass);
conn = DriverManager.getConnection(url, user, pwd);
}
catch (ClassNotFoundException e)
{
throw new HiveDBException(e);
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
catch (Exception e)
{
throw new HiveDBException(e);
}
}
return conn;
}

/**
 * Close the connection.
 */
public void close()
{
try
{
if (conn != null && !conn.isClosed())
{
conn.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
conn = null;
}
}

/**
 * Close a Statement.
 *
 * @param stmt
 */
public void close(Statement stmt)
{
try
{
if (stmt != null)
{
stmt.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
stmt = null;
}
}

/**
 * Close a PreparedStatement.
 *
 * @param pst
 */
public void close(PreparedStatement pst)
{
try
{
if (pst != null)
{
pst.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
pst = null;
}
}

/**
 * Close a ResultSet.
 *
 * @param rs
 */
public void close(ResultSet rs)
{
try
{
if (rs != null)
{
rs.close();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
rs = null;
}
}

/**
 * List all tables in the given database.
 *
 * @param dbName
 * @return
 */
public List<String> listTables(String dbName)
{
Statement stmt = null;
ResultSet res = null;
List<String> tables = new LinkedList<String>();
try
{
stmt = getConnection().createStatement();
if (dbName != null && dbName.trim().length() > 0)
{
stmt.execute("USE " + dbName);
}
res = stmt.executeQuery("SHOW TABLES");
while (res.next())
{
tables.add(res.getString(1));
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(stmt);
close();
}
return tables;
}

/**
 * List all databases.
 */
public List<String> showdatabases()
{
Statement stmt = null;
ResultSet res = null;
List<String> tables = new LinkedList<String>();
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("SHOW DATABASES");
while (res.next())
{
tables.add(res.getString(1));
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(stmt);
close();
}
return tables;
}

/**
 * Execute a non-query SQL statement, e.g. creating a table or loading data.
 *
 * @param sql
 * @return
 */
public boolean executeNonQuery(String sql)
{
Statement stmt = null;
boolean result = true;
try
{
stmt = getConnection().createStatement();
stmt.execute(sql);
}
catch (SQLException e)
{
// note: the exception propagates, so this method never actually returns false
throw new HiveDBException(e);
}
finally
{
close(stmt);
close();
}
return result;
}

/**
 * Query with a Statement and return the raw ResultSet.
 *
 * @param sql
 * @return
 */
public ResultSet queryForResultSet(String sql)
{
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery(sql);
}
catch (SQLException e)
{
close(stmt);
throw new HiveDBException(e);
}
// NOTE: the Statement is deliberately left open; closing it here would also
// close the returned ResultSet. Callers must call close() on this object
// once they are done with the ResultSet.
return res;
}

/**
 * Query with a Statement and return the rows as a List; intended for small
 * result sets.
 *
 * @param sql
 * @return
 */
public List<Map<String, Object>> queryForList(String sql)
{
Statement stmt = null;
ResultSet res = null;
List<Map<String, Object>> list = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery(sql);
Map<String, Object> map = null;
ResultSetMetaData rsmd = res.getMetaData();
int rowCnt = rsmd.getColumnCount();
list = new LinkedList<Map<String, Object>>();
while (res.next())
{
map = new LinkedHashMap<String, Object>(rowCnt);
for (int i = 1; i <= rowCnt; i++)
{
map.put(rsmd.getColumnName(i), res.getObject(i));
}
list.add(map);
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(stmt);
close();
}
return list;
}

/**
 * Query with a PreparedStatement and return the raw ResultSet.
 *
 * @param sql
 * @param values
 * @return
 */
public ResultSet queryForResultSet(String sql, String[] values)
{
PreparedStatement pst = null;
ResultSet res = null;
try
{
pst = getConnection().prepareStatement(sql);
setValue(pst, values);
res = pst.executeQuery();
}
catch (SQLException e)
{
close(pst);
throw new HiveDBException(e);
}
// NOTE: as above, the PreparedStatement is left open so the returned
// ResultSet stays usable; callers must call close() when done.
return res;
}

/**
 * Query with a PreparedStatement and return the rows as a List; intended for
 * small result sets.
 *
 * @param sql
 * @param values
 * @return
 */
public List<Map<String, Object>> queryForList(String sql, String[] values)
{
PreparedStatement pst = null;
ResultSet res = null;
List<Map<String, Object>> list = null;
try
{
pst = getConnection().prepareStatement(sql);
setValue(pst, values);
res = pst.executeQuery();
Map<String, Object> map = null;
ResultSetMetaData rsmd = res.getMetaData();
int rowCnt = rsmd.getColumnCount();
list = new LinkedList<Map<String, Object>>();
while (res.next())
{
map = new LinkedHashMap<String, Object>(rowCnt);
for (int i = 1; i <= rowCnt; i++)
{
map.put(rsmd.getColumnName(i), res.getObject(i));
}
list.add(map);
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(res);
close(pst);
close();
}
return list;
}

/**
 * Execute a data-file import statement, e.g. LOAD DATA.
 *
 * @param sql
 * @return
 */
public boolean impBySql(String sql)
{
PreparedStatement pst = null;
boolean flag = false;
try
{
pst = getConnection().prepareStatement(sql);
// note: execute() returns true only if the statement produced a ResultSet
flag = pst.execute();
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
finally
{
close(pst);
close();
}
return flag;
}

private void setValue(PreparedStatement pst, String[] values)
{
try
{
if(values == null || values.length == 0)
{
return;
}
for (int i = 0; i < values.length; i++)
{
pst.setString(i + 1, values[i]);
}
}
catch (SQLException e)
{
throw new HiveDBException(e);
}
}

/**
 * Get the HDFS path of a table (parsed from "desc extended" output).
 *
 * @param tblName
 * @return
 * @throws Exception
 */
public String getHiveTblPath(String tblName) throws Exception
{
String result = "";
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
int start = content.indexOf("location:");
if (start == -1)
{
continue;
}
String sub = content.substring(start);
int end = sub.indexOf(",");
if (end == -1)
{
continue;
}
result = sub.substring("location:".length(), end);
}
else
{
continue;
}
}
}
catch (SQLException e)
{
throw new Exception(e);
}
finally
{
close(res);
close(stmt);
close();
}
return result;
}
/**
 * Get the HDFS path of a table together with its field delimiter.
 *
 * @param tblName
 * @return a map with keys "field" (the delimiter) and "hdfsPath"
 * @throws Exception
 */
public Map<String,String> getNewHiveTblPath(String tblName) throws Exception
{
String result = "";
String field = "";
Statement stmt = null;
ResultSet res = null;
Map<String,String> map = new HashMap<String,String>();
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
field=getField(content);
int start = content.indexOf("dragoncluster");
if (start == -1)
{
continue;
}
String sub = content.substring(start);
int end = sub.indexOf(",");
if (end == -1)
{
continue;
}
result = sub.substring("dragoncluster".length(), end);
}
else
{
continue;
}
}
}
catch (SQLException e)
{
throw new Exception(e);
}
finally
{
close(res);
close(stmt);
close();
}
map.put("field", field);
map.put("hdfsPath", result);
return map;
}

public String getField(String content) throws Exception
{
int start = content.indexOf("field.delim=");
if (start == -1)
{
// default Hive field delimiter \001 (kept double-escaped as in the original)
return "\\\\001";
}
else
{
String sub = content.substring(start);
// the delimiter is the single character immediately after "field.delim="
return sub.substring("field.delim=".length(), "field.delim=".length() + 1);
}
}

public String getTblComment(String tblName)
{
String result = null;
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
int start = content.lastIndexOf("comment");
if (start == -1)
{
continue;
}
String sub = content.substring(start);
int endBracket = sub.indexOf("}");
int endDot = sub.indexOf(",");
// take the nearer delimiter, ignoring one that is absent (-1)
int end = (endBracket == -1) ? endDot
        : (endDot == -1) ? endBracket : Math.min(endBracket, endDot);
if (end == -1)
{
continue;
}
result = sub.substring("comment=".length(), end);
if(result != null && result.startsWith("null"))
{
result = null;
}
}
else
{
continue;
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return result;
}

/**
 * Get the file format of a Hive table.
 *
 * @param tblName
 * @return "TEXTFILE" or "SEQUENCEFILE"
 * date: 2017-01-04
 * author: Tonny Chien
 */
public String getTblFileType(String tblName)
{
String result = null;
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
if(content.toUpperCase().contains("TEXTINPUTFORMAT"))
{
result = "TEXTFILE";
}
else if(content.toUpperCase().contains("SEQUENCEFILEINPUTFORMAT"))
{
result = "SEQUENCEFILE";
}
else
{
// any other input format falls back to SEQUENCEFILE
result = "SEQUENCEFILE";
}
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return result;
}

/**
 * Check whether a table is an external table.
 *
 * @param tblName
 * @return
 * date: 2017-01-04
 * author: Tonny Chien
 */
public boolean isExternalTbl(String tblName)
{
boolean result = false;
Statement stmt = null;
ResultSet res = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc extended " + tblName);
while (res.next())
{
if(res.getString(1).trim().equals("Detailed Table Information"))
{
String content = res.getString(2).trim();
if(content.toUpperCase().contains("EXTERNAL_TABLE"))
{
result = true;
}
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return result;
}

public List<String[]> getColumAndType(String tblName)
{
Statement stmt = null;
ResultSet res = null;
List<String[]> list = null;
String[] item = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc formatted " + tblName);
list = new LinkedList<String[]>();
while (res.next())
{
if (res.getString(1).trim().equals("# col_name"))
{
continue;
}
if (res.getString(1).equals("# Detailed Table Information") || res.getString(1).equals("# Partition Information"))
{
break;
}
if (res.getString(1).trim().equals(""))
{
continue;
}
String column = res.getString(1).trim().toUpperCase();
String type = res.getString(2).trim().toUpperCase();
String comment = "";
if (res.getString(3) != null && res.getString(3).trim().length() > 0)
{
comment = res.getString(3).trim().toUpperCase();
if ("NONE".equals(comment))
{
comment = "";
}
}
item = new String[] { column, type, comment };
list.add(item);
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return list;
}

public List<String[]> getHiveTblPartitions(String tblName)
{
Statement stmt = null;
ResultSet res = null;
List<String[]> list = null;
String[] item = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc " + tblName);
list = new LinkedList<String[]>();
while (res.next())
{
if (res.getString(1).equals("# Partition Information"))
{
while (res.next())
{
if (res.getString(1).trim().equals("# col_name"))
{
continue;
}
if (res.getString(1).trim().equals(""))
{
continue;
}
String column = res.getString(1).trim().toUpperCase();
String type = res.getString(2).trim().toUpperCase();
String comment = "";
if (res.getString(3) != null && res.getString(3).trim().length() > 0)
{
comment = res.getString(3).trim().toUpperCase();
if ("NONE".equals(comment))
{
comment = "";
}
}
item = new String[] { column, type, comment };
list.add(item);
}
}
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return list;
}

public List<String> getHiveTblColumns(String tblName)
{
Statement stmt = null;
ResultSet res = null;
List<String> list = null;
try
{
stmt = getConnection().createStatement();
res = stmt.executeQuery("desc formatted " + tblName);
list = new LinkedList<String>();
while (res.next())
{
if (res.getString(1).trim().equals("# col_name"))
{
continue;
}
if (res.getString(1).equals("# Detailed Table Information") || res.getString(1).equals("# Partition Information"))
{
break;
}
if (res.getString(1).trim().equals(""))
{
continue;
}
System.out.println(res.getString(1).trim());
list.add(res.getString(1).trim().toUpperCase());
}
}
catch (SQLException e)
{
e.printStackTrace();
}
finally
{
close(res);
close(stmt);
close();
}
return list;
}

/**
 * Check whether a table exists in the given database.
 *
 * @param database
 * @param tableName
 * @return
 * date: 2016-10-25
 * author: Tonny Chien
 */
public boolean existTable(String database, String tableName)
{
boolean result = false;
Statement stmt = null;
ResultSet res = null;
try
{
String hql = "SHOW TABLES IN " + database;
stmt = getConnection().createStatement();
res = stmt.executeQuery(hql);
while (res.next())
{
if (res.getString(1).trim().toUpperCase().equals(tableName.toUpperCase()))
{
result = true;
break;
}
}
}
catch (Exception e)
{
result = false;
}
finally
{
close(res);
close(stmt);
close();
}
return result;
}

/**
 * Grant or revoke privileges on a database or table.
 *
 * @param cmdType grant/revoke
 * @param privs the privileges, e.g. SELECT
 * @param tgtType database/table
 * @param tgt database name or table name
 * @param recvType group/user
 * @param recv the receiving group or user name
 * @return
 * @author Tonny Chien
 * @date 2017-05-21 20:13
 */
public boolean auth(AUTH cmdType, String privs, AUTH tgtType, String tgt, AUTH recvType, String recv)
{
// assemble the GRANT/REVOKE statement
StringBuilder sb = new StringBuilder();
switch (cmdType)
{
case grant: // granting privileges
sb.append("GRANT ");
break;
case revoke: // revoking privileges
sb.append("REVOKE ");
break;
default:
break;
}
sb.append(privs);
sb.append(" ON ");
switch (tgtType)
{
case database:
sb.append("DATABASE ");
break;
case table:
sb.append("TABLE ");
break;
default:
break;
}
sb.append(tgt);
switch (cmdType)
{
case grant:
sb.append(" TO ");
break;
case revoke:
sb.append(" FROM ");
break;
default:
break;
}
switch (recvType)
{
case user:
sb.append(" USER ");
break;
case group:
sb.append(" GROUP ");
break;
default:
break;
}
sb.append(recv);
String hql = sb.toString();
boolean result = false;
Statement stmt = null;
try
{
stmt = getConnection().createStatement();
stmt.execute("set role admin");
stmt.execute(hql);
result = true;
}
catch (Exception e)
{
result = false;
e.printStackTrace();
}
finally
{
close(stmt);
close();
}
return result;
}

public static void main(String[] args)
{
String url = "jdbc:hive2://d004.dragon.com:2181,d002.dragon.com:2181,d005.dragon.com:2181/;serviceDiscoveryMode=zookeeper;zookeeperNamespace=hiveserver2";
String key = "openbigdata@DRAGON.COM"; // the realm stays the same; the user name varies
String tab = "/etc/security/keytabs/openbigdata.keytab"; // use the keytab file for the specific user
String huser = null;
String password = null;
HiveOper oper = new HiveOper(key, tab, url, huser, password);
List<String> dbs = oper.showdatabases();
for(String db:dbs)
{
System.out.println(db);
}
List<String> tbls = oper.listTables("default");
for(String tbl:tbls)
{
System.out.println(tbl);
}
}
}

class HiveDBException extends RuntimeException
{
private static final long serialVersionUID = 2637639405785985892L;

public HiveDBException(Exception e)
{
// keep the original exception as the cause so the stack trace is preserved
super(e.getMessage(), e);
}

public HiveDBException(String message, Throwable cause)
{
super(message, cause);
}

public HiveDBException(String message)
{
super(message);
}

public HiveDBException(Throwable cause)
{
super(cause);
}
}
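Note: the AUTH enum referenced by auth(...) is not included in the original listing; a minimal definition consistent with the switch cases above (constant names matching the case labels) would be:

// minimal sketch of the missing AUTH enum, inferred from the switch cases in auth(...)
enum AUTH
{
grant, revoke, database, table, user, group
}

With that in place, a call like oper.auth(AUTH.grant, "SELECT", AUTH.table, "default.t1", AUTH.user, "alice") (table and user here are made-up examples) first runs "set role admin" and then executes GRANT SELECT ON TABLE default.t1 TO USER alice.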
