1.在终端启动hiveserver2
#hiveserver2

2.使用beeline连接hive
另外打开一个终端,输入如下命令(xavierdb必须是已经存在的数据库)
#beeline -u jdbc:hive2://localhost:10000/xavierdb -n hive -p hive

3.添加maven依赖

<!-- https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc -->
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.1.0</version>
</dependency>
<dependency>
    <groupId>junit</groupId>
    <artifactId>junit</artifactId>
    <version>4.9</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hive/hive-metastore -->
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-metastore</artifactId>
    <version>1.1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hive/hive-exec -->
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-exec</artifactId>
    <version>1.1.0</version>
</dependency>

maven依赖

出现过的错误: Error: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:10000/default

解决办法:检查发现运行beeline时Driver版本Driver: Hive JDBC (version 1.1.0-cdh5.16.1)比maven依赖中的Driver版本低,将maven版本调至1.1.0问题解决

Java API测试:

注意:这里的url必须是beeline值中使用的url


package TestOption;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.sql.*;

/**
 * Demonstrates basic Hive DDL/DML operations through the HiveServer2 JDBC interface.
 *
 * <p>Note: the JDBC URL must be the same URL used when connecting with beeline.
 * Each test method issues live SQL against a running HiveServer2 instance.
 *
 * @Author:Xavier
 * @Data:2019-02-18 11:43
 **/
public class HiveOption {

    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // Must match the URL used with beeline, e.g. jdbc:hive2://localhost:10000/xavierdb
    private static String url = "jdbc:hive2://yourhost:10000/yourdatabase";
    private static Connection con = null;
    private static Statement state = null;
    private static ResultSet res = null;

    /**
     * Loads the Hive JDBC driver and opens a connection before each test.
     *
     * @throws ClassNotFoundException if the Hive JDBC driver is not on the classpath
     * @throws SQLException           if the connection cannot be established
     */
    @Before
    public void init() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        con = DriverManager.getConnection(url, "hive", "hive");
        state = con.createStatement();
    }

    /** Creates a database. */
    @Test
    public void CreateDb() throws SQLException {
        state.execute("create database xavierdb1");
    }

    /** Lists all databases. */
    @Test
    public void showtDb() throws SQLException {
        res = state.executeQuery("show databases");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    /** Drops the database if it exists. */
    @Test
    public void dropDb() throws SQLException {
        state.execute("drop database if exists xavierdb1");
    }

    /*
     *
     * Basic operations on managed (internal) tables
     *
     */

    /** Creates a managed table. */
    @Test
    public void createTab() throws SQLException {
        state.execute("create table if not exists student ( " +
                "name string , " +
                "age int , " +
                "agent string ," +
                "adress struct<street:STRING,city:STRING>) " +
                "row format delimited " +
                "fields terminated by ',' " +        // separator between fields
                "collection items terminated by ':'" + // separator between items within a collection field
                "lines terminated by '\n' ");        // row separator
    }

    /** Lists all tables. */
    @Test
    public void showTab() throws SQLException {
        res = state.executeQuery("show tables");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    /** Shows the structure of a table. */
    @Test
    public void descTab() throws SQLException {
        res = state.executeQuery("desc emp");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    /** Loads data from a local file into the table. */
    @Test
    public void loadData() throws SQLException {
        // Leading/trailing spaces in infile keep the concatenated SQL tokens separated.
        String infile = " '/root/studentData' ";
        state.execute("load data local inpath " + infile + "overwrite into table student");
    }

    /** Queries all rows. */
    @Test
    public void selectTab() throws SQLException {
        res = state.executeQuery("select * from student1");
        while (res.next()) {
            System.out.println(
                    res.getString(1) + "-" +
                    res.getString(2) + "-" +
                    res.getString(3) + "-" +
                    res.getString(4));
        }
    }

    /** Aggregate query (runs a MapReduce job; relatively expensive). */
    @Test
    public void countData() throws SQLException {
        res = state.executeQuery("select count(1) from student");
        while (res.next()) {
            System.out.println(res.getInt(1));
        }
    }

    /** Drops a table. */
    @Test
    public void dropTab() throws SQLException {
        state.execute("drop table emp");
    }

    /*
     * Basic operations on external tables
     *
     * After an external table is dropped, its data remains on HDFS;
     * recreating an external table at the same location sees the old data again.
     */

    /** Creates an external table. */
    @Test
    public void createExTab() throws SQLException {
        state.execute("create external table if not exists student1 ( " +
                "name string , " +
                "age int , " +
                "agent string ," +
                "adress struct<street:STRING,city:STRING>) " +
                "row format delimited " +
                "fields terminated by ',' " +
                "collection items terminated by ':'" +
                "lines terminated by '\n' " +
                "stored as textfile " +
                "location '/testData/hive/student1' "); // without a location, hive.metastore.warehouse.dir is used
    }

    /**
     * Copies the structure of an existing table (no data is copied).
     *
     * create table student1 as select * from student  -- copies structure AND data
     * create table student1 like student              -- copies structure only
     */
    @Test
    public void copyExTab() throws SQLException {
        state.execute("create external table if not exists student2 " +
                "like xavierdb.student " +
                "location '/testData/hive/student1'");
    }

    /*
     * Partitioned tables
     *
     * Partitions must be declared when the table is defined.
     */

    /** Static partitioning: creates a partitioned table. */
    @Test
    public void creatPartab() throws SQLException {
        state.execute("create table if not exists emp (" +
                "name string ," +
                "salary int ," +
                "subordinate array<string> ," +
                "deductions map<string,float> ," +
                "address struct<street:string,city:string>) " +
                "partitioned by (city string,street string) " +
                "row format delimited " +
                "fields terminated by '\t' " +
                "collection items terminated by ',' " +
                "map keys terminated by ':' " +
                "lines terminated by '\n' " +
                "stored as textfile");
    }

    /** Adds a partition. */
    @Test
    public void addPartition() throws SQLException {
        state.execute("alter table emp add partition(city='shanghai',street='jinkelu') ");
    }

    /** Shows partition information. */
    @Test
    public void showPartition() throws SQLException {
        // res=state.executeQuery("select * from emp");
        res = state.executeQuery("show partitions emp");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    /** Loads data into a specific partition. */
    @Test
    public void loadParData() throws SQLException {
        String filepath = " '/root/emp' ";
        state.execute("load data local inpath " + filepath + " overwrite into table emp partition (city='shanghai',street='jinkelu')");
    }

    /** Drops a partition. */
    @Test
    public void dropPartition() throws SQLException {
        // Fix: was "alter table employees drop ..." — every other method in this class
        // operates on table "emp", and the partition dropped here is the one added in
        // addPartition(), so the table name must be "emp".
        state.execute("alter table emp drop partition (city='shanghai',street='jinkelu') ");
        /*
         * 1. Archive a partition into a single HAR file:
         *    alter table emp archive partition (city='shanghai',street='jinkelu')
         * 2. Restore an archived partition back to its original form:
         *    alter table emp unarchive partition (city='shanghai',street='jinkelu')
         * 3. Protect a partition from being dropped:
         *    alter table emp partition (city='shanghai',street='jinkelu') enable no_drop
         * 4. Protect a partition from being queried:
         *    alter table emp partition (city='shanghai',street='jinkelu') enable offline
         * 5. Re-allow dropping and querying:
         *    alter table emp partition (city='shanghai',street='jinkelu') disable no_drop
         *    alter table emp partition (city='shanghai',street='jinkelu') disable offline
         */
    }

    /*
     * External tables can also be partitioned.
     *
     * Dynamic partitioning:
     * When inserting into many partitions at once, dynamic partitioning lets Hive
     * route each row to a partition based on the queried data instead of a fixed
     * partition directory.
     *
     * set hive.exec.dynamic.partition=true;           -- enable dynamic partitioning
     * set hive.exec.dynamic.partition.mode=nonstrict; -- partition mode (default is strict)
     * set hive.exec.max.dynamic.partitions=1000;      -- max dynamic partitions (default 1000)
     */

    /** Dynamic partitioning: creates the partitioned target table. */
    @Test
    public void creatPartab1() throws SQLException {
        state.execute("create table if not exists emp1 (" +
                "name string ," +
                "salary int ," +
                "subordinate array<string> ," +
                "deductions map<string,float> ," +
                "address struct<street:string,city:string>) " +
                "partitioned by (city string,street string) " +
                "row format delimited " +
                "fields terminated by '\t' " +
                "collection items terminated by ',' " +
                "map keys terminated by ':' " +
                "lines terminated by '\n' " +
                "stored as textfile");
    }

    /** Partitions rows dynamically based on the queried data. */
    @Test
    public void loadPartitionData() throws SQLException {
        state.execute("insert overwrite table emp1 partition (city='shanghai',street) " +
                "select name,salary,subordinate,deductions,address,address.street from emp");
    }

    /**
     * Releases resources after each test.
     *
     * Fix: the original closed {@code state} twice and never closed {@code res}
     * ("if (res != null) state.close();"), leaking the ResultSet.
     */
    @After
    public void destory() throws SQLException {
        if (res != null) res.close();
        if (state != null) state.close();
        if (con != null) con.close();
    }
}
 

***连接HiveServer2修改hive配置的方法***

1)、直接在URL中添加

...
url = "jdbc:hive2://yourhost:10000/yourdatabase?mapreduce.job.queuename=root.hive-server2;hive.execution.engine=spark";
Connection con = DriverManager.getConnection(url, "hive", "hive");
...

多个conf配置之间使用" ; "  分割开;conf配置内容和url 变量内容使用" # "分割开

2)、使用state.execute 直接执行set 操作

...
state.execute("set hive.execution.engine=spark");
...

3 )、通过连接属性设置

Class.forName("org.apache.hive.jdbc.HiveDriver");
Properties propertie = new Properties();
propertie.setProperty("user", "hive");
propertie.setProperty("password", "hive");
// 这里传递了一个队列的hive_conf
propertie.setProperty("hive.execution.engine", "spark");
String url="jdbc:hive2://yourhost:10000/yourdatabase";
Connection conn = DriverManager.getConnection(url, propertie);
HiveStatement stat = (HiveStatement) conn.createStatement();

用Java代码通过JDBC连接Hiveserver2的更多相关文章

  1. Hive:用Java代码通过JDBC连接Hiveserver

    参考https://www.iteblog.com/archives/846.html 1.hive依赖hadoop,将hdfs当作文件存储介质,那是否意味着hive需要知道namenode的地址? ...

  2. java代码实现JDBC连接MySql以及引用驱动程序包

    JDBC链接MySql     JDBC链接MySql的话题已经老掉牙了,这次我只想通过使用简洁的代码实现,采用封装的思想,将链接MySql的代码封装在类的静态方法中,供一次性调用返回java.sql ...

  3. 通过JDBC连接HiveServer2

    如果通过JDBC连接HiveServer2时提示:User: hive is not allowed to impersonate hive,需要在core-site.xml中新增如下配置: hado ...

  4. Java基础93 JDBC连接MySQL数据库

    本文知识点(目录): 1.什么是jdbc     2.jdbc接口的核心API     3.使用JDBC技术连接MySQL数据库的方法    4.使用Statement执行sql语句(DDL.DML. ...

  5. 【JDBC】java程序通过jdbc连接oracle数据库方法

    版权声明:本文为博主原创文章(原文:blog.csdn.net/clark_xu 徐长亮的专栏).未经博主同意不得转载. https://blog.csdn.net/u011538954/articl ...

  6. 大数据系列-java用官方JDBC连接greenplum数据库

    这个其实非常简单,之所以要写此文是因为当前网上搜索到的文章都是使用PostgreSQL的驱动,没有找到使用greenplum官方驱动的案例,两者有什么区别呢? 一开始我也使用的是PostgreSQL的 ...

  7. java 命令行JDBC连接Mysql

    环境:Windows10 + java8 + mysql 8.0.15 + mysql-connector-java-8.0.15.jar mysql驱动程序目录 项目目录 代码: //package ...

  8. (转)CDH中启动的hive,使用jdbc连接hiveServer2时候权限不足解决方案

    Hive JDBC:java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.securi ...

  9. Java是用JDBC连接MySQL数据库

    首先要下载Connector/J地址:http://www.mysql.com/downloads/connector/j/ 这是MySQL官方提供的连接方式: 解压后得到jar库文件,需要在工程中导 ...

随机推荐

  1. sql优化使用技巧

    1.LIMIT 语句分页查询是最常用的场景之一,但也通常也是最容易出问题的地方.比如对于下面简单的语句,一般 DBA 想到的办法是在 type, name, create_time 字段上加组合索引. ...

  2. 观察者模式的python实现

    什么会观察者模式?观察者模式就是订阅-推送模式.是为了解耦合才会被利用起来的设计模式. 经典的就是boss 前台和员工之间的故事.一天A员工在看电影,B员工在看动漫,但是两人担心boss来了,自己没及 ...

  3. Android 开发 记录一个DP、PX、SP转换工具类

    public class UnitConversionUtil { /** * 根据手机分辨率从DP转成PX * @param context * @param dpValue * @return * ...

  4. JAVA 数组遍历

    一.遍历List 1.增强for循环 String[] arr = new String[] {"xx","yy","zz"}; for(S ...

  5. 稀疏矩阵 part 5

    ▶ 目前为止能跑的所有代码及其结果(2019年2月24日),之后添加:DIA 乘法 GPU 版:其他维度的乘法(矩阵乘矩阵):其他稀疏矩阵格式之间的相互转化 #include <stdio.h& ...

  6. Excel组合图表快速制作小功能

    1.  选中数据区域,插入推荐的图表 2. 然后可以选择快速布局小工具进行布局微调 选中图表 -> 设计(菜单) -> 快速布局(左边) 个人特别喜欢带表格的那个组合图布局,清晰好看

  7. 13. nginx,lvs之一

    摘要: 1.详细描述常见nginx常用模块和模块的使用示例 2.简述Linux集群类型.系统扩展方式及调度方法 3.简述lvs四种集群有点及使用场景 4.描述LVS-NAT.LVS-DR的工作原理并实 ...

  8. day319 1、正则表达式的定义及使用 2、Date类的用法 3、Calendar类的用法

    1.正则表达式的定义及使用2.Date类的用法3.Calendar类的用法 一.正则表达式 ###01正则表达式的概念和作用* A: 正则表达式的概念和作用* a: 正则表达式的概述* 正则表达式也是 ...

  9. eclipse启动tomcat正常,但是debug启动报错FATAL ERROR in native method:JDWP No transports initialized,jvmtiError=AGENT_ERROR_TRANSPORT_INIT(197) ERROR: transport error 202: connect failed:Connection timed out

    FATAL ERROR in native method:JDWP No transports initialized,jvmtiError=AGENT_ERROR_TRANSPORT_INIT(19 ...

  10. QT项目添加现有文件后不能运行,MFC在类视图中自动隐藏类

    解决方案:1)QT 5.6版本的QtCreator打开pro文件,在最后加一行空行或者删除一行空行,保存即可: 2)在隐藏的类对应的头文件中增加一行或删除一行(空格也可以),即可自动出现.