1. Basic DDL Practice

SHOW DATABASES;

CREATE DATABASE IF NOT EXISTS db1 COMMENT 'Our database db1';

SHOW DATABASES;

DESCRIBE DATABASE db1;

CREATE TABLE db1.table1 (word STRING, count INT);

SHOW TABLES in db1;

DESCRIBE db1.table1;

USE db1;

SHOW TABLES;

SELECT * FROM db1.table1;

DROP TABLE table1;

DROP DATABASE db1;

USE default;

2. Basic DML Statements

Create the table
create table if not exists user_dimension (
uid STRING,
name STRING,
gender STRING,
birth DATE,
province STRING
)
ROW FORMAT DELIMITED        -- use the delimited (plain text) row format
FIELDS TERMINATED BY ',';   -- fields are separated by commas
View table information
describe user_dimension;
show create table user_dimension;
List all tables
show tables;
Load data from the local filesystem
load data local inpath '/home/orco/tempdata/user.data' overwrite into table user_dimension;
Load data from HDFS
load data inpath '/user/orco/practice_1/user.data' overwrite into table user_dimension;
Verify
select * from user_dimension;
View Hive's storage directory on HDFS
hadoop fs -ls /warehouse/
hadoop fs -ls /warehouse/user_dimension
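
For reference, user.data is expected to hold one comma-delimited record per line matching the schema above; a hypothetical sample (illustrative only, not from the original article):
u001,Alice,female,1990-05-01,Zhejiang
u002,Bob,male,1988-11-23,Jiangsu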

3. Complex Data Types

Example 2:
CREATE TABLE IF NOT EXISTS employees (
name STRING,
salary FLOAT,
subordinates ARRAY<STRING>,
deductions MAP<STRING, FLOAT>,
address STRUCT<street:STRING, city:STRING, state:STRING, zip:INT>
)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\001'
COLLECTION ITEMS TERMINATED BY '\002'
MAP KEYS TERMINATED BY '\003'
LINES TERMINATED BY '\n'
STORED AS TEXTFILE;   -- this last line is the default and can be omitted
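
For reference, a hypothetical line of employees.txt, with ^A, ^B, ^C standing in for the \001, \002, \003 delimiters declared above (the record itself is illustrative, not from the original article):
John Doe^A100000.0^AMary Smith^BTodd Jones^AFederal Taxes^C0.2^BState Taxes^C0.05^A1 Michigan Ave.^BChicago^BIL^B60600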
Load data
load data local inpath '/home/orco/tempdata/data/employees.txt' overwrite into table employees;
Query the data
SELECT name, deductions['Federal Taxes'] FROM employees WHERE deductions['Federal Taxes'] > 0.2;
SELECT name, deductions['Federal Taxes'] FROM employees WHERE deductions['Federal Taxes'] > cast(0.2 as float);
SELECT name FROM employees WHERE subordinates[0] = 'Todd Jones';
SELECT name, address FROM employees WHERE address.street RLIKE '^.*(Ontario|Chicago).*$';

4. Data Model: Partitions

To cut down on unnecessary brute-force scans of the full data set, a table can be partitioned. To avoid producing too many small files, it is recommended to partition only on discrete (low-cardinality) columns.

Create the table
CREATE TABLE IF NOT EXISTS stocks (
ymd DATE,
price_open FLOAT,
price_high FLOAT,
price_low FLOAT,
price_close FLOAT,
volume INT,
price_adj_close FLOAT
)
PARTITIONED BY (exchanger STRING, symbol STRING)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
Load data
load data local inpath '/home/orco/resources/apache-hive-2.1.1-bin/hivedata/stocks/NASDAQ/AAPL/stocks.csv' overwrite into table stocks partition(exchanger="NASDAQ", symbol="AAPL");
show partitions stocks;
load data local inpath '/home/orco/resources/apache-hive-2.1.1-bin/hivedata/stocks/NASDAQ/INTC/stocks.csv' overwrite into table stocks partition(exchanger="NASDAQ", symbol="INTC");
load data local inpath '/home/orco/resources/apache-hive-2.1.1-bin/hivedata/stocks/NYSE/GE/stocks.csv' overwrite into table stocks partition(exchanger="NYSE", symbol="GE");
show partitions stocks;
Query
SELECT * FROM stocks WHERE exchanger = 'NASDAQ' AND symbol = 'AAPL' LIMIT 10;
SELECT ymd, price_close FROM stocks WHERE exchanger = 'NASDAQ' AND symbol = 'AAPL' LIMIT 10;
View the HDFS directory
hadoop fs -ls /warehouse/stocks/
hadoop fs -ls /warehouse/stocks/exchanger=NASDAQ
hadoop fs -ls /warehouse/stocks/exchanger=NASDAQ/symbol=AAPL

6. External Tables

With the EXTERNAL keyword, dropping the table removes only the metadata; the underlying data is left in place, which makes external tables safer.

Upload the data to HDFS
hadoop fs -put stocks /user/orco/
Create the external table
CREATE EXTERNAL TABLE IF NOT EXISTS stocks_external (
ymd DATE,
price_open FLOAT,
price_high FLOAT,
price_low FLOAT,
price_close FLOAT,
volume INT,
price_adj_close FLOAT
)
PARTITIONED BY (exchanger STRING, symbol STRING)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
LOCATION '/user/orco/stocks';
select * from stocks_external;
Load data (attach partitions)
alter table stocks_external add partition(exchanger="NASDAQ", symbol="AAPL") location '/user/orco/stocks/NASDAQ/AAPL/';
show partitions stocks_external;
select * from stocks_external limit 10;
alter table stocks_external add partition(exchanger="NASDAQ", symbol="INTC") location '/user/orco/stocks/NASDAQ/INTC/';
alter table stocks_external add partition(exchanger="NYSE", symbol="IBM") location '/user/orco/stocks/NYSE/IBM/';
alter table stocks_external add partition(exchanger="NYSE", symbol="GE") location '/user/orco/stocks/NYSE/GE/';
show partitions stocks_external;
Query
SELECT * FROM stocks_external WHERE exchanger = 'NASDAQ' AND symbol = 'AAPL' LIMIT 10;
SELECT ymd, price_close FROM stocks_external WHERE exchanger = 'NASDAQ' AND symbol = 'AAPL' LIMIT 10;
select exchanger, symbol, count(*) from stocks_external group by exchanger, symbol;
select exchanger, symbol, max(price_high) from stocks_external group by exchanger, symbol;
Drop the tables
Drop the managed (internal) table stocks
drop table stocks;
View the HDFS directory
hadoop fs -ls /warehouse/
Drop the external table stocks_external
drop table stocks_external;
View the HDFS directory
hadoop fs -ls /user/orco
hadoop fs -ls /user/orco/stocks

7. Columnar Storage

When creating or altering a table, a different file format can be specified for the table and for each of its partitions (a short sketch follows the list below):
• Storage Formats
• Row Formats
• SerDe

STORED AS file_format
– STORED AS PARQUET
– STORED AS ORC
– STORED AS SEQUENCEFILE
– STORED AS AVRO
– STORED AS TEXTFILE
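
As a minimal sketch of both cases (the table name demo_parquet is hypothetical):
CREATE TABLE IF NOT EXISTS demo_parquet (id INT, name STRING)
STORED AS PARQUET;
-- Switch the declared format of an existing table; this only updates the
-- metadata, existing data files are not rewritten.
ALTER TABLE demo_parquet SET FILEFORMAT ORC;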

Columnar formats ORC and Parquet: storage space

Columnar formats ORC and Parquet: performance

How to create an ORC table

create table if not exists record_orc (
rid STRING,
uid STRING,
bid STRING,
price INT,
source_province STRING,
target_province STRING,
site STRING,
express_number STRING,
express_company STRING,
trancation_date DATE
)
stored as orc;
show create table record_orc;
Load data
select * from record_orc limit 10;
insert into table record_orc select * from record;
select * from record_orc limit 10;
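
One simple way to see the storage-space difference mentioned above is to compare the directory sizes of the source text table and the ORC table (assuming both live under the /warehouse/ path used earlier):
hadoop fs -du -s -h /warehouse/record
hadoop fs -du -s -h /warehouse/record_orc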

8. Lateral View (turning one row into multiple rows)

CREATE TABLE IF NOT EXISTS employees (
name STRING,
salary FLOAT,
subordinates ARRAY<STRING>,
deductions MAP<STRING, FLOAT>,
address STRUCT<street:STRING, city:STRING, state:STRING, zip:INT>
)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\001'
COLLECTION ITEMS TERMINATED BY '\002'
MAP KEYS TERMINATED BY '\003'
LINES TERMINATED BY '\n'
STORED AS TEXTFILE;
Query
select name,subordinate from employees LATERAL VIEW explode(subordinates) subordinates_table AS subordinate;
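
For context, explode() on its own also produces one row per array element, but Hive does not allow a UDTF to be mixed with other columns in the same SELECT, which is why the LATERAL VIEW form above is needed:
-- works: the UDTF is the only select expression
select explode(subordinates) as subordinate from employees;
-- not allowed: a UDTF cannot be combined with other select expressions
-- select name, explode(subordinates) from employees;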

9. EXPLAIN
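
The body of this section appears to be missing from the source. As a minimal sketch, prefixing a query with EXPLAIN (or EXPLAIN EXTENDED for more detail) prints the plan of stages Hive will execute instead of running the query, for example against the employees table above:
EXPLAIN
SELECT name, deductions['Federal Taxes'] FROM employees
WHERE deductions['Federal Taxes'] > cast(0.2 as float);

EXPLAIN EXTENDED
SELECT name, deductions['Federal Taxes'] FROM employees
WHERE deductions['Federal Taxes'] > cast(0.2 as float);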
