Spark SQL 01
package sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

public class DataFrameReadJsonOps2 {

    public static void main(String[] args) {
        // Create a SparkConf that reads the system configuration and sets the application name.
        SparkConf conf = new SparkConf().setAppName("DataFrameOps").setMaster("local");
        // Create the JavaSparkContext instance, the core entry point of the Driver.
        JavaSparkContext sc = new JavaSparkContext(conf);
        // Set the log level to WARN.
        sc.setLogLevel("WARN");
        // Create the SQLContext used to run SQL analysis.
        SQLContext sqlContext = new SQLContext(sc);
        // Create a DataFrame; a DataFrame can loosely be thought of as a table.
        DataFrame df = sqlContext.read().json("c:/resources/people.json");
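        // Judging from the output below, people.json presumably holds one JSON
        // object per line (an assumption; the file itself is not shown), e.g.:
        //   {"name":"Michael"}
        //   {"name":"Andy", "age":30}
        //   {"name":"Justin", "age":19}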
        // select * from table
        df.show();
        // desc table
        df.printSchema();
        // select name from table
        df.select(df.col("name")).show();
        // select name, age + 10 from table
        df.select(df.col("name"), df.col("age").plus(10)).show();
        // select * from table where age > 21
        df.filter(df.col("age").gt(21)).show();
        // select age, count(1) from table group by age
        df.groupBy("age").count().show(); // equivalently: df.groupBy(df.col("age")).count().show();
        // Release the context when done.
        sc.stop();
    }
}
// Console output from a local run:
//SLF4J: Class path contains multiple SLF4J bindings.
//SLF4J: Found binding in [jar:file:/E:/bigdata/spark-1.4.0-bin-hadoop2.6/lib/spark-assembly-1.4.0-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
//SLF4J: Found binding in [jar:file:/E:/bigdata/spark-1.4.0-bin-hadoop2.6/lib/spark-examples-1.4.0-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
//SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
//SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
//Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
//17/12/29 14:15:10 INFO SparkContext: Running Spark version 1.4.0
//17/12/29 14:15:24 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
//17/12/29 14:15:28 INFO SecurityManager: Changing view acls to: alamps
//17/12/29 14:15:28 INFO SecurityManager: Changing modify acls to: alamps
//17/12/29 14:15:28 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(alamps); users with modify permissions: Set(alamps)
//17/12/29 14:15:37 INFO Slf4jLogger: Slf4jLogger started
//17/12/29 14:15:39 INFO Remoting: Starting remoting
//17/12/29 14:15:44 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@172.18.3.7:55458]
//17/12/29 14:15:44 INFO Utils: Successfully started service 'sparkDriver' on port 55458.
//17/12/29 14:15:45 INFO SparkEnv: Registering MapOutputTracker
//17/12/29 14:15:46 INFO SparkEnv: Registering BlockManagerMaster
//17/12/29 14:15:46 INFO DiskBlockManager: Created local directory at C:\Users\alamps\AppData\Local\Temp\spark-cd3ecbc3-41b5-4d8b-8e78-8c2c368ce80b\blockmgr-660894dd-39d3-4c8a-bf25-ae1d3850953d
//17/12/29 14:15:46 INFO MemoryStore: MemoryStore started with capacity 467.6 MB
//17/12/29 14:15:47 INFO HttpFileServer: HTTP File server directory is C:\Users\alamps\AppData\Local\Temp\spark-cd3ecbc3-41b5-4d8b-8e78-8c2c368ce80b\httpd-106ce90e-d496-4e96-a383-b471aeb5a224
//17/12/29 14:15:47 INFO HttpServer: Starting HTTP Server
//17/12/29 14:15:48 INFO Utils: Successfully started service 'HTTP file server' on port 55464.
//17/12/29 14:15:48 INFO SparkEnv: Registering OutputCommitCoordinator
//17/12/29 14:15:49 INFO Utils: Successfully started service 'SparkUI' on port 4040.
//17/12/29 14:15:49 INFO SparkUI: Started SparkUI at http://172.18.3.7:4040
//17/12/29 14:15:49 INFO Executor: Starting executor ID driver on host localhost
//17/12/29 14:15:50 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 55483.
//17/12/29 14:15:50 INFO NettyBlockTransferService: Server created on 55483
//17/12/29 14:15:50 INFO BlockManagerMaster: Trying to register BlockManager
//17/12/29 14:15:50 INFO BlockManagerMasterEndpoint: Registering block manager localhost:55483 with 467.6 MB RAM, BlockManagerId(driver, localhost, 55483)
//17/12/29 14:15:50 INFO BlockManagerMaster: Registered BlockManager
//+----+-------+
//| age| name|
//+----+-------+
//|null|Michael|
//| 30| Andy|
//| 19| Justin|
//+----+-------+
//
//root
// |-- age: long (nullable = true)
// |-- name: string (nullable = true)
//
//+-------+
//| name|
//+-------+
//|Michael|
//| Andy|
//| Justin|
//+-------+
//
//+-------+----------+
//| name|(age + 10)|
//+-------+----------+
//|Michael| null|
//| Andy| 40|
//| Justin| 29|
//+-------+----------+
//
//+---+----+
//|age|name|
//+---+----+
//| 30|Andy|
//+---+----+
//
//+----+-----+
//| age|count|
//+----+-----+
//|null| 1|
//| 19| 1|
//| 30| 1|
//+----+-----+
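
For comparison, the same queries can also be expressed as literal SQL by registering the DataFrame as a temporary table. A minimal sketch against the same Spark 1.4 API follows; the temporary-table name "people" is an arbitrary choice for illustration:

// Register the DataFrame as a temporary table so it can be queried with SQL.
df.registerTempTable("people");
// Equivalent to df.filter(df.col("age").gt(21)).show()
sqlContext.sql("SELECT * FROM people WHERE age > 21").show();
// Equivalent to df.groupBy("age").count().show()
sqlContext.sql("SELECT age, COUNT(1) AS count FROM people GROUP BY age").show();

These statements should produce the same filtered row (Andy, 30) and the same per-age counts shown above.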