Spark SQL 01
package sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

/**
 * Reads a JSON file into a DataFrame (Spark 1.4 API) and runs a few basic queries.
 */
public class DataFrameReadJsonOps2 {

    public static void main(String[] args) {
        // Create a SparkConf to read system configuration and set the application name.
        SparkConf conf = new SparkConf().setAppName("DataFrameOps").setMaster("local");
        // Create the JavaSparkContext, the core entry point of the Driver.
        JavaSparkContext sc = new JavaSparkContext(conf);
        // Reduce log noise.
        sc.setLogLevel("WARN");
        // Create the SQLContext used for SQL analysis.
        SQLContext sqlContext = new SQLContext(sc);
        // Create a DataFrame; a DataFrame can simply be thought of as a table.
        DataFrame df = sqlContext.read().json("c:/resources/people.json");

        // SELECT * FROM table
        df.show();
        // DESC table
        df.printSchema();
        // SELECT name FROM table
        df.select(df.col("name")).show();
        // SELECT name, age + 10 FROM table
        df.select(df.col("name"), df.col("age").plus(10)).show();
        // SELECT * FROM table WHERE age > 21
        df.filter(df.col("age").gt(21)).show();
        // SELECT age, count(1) FROM table GROUP BY age
        df.groupBy("age").count().show(); // or: df.groupBy(df.col("age")).count().show();
    }
}
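The exact contents of c:/resources/people.json are not shown in the post; assuming it is Spark's standard example file, which is consistent with the output further below, it would look like this (one JSON object per line):

{"name":"Michael"}
{"name":"Andy", "age":30}
{"name":"Justin", "age":19}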
//
//SLF4J: Class path contains multiple SLF4J bindings.
//SLF4J: Found binding in [jar:file:/E:/bigdata/spark-1.4.0-bin-hadoop2.6/lib/spark-assembly-1.4.0-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
//SLF4J: Found binding in [jar:file:/E:/bigdata/spark-1.4.0-bin-hadoop2.6/lib/spark-examples-1.4.0-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
//SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
//SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
//Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
//17/12/29 14:15:10 INFO SparkContext: Running Spark version 1.4.0
//17/12/29 14:15:24 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
//17/12/29 14:15:28 INFO SecurityManager: Changing view acls to: alamps
//17/12/29 14:15:28 INFO SecurityManager: Changing modify acls to: alamps
//17/12/29 14:15:28 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(alamps); users with modify permissions: Set(alamps)
//17/12/29 14:15:37 INFO Slf4jLogger: Slf4jLogger started
//17/12/29 14:15:39 INFO Remoting: Starting remoting
//17/12/29 14:15:44 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@172.18.3.7:55458]
//17/12/29 14:15:44 INFO Utils: Successfully started service 'sparkDriver' on port 55458.
//17/12/29 14:15:45 INFO SparkEnv: Registering MapOutputTracker
//17/12/29 14:15:46 INFO SparkEnv: Registering BlockManagerMaster
//17/12/29 14:15:46 INFO DiskBlockManager: Created local directory at C:\Users\alamps\AppData\Local\Temp\spark-cd3ecbc3-41b5-4d8b-8e78-8c2c368ce80b\blockmgr-660894dd-39d3-4c8a-bf25-ae1d3850953d
//17/12/29 14:15:46 INFO MemoryStore: MemoryStore started with capacity 467.6 MB
//17/12/29 14:15:47 INFO HttpFileServer: HTTP File server directory is C:\Users\alamps\AppData\Local\Temp\spark-cd3ecbc3-41b5-4d8b-8e78-8c2c368ce80b\httpd-106ce90e-d496-4e96-a383-b471aeb5a224
//17/12/29 14:15:47 INFO HttpServer: Starting HTTP Server
//17/12/29 14:15:48 INFO Utils: Successfully started service 'HTTP file server' on port 55464.
//17/12/29 14:15:48 INFO SparkEnv: Registering OutputCommitCoordinator
//17/12/29 14:15:49 INFO Utils: Successfully started service 'SparkUI' on port 4040.
//17/12/29 14:15:49 INFO SparkUI: Started SparkUI at http://172.18.3.7:4040
//17/12/29 14:15:49 INFO Executor: Starting executor ID driver on host localhost
//17/12/29 14:15:50 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 55483.
//17/12/29 14:15:50 INFO NettyBlockTransferService: Server created on 55483
//17/12/29 14:15:50 INFO BlockManagerMaster: Trying to register BlockManager
//17/12/29 14:15:50 INFO BlockManagerMasterEndpoint: Registering block manager localhost:55483 with 467.6 MB RAM, BlockManagerId(driver, localhost, 55483)
//17/12/29 14:15:50 INFO BlockManagerMaster: Registered BlockManager
//+----+-------+
//| age| name|
//+----+-------+
//|null|Michael|
//| 30| Andy|
//| 19| Justin|
//+----+-------+
//
//root
// |-- age: long (nullable = true)
// |-- name: string (nullable = true)
//
//+-------+
//| name|
//+-------+
//|Michael|
//| Andy|
//| Justin|
//+-------+
//
//+-------+----------+
//| name|(age + 10)|
//+-------+----------+
//|Michael| null|
//| Andy| 40|
//| Justin| 29|
//+-------+----------+
//
//+---+----+
//|age|name|
//+---+----+
//| 30|Andy|
//+---+----+
//
//+----+-----+
//| age|count|
//+----+-----+
//|null| 1|
//| 19| 1|
//| 30| 1|
//+----+-----+
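The same queries can also be expressed as SQL strings by registering the DataFrame as a temporary table. A minimal sketch using the Spark 1.4 API (table name "people" is chosen here for illustration):

// Expose the DataFrame to the SQLContext under a table name.
df.registerTempTable("people");
// These produce the same results as the DataFrame-API calls above.
sqlContext.sql("SELECT name, age + 10 FROM people").show();
sqlContext.sql("SELECT * FROM people WHERE age > 21").show();
sqlContext.sql("SELECT age, count(1) FROM people GROUP BY age").show();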