1.[start-all.sh]

#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Start all spark daemons.
# Starts the master on this node.
# Starts a worker on each node specified in conf/slaves

if [ -z "${SPARK_HOME}" ]; then                        # check whether SPARK_HOME is set
  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"   # if not, derive it from this script's location and export it
fi

# Load the Spark configuration
. "${SPARK_HOME}/sbin/spark-config.sh"                 # source the spark-config.sh script

# Start Master
"${SPARK_HOME}/sbin"/start-master.sh                   # run the start-master.sh script

# Start Workers
"${SPARK_HOME}/sbin"/start-slaves.sh                   # run the start-slaves.sh script

2.[start-master.sh]

#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Starts the master on the machine this script is executed on.

if [ -z "${SPARK_HOME}" ]; then                        # first make sure SPARK_HOME is set
  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
CLASS="org.apache.spark.deploy.master.Master"

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then      # print usage and exit on --help / -h
  echo "Usage: ./sbin/start-master.sh [options]"
  pattern="Usage:"
  pattern+="\|Using Spark's default log4j profile:"
  pattern+="\|Registered signal handlers for"
  "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
  exit 1
fi

ORIGINAL_ARGS="$@"                                     # remember the caller's arguments

. "${SPARK_HOME}/sbin/spark-config.sh"                 # load the Spark configuration
. "${SPARK_HOME}/bin/load-spark-env.sh"                # load conf/spark-env.sh settings

if [ "$SPARK_MASTER_PORT" = "" ]; then                 # default RPC port
  SPARK_MASTER_PORT=7077
fi
if [ "$SPARK_MASTER_HOST" = "" ]; then                 # default host: this machine's FQDN
  SPARK_MASTER_HOST=`hostname -f`
fi
if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then           # default web UI port
  SPARK_MASTER_WEBUI_PORT=8080
fi

"${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 \
  --host $SPARK_MASTER_HOST --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
  $ORIGINAL_ARGS
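All three settings above are filled in only when the corresponding variable is empty, so the master can be steered entirely from the environment without editing the script. A hypothetical invocation (the host name and port values are assumptions for illustration, not values from the script):

# Bind the master to an explicit host and non-default ports; any
# variable left unset falls back to the defaults chosen inside
# start-master.sh (`hostname -f`, 7077, 8080).
export SPARK_MASTER_HOST=master01.example.com          # assumed host name
export SPARK_MASTER_PORT=7078
export SPARK_MASTER_WEBUI_PORT=8081
"${SPARK_HOME}/sbin/start-master.sh"

In practice these variables are usually set once in conf/spark-env.sh, which load-spark-env.sh sources before the defaults are applied.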
