#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!
# Spark installation directory
export SPARK_HOME="$(cd `dirname $0`/..; pwd)"
# Save the original arguments as an array in ORIG_ARGS
ORIG_ARGS=("$@")

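# Note (illustrative, not from the original post): ("$@") copies every argument
# into a bash array, so an invocation like
#   ./spark-submit --name "My App" app.jar
# keeps "My App" as one element when re-expanded later as "${ORIG_ARGS[@]}".
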
# Map each recognized option's value to the corresponding environment variable
while (($#)); do
  if [ "$1" = "--deploy-mode" ]; then
    SPARK_SUBMIT_DEPLOY_MODE=$2
  elif [ "$1" = "--properties-file" ]; then
    SPARK_SUBMIT_PROPERTIES_FILE=$2
  elif [ "$1" = "--driver-memory" ]; then
    export SPARK_SUBMIT_DRIVER_MEMORY=$2
  elif [ "$1" = "--driver-library-path" ]; then
    export SPARK_SUBMIT_LIBRARY_PATH=$2
  elif [ "$1" = "--driver-class-path" ]; then
    export SPARK_SUBMIT_CLASSPATH=$2
  elif [ "$1" = "--driver-java-options" ]; then
    export SPARK_SUBMIT_OPTS=$2
  fi
  shift
done
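
# For example (an illustrative invocation, not from the original post), running
#   ./bin/spark-submit --deploy-mode client --driver-memory 4g --class Foo app.jar
# leaves SPARK_SUBMIT_DEPLOY_MODE=client and SPARK_SUBMIT_DRIVER_MEMORY=4g set
# before the defaults below are applied; unrecognized flags are simply skipped.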

# Define default values; any user-supplied settings above take precedence
# ":-" works like NVL: keep the variable's value if it is set, otherwise use the default
DEFAULT_PROPERTIES_FILE="$SPARK_HOME/conf/spark-defaults.conf"
export SPARK_SUBMIT_DEPLOY_MODE=${SPARK_SUBMIT_DEPLOY_MODE:-"client"}
export SPARK_SUBMIT_PROPERTIES_FILE=${SPARK_SUBMIT_PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"}
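
# A quick illustration of the ":-" expansion (hypothetical values, not part of
# the original script): with SPARK_SUBMIT_DEPLOY_MODE unset,
#   ${SPARK_SUBMIT_DEPLOY_MODE:-"client"}   # expands to "client"
# whereas if the loop above already set it to "cluster", the expansion yields
# "cluster" and the default is ignored.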

# For client mode, the driver will be launched in the same JVM that launches
# SparkSubmit, so we may need to read the properties file for any extra class
# paths, library paths, java options and memory early on. Otherwise, it will
# be too late by the time the driver JVM has started.

# Pick up the "spark.driver.extra*" and "spark.driver.memory" settings from spark-defaults.conf

if [[ "$SPARK_SUBMIT_DEPLOY_MODE" == "client" && -f "$SPARK_SUBMIT_PROPERTIES_FILE" ]]; then
  # Parse the properties file only if the special configs exist
  contains_special_configs=$(
    grep -e "spark.driver.extra*\|spark.driver.memory" "$SPARK_SUBMIT_PROPERTIES_FILE" | \
    grep -v "^[[:space:]]*#"
  )
  if [ -n "$contains_special_configs" ]; then
    export SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
  fi
fi
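
# For example (illustrative values, not from the original post), either of
# these lines in spark-defaults.conf would match the grep above — it is an
# uncommented line naming a spark.driver.extra* or spark.driver.memory key —
# and thus switch the bootstrap on:
#   spark.driver.memory            4g
#   spark.driver.extraJavaOptions  -XX:+UseG1GC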
# Pass the original arguments on to spark-class
# exec replaces the current shell process with the command that follows,
# instead of forking a child and continuing
exec $SPARK_HOME/bin/spark-class org.apache.spark.deploy.SparkSubmit "${ORIG_ARGS[@]}"
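
To see what the `exec` on the last line does, here is a minimal standalone sketch (my own illustration, not part of spark-submit): on success, exec replaces the current shell process with the new command, keeping the same PID, so no line after it ever runs.

    #!/usr/bin/env bash
    echo "before exec: PID=$$"
    exec sleep 1          # the shell process is replaced by sleep, same PID
    echo "never printed"  # exec does not return when the command is found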
