• Spark Review Notes (4): Spark Script Analysis


    1.[start-all.sh]

    #!/usr/bin/env bash
    
    #
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #
    
    # Start all spark daemons.    # start every Spark daemon in the cluster
    # Starts the master on this node.  # the master runs on the node where this script is executed
    # Starts a worker on each node specified in conf/slaves   # a worker is started on every node listed in conf/slaves
    
    if [ -z "${SPARK_HOME}" ]; then    #判断spark环境变量在不在
      export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"  #如果在的话,就把环境变量导入
    fi
    
    # Load the Spark configuration
    . "${SPARK_HOME}/sbin/spark-config.sh"  # source the spark-config.sh script
    
    # Start Master
    "${SPARK_HOME}/sbin"/start-master.sh  #启动master脚本进程
    
    # Start Workers
    "${SPARK_HOME}/sbin"/start-slaves.sh  #启动slaves脚本进程

    2.[start-master.sh]

    #!/usr/bin/env bash
    
    #
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #
    
    # Starts the master on the machine this script is executed on.  # run this script on the machine that should host the master process
    
    if [ -z "${SPARK_HOME}" ]; then    #首先配置环境变量
      export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
    fi
    
    # NOTE: This exact class name is matched downstream by SparkSubmit.
    # Any changes need to be reflected there.
    CLASS="org.apache.spark.deploy.master.Master"
    
    if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
      echo "Usage: ./sbin/start-master.sh [options]"
      pattern="Usage:"
      pattern+="|Using Spark's default log4j profile:"
      pattern+="|Registered signal handlers for"
    
      "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
      exit 1
    fi
    
    ORIGINAL_ARGS="$@"
    
    . "${SPARK_HOME}/sbin/spark-config.sh"
    
    . "${SPARK_HOME}/bin/load-spark-env.sh"
    
    if [ "$SPARK_MASTER_PORT" = "" ]; then
      SPARK_MASTER_PORT=7077
    fi
    
    if [ "$SPARK_MASTER_HOST" = "" ]; then
      SPARK_MASTER_HOST=`hostname -f`
    fi
    
    if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
      SPARK_MASTER_WEBUI_PORT=8080
    fi
    
    "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 
      --host $SPARK_MASTER_HOST --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT 
      $ORIGINAL_ARGS
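
    The defaults above (host from hostname -f, RPC port 7077, web UI port 8080) apply only when the corresponding variables are empty, so they can be overridden before the script runs. A minimal sketch, assuming the overrides are placed in conf/spark-env.sh, which load-spark-env.sh sources (the hostname spark-master-01 is a placeholder):

    # conf/spark-env.sh (read by load-spark-env.sh before the defaults above kick in)
    export SPARK_MASTER_HOST=spark-master-01   # placeholder hostname to bind the master to
    export SPARK_MASTER_PORT=7077              # RPC port used in spark:// URLs
    export SPARK_MASTER_WEBUI_PORT=8080        # port for the master web UI

    # then start the master:
    $SPARK_HOME/sbin/start-master.sh
    # workers and applications connect via spark://spark-master-01:7077
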
  • Original article: https://www.cnblogs.com/bigdata-stone/p/9889201.html