• sshpass和做软链接


    参考:

    https://help.aliyun.com/document_detail/54530.html?spm=5176.11065259.1996646101.searchclickresult.d5d847dbBVUcQJ&aly_as=SCLlYD_p#title-k89-hb2-5zf

    -L file #测试file是否为符号化(软)链接
    $# 表示传入脚本的参数个数,用于检查参数数量是否符合要求;不符合则打印用法提示并退出

    sh deploy.sh <master_ip> <master_password_file>
    deploy.sh:脚本名称,内容见下面代码。
    master_ip:集群的 Master 节点的 IP,请确保可以访问。
    master_password_file:保存 Master 节点的密码文件,将 Master 节点的密码直接写在文件内即可。

    #!/usr/bin/bash
    # Deploy an EMR gateway node: copy the Hadoop/Hive/Spark packages, their
    # configuration, environment files and cluster host entries from the
    # cluster Master node down to this local machine, then fix up symlinks
    # and JAVA_HOME.
    #
    # Usage: deploy.sh <master_ip> <master_password_file>
    #   master_ip            - IP of the cluster Master node (must be reachable)
    #   master_password_file - file whose content is the Master root password
    if [ $# -ne 2 ]; then
        # Usage errors belong on stderr.
        echo "Usage: $0 master_ip master_password_file" >&2
        exit 1
    fi
    masterip=$1
    masterpwdfile=$2

    # Install prerequisites only if missing (command -v is the portable check).
    if ! command -v sshpass >/dev/null 2>&1; then
        yum install -y sshpass
    fi
    if ! command -v java >/dev/null 2>&1; then
        yum install -y java-1.8.0-openjdk
    fi

    mkdir -p /opt/apps /etc/ecm /usr/lib/bigboot-current/conf

    # Non-interactive scp helper: feeds the password from the file and skips
    # the host-key prompt on EVERY transfer (the original only did so on the
    # first one), with all expansions quoted.
    sscp() {
        sshpass -f "$masterpwdfile" scp -o 'StrictHostKeyChecking no' "$@"
    }

    echo "Start to copy package from $masterip to local gateway(/opt/apps)"
    for app in hadoop hive spark; do
        echo " -copying ${app}-current"
        sscp -r "root@$masterip:/usr/lib/${app}-current" /opt/apps/
    done

    # Re-point each /usr/lib/<app>-current symlink at the local copy.
    # (Fixes the original bug where ${app} was echoed without ever being set.)
    for app in hadoop hive spark; do
        echo "Start to link /usr/lib/${app}-current to /opt/apps/${app}-current"
        if [ -L "/usr/lib/${app}-current" ]; then
            unlink "/usr/lib/${app}-current"
        fi
        ln -s "/opt/apps/${app}-current" "/usr/lib/${app}-current"
    done

    echo "Start to copy conf from $masterip to local gateway(/etc/ecm)"
    for conf in hadoop-conf hive-conf spark-conf; do
        sscp -r "root@$masterip:/etc/ecm/${conf}" "/etc/ecm/${conf}"
    done

    echo "Start to copy environment from $masterip to local gateway(/etc/profile.d)"
    for envfile in hdfs yarn hive spark; do
        sscp "root@$masterip:/etc/profile.d/${envfile}.sh" /etc/profile.d/
    done
    sscp "root@$masterip:/usr/lib/bigboot-current/conf/smartdata-site.xml" /usr/lib/bigboot-current/conf/

    # Drop any stale JVM symlink and pin JAVA_HOME to the local OpenJDK JRE.
    if [ -L /usr/lib/jvm/java ]; then
        unlink /usr/lib/jvm/java
    fi
    echo "" >>/etc/profile.d/hdfs.sh
    echo "export JAVA_HOME=/usr/lib/jvm/jre-1.8.0" >>/etc/profile.d/hdfs.sh

    echo "Start to copy host info from $masterip to local gateway(/etc/hosts)"
    sscp "root@$masterip:/etc/hosts" /etc/hosts_bak
    # Append only the cluster's emr host entries (grep reads the file
    # directly; no useless cat).
    grep emr /etc/hosts_bak | grep cluster >>/etc/hosts

    # Create the hadoop user if it does not already exist.
    if ! id hadoop >/dev/null 2>&1; then
        useradd hadoop
    fi

  • 相关阅读:
    优达学城自动驾驶课程项目——车道检测
    终于理解了方向导数与梯度
    深入理解决策树的最优分类法则
    学习支持向量机的一点感悟
    时间复杂度Big O以及Python 内置函数的时间复杂度
    机器学习基础系列--先验概率 后验概率 似然函数 最大似然估计(MLE) 最大后验概率(MAE) 以及贝叶斯公式的理解
    信息论相关概念:熵 交叉熵 KL散度 JS散度
    强化学习相关知识的整理
    机器学习系列(三)——目标函数和损失函数
    机器学习系列(二)——分类及回归问题
  • 原文地址:https://www.cnblogs.com/hongfeng2019/p/11926206.html
Copyright © 2020-2023  润新知