• Ambari uninstall script


    #!/bin/bash
    # Program:
    #    Automatically uninstall Ambari and HDP from every host in the cluster.
    PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
    export PATH
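    # NOTE (assumption): the script is expected to be run as root from a node
    # that has passwordless SSH access to every cluster host, since every step
    # below is executed remotely via ssh.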
    
    # Get all cluster host names. Note: this assumes the IP and host name in /etc/hosts
    # are separated by a tab (cut's default delimiter); for space-separated entries use
    # the commented-out line below instead.
    #hostList=$(cat /etc/hosts | tail -n +3 | cut -d ' ' -f 2)
    hostList=$(cat /etc/hosts | tail -n +3 | cut -f 2)
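    # Illustrative /etc/hosts layout this parsing assumes (cluster entries start
    # on line 3 and use a tab between IP and host name):
    #   127.0.0.1   localhost
    #   ::1         localhost
    #   192.168.1.10<TAB>master
    #   192.168.1.11<TAB>slave1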
    yumReposDir=/etc/yum.repos.d/
    alterNativesDir=/etc/alternatives/
    pingCount=5
    logPre=HDP
    
    read -p "Please input your master hostname: " master
    master=${master:-"master"}
    ssh $master "ambari-server stop"
    # Reset the Ambari database
    ssh $master "ambari-server reset"
    
    for host in $hostList
    do
        echo $host
        # Check whether the host is reachable; skip it if ping gets no reply
        if ! ping -c $pingCount -q "$host" &> /dev/null; then
            echo -e "$logPre======>$host is unreachable, please check the '/etc/hosts' file"
            continue
        fi
    
        echo "$logPre======>$host deleting... 
    "
        # 1) Remove hdp.repo, HDP.repo, HDP-UTILS.repo and ambari.repo
        ssh $host "rm -rf $yumReposDir/hdp.repo"
        ssh $host "rm -rf $yumReposDir/HDP*"
        ssh $host "rm -rf $yumReposDir/ambari.repo"
        
        # Remove the users created by the installation (via the Ambari agent's cleanup script)
        ssh $host "python /usr/lib/ambari-agent/lib/ambari_agent/HostCleanup.py --silent"
        
        # Remove HDP-related packages installed on the host
        hdppackagelist=$(ssh $host "yum list installed" | grep HDP | awk '{print $1}')
        for package in $hdppackagelist
        do
            echo "uninstalling $package"
            ssh $host "yum remove -y $package"
        done
        # Remove Ambari-related packages installed on the host
        ambaripackagelist=$(ssh $host "yum list installed" | grep ambari | awk '{print $1}')
        for package in $ambaripackagelist
        do
            echo "uninstalling $package"
            ssh $host "yum remove -y $package"
        done
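        # Illustrative 'yum list installed' output line the parsing above assumes
        # (the package name is the first whitespace-separated column):
        #   hadoop_2_6_5_0_292.x86_64    2.7.3.2.6.5.0-292    @HDP-2.6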
        
        # Remove the alternatives entries; each ssh call starts a fresh shell,
        # so absolute paths are used rather than a separate 'cd'
        ssh $host "rm -rf $alterNativesDir/hadoop-etc"
        ssh $host "rm -rf $alterNativesDir/zookeeper-conf"
        ssh $host "rm -rf $alterNativesDir/hbase-conf"
        ssh $host "rm -rf $alterNativesDir/hadoop-log"
        ssh $host "rm -rf $alterNativesDir/hadoop-lib"
        ssh $host "rm -rf $alterNativesDir/hadoop-default"
        ssh $host "rm -rf $alterNativesDir/oozie-conf"
        ssh $host "rm -rf $alterNativesDir/hcatalog-conf"
        ssh $host "rm -rf $alterNativesDir/hive-conf"
        ssh $host "rm -rf $alterNativesDir/hadoop-man"
        ssh $host "rm -rf $alterNativesDir/sqoop-conf"
        ssh $host "rm -rf $alterNativesDir/hadoop-confone"
        
        # Remove log files
        ssh $host "rm -rf /var/log/ambari*"
        ssh $host "rm -rf /var/log/hadoop*"
        ssh $host "rm -rf /var/log/hbase"
        ssh $host "rm -rf /var/log/hive"
        ssh $host "rm -rf /var/log/nagios" 
        ssh $host "rm -rf /var/log/oozie"
        ssh $host "rm -rf /var/log/zookeeper" 
        ssh $host "rm -rf /var/log/falcon"
        ssh $host "rm -rf /var/log/flume"
        ssh $host "rm -rf /var/log/hive*"
        ssh $host "rm -rf /var/log/knox"
        ssh $host "rm -rf /var/log/solr"
        ssh $host "rm -rf /var/log/hst"
        
        # Remove Hadoop directories, including HDFS data
        ssh $host "rm -rf /hadoop" 
        ssh $host "rm -rf /hdfs/hadoop" 
        ssh $host "rm -rf /hdfs/lost+found" 
        ssh $host "rm -rf /hdfs/var" 
        ssh $host "rm -rf /local/opt/hadoop" 
        ssh $host "rm -rf /tmp/hadoop" 
        ssh $host "rm -rf /usr/bin/hadoop" 
        ssh $host "rm -rf /usr/hdp" 
        ssh $host "rm -rf /var/hadoop"
        
        # Remove configuration directories on every node
        ssh $host "rm -rf /etc/ambari-agent" 
        ssh $host "rm -rf /etc/ambari-metrics-grafana" 
        ssh $host "rm -rf /etc/ambari-server" 
        ssh $host "rm -rf /etc/ams-hbase" 
        ssh $host "rm -rf /etc/falcon" 
        ssh $host "rm -rf /etc/flume" 
        ssh $host "rm -rf /etc/hadoop" 
        ssh $host "rm -rf /etc/hadoop-httpfs" 
        ssh $host "rm -rf /etc/hbase" 
        ssh $host "rm -rf /etc/hive" 
        ssh $host "rm -rf /etc/hive-hcatalog" 
        ssh $host "rm -rf /etc/hive-webhcat" 
        ssh $host "rm -rf /etc/hive2" 
        ssh $host "rm -rf /etc/hst" 
        ssh $host "rm -rf /etc/knox" 
        ssh $host "rm -rf /etc/livy" 
        ssh $host "rm -rf /etc/mahout" 
        ssh $host "rm -rf /etc/oozie" 
        ssh $host "rm -rf /etc/phoenix" 
        ssh $host "rm -rf /etc/pig" 
        ssh $host "rm -rf /etc/ranger-admin" 
        ssh $host "rm -rf /etc/ranger-usersync" 
        ssh $host "rm -rf /etc/spark2" 
        ssh $host "rm -rf /etc/tez" 
        ssh $host "rm -rf /etc/tez_hive2" 
        ssh $host "rm -rf /etc/zookeeper"
        
        # Remove PID directories on every node
        ssh $host "rm -rf /var/run/ambari-agent" 
        ssh $host "rm -rf /var/run/ambari-metrics-grafana" 
        ssh $host "rm -rf /var/run/ambari-server" 
        ssh $host "rm -rf /var/run/falcon" 
        ssh $host "rm -rf /var/run/flume" 
        ssh $host "rm -rf /var/run/hadoop" 
        ssh $host "rm -rf /var/run/hadoop-mapreduce" 
        ssh $host "rm -rf /var/run/hadoop-yarn" 
        ssh $host "rm -rf /var/run/hbase" 
        ssh $host "rm -rf /var/run/hive" 
        ssh $host "rm -rf /var/run/hive-hcatalog" 
        ssh $host "rm -rf /var/run/hive2" 
        ssh $host "rm -rf /var/run/hst" 
        ssh $host "rm -rf /var/run/knox" 
        ssh $host "rm -rf /var/run/oozie" 
        ssh $host "rm -rf /var/run/webhcat" 
        ssh $host "rm -rf /var/run/zookeeper" 
        
        # Remove library directories on every node
        ssh $host "rm -rf /usr/lib/ambari-agent" 
        ssh $host "rm -rf /usr/lib/ambari-infra-solr-client" 
        ssh $host "rm -rf /usr/lib/ambari-metrics-hadoop-sink" 
        ssh $host "rm -rf /usr/lib/ambari-metrics-kafka-sink" 
        ssh $host "rm -rf /usr/lib/ambari-server-backups" 
        ssh $host "rm -rf /var/lib/ambari-agent" 
        ssh $host "rm -rf /var/lib/ambari-metrics-grafana" 
        ssh $host "rm -rf /var/lib/ambari-server" 
        ssh $host "rm -rf /usr/lib/ams-hbase" 
        ssh $host "rm -rf /var/lib/flume" 
        ssh $host "rm -rf /var/lib/hadoop-hdfs" 
        ssh $host "rm -rf /var/lib/hadoop-mapreduce" 
        ssh $host "rm -rf /var/lib/hadoop-yarn" 
        ssh $host "rm -rf /var/lib/hive"
        ssh $host "rm -rf /var/lib/hive2" 
        ssh $host "rm -rf /var/lib/knox" 
        ssh $host "rm -rf /var/lib/smartsense" 
        ssh $host "rm -rf /var/lib/storm"
        ssh $host "rm -rf /usr/lib/hadoop"
        ssh $host "rm -rf /usr/lib/hbase"
        ssh $host "rm -rf /usr/lib/hcatalog" 
        ssh $host "rm -rf /usr/lib/oozie"
        ssh $host "rm -rf /usr/lib/zookeeper" 
        ssh $host "rm -rf /var/lib/ganglia" 
        ssh $host "rm -rf /var/lib/oozie"
        ssh $host "rm -rf /var/lib/zookeeper"
        ssh $host "rm -rf /var/tmp/oozie"
        ssh $host "rm -rf /var/nagios"
        
        # Remove temporary files (note: this clears everything under /tmp on the host)
        echo "removing temporary files..."
        ssh $host "rm -rf /tmp/*"
        
        # Remove the command symlinks under /usr/bin on every node
        ssh $host "rm -rf /usr/bin/accumulo"
        ssh $host "rm -rf /usr/bin/atlas-start"
        ssh $host "rm -rf /usr/bin/atlas-stop"
        ssh $host "rm -rf /usr/bin/beeline"
        ssh $host "rm -rf /usr/bin/falcon"
        ssh $host "rm -rf /usr/bin/flume-ng"
        ssh $host "rm -rf /usr/bin/hbase"
        ssh $host "rm -rf /usr/bin/hcat"
        ssh $host "rm -rf /usr/bin/hdfs"
        ssh $host "rm -rf /usr/bin/hive"
        ssh $host "rm -rf /usr/bin/hiveserver2"
        ssh $host "rm -rf /usr/bin/kafka"
        ssh $host "rm -rf /usr/bin/mahout"
        ssh $host "rm -rf /usr/bin/mapred" 
        ssh $host "rm -rf /usr/bin/oozie" 
        ssh $host "rm -rf /usr/bin/oozied.sh" 
        ssh $host "rm -rf /usr/bin/phoenix-psql" 
        ssh $host "rm -rf /usr/bin/phoenix-queryserver" 
        ssh $host "rm -rf /usr/bin/phoenix-sqlline" 
        ssh $host "rm -rf /usr/bin/phoenix-sqlline-thin" 
        ssh $host "rm -rf /usr/bin/pig" 
        ssh $host "rm -rf /usr/bin/python-wrap" 
        ssh $host "rm -rf /usr/bin/ranger-admin" 
        ssh $host "rm -rf /usr/bin/ranger-admin-start" 
        ssh $host "rm -rf /usr/bin/ranger-admin-stop" 
        ssh $host "rm -rf /usr/bin/ranger-kms" 
        ssh $host "rm -rf /usr/bin/ranger-usersync" 
        ssh $host "rm -rf /usr/bin/ranger-usersync-start" 
        ssh $host "rm -rf /usr/bin/ranger-usersync-stop" 
        ssh $host "rm -rf /usr/bin/slider" 
        ssh $host "rm -rf /usr/bin/sqoop" 
        ssh $host "rm -rf /usr/bin/sqoop-codegen" 
        ssh $host "rm -rf /usr/bin/sqoop-create-hive-table" 
        ssh $host "rm -rf /usr/bin/sqoop-eval" 
        ssh $host "rm -rf /usr/bin/sqoop-export" 
        ssh $host "rm -rf /usr/bin/sqoop-help" 
        ssh $host "rm -rf /usr/bin/sqoop-import" 
        ssh $host "rm -rf /usr/bin/sqoop-import-all-tables" 
        ssh $host "rm -rf /usr/bin/sqoop-job" 
        ssh $host "rm -rf /usr/bin/sqoop-list-databases" 
        ssh $host "rm -rf /usr/bin/sqoop-list-tables" 
        ssh $host "rm -rf /usr/bin/sqoop-merge" 
        ssh $host "rm -rf /usr/bin/sqoop-metastore" 
        ssh $host "rm -rf /usr/bin/sqoop-version" 
        ssh $host "rm -rf /usr/bin/storm" 
        ssh $host "rm -rf /usr/bin/storm-slider" 
        ssh $host "rm -rf /usr/bin/worker-lanucher" 
        ssh $host "rm -rf /usr/bin/yarn" 
        ssh $host "rm -rf /usr/bin/zookeeper-client" 
        ssh $host "rm -rf /usr/bin/zookeeper-server" 
        ssh $host "rm -rf /usr/bin/zookeeper-server-cleanup"
        
        # Remove the PostgreSQL database used by Ambari
        ssh $host "yum remove -y postgresql"
        ssh $host "rm -rf /var/lib/pgsql"
        # Remove remaining Ambari packages (optional; uncomment to run)
        # ssh $host "yum remove -y ambari-*"
        # ssh $host "rm -rf /var/lib/ambari*"
        
        echo "$logPre======>$host is done! 
    "
    done
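
To run it, save the script on a node that can reach every host in the cluster and execute it as root; the file name below is only an example:

    # hypothetical file name; use whatever name the script was saved under
    chmod +x uninstall_ambari.sh
    ./uninstall_ambari.sh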
     
  • Original post: https://www.cnblogs.com/mymelody/p/9263459.html