Shell script for automated Ambari uninstallation
#!/bin/bash
# Program:
#       Uninstall Ambari and HDP components from all cluster hosts automatically.
# History:
#       2014/01/13 - Ivan - 2862099249@qq.com - First release
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Get the hostname of every host in the cluster. Note: in /etc/hosts the IP and
# the hostname must be separated by exactly one space (because of cut -d ' ').
hostList=$(cat /etc/hosts | tail -n +3 | cut -d ' ' -f 2)
yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5
logPre=TDP

read -p "Please input your master hostname: " master
master=${master:-"master"}
ssh $master "ambari-server stop"
# Reset the Ambari database
ssh $master "ambari-server reset"

for host in $hostList
do
    #echo $host
    # Check connectivity to the host
    unPing=$(ping $host -c $pingCount | grep 'Unreachable' | wc -l)
    if [ "$unPing" == "$pingCount" ]; then
        echo -e "$logPre======>$host is unreachable, please check the '/etc/hosts' file"
        continue
    fi

    echo -e "$logPre======>$host deleting... \n"
    # 1) Delete hdp.repo, HDP.repo, HDP-UTILS.repo and ambari.repo
    ssh $host "rm -rf $yumReposDir/hdp.repo"
    ssh $host "rm -rf $yumReposDir/HDP*"
    ssh $host "rm -rf $yumReposDir/ambari.repo"

    # Remove the HDP-related packages
    ssh $host "yum remove -y sqoop.noarch"
    ssh $host "yum remove -y lzo-devel.x86_64"
    ssh $host "yum remove -y hadoop-libhdfs.x86_64"
    ssh $host "yum remove -y rrdtool.x86_64"
    ssh $host "yum remove -y hbase.noarch"
    ssh $host "yum remove -y pig.noarch"
    ssh $host "yum remove -y lzo.x86_64"
    ssh $host "yum remove -y ambari-log4j.noarch"
    ssh $host "yum remove -y oozie.noarch"
    ssh $host "yum remove -y oozie-client.noarch"
    ssh $host "yum remove -y gweb.noarch"
    ssh $host "yum remove -y snappy-devel.x86_64"
    ssh $host "yum remove -y hcatalog.noarch"
    ssh $host "yum remove -y python-rrdtool.x86_64"
    ssh $host "yum remove -y nagios.x86_64"
    ssh $host "yum remove -y webhcat-tar-pig.noarch"
    ssh $host "yum remove -y snappy.x86_64"
    ssh $host "yum remove -y libconfuse.x86_64"
    ssh $host "yum remove -y webhcat-tar-hive.noarch"
    ssh $host "yum remove -y ganglia-gmetad.x86_64"
    ssh $host "yum remove -y extjs.noarch"
    ssh $host "yum remove -y hive.noarch"
    ssh $host "yum remove -y hadoop-lzo.x86_64"
    ssh $host "yum remove -y hadoop-lzo-native.x86_64"
    ssh $host "yum remove -y hadoop-native.x86_64"
    ssh $host "yum remove -y hadoop-pipes.x86_64"
    ssh $host "yum remove -y nagios-plugins.x86_64"
    ssh $host "yum remove -y hadoop.x86_64"
    ssh $host "yum remove -y zookeeper.noarch"
    ssh $host "yum remove -y hadoop-sbin.x86_64"
    ssh $host "yum remove -y ganglia-gmond.x86_64"
    ssh $host "yum remove -y libganglia.x86_64"
    ssh $host "yum remove -y perl-rrdtool.x86_64"
    ssh $host "yum remove -y epel-release.noarch"
    ssh $host "yum remove -y compat-readline5*"
    ssh $host "yum remove -y fping.x86_64"
    ssh $host "yum remove -y perl-Crypt-DES.x86_64"
    ssh $host "yum remove -y exim.x86_64"
    ssh $host "yum remove -y ganglia-web.noarch"
    ssh $host "yum remove -y perl-Digest-HMAC.noarch"
    ssh $host "yum remove -y perl-Digest-SHA1.x86_64"
    ssh $host "yum remove -y bigtop-jsvc.x86_64"

    # Remove the alternatives symlinks. Each ssh command runs in its own shell,
    # so a separate "cd" would not persist; use absolute paths instead.
    ssh $host "rm -rf $alterNativesDir/hadoop-etc"
    ssh $host "rm -rf $alterNativesDir/zookeeper-conf"
    ssh $host "rm -rf $alterNativesDir/hbase-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-log"
    ssh $host "rm -rf $alterNativesDir/hadoop-lib"
    ssh $host "rm -rf $alterNativesDir/hadoop-default"
    ssh $host "rm -rf $alterNativesDir/oozie-conf"
    ssh $host "rm -rf $alterNativesDir/hcatalog-conf"
    ssh $host "rm -rf $alterNativesDir/hive-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-man"
    ssh $host "rm -rf $alterNativesDir/sqoop-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-confone"

    # Delete the service users
    ssh $host "userdel -rf nagios"
    ssh $host "userdel -rf hive"
    ssh $host "userdel -rf ambari-qa"
    ssh $host "userdel -rf hbase"
    ssh $host "userdel -rf oozie"
    ssh $host "userdel -rf hcat"
    ssh $host "userdel -rf mapred"
    ssh $host "userdel -rf hdfs"
    ssh $host "userdel -rf rrdcached"
    ssh $host "userdel -rf zookeeper"
    ssh $host "userdel -rf sqoop"
    ssh $host "userdel -rf puppet"
    ssh $host "userdel -rf flume"
    ssh $host "userdel -rf tez"
    ssh $host "userdel -rf yarn"

    # Delete leftover directories
    ssh $host "rm -rf /hadoop"
    ssh $host "rm -rf /etc/hadoop"
    ssh $host "rm -rf /etc/hbase"
    ssh $host "rm -rf /etc/hcatalog"
    ssh $host "rm -rf /etc/hive"
    ssh $host "rm -rf /etc/ganglia"
    ssh $host "rm -rf /etc/nagios"
    ssh $host "rm -rf /etc/oozie"
    ssh $host "rm -rf /etc/sqoop"
    ssh $host "rm -rf /etc/zookeeper"
    ssh $host "rm -rf /var/run/hadoop"
    ssh $host "rm -rf /var/run/hbase"
    ssh $host "rm -rf /var/run/hive"
    ssh $host "rm -rf /var/run/ganglia"
    ssh $host "rm -rf /var/run/nagios"
    ssh $host "rm -rf /var/run/oozie"
    ssh $host "rm -rf /var/run/zookeeper"
    ssh $host "rm -rf /var/log/hadoop"
    ssh $host "rm -rf /var/log/hbase"
    ssh $host "rm -rf /var/log/hive"
    ssh $host "rm -rf /var/log/nagios"
    ssh $host "rm -rf /var/log/oozie"
    ssh $host "rm -rf /var/log/zookeeper"
    ssh $host "rm -rf /usr/lib/hadoop"
    ssh $host "rm -rf /usr/lib/hbase"
    ssh $host "rm -rf /usr/lib/hcatalog"
    ssh $host "rm -rf /usr/lib/hive"
    ssh $host "rm -rf /usr/lib/oozie"
    ssh $host "rm -rf /usr/lib/sqoop"
    ssh $host "rm -rf /usr/lib/zookeeper"
    ssh $host "rm -rf /var/lib/hive"
    ssh $host "rm -rf /var/lib/ganglia"
    ssh $host "rm -rf /var/lib/oozie"
    ssh $host "rm -rf /var/lib/zookeeper"
    ssh $host "rm -rf /var/tmp/oozie"
    ssh $host "rm -rf /tmp/hive"
    ssh $host "rm -rf /tmp/nagios"
    ssh $host "rm -rf /tmp/ambari-qa"
    ssh $host "rm -rf /tmp/sqoop-ambari-qa"
    ssh $host "rm -rf /var/nagios"
    ssh $host "rm -rf /hadoop/oozie"
    ssh $host "rm -rf /hadoop/zookeeper"
    ssh $host "rm -rf /hadoop/mapred"
    ssh $host "rm -rf /hadoop/hdfs"
    ssh $host "rm -rf /tmp/hadoop-hive"
    ssh $host "rm -rf /tmp/hadoop-nagios"
    ssh $host "rm -rf /tmp/hadoop-hcat"
    ssh $host "rm -rf /tmp/hadoop-ambari-qa"
    ssh $host "rm -rf /tmp/hsperfdata_hbase"
    ssh $host "rm -rf /tmp/hsperfdata_hive"
    ssh $host "rm -rf /tmp/hsperfdata_nagios"
    ssh $host "rm -rf /tmp/hsperfdata_oozie"
    ssh $host "rm -rf /tmp/hsperfdata_zookeeper"
    ssh $host "rm -rf /tmp/hsperfdata_mapred"
    ssh $host "rm -rf /tmp/hsperfdata_hdfs"
    ssh $host "rm -rf /tmp/hsperfdata_hcat"
    ssh $host "rm -rf /tmp/hsperfdata_ambari-qa"

    # Remove the Ambari-related packages and files
    ssh $host "yum remove -y ambari-*"
    ssh $host "yum remove -y postgresql"
    ssh $host "rm -rf /var/lib/ambari*"
    ssh $host "rm -rf /var/log/ambari*"
    ssh $host "rm -rf /etc/ambari*"
    echo -e "$logPre======>$host is done! \n"
done