Bootstrap

记录一下 Linux 集群中一些组件的启停脚本。

首先启动 zk(ZooKeeper),由它为集群提供高可用能力,包括状态监控、
故障转移等等。

#!/bin/bash
# Cluster bootstrap: ZooKeeper -> Hadoop (HDFS/YARN via start-all.sh) -> Kafka -> Flume channels.
# Order matters: Kafka depends on ZooKeeper; the Flume channels feed/consume Kafka.
echo "flume信息采集通道"
zk.sh start
start-all.sh
echo "开启kafka"
# Test kafka.sh's exit status directly instead of the fragile `[ $? -eq "0" ]` pattern.
if kafka.sh start; then
	echo "kafka开启成功"
fi
f1.sh start
f2.sh start

start-all.sh 包括 hdfs、yarn 的启动;
再开启 kafka,kafka 也是依赖 zk 启动管理。
f1、f2 分别为两个 flume 通道,这是为了解耦合,也可以实现多数据接入,利用 kafka 削峰填谷等等。
#!/bin/bash

启动hive的metastore服务

# Launch the Hive metastore in the background, then poll until its Thrift
# port is listening before declaring it up.
hive --service metastore > /dev/null 2>&1 &
echo -n "Starting metastore service,please wait"
mt=1
while [ "$mt" -ne 0 ]; do
	sleep 0.5
	echo -n "."
	# 9083 is the Hive metastore's default Thrift port — adjust to match
	# the port actually configured on this server (TODO confirm).
	netstat -nltp | grep 9083 > /dev/null 2>&1
	mt=$?
done
echo ""
echo -e "\e[32mHiveMetastore service is running!\e[0m"

启动hiveserver2服务

# Launch HiveServer2 in the background, then poll until its Thrift port
# (10000 by default) is listening before declaring it up.
hiveserver2 > /dev/null 2>&1 &
echo -n "Starting hiveserver2 service,please wait"
hs2_status=1
while [ "$hs2_status" -ne 0 ]; do
	sleep 0.5
	echo -n "."
	# Port number must match the port configured for hive on this server.
	netstat -nltp | grep 10000 > /dev/null 2>&1
	hs2_status=$?
done
echo ""
echo -e "\e[32mHiveServer2 service is running!\e[0m"

kafka配置:

#!/bin/bash
# kafka.sh {start|stop} — start or stop the Kafka broker on every node via ssh.
case "$1" in
"start")
	for i in test02 test03 test04; do
		echo "--------$i 启动kafka---------"
		ssh "$i" "kafka-server-start.sh -daemon /opt/module/kafka/config/server.properties"
	done
	;;
"stop")
	for i in test02 test03 test04; do
		echo "--------$i 停止kafka---------"
		# kafka-server-stop.sh takes no -daemon/config arguments; it simply
		# signals the running broker process.
		ssh "$i" "kafka-server-stop.sh"
	done
	;;
esac

flume的两个配置:
f1.sh


#!/bin/bash
# f1.sh {start|stop} — collection-side Flume agent (file -> Kafka) on test02.
case "$1" in
	"start")
		echo " --------启动hadoop102采集flume-------"
		ssh test02 "nohup /opt/module/flume/bin/flume-ng agent -n a1 -c /opt/module/flume/conf/ -f /opt/module/flume/job/file_to_kafka.conf >/dev/null 2>&1 &"
		;;
	"stop")
		echo " --------停止hadoop102采集flume-------"
		# Plain kill (SIGTERM) so Flume can shut down cleanly, consistent with
		# f2.sh; SIGKILL (-9) would prevent the agent from flushing its channel.
		ssh test02 "ps -ef | grep file_to_kafka | grep -v grep | awk '{print \$2}' | xargs -n1 kill"
		;;
esac

f2的配置

#!/bin/bash
# f2.sh {start|stop} — consumer-side Flume agent (Kafka -> HDFS) on test04.
case "$1" in
	"start")
		echo " --------启动hadoop104日志数据flume-------"
		ssh test04 "nohup /opt/module/flume/bin/flume-ng agent -n a1 -c /opt/module/flume/conf -f /opt/module/flume/job/kafka_to_hdfs.conf >/dev/null 2>&1 &"
		;;
	"stop")
		echo " --------停止hadoop104日志数据flume-------"
		ssh test04 "ps -ef | grep kafka_to_hdfs | grep -v grep | awk '{print \$2}' | xargs -n1 kill"
		;;
esac