start.sh
2.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#!/bin/bash
# Submit a Spark application to YARN in cluster mode and verify that it
# reaches the RUNNING state within ~60 seconds.
#
# Usage: start.sh <app_name> <app_args> <main_class> <main_args> <jar>
#   Any argument may be "-" (or empty) to request its default:
#     app_name   - YARN application name   (default: <epoch-secs>_<nanos>)
#     app_args   - spark-submit options    (default: tuned dynamic allocation)
#     main_class - fully-qualified main class (REQUIRED)
#     main_args  - arguments for the main class (default: none)
#     jar        - path to the application jar  (REQUIRED)
#
# Side effects:
#   - removes the app's HDFS checkpoint dir /spark/checkpoint/<app_name>
#   - writes spark-submit output to /home/hadoop/logs/<app_name>.log
#   - kills the local spark-submit client once the app is RUNNING
#     (in yarn-cluster mode the driver runs on YARN, so the local client
#     is no longer needed)
#
# Exit status: 0 if the application was observed RUNNING, 1 otherwise.

source /etc/profile

app_name=$1
app_args=$2
app_main_class=$3
app_main_args=$4
app_jar=$5

log_dir=/home/hadoop/logs

# Default spark-submit options: YARN cluster mode with dynamic allocation
# and CMS GC tuning for executors.
default_args="--master yarn-cluster \
--executor-memory 2g \
--driver-memory 2g \
--executor-cores 2 \
--queue hadoop \
--conf spark.dynamicAllocation.enabled=true \
--conf spark.shuffle.service.enabled=true \
--conf spark.dynamicAllocation.executorIdleTimeout=65 \
--conf spark.dynamicAllocation.cachedExecutorIdleTimeout=65 \
--conf spark.dynamicAllocation.schedulerBacklogTimeout=2 \
--conf spark.dynamicAllocation.sustainedSchedulerBacklogTimeout=2 \
--conf spark.dynamicAllocation.initialExecutors=2 \
--conf spark.dynamicAllocation.maxExecutors=4 \
--conf spark.dynamicAllocation.minExecutors=4 \
--conf spark.default.parallelism=6 \
--conf spark.memory.fraction=0.5 \
--conf spark.memory.storageFraction=0.3 \
--conf spark.executor.extraJavaOptions=\"-XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+ParallelRefProcEnabled -XX:+CMSClassUnloadingEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=75\" "

default_name=$(date +%s_%N)

# "-" is the caller's placeholder for "not provided"; treat it like empty.
# Main class and jar are mandatory.
if [ "_-" = "_${app_main_class}" ] || [ "_" = "_${app_main_class}" ]; then
  echo "ERROR: main class (arg 3) is required" >&2
  exit 1
fi
if [ "_-" = "_${app_jar}" ] || [ "_" = "_${app_jar}" ]; then
  echo "ERROR: application jar (arg 5) is required" >&2
  exit 1
fi
if [ "_-" = "_${app_main_args}" ] || [ "_" = "_${app_main_args}" ]; then
  app_main_args=""
fi
if [ "_-" = "_${app_name}" ] || [ "_" = "_${app_name}" ]; then
  app_name=${default_name}
fi
if [ "_-" = "_${app_args}" ] || [ "_" = "_${app_args}" ]; then
  app_args=${default_args}
fi

spark_submit=$(command -v spark-submit)

mkdir -p "${log_dir}"

# Remove the previous run's checkpoint so the app starts fresh; a failure
# here (path absent on first run) is expected and ignored.
hdfs dfs -rm -r "/spark/checkpoint/${app_name}" 2>/dev/null

# Launch in the background, capturing BOTH stdout and stderr in the log.
# NOTE: ${app_args} and ${app_main_args} are deliberately unquoted — they
# carry multiple whitespace-separated options that must be word-split.
nohup "${spark_submit}" --name "${app_name}" ${app_args} \
  --class "${app_main_class}" "${app_jar}" ${app_main_args} \
  > "${log_dir}/${app_name}.log" 2>&1 &

# Poll YARN up to 3 times (20s apart) for the application state
# (column 6 of `yarn application -list` output).
RES=1
for i in {1..3}; do
  sleep 20
  app_state=$(yarn application -list | grep -w "${app_name}" | awk '{print $6}')
  if [ "_${app_state}" == "_RUNNING" ]; then
    # The cluster-mode driver is now on YARN; reap the local client JVM.
    process=$(ps -ef | grep "${app_name}" | grep '/bin/java' | grep -v grep | awk '{print $2}')
    if [ "_${process}" != "_" ]; then
      echo "${process}"
      kill -9 ${process}
    fi
    RES=1
    break
  fi
  RES=0
done

if [ "${RES}" -ne 1 ]; then
  exit 1
fi
exit 0