Showing 1 changed file with 23 additions and 5 deletions
@@ -7,10 +7,24 @@ app_main_class=$3
 app_main_args=$4
 app_jar=$5
 
-default_args="--master yarn-cluster --executor-memory 2g --driver-memory 2g --executor-cores 2 --queue hadoop --conf spark.dynamicAllocation.enabled=true --conf spark.shuffle.service.enabled=true "\
-    "--conf spark.dynamicAllocation.executorIdleTimeout=65 --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=65 --conf spark.dynamicAllocation.schedulerBacklogTimeout=2 --conf spark.dynamicAllocation.sustainedSchedulerBacklogTimeout=2 "\
-    "--conf spark.dynamicAllocation.initialExecutors=2 --conf spark.dynamicAllocation.maxExecutors=4 --conf spark.dynamicAllocation.minExecutors=4 --conf spark.default.parallelism=6 --conf spark.memory.fraction=0.5 --conf spark.memory.storageFraction=0.3 "\
-    "--conf spark.executor.extraJavaOptions='-XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+ParallelRefProcEnabled -XX:+CMSClassUnloadingEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=75' "
+default_args="--master yarn-cluster \
+    --executor-memory 2g \
+    --driver-memory 2g \
+    --executor-cores 2 \
+    --queue hadoop \
+    --conf spark.dynamicAllocation.enabled=true \
+    --conf spark.shuffle.service.enabled=true \
+    --conf spark.dynamicAllocation.executorIdleTimeout=65 \
+    --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=65 \
+    --conf spark.dynamicAllocation.schedulerBacklogTimeout=2 \
+    --conf spark.dynamicAllocation.sustainedSchedulerBacklogTimeout=2 \
+    --conf spark.dynamicAllocation.initialExecutors=2 \
+    --conf spark.dynamicAllocation.maxExecutors=4 \
+    --conf spark.dynamicAllocation.minExecutors=4 \
+    --conf spark.default.parallelism=6 \
+    --conf spark.memory.fraction=0.5 \
+    --conf spark.memory.storageFraction=0.3 \
+    --conf spark.executor.extraJavaOptions='-XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+ParallelRefProcEnabled -XX:+CMSClassUnloadingEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=75' "
 default_name=`date +%s_%N`
 if [ "_-" = "_${app_main_class}" ] || [ "_" = "_${app_main_class}" ];then
     exit 1
@@ -28,6 +42,10 @@ if [ "_-" = "_${app_args}" ] || [ "_" = "_${app_args}" ];then
     app_args=${default_args}
 fi
 spark_args="--name ${app_name} ${app_args} --class ${app_main_class} ${app_jar} ${app_main_args}"
-spark_home=`which spark_submit`
+spark_home=`which spark-submit`
 mkdir -p /home/hadoop/logs
+
+# remove any existing checkpoint data for this app before resubmitting
+hdfs dfs -rm -r "/spark/checkpoint/${app_name}"
+
 `nohup ${spark_home} ${spark_args} 1>&2 2>/home/hadoop/logs/${app_name}.log &`
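For context, a minimal sketch of how this wrapper might be invoked. The assignments for $1 and $2 are outside the hunks shown; from the later checks they appear to be app_name and app_args, with "-" falling back to default_name and default_args. The script name, main class, program arguments, and jar path below are hypothetical placeholders:

    # hypothetical call: app_name and spark args passed as "-" to pick up the defaults above
    ./submit_spark_app.sh "-" "-" com.example.StreamingJob "arg1 arg2" /opt/jobs/streaming-job.jar

Note that with spark.dynamicAllocation.minExecutors and maxExecutors both set to 4, dynamic allocation is effectively pinned to four executors regardless of the initialExecutors=2 setting.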