@@ -7,10 +7,24 @@ app_main_class=$3
 app_main_args=$4
 app_jar=$5
 
-default_args="--master yarn-cluster --executor-memory 2g --driver-memory 2g --executor-cores 2 --queue hadoop --conf spark.dynamicAllocation.enabled=true --conf spark.shuffle.service.enabled=true "\
-"--conf spark.dynamicAllocation.executorIdleTimeout=65 --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=65 --conf spark.dynamicAllocation.schedulerBacklogTimeout=2 --conf spark.dynamicAllocation.sustainedSchedulerBacklogTimeout=2 "\
-"--conf spark.dynamicAllocation.initialExecutors=2 --conf spark.dynamicAllocation.maxExecutors=4 --conf spark.dynamicAllocation.minExecutors=4 --conf spark.default.parallelism=6 --conf spark.memory.fraction=0.5 --conf spark.memory.storageFraction=0.3 "\
-"--conf spark.executor.extraJavaOptions='-XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+ParallelRefProcEnabled -XX:+CMSClassUnloadingEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=75' "
default_args="--master yarn-cluster \
|
|
|
--executor-memory 2g \
|
|
|
--driver-memory 2g \
|
|
|
--executor-cores 2 \
|
|
|
--queue hadoop \
|
|
|
--conf spark.dynamicAllocation.enabled=true \
|
|
|
--conf spark.shuffle.service.enabled=true \
|
|
|
--conf spark.dynamicAllocation.executorIdleTimeout=65 \
|
|
|
--conf spark.dynamicAllocation.cachedExecutorIdleTimeout=65 \
|
|
|
--conf spark.dynamicAllocation.schedulerBacklogTimeout=2 \
|
|
|
--conf spark.dynamicAllocation.sustainedSchedulerBacklogTimeout=2 \
|
|
|
--conf spark.dynamicAllocation.initialExecutors=2 \
|
|
|
--conf spark.dynamicAllocation.maxExecutors=4 \
|
|
|
--conf spark.dynamicAllocation.minExecutors=4 \
|
|
|
--conf spark.default.parallelism=6 \
|
|
|
--conf spark.memory.fraction=0.5 \
|
|
|
--conf spark.memory.storageFraction=0.3 \
|
|
|
--conf spark.executor.extraJavaOptions='-XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+ParallelRefProcEnabled -XX:+CMSClassUnloadingEnabled -XX:+UseCMSCompactAtFullCollection -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=75' "
|
|
|
 default_name=`date +%s_%N`
 if [ "_-" = "_${app_main_class}" ] || [ "_" = "_${app_main_class}" ];then
 exit 1
@@ -28,6 +42,10 @@ if [ "_-" = "_${app_args}" ] || [ "_" = "_${app_args}" ];then
 app_args=${default_args}
 fi
 spark_args="--name ${app_name} ${app_args} --class ${app_main_class} ${app_jar} ${app_main_args}"
-spark_home=`which spark_submit`
+spark_home=`which spark-submit`
 mkdir -p /home/hadoop/logs
+
+hdfs dfs -rm -r "/spark/checkpoint/${app_name}"
+
+
 nohup ${spark_home} ${spark_args} > /home/hadoop/logs/${app_name}.log 2>&1 &
\ No newline at end of file
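
For reference, a minimal sketch of how this wrapper might be invoked once the change is applied. The hunks above only confirm $3 (app_main_class), $4 (app_main_args) and $5 (app_jar); the script name, the mapping of app_name and app_args to the first two positional parameters, and every value below are assumptions for illustration. Passing "-" or an empty string as app_args falls back to default_args, while "-" or an empty main class makes the script exit 1.

# Hypothetical invocation; script name, $1/$2 order and all values are placeholders.
# Only $3=main class, $4=main args, $5=jar are confirmed by the hunks above.
./spark-run.sh my_stream_job "-" com.example.MyStreamApp "arg1 arg2" /home/hadoop/jars/my-stream-app.jar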