1、在 Hadoop on YARN 环境基础上，增加 Spark 配置。
spark-env.sh

export HADOOP_CONF_DIR=/usr/local/hadoop34/etc/hadoop
export YARN_CONF_DIR=/usr/local/hadoop34/etc/hadoop

workers文件（仅 standalone 模式下由 sbin 启动脚本使用；纯 YARN 模式可不配置）:

slave1

2、运行测试

./bin/spark-submit --master yarn --class org.apache.spark.examples.SparkPi ./examples/jars/spark-examples_2.12-3.5.5.jar 10

# 运行pyspark
./bin/pyspark --master yarn


标签: none

添加新评论

icon_mrgreen.gificon_neutral.gificon_twisted.gificon_arrow.gificon_eek.gificon_smile.gificon_confused.gificon_cool.gificon_evil.gificon_biggrin.gificon_idea.gificon_redface.gificon_razz.gificon_rolleyes.gificon_wink.gificon_cry.gificon_surprised.gificon_lol.gificon_mad.gificon_sad.gificon_exclaim.gificon_question.gif