1.spark集群的安装
- spark-2.4.1-bin-hadoop2.7.tgz
tar -zxvf spark-2.4.1-bin-hadoop2.7.tgz
mv spark-2.4.1-bin-hadoop2.7 spark
cd conf
mv slaves.template slaves
mv spark-env.sh.template spark-env.sh
vim slaves
slave1
slave2
slave3
vim spark-env.sh
export JAVA_HOME=/usr/default/java
scp -r spark slave1:/usr/local
...
vim /etc/profile
export SPARK_HOME=/usr/local/spark
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
2.启动spark
$SPARK_HOME/sbin/start-all.sh
输入地址:http://192.168.0.100:8080
spark.png

spark.png








网友评论