链接:https://pan.baidu.com/s/1dlZlEcvwPck1JpSdBbXyYw 提取码:3y22
# Extract the Spark tarball into the modules directory.
# NOTE: the original command had no operation flag ("tar file -C dir" extracts
# nothing); -z gunzips, -x extracts, -f names the archive.
tar -zxf spark-2.4.0-bin-hadoop2.7.tgz -C /usr/local/modules/
# Enter Spark's conf directory and activate the template config files.
cd /usr/local/modules/spark-2.4.0-bin-hadoop2.7/conf
mv spark-env.sh.template spark-env.sh
mv slaves.template slaves

# Edit spark-env.sh and add the settings below, ONE export per line.
# (The original had all six exports collapsed onto a single line — a
# copy/paste artifact that is hard to read and easy to corrupt.)
vim spark-env.sh
export SCALA_HOME=/usr/local/modules/scala-2.10.4
export JAVA_HOME=/usr/local/modules/jdk1.8.0_201
export HADOOP_HOME=/usr/local/modules/hadoop-2.7.7
export HADOOP_CONF_DIR=/usr/local/modules/hadoop-2.7.7/etc/hadoop
export SPARK_HOME=/usr/local/modules/spark-2.4.0-bin-hadoop2.7
export SPARK_MASTER_IP=spark1
# Edit the slaves file: one worker hostname per line. With spark1 as the
# master, spark2 and spark3 will run the Spark worker daemons.
vim slaves
spark2
spark3
# Copy Hive's configuration into Spark's conf dir so Spark SQL can reach the
# Hive metastore. (Run this from Hive's conf directory, where hive-site.xml
# lives — the transcript does not show the cd; confirm your cwd first.)
cp hive-site.xml /usr/local/modules/spark-2.4.0-bin-hadoop2.7/conf

# Distribute the configured Spark installation to the worker nodes.
# NOTE: the original copied to spark1 (this host — redundant) and spark2 only,
# leaving spark3 without Spark even though the slaves file lists it; the
# correct targets are spark2 and spark3.
scp -r spark-2.4.0-bin-hadoop2.7 hadoop@spark2:/usr/local/modules
scp -r spark-2.4.0-bin-hadoop2.7 hadoop@spark3:/usr/local/modules
启动hive的metastore服务
# Start the Hive metastore service in the background. nohup keeps the daemon
# alive after this shell exits, and the redirect captures its output (a bare
# `&` would spam the terminal and die with the login session).
nohup hive --service metastore > /tmp/hive-metastore.log 2>&1 &
启动 Spark 的 master 和 worker 进程
# Start the Spark master and all workers listed in conf/slaves.
# Use the full sbin path: Hadoop also ships a `start-all.sh`, so a bare
# invocation can launch the wrong cluster if Hadoop's sbin is on PATH.
/usr/local/modules/spark-2.4.0-bin-hadoop2.7/sbin/start-all.sh
spark1的进程
spark2的进程
spark3的进程
在浏览器上可以看 master 的 8080端口,如下图,则spark启动成功