1. Install the JDK
See https://blog.51cto.com/13001751/1980999
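A quick sanity check once the JDK is in place (assuming the jdk1.8.0_191 path that spark-env.sh uses later in this guide):
java -version # should report java version "1.8.0_191"
echo $JAVA_HOME # should print /usr/local/jdk1.8.0_191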
2. Install Scala
Download: https://downloads.lightbend.com/scala/2.12.8/scala-2.12.8.tgz
Upload the package to /usr/local and extract it:
tar -zxvf scala-2.12.8.tgz
rm -rf scala-2.12.8.tgz
Configure the environment variables:
vi /etc/profile
export SCALA_HOME=/usr/local/scala-2.12.8
export PATH=$PATH:$JAVA_HOME/bin:$SCALA_HOME/bin
Copy to the remaining nodes:
scp -r scala-2.12.8 192.168.0.109:/usr/local/
scp -r scala-2.12.8 192.168.0.110:/usr/local/
scp /etc/profile 192.168.0.109:/etc/
scp /etc/profile 192.168.0.110:/etc/
Apply the environment variables (on every node): source /etc/profile
Verify: scala -version
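As an extra smoke test, you can evaluate a one-line expression; if the toolchain is wired up correctly this prints 2:
scala -e 'println(1 + 1)'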
3. Passwordless SSH login
See https://blog.51cto.com/13001751/2487972
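The linked post has the details; the usual flow is roughly the following (a minimal sketch, assuming the root user and the hostnames set up in step 5):
ssh-keygen -t rsa # on spark1, accept the defaults
ssh-copy-id root@spark2 # push the public key to each worker
ssh-copy-id root@spark3
ssh spark2 # should now log in without a password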
4. Install Hadoop
See https://blog.51cto.com/13001751/2487972
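Once Hadoop is installed, confirm the version and that the daemons come up (a sketch assuming the hadoop-2.7.7 layout used below, with $HADOOP_HOME/bin and $HADOOP_HOME/sbin on the PATH):
hadoop version # should report Hadoop 2.7.7
start-dfs.sh && start-yarn.sh
jps # NameNode/ResourceManager on the master, DataNode/NodeManager on the workers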
5. Edit the hosts file
On the Windows client: C:\Windows\System32\drivers\etc\hosts
192.168.0.108 spark1
192.168.0.109 spark2
192.168.0.110 spark3
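The Windows hosts file only helps the client machine; the cluster nodes themselves need the same mappings in /etc/hosts so that names such as spark1 resolve. Run on every node:
cat >> /etc/hosts <<'EOF'
192.168.0.108 spark1
192.168.0.109 spark2
192.168.0.110 spark3
EOF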
6. Install Spark
Upload the package and extract it:
cd /usr/local/
tar -zxvf spark-2.4.5-bin-hadoop2.7.tgz
cd /usr/local/spark-2.4.5-bin-hadoop2.7/conf/ # enter the Spark configuration directory
mv spark-env.sh.template spark-env.sh # create spark-env.sh from the bundled template
vi spark-env.sh # add the following settings
export SPARK_HOME=/usr/local/spark-2.4.5-bin-hadoop2.7
export SCALA_HOME=/usr/local/scala-2.12.8
export JAVA_HOME=/usr/local/jdk1.8.0_191
export HADOOP_HOME=/usr/local/hadoop-2.7.7
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$SCALA_HOME/bin
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
export SPARK_MASTER_IP=spark1
export SPARK_LOCAL_DIRS=/usr/local/spark-2.4.5-bin-hadoop2.7
export SPARK_DRIVER_MEMORY=1G
export SPARK_LIBRARY_PATH=.:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$HADOOP_HOME/lib/native
vi slaves # list one worker hostname per line
spark2
spark3
scp -r /usr/local/spark-2.4.5-bin-hadoop2.7 root@spark2:/usr/local/
scp -r /usr/local/spark-2.4.5-bin-hadoop2.7 root@spark3:/usr/local/
cd /usr/local/spark-2.4.5-bin-hadoop2.7
./sbin/start-all.sh # do not run a bare start-all.sh; that command belongs to Hadoop
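A few quick checks confirm the cluster is up (the SparkPi example ships with the Spark distribution):
jps # Master on spark1; Worker on spark2 and spark3
Open http://spark1:8080 in a browser; the standalone master UI should list two workers.
./bin/run-example SparkPi 10 # should end with a line like "Pi is roughly 3.14..."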