# Create the dedicated 'hadoop' user and set its password.
adduser hadoop
passwd hadoop

# Grant the hadoop user sudo rights. /etc/sudoers is read-only even for
# root, so temporarily add write permission, edit, then restore it.
# (NOTE(review): 'visudo' is the safer way to edit sudoers — it
# syntax-checks before saving.)
chmod u+w /etc/sudoers
# In the editor, below the line:  root   ALL=(ALL) ALL
# add the line:                   hadoop ALL=(ALL) ALL
vi /etc/sudoers
chmod u-w /etc/sudoers
# Disable the firewall so the Hadoop daemons can reach each other
# (acceptable for a single-node lab setup only).
service iptables status     # show current firewall state
service iptables stop       # stop it for the current session
# BUGFIX: '--list' must come before the service name; the original
# 'chkconfig iptables --list' is invalid chkconfig syntax.
chkconfig --list iptables   # show boot-time on/off settings
chkconfig iptables off      # keep it disabled across reboots
# Find any preinstalled OpenJDK packages.
rpm -qa | grep jdk
# Example output on this CentOS box (was a bare line in the original,
# which is not executable shell):
#   java-1.7.0-openjdk-1.7.0.51-2.4.5.5.el7.x86_64
#   java-1.7.0-openjdk-headless-1.7.0.51-2.4.5.5.el7.x86_64

# Remove them (--nodeps skips dependency checks) so the Oracle JDK
# installed next is the only Java on the machine.
rpm -e --nodeps java-1.7.0-openjdk-1.7.0.51-2.4.5.5.el7.x86_64
rpm -e --nodeps java-1.7.0-openjdk-headless-1.7.0.51-2.4.5.5.el7.x86_64
下載地址: jdk-7u79-linux-x64.tar.gz
使用 FileZilla 客戶端把 jdk-7u79-linux-x64.tar.gz 上傳到 CentOS 的 /usr/lib/jvm 目錄中(史上最簡單的上傳文件到 Linux 系統的方法)
# (chmod +x on a tarball is not needed for tar to read it, but harmless.)
sudo chmod u+x jdk-7u79-linux-x64.tar.gz

# Unpack the JDK into /usr/lib/jvm.
cd /usr/lib/jvm
sudo tar -zxvf ./jdk-7u79-linux-x64.tar.gz -C /usr/lib/jvm

# Add the JDK to the system-wide environment.
sudo gedit /etc/profile
# Append the following lines.
# BUGFIX: the unpacked directory is jdk1.7.0_79 (from jdk-7u79, matching
# the update-alternatives paths below), not jdk1.7.0_4579; and the JRE
# runtime jar is rt.jar, not tr.jar.
export JAVA_HOME=/usr/lib/jvm/jdk1.7.0_79
export JRE_HOME=/usr/lib/jvm/jdk1.7.0_79/jre
export CLASSPATH=.:$JRE_HOME/lib/rt.jar:$JAVA_HOME/lib:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$PATH

# Reload the profile and verify the JDK is picked up.
source /etc/profile
java -version

# Register this JDK with the alternatives system and select it.
sudo update-alternatives --install /usr/bin/java  java  /usr/lib/jvm/jdk1.7.0_79/bin/java  300
sudo update-alternatives --install /usr/bin/javac javac /usr/lib/jvm/jdk1.7.0_79/bin/javac 300
sudo update-alternatives --install /usr/bin/jar   jar   /usr/lib/jvm/jdk1.7.0_79/bin/jar   300
sudo update-alternatives --config java
# Set up passwordless SSH to localhost (the Hadoop start scripts use
# ssh to launch daemons).
ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys

# BUGFIX: OpenSSH's version flag is -V; 'ssh -version' is not a valid
# option and errors out.
ssh -V
ssh localhost   # should now log in without a password prompt
# Unpack Hadoop into /usr/local, drop the version suffix from the
# directory name, and hand ownership to the hadoop user.
sudo tar -zxvf ./hadoop-2.6.0.tar.gz -C /usr/local
cd /usr/local/
sudo mv ./hadoop-2.6.0/ ./hadoop
sudo chown -R hadoop:hadoop ./hadoop
# Put Hadoop's bin/ and sbin/ on the system-wide PATH.
sudo gedit /etc/profile
# Append the following lines (they were mashed onto one line in the
# original):
# set hadoop path
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# Reload the profile so the current shell sees the new PATH.
source /etc/profile
# Point Hadoop at the JDK installed above.
# NOTE(review): presumably this line is added to
# /usr/local/hadoop/etc/hadoop/hadoop-env.sh — confirm the target file.
export JAVA_HOME=/usr/lib/jvm/jdk1.7.0_79
<!-- NOTE(review): presumably the contents of etc/hadoop/core-site.xml
     (confirm). Points the default filesystem at HDFS on localhost:9000
     for a single-node setup. -->
<configuration> <property> <name>fs.defaultFS</name> <value>hdfs://localhost:9000</value> </property> </configuration>
<!-- NOTE(review): presumably the contents of etc/hadoop/hdfs-site.xml
     (confirm). Replication factor 1 because there is only one DataNode. -->
<configuration> <property> <name>dfs.replication</name> <value>1</value> </property> </configuration>
# Format HDFS — first run only; reformatting wipes the NameNode metadata.
cd /usr/local/hadoop/bin
hdfs namenode -format
# Start all daemons (HDFS + YARN) in one go.
cd /usr/local/hadoop/sbin
start-all.sh
成功的話,會看到 successfully formatted 的提示,且倒數第5行的提示如下:Exitting with status 0 表示成功,若爲 Exitting with status 1 則是出錯。若出錯(不應如此,請仔細檢查以前步驟),可試着加上 sudo,即 sudo bin/hdfs namenode -format 再試試看。
# Stop every daemon again when done.
stop-all.sh

# Verify with jps (BUGFIX: the command is lowercase 'jps'; the process
# names in the original line were its sample output, not part of the
# command). While the cluster is up it should list:
#   ResourceManager NameNode DataNode SecondaryNameNode NodeManager
# plus Jps itself.
# NOTE(review): this check appears after stop-all.sh in the original —
# presumably it is meant to be run while the daemons are up; confirm.
jps
WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
原因是 hadoop-2.6.0.tar.gz 安裝包是在32位機器上編譯的,64位的機器加載本地庫 .so 文件時出錯,不影響使用。
解決方法(二選一):一、重新編譯源碼後,將新的 lib/native 替換到集羣中原來的 lib/native;二、修改 hadoop-env.sh,增加 export HADOOP_OPTS="-Djava.library.path=$HADOOP_PREFIX/lib:$HADOOP_PREFIX/lib/native"