Because the Linux system here is 64-bit while the native libraries bundled with the Hadoop release are 32-bit, the Hadoop source package has to be downloaded and compiled locally.
Some supporting software needs to be installed first:
# Install basic packages
[root@localhost ~]# su root
[root@localhost ~]# yum -y install gcc gcc-c++ svn cmake git zlib zlib-devel openssl openssl-devel rsync
or
[root@localhost ~]# yum -y groupinstall 'Development Tools'
[root@localhost ~]# yum -y install cmake zlib-devel openssl openssl-devel rsync
Install the JDK and Maven
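The JDK install itself is not spelled out in these notes; a minimal sketch, assuming a JDK 7 tarball (the file and directory names below are only examples) and the /root/java/jdk path that /etc/profile points at later:
# Install the JDK under /root/java and symlink it to /root/java/jdk
mkdir -p /root/java
tar zxvf jdk-7u45-linux-x64.tar.gz -C /root/java
ln -s /root/java/jdk1.7.0_45 /root/java/jdk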
# Install Apache Maven 3.1.1
cd /usr/local/src
wget http://ftp.tc.edu.tw/pub/Apache/maven/maven-3/3.1.1/binaries/apache-maven-3.1.1-bin.tar.gz
tar zxvf apache-maven-3.1.1-bin.tar.gz -C /usr/local
ln -s /usr/local/apache-maven-3.1.1/bin/mvn /usr/bin/mvn
# Install FindBugs
cd /usr/local/src
wget -O findbugs-2.0.2.tar.gz 'http://prdownloads.sourceforge.net/findbugs/findbugs-2.0.2.tar.gz?download'
tar zxvf findbugs-2.0.2.tar.gz -C /usr/local/
ln -s /usr/local/findbugs-2.0.2/bin/findbugs /usr/bin/findbugs
# Install Protoc 2.5.0
cd /usr/local/src
wget https://protobuf.googlecode.com/files/protobuf-2.5.0.tar.gz
tar zxvf protobuf-2.5.0.tar.gz -C /usr/local/src
cd /usr/local/src/protobuf-2.5.0
./configure
make
make install
ln -s /usr/local/bin/protoc /usr/bin/protoc
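The Hadoop 2.2.0 build insists on protoc 2.5.0 exactly, so it is worth verifying the install; if protoc reports a missing libprotobuf shared library, running ldconfig after make install usually resolves it:
protoc --version    # should print: libprotoc 2.5.0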
Modify pom.xml:
In hadoop-2.2.0-src/hadoop-common-project/hadoop-auth/pom.xml, add the missing jetty-util test dependency:
<dependency>
  <groupId>org.mortbay.jetty</groupId>
  <artifactId>jetty-util</artifactId>
  <scope>test</scope>
</dependency>
hadoop-2.2.0-src/hadoop-common-project — the dependencies section should then contain both jetty-util and jetty with test scope:
<dependencies>
  <dependency>
    <groupId>org.mortbay.jetty</groupId>
    <artifactId>jetty-util</artifactId>
    <scope>test</scope>
  </dependency>
  <dependency>
    <groupId>org.mortbay.jetty</groupId>
    <artifactId>jetty</artifactId>
    <scope>test</scope>
  </dependency>
</dependencies>
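With the pom.xml fix in place, the native build is started from the top of the source tree; the flags below are the usual ones for a 64-bit native build:
cd /root/java/hadoop-2.2.0-src
mvn package -Pdist,native -DskipTests -Dtar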
After the build completes, /root/java/hadoop-2.2.0-src/hadoop-dist/target will contain a hadoop-2.2.0 directory built for 64-bit; simply copy it out and use it.
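To confirm the result really is 64-bit, check the native library with file (the path assumes the default dist layout):
file hadoop-2.2.0/lib/native/libhadoop.so.1.0.0    # should report a 64-bit ELF shared object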
Installation:
Modify the hostname:
On Fedora 9 the hostname command does not read the /etc/sysconfig/network file at all; it reads /etc/hostname instead.
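On a systemd-based system the hostname can be set in one step (hadoop-master below is only an example name):
hostnamectl set-hostname hadoop-master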
輔助操做:/etc/rc.d/init.d/network restart 從新啓動網絡服務
systemctl stop iptables.service 關閉防火牆
Modify /etc/profile:
export TERM=xterm-color
export GOROOT=/root/go
export PATH=$PATH:$GOROOT/bin
JAVA_HOME=/root/java/jdk
CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
PATH=$JAVA_HOME/bin:$PATH:$GOROOT/bin
MAVEN_HOME=/root/java/maven
PATH=${PATH}:${MAVEN_HOME}/bin

export HADOOP_PREFIX=/root/java/hadoop-2.2.0
export PATH=$PATH:$HADOOP_PREFIX/bin
export PATH=$PATH:$HADOOP_PREFIX/sbin
export HADOOP_MAPRED_HOME=${HADOOP_PREFIX}
export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
export YARN_HOME=${HADOOP_PREFIX}
export HADOOP_CONF_DIR=${HADOOP_PREFIX}/etc/hadoop
export YARN_CONF_DIR=${HADOOP_PREFIX}/etc/hadoop

export JAVA_HOME CLASSPATH PATH MAVEN_HOME
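Reload the file so the new variables take effect in the current shell:
source /etc/profile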
Modify /etc/hosts:
0.0.0.0 localhost localhost.localdomain localhost4 localhost4.localdomain4
When running inside a virtual machine, services bound only to 127.0.0.1 cannot be reached from outside the VM;
0.0.0.0 stands for all local addresses, so daemons listen on every interface.
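The Hadoop site configuration itself is not shown in these notes; at minimum, a single-node setup needs fs.defaultFS set in etc/hadoop/core-site.xml before formatting (hdfs://localhost:9000 below is only the conventional example value):
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>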
hadoop namenode -format
Start the daemons: start-dfs.sh and start-yarn.sh
[root@localhost target]# jps
2883 ResourceManager
2697 SecondaryNameNode
2493 DataNode
2980 NodeManager
2348 NameNode
3587 Jps
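As a further check, the web UIs should be reachable on this release's default ports: the NameNode at http://<host>:50070 and the ResourceManager at http://<host>:8088.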