title: Setting Up a Fully Distributed Hadoop Cluster
date: 2017-12-25 22:12:54
Transfer the installation files to the server. Since I am running Ubuntu Server, I copy them over ssh with scp:

```bash
scp local_file remote_user@remote_ip:/home/user_name
```
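For instance, copying a Hadoop tarball to the master node might look like this (the archive name, user, and IP are hypothetical placeholders):

```bash
scp hadoop-2.7.3.tar.gz ubuntu@192.168.1.10:/home/ubuntu
```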
Edit /etc/hosts: comment out the second line (the one mapping 127.0.1.1), then add each node's IP address and corresponding hostname below it.
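A sketch of the resulting /etc/hosts, assuming one master and two workers with hypothetical IPs and hostnames:

```
127.0.0.1     localhost
# 127.0.1.1   myhost          # commented out so the hostname resolves to a real IP
192.168.1.10  master          # hypothetical addresses; substitute your nodes'
192.168.1.11  slave1
192.168.1.12  slave2
```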
Create the Hadoop log directory and set its owner, group, and permissions:

```bash
sudo mkdir /opt/hadoop/logs
sudo chown -R mapred /opt/hadoop/logs
sudo chgrp -R hadoop /opt/hadoop/logs
sudo chmod 775 /opt/hadoop/logs
```
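Equivalently, the owner and group can be set in a single chown call:

```bash
sudo chown -R mapred:hadoop /opt/hadoop/logs
```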
In core-site.xml (under $HADOOP_HOME/etc/hadoop, here /opt/hadoop/etc/hadoop). MAIN_IP is the IP of the VM acting as the master node (same below); 9000 is the port and must not be changed:

```xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://MAIN_IP:9000</value>
  </property>
</configuration>
```
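Once the environment variables at the end of these notes are in place, the effective value can be sanity-checked with hdfs getconf:

```bash
# Should print hdfs://MAIN_IP:9000 if core-site.xml is being picked up
hdfs getconf -confKey fs.defaultFS
```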
In mapred-site.xml:

```xml
<configuration>
  <!-- Run MapReduce jobs on YARN -->
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
```
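In Hadoop 2.x tarballs this file usually ships only as a template, so create it first if it does not exist:

```bash
cd /opt/hadoop/etc/hadoop
cp mapred-site.xml.template mapred-site.xml
```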
In yarn-site.xml:

```xml
<configuration>
  <!-- Address of the ResourceManager -->
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>MAIN_IP</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.local-dirs</name>
    <value>/data/hadoop/yarn/logs</value>
  </property>
</configuration>
```
In hdfs-site.xml:

```xml
<configuration>
  <property>
    <name>dfs.nameservices</name>
    <value>hadoop-cluster1</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/data/hadoop/hdfs/nn</value>
  </property>
  <property>
    <name>fs.checkpoint.dir</name>
    <value>file:/data/hadoop/hdfs/snn</value>
  </property>
  <property>
    <name>fs.checkpoint.edits.dir</name>
    <value>file:/data/hadoop/hdfs/snn</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/data/hadoop/hdfs/dn</value>
  </property>
</configuration>
```
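Before HDFS is started for the first time, the NameNode directory configured above has to be initialized once. A typical invocation, run on the master node as the hdfs user:

```bash
# One-time format of the namespace in /data/hadoop/hdfs/nn.
# Do not re-run on a cluster that already holds data, since it wipes HDFS metadata.
hdfs namenode -format
```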
Create the data directory:

```bash
sudo mkdir /data
```

Under /data, create a hadoop directory:

```bash
sudo mkdir /data/hadoop
```

Under hadoop, create the hdfs and yarn directories:

```bash
sudo mkdir /data/hadoop/hdfs
sudo mkdir /data/hadoop/yarn
```

Under hdfs, create the dn, nn, and snn directories:

```bash
sudo mkdir /data/hadoop/hdfs/dn
sudo mkdir /data/hadoop/hdfs/nn
sudo mkdir /data/hadoop/hdfs/snn
```

Under yarn, create the logs and nm directories:

```bash
sudo mkdir /data/hadoop/yarn/logs
sudo mkdir /data/hadoop/yarn/nm
```
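The same tree can be created in one shot with mkdir -p (equivalent to the step-by-step commands above):

```bash
sudo mkdir -p /data/hadoop/hdfs/{dn,nn,snn} /data/hadoop/yarn/{logs,nm}
```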
Change the owner and group of the hdfs and yarn directories:

```bash
cd /data/hadoop
sudo chown -R hdfs hdfs/
sudo chgrp -R hadoop hdfs/
sudo chgrp -R hadoop yarn/
```
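To verify that the ownership changes took effect:

```bash
# hdfs/ should now show user hdfs and group hadoop; yarn/ should show group hadoop
ls -l /data/hadoop
```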
Switch to the hdfs account with `su - hdfs` (default password: hadoop). Make sure the following environment variables are set; if they are missing, add them:

```bash
export JAVA_HOME=/opt/jdk
export HADOOP_HOME=/opt/hadoop
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
```
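To make them permanent, the exports can go at the end of the hdfs user's ~/.bashrc (assuming the JDK lives in /opt/jdk and Hadoop in /opt/hadoop, as above); afterwards `hadoop version` is a quick check that PATH is set correctly:

```bash
cat >> ~/.bashrc <<'EOF'
export JAVA_HOME=/opt/jdk
export HADOOP_HOME=/opt/hadoop
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
EOF
source ~/.bashrc
hadoop version   # should print the installed Hadoop version
```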