http://mirrors.hust.edu.cn/ap...html
Documentation: http://hadoop.apache.org/docs/node
# JDK environment: the daemons are started remotely (over ssh), so ${JAVA_HOME} from the login shell is not visible; set it explicitly
export JAVA_HOME=/export/servers/jdk1.8.0_141
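This export normally lives in etc/hadoop/hadoop-env.sh. A minimal sketch, assuming Hadoop is unpacked under /export/servers/hadoop-3.1.1 as in the rest of this guide:

```shell
# Append JAVA_HOME to hadoop-env.sh so remotely started daemons can resolve it
echo 'export JAVA_HOME=/export/servers/jdk1.8.0_141' >> /export/servers/hadoop-3.1.1/etc/hadoop/hadoop-env.sh

# Confirm the JDK itself runs
/export/servers/jdk1.8.0_141/bin/java -version
```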
core-site.xml
<configuration>
  <!-- The file system Hadoop uses (URI): HDFS, and where HDFS is located -->
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://yh01:9000</value>
  </property>
  <!-- Where Hadoop stores files generated at runtime -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/export/data/HADOOP/apps/hadoop-3.1.1/tmp</value>
  </property>
</configuration>
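As a quick sanity check (once HADOOP_HOME/bin is on the PATH, configured further below), `hdfs getconf` prints the value the daemons will actually see; a sketch:

```shell
# Should print hdfs://yh01:9000 if core-site.xml is being picked up
hdfs getconf -confKey fs.defaultFS
```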
hdfs-site.xml

<configuration>
  <!-- Where the NameNode stores the HDFS namespace metadata -->
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/export/data/HADOOP/hdfs/name</value>
  </property>
  <!-- Physical storage location of HDFS data blocks on each DataNode -->
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/export/data/HADOOP/hdfs/data</value>
  </property>
  <!-- Number of HDFS replicas (3 here); it should not exceed the number of DataNode machines -->
  <property>
    <name>dfs.replication</name>
    <value>3</value>
  </property>
  <!-- Secondary NameNode HTTP address -->
  <property>
    <name>dfs.secondary.http.address</name>
    <value>yh01:50090</value>
  </property>
</configuration>
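The directories referenced above are not created automatically in every setup. A small sketch, run on every node, using the paths from this guide:

```shell
# Create the NameNode metadata dir, the DataNode block dir, and the tmp dir up front
mkdir -p /export/data/HADOOP/hdfs/name \
         /export/data/HADOOP/hdfs/data \
         /export/data/HADOOP/apps/hadoop-3.1.1/tmp
```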
mapred-site.xml

<configuration>
  <!-- The framework MapReduce jobs run on; the default is "local" (local mode) -->
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
yarn-site.xml

<configuration>
  <!-- Hostname of the YARN ResourceManager (also serves the management web UI) -->
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>yh01</value>
  </property>
  <!-- How reducers fetch data -->
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>
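Once the cluster has been started (see the start-up section below), the NodeManagers registered with this ResourceManager can be listed; a verification sketch:

```shell
# Should list yh01, yh02 and yh03 as RUNNING NodeManagers
yarn node -list -all
```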
HDFS_DATANODE_USER=root
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
ERROR: Attempting to launch hdfs namenode as root
ERROR: but there is no HDFS_NAMENODE_USER defined. Aborting launch.
Starting datanodes
ERROR: Attempting to launch hdfs datanode as root
ERROR: but there is no HDFS_DATANODE_USER defined. Aborting launch.
Starting secondary namenodes [localhost.localdomain]
ERROR: Attempting to launch hdfs secondarynamenode as root
ERROR: but there is no HDFS_SECONDARYNAMENODE_USER defined. Aborting launch.
YARN_RESOURCEMANAGER_USER=root
HDFS_DATANODE_SECURE_USER=yarn
YARN_NODEMANAGER_USER=root
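One way to apply the two sets of variables above (an assumption about placement, not the only option): add the HDFS_* lines to the top of sbin/start-dfs.sh and sbin/stop-dfs.sh and the YARN_* lines to the top of sbin/start-yarn.sh and sbin/stop-yarn.sh, or simply export them once from etc/hadoop/hadoop-env.sh. A sketch of the latter:

```shell
# Append the run-as users to hadoop-env.sh (running everything as root, as this guide does)
cat >> /export/servers/hadoop-3.1.1/etc/hadoop/hadoop-env.sh <<'EOF'
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
EOF
```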
export JAVA_HOME=/export/servers/jdk1.8.0_141
export PATH=$PATH:$JAVA_HOME/bin

# hadoop
export HADOOP_HOME=/export/servers/hadoop-3.1.1
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
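After editing /etc/profile, reload it and confirm both tool chains resolve; a quick check:

```shell
source /etc/profile
java -version      # should report 1.8.0_141
hadoop version     # should report Hadoop 3.1.1
```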
[root@yh01]# cd $HADOOP_HOME
[root@yh01 hadoop-3.1.1]# bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-3.1.1.jar grep input output 'dfs[a-z.]+'
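The grep example expects an input directory in HDFS and writes its result to output. A sketch of preparing sample input and reading the result, following the pattern in the Apache single-node guide (the *.xml sample data is just an illustration):

```shell
# Upload some sample files as the job input (run from $HADOOP_HOME)
bin/hdfs dfs -mkdir -p /user/root
bin/hdfs dfs -mkdir input
bin/hdfs dfs -put etc/hadoop/*.xml input

# ... run the grep job as above, then inspect the output
bin/hdfs dfs -cat output/*
```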
Now check whether you can ssh to localhost without a password:

ssh yh01

If you cannot ssh to localhost without a passphrase, run the following commands:

ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys

Add this machine's own ssh key to its own authorized_keys, so that start-all.sh can start every machine listed in the workers file without a password prompt. At the same time, distribute yh01's key to the other machines: on each machine, ~/.ssh/authorized_keys should contain both that machine's key and the master node's key. One way to do this is shown below.
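A minimal way to distribute yh01's key, assuming ssh-copy-id is available (part of the openssh-clients package on CentOS):

```shell
# Push the public key generated above to the other two nodes
ssh-copy-id root@yh02
ssh-copy-id root@yh03

# Each of these should now succeed without a password prompt
ssh yh02 hostname
ssh yh03 hostname
```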
Settings to change in sshd_config (uncomment them if they are commented out):

# these two enable public-key authentication
RSAAuthentication yes
PubkeyAuthentication yes
# the file that records authorized public keys
AuthorizedKeysFile .ssh/authorized_keys
# enable password authentication
PasswordAuthentication yes
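If any of these settings were changed, sshd has to re-read its configuration; on a systemd-based system such as CentOS 7 (an assumption about the OS):

```shell
systemctl restart sshd
```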
[root@yh01 hadoop-3.1.1]# cd /export/servers
[root@yh01 servers]# pwd
/export/servers
[root@yh01 servers]# scp -r hadoop-3.1.1/ root@yh02:$PWD
[root@yh01 servers]# scp -r hadoop-3.1.1/ root@yh03:$PWD
Edit the workers file: it lists every worker's hostname or IP address, one per line.
Add the Hadoop nodes:

vim hadoop-3.1.1/etc/hadoop/workers

Assuming there are 3 machines:

yh01
yh02
yh03

The local machine's own ssh key must also be in place, so that ssh yh01 works without a password (see the sketch below).
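A heredoc version of the same edit, sketched for the three hosts assumed in this guide:

```shell
# Write the three worker hostnames, one per line
cat > /export/servers/hadoop-3.1.1/etc/hadoop/workers <<'EOF'
yh01
yh02
yh03
EOF
```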
If the workers file is configured, the commands below only need to be run on yh01; everything below the divider can be ignored.

Format the NameNode:

hdfs namenode -format

Start:

$HADOOP_HOME/sbin/start-dfs.sh
$HADOOP_HOME/sbin/start-yarn.sh
$HADOOP_HOME/bin/mapred --daemon start historyserver

Stop:

$HADOOP_HOME/sbin/stop-dfs.sh
$HADOOP_HOME/sbin/stop-yarn.sh
$HADOOP_HOME/bin/mapred --daemon stop historyserver

===========================

On the master node yh01:

$HADOOP_HOME/bin/hdfs --daemon start namenode

On the worker nodes yh02 and yh03:

$HADOOP_HOME/bin/hdfs --daemon start datanode

A successful format prints a line like:

2018-08-24 16:41:04,983 INFO common.Storage: Storage directory /export/data/HADOOP/data/name has been successfully formatted.
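After start-dfs.sh has run, a quick way to confirm that all three DataNodes registered with the NameNode (a verification sketch, not part of the original steps):

```shell
# "Live datanodes (3)" indicates yh01, yh02 and yh03 are all up
hdfs dfsadmin -report | grep -E 'Live datanodes|^Name:'
```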
Start everything:

start-all.sh

Stop everything:

stop-all.sh
[root@yh01 logs]# jps
5633 Jps
4498 ResourceManager
4020 DataNode
5556 JobHistoryServer
3879 NameNode
4247 SecondaryNameNode
4635 NodeManager

[root@yh02 hadoop-3.1.1]# jps
5856 Jps
3969 Bootstrap
5345 NodeManager
5235 DataNode
3919 QuorumPeerMain

[root@yh03 hadoop-3.1.1]# jps
3729 QuorumPeerMain
4820 NodeManager
4711 DataNode
3768 Bootstrap
5050 Jps
Daemon | Web Interface | Notes |
---|---|---|
NameNode | http://nn_host:port/ | Default HTTP port is 9870. |
ResourceManager | http://rm_host:port/ | Default HTTP port is 8088. |
MapReduce JobHistory Server | http://jhs_host:port/ | Default HTTP port is 19888. |
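A quick command-line reachability check of the three web UIs, using the yh01 host from the configuration above and the default ports from the table (a sketch; an HTTP 200 or 302 means the UI is up):

```shell
curl -s -o /dev/null -w '%{http_code}\n' http://yh01:9870/    # NameNode
curl -s -o /dev/null -w '%{http_code}\n' http://yh01:8088/    # ResourceManager
curl -s -o /dev/null -w '%{http_code}\n' http://yh01:19888/   # JobHistory Server
```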