[root@master ~]# tar zxvf sshpass-1.05.tar.gz
[root@master ~]# cd sshpass-1.05
[root@master sshpass-1.05]# ./configure
[root@master sshpass-1.05]# make && make install
三、sshpass命令介紹
sshpass -p [yourpassword] ssh [yourusername]@[host] [yourcommand]
四、消除首次ssh登陸時要求輸入yes確認
在全部機器上修改/etc/ssh/ssh_config文件中設置StrictHostKeyChecking no便可(默認爲 ask )
[root@master ~]# grep "StrictHostKeyChecking" /etc/ssh/ssh_config
StrictHostKeyChecking no
前提條件:
一、全部節點上都要設置相同的密碼
二、在節點上(slave)正確安裝並做了相應配置
用法
一、確保hadoop-prepare.sh腳本具備執行權限
二、根據須要修改hadoop-prepare.sh:
2.1)修改變量ADMIN_PASS: 用戶root的密碼
2.2)修改變量HADOOP_USER: 要在全部節點上建立的用戶的用戶名
2.3)修改變量HADOOP_PASS: 全部節點上建立的用戶的對應密碼
2.4)全部節點的主機名的列表:
2.4.1) 方式A:在變量OTHER_HOSTS中直接給出其餘節點的hostname
2.4.2)方式B:在函數hostname_list_gen()中依據必定規則,自動生成其餘節點的hostname
五、執行hadoop-ssh.sh go完成配置工做
執行hadoop-ssh.sh test驗證配置
測試:
[root@master ~]# ./hadoop-ssh.sh Usage: ./hadoop-ssh.sh {start|test|info} [root@master ~]# ./hadoop-ssh.sh go >>> All hostnames: master slave >>> Distributing system configurations Skipping localhost master /etc/ssh/ssh_config to slave Warning: Permanently added 'slave,192.168.200.102' (RSA) to the list of known hosts. /etc/hosts to slave Warning: Permanently added 'master,192.168.200.101' (RSA) to the list of known hosts. Changing password for user hadoop. passwd: all authentication tokens updated successfully. Changing password for user hadoop. passwd: all authentication tokens updated successfully. >>> Generating SSH key at each host >>> All public keys copied to localhost ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArhzopeVGYNvU3Prt8OgEmvmqS+zNJeX30779YMWaVvOKssl1oRQmPGHoqi/ofi82xKGCLIHTJDmgD79KfS+e/JSkVn24u9blrfby/UquU4LyyTRJ2zDv95DjkdIbB1AjAnYWph/lBF5xRjiJNP3M4HTh1YicZ5B6kN+inDE7j3As27ekHRmYX/9WaX6FKxdcRYIdLw+oVet8IFIc26woM4+csnZc+hS5slb78q0kvyRkI4SVPAoUYHZ95XGN76WoNIgxUis2qVlX+npTma1ByVlfllHY90STDGnbXGWC1XzfxYLGCyjeAqpCLHDLygEDU1CJGxqKBRy++ebxviUQWw== hadoop@master ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAzu0XCFKHnFeS9dmjpHNUETVQedub0G7n8PcaKF+S9HWyMcf3geQcp7avHeYjAtbY6gR8k1U25ZrrKIyC5oVNacb48Zd7xfA09Cbx+ySc0yfkmywrxSLr5AM7GTSD1sTgtYG4gEe7UTIMRwlnLiB0dHlZMYEIs5ZFKRGGaWIcwQTVNtokeaDH6VyNE5zCb0LoAVnhxjIxSql6jwNUqi742Jar6p0l5e9Y685J56jpb6Z2HVQsZRYgQw1ocDnP9FMSb5YgROK5Tl8VzEfjWgrfR7+3RMuC0HG32dXgUlSE+D0qx5jNDhy9b969QBfYzcKWh2RBdzDImY9K7yZTjTBtvw== hadoop@slave >>> Distributing all public keys [root@master ~]# su - hadoop [hadoop@master ~]$ ssh slave Warning: Permanently added 'slave,192.168.200.102' (RSA) to the list of known hosts. [hadoop@master2 ~]$ exit logout Connection to slave closed. [hadoop@slave ~]$ ssh master Warning: Permanently added 'master,192.168.200.101' (RSA) to the list of known hosts. [hadoop@master ~]$ exit logout Connection to master closed.
#!/bin/bash
# by Crushlinux
# 2012-07-22
#
# Prepare passwordless SSH for a Hadoop cluster:
#   - distribute /etc/ssh/ssh_config and /etc/hosts to every node
#   - create HADOOP_USER (with HADOOP_PASS) on every node
#   - generate an RSA key on each node and merge all public keys into a
#     shared authorized_keys that is copied back to every node
#
# Requirements: sshpass installed locally; the same root password on all
# nodes; StrictHostKeyChecking disabled in ssh_config (see tutorial above).
#
# Usage: ./hadoop-ssh.sh {go|test|info}
#   go      (alias: start) perform the full setup
#   test    copy this script to each node and run "subtest" there
#   subtest (internal) ssh to every node as HADOOP_USER and print hostname
#   info    print version

ADMIN_USER="root"
ADMIN_PASS="crushlinux"   # root password, identical on all nodes
HADOOP_USER="hadoop"      # account created on every node
HADOOP_PASS="hadoop"      # password for that account
LOCAL_HOST=$(hostname)
# Explicit slave list; leave empty to fall back to generated gd1XY names.
OTHER_HOSTS="slave1 slave2 slave3"

# Fill HOSTNAME_LIST: local host + OTHER_HOSTS, or generated names
# gd111..gd148 when OTHER_HOSTS is empty.
function hostname_list_gen() {
    if [ -n "$OTHER_HOSTS" ]; then
        HOSTNAME_LIST="$LOCAL_HOST $OTHER_HOSTS"
        return
    fi
    HOSTNAME_LIST=""
    for i in {1..4}; do
        for j in {1..8}; do
            HOSTNAME_LIST="${HOSTNAME_LIST} gd1$i$j"
        done
    done
}

function hostname_list_print() {
    echo ">>> All hostnames:"
    for host in $HOSTNAME_LIST; do
        echo "$host"
    done
}

# Create HADOOP_USER on every node and set its password non-interactively.
function add_user() {
    cmd="useradd $HADOOP_USER; echo '$HADOOP_PASS' | passwd $HADOOP_USER --stdin"
    for host in $HOSTNAME_LIST; do
        sshpass -p "$ADMIN_PASS" ssh "$ADMIN_USER@$host" "$cmd"
    done
}

# Generate a fresh RSA key pair on every node, collect the public keys
# locally, then push the merged authorized_keys back to every node.
function ssh_auth() {
    # truncate the local collection file (original wrote one blank line;
    # sshd ignores blank lines either way)
    : > "$HADOOP_USER-authorized_keys"
    echo ">>> Generating SSH key at each host"
    cmd_rm='rm -f ~/.ssh/id_rsa* ~/.ssh/known_hosts'
    cmd_gen='ssh-keygen -q -N "" -t rsa -f ~/.ssh/id_rsa'
    cmd_cat='cat ~/.ssh/id_rsa.pub'
    for host in $HOSTNAME_LIST; do
        sshpass -p "$HADOOP_PASS" ssh "$HADOOP_USER@$host" "$cmd_rm"
        sshpass -p "$HADOOP_PASS" ssh "$HADOOP_USER@$host" "$cmd_gen"
        sshpass -p "$HADOOP_PASS" ssh "$HADOOP_USER@$host" "$cmd_cat" \
            >> "$HADOOP_USER-authorized_keys"
    done
    echo ">>> All public keys copied to localhost"
    cat "$HADOOP_USER-authorized_keys"
    echo ">>> Distributing all public keys"
    cmd_chmod="chmod 600 /home/$HADOOP_USER/.ssh/authorized_keys"
    for host in $HOSTNAME_LIST; do
        sshpass -p "$HADOOP_PASS" scp "$HADOOP_USER-authorized_keys" \
            "$HADOOP_USER@$host:/home/$HADOOP_USER/.ssh/authorized_keys"
        sshpass -p "$HADOOP_PASS" ssh "$HADOOP_USER@$host" "$cmd_chmod"
    done
}

# Run on a node during "test": ssh (key-based, no password) to every node.
function ssh_subtest() {
    for host in $HOSTNAME_LIST; do
        ssh "$HADOOP_USER@$host" hostname
    done
}

# Copy this script to each node and invoke it there in "subtest" mode.
function ssh_test() {
    echo ">>> Testing SSH authorization for $HADOOP_USER in all nodes"
    cmd="./$0 subtest"
    for host in $HOSTNAME_LIST; do
        echo ">>> Testing SSH authorization at $host"
        sshpass -p "$HADOOP_PASS" scp "./$0" "$HADOOP_USER@$host:~"
        sshpass -p "$HADOOP_PASS" ssh "$HADOOP_USER@$host" "$cmd"
    done
    return
}

HOSTS_CONF="/etc/hosts"
SSH_CONF="/etc/ssh/ssh_config"

# Push ssh_config and /etc/hosts from this host to every other node.
function system_conf() {
    echo ">>> Distributing system configurations"
    for host in $HOSTNAME_LIST; do
        if [ "$host" == "$LOCAL_HOST" ]; then
            echo "Skipping localhost $LOCAL_HOST"
            continue
        fi
        echo "$SSH_CONF to $host"
        sshpass -p "$ADMIN_PASS" scp "$SSH_CONF" "$ADMIN_USER@$host:$SSH_CONF"
        echo "$HOSTS_CONF to $host"
        sshpass -p "$ADMIN_PASS" scp "$HOSTS_CONF" "$ADMIN_USER@$host:$HOSTS_CONF"
    done
}

function print_info() {
    echo "Version: 2011-12-20"
    return
}

case "$1" in
    # Accept both "go" (documented in the tutorial and in the usage string)
    # and "start" (the only verb the original case matched — bug fix).
    go|start)
        hostname_list_gen
        hostname_list_print
        system_conf
        add_user
        ssh_auth
        RETVAL=0
        ;;
    subtest)
        # bug fix: HOSTNAME_LIST was never generated here, so the remote
        # subtest loop iterated an empty list and tested nothing
        hostname_list_gen
        ssh_subtest
        RETVAL=0
        ;;
    test)
        # bug fix: same missing hostname_list_gen as subtest
        hostname_list_gen
        ssh_test
        RETVAL=0
        ;;
    info)
        print_info
        RETVAL=0
        ;;
    *)
        echo $"Usage: $0 {go|test|info}"
        RETVAL=2
esac
exit $RETVAL