Hadoop installation and deployment
//
Host-to-IP mappings used throughout this guide; add them to /etc/hosts on all three nodes:
192.168.85.40 hadoop.master
192.168.85.30 hadoop.slave1
192.168.85.20 hadoop.slave2
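One way to apply these mappings (a sketch; run as root on each of the three nodes):
cat >> /etc/hosts <<'EOF'
192.168.85.40 hadoop.master
192.168.85.30 hadoop.slave1
192.168.85.20 hadoop.slave2
EOF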
//
$ cd ~                                 # go to the hadoop user's home directory
$ ssh-keygen -t rsa -P ""
This generates an SSH key pair: -t selects the key algorithm (rsa or dsa), and -P sets the passphrase; the empty string "" means no passphrase.
$ cd ~/.ssh
$ cat id_rsa.pub >> authorized_keys    # append the public key in id_rsa.pub to authorized_keys
ssh-keygen -t rsa          # run as root on all three nodes; press Enter three times
cd ~/.ssh
cat /root/.ssh/id_rsa.pub >> authorized_keys
# collect every node's public key into the local authorized_keys (each ssh asks for the root password the first time):
ssh hadoop.master cat /root/.ssh/id_rsa.pub >> authorized_keys
ssh hadoop.slave1 cat /root/.ssh/id_rsa.pub >> authorized_keys
ssh hadoop.slave2 cat /root/.ssh/id_rsa.pub >> authorized_keys
# copy each slave's public key to the master (run the first line on hadoop.slave1, the second on hadoop.slave2):
scp id_rsa.pub hadoop.master:/root/.ssh/id_rsa.pub.node1
scp id_rsa.pub hadoop.master:/root/.ssh/id_rsa.pub.node2
# on hadoop.master, merge the slave keys into authorized_keys:
cat id_rsa.pub.node1 >> authorized_keys
cat id_rsa.pub.node2 >> authorized_keys
# distribute the combined authorized_keys back to the slaves:
scp authorized_keys hadoop.slave1:/root/.ssh/
scp authorized_keys hadoop.slave2:/root/.ssh/
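Note: on systems that ship ssh-copy-id, the same key exchange can be done one host pair at a time with a single command, for example:
ssh-copy-id root@hadoop.slave1    # run on hadoop.master; repeat for each pair of hosts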
Fix the permissions on the key files:
sudo chmod 644 ~/.ssh/authorized_keys
sudo chmod 700 ~/.ssh
ssh hadoop.master
ssh hadoop.slave1
ssh hadoop.slave2
At this point any of the three machines can SSH to the other two without a password.
SSH to each host once as root first; the first connection asks you to type "yes" to add the host to known_hosts.
/**************************************************************************/
Passwordless login setup is complete.
Download and unpack the Hadoop distribution
//
cd /home/hadoop/
yum -y install wget
wget http://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-2.6.5/hadoop-2.6.5.tar.gz
tar zxvf hadoop-2.6.5.tar.gz
cd hadoop-2.6.5
//
Edit the configuration files (all under /home/hadoop/hadoop-2.6.5/etc/hadoop):
/**************************************************************************/
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/hadoop-env.sh
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/yarn-env.sh
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/slaves
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/core-site.xml
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/hdfs-site.xml
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/mapred-site.xml
vim /home/hadoop/hadoop-2.6.5/etc/hadoop/yarn-site.xml
Set JAVA_HOME to the JDK install path, /home/hadoop/jdk1.7.0_79, in both of the following files:
vim hadoop-env.sh
vim yarn-env.sh
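In both files, the change here is to point JAVA_HOME at the JDK path above, e.g.:
export JAVA_HOME=/home/hadoop/jdk1.7.0_79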
vim slaves        # one worker (DataNode/NodeManager) hostname per line:
hadoop.slave1
hadoop.slave2
vim core-site.xml
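The original notes leave the XML out; a minimal core-site.xml matching this cluster might look like the following (the port 9000 and the tmp path are assumptions):
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://hadoop.master:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/home/hadoop/hadoop/tmp</value>
  </property>
</configuration>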
vim hdfs-site.xml
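Again only a sketch; dfs.replication of 2 matches the two DataNodes, and the name/data directories under the tmp directory created below are assumptions:
<configuration>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>hadoop.master:50090</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>2</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/home/hadoop/hadoop/tmp/dfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/home/hadoop/hadoop/tmp/dfs/data</value>
  </property>
</configuration>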
cp mapred-site.xml.template mapred-site.xml
vim mapred-site.xml
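A minimal mapred-site.xml just tells MapReduce to run on YARN:
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>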
vim yarn-site.xml
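A minimal yarn-site.xml sketch (the ResourceManager hostname is taken from the cluster layout above):
<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>hadoop.master</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>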
cd /home/hadoop/hadoop    # later steps use /home/hadoop/hadoop; if you kept the hadoop-2.6.5 directory name, rename it or adjust the paths
mkdir tmp                 # temp/data directory referenced by the configuration
//
Copy Hadoop to the slaves
//
scp -r /home/hadoop/hadoop root@hadoop.slave1:/home/hadoop
scp -r /home/hadoop/hadoop root@hadoop.slave2:/home/hadoop
//
Format the NameNode on the master and start the cluster
//
cd /home/hadoop/hadoop
./bin/hdfs namenode -format    # run once, on the master only
./sbin/start-all.sh            # deprecated in 2.x but still works; equivalent to start-dfs.sh followed by start-yarn.sh
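To confirm the daemons are up, jps on each node should roughly show NameNode, SecondaryNameNode and ResourceManager on the master, and DataNode and NodeManager on the slaves:
jps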
/**************************************************************************/
NameNode web UI (HDFS status page):
http://192.168.85.40:50070/dfshealth.html#tab-overview
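The YARN ResourceManager web UI normally listens on port 8088:
http://192.168.85.40:8088/cluster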