Hadoop

Hadoop cluster
Master node: dd
Worker nodes: dd1 dd2
Node added later: dd3
--------------------------------------------------------------------------------
Quick installation

# Passwordless SSH between all nodes
ssh-keygen
ssh-copy-id -i ~/.ssh/id_rsa.pub dd
for ((x=1;x<=3;x++));do scp -r ~/.ssh dd$x:~ ; done
for ((x=1;x<=3;x++));do scp /etc/hosts dd$x:/etc/ ; done
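For reference, /etc/hosts on every node might look like the sketch below; only the dd address appears elsewhere in these notes (in the web UI URLs), the worker IPs are placeholders. The loop at the end is a quick check that passwordless login works.
192.168.55.138 dd
192.168.55.139 dd1   #placeholder IP
192.168.55.140 dd2   #placeholder IP
192.168.55.141 dd3   #placeholder IP
for ((x=1;x<=3;x++));do ssh dd$x hostname ; done   #should print each hostname without asking for a password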
source /etc/profile
# Run the following on every node: dd dd1 dd2 dd3
tar -zxf hadoop-2.9.2.tar.gz -C /home
mv /home/hadoop-2.9.2/ /home/hadoop
mkdir /home/hadoop/tmp
useradd hadoop
echo "" | passwd --stdin hadoop
chown -R hadoop:hadoop /home/hadoop
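The steps above must be repeated on dd1 dd2 dd3; one way to do that from dd (a sketch, assuming hadoop-2.9.2.tar.gz sits in the current directory on dd) is:
for ((x=1;x<=3;x++));do scp hadoop-2.9.2.tar.gz dd$x:/root/ ; done
for ((x=1;x<=3;x++));do ssh dd$x "tar -zxf /root/hadoop-2.9.2.tar.gz -C /home && mv /home/hadoop-2.9.2 /home/hadoop && mkdir /home/hadoop/tmp && useradd hadoop && chown -R hadoop:hadoop /home/hadoop" ; done
#the hadoop user's password still has to be set on each node with passwd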
# Configure environment variables on dd
vi /etc/profile
export JAVA_HOME=/home/jdk
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin:/home/hadoop/bin
source /etc/profile
java -version
hadoop

scp /etc/profile to dd1 dd2 dd3
for ((x=1;x<=3;x++));do scp /etc/profile dd$x:/etc/ ; done
source /etc/profile   #run this on each of dd1 dd2 dd3
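A quick check (a sketch) that the copied profile takes effect on the workers:
for ((x=1;x<=3;x++));do ssh dd$x "source /etc/profile; java -version" ; done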

Edit the files under /home/hadoop/etc/hadoop on dd (see the configuration section after the divider below) and scp them to dd1 and dd2 first, as sketched below.
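One way to push the finished configuration out from dd (a sketch):
for ((x=1;x<=2;x++));do scp -r /home/hadoop/etc/hadoop/* dd$x:/home/hadoop/etc/hadoop/ ; done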

# On dd (master)
/home/hadoop/bin/hdfs namenode -format
/home/hadoop/sbin/start-dfs.sh
/home/hadoop/sbin/start-yarn.sh
/home/hadoop/sbin/stop-dfs.sh
/home/hadoop/sbin/stop-yarn.sh
# Test the web UIs
http://192.168.55.138:50070
http://192.168.55.138:8088
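If no browser is available, the UIs can be probed from the shell instead (a sketch):
curl -s -o /dev/null -w "%{http_code}\n" http://192.168.55.138:50070   #expect 200 from the NameNode UI
curl -s -o /dev/null -w "%{http_code}\n" http://192.168.55.138:8088    #expect 200 or a 30x redirect from the ResourceManager UI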
# Check cluster status
hdfs dfsadmin -report
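Another quick check (a sketch, using the JDK path configured above): list the running Java daemons on each node with jps.
for h in dd dd1 dd2; do echo == $h ==; ssh $h /home/jdk/bin/jps; done
#expect NameNode / SecondaryNameNode / ResourceManager on dd, DataNode / NodeManager on dd1 and dd2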
# Add node dd3
From the master node (after appending dd3 to /home/hadoop/etc/hadoop/slaves): scp -r /home/hadoop/etc/hadoop/* dd3:/home/hadoop/etc/hadoop/
chown -R hadoop:hadoop /home/hadoop   #on dd3
/home/hadoop/sbin/stop-dfs.sh
/home/hadoop/sbin/stop-yarn.sh
/home/hadoop/sbin/start-dfs.sh
/home/hadoop/sbin/start-yarn.sh
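Restarting the whole cluster works, but a lighter alternative (a sketch; run on dd3 as the hadoop user) is to start only the new node's daemons and re-check the report:
/home/hadoop/sbin/hadoop-daemon.sh start datanode
/home/hadoop/sbin/yarn-daemon.sh start nodemanager
hdfs dfsadmin -report   #dd3 should now show up as a live datanode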
--------------------------------------------------------------------------------

# Configure the files under /home/hadoop/etc/hadoop
vi /home/hadoop/etc/hadoop/core-site.xml
<property>
<name>fs.default.name</name>
<value>hdfs://dd:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/home/hadoop/tmp</value>
</property>
<property>
<name>io.file.buffer.size</name>
<value>131072</value>
</property>

vi /home/hadoop/etc/hadoop/hdfs-site.xml
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/home/hadoop/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/home/hadoop/dfs/data</value>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>dd:50090</value>
</property>
<property>
<name>dfs.webhdfs.enabled</name>
<value>true</value>
</property>

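The 2.9.2 distribution normally ships only mapred-site.xml.template, so create mapred-site.xml from it before editing:
cp /home/hadoop/etc/hadoop/mapred-site.xml.template /home/hadoop/etc/hadoop/mapred-site.xml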
vi /home/hadoop/etc/hadoop/mapred-site.xml
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
<final>true</final>
</property>
<property>
<name>mapreduce.jobtracker.http.address</name>
<value>dd:50030</value>
</property>
<property>
<name>mapreduce.jobhistory.address</name>
<value>dd:10020</value>
</property>
<property>
<name>mapreduce.jobhistory.webapp.address</name>
<value>dd:19888</value>
</property>
<property>
<name>mapred.job.tracker</name>
<value>http://dd:9001</value>
</property>

vi /home/hadoop/etc/hadoop/yarn-site.xml
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.resourcemanager.address</name>
<value>dd:8032</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>dd:8030</value>
</property>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>dd:8031</value>
</property>
<property>
<name>yarn.resourcemanager.admin.address</name>
<value>dd:8033</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address</name>
<value>dd:8088</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>dd</value>
</property>
<property>
<name>yarn.nodemanager.resource.memory-mb</name>
<value>2048</value>
</property>

--------------------------------------------------------------------------------
vi /home/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/home/jdk
vi /home/hadoop/etc/hadoop/yarn-env.sh
export JAVA_HOME=/home/jdk
vi /home/hadoop/etc/hadoop/slaves
add the worker node hostnames, one per line: dd1 dd2 (dd3 is appended later when that node is added)
--------------------------------------------------------------------------------
hadoop fs -ls file:/home/hadoop   #list files on the local filesystem
hadoop fs -ls /   #list files in HDFS
hadoop fs -mkdir /input
hadoop fs -put /home/hadoop/dd.txt /input   #upload a file
hadoop fs -ls /input
hadoop fs -cat /input/dd.txt
hadoop fs -cat file:/home/hadoop/dd.txt
hadoop fs -get /input/dd.txt /tmp   #download a file from HDFS
ls /tmp
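To exercise MapReduce end to end, run the bundled wordcount example against the uploaded file (a sketch; the jar path assumes a stock 2.9.2 layout under /home/hadoop, and /output must not exist yet):
hadoop jar /home/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.2.jar wordcount /input /output
hadoop fs -cat /output/part-r-00000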
