node1:
[root@localhost ~]# vi /etc/sysconfig/network
内容:
NETWORKING=yes
HOSTNAME=node1
[root@localhost ~]# vi /etc/hosts
内容:
192.168.112.151 node1
192.168.112.152 node2
192.168.112.153 node3
[root@localhost ~]# reboot
[root@node1 ~]# systemctl status firewalld.service
[root@node1 ~]# systemctl stop firewalld.service
[root@node1 ~]# systemctl disable firewalld.service
[root@node1 ~]# systemctl status firewalld.service
免密
[root@node1 ~]# ssh-keygen -t rsa
[root@node1 ~]# ssh-copy-id node1
[root@node1 ~]# ssh-copy-id node2
[root@node1 ~]# ssh-copy-id node3
[root@node1 ~]# mkdir -p /hadoop/server
[root@node1 ~]# mkdir -p /hadoop/upload
[root@node1 ~]# mkdir -p /hadoop/data
[root@node1 ~]# yum install lrzsz
卸载java(根据自己电脑查出来的删除):
[root@node1 ~]# rpm -qa | grep java
[root@node1 ~]# rpm -e --nodeps java-1.8.0-openjdk-1.8.0.131-11.b12.el7.x86_64 java-1.8.0-openjdk-headless-1.8.0.131-11.b12.el7.x86_64 javapackages-tools-3.4.1-11.el7.noarch python-javapackages-3.4.1-11.el7.noarch tzdata-java-2017b-1.el7.noarch
[root@node1 ~]# rpm -qa | grep java
[root@node1 ~]# mkdir /opt/SoftWare/Java
[root@node1 ~]# cd /opt/SoftWare/Java/
[root@node1 Java]# tar zxvf jdk-8u231-linux-x64.tar\(1\).gz -C /hadoop/server/
[root@node1 Java]# vi /etc/profile
内容:
#java environment
export JAVA_HOME=/hadoop/server/jdk1.8.0_231
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin
[root@node1 Java]# source /etc/profile
[root@node1 Java]# java -version
java version "1.8.0_231"
Java(TM) SE Runtime Environment (build 1.8.0_231-b11)
Java HotSpot(TM) 64-Bit Server VM (build 25.231-b11, mixed mode)
[root@node1 Java]# mkdir /opt/SoftWare/Hadoop
[root@node1 Java]# cd /opt/SoftWare/Hadoop
[root@node1 Hadoop]# tar zxvf hadoop-2.10.0.tar.gz -C /hadoop/server/
[root@node1 Hadoop]# cd /hadoop/server/hadoop-2.10.0/etc/hadoop/
[root@node1 hadoop]# vi hadoop-env.sh
找到JAVA_HOME写成
JAVA_HOME=/hadoop/server/jdk1.8.0_231
[root@node1 hadoop]# vim core-site.xml
内容:
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://node1:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/hadoop/data/hddata</value>
</property>
</configuration>
[root@node1 hadoop]# vim hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>node2:50090</value>
</property>
</configuration>
[root@node1 hadoop]# vim mapred-site.xml
内容:
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
[root@node1 hadoop]# vim yarn-site.xml
内容:
<configuration>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>node1</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
[root@node1 hadoop]# vim slaves
内容:
node1
node2
node3
[root@node1 hadoop]# vim /etc/profile
内容:
#hadoop environment
export HADOOP_HOME=/hadoop/server/hadoop-2.10.0
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
[root@node1 hadoop]# source /etc/profile
复制到其它节点:
[root@node1 hadoop]# scp -r /hadoop/server/ root@node2:/hadoop/
[root@node1 hadoop]# scp -r /hadoop/server/ root@node3:/hadoop/
[root@node1 hadoop]# scp -r /etc/profile root@node2:/etc/profile
[root@node1 hadoop]# scp -r /etc/profile root@node3:/etc/profile
格式化:
[root@node1 hadoop]# hdfs namenode -format
启动:
[root@node1 hadoop]# hadoop-daemon.sh start namenode
[root@node1 hadoop]# yarn-daemon.sh start resourcemanager
[root@node1 hadoop]# hdfs dfs -ls /
[root@node1 hadoop]# hdfs dfs -mkdir /hello
[root@node1 ~]# hdfs dfs -put anaconda-ks.cfg /hello
[root@node1 ~]# hdfs dfs -ls /hello/
[root@node1 ~]# /hadoop/server/hadoop-2.10.0/sbin/start-all.sh
node2:
[root@localhost ~]# vi /etc/sysconfig/network
内容:
NETWORKING=yes
HOSTNAME=node2
[root@localhost ~]# vi /etc/hosts
内容:
192.168.112.151 node1
192.168.112.152 node2
192.168.112.153 node3
[root@localhost ~]# reboot
[root@node2 ~]# systemctl status firewalld.service
[root@node2 ~]# systemctl stop firewalld.service
[root@node2 ~]# systemctl disable firewalld.service
[root@node2 ~]# systemctl status firewalld.service
卸载java(根据自己电脑查出来的删除):
[root@node2 ~]# rpm -qa | grep java
[root@node2 ~]# rpm -e --nodeps java-1.8.0-openjdk-1.8.0.131-11.b12.el7.x86_64 java-1.8.0-openjdk-headless-1.8.0.131-11.b12.el7.x86_64 javapackages-tools-3.4.1-11.el7.noarch python-javapackages-3.4.1-11.el7.noarch tzdata-java-2017b-1.el7.noarch
[root@node2 ~]# rpm -qa | grep java
免密
[root@node2 ~]# ssh-keygen -t rsa
[root@node2 ~]# ssh-copy-id node1
[root@node2 ~]# ssh-copy-id node2
[root@node2 ~]# ssh-copy-id node3
[root@node2 ~]# mkdir -p /hadoop/server
[root@node2 ~]# mkdir -p /hadoop/upload
[root@node2 ~]# mkdir -p /hadoop/data
[root@node2 ~]# yum install lrzsz
[root@node2 ~]# source /etc/profile
[root@node2 ~]# /hadoop/server/hadoop-2.10.0/sbin/start-all.sh
node3:
[root@localhost ~]# vi /etc/sysconfig/network
内容:
NETWORKING=yes
HOSTNAME=node3
[root@localhost ~]# vi /etc/hosts
内容:
192.168.112.151 node1
192.168.112.152 node2
192.168.112.153 node3
[root@localhost ~]# reboot
[root@node3 ~]# systemctl status firewalld.service
[root@node3 ~]# systemctl stop firewalld.service
[root@node3 ~]# systemctl disable firewalld.service
[root@node3 ~]# systemctl status firewalld.service
卸载java(根据自己电脑查出来的删除):
[root@node3 ~]# rpm -qa | grep java
[root@node3 ~]# rpm -e --nodeps java-1.8.0-openjdk-1.8.0.131-11.b12.el7.x86_64 java-1.8.0-openjdk-headless-1.8.0.131-11.b12.el7.x86_64 javapackages-tools-3.4.1-11.el7.noarch python-javapackages-3.4.1-11.el7.noarch tzdata-java-2017b-1.el7.noarch
[root@node3 ~]# rpm -qa | grep java
免密
[root@node3 ~]# ssh-keygen -t rsa
[root@node3 ~]# ssh-copy-id node1
[root@node3 ~]# ssh-copy-id node2
[root@node3 ~]# ssh-copy-id node3
[root@node3 ~]# mkdir -p /hadoop/server
[root@node3 ~]# mkdir -p /hadoop/upload
[root@node3 ~]# mkdir -p /hadoop/data
[root@node3 ~]# yum install lrzsz
[root@node3 ~]# source /etc/profile
[root@node3 ~]# /hadoop/server/hadoop-2.10.0/sbin/start-all.sh