Environment
Huawei Cloud:
Role | Hostname | IP | Daemons
---|---|---|---
master | ecs-8fb7.novalocal | 122.112.207.47 | NameNode, DataNode, ResourceManager, NodeManager
slave1 | ecs-e055.novalocal | 114.115.166.112 | DataNode, NodeManager
slave2 | ecs-fdd1.novalocal | 119.3.21.184 | DataNode, NodeManager
Installation
tar zxvf hadoop-3.0.2.tar.gz
mv hadoop-3.0.2 /usr/local/hadoop
mkdir /usr/local/hadoop/datanode
mkdir /usr/local/hadoop/namenode
mkdir /usr/local/hadoop/tmp
hadoop-env.sh
export JAVA_HOME=/usr/java/latest
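If you are unsure where the JDK lives, one way to derive JAVA_HOME (assuming java is on the PATH, e.g. installed through alternatives) is:
dirname $(dirname $(readlink -f $(which java)))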
core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://master:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/usr/local/hadoop/tmp</value>
  </property>
</configuration>
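After saving the file, you can sanity-check the value Hadoop actually resolves:
hdfs getconf -confKey fs.defaultFS
It should print hdfs://master:9000.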
hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>2</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:///usr/local/hadoop/namenode</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:///usr/local/hadoop/datanode</value>
  </property>
</configuration>
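dfs.replication=2 means each HDFS block is stored on two of the three DataNodes. Once the cluster is running, the effective value and per-node capacity can be checked with:
hdfs getconf -confKey dfs.replication
hdfs dfsadmin -report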
yarn-site.xml
<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>master</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
  <property>
    <name>yarn.nodemanager.vmem-check-enabled</name>
    <value>false</value>
  </property>
</configuration>
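Disabling the virtual-memory check stops YARN from killing containers that exceed the vmem limit, a common workaround on small cloud instances. After YARN is started, confirm that all three NodeManagers registered with the ResourceManager:
yarn node -list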
mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
  <property>
    <name>mapreduce.admin.user.env</name>
    <value>HADOOP_MAPRED_HOME=$HADOOP_COMMON_HOME</value>
  </property>
  <property>
    <name>yarn.app.mapreduce.am.env</name>
    <value>HADOOP_MAPRED_HOME=$HADOOP_COMMON_HOME</value>
  </property>
</configuration>
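The two HADOOP_MAPRED_HOME entries let MapReduce tasks locate the MR jars when running on YARN; without them, Hadoop 3 jobs typically fail with "Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster". If jobs still hit classpath errors, inspect what Hadoop exports:
hadoop classpath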
Set environment variables
cat << 'EOF' >> /root/.bashrc
export HDFS_NAMENODE_USER="root"
export HDFS_DATANODE_USER="root"
export HDFS_SECONDARYNAMENODE_USER="root"
export YARN_RESOURCEMANAGER_USER="root"
export YARN_NODEMANAGER_USER="root"
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
EOF
source /root/.bashrc
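Verify the variables took effect in the current shell:
echo $HADOOP_HOME
hadoop version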
Configure hostname resolution
cat << EOF >> /etc/hosts
122.112.207.47 master ecs-8fb7.novalocal
114.115.166.112 slave1 ecs-e055.novalocal
119.3.21.184 slave2 ecs-fdd1.novalocal
EOF
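A quick resolution check on each node (getent queries the same NSS sources the daemons use):
getent hosts master slave1 slave2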
Configure passwordless SSH login (run on master; the start scripts log in to every worker over SSH)
ssh-keygen -t rsa
ssh-copy-id master
ssh-copy-id slave1
ssh-copy-id slave2
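Verify that each hop works without a password prompt:
for h in master slave1 slave2; do ssh $h hostname; done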
Configure workers (overwrite the file so the default localhost entry does not start a duplicate DataNode)
cat << EOF > /usr/local/hadoop/etc/hadoop/workers
master
slave1
slave2
EOF
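Double-check the result; every hostname in this file gets a DataNode and a NodeManager when the start scripts run:
cat /usr/local/hadoop/etc/hadoop/workers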
Format the NameNode
With the configuration in place on all three nodes (copy /usr/local/hadoop to the slaves if you configured only master), run once on master:
hdfs namenode -format
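A successful format prints "Storage directory /usr/local/hadoop/namenode has been successfully formatted." and populates the metadata directory; reformatting later wipes HDFS metadata, so run it only once:
ls /usr/local/hadoop/namenode/current/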
Start the services (on master)
start-dfs.sh
start-yarn.sh
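Both scripts run on master and start the worker daemons over SSH. The web UIs give a quick health check; note that Hadoop 3 moved the NameNode UI from port 50070 to 9870:
curl -s http://master:9870/ >/dev/null && echo "NameNode UI up"
curl -s http://master:8088/ >/dev/null && echo "ResourceManager UI up"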
Check the daemons on each node
jps
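On master you should see NameNode, SecondaryNameNode, DataNode, ResourceManager, and NodeManager (plus Jps itself); the slaves show only DataNode and NodeManager. As a final smoke test, the example jar shipped in the 3.0.2 tarball exercises HDFS and YARN end to end:
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.0.2.jar pi 2 10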