# 下載
$ cd /usr/local
$ wget http://mirrors.hust.edu.cn/apache/hadoop/common/hadoop-2.9.2/hadoop-2.9.2.tar.gz
# 解壓安裝包
$ tar -zxvf hadoop-2.9.2.tar.gz && rm hadoop-2.9.2.tar.gz
# 編輯環境變量
$ vim ~/.bashrc
# 添加如下兩行
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
# 使環境變量生效
$ source ~/.bashrc
本地SSH工具配置
# 安裝ssh工具
$ sudo apt-get install ssh
$ sudo apt-get install rsync
# 運行失敗, 需先生成並寫入本地的id_rsa.pub到已授權的keys中
$ ssh localhost
# 生成ssh key
$ ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
# 添加本地的pub key到授權keys中
$ cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
# 收緊權限, 僅允許擁有者讀寫(ssh要求authorized_keys不可被他人寫入)
$ chmod 0600 ~/.ssh/authorized_keys
# 再次嘗試可運行成功, 如果不行刪掉~/.ssh/known_hosts重試
$ ssh localhost
啟動HDFS前配置
# etc/hadoop/core-site.xml:
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
# etc/hadoop/hdfs-site.xml:
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
啟動HDFS文件系統
$ bin/hdfs namenode -format
# 啟動hdfs中的NameNode和DataNode, 此時可在host:50070訪問NameNode的Web界面(8088是YARN ResourceManager的端口)
$ start-dfs.sh
# 驗證是否成功可通過jps命令
$ jps
19441 Jps
18946 NameNode
19321 SecondaryNameNode
19084 DataNode
5710 jar
# 關閉hdfs
$ stop-dfs.sh
單節點上的YARN
# etc/hadoop/mapred-site.xml:
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
# etc/hadoop/yarn-site.xml:
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
啟動關閉
$ start-yarn.sh
$ stop-yarn.sh