1. Download and extract
wget http://www-eu.apache.org/dist/hadoop/common/hadoop-2.7.5/hadoop-2.7.5.tar.gz
mkdir -p /usr/local/bigdata              # make sure the target directory exists
tar -xvf hadoop-2.7.5.tar.gz -C /usr/local/bigdata
2. Modify the configuration files. There are five in total, all under etc/hadoop:
cd /usr/local/bigdata/hadoop-2.7.5/etc/hadoop
vim hadoop-env.sh
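hadoop-env.sh usually only needs JAVA_HOME set explicitly; the JDK path below is an assumption, replace it with the one on your machine:
export JAVA_HOME=/usr/local/jdk1.8.0_161   # assumed JDK install path; adjust to your own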
cd /usr/local/bigdata/hadoop-2.7.5/etc/hadoop
vim core-site.xml
<configuration>
  <!-- Address of the HDFS NameNode -->
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.183.131:9000</value>
  </property>
  <!-- Storage directory for data generated at Hadoop runtime
       (the path below is an example; adjust to your install location) -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/usr/local/bigdata/hadoop-2.7.5/tmp</value>
  </property>
</configuration>
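Since hadoop.tmp.dir points at a custom directory in the config above, create it before starting Hadoop (same example path as above):
mkdir -p /usr/local/bigdata/hadoop-2.7.5/tmp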
cd /usr/local/bigdata/hadoop-2.7.5/etc/hadoop
vim hdfs-site.xml
<configuration>
  <!-- Number of replicas (1 is enough for a single-node setup) -->
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
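Optionally, the NameNode and DataNode storage locations can also be pinned inside the same <configuration> block; the paths below are assumptions based on the tmp directory used earlier:
  <!-- optional: explicit NameNode / DataNode storage directories (example paths) -->
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/usr/local/bigdata/hadoop-2.7.5/tmp/dfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/usr/local/bigdata/hadoop-2.7.5/tmp/dfs/data</value>
  </property>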
cd /usr/local/bigdata/hadoop-2.7.5/etc/hadoop
mv mapred-site.xml.template mapred-site.xml   # rename the template
vim mapred-site.xml
<configuration>
  <!-- Run MapReduce jobs on YARN -->
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
cd /usr/local/bigdata/hadoop-2.7.5/etc/hadoop
vim yarn-site.xml
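A minimal yarn-site.xml for a single-node setup usually looks like the following; the ResourceManager is assumed to run on the same host as the NameNode:
<configuration>
  <!-- Let the NodeManager run the MapReduce shuffle service -->
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <!-- ResourceManager host (assumed: same machine as the NameNode) -->
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>192.168.183.131</value>
  </property>
</configuration>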
3. Start Hadoop
Format HDFS (only needs to be done once, before the first start):
cd /usr/local/bigdata/hadoop-2.7.5/bin
./hdfs namenode -format   # or the older form: ./hadoop namenode -format
Start all the daemons:
cd /usr/local/bigdata/hadoop-2.7.5/sbin
./start-all.sh
jps   # check which daemons are running
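If everything started correctly, a single-node setup should show roughly these daemons in the jps output (PIDs omitted):
NameNode
DataNode
SecondaryNameNode
ResourceManager
NodeManager
Jps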
Note: if passwordless SSH login is not configured, the startup scripts will prompt for a password several times; just enter the current user's password each time.
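To avoid those prompts, passwordless SSH to the local machine can be set up beforehand, for example:
ssh-keygen -t rsa        # accept the defaults at each prompt
ssh-copy-id localhost    # or append ~/.ssh/id_rsa.pub to ~/.ssh/authorized_keys
ssh localhost            # should now log in without asking for a password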
4. Access the web UIs
http://192.168.183.131:50070   (HDFS NameNode web UI)
http://192.168.183.131:8088   (YARN ResourceManager web UI)
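As a quick smoke test, a few HDFS commands can be run afterwards (the /input directory below is just an example):
cd /usr/local/bigdata/hadoop-2.7.5/bin
./hdfs dfs -mkdir -p /input
./hdfs dfs -ls /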