Install the x2go remote desktop and MATE
sudo yum install epel-release -y   # x2go lives in EPEL, so enable that repo first
sudo yum install x2goserver -y
sudo yum groupinstall "X Window System" -y
sudo yum groupinstall "MATE Desktop" -y
Download Hadoop 2.7.3 (binary) and Spark 1.6.2
http://hadoop.apache.org/releases.html
http://spark.apache.org/
Alternatively, download them directly with wget:
sudo wget http://apache.mirror.digitalpacific.com.au/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz
sudo wget http://apache.mirror.serversaustralia.com.au/spark/spark-1.6.2/spark-1.6.2-bin-hadoop2.6.tgz
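Optionally verify the downloads against the checksums published on the Apache release pages linked above:
sha256sum hadoop-2.7.3.tar.gz spark-1.6.2-bin-hadoop2.6.tgz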
Extract the archives
tar -zxvf hadoop-2.7.3.tar.gz
tar -zxvf spark-1.6.2-bin-hadoop2.6.tgz
It is best to extract them into a directory you create yourself, for example:
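A minimal sketch matching the HADOOP_INSTALL path used later in this guide (adjust the user and directory to your own setup):
mkdir -p /home/ec2-user/hadoop
tar -zxvf hadoop-2.7.3.tar.gz -C /home/ec2-user/hadoop
tar -zxvf spark-1.6.2-bin-hadoop2.6.tgz -C /home/ec2-user/hadoop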
Set up SSH keys
Press Enter at each prompt to accept the defaults.
ssh-keygen -t rsa -P ""
cat $HOME/.ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys
ssh localhost
If ssh localhost logs you in without prompting for a password, the key setup succeeded.
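If SSH still prompts for a password, the usual culprit is file permissions; sshd ignores keys that are group- or world-readable:
chmod 700 $HOME/.ssh
chmod 600 $HOME/.ssh/authorized_keys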
Set environment variables
Install the JDK first; a tutorial is here:
http://tecadmin.net/install-java-8-on-centos-rhel-and-fedora/
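A rough sketch of the manual install from that tutorial, matching the /opt/jdk1.8.0_101 path assumed in the variables below (the download location of the archive is hypothetical; fetch it per the tutorial first):
cd /opt
sudo tar -zxvf ~/jdk-8u101-linux-x64.tar.gz   # hypothetical download path; unpacks to /opt/jdk1.8.0_101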
Then configure the environment variables:
vim ~/.bashrc   # this is your own file, so sudo is not needed
Add the following to the file:
export JAVA_HOME=/opt/jdk1.8.0_101
export JRE_HOME=/opt/jdk1.8.0_101/jre
export PATH=$PATH:/opt/jdk1.8.0_101/bin:/opt/jdk1.8.0_101/jre/bin
# Change this path to match where you extracted Hadoop
export HADOOP_INSTALL=/home/ec2-user/hadoop/hadoop-2.7.3
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
export PATH=${PATH}:${JAVA_HOME}/bin
export PATH=${PATH}:${JAVA_HOME}/lib
export HADOOP_CLASSPATH=${JAVA_HOME}/lib/tools.jar
alias hls='hadoop fs -ls '
alias hput='hadoop fs -put '
alias hmv='hadoop fs -mv '
alias hmkdir='hadoop fs -mkdir '
alias hcat='hadoop fs -cat '
After saving, remember to reload the configuration:
source ~/.bashrc
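Confirm the variables took effect:
java -version     # should report 1.8.0_101
hadoop version    # should report Hadoop 2.7.3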
Replace the Hadoop configuration files
Copy the etc configuration files provided by Quan into the Hadoop directory.
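In case you need to recreate them, a minimal pseudo-distributed configuration usually looks like the following (the files from Quan may differ); they live in $HADOOP_INSTALL/etc/hadoop/:
cat > $HADOOP_INSTALL/etc/hadoop/core-site.xml <<'EOF'
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>
EOF
cat > $HADOOP_INSTALL/etc/hadoop/hdfs-site.xml <<'EOF'
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
EOF
Hadoop also reads JAVA_HOME from etc/hadoop/hadoop-env.sh; set it there if startup complains that JAVA_HOME is not found.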
Then format the NameNode and start HDFS:
hdfs namenode -format   # "hadoop namenode -format" still works but is deprecated
sbin/start-dfs.sh       # run from $HADOOP_INSTALL; sbin is on PATH, so start-dfs.sh also works
jps                     # on success this lists NameNode, DataNode, and SecondaryNameNode
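A quick smoke test of the running HDFS, using the aliases defined in ~/.bashrc above (the /test path is just an example):
hmkdir /test
hput hadoop-2.7.3.tar.gz /test
hls /test
The NameNode web UI should also be reachable at http://localhost:50070.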