Single-machine (pseudo-distributed) mode
Download hadoop-2.7.3.tar.gz and extract it to /usr/local/hadoop (the HADOOP_HOME path used below); a download sketch follows.
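A minimal sketch of this step, assuming the Apache archive mirror URL and the /usr/local/hadoop target path (both are assumptions, not stated in the original notes):
wget https://archive.apache.org/dist/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz
tar -xzf hadoop-2.7.3.tar.gz
sudo mv hadoop-2.7.3 /usr/local/hadoop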
Install the Java environment
sudo apt install openjdk-8-jdk
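To confirm the JDK is installed and to find the JAVA_HOME path used in the next step, a quick check (the readlink trick assumes the Debian/Ubuntu alternatives layout):
java -version
readlink -f $(which java)    # prints a path under /usr/lib/jvm/java-8-openjdk-amd64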
Set environment variables
vim /etc/profile
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
export CLASSPATH=.:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$CLASSPATH
export HADOOP_HOME=/usr/local/hadoop
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$PATH:$HADOOP_HOME/bin
Save the file, then reload it:
source /etc/profile
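A quick check that the variables took effect (assuming the paths above):
echo $JAVA_HOME
hadoop version    # should report Hadoop 2.7.3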
Edit the Hadoop configuration files
vim /usr/local/hadoop/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
vim /usr/local/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
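If HDFS later fails to start because it cannot find Java, JAVA_HOME can also be set explicitly in the Hadoop environment file (an optional step, not in the original notes):
vim /usr/local/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64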
Set up passwordless SSH login
ssh-keygen -t rsa
ssh-copy-id -i ~/.ssh/id_rsa.pub root@172.16.242.100
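start-dfs.sh also logs in to localhost over SSH, so it is worth confirming that key-based login works there as well (a quick check; copying the key to localhost is only needed if 172.16.242.100 is not this machine's own address):
ssh-copy-id -i ~/.ssh/id_rsa.pub root@localhost
ssh localhost    # should log in without a password prompt
exit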
Format HDFS
/usr/local/hadoop/bin/hdfs namenode -format
Start the DFS daemons for a test run
/usr/local/hadoop/sbin/start-dfs.sh
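After the script finishes, jps (shipped with the JDK) serves as a sanity check; in this pseudo-distributed setup a NameNode, a DataNode, and a SecondaryNameNode process should be listed:
jps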
Test commands (run from /usr/local/hadoop, since the paths below are relative to it)
hdfs dfs -mkdir -p input
hdfs dfs -put etc/hadoop/*.xml input
hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.jar grep input output 'dfs'
mkdir -p /data/hadoop
hdfs dfs -get output /data/hadoop
cat /data/hadoop/output/part-r-00000
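The result can also be viewed directly on HDFS, and the daemons stopped when finished (a usual wrap-up, not part of the original notes):
hdfs dfs -cat output/part-r-00000
/usr/local/hadoop/sbin/stop-dfs.sh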