Create the project in IDEA with Maven, and configure the HDFS connection (NameNode host and port) used below.
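A minimal pom.xml dependency sketch (the hadoop-client version here is an assumption; use whatever matches your cluster, plus JUnit 4.x for the @Test/@Before/@After annotations):

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.0</version><!-- assumed version, match the cluster -->
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>test</scope>
    </dependency>
</dependencies>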
package hadoop.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
public class HDFSApp {

    public static final String HDFS_PATH = "hdfs://hadoop:8020";

    FileSystem fileSystem = null;
    Configuration configuration = null;
    /**
     * Create an HDFS directory (01)
     */
    @Test
    public void mkdir() throws Exception {
        // mkdirs creates any missing parent directories, like `hdfs dfs -mkdir -p`
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }
    /**
     * Create a file and write to it (02)
     */
    @Test
    public void create() throws Exception {
        FSDataOutputStream output = fileSystem.create(new Path("/hdfsapi/test/a.txt"));
        output.write("hello hadoop".getBytes());
        output.flush();
        output.close();
    }
    /**
     * Print the content of an HDFS file (03)
     */
    @Test
    public void cat() throws Exception {
        FSDataInputStream in = fileSystem.open(new Path("/hdfsapi/test/a.txt"));
        IOUtils.copyBytes(in, System.out, 1024); // copy to stdout with a 1024-byte buffer
        in.close();
    }
    /**
     * Rename a file (04)
     */
    @Test
    public void rename() throws Exception {
        Path oldPath = new Path("/hdfsapi/test/a.txt");
        Path newPath = new Path("/hdfsapi/test/b.txt");
        fileSystem.rename(oldPath, newPath);
    }
    /**
     * Upload a local file to HDFS (05)
     */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path localPath = new Path("H:/hh.txt");
        Path hdfsPath = new Path("/hdfsapi/test");
        fileSystem.copyFromLocalFile(localPath, hdfsPath);
    }
    /**
     * Upload a large file to HDFS with a progress callback (06)
     */
    @Test
    public void copyFromLocalFilePlus() throws Exception {
        InputStream in = new BufferedInputStream(new FileInputStream(new File("H:/atom.zip")));
        FSDataOutputStream output = fileSystem.create(new Path("/hdfsapi/test/haha.tgz"), new Progressable() {
            public void progress() {
                System.out.print("."); // progress indicator
            }
        });
        IOUtils.copyBytes(in, output, 4096);
        // this copyBytes overload does not close the streams, so close them explicitly
        IOUtils.closeStream(output);
        IOUtils.closeStream(in);
    }
    /**
     * Download an HDFS file to the local disk
     */
    @Test
    public void copyToLocalFile() throws Exception {
        Path localPath = new Path("H:/za/kong.txt");
        Path hdfsPath = new Path("/hdfsapi/test/hh.txt");
        // delSrc = false keeps the source on HDFS; useRawLocalFileSystem = true skips writing a local .crc checksum file
        fileSystem.copyToLocalFile(false, hdfsPath, localPath, true);
    }
    /**
     * List all files under a directory
     */
    @Test
    public void listFiles() throws Exception {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/hdfsapi/test"));
        for (FileStatus fileStatus : fileStatuses) {
            String isDir = fileStatus.isDirectory() ? "directory" : "file";
            short replication = fileStatus.getReplication();
            long len = fileStatus.getLen();
            String path = fileStatus.getPath().toString();
            System.out.println(isDir + "\t" + replication + "\t" + len + "\t" + path);
        }
    }
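    /**
     * A sketch that is not part of the original listing: listStatus above only returns the
     * direct children of a directory. To also walk sub-directories, the
     * listFiles(Path, boolean) API returns a RemoteIterator over every file recursively.
     */
    @Test
    public void listFilesRecursively() throws Exception {
        // true = recurse into sub-directories; only files (not directories) are returned
        RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(new Path("/hdfsapi/test"), true);
        while (files.hasNext()) {
            LocatedFileStatus status = files.next();
            System.out.println(status.getPath() + "\t" + status.getLen());
        }
    }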
    /**
     * Delete a path
     * @throws Exception
     */
    @Test
    public void delete() throws Exception {
        // the second argument enables recursive deletion, required for non-empty directories
        fileSystem.delete(new Path("/hdfsapi/test"), true);
    }
    @Before
    public void setUp() throws Exception {
        System.out.println("HDFSApp.setUp");
        configuration = new Configuration();
        // connect to the NameNode as the remote user "hadoop"
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");
    }
    @After
    public void tearDown() throws Exception {
        // release the connection to the cluster after each test
        fileSystem.close();
        configuration = null;
        fileSystem = null;
        System.out.println("\nHDFSApp.tearDown");
    }
}
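The tests can be run one at a time from IDEA, or from the command line with Maven Surefire, e.g. mvn -Dtest=HDFSApp test. Note that the hostname hadoop in HDFS_PATH must resolve to the NameNode (for example via a hosts-file entry), and 8020 is assumed to be the NameNode RPC port configured in fs.defaultFS on the cluster.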