CompressionCodec有两个方法可以用于轻松地压缩或解压缩数据。要想对正在被写入一个输出流的数据进行压缩,我们可以使用createOutputStream(OutputStream out)方法创建一个CompressionOutputStream,将其以压缩格式写入底层的流。相反,要想对从输入流读取而来的数据进行解压缩,则调用createInputStream(InputStream in)方法,获得一个CompressionInputStream,进而从底层的流读取未压缩的数据。
测试一下如下压缩方式
压缩方式 | Codec 实现类 |
---|---|
DEFLATE | org.apache.hadoop.io.compress.DefaultCodec |
gzip | org.apache.hadoop.io.compress.GzipCodec |
bzip2 | org.apache.hadoop.io.compress.BZip2Codec |
LZ4 | org.apache.hadoop.io.compress.Lz4Codec |
1、压缩
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.util.ReflectionUtils;
/**
 * Demonstrates compressing a local file with a Hadoop {@link CompressionCodec}
 * chosen at runtime by its fully-qualified class name.
 */
public class TestComporess {
    public static void main(String[] args) throws Exception {
        compress("e:/log.txt", "org.apache.hadoop.io.compress.BZip2Codec");
        // compress("e:/log.txt", "org.apache.hadoop.io.compress.GzipCodec");
        // compress("e:/log.txt", "org.apache.hadoop.io.compress.Lz4Codec");
        // compress("e:/test.txt", "org.apache.hadoop.io.compress.DefaultCodec");
    }

    /**
     * Compresses {@code filename} with the codec named by {@code method},
     * writing the result to {@code filename + codec.getDefaultExtension()}
     * (e.g. {@code log.txt.bz2} for BZip2Codec).
     *
     * @param filename path of the file to compress
     * @param method   fully-qualified class name of the codec to use
     * @throws ClassNotFoundException if the codec class is not on the classpath
     * @throws IOException            if reading or writing fails
     */
    private static void compress(String filename, String method) throws ClassNotFoundException, IOException {
        // Instantiate the codec reflectively so the caller can pick any codec by name.
        // Class<?> instead of the raw Class type avoids an unchecked-usage warning.
        Class<?> codecClass = Class.forName(method);
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, new Configuration());
        // try-with-resources guarantees every stream is closed even if the copy throws.
        // Closing the CompressionOutputStream finishes the compressed stream and also
        // closes the wrapped FileOutputStream, so no separate fos.close() is needed.
        try (FileInputStream fis = new FileInputStream(new File(filename));
             FileOutputStream fos = new FileOutputStream(new File(filename + codec.getDefaultExtension()));
             CompressionOutputStream cos = codec.createOutputStream(fos)) {
            // Copy in 5 MB buffers; 'false' = do not close the streams inside copyBytes.
            IOUtils.copyBytes(fis, cos, 1024 * 1024 * 5, false);
        }
    }
}
2、解压缩
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;
/**
 * Demonstrates decompressing a local file, letting Hadoop's
 * {@link CompressionCodecFactory} infer the codec from the file extension.
 */
public class TestDecompress {
    public static void main(String[] args) throws Exception {
        decompress("e:/log.txt.gz");
    }

    /**
     * Decompresses {@code filename} to {@code filename + ".decode"}.
     * If no codec matches the file's extension, prints a message and returns.
     *
     * @param filename path of the compressed file (extension selects the codec)
     * @throws FileNotFoundException if the input file does not exist
     * @throws IOException           if reading or writing fails
     */
    private static void decompress(String filename) throws FileNotFoundException, IOException {
        // Resolve the codec from the file extension (.gz -> GzipCodec, .bz2 -> BZip2Codec, ...).
        CompressionCodecFactory factory = new CompressionCodecFactory(new Configuration());
        CompressionCodec codec = factory.getCodec(new Path(filename));
        if (codec == null) {
            // Fix: the original printed the codec reference here, which is always
            // null in this branch ("not find null"); report the filename instead.
            System.out.println("not find codec for " + filename);
            return;
        }
        // try-with-resources guarantees both streams are closed even if the copy throws.
        try (CompressionInputStream cis = codec.createInputStream(new FileInputStream(filename));
             FileOutputStream fos = new FileOutputStream(new File(filename + ".decode"))) {
            // Copy in 5 MB buffers; 'false' = do not close the streams inside copyBytes.
            IOUtils.copyBytes(cis, fos, 1024 * 1024 * 5, false);
        }
    }
}