Posted by janneyabc on 2017-12-17 23:46:47

Hadoop archiving, compression, serialization, and sequence files

Timing results (ms): lz4 = 70, lzo = xx, Gzip = 132, deflate = 234, default = 437, bzip = 1733, i.e. lz4 < lzo < Gzip < deflate < default < bzip
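These appear to be the elapsed times printed by the test below, run against d:/codec/hadoop.pdf. Keep in mind that Snappy and LZ4 depend on the Hadoop native libraries and that LZO ships separately as hadoop-lzo, so on a bare installation some codecs may not be usable; which native codecs are loaded can be checked with the hadoop checknative -a command.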
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;

import java.io.FileInputStream;
import java.io.FileOutputStream;

public class CompressCodecTest {

    /**
     * Test the compression codecs.
     */
    @Test
    public void testAllCompress() throws Exception {
        Class[] allClass = {
            DefaultCodec.class,
            DeflateCodec.class,
            BZip2Codec.class,
            GzipCodec.class,
            Lz4Codec.class,
            SnappyCodec.class
        };
        for (Class clazz : allClass) {
            testCompress(clazz);    // compress first so the decompress step has input
            testDecompress(clazz);
        }
    }

    /**
     * Compress d:/codec/hadoop.pdf with the given codec and print the elapsed time.
     */
    public void testCompress(Class codecClass) throws Exception {
        Configuration conf = new Configuration();
        // compression codec
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
        // default extension of this codec, e.g. ".gz"
        String ext = codec.getDefaultExtension();

        long start = System.currentTimeMillis();
        FileOutputStream fos = new FileOutputStream("d:/codec/hadoop.pdf" + ext);
        CompressionOutputStream cos = codec.createOutputStream(fos);
        IOUtils.copyBytes(new FileInputStream("d:/codec/hadoop.pdf"), cos, 1024);
        IOUtils.closeStream(cos);
        System.out.println(codecClass + " : " + (System.currentTimeMillis() - start));
    }

    /**
     * Decompress the file produced by testCompress and print the elapsed time.
     */
    public void testDecompress(Class codecClass) throws Exception {
        Configuration conf = new Configuration();
        // compression codec
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
        // default extension of this codec
        String ext = codec.getDefaultExtension();

        long start = System.currentTimeMillis();
        FileInputStream fis = new FileInputStream("d:/codec/hadoop.pdf" + ext);
        // decompression stream
        CompressionInputStream cis = codec.createInputStream(fis);
        FileOutputStream fos = new FileOutputStream("d:/codec/tmp.pdf");
        IOUtils.copyBytes(cis, fos, 1024);
        IOUtils.closeStream(cis);
        IOUtils.closeStream(fos);
        System.out.println(codecClass + " : " + (System.currentTimeMillis() - start));
    }
}
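The title also mentions archiving and sequence files, which the codec test above does not cover. For archiving, small files are normally packed into a HAR with the hadoop archive command (for example: hadoop archive -archiveName files.har -p /user/input /user/output, which runs as a MapReduce job). Below is a minimal sketch, not from the original post, of writing and reading a compressed SequenceFile; the d:/codec/demo.seq file name and the key/value data are made up for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class SequenceFileDemo {

    // write key/value pairs into a block-compressed sequence file
    public void testWriteSeq() throws Exception {
        Configuration conf = new Configuration();
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(GzipCodec.class, conf);
        SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(new Path("d:/codec/demo.seq")),   // illustrative file name
                SequenceFile.Writer.keyClass(IntWritable.class),
                SequenceFile.Writer.valueClass(Text.class),
                SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, codec));
        for (int i = 0; i < 100; i++) {
            writer.append(new IntWritable(i), new Text("tom" + i));
        }
        writer.close();
    }

    // read the pairs back; the codec is taken from the file header
    public void testReadSeq() throws Exception {
        Configuration conf = new Configuration();
        SequenceFile.Reader reader = new SequenceFile.Reader(conf,
                SequenceFile.Reader.file(new Path("d:/codec/demo.seq")));
        IntWritable key = new IntWritable();
        Text value = new Text();
        while (reader.next(key, value)) {
            System.out.println(key.get() + " : " + value);
        }
        reader.close();
    }
}

BLOCK compression compresses batches of records at once and usually gives a better ratio than RECORD compression; when reading, no codec has to be specified because it is recorded in the file header.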