/**
 * Returns the shared bzip2 {@code CompressionCodec}, building it lazily on first use.
 * Uses double-checked locking so the codec is constructed at most once.
 *
 * NOTE(review): double-checked locking is only safe if {@code bzipCodec} is declared
 * {@code volatile} — the field declaration is not visible in this chunk; confirm.
 */
@Override
CompressionCodec getCodec(Configuration conf) {
  if (bzipCodec == null) {
    synchronized (lock) {
      // Re-check under the lock: another thread may have built it first.
      if (bzipCodec == null) {
        bzipCodec = buildCodec(conf);
      }
    }
  }
  return bzipCodec;
}
/**
 * Resolves an algorithm name to its {@link Compression.Algorithm}.
 *
 * @param algoName name of the compression algorithm; may be null
 * @return the matching algorithm, or {@code HFile.DEFAULT_COMPRESSION_ALGORITHM}
 *         when {@code algoName} is null
 */
public static Compression.Algorithm compressionByName(String algoName) {
  return algoName == null
      ? HFile.DEFAULT_COMPRESSION_ALGORITHM
      : Compression.getCompressionAlgorithmByName(algoName);
}
/**
 * Returns the shared Zstandard {@code CompressionCodec}, lazily built on first use.
 * Double-checked locking ensures a single construction; the local variable avoids
 * re-reading the field after initialization.
 */
@Override
CompressionCodec getCodec(Configuration conf) {
  CompressionCodec result = zStandardCodec;
  if (result == null) {
    synchronized (lock) {
      result = zStandardCodec;
      if (result == null) {
        result = buildCodec(conf);
        zStandardCodec = result;
      }
    }
  }
  return result;
}
/**
 * Applies the named compression algorithm to the column descriptor.
 * The name is resolved via {@code Compression.Algorithm.valueOf}.
 */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  Compression.Algorithm parsed = Compression.Algorithm.valueOf(algo);
  cd.setCompressionType(parsed);
}
/** * @param compressionAlgorithmName What kind of algorithm should be used * as baseline for comparison (e.g. lzo, gz). */ public DataBlockEncodingTool(String compressionAlgorithmName) { this.compressionAlgorithmName = compressionAlgorithmName; this.compressionAlgorithm = Compression.getCompressionAlgorithmByName( compressionAlgorithmName); this.compressor = this.compressionAlgorithm.getCompressor(); this.decompressor = this.compressionAlgorithm.getDecompressor(); }
/**
 * Wraps a downstream in a buffered, compressing output stream.
 *
 * @param downStream the stream compressed bytes are written to
 * @param compressor compressor instance to use
 * @param downStreamBufferSize if positive, buffer the downstream with this size
 * @return a buffered stream whose flush finishes the compression block
 * @throws IOException if the compression stream cannot be created
 */
public OutputStream createCompressionStream(
    OutputStream downStream, Compressor compressor, int downStreamBufferSize)
    throws IOException {
  // Optionally buffer the raw downstream before compression.
  OutputStream sink = downStreamBufferSize > 0
      ? new BufferedOutputStream(downStream, downStreamBufferSize)
      : downStream;
  CompressionOutputStream cos = createPlainCompressionStream(sink, compressor);
  // FinishOnFlushCompressionStream makes flush() finish the current block.
  return new BufferedOutputStream(
      new FinishOnFlushCompressionStream(cos), DATA_OBUF_SIZE);
}
/**
 * Creates a compression output stream, preferring the native zlib
 * implementation when it is loaded and falling back to the reusable
 * pure-Java GZIP stream otherwise.
 */
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
    throws IOException {
  return ZlibFactory.isNativeZlibLoaded(getConf())
      ? super.createOutputStream(out)
      : new ReusableGzipOutputStream(out);
}
/**
 * Releases the compressor this writer uses to compress blocks back into
 * the compressor pool. Safe to call more than once: the field is nulled
 * after the first release.
 */
@Override
public void close() {
  if (compressor == null) {
    return;
  }
  this.fileContext.getCompression().returnCompressor(compressor);
  compressor = null;
}
/**
 * Sets the compression type for this column family. LZO is not bundled
 * with the hbase distribution; see
 * <a href="http://wiki.apache.org/hadoop/UsingLzoCompression">LZO
 * Compression</a> for how to enable it.
 *
 * @param type Compression type setting.
 * @return this (for chained invocation)
 */
public ModifyableColumnFamilyDescriptor setCompressionType(Compression.Algorithm type) {
  // Stored by enum constant name.
  String algoName = type.name();
  return setValue(COMPRESSION_BYTES, algoName);
}
/**
 * Returns the shared LZO {@code CompressionCodec}, lazily built on first use.
 * Double-checked locking guarantees a single construction; the local copy
 * avoids a second field read once initialized.
 */
@Override
CompressionCodec getCodec(Configuration conf) {
  CompressionCodec result = lzoCodec;
  if (result == null) {
    synchronized (lock) {
      result = lzoCodec;
      if (result == null) {
        result = buildCodec(conf);
        lzoCodec = result;
      }
    }
  }
  return result;
}
/**
 * Returns the shared {@code DefaultCodec}, building it lazily on first use.
 * Uses double-checked locking so the codec is constructed at most once.
 *
 * NOTE(review): this pattern is only safe if {@code codec} is declared
 * {@code volatile} — the field declaration is not visible in this chunk; confirm.
 */
@Override
DefaultCodec getCodec(Configuration conf) {
  if (codec == null) {
    synchronized (lock) {
      // Re-check under the lock: another thread may have built it first.
      if (codec == null) {
        codec = buildCodec(conf);
      }
    }
  }
  return codec;
}
/**
 * Returns the shared LZ4 {@code CompressionCodec}, lazily built on first use.
 * Double-checked locking guarantees a single construction; the local copy
 * avoids a second field read once initialized.
 */
@Override
CompressionCodec getCodec(Configuration conf) {
  CompressionCodec result = lz4Codec;
  if (result == null) {
    synchronized (lock) {
      result = lz4Codec;
      if (result == null) {
        result = buildCodec(conf);
        lz4Codec = result;
      }
    }
  }
  return result;
}
/**
 * Resets the compression state by delegating to the resettable GZIP stream
 * this filter stream wraps.
 */
@Override
public void resetState() throws IOException {
  ResetableGZIPOutputStream gzOut = (ResetableGZIPOutputStream) out;
  gzOut.resetState();
}
}
/**
 * Writes a 32-bit int into the buffer as two little-endian 16-bit halves.
 * Re-implemented here because the corresponding JDK method is not visible.
 *
 * @param value the int to write
 * @param buf destination buffer
 * @param offset index of the first byte to write
 */
private void writeInt(int value, byte[] buf, int offset) throws IOException {
  writeShort(value & 0xFFFF, buf, offset);
  // Unsigned shift; the mask makes the result identical to a signed shift here.
  writeShort((value >>> 16) & 0xFFFF, buf, offset + 2);
}
/**
 * Returns the shared Snappy {@code CompressionCodec}, lazily built on first use.
 * Double-checked locking guarantees a single construction; the local copy
 * avoids a second field read once initialized.
 */
@Override
CompressionCodec getCodec(Configuration conf) {
  CompressionCodec result = snappyCodec;
  if (result == null) {
    synchronized (lock) {
      result = snappyCodec;
      if (result == null) {
        result = buildCodec(conf);
        snappyCodec = result;
      }
    }
  }
  return result;
}
/**
 * Wraps the given stream in a {@code ResetableGZIPOutputStream} so the
 * underlying deflater state can be reset and the stream reused.
 *
 * @param out the stream compressed output is written to
 * @throws IOException if the GZIP stream cannot be created
 */
public ReusableGzipOutputStream(OutputStream out) throws IOException {
  super(new ResetableGZIPOutputStream(out));
}
/**
 * Get names of supported compression algorithms. The names are acceptable by
 * HFile.Writer.
 *
 * NOTE(review): the set of names is whatever {@code Compression.getSupportedAlgorithms()}
 * returns; the examples below ("none", "gz") may be a stale subset — verify against
 * that method.
 *
 * @return Array of strings, each represents a supported compression
 *         algorithm, e.g.:
 *         <ul>
 *         <li>"none" - No compression.
 *         <li>"gz" - GZIP compression.
 *         </ul>
 */
public static String[] getSupportedCompressionAlgorithms() {
  return Compression.getSupportedAlgorithms();
}
/**
 * Applies the named compression algorithm to the column descriptor.
 *
 * NOTE(review): {@code Enum.valueOf} requires the exact enum constant name and
 * throws {@code IllegalArgumentException} otherwise — presumably callers pass
 * the canonical constant spelling; confirm against call sites.
 */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  cd.setCompressionType(Compression.Algorithm.valueOf(algo));
}
/**
 * Sets the compression type used for compactions on this column family.
 * LZO is not bundled with the hbase distribution; see
 * <a href="http://wiki.apache.org/hadoop/UsingLzoCompression">LZO
 * Compression</a> for how to enable it.
 *
 * @param type Compression type setting.
 * @return this (for chained invocation)
 */
public ModifyableColumnFamilyDescriptor setCompactionCompressionType(
    Compression.Algorithm type) {
  // Stored by enum constant name under the compaction-specific key.
  return setValue(COMPRESSION_COMPACT_BYTES, type.name());
}
/**
 * Sets the descriptor's compression type from its algorithm name.
 * The name is resolved with {@code Compression.Algorithm.valueOf}.
 */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  final Compression.Algorithm algorithm = Compression.Algorithm.valueOf(algo);
  cd.setCompressionType(algorithm);
}