/**
 * {@inheritDoc}
 */
@Override
public LZFChunk call() {
    if (data != null) {
        LZFChunk lzfChunk = ENCODER.get().encodeChunk(data, offset, length);
        // input data is fully processed, we can now discard it
        blockManager.releaseBlockToPool(data);
        return lzfChunk;
    } else {
        // cleanup time!
        ENCODER.remove();
        return null;
    }
}
/**
 * Alternate version that accepts pre-allocated output buffer.
 */
public static int appendEncoded(ChunkEncoder enc, byte[] input, int inputPtr, int inputLength,
        byte[] outputBuffer, int outputPtr) {
    int left = inputLength;
    int chunkLen = Math.min(LZFChunk.MAX_CHUNK_LEN, left);
    outputPtr = enc.appendEncodedChunk(input, inputPtr, chunkLen, outputBuffer, outputPtr);
    left -= chunkLen;
    // shortcut: if it all fit in, no need to coalesce:
    if (left < 1) {
        return outputPtr;
    }
    // otherwise need to keep on encoding...
    inputPtr += chunkLen;
    do {
        chunkLen = Math.min(left, LZFChunk.MAX_CHUNK_LEN);
        outputPtr = enc.appendEncodedChunk(input, inputPtr, chunkLen, outputBuffer, outputPtr);
        inputPtr += chunkLen;
        left -= chunkLen;
    } while (left > 0);
    return outputPtr;
}
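For callers managing their own buffers, usage might look like the following sketch. This is hedged: it assumes the usual compress-lzf helpers ChunkEncoderFactory.optimalInstance() and LZFEncoder.estimateMaxWorkspaceSize() are available in this version of the library, and that appendEncoded above lives in LZFEncoder.

import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.LZFEncoder;
import com.ning.compress.lzf.util.ChunkEncoderFactory;

public class AppendEncodedExample {
    public static void main(String[] args) {
        byte[] input = new byte[256 * 1024]; // placeholder payload
        // Worst-case output size, so the pre-allocated buffer is always large enough
        byte[] output = new byte[LZFEncoder.estimateMaxWorkspaceSize(input.length)];
        ChunkEncoder enc = ChunkEncoderFactory.optimalInstance(input.length);
        try {
            int end = LZFEncoder.appendEncoded(enc, input, 0, input.length, output, 0);
            System.out.println("Encoded " + input.length + " bytes into " + end + " bytes");
        } finally {
            enc.close(); // returns recycled buffers to the pool
        }
    }
}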
/**
 * Alternate constructor used when we want to avoid allocating an encoding
 * buffer, in cases where the caller wants full control over allocations.
 */
protected ChunkEncoder(int totalLength, BufferRecycler bufferRecycler, boolean bogus) {
    int largestChunkLen = Math.max(totalLength, LZFChunk.MAX_CHUNK_LEN);
    int suggestedHashLen = calcHashLen(largestChunkLen);
    _recycler = bufferRecycler;
    _hashTable = bufferRecycler.allocEncodingHash(suggestedHashLen);
    _hashModulo = _hashTable.length - 1;
    _encodeBuffer = null;
}
/**
 * Method that will use the "safe" {@link ChunkEncoder}, as produced by
 * {@link ChunkEncoderFactory#safeInstance}, for encoding. "Safe" here
 * means that the encoder does not rely on any features beyond the core JDK
 * (such as sun.misc.Unsafe).
 */
public static byte[] safeEncode(byte[] data, int offset, int length) {
    ChunkEncoder enc = ChunkEncoderFactory.safeInstance(length);
    byte[] result = encode(enc, data, offset, length);
    enc.close();
    return result;
}
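A minimal round-trip sketch, assuming this is compress-lzf's LZFEncoder.safeEncode and that LZFDecoder.safeDecode is available as the matching pure-JDK decode path:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import com.ning.compress.lzf.LZFDecoder;
import com.ning.compress.lzf.LZFEncoder;

public class SafeEncodeExample {
    public static void main(String[] args) throws Exception {
        byte[] original = "hello, hello, hello LZF".getBytes(StandardCharsets.UTF_8);
        // Encode using the pure-JDK ("safe") chunk encoder
        byte[] compressed = LZFEncoder.safeEncode(original, 0, original.length);
        // Decode and verify the round trip
        byte[] restored = LZFDecoder.safeDecode(compressed);
        System.out.println(Arrays.equals(original, restored)); // expected: true
    }
}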
/**
 * Method for encoding individual chunk, writing it to given output stream,
 * if (and only if!) it compresses enough.
 *
 * @return True if compression occurred and chunk was written; false if not.
 */
public boolean encodeAndWriteChunkIfCompresses(byte[] data, int offset, int inputLen,
        OutputStream out, double resultRatio) throws IOException {
    if (inputLen >= MIN_BLOCK_TO_COMPRESS) {
        int compEnd = tryCompress(data, offset, offset + inputLen, _encodeBuffer, LZFChunk.HEADER_LEN_COMPRESSED);
        final int maxSize = (int) (resultRatio * inputLen + LZFChunk.HEADER_LEN_COMPRESSED + 0.5);
        if (compEnd <= maxSize) {
            // yes, down to small enough
            LZFChunk.appendCompressedHeader(inputLen, compEnd - LZFChunk.HEADER_LEN_COMPRESSED, _encodeBuffer, 0);
            out.write(_encodeBuffer, 0, compEnd);
            return true;
        }
    }
    return false;
}
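One way a caller might use this conditional write is sketched below. The fallback to encodeAndWriteChunk (which always emits a chunk, compressed or not) and the 0.95 ratio are assumptions, not taken from the snippet above:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import com.ning.compress.lzf.ChunkEncoder;

public class ConditionalWriteExample {
    // Writes 'block' as a compressed chunk only if it shrinks to ~95% or less;
    // otherwise falls back to encodeAndWriteChunk, which writes the chunk either way.
    static void writeBlock(ChunkEncoder enc, byte[] block, ByteArrayOutputStream out) throws IOException {
        if (!enc.encodeAndWriteChunkIfCompresses(block, 0, block.length, out, 0.95)) {
            enc.encodeAndWriteChunk(block, 0, block.length, out);
        }
    }
}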
public LZFFileOutputStream(ChunkEncoder encoder, FileDescriptor fdObj) {
    this(encoder, fdObj, encoder.getBufferRecycler());
}
/**
 * Compress and write the current block to the OutputStream
 */
protected void writeCompressedBlock() throws IOException {
    int left = _position;
    _position = 0;
    int offset = 0;
    while (left > 0) {
        int chunkLen = Math.min(LZFChunk.MAX_CHUNK_LEN, left);
        _encoder.encodeAndWriteChunk(_outputBuffer, offset, chunkLen, out);
        offset += chunkLen;
        left -= chunkLen;
    }
}
@Override
public ChunkEncoder get() {
    log.info("Allocating new chunkEncoder[%,d]", counter.incrementAndGet());
    return new ChunkEncoder(0xFFFF);
}
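This get() looks like an override of a Supplier<ChunkEncoder> used to create encoders lazily. A minimal sketch of one plausible wiring follows; the java.util.function.Supplier interface, the ThreadLocal holder, and the logging via System.out are assumptions, since the enclosing class is not shown. The 0xFFFF length and the ENCODER.get()/remove() pattern come from the snippets above.

import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;

import com.ning.compress.lzf.ChunkEncoder;

public class ChunkEncoderSupplierSketch {
    private static final AtomicLong counter = new AtomicLong();

    // Supplier that allocates an encoder sized for the maximum LZF chunk length (0xFFFF)
    private static final Supplier<ChunkEncoder> supplier = () -> {
        System.out.printf("Allocating new chunkEncoder[%,d]%n", counter.incrementAndGet());
        return new ChunkEncoder(0xFFFF);
    };

    // One encoder per thread; the supplier runs once per thread, on first use
    private static final ThreadLocal<ChunkEncoder> ENCODER = ThreadLocal.withInitial(supplier);

    public static void main(String[] args) {
        byte[] data = new byte[1024];
        System.out.println(ENCODER.get().encodeChunk(data, 0, data.length));
        ENCODER.remove(); // cleanup, as in the call() method above
    }
}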
@Override
public void close() throws IOException {
    if (!_outputStreamClosed) {
        if (_position > 0) {
            writeCompressedBlock();
        }
        super.flush();
        super.close();
        _outputStreamClosed = true;
        _encoder.close();
        byte[] buf = _outputBuffer;
        if (buf != null) {
            _outputBuffer = null;
            _recycler.releaseOutputBuffer(buf);
        }
    }
}
/**
 * Method for compressing individual chunk, if (and only if) it compresses down
 * to specified ratio or less.
 *
 * @param maxResultRatio Value between 0.05 and 1.10 to indicate maximum relative size of
 *   the result to use, in order to append encoded chunk
 *
 * @return Encoded chunk if (and only if) input compresses down to specified ratio or less;
 *   otherwise returns null
 */
public LZFChunk encodeChunkIfCompresses(byte[] data, int offset, int inputLen, double maxResultRatio) {
    if (inputLen >= MIN_BLOCK_TO_COMPRESS) {
        final int maxSize = (int) (maxResultRatio * inputLen + LZFChunk.HEADER_LEN_COMPRESSED + 0.5);
        int compLen = tryCompress(data, offset, offset + inputLen, _encodeBuffer, 0);
        if (compLen <= maxSize) {
            return LZFChunk.createCompressed(inputLen, _encodeBuffer, 0, compLen);
        }
    }
    return null;
}
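Callers are expected to handle the null result themselves. A hedged sketch of that pattern follows; LZFChunk.createNonCompressed is assumed to be the library's fallback factory for uncompressed chunks, and the 0.90 ratio is an arbitrary illustration:

import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.LZFChunk;

public class ChunkIfCompressesExample {
    // Returns a compressed chunk when it saves at least ~10%, otherwise a plain (non-compressed) chunk.
    static LZFChunk toChunk(ChunkEncoder enc, byte[] data) {
        LZFChunk chunk = enc.encodeChunkIfCompresses(data, 0, data.length, 0.90);
        if (chunk == null) {
            chunk = LZFChunk.createNonCompressed(data, 0, data.length);
        }
        return chunk;
    }
}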
public LZFFileOutputStream(ChunkEncoder encoder, String name, boolean append) throws FileNotFoundException {
    this(encoder, name, append, encoder.getBufferRecycler());
}
/**
 * Compress and write the current block to the OutputStream
 */
protected void writeCompressedBlock() throws IOException {
    int left = _position;
    _position = 0;
    int offset = 0;
    while (left > 0) {
        int chunkLen = Math.min(LZFChunk.MAX_CHUNK_LEN, left);
        _encoder.encodeAndWriteChunk(_outputBuffer, offset, chunkLen, _wrapper);
        offset += chunkLen;
        left -= chunkLen;
    }
}
/**
 * Method that will use the "safe" {@link ChunkEncoder}, as produced by
 * {@link ChunkEncoderFactory#safeInstance}, for encoding. "Safe" here
 * means that the encoder does not rely on any features beyond the core JDK
 * (such as sun.misc.Unsafe).
 */
public static byte[] safeEncode(byte[] data, int offset, int length, BufferRecycler bufferRecycler) {
    ChunkEncoder enc = ChunkEncoderFactory.safeInstance(length, bufferRecycler);
    byte[] result = encode(enc, data, offset, length);
    enc.close();
    return result;
}
public LZFFileOutputStream(ChunkEncoder encoder, File file, boolean append) throws FileNotFoundException {
    this(encoder, file, append, encoder.getBufferRecycler());
}
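Taken together with the close() logic above, typical use is plain try-with-resources. The following is only a sketch: the no-arg ChunkEncoderFactory.optimalInstance() and the util package location of LZFFileOutputStream are assumptions about this version of the library, and "data.lzf" is a placeholder path.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.util.ChunkEncoderFactory;
import com.ning.compress.lzf.util.LZFFileOutputStream;

public class FileOutputExample {
    public static void main(String[] args) throws IOException {
        ChunkEncoder enc = ChunkEncoderFactory.optimalInstance();
        // close() flushes the pending block, closes the file, closes the encoder,
        // and returns the output buffer to the recycler (see close() above)
        try (LZFFileOutputStream out = new LZFFileOutputStream(enc, new File("data.lzf"), false)) {
            out.write("some repetitive payload, some repetitive payload".getBytes(StandardCharsets.UTF_8));
        }
    }
}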