return first.getData(); int resultBytes = first.length(); offset += chunkLen; LZFChunk last = first; offset += chunkLen; left -= chunkLen; resultBytes += chunk.length(); last.setNext(chunk); last = chunk; } while (left > 0); for (; first != null; first = first.next()) { ptr = first.copyTo(result, ptr);
/**
 * Factory method for constructing a NON-compressed chunk: the payload is
 * stored as-is behind a 5-byte header (signature, block type, 16-bit length).
 *
 * @param plainData Buffer containing the uncompressed payload
 * @param ptr Offset of the first payload byte in {@code plainData}
 * @param len Number of payload bytes to wrap
 * @return Chunk whose backing array is header followed by the copied payload
 */
public static LZFChunk createNonCompressed(byte[] plainData, int ptr, int len) {
    final byte[] buf = new byte[HEADER_LEN_NOT_COMPRESSED + len];
    // 5-byte header: 'Z', 'V', block type, then big-endian 16-bit payload length
    buf[0] = BYTE_Z;
    buf[1] = BYTE_V;
    buf[2] = BLOCK_TYPE_NON_COMPRESSED;
    buf[3] = (byte) (len >> 8);
    buf[4] = (byte) len;
    System.arraycopy(plainData, ptr, buf, HEADER_LEN_NOT_COMPRESSED, len);
    return new LZFChunk(buf);
}
/** * Alternate chunk compression method that will append encoded chunk in * pre-allocated buffer. Note that caller must ensure that the buffer is * large enough to hold not just encoded result but also intermediate * result; latter may be up to 4% larger than input; caller may use * {@link LZFEncoder#estimateMaxWorkspaceSize(int)} to calculate * necessary buffer size. * * @return Offset in output buffer after appending the encoded chunk */ public int appendEncodedChunk(final byte[] input, final int inputPtr, final int inputLen, final byte[] outputBuffer, final int outputPos) { if (inputLen >= MIN_BLOCK_TO_COMPRESS) { /* If we have non-trivial block, and can compress it by at least * 2 bytes (since header is 2 bytes longer), use as-is */ final int compStart = outputPos + LZFChunk.HEADER_LEN_COMPRESSED; final int end = tryCompress(input, inputPtr, inputPtr+inputLen, outputBuffer, compStart); final int uncompEnd = (outputPos + LZFChunk.HEADER_LEN_NOT_COMPRESSED) + inputLen; if (end < uncompEnd) { // yes, compressed by at least one byte final int compLen = end - compStart; LZFChunk.appendCompressedHeader(inputLen, compLen, outputBuffer, outputPos); return end; } } // Otherwise append as non-compressed chunk instead (length + 5): return LZFChunk.appendNonCompressed(input, inputPtr, inputLen, outputBuffer, outputPos); }
/**
 * Method for compressing (or not) individual chunks
 */
public LZFChunk encodeChunk(byte[] data, int offset, int len)
{
    if (len >= MIN_BLOCK_TO_COMPRESS) {
        /* If we have non-trivial block, and can compress it by at least
         * 2 bytes (since header is 2 bytes longer), let's compress:
         */
        int compLen = tryCompress(data, offset, offset+len, _encodeBuffer, 0);
        // Must save MORE than 2 bytes: compressed-chunk header is 2 bytes
        // longer than the non-compressed one, so only then is there real gain
        if (compLen < (len-2)) {
            return LZFChunk.createCompressed(len, _encodeBuffer, 0, compLen);
        }
    }
    // Otherwise leave uncompressed:
    return LZFChunk.createNonCompressed(data, offset, len);
}
/** * Method for encoding individual chunk, writing it to given output stream. */ public void encodeAndWriteChunk(byte[] data, int offset, int len, OutputStream out) throws IOException { if (len >= MIN_BLOCK_TO_COMPRESS) { // If we have non-trivial block, and can compress it by at least // 2 bytes (since header is 2 bytes longer), let's compress: int compEnd = tryCompress(data, offset, offset+len, _encodeBuffer, LZFChunk.HEADER_LEN_COMPRESSED); final int compLen = compEnd - LZFChunk.HEADER_LEN_COMPRESSED; if (compLen < (len-2)) { // yes, compressed block is smaller (consider header is 2 bytes longer) LZFChunk.appendCompressedHeader(len, compLen, _encodeBuffer, 0); out.write(_encodeBuffer, 0, compEnd); return; } } // Otherwise leave uncompressed: byte[] headerBuf = _headerBuffer; if (headerBuf == null) { _headerBuffer = headerBuf = new byte[LZFChunk.MAX_HEADER_LEN]; } LZFChunk.writeNonCompressedHeader(len, out, headerBuf); out.write(data, offset, len); }
LZFChunk.appendCompressedHeader(chunkLength, encodeEnd-7, _encodedBytes, 0); _bufferLength = encodeEnd; } else { // no -- so sad... int ptr = LZFChunk.appendNonCompressedHeader(chunkLength, _encodedBytes, 0);
/** * Method for encoding individual chunk, writing it to given output stream, * if (and only if!) it compresses enough. * * @return True if compression occurred and chunk was written; false if not. */ public boolean encodeAndWriteChunkIfCompresses(byte[] data, int offset, int inputLen, OutputStream out, double resultRatio) throws IOException { if (inputLen >= MIN_BLOCK_TO_COMPRESS) { int compEnd = tryCompress(data, offset, offset+inputLen, _encodeBuffer, LZFChunk.HEADER_LEN_COMPRESSED); final int maxSize = (int) (resultRatio * inputLen + LZFChunk.HEADER_LEN_COMPRESSED + 0.5); if (compEnd <= maxSize) { // yes, down to small enough LZFChunk.appendCompressedHeader(inputLen, compEnd-LZFChunk.HEADER_LEN_COMPRESSED, _encodeBuffer, 0); out.write(_encodeBuffer, 0, compEnd); return true; } } return false; }
@Override
public byte[] toBytes(ResourceHolder<T> holder)
{
    // Serialize the held value into a freshly allocated buffer
    T val = holder.get();
    final ByteBuffer buf = ByteBuffer.allocate(converter.sizeOf(val.remaining())).order(order);
    converter.combine(buf, val);
    // Borrow a pooled chunk encoder; release it in finally so the pooled
    // resource is not leaked if encodeChunk throws (original released it
    // only on the success path).
    final ResourceHolder<ChunkEncoder> encoder = CompressedPools.getChunkEncoder();
    try {
        LZFChunk chunk = encoder.get().encodeChunk(buf.array(), 0, buf.array().length);
        return chunk.getData();
    } finally {
        Closeables.closeQuietly(encoder);
    }
}
/**
 * Method for compressing individual chunk, if (and only if) it compresses down
 * to specified ratio or less.
 *
 * @param maxResultRatio Value between 0.05 and 1.10 to indicate maximum relative size of
 *   the result to use, in order to append encoded chunk
 *
 * @return Encoded chunk if (and only if) input compresses down to specified ratio or less;
 *   otherwise returns null
 */
public LZFChunk encodeChunkIfCompresses(byte[] data, int offset, int inputLen,
        double maxResultRatio)
{
    if (inputLen >= MIN_BLOCK_TO_COMPRESS) {
        // Size budget for the encoded payload; header allowance added, +0.5 rounds
        final int maxSize = (int) (maxResultRatio * inputLen + LZFChunk.HEADER_LEN_COMPRESSED + 0.5);
        int compLen = tryCompress(data, offset, offset+inputLen, _encodeBuffer, 0);
        if (compLen <= maxSize) {
            // Compressed well enough; wrap encoded bytes as a chunk
            return LZFChunk.createCompressed(inputLen, _encodeBuffer, 0, compLen);
        }
    }
    // Input too small to try, or did not reach the requested ratio
    return null;
}
/** * Method for compressing (or not) individual chunks */ public LZFChunk encodeChunk(byte[] data, int offset, int len) { if (len >= MIN_BLOCK_TO_COMPRESS) { /* If we have non-trivial block, and can compress it by at least * 2 bytes (since header is 2 bytes longer), let's compress: */ int compLen = tryCompress(data, offset, offset+len, _encodeBuffer, 0); if (compLen < (len-2)) { // nah; just return uncompressed return LZFChunk.createCompressed(len, _encodeBuffer, 0, compLen); } } // Otherwise leave uncompressed: return LZFChunk.createNonCompressed(data, offset, len); }
/**
 * Method for encoding individual chunk, writing it to given output stream.
 */
public void encodeAndWriteChunk(byte[] data, int offset, int len, OutputStream out)
    throws IOException
{
    if (len >= MIN_BLOCK_TO_COMPRESS) {
        // If we have non-trivial block, and can compress it by at least
        // 2 bytes (since header is 2 bytes longer), let's compress:
        // encode past the slot reserved for the 7-byte compressed header
        int compEnd = tryCompress(data, offset, offset+len, _encodeBuffer, LZFChunk.HEADER_LEN_COMPRESSED);
        final int compLen = compEnd - LZFChunk.HEADER_LEN_COMPRESSED;
        if (compLen < (len-2)) { // yes, compressed block is smaller (consider header is 2 bytes longer)
            LZFChunk.appendCompressedHeader(len, compLen, _encodeBuffer, 0);
            out.write(_encodeBuffer, 0, compEnd);
            return;
        }
    }
    // Otherwise leave uncompressed: write 5-byte header, then payload as-is
    byte[] headerBuf = _headerBuffer;
    if (headerBuf == null) {
        // lazily allocate the small reusable header scratch buffer
        _headerBuffer = headerBuf = new byte[LZFChunk.MAX_HEADER_LEN];
    }
    LZFChunk.writeNonCompressedHeader(len, out, headerBuf);
    out.write(data, offset, len);
}
LZFChunk.appendCompressedHeader(chunkLength, encodeEnd-7, _encodedBytes, 0); _bufferLength = encodeEnd; } else { // no -- so sad... int ptr = LZFChunk.appendNonCompressedHeader(chunkLength, _encodedBytes, 0);
/**
 * Method for encoding individual chunk, writing it to given output stream,
 * if (and only if!) it compresses enough.
 *
 * @param resultRatio Maximum allowed ratio of encoded size to input size for
 *   the chunk to be considered worth writing
 *
 * @return True if compression occurred and chunk was written; false if not.
 */
public boolean encodeAndWriteChunkIfCompresses(byte[] data, int offset, int inputLen,
        OutputStream out, double resultRatio)
    throws IOException
{
    if (inputLen >= MIN_BLOCK_TO_COMPRESS) {
        // encode past the slot reserved for the 7-byte compressed header
        int compEnd = tryCompress(data, offset, offset+inputLen, _encodeBuffer, LZFChunk.HEADER_LEN_COMPRESSED);
        // maximum total encoded size (header included); +0.5 rounds to nearest
        final int maxSize = (int) (resultRatio * inputLen + LZFChunk.HEADER_LEN_COMPRESSED + 0.5);
        if (compEnd <= maxSize) { // yes, down to small enough
            LZFChunk.appendCompressedHeader(inputLen, compEnd-LZFChunk.HEADER_LEN_COMPRESSED, _encodeBuffer, 0);
            out.write(_encodeBuffer, 0, compEnd);
            return true;
        }
    }
    // Either too small to attempt, or did not compress enough: nothing written
    return false;
}
/**
 * Compresses an individual chunk and returns it only when the encoded size
 * stays within {@code maxResultRatio} times the input length (plus header
 * allowance, rounded to nearest).
 *
 * @param maxResultRatio Value between 0.05 and 1.10 to indicate maximum relative size of
 *   the result to use, in order to append encoded chunk
 *
 * @return Encoded chunk if (and only if) input compresses down to specified ratio or less;
 *   otherwise returns null
 */
public LZFChunk encodeChunkIfCompresses(byte[] data, int offset, int inputLen,
        double maxResultRatio)
{
    if (inputLen < MIN_BLOCK_TO_COMPRESS) {
        // Too small to be worth attempting
        return null;
    }
    final int sizeLimit = (int) (maxResultRatio * inputLen + LZFChunk.HEADER_LEN_COMPRESSED + 0.5);
    final int encodedLen = tryCompress(data, offset, offset + inputLen, _encodeBuffer, 0);
    return (encodedLen <= sizeLimit)
            ? LZFChunk.createCompressed(inputLen, _encodeBuffer, 0, encodedLen)
            : null;
}
return first.getData(); int resultBytes = first.length(); offset += chunkLen; LZFChunk last = first; offset += chunkLen; left -= chunkLen; resultBytes += chunk.length(); last.setNext(chunk); last = chunk; } while (left > 0); for (; first != null; first = first.next()) { ptr = first.copyTo(result, ptr);
/** * Method for compressing (or not) individual chunks */ public LZFChunk encodeChunk(byte[] data, int offset, int len) { if (len >= MIN_BLOCK_TO_COMPRESS) { /* If we have non-trivial block, and can compress it by at least * 2 bytes (since header is 2 bytes longer), let's compress: */ int compLen = tryCompress(data, offset, offset+len, _encodeBuffer, 0); if (compLen < (len-2)) { // nah; just return uncompressed return LZFChunk.createCompressed(len, _encodeBuffer, 0, compLen); } } // Otherwise leave uncompressed: return LZFChunk.createNonCompressed(data, offset, len); }
/** * Method for encoding individual chunk, writing it to given output stream. */ public void encodeAndWriteChunk(byte[] data, int offset, int len, OutputStream out) throws IOException { if (len >= MIN_BLOCK_TO_COMPRESS) { // If we have non-trivial block, and can compress it by at least // 2 bytes (since header is 2 bytes longer), let's compress: int compEnd = tryCompress(data, offset, offset+len, _encodeBuffer, LZFChunk.HEADER_LEN_COMPRESSED); final int compLen = compEnd - LZFChunk.HEADER_LEN_COMPRESSED; if (compLen < (len-2)) { // yes, compressed block is smaller (consider header is 2 bytes longer) LZFChunk.appendCompressedHeader(len, compLen, _encodeBuffer, 0); out.write(_encodeBuffer, 0, compEnd); return; } } // Otherwise leave uncompressed: byte[] headerBuf = _headerBuffer; if (headerBuf == null) { _headerBuffer = headerBuf = new byte[LZFChunk.MAX_HEADER_LEN]; } LZFChunk.writeNonCompressedHeader(len, out, headerBuf); out.write(data, offset, len); }