/**
 * Creates a new LZ4 decoder with customizable implementation.
 *
 * @param factory           user customizable {@link LZ4Factory} instance
 *                          which may be JNI bindings to the original C implementation, a pure Java implementation
 *                          or a Java implementation that uses the {@link sun.misc.Unsafe}
 * @param validateChecksums if {@code true}, the checksum field will be validated against the actual
 *                          uncompressed data, and if the checksums do not match, a suitable
 *                          {@link DecompressionException} will be thrown. In this case the decoder will use
 *                          xxhash hashing for Java, based on Yann Collet's work available at
 *                          <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 */
public Lz4FrameDecoder(LZ4Factory factory, boolean validateChecksums) {
    // A null checksum disables validation entirely in the delegated constructor.
    this(factory, validateChecksums
            ? XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum()
            : null);
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Creates a new LZ4 decoder with customizable implementation.
 *
 * @param factory           user customizable {@link LZ4Factory} instance
 *                          which may be JNI bindings to the original C implementation, a pure Java implementation
 *                          or a Java implementation that uses the {@link sun.misc.Unsafe}
 * @param validateChecksums if {@code true}, the checksum field will be validated against the actual
 *                          uncompressed data, and if the checksums do not match, a suitable
 *                          {@link DecompressionException} will be thrown. In this case the decoder will use
 *                          xxhash hashing for Java, based on Yann Collet's work available at
 *                          <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 */
public Lz4FrameDecoder(LZ4Factory factory, boolean validateChecksums) {
    // A null checksum disables validation entirely in the delegated constructor.
    this(factory, validateChecksums
            ? XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum()
            : null);
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Create a new instance using {@link net.jpountz.xxhash.XXHash32} for checksumming.
 *
 * @see #LZ4BlockInputStream(InputStream, LZ4FastDecompressor, Checksum)
 * @see net.jpountz.xxhash.StreamingXXHash32#asChecksum()
 */
public LZ4BlockInputStream(InputStream in, LZ4FastDecompressor decompressor) {
    this(in, decompressor, XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Creates a new LZ4 decoder with customizable implementation.
 *
 * @param factory           user customizable {@link LZ4Factory} instance
 *                          which may be JNI bindings to the original C implementation, a pure Java implementation
 *                          or a Java implementation that uses the {@link sun.misc.Unsafe}
 * @param validateChecksums if {@code true}, the checksum field will be validated against the actual
 *                          uncompressed data, and if the checksums do not match, a suitable
 *                          {@link DecompressionException} will be thrown. In this case the decoder will use
 *                          xxhash hashing for Java, based on Yann Collet's work available at
 *                          <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 */
public Lz4FrameDecoder(LZ4Factory factory, boolean validateChecksums) {
    // A null checksum disables validation entirely in the delegated constructor.
    this(factory, validateChecksums
            ? XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum()
            : null);
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Create a new {@link OutputStream} that will compress data using the LZ4 algorithm.
 *
 * @param out The output stream to compress
 * @param blockSize Default: 4. The block size used during compression. 4=64kb, 5=256kb, 6=1mb, 7=4mb. All other
 *            values will generate an exception
 * @param blockChecksum Default: false. When true, a XXHash32 checksum is computed and appended to the stream for
 *            every block of data
 * @param useBrokenFlagDescriptorChecksum Default: false. When true, writes an incorrect FrameDescriptor checksum
 *            compatible with older kafka clients.
 * @throws IOException if writing the frame header to {@code out} fails
 */
public KafkaLZ4BlockOutputStream(OutputStream out, int blockSize, boolean blockChecksum,
        boolean useBrokenFlagDescriptorChecksum) throws IOException {
    this.out = out;
    compressor = LZ4Factory.fastestInstance().fastCompressor();
    checksum = XXHashFactory.fastestInstance().hash32();
    this.useBrokenFlagDescriptorChecksum = useBrokenFlagDescriptorChecksum;
    // BD/FLG presumably validate and encode the frame-descriptor fields — see their definitions.
    bd = new BD(blockSize);
    flg = new FLG(blockChecksum);
    bufferOffset = 0;
    maxBlockSize = bd.getBlockMaximumSize();
    buffer = new byte[maxBlockSize];
    // Sized to the worst-case compressed length so one block always fits without reallocation.
    compressedBuffer = new byte[compressor.maxCompressedLength(maxBlockSize)];
    finished = false;
    // Header must be written last, after all state above is initialized.
    writeHeader();
}
/** Initializes the partitioner with the fastest available 32-bit xxhash implementation. */
public SixtPartitioner() {
    xxHasher = XXHashFactory.fastestInstance().hash32();
}
// xxhash32 of the compressed bytes [off, off+len) with seed 0 — presumably compared
// against a checksum stored in the stream; verify against the surrounding code.
int hash = XXHashFactory.fastestInstance().hash32().hash(compressed, off, len, 0);
/**
 * Creates a stream that reads plain bytes from {@code delegate} and exposes them
 * LZ4 block-compressed, buffering {@code blockSize} uncompressed bytes per block.
 *
 * @param delegate  source of uncompressed data
 * @param blockSize number of uncompressed bytes gathered before a block is compressed
 * @throws IOException if the superclass stream setup fails
 */
public LZ4CompressingInputStream(InputStream delegate, int blockSize) throws IOException {
    // Internal buffer must hold the block header plus the worst-case compressed size.
    super(delegate, LZ4_HEADER_SIZE + COMPRESSOR.maxCompressedLength(blockSize));
    this.blockSize = blockSize;
    this.uncompressedBuffer = new byte[blockSize];
    Checksum checksum = XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum();
    OutputStream delegateOutputStream = new InternalByteArrayOutputStream();
    // Final boolean is presumably syncFlush (per LZ4BlockOutputStream's 5-arg constructor) — confirm.
    this.compressingStream = new LZ4BlockOutputStream(delegateOutputStream, blockSize, COMPRESSOR, checksum, true);
    this.finished = false;
}
/** Prints the fastest overall instance, then the fastest pure-Java instance. */
public static void main(String[] args) {
    String overall = "Fastest instance is " + fastestInstance();
    String javaOnly = "Fastest Java instance is " + fastestJavaInstance();
    System.out.println(overall);
    System.out.println(javaOnly);
}
/** Prints the fastest overall instance, then the fastest pure-Java instance. */
public static void main(String[] args) {
    String overall = "Fastest instance is " + fastestInstance();
    String javaOnly = "Fastest Java instance is " + fastestJavaInstance();
    System.out.println(overall);
    System.out.println(javaOnly);
}
/**
 * Create a new instance using {@link XXHash32} for checksumming.
 *
 * @see #LZ4BlockInputStream(InputStream, LZ4FastDecompressor, Checksum)
 * @see StreamingXXHash32#asChecksum()
 */
public LZ4BlockInputStream(InputStream in, LZ4FastDecompressor decompressor) {
    this(in, decompressor, XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Create a new instance which checks stream integrity using
 * {@link StreamingXXHash32} and doesn't sync flush.
 *
 * @see #LZ4BlockOutputStream(OutputStream, int, LZ4Compressor, Checksum, boolean)
 * @see StreamingXXHash32#asChecksum()
 */
public LZ4BlockOutputStream(OutputStream out, int blockSize, LZ4Compressor compressor) {
    // syncFlush=false: blocks are only flushed when full or on close.
    this(out, blockSize, compressor,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum(), false);
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Creates a new LZ4 encoder with high or fast compression, default block size (64 KB)
 * and xxhash hashing for Java, based on Yann Collet's work available at
 * <a href="https://github.com/Cyan4973/xxHash">Github</a>.
 *
 * @param highCompressor if {@code true} codec will use compressor which requires more memory
 *                       and is slower but compresses more efficiently
 */
public Lz4FrameEncoder(boolean highCompressor) {
    this(LZ4Factory.fastestInstance(), highCompressor, DEFAULT_BLOCK_SIZE,
            XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum());
}
/**
 * Creates a stream that reads plain bytes from {@code delegate} and exposes them
 * LZ4 block-compressed, buffering {@code blockSize} uncompressed bytes per block.
 *
 * @param delegate  source of uncompressed data
 * @param blockSize number of uncompressed bytes gathered before a block is compressed
 * @throws IOException if the superclass stream setup fails
 */
public LZ4CompressingInputStream(InputStream delegate, int blockSize) throws IOException {
    // Internal buffer must hold the block header plus the worst-case compressed size.
    super(delegate, LZ4_HEADER_SIZE + COMPRESSOR.maxCompressedLength(blockSize));
    this.blockSize = blockSize;
    this.uncompressedBuffer = new byte[blockSize];
    Checksum checksum = XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum();
    OutputStream delegateOutputStream = new InternalByteArrayOutputStream();
    // Final boolean is presumably syncFlush (per LZ4BlockOutputStream's 5-arg constructor) — confirm.
    this.compressingStream = new LZ4BlockOutputStream(delegateOutputStream, blockSize, COMPRESSOR, checksum, true);
    this.finished = false;
}