Refine search
compressor = CodecPool.getCompressor(codec); valueBuffer = new NonSyncDataOutputBuffer(); deflateFilter = codec.createOutputStream(valueBuffer, compressor); deflateOut = new DataOutputStream(deflateFilter); setColumnIndex(columnIndex); deflateFilter.resetState(); deflateOut.write(columnValue.getData(), 0, columnValue.getLength()); deflateOut.flush(); deflateFilter.finish(); out.write(valueBuffer.getData(), 0, valueBuffer.getLength()); } else { for(int columnIndex=0; columnIndex < columnNumber; ++columnIndex) { NonSyncDataOutputBuffer buf = columnBuffers[columnIndex].columnValBuffer; out.write(buf.getData(), 0, buf.getLength());
java.io.InputStream in = null; try { out = codec.createOutputStream( new java.io.FileOutputStream(args[i])); byte[] buffer = new byte[100]; String inFilename = removeSuffix(args[i], codec.getDefaultExtension()); in = new java.io.FileInputStream(inFilename); int len = in.read(buffer); while (len > 0) { out.write(buffer, 0, len); len = in.read(buffer); if(out != null) { out.close(); } if(in != null) { in.close(); } CompressionInputStream in = null; try { in = codec.createInputStream( new java.io.FileInputStream(args[i])); byte[] buffer = new byte[100];
/**
 * Flushes buffered data through the wrapped compression stream.
 *
 * <p>A plain {@code flush()} on a compressed stream is typically a no-op, so
 * {@code finish()} is used to force the codec to emit everything written so
 * far; {@code resetState()} then re-arms the codec so subsequent writes start
 * a fresh compressed block.
 */
@Override
public void flush() throws IOException {
  CompressionOutputStream compressedOut = (CompressionOutputStream) out;
  compressedOut.finish();
  compressedOut.flush();
  compressedOut.resetState();
}
}
/**
 * Workhorse to check and write out compressed data/lengths.
 *
 * <p>Compresses the contents of {@code uncompressedDataBuffer} through the
 * deflate filter into the internal {@code buffer}, then writes a
 * length-prefixed frame (vint compressed length followed by the compressed
 * bytes) to {@code out}.
 *
 * @param uncompressedDataBuffer the raw record bytes to compress and emit
 * @throws IOException if compression or the underlying write fails
 */
private synchronized void writeBuffer(DataOutputBuffer uncompressedDataBuffer)
    throws IOException {
  // Re-arm the codec and clear the destination buffer before each record.
  deflateFilter.resetState();
  buffer.reset();
  deflateOut.write(uncompressedDataBuffer.getData(), 0,
      uncompressedDataBuffer.getLength());
  // flush() alone is not enough on compressed streams; finish() forces the
  // codec to emit its trailing data into 'buffer'.
  deflateOut.flush();
  deflateFilter.finish();
  // Frame format: vint compressed length, then the compressed payload.
  WritableUtils.writeVInt(out, buffer.getLength());
  out.write(buffer.getData(), 0, buffer.getLength());
}
/**
 * Append a key/value pair.
 *
 * <p>Serializes the key uncompressed into {@code buffer}, then serializes the
 * value through the deflate filter (appending the compressed value after the
 * key in the same buffer — presumably {@code compressedValSerializer} writes
 * via {@code deflateOut}; confirm against the stream wiring in the
 * constructor), and finally writes one framed record:
 * total length, key length, then the buffer contents.
 *
 * @param key the key to append; must be exactly {@code keyClass}
 * @param val the value to append; must be exactly {@code valClass}
 * @throws IOException if either object has the wrong class, the key
 *         serializes to a negative length, or the underlying write fails
 */
@Override
@SuppressWarnings("unchecked")
public synchronized void append(Object key, Object val) throws IOException {
  // Exact-class checks: subclasses are rejected, matching the writer's
  // declared key/value classes.
  if (key.getClass() != keyClass)
    throw new IOException("wrong key class: "+key.getClass().getName()
                          +" is not "+keyClass);
  if (val.getClass() != valClass)
    throw new IOException("wrong value class: "+val.getClass().getName()
                          +" is not "+valClass);
  buffer.reset();
  // Append the 'key'
  keySerializer.serialize(key);
  int keyLength = buffer.getLength();
  if (keyLength < 0)
    throw new IOException("negative length keys not allowed: " + key);
  // Compress 'value' and append it
  deflateFilter.resetState();
  compressedValSerializer.serialize(val);
  // finish() is required to force the codec's trailing bytes out; flush()
  // alone is a no-op on many compressed streams.
  deflateOut.flush();
  deflateFilter.finish();
  // Write the record out
  checkAndWriteSync(); // sync
  out.writeInt(buffer.getLength()); // total record length
  out.writeInt(keyLength); // key portion length
  out.write(buffer.getData(), 0, buffer.getLength()); // data
}
private void writeKey(int recordLen, int keyLength) throws IOException { checkAndWriteSync(); // sync out.writeInt(recordLen); // total record length out.writeInt(keyLength); // key portion length if (this.isCompressed()) { Compressor compressor = org.apache.tajo.storage.compress.CodecPool.getCompressor(codec); if (compressor != null) compressor.reset(); //builtin gzip is null NonSyncByteArrayOutputStream compressionBuffer = new NonSyncByteArrayOutputStream(); CompressionOutputStream deflateFilter = codec.createOutputStream(compressionBuffer, compressor); DataOutputStream deflateOut = new DataOutputStream(deflateFilter); //compress key and write key out compressionBuffer.reset(); deflateFilter.resetState(); WriteKeyBuffer(deflateOut); deflateOut.flush(); deflateFilter.finish(); int compressedKeyLen = compressionBuffer.getLength(); out.writeInt(compressedKeyLen); compressionBuffer.writeTo(out); compressionBuffer.reset(); deflateOut.close(); org.apache.tajo.storage.compress.CodecPool.returnCompressor(compressor); } else { out.writeInt(keyLength); WriteKeyBuffer(out); } }
private void writeKey(KeyBuffer keyBuffer, int recordLen, int keyLength) throws IOException { checkAndWriteSync(); // sync out.writeInt(recordLen); // total record length out.writeInt(keyLength); // key portion length if(this.isCompressed()) { Compressor compressor = CodecPool.getCompressor(codec); NonSyncDataOutputBuffer compressionBuffer = new NonSyncDataOutputBuffer(); CompressionOutputStream deflateFilter = codec.createOutputStream(compressionBuffer, compressor); DataOutputStream deflateOut = new DataOutputStream(deflateFilter); //compress key and write key out compressionBuffer.reset(); deflateFilter.resetState(); keyBuffer.write(deflateOut); deflateOut.flush(); deflateFilter.finish(); int compressedKeyLen = compressionBuffer.getLength(); out.writeInt(compressedKeyLen); out.write(compressionBuffer.getData(), 0, compressedKeyLen); CodecPool.returnCompressor(compressor); } else { out.writeInt(keyLength); keyBuffer.write(out); } }
CompressionOutputStream deflateFilter = codec.createOutputStream(compressedDataBuffer); DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter)); deflateOut.write(data, 0, data.length); deflateOut.flush(); deflateFilter.finish();
/**
 * Serializes this request onto {@code out}, compressing the payload with
 * {@code codec} when compression is enabled.
 *
 * <p>The underlying stream is intentionally left open; in the compressed
 * case {@code finish()} flushes the codec's trailing block without closing
 * {@code out}.
 *
 * @param out destination stream; not closed by this method
 * @throws IOException if serialization or the underlying write fails
 */
public void writeRequest(OutputStream out) throws IOException {
  if (compress) {
    CompressionOutputStream compressedOut = codec.createOutputStream(out);
    doWriteRequest(new DataOutputStream(compressedOut));
    compressedOut.finish();
  } else {
    doWriteRequest(new DataOutputStream(out));
  }
}
}
/**
 * Flushes any buffered records, releases per-column and key compression
 * resources, and closes the underlying stream.
 *
 * <p>Cleanup order matters: compressed streams are closed (forcing out any
 * trailing codec data) before their compressors are returned to the pool,
 * and the underlying stream is flushed and closed last.
 *
 * @throws IOException if flushing records or closing any stream fails
 */
public synchronized void close() throws IOException {
  // Push out any records still buffered in memory before tearing down.
  if (bufferedRecords > 0) {
    flushRecords();
  }
  clearColumnBuffers();

  if (isCompressed()) {
    // Close each column's deflate stream pair, then the key's, then hand
    // the pooled compressors back. Fields are nulled so a second close()
    // cannot double-return them.
    for (int i = 0; i < columnNumber; i++) {
      deflateFilter[i].close();
      IOUtils.closeStream(deflateOut[i]);
    }
    keyDeflateFilter.close();
    IOUtils.closeStream(keyDeflateOut);
    CodecPool.returnCompressor(keyCompressor);
    keyCompressor = null;
    CodecPool.returnCompressor(compressor);
    compressor = null;
  }

  if (out != null) {
    // Close the underlying stream if we own it...
    out.flush();
    out.close();
    out = null;
  }
}
}
@Override public BytesInput compress(BytesInput bytes) throws IOException { final BytesInput compressedBytes; if (codec == null) { compressedBytes = bytes; } else { compressedOutBuffer.reset(); if (compressor != null) { // null compressor for non-native gzip compressor.reset(); } CompressionOutputStream cos = codec.createOutputStream(compressedOutBuffer, compressor); bytes.writeAllTo(cos); cos.finish(); cos.close(); compressedBytes = BytesInput.from(compressedOutBuffer); } return compressedBytes; }
throw new IOException("Illegal codec!"); Compressor c = codec.createCompressor(); Arrays.fill(b, (byte) 43); try { cos = codec.createOutputStream(bos, c); cos.write(b); } finally { if (cos != null) { cos.close();
compressionStream.resetState(); compressionStream.write(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length); compressionStream.flush(); compressionStream.finish(); byte[] plaintext = compressedByteStream.toByteArray(); plaintextLength = plaintext.length; compressedByteStream.reset(); compressedByteStream.write(headerBytes); compressionStream.resetState(); compressionStream.write(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length); compressionStream.flush(); compressionStream.finish(); return new Bytes(compressedByteStream.getBuffer(), 0, compressedByteStream.size()); } else {
Arrays.fill(b, (byte) 43); try { cos = codec.createOutputStream(bos, c2); cos.write(b); } finally { if (cos != null) { cos.close();
/**
 * Finalizes and closes this sink stream.
 *
 * <p>Order is deliberate: the serializer is flushed first so all events
 * reach the compressed stream, {@code finish()} forces the codec's trailing
 * block out exactly once (guarded by {@code isFinished}), the filesystem
 * stream is flushed/synced, and only then is the compressed stream closed
 * and the pooled compressor returned.
 *
 * @throws IOException if any flush, sync, or close fails
 */
@Override
public void close() throws IOException {
  serializer.flush();
  serializer.beforeClose();
  if (!isFinished) {
    cmpOut.finish();
    isFinished = true;
  }
  fsOut.flush();
  hflushOrSync(fsOut);
  cmpOut.close();
  if (compressor != null) {
    // Return the compressor to the pool and null the field so a repeated
    // close() cannot return it twice.
    CodecPool.returnCompressor(compressor);
    compressor = null;
  }
  unregisterCurrentStream();
}
/**
 * Flushes pending events and syncs the compressed output to the filesystem.
 *
 * <p>{@code isFinished} guards {@code finish()} so it runs at most once per
 * write cycle; it is presumably cleared again by the next append — confirm
 * against the writer's append path.
 *
 * @throws IOException if flushing or syncing fails
 */
@Override
public void sync() throws IOException {
  // We must use finish() and resetState() here -- flush() is apparently not
  // supported by the compressed output streams (it's a no-op).
  // Also, since resetState() writes headers, avoid calling it without an
  // additional write/append operation.
  // Note: There are bugs in Hadoop & JDK w/ pure-java gzip; see HADOOP-8522.
  serializer.flush();
  if (!isFinished) {
    cmpOut.finish();
    isFinished = true;
  }
  fsOut.flush();
  hflushOrSync(this.fsOut);
}
compressor = CodecPool.getCompressor(codec, conf); cmpOut = codec.createOutputStream(fsOut, compressor); serializer = EventSerializerFactory.getInstance(serializerType, serializerContext, cmpOut); if (appending && !serializer.supportsReopen()) { cmpOut.close(); serializer = null; throw new IOException("serializer (" + serializerType
compressingStream.resetState(); compressingStream.write(buffer, offset, length); compressingStream.flush(); compressedStream.toByteArray(); name), e); compressingStream.close(); printBenchmarkResult(length, compressDurations, Manipulation.COMPRESSION);
/**
 * Closes this stream, guaranteeing cleanup in three stages.
 *
 * <p>The nested try/finally blocks ensure that even if {@code finish()}
 * throws, the underlying stream is still closed, and even if that close
 * throws, the pooled compressor is still returned. The first exception
 * raised propagates to the caller.
 *
 * @throws IOException if finishing or closing the underlying stream fails
 */
@Override
public void close() throws IOException {
  try {
    finish();
  } finally {
    try {
      out.close();
    } finally {
      if (trackedCompressor != null) {
        // Null the field so a repeated close() cannot return the
        // compressor to the pool twice.
        CodecPool.returnCompressor(trackedCompressor);
        trackedCompressor = null;
      }
    }
  }
}
/**
 * Idempotent close: delegates to the superclass exactly once; subsequent
 * calls are no-ops.
 *
 * @throws IOException if the superclass close fails
 */
@Override
public void close() throws IOException {
  if (closed) {
    return;
  }
  try {
    super.close();
  } finally {
    // Mark closed even when super.close() throws, so we never close twice.
    closed = true;
  }
}