@Override
public void flush() throws IOException {
  encoder.flush();
  buffered.flush();
}

public void close() throws IOException {
  encoder.flush();
  out.close();
}
/** {@inheritDoc} */
@Override
public void serialize(AvroWrapper<T> avroWrapper) throws IOException {
  mAvroDatumWriter.write(avroWrapper.datum(), mAvroEncoder);
  // This would be a lot faster if the Serializer interface had a flush()
  // method and the Hadoop framework called it when needed. For now, we'll
  // have to flush on every record.
  mAvroEncoder.flush();
}
@Override
public void encode(D datum, OutputStream stream) throws IOException {
  BinaryEncoder encoder = EncoderFactory.get()
      .directBinaryEncoder(stream, ENCODER.get());
  ENCODER.set(encoder);
  writer.write(datum, encoder);
  encoder.flush();
}
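The two-argument directBinaryEncoder(stream, reuse) call above reinitializes a previously cached encoder for the new stream when one is available. A minimal sketch of the cache this method appears to assume (the surrounding class is not shown in the source, so the declaration below is an assumption):

// Hypothetical declaration of the ENCODER cache used by encode(..) above:
// one cached BinaryEncoder per thread, handed back to the factory for reuse
// so each call avoids allocating a fresh encoder.
private static final ThreadLocal<BinaryEncoder> ENCODER = new ThreadLocal<>();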
/**
 * Calls {@linkplain #sync()} and then flushes the current state of the
 * file.
 */
@Override
public void flush() throws IOException {
  sync();
  vout.flush();
}
@Override
public synchronized byte[] encode(T message) {
  try {
    datumWriter.write(message, this.encoder);
    this.encoder.flush();
    return this.byteArrayOutputStream.toByteArray();
  } catch (Exception e) {
    throw new SchemaSerializationException(e);
  } finally {
    this.byteArrayOutputStream.reset();
  }
}
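encode(..) writes through a long-lived encoder bound to a reusable buffer, which is why the method is synchronized and resets the buffer in finally. A sketch of the field wiring this snippet assumes (the constructor-time setup is not in the source, so treat it as illustrative):

// Illustrative wiring for the fields used by encode(..) above.
private final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
// binaryEncoder(..) returns a buffered encoder tied to the stream; reusing it
// across calls is what makes the per-call flush() necessary.
private final BinaryEncoder encoder =
    EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);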
/**
 * Converts an object to a byte array.
 *
 * @param avroObject the Avro object
 * @return the byte[] result of the conversion
 * @throws IOException if an I/O error occurs during serialization
 */
public byte[] toByteArray(T avroObject) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  encoder = EncoderFactory.get().binaryEncoder(baos, encoder);
  avroWriter.write(avroObject, encoder);
  encoder.flush();
  return baos.toByteArray();
}
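A round-trip usage sketch for the method above, decoding with the standard Avro reader API (the converter, record, and schema names are illustrative, and the usual org.apache.avro.io and org.apache.avro.generic imports are assumed):

// Serialize, then decode the bytes back into a GenericRecord.
byte[] bytes = converter.toByteArray(record);
DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
GenericRecord roundTripped = reader.read(null, decoder);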
@Override
public Iterable<byte[]> convertRecord(String outputSchema, GenericRecord inputRecord, WorkUnitState workUnit)
    throws DataConversionException {
  try {
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bytesOut, encoderCache.get());
    encoderCache.set(encoder);
    writer.write(inputRecord, encoder);
    encoder.flush();
    return Collections.singleton(bytesOut.toByteArray());
  } catch (IOException e) {
    throw new DataConversionException("Error serializing record", e);
  }
}
}
@Override
public synchronized byte[] encode(GenericRecord message) {
  checkArgument(message instanceof GenericAvroRecord);
  GenericAvroRecord gar = (GenericAvroRecord) message;
  try {
    datumWriter.write(gar.getAvroRecord(), this.encoder);
    this.encoder.flush();
    return this.byteArrayOutputStream.toByteArray();
  } catch (Exception e) {
    throw new SchemaSerializationException(e);
  } finally {
    this.byteArrayOutputStream.reset();
  }
}
public void serialize(TetherData datum) throws IOException {
  encoder.writeBytes(datum.buffer());
  encoder.flush(); // Flush shouldn't be required. Might be a bug in Avro.
}
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  // Rebuild the writer and encoder only when the schema changes.
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
/**
 * Writes the given record using the specified schema.
 *
 * @param record record to serialize
 * @param schema schema to use for serialization
 * @return the serialized record
 * @throws IOException if serialization fails
 */
public static byte[] writeRecord(GenericRecord record, Schema schema) throws IOException {
  ByteArrayOutputStream stream = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(stream, null);
  new GenericDatumWriter<>(schema).write(record, encoder);
  encoder.flush();
  return stream.toByteArray();
}
}
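For symmetry, a hedged sketch of the matching read path using the standard Avro decoder API (readRecord is a hypothetical helper name, not from the source):

/**
 * Reads a record previously produced by writeRecord. Hypothetical helper,
 * shown for illustration; the decoder calls themselves are standard Avro API.
 */
public static GenericRecord readRecord(byte[] bytes, Schema schema) throws IOException {
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
}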
private void resetBufferTo(int size) throws IOException {
  // Truncate the in-memory buffer to its first `size` bytes.
  bufOut.flush();
  byte[] data = buffer.toByteArray();
  buffer.reset();
  buffer.write(data, 0, size);
}
void writeBlockTo(BinaryEncoder e, byte[] sync) throws IOException {
  e.writeLong(this.numEntries);
  e.writeLong(this.blockSize);
  e.writeFixed(this.data, offset, this.blockSize);
  e.writeFixed(sync);
  if (flushOnWrite) {
    e.flush();
  }
}
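This matches the Avro container-file block layout: a long record count, a long byte size, the (possibly compressed) block data, then the 16-byte sync marker. A minimal sketch of the corresponding read side, assuming `in` is a BinaryDecoder positioned at a block boundary (illustrative, not from the source):

// Read one container-file block with the standard decoder calls.
long numEntries = in.readLong();           // record count
long blockSize = in.readLong();            // size in bytes of the block data
byte[] data = new byte[(int) blockSize];
in.readFixed(data);                        // the (possibly compressed) records
byte[] syncMarker = new byte[16];
in.readFixed(syncMarker);                  // must match the file's sync marker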
private void writeBlock() throws IOException {
  if (blockCount > 0) {
    try {
      bufOut.flush();
      ByteBuffer uncompressed = buffer.getByteArrayAsByteBuffer();
      DataBlock block = new DataBlock(uncompressed, blockCount);
      block.setFlushOnWrite(flushOnEveryBlock);
      block.compressUsing(codec);
      block.writeBlockTo(vout, sync);
    } finally {
      // Reset even on failure so a broken block is not re-emitted with
      // stale data on the next call.
      buffer.reset();
      blockCount = 0;
    }
  }
}