/** Open a new file for data matching a schema with an explicit sync. */ public DataFileWriter<D> create(Schema schema, OutputStream outs, byte[] sync) throws IOException { assertNotOpen(); this.schema = schema; setMetaInternal(DataFileConstants.SCHEMA, schema.toString()); if (sync == null ) { this.sync = generateSync(); } else if (sync.length == 16) { this.sync = sync; } else { throw new IOException("sync must be exactly 16 bytes"); } init(outs); vout.writeFixed(DataFileConstants.MAGIC); // write magic vout.writeMapStart(); // write metadata vout.setItemCount(meta.size()); for (Map.Entry<String,byte[]> entry : meta.entrySet()) { vout.startItem(); vout.writeString(entry.getKey()); vout.writeBytes(entry.getValue()); } vout.writeMapEnd(); vout.writeFixed(this.sync); // write initial sync vout.flush(); //vout may be buffered, flush before writing to out return this; }
/** Open a new file for data matching a schema with an explicit sync. */ public DataFileWriter<D> create(Schema schema, OutputStream outs, byte[] sync) throws IOException { assertNotOpen(); this.schema = schema; setMetaInternal(DataFileConstants.SCHEMA, schema.toString()); if (sync == null ) { this.sync = generateSync(); } else if (sync.length == 16) { this.sync = sync; } else { throw new IOException("sync must be exactly 16 bytes"); } init(outs); vout.writeFixed(DataFileConstants.MAGIC); // write magic vout.writeMapStart(); // write metadata vout.setItemCount(meta.size()); for (Map.Entry<String,byte[]> entry : meta.entrySet()) { vout.startItem(); vout.writeString(entry.getKey()); vout.writeBytes(entry.getValue()); } vout.writeMapEnd(); vout.writeFixed(this.sync); // write initial sync vout.flush(); //vout may be buffered, flush before writing to out return this; }
/**
 * Gets the request data, generating it first if necessary.
 * @return the request data.
 * @throws Exception if an error occurs generating the request data.
 */
public List<ByteBuffer> getBytes() throws Exception {
  if (requestBytes == null) {
    ByteBufferOutputStream bbo = new ByteBufferOutputStream();
    BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(bbo, encoder);
    // use local protocol to write request
    Message m = getMessage();
    context.setMessage(m);
    writeRequest(m.getRequest(), request, out); // write request payload
    out.flush();
    // Capture the payload buffers now; bbo is then reused below for the framing
    // (handshake + call metadata + message name) that must precede the payload.
    List<ByteBuffer> payload = bbo.getBufferList();
    writeHandshake(out); // prepend handshake if needed
    context.setRequestPayload(payload);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.clientSendRequest(context); // get meta-data from plugins
    }
    META_WRITER.write(context.requestCallMeta(), out);
    out.writeString(m.getName()); // write message name
    out.flush();
    // Re-append the payload after the framing; presumably the final buffer
    // order is [handshake?][metadata][message name][payload] — order of the
    // statements above is load-bearing, do not reorder.
    bbo.append(payload);
    requestBytes = bbo.getBufferList();
  }
  return requestBytes;
}
}
/**
 * Serializes {@code event} into a Netty buffer with the layout:
 * [version byte 2][collection name][Avro-encoded properties record].
 *
 * @return a newly allocated buffer owned by the caller (caller must release it).
 * @throws RuntimeException wrapping any serialization failure.
 */
private ByteBuf getBuffer(Event event) {
  DatumWriter writer = new FilteredRecordWriter(event.properties().getSchema(), GenericData.get());
  ByteBuf buffer = DEFAULT.buffer(100);
  buffer.writeByte(2); // format/version marker
  BinaryEncoder encoder = EncoderFactory.get()
      .directBinaryEncoder(new ByteBufOutputStream(buffer), null);
  try {
    encoder.writeString(event.collection());
    writer.write(event.properties(), encoder);
  } catch (Exception e) {
    // ByteBuf is reference-counted: release it on failure or it leaks.
    buffer.release();
    throw new RuntimeException("Couldn't serialize event", e);
  }
  return buffer;
}
}
// Writes the frame prefix: project name, then collection name.
encoder.writeString(project);
// NOTE(review): the writer is constructed between the two writeString calls;
// assuming FilteredRecordWriter's constructor does not touch the encoder,
// this ordering only affects readability — confirm before reordering.
DatumWriter writer = new FilteredRecordWriter(avroSchema, data);
encoder.writeString(collectionName);
/**
 * Writes the collected samples to {@code file} as an Avro binary map of
 * label -> array of {@code StackSampleElement} records.
 *
 * @param file destination file (created/truncated).
 * @param collected labeled sample trees to persist.
 * @throws IOException on any write failure.
 */
public static void saveLabeledDumps(final File file, final Map<String, SampleNode> collected) throws IOException {
  try (BufferedOutputStream out = new BufferedOutputStream(Files.newOutputStream(file.toPath()))) {
    final SpecificDatumWriter<StackSampleElement> datumWriter =
        new SpecificDatumWriter<>(StackSampleElement.SCHEMA$);
    final BinaryEncoder enc = EncoderFactory.get().directBinaryEncoder(out, null);
    enc.writeMapStart();
    enc.setItemCount(collected.size());
    for (final Map.Entry<String, SampleNode> labeled : collected.entrySet()) {
      enc.startItem();
      enc.writeString(labeled.getKey());
      enc.writeArrayStart();
      // The converter emits elements one at a time; each is written as a
      // single-item chunk of the open Avro array.
      Converter.convert(Methods.ROOT, labeled.getValue(), -1, 0,
          (final StackSampleElement sample, final long deadline) -> {
            enc.setItemCount(1L);
            enc.startItem();
            datumWriter.write(sample, enc);
          });
      enc.writeArrayEnd();
    }
    enc.writeMapEnd();
    enc.flush();
  }
}
/** Open a new file for data matching a schema. */ public DataFileWriter<D> create(Schema schema, OutputStream outs) throws IOException { assertNotOpen(); this.schema = schema; setMetaInternal(DataFileConstants.SCHEMA, schema.toString()); this.sync = generateSync(); init(outs); vout.writeFixed(DataFileConstants.MAGIC); // write magic vout.writeMapStart(); // write metadata vout.setItemCount(meta.size()); for (Map.Entry<String,byte[]> entry : meta.entrySet()) { vout.startItem(); vout.writeString(entry.getKey()); vout.writeBytes(entry.getValue()); } vout.writeMapEnd(); vout.writeFixed(sync); // write initial sync vout.flush(); //vout may be buffered, flush before writing to out return this; }
/** Open a new file for data matching a schema. */ public DataFileWriter<D> create(Schema schema, OutputStream outs) throws IOException { assertNotOpen(); this.schema = schema; setMetaInternal(DataFileConstants.SCHEMA, schema.toString()); this.sync = generateSync(); init(outs); out.write(DataFileConstants.MAGIC); // write magic vout.writeMapStart(); // write metadata vout.setItemCount(meta.size()); for (Map.Entry<String,byte[]> entry : meta.entrySet()) { vout.startItem(); vout.writeString(entry.getKey()); vout.writeBytes(entry.getValue()); } vout.writeMapEnd(); vout.flush(); //vout may be buffered, flush before writing to out out.write(sync); // write initial sync return this; }
/** Open a new file for data matching a schema with an explicit sync. */ public DataFileWriter<D> create(Schema schema, OutputStream outs, byte[] sync) throws IOException { assertNotOpen(); this.schema = schema; setMetaInternal(DataFileConstants.SCHEMA, schema.toString()); if (sync == null ) { this.sync = generateSync(); } else if (sync.length == 16) { this.sync = sync; } else { throw new IOException("sync must be exactly 16 bytes"); } init(outs); vout.writeFixed(DataFileConstants.MAGIC); // write magic vout.writeMapStart(); // write metadata vout.setItemCount(meta.size()); for (Map.Entry<String,byte[]> entry : meta.entrySet()) { vout.startItem(); vout.writeString(entry.getKey()); vout.writeBytes(entry.getValue()); } vout.writeMapEnd(); vout.writeFixed(this.sync); // write initial sync vout.flush(); //vout may be buffered, flush before writing to out return this; }
/**
 * Gets the request data, generating it first if necessary.
 * @return the request data.
 * @throws Exception if an error occurs generating the request data.
 */
public List<ByteBuffer> getBytes() throws Exception {
  if (requestBytes == null) {
    ByteBufferOutputStream bbo = new ByteBufferOutputStream();
    BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(bbo, encoder);
    // use local protocol to write request
    Message m = getMessage();
    context.setMessage(m);
    writeRequest(m.getRequest(), request, out); // write request payload
    out.flush();
    // Capture the payload buffers now; bbo is then reused below for the framing
    // (handshake + call metadata + message name) that must precede the payload.
    List<ByteBuffer> payload = bbo.getBufferList();
    writeHandshake(out); // prepend handshake if needed
    context.setRequestPayload(payload);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.clientSendRequest(context); // get meta-data from plugins
    }
    META_WRITER.write(context.requestCallMeta(), out);
    out.writeString(m.getName()); // write message name
    out.flush();
    // Re-append the payload after the framing; presumably the final buffer
    // order is [handshake?][metadata][message name][payload] — order of the
    // statements above is load-bearing, do not reorder.
    bbo.append(payload);
    requestBytes = bbo.getBufferList();
  }
  return requestBytes;
}
}