Refine search
/**
 * Custom deserialization: reads a length-prefixed Avro binary payload and
 * decodes it with a cached {@code AvroReader} looked up by class name
 * (recordReaderMap appears to be a per-thread cache — TODO confirm).
 *
 * @param in stream positioned at the length-prefixed payload
 * @throws IOException if the payload cannot be read or decoded
 * @throws ClassNotFoundException declared by the Externalizable contract
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  int size = in.readInt();
  byte[] data = new byte[size];
  // Bug fix: in.read(data) may return fewer than size bytes (a short read),
  // leaving the tail of data zero-filled and corrupting the decode.
  // readFully blocks until the buffer is filled or throws EOFException.
  in.readFully(data);
  @SuppressWarnings("unchecked")
  AvroReader<T> reader = (AvroReader<T>) recordReaderMap.get().get(className);
  if (reader == null) {
    reader = new AvroReader<T>(
        new SpecificDatumReader<T>(clazz),
        DecoderFactory.get().binaryDecoder(data, null));
    recordReaderMap.get().put(className, reader);
  }
  // Reuse the cached decoder instance; binaryDecoder re-points it at data.
  BinaryDecoder recordDataDecoder = DecoderFactory.get().binaryDecoder(data, reader.getDecoder());
  avroObject = reader.getReader().read(null, recordDataDecoder);
}
/**
 * Forces Avro string fields to be materialized as {@link String} rather than
 * the default Utf8 representation.
 */
@Override
protected Object readString(Object old, Decoder in) throws IOException {
  // "old" is deliberately not forwarded: a fresh object is decoded each call,
  // then converted to an immutable String.
  Object decoded = super.readString(null, in);
  return decoded.toString();
}
/**
 * Deserializes an Avro binary payload into an instance of {@code T}.
 *
 * @param bytes Avro binary-encoded record
 * @return the decoded record
 * @throws SerializationException wrapping any underlying I/O failure
 */
public T toObject(byte[] bytes) {
  // DecoderFactory.defaultFactory()/createBinaryDecoder are deprecated in
  // Avro; get()/binaryDecoder are the supported, behaviorally equivalent API
  // (already used by sibling code in this codebase).
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  try {
    // Declared at point of use — the former "reader = null" pre-init was dead.
    SpecificDatumReader<T> reader = new SpecificDatumReader<T>(clazz);
    return reader.read(null, decoder);
  } catch (IOException e) {
    throw new SerializationException(e);
  }
}
}
/**
 * Externalizable hook: decodes this record's fields in place from the stream,
 * using the record's own schema.
 *
 * @param in stream containing the Avro-encoded record body
 * @throws IOException if decoding fails
 */
@Override
public void readExternal(ObjectInput in) throws IOException {
  // Diamond operator instead of the raw SpecificDatumReader type removes the
  // unchecked-conversion warning; runtime behavior is identical.
  new SpecificDatumReader<>(getSchema()).read(this, SpecificData.getDecoder(in));
}
/**
 * Kafka Deserializer lifecycle hook: pre-allocates the reusable binary
 * decoder, the AvroJobSpec datum reader, and the schema-version reader.
 * The configs map and isKey flag are not consulted.
 */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
  // Seed the reusable BinaryDecoder with an empty stream; each deserialize()
  // call re-points it at the real payload.
  InputStream emptyStream = new ByteArrayInputStream(new byte[0]);
  _decoder = DecoderFactory.get().binaryDecoder(emptyStream, null);
  _reader = new SpecificDatumReader<AvroJobSpec>(AvroJobSpec.SCHEMA$);
  _versionWriter = new FixedSchemaVersionWriter();
}
/**
 * Creates an object from an Avro binary byte array.
 *
 * The previously used decoder instance (the {@code decoder} field) is handed
 * back to the factory for reuse, avoiding a fresh allocation per call.
 * NOTE(review): because the decoder is cached in a field, this method does
 * not look thread-safe — confirm callers serialize access.
 *
 * @param data the Avro binary-encoded data
 * @param reuse object to reuse as the decode target (may be null)
 * @return the result of conversion
 * @throws IOException Signals that an I/O exception has occurred.
 */
public T fromByteArray(byte[] data, T reuse) throws IOException {
  decoder = DecoderFactory.get().binaryDecoder(data, decoder);
  return avroReader.read(reuse, decoder);
}
}
/**
 * Parses a {@link org.apache.gobblin.metrics.GobblinTrackingEvent} from a byte
 * array representing a json input. (The javadoc previously said MetricReport;
 * the code reads GobblinTrackingEvent.SCHEMA$ — this doc is corrected to match.)
 *
 * Wire format: a 4-byte int schema version, then the JSON-encoded record.
 *
 * @param reuse GobblinTrackingEvent to reuse as the decode target.
 * @param bytes Input bytes.
 * @return GobblinTrackingEvent.
 * @throws java.io.IOException if the version is unrecognized or decoding fails
 */
public synchronized static GobblinTrackingEvent deserializeReportFromJson(GobblinTrackingEvent reuse, byte[] bytes) throws IOException {
  // Lazily build the reader once; "synchronized" guards this check-then-set.
  if (!reader.isPresent()) {
    reader = Optional.of(new SpecificDatumReader<>(GobblinTrackingEvent.class));
  }
  Closer closer = Closer.create();
  try {
    DataInputStream inputStream = closer.register(new DataInputStream(new ByteArrayInputStream(bytes)));
    // Check version byte
    int versionNumber = inputStream.readInt();
    if (versionNumber != SCHEMA_VERSION) {
      // NOTE(review): this message says "MetricReport" while decoding a
      // GobblinTrackingEvent — likely copy-paste; left unchanged here because
      // callers/tests may match on the exact text. Worth a follow-up.
      throw new IOException(String
          .format("MetricReport schema version not recognized. Found version %d, expected %d.", versionNumber,
              SCHEMA_VERSION));
    }
    // Decode the rest
    Decoder decoder = DecoderFactory.get().jsonDecoder(GobblinTrackingEvent.SCHEMA$, inputStream);
    return reader.get().read(reuse, decoder);
  } catch(Throwable t) {
    // Closer.rethrow records the failure so close() doesn't mask it.
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
/** Builds a converter bound to the given Avro schema. */
AvroBytesConverter(Schema schema) {
  dataReader = new SpecificDatumReader<>(schema);
  this.schema = schema;
}
/**
 * Decodes an Avro-encoded gRPC request payload into the method's positional
 * argument array, one element per field of the request schema.
 */
@Override
public Object[] parse(InputStream stream) {
  try {
    BinaryDecoder in = DECODER_FACTORY.binaryDecoder(stream, null);
    Schema requestSchema = message.getRequest();
    GenericRecord requestRecord =
        (GenericRecord) new SpecificDatumReader<>(requestSchema).read(null, in);
    Object[] args = new Object[requestSchema.getFields().size()];
    // Arguments are ordered by the schema's field order.
    for (int i = 0; i < args.length; i++) {
      args[i] = requestRecord.get(requestSchema.getFields().get(i).name());
    }
    return args;
  } catch (IOException e) {
    throw Status.INTERNAL.withCause(e)
        .withDescription("Error deserializing avro request arguments").asRuntimeException();
  } finally {
    // Always drain/close the gRPC input stream, even on failure.
    AvroGrpcUtils.skipAndCloseQuietly(stream);
  }
}
/**
 * Converts a raw Kafka record value into a Flume Event.
 *
 * @param value raw message bytes
 * @param parseAsFlumeEvent true if the bytes are an Avro-encoded
 *        AvroFlumeEvent; false to wrap them as an opaque event body
 * @return the resulting event
 * @throws IOException if Avro decoding fails
 */
private Event deserializeValue(byte[] value, boolean parseAsFlumeEvent) throws IOException {
  Event e;
  if (parseAsFlumeEvent) {
    ByteArrayInputStream in = new ByteArrayInputStream(value);
    // Reuse the cached decoder and lazily-built reader across calls.
    decoder = DecoderFactory.get().directBinaryDecoder(in, decoder);
    if (!reader.isPresent()) {
      reader = Optional.of(
          new SpecificDatumReader<AvroFlumeEvent>(AvroFlumeEvent.class));
    }
    AvroFlumeEvent event = reader.get().read(null, decoder);
    e = EventBuilder.withBody(event.getBody().array(),
        toStringMap(event.getHeaders()));
  } else {
    // Typed emptyMap() replaces the raw Collections.EMPTY_MAP constant,
    // eliminating an unchecked-assignment warning; same (immutable) value.
    e = EventBuilder.withBody(value, Collections.<String, String>emptyMap());
  }
  return e;
}
}
/**
 * Decode json data.
 *
 * @param data the JSON-encoded record text
 * @param reuse instance to populate and return, or null to allocate a new one
 * @return the decoded object
 * @throws IOException Signals that an I/O exception has occurred.
 */
public T decodeJson(String data, T reuse) throws IOException {
  // Reuse the cached jsonDecoder instance (third arg "true" per this class's
  // existing convention — presumably the validating/reuse flag; confirm
  // against the DecoderFactory overload in use).
  jsonDecoder = DecoderFactory.get().jsonDecoder(this.schema, data, true);
  // Bug fix: "reuse" was accepted but ignored (read(null, ...)), defeating
  // the instance-recycling contract implied by the signature — pass it through.
  return avroReader.read(reuse, jsonDecoder);
}
/**
 * Decodes an Avro binary payload into a specific record using the supplied
 * datum reader. I/O failures are rethrown unchecked with the cause attached.
 */
private <T extends SpecificRecordBase> T deserialize(byte[] bytes, SpecificDatumReader<T> datumReader) {
  try {
    return datumReader.read(
        null,
        DecoderFactory.get().directBinaryDecoder(new ByteArrayInputStream(bytes), null));
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
}
public static Optional<Check> fromJsonAvro(String event) { InputStream is = new ByteArrayInputStream(event.getBytes()); DataInputStream din = new DataInputStream(is); try { Decoder decoder = DecoderFactory.get().jsonDecoder(schema, din); DatumReader<Check> reader = new SpecificDatumReader<Check>(schema); return Optional.of(reader.read(null, decoder)); // c } catch (IOException | AvroTypeException e) { System.out.println("Error deserializing:" + e.getMessage()); return Optional.empty(); } }
/**
 * Decodes and processes {@code count} records from the buffer, dispatching to
 * map() or reduce() depending on this task's type. Record instances are
 * recycled across iterations (inRecord / midRecord / midRecordSpare) to avoid
 * per-record allocation.
 *
 * NOTE(review): data.array() assumes an array-backed (non-direct) ByteBuffer
 * and ignores position/limit — confirm the caller's buffer contract.
 * Any failure is reported via fail(...) rather than propagated.
 */
void input(ByteBuffer data, long count) {
  try {
    // Reuse the previous decoder instance, re-pointed at this buffer's array.
    decoder = decoderFactory.binaryDecoder(data.array(), decoder);
    for (long i = 0; i < count; i++) {
      switch (taskType) {
      case MAP:
        inRecord = inReader.read(inRecord, decoder);
        map(inRecord, midCollector);
        break;
      case REDUCE:
        // Group-by detection: when the freshly read key differs from the
        // previous one, flush the completed group before reducing the new one.
        MID prev = midRecord;
        midRecord = midReader.read(midRecordSpare, decoder);
        if (prev != null && !midRecord.equals(prev))
          reduceFlush(prev, outCollector);
        reduce(midRecord, outCollector);
        // The displaced record becomes the spare decode target for next read.
        midRecordSpare = prev;
        break;
      }
    }
  } catch (Throwable e) {
    LOG.warn("failing: "+e, e);
    fail(e.toString());
  }
}
/**
 * Reads {@code count} records from the decoder, decoding each into the same
 * reusable instance (the values themselves are not retained).
 */
@Override
void readInternal(Decoder d) throws IOException {
  int done = 0;
  while (done < count) {
    reader.read(reuse, d);
    done++;
  }
}

@Override
/**
 * Custom Java deserialization hook: restores the transient Avro runtime state
 * (type info, schema, reusable record, datum reader, input stream, decoder)
 * from the serialized record class and schema string.
 *
 * Field initialization order matters: schema must be parsed before the reuse
 * record and datum reader are built, and the decoder wraps the freshly
 * created mutable input stream.
 *
 * @param inputStream the Java serialization stream
 * @throws ClassNotFoundException if the record class cannot be resolved
 * @throws IOException on stream read failure
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream inputStream) throws ClassNotFoundException, IOException {
  recordClazz = (Class<? extends SpecificRecord>) inputStream.readObject();
  schemaString = inputStream.readUTF();
  typeInfo = (RowTypeInfo) AvroSchemaConverter.<Row>convertToTypeInfo(schemaString);
  schema = new Schema.Parser().parse(schemaString);
  if (recordClazz != null) {
    // Specific record class available: build a typed reuse instance.
    record = (SpecificRecord) SpecificData.newInstance(recordClazz, schema);
  } else {
    // Fall back to a generic record shaped by the schema alone.
    record = new GenericData.Record(schema);
  }
  datumReader = new SpecificDatumReader<>(schema);
  this.inputStream = new MutableByteArrayInputStream();
  decoder = DecoderFactory.get().binaryDecoder(this.inputStream, null);
}
}
/**
 * Kafka Deserializer: strips the schema-version header from the payload, then
 * decodes the remaining bytes as an AvroJobSpec.
 *
 * @param topic Kafka topic (unused)
 * @param data serialized message: version header followed by Avro binary
 * @return the decoded job spec
 * @throws RuntimeException if the message cannot be decoded
 */
@Override
public AvroJobSpec deserialize(String topic, byte[] data) {
  try (InputStream is = new ByteArrayInputStream(data)) {
    _versionWriter.readSchemaVersioningInformation(new DataInputStream(is));
    // Reuse the decoder pre-allocated in configure().
    Decoder decoder = DecoderFactory.get().binaryDecoder(is, _decoder);
    return _reader.read(null, decoder);
  } catch (IOException e) {
    // Bug fix: the original discarded the cause, making failures undebuggable;
    // chain it so the stack trace survives.
    throw new RuntimeException("Could not decode message", e);
  }
}
throws IOException { if (!READER.isPresent()) { READER = Optional.of(new SpecificDatumReader<>(MetricReport.class)); Decoder decoder = DecoderFactory.get().jsonDecoder(MetricReport.SCHEMA$, inputStream); return READER.get().read(reuse, decoder); } catch (Throwable t) { throw closer.rethrow(t);
// Per-thread SpecificDatumReader bound to the enclosing monitor's schema —
// presumably because Avro datum readers are not safe to share across threads
// (TODO confirm); each thread lazily receives its own instance.
@Override
protected SpecificDatumReader<T> initialValue() {
  return new SpecificDatumReader<>(KafkaAvroJobMonitor.this.schema);
}
};
/**
 * Parses an Avro-RPC response frame: a flag byte (false = normal response,
 * true = error), followed by the Avro-encoded payload.
 *
 * @param stream gRPC response stream; always drained and closed
 * @return the decoded response, a thrown-able error value, or null for
 *         one-way messages
 */
@Override
public Object parse(InputStream stream) {
  try {
    if (message.isOneWay()) return null;
    BinaryDecoder in = DECODER_FACTORY.binaryDecoder(stream, null);
    if (!in.readBoolean()) {
      // Normal response. Diamond operator replaces the raw
      // SpecificDatumReader type (silences unchecked warnings; no behavior change).
      Object response = new SpecificDatumReader<>(message.getResponse()).read(null, in);
      return response;
    } else {
      Object value = new SpecificDatumReader<>(message.getErrors()).read(null, in);
      if (value instanceof Exception) {
        return value;
      }
      // Non-exception error payloads are wrapped so callers can still throw.
      return new AvroRuntimeException(value.toString());
    }
  } catch (IOException e) {
    throw Status.INTERNAL.withCause(e).
        withDescription("Error deserializing avro response").asRuntimeException();
  } finally {
    AvroGrpcUtils.skipAndCloseQuietly(stream);
  }
}