/**
 * Writes one record by appending it to the underlying Avro {@code dataFileWriter}.
 *
 * @param record the record to append
 * @throws IOException if the underlying writer fails to append
 */
@Override
public void writeRecord(E record) throws IOException {
  dataFileWriter.append(record);
}
/**
 * Appends this record to the underlying {@link DataFileWriter}.
 *
 * {@inheritDoc}
 *
 * @see org.apache.gobblin.audit.values.sink.AuditSink#write(org.apache.avro.generic.GenericRecord)
 */
@Override
public void write(GenericRecord record) throws IOException {
  writer.append(record);
}
@Override public void write(GenericRecord record) throws IOException { if (skipNullRecord && record == null) { return; } Preconditions.checkNotNull(record); this.writer.append(record); // Only increment when write is successful this.count.incrementAndGet(); }
/**
 * Appends the Avro record wrapped in the given {@link Writable} to the data file writer.
 *
 * @param writable must be an {@code AvroGenericRecordWritable}
 * @throws IOException if the writable has an unexpected type or the append fails
 */
@Override
public void write(Writable writable) throws IOException {
  if (!(writable instanceof AvroGenericRecordWritable)) {
    // Fixed: the original message read "but received<ClassName>" with no separating space.
    throw new IOException("Expecting instance of AvroGenericRecordWritable, "
        + "but received " + writable.getClass().getCanonicalName());
  }
  AvroGenericRecordWritable r = (AvroGenericRecordWritable) writable;
  dfw.append(r.getRecord());
}
/**
 * Converts the incoming event to the destination record type and appends it
 * to the Avro data file writer.
 *
 * @param event the event to convert and write
 * @throws IOException if the append fails
 */
@Override
public void write(Event event) throws IOException {
  dataFileWriter.append(convert(event));
}
/**
 * Converts the record to an Avro {@code GenericRecord} using the configured schema
 * and appends it to the data file writer.
 *
 * @param record the record to write
 * @return an empty attribute map (this writer produces no per-record attributes)
 * @throws IOException if conversion or the append fails
 */
@Override
public Map<String, String> writeRecord(final Record record) throws IOException {
  dataFileWriter.append(AvroTypeUtil.createAvroRecord(record, schema));
  return Collections.emptyMap();
}
/**
 * Serializes the given field values as a single-record Avro data file.
 *
 * @param schema the Avro schema describing the record
 * @param values field name to value mappings copied into the record
 * @return the Avro container-file bytes
 * @throws UncheckedIOException if serialization fails
 */
private static byte[] convertRecordToAvro(Schema schema, Map<String, Object> values) {
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  GenericData.Record record = new GenericData.Record(schema);
  values.forEach(record::put);
  try (DataFileWriter<GenericRecord> dataFileWriter =
      new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
    dataFileWriter.create(schema, outputStream);
    dataFileWriter.append(record);
    // Fixed: removed the redundant explicit close() — try-with-resources already
    // closes (and flushes) the writer, so the original closed it twice.
  } catch (IOException e) {
    throw new UncheckedIOException("Failed to convert to Avro.", e);
  }
  return outputStream.toByteArray();
}
/**
 * Generates the next record — one generated value per configured column —
 * and appends it to the record writer.
 *
 * @throws IOException if the append fails
 */
public void writeNext() throws IOException {
  GenericData.Record nextRecord = new GenericData.Record(_avroSchema);
  // Iterate entries via forEach instead of keySet() + get(): one map traversal
  // rather than an extra lookup per column.
  _generatorMap.forEach((column, generator) -> nextRecord.put(column, generator.next()));
  _recordWriter.append(nextRecord);
}
/**
 * Serializes the given documents into an Avro container file held in the reusable
 * serialization buffer.
 *
 * <p>NOTE(review): the writer field is reused across calls via create()/close();
 * if append() throws, the writer is left open — presumably handled by the caller.
 *
 * @param avroSchema schema for the records being written
 * @param docList records to serialize
 * @return the serialized Avro bytes
 * @throws IOException if writing fails
 */
private byte[] serializeToAvro(Schema avroSchema, List<Record> docList) throws IOException {
  serializationBuffer.reset();
  dataFileWriter.create(avroSchema, serializationBuffer);
  for (Record doc : docList) {
    dataFileWriter.append(doc);
  }
  dataFileWriter.close();
  return serializationBuffer.toByteArray();
}
/**
 * Decodes the message value via the schema registry and appends the resulting
 * record to the writer. Messages that decode to null are skipped.
 *
 * @param keyValue the raw key/value pair to decode and write
 * @throws IOException if the append fails
 */
@Override
public void write(KeyValue keyValue) throws IOException {
  GenericRecord record = schemaRegistryClient.decodeMessage(topic, keyValue.getValue());
  LOG.trace("Writing record {}", record);
  if (record == null) {
    // Nothing decodable for this message — skip it.
    return;
  }
  writer.append(record);
}
/**
 * Stores the key and value in the reusable output record and appends it to the
 * Avro file writer.
 *
 * @param key the key to write
 * @param value the value to write
 * @throws IOException if the append fails
 */
void write(K key, V value) throws IOException { mOutputRecord.setKey(key); mOutputRecord.setValue(value); mAvroFileWriter.append(mOutputRecord.get()); }
/**
 * Appends the tuple's first value (an Avro {@code GenericRecord}) to the writer,
 * records the current output position, and asks the rotation policy whether a
 * file rotation is now due.
 *
 * @param tuple the tuple whose field 0 carries the Avro record
 * @throws IOException if the append or position query fails
 */
@Override
protected void doWrite(Tuple tuple) throws IOException {
  avroWriter.append((GenericRecord) tuple.getValue(0));
  offset = out.getPos();
  needsRotation = rotationPolicy.mark(tuple, offset);
}
/**
 * Persists the given position: updates the cached metadata record, appends it to
 * the writer, then syncs and flushes so the position survives a crash.
 *
 * <p>Synchronized because the metadata cache and writer are shared mutable state.
 * NOTE(review): the append → sync → flush ordering looks deliberate (sync writes a
 * marker before flushing to the underlying stream) — confirm against the writer's
 * contract before reordering.
 *
 * @param position the offset to persist
 * @throws IOException if appending or flushing fails
 */
@Override public synchronized void storePosition(long position) throws IOException { metaCache.setOffset(position); writer.append(metaCache); writer.sync(); writer.flush(); }
/**
 * Builds an Avro record from the given values and serializes it into the supplied
 * output stream as a single-record Avro data file.
 *
 * @param schema the Avro schema describing the record
 * @param outputStream receives the serialized container-file bytes (side effect)
 * @param values field name to value mappings copied into the record
 * @return the populated record
 * @throws RuntimeException wrapping any {@link IOException} from serialization
 */
private static GenericData.Record buildAvroRecord(Schema schema, ByteArrayOutputStream outputStream, Map<String, Object> values) {
  GenericData.Record record = new GenericData.Record(schema);
  values.forEach(record::put);
  // Fixed: use try-with-resources so the writer is closed even when create()
  // or append() throws; the original leaked it on any failure.
  try (DataFileWriter<GenericRecord> dataFileWriter =
      new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
    dataFileWriter.create(schema, outputStream);
    dataFileWriter.append(record);
  } catch (IOException e) {
    throw new RuntimeException("Failed to convert to Avro.", e);
  }
  return record;
}
/**
 * Serializes a single record as an Avro data file and returns its bytes.
 *
 * @param schema the schema the record conforms to
 * @param record the record to serialize
 * @return the Avro container-file bytes
 * @throws Exception if writing fails
 */
private byte[] writeRecord(Schema schema, GenericData.Record record) throws Exception {
  ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
  GenericDatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<>(schema);
  // Fixed: try-with-resources instead of try/finally — same close-always guarantee,
  // but a close() failure no longer masks an exception thrown by create()/append()
  // (it is attached as a suppressed exception instead).
  try (DataFileWriter<GenericData.Record> writer = new DataFileWriter<>(datumWriter)) {
    writer.create(schema, byteStream);
    writer.append(record);
  }
  return byteStream.toByteArray();
}
/**
 * Serializes a single record (plus optional container metadata) as an Avro data
 * file into a fresh in-memory stream.
 *
 * @param schema the record schema
 * @param user2 the record to serialize
 * @param datumWriter the datum writer used for encoding
 * @param metadata optional container metadata entries; may be null
 * @return the stream holding the serialized Avro bytes
 * @throws IOException if writing fails
 */
private ByteArrayOutputStream serializeAvroRecord(Schema schema, GenericRecord user2, DatumWriter<GenericRecord> datumWriter, Map<String, String> metadata) throws IOException {
  ByteArrayOutputStream out2 = new ByteArrayOutputStream();
  // Fixed: try-with-resources so the writer is closed even when setMeta/create/append
  // throws; the original leaked it on any failure.
  try (DataFileWriter<GenericRecord> dataFileWriter2 = new DataFileWriter<>(datumWriter)) {
    if (metadata != null) {
      metadata.forEach(dataFileWriter2::setMeta);
    }
    dataFileWriter2.create(schema, out2);
    dataFileWriter2.append(user2);
  }
  return out2;
}
/**
 * Converts the key and value with the configured converters, stores the results in
 * the reusable output record, and appends it to the Avro file writer.
 *
 * {@inheritDoc}
 */
@Override public void write(K key, V value) throws IOException { mOutputRecord.setKey(mKeyConverter.convert(key)); mOutputRecord.setValue(mValueConverter.convert(value)); mAvroFileWriter.append(mOutputRecord.get()); }
/**
 * Writes the given record {@code count} times into a new Avro data file.
 *
 * @param file destination file (overwritten)
 * @param r the record to repeat
 * @param count number of copies to write
 * @throws IOException if creating or writing the file fails
 */
public void createAvroFileWithRepeatingRecords(File file, GenericRecord r, int count) throws IOException {
  // Fixed: try-with-resources — the original leaked the writer (and the underlying
  // FileOutputStream, which the writer closes) if any append failed.
  try (DataFileWriter<GenericRecord> writer =
      new DataFileWriter<>(new GenericDatumWriter<GenericRecord>())) {
    writer.create(getSchema(), new FileOutputStream(file));
    for (int i = 0; i < count; ++i) {
      writer.append(r);
    }
  }
}
/**
 * Recreates the lines fixture file: deletes the target directory, ensures the
 * parent directory exists, and writes every line in {@code LINES} as a string
 * record in a new Avro data file.
 *
 * @throws IOException if deleting, creating directories, or writing fails
 */
public void writeLinesFile() throws IOException {
  FileUtil.fullyDelete(dir);
  linesFiles.getParentFile().mkdirs();
  DatumWriter<String> writer = new GenericDatumWriter<>();
  // Fixed: try-with-resources — the original leaked the writer if create() or
  // any append() threw.
  try (DataFileWriter<String> out = new DataFileWriter<>(writer)) {
    out.create(Schema.create(Schema.Type.STRING), linesFiles);
    for (String line : LINES) {
      out.append(line);
    }
  }
}
/**
 * Writes every line in {@code LINES} as a {@code Utf8} string record into a new
 * Avro data file at the given location.
 *
 * @param dir the destination file for the Avro container
 * @throws IOException if creating or writing the file fails
 */
public static void writeLinesFile(File dir) throws IOException {
  try (DataFileWriter<Utf8> out = new DataFileWriter<>(new GenericDatumWriter<Utf8>())) {
    out.create(Schema.create(Schema.Type.STRING), dir);
    for (String line : LINES) {
      out.append(new Utf8(line));
    }
  }
}