/** * Parse the event using the entity parser and write the entity to the dataset. * * @param event The event to write * @throws EventDeliveryException An error occurred trying to write to the dataset that couldn't or shouldn't be handled by the failure policy. */ @VisibleForTesting void write(Event event) throws EventDeliveryException { try { this.entity = parser.parse(event, reuseEntity ? entity : null); this.bytesParsed += event.getBody().length; // writeEncoded would be an optimization in some cases, but HBase // will not support it and partitioned Datasets need to get partition // info from the entity Object. We may be able to avoid the // serialization round-trip otherwise. writer.write(entity); } catch (NonRecoverableEventException ex) { failurePolicy.handle(event, ex); } catch (DataFileWriter.AppendWriteException ex) { failurePolicy.handle(event, ex); } catch (RuntimeException ex) { Throwables.propagateIfInstanceOf(ex, EventDeliveryException.class); throw new EventDeliveryException(ex); } }
/**
 * Saves the failed event (body and headers) to the failure dataset.
 *
 * @param event the Flume event that could not be delivered
 * @param cause the reason delivery failed (recorded by the caller, unused here)
 * @throws EventDeliveryException if writing to the failure dataset fails
 */
@Override
public void handle(Event event, Throwable cause) throws EventDeliveryException {
  try {
    if (writer == null) {
      // Open the failure writer lazily, on the first event handled.
      writer = dataset.newWriter();
    }
    AvroFlumeEvent saved = new AvroFlumeEvent();
    saved.setBody(ByteBuffer.wrap(event.getBody()));
    saved.setHeaders(toCharSeqMap(event.getHeaders()));
    writer.write(saved);
    nEventsHandled++;
  } catch (RuntimeException ex) {
    throw new EventDeliveryException(ex);
  }
}
/**
 * Streams Avro records from the incoming file into the target dataset,
 * verifying first that the file's schema is readable as the expected schema.
 *
 * @param in the Avro data file content
 * @throws IOException if reading the stream or writing records fails
 */
@Override
public void process(InputStream in) throws IOException {
  try (DataFileStream<Record> avroStream = new DataFileStream<>(
      in, AvroUtil.newDatumReader(schema, Record.class))) {
    IncompatibleSchemaException.check(
        SchemaValidationUtil.canRead(avroStream.getSchema(), schema),
        "Incompatible file schema %s, expected %s",
        avroStream.getSchema(), schema);
    long count = 0L;
    try (DatasetWriter<Record> writer = target.newWriter()) {
      for (Record rec : avroStream) {
        writer.write(rec);
        count += 1;
      }
    } finally {
      // Records already written cannot be rolled back, so the counter is
      // adjusted even when the copy fails part-way through.
      session.adjustCounter("Stored records", count,
          true /* cannot roll back the write */);
    }
  }
}
});
/**
 * Writes the key entity to the underlying dataset writer, copying it first
 * when record reuse by the caller would otherwise corrupt the output.
 */
@Override
public void write(E key, Void v) {
  E toWrite = copyRecords ? copy(key) : key;
  datasetWriter.write(toWrite);
}
/**
 * Writes the key entity to the underlying dataset writer. When configured,
 * a defensive copy is written instead of the (possibly reused) original.
 */
@Override
public void write(E key, Void v) {
  E record = key;
  if (copyRecords) {
    record = copy(record);
  }
  datasetWriter.write(record);
}
/**
 * Writes a single event to the range view, closing the writer afterwards.
 * Close failures are propagated (not swallowed) by Closeables.close.
 */
@Override
public Void call() throws IOException {
  DatasetWriter<StandardEvent> rangeWriter = null;
  try {
    rangeWriter = range.newWriter();
    rangeWriter.write(event);
  } finally {
    Closeables.close(rangeWriter, false);
  }
  return null;
}
});
/**
 * Attempts to write the September event through the range view.
 * The writer is always closed; close errors propagate (swallow=false).
 */
@Override
public Void call() throws IOException {
  DatasetWriter<StandardEvent> w = null;
  try {
    w = range.newWriter();
    w.write(sepEvent);
  } finally {
    Closeables.close(w, false);
  }
  return null;
}
});
/**
 * Attempts to write the October event through the view under test.
 * The writer is always closed; close errors propagate (swallow=false).
 */
@Override
public Void call() throws IOException {
  DatasetWriter<StandardEvent> viewWriter = null;
  try {
    viewWriter = view.newWriter();
    viewWriter.write(octEvent);
  } finally {
    Closeables.close(viewWriter, false);
  }
  return null;
}
});
/**
 * Lazily creates the underlying writer on first use, then converts and
 * writes the entity and resets the idle timeout.
 *
 * @param entity the entity to convert and write
 * @throws IOException if creating the writer or writing fails
 */
@Override
public void write(T entity) throws IOException {
  // Double-checked locking so only one thread creates the writer.
  // NOTE(review): for this pattern to be safe, the `writer` field should be
  // declared volatile — the field declaration is not visible here; confirm.
  if (writer == null) {
    synchronized (lock) {
      if (writer == null) {
        writer = createWriter();
      }
    }
  }
  writer.write(convertEntity(entity));
  // Presumably marks this writer as active so an idle-timeout mechanism
  // does not close it — verify against resetIdleTimeout's definition.
  resetIdleTimeout();
}
/**
 * Seeds the input dataset with a fixed set of string records.
 *
 * <p>Fix: the writer previously leaked if any {@code write} threw; it is
 * now closed in a finally block.
 */
private void populateInputDataset() {
  DatasetWriter<GenericData.Record> writer = inputDataset.newWriter();
  try {
    writer.write(newStringRecord("apple"));
    writer.write(newStringRecord("banana"));
    writer.write(newStringRecord("banana"));
    writer.write(newStringRecord("carrot"));
    writer.write(newStringRecord("apple"));
    writer.write(newStringRecord("apple"));
  } finally {
    writer.close();
  }
}
/**
 * Writes the shared USER record to the given view, closing the writer when
 * done. If opening the writer fails there is nothing to close.
 */
public void writeUserToView(View<GenericRecord> dataset) {
  DatasetWriter<GenericRecord> writer = dataset.newWriter();
  try {
    writer.write(USER);
  } finally {
    writer.close();
  }
}
/**
 * Persists a failed event (body plus headers) into the failure dataset.
 *
 * @param event the event that failed delivery
 * @param cause why delivery failed (not persisted here)
 * @throws EventDeliveryException if the failure dataset cannot be written
 */
@Override
public void handle(Event event, Throwable cause) throws EventDeliveryException {
  try {
    if (writer == null) {
      writer = dataset.newWriter(); // opened lazily on first failure
    }
    final AvroFlumeEvent failed = new AvroFlumeEvent();
    failed.setBody(ByteBuffer.wrap(event.getBody()));
    failed.setHeaders(toCharSeqMap(event.getHeaders()));
    writer.write(failed);
    nEventsHandled += 1;
  } catch (RuntimeException ex) {
    // Surface any runtime failure as the single checked type callers expect.
    throw new EventDeliveryException(ex);
  }
}
/**
 * Seeds the output dataset with a single stats record.
 *
 * <p>Fix: the writer previously leaked if {@code write} threw; it is now
 * closed in a finally block.
 */
private void populateOutputDataset() {
  DatasetWriter<GenericData.Record> writer = outputDataset.newWriter();
  try {
    writer.write(newStatsRecord(4, "date"));
  } finally {
    writer.close();
  }
}
/**
 * Writes a TestValue through one view and verifies it can be read back as a
 * Value through another view with a compatible (evolved) schema.
 */
@Test
public void testReaderWriterCompatibleSchema() throws IOException {
  DatasetWriter<TestValue> testWriter = null;
  try {
    testWriter = Datasets.load(testValueView.getUri(), TestValue.class).newWriter();
    testWriter.write(testValue);
  } finally {
    Closeables.close(testWriter, false);
  }

  DatasetReader<Value> valueReader = null;
  try {
    valueReader = valueView.newReader();
    Assert.assertEquals(Sets.newHashSet(defaultValue),
        Sets.newHashSet((Iterable<Value>) valueReader));
  } finally {
    Closeables.close(valueReader, false);
  }
}
/**
 * Writes the given entities to the view, closing the writer exactly once.
 *
 * <p>Fix: the original called {@code writer.close()} both at the end of the
 * try block and again in the finally block, double-closing the writer on the
 * success path. The finally block alone is sufficient and covers both paths.
 *
 * @param view the view to write into
 * @param entities the entities to write, in order
 */
private static <E> void writeToView(View<E> view, E... entities) {
  DatasetWriter<E> writer = null;
  try {
    writer = view.newWriter();
    for (E entity : entities) {
      writer.write(entity);
    }
  } finally {
    if (writer != null) {
      writer.close();
    }
  }
}
/**
 * Creates a temp repository, writes {@code totalRecords} MyRecord rows, and
 * loads the same dataset back as GenericRecord for the read tests.
 *
 * <p>Fix: the writer is now closed in a finally block so a failed write does
 * not leak it.
 *
 * @throws IOException if the dataset cannot be created or written
 */
@BeforeClass
public static void setup() throws IOException {
  fs = LocalFileSystem.getInstance();
  testDirectory = new Path(Files.createTempDir().getAbsolutePath());
  FileSystemDatasetRepository repo =
      new FileSystemDatasetRepository(fs.getConf(), testDirectory);
  Dataset<MyRecord> writerDataset = repo.create("ns", "test",
      new DatasetDescriptor.Builder()
          .schema(MyRecord.class)
          .build(), MyRecord.class);
  DatasetWriter<MyRecord> writer = writerDataset.newWriter();
  try {
    for (int i = 0; i < totalRecords; i++) {
      writer.write(new MyRecord(String.valueOf(i), i));
    }
  } finally {
    writer.close();
  }
  readerDataset = repo.load("ns", "test", GenericRecord.class);
}
/**
 * Creates a temp repository, writes {@code totalRecords} MyRecord rows, and
 * loads the same dataset back as TestGenericRecord for the read tests.
 *
 * <p>Fix: the writer is now closed in a finally block so a failed write does
 * not leak it.
 *
 * @throws IOException if the dataset cannot be created or written
 */
@BeforeClass
public static void setup() throws IOException {
  fs = LocalFileSystem.getInstance();
  testDirectory = new Path(Files.createTempDir().getAbsolutePath());
  FileSystemDatasetRepository repo =
      new FileSystemDatasetRepository(fs.getConf(), testDirectory);
  Dataset<MyRecord> writerDataset = repo.create("ns", "test",
      new DatasetDescriptor.Builder()
          .schema(MyRecord.class)
          .build(), MyRecord.class);
  DatasetWriter<MyRecord> writer = writerDataset.newWriter();
  try {
    for (int i = 0; i < totalRecords; i++) {
      writer.write(new MyRecord(String.valueOf(i), i));
    }
  } finally {
    writer.close();
  }
  readerDataset = repo.load("ns", "test", TestGenericRecord.class);
}
/**
 * Entities inside the (1, 5] x (1, 5] range must be accepted by a writer
 * obtained from the restricted view.
 */
@Test
public void testLimitedWriter() {
  final View<TestEntity> range = ds
      .fromAfter(NAMES[0], "1").to(NAMES[0], "5")
      .fromAfter(NAMES[1], "1").to(NAMES[1], "5");
  DatasetWriter<TestEntity> rangeWriter = range.newWriter();
  try {
    // Both an interior point and the inclusive upper bound should succeed.
    rangeWriter.write(newTestEntity("3", "3"));
    rangeWriter.write(newTestEntity("5", "5"));
  } finally {
    rangeWriter.close();
  }
}
/**
 * Writing an entity outside the (1, 5] x (1, 5] range must be rejected with
 * IllegalArgumentException.
 */
@Test(expected = IllegalArgumentException.class)
public void testInvalidLimitedWriter() {
  final View<TestEntity> range = ds
      .fromAfter(NAMES[0], "1").to(NAMES[0], "5")
      .fromAfter(NAMES[1], "1").to(NAMES[1], "5");
  DatasetWriter<TestEntity> writer = range.newWriter();
  // "6" lies beyond the upper bound on both fields, so this write must throw.
  writer.write(newTestEntity("6", "6"));
}
@Test public void testEmptyCheck() throws IOException { Assert.assertTrue("New dataset should be empty", unbounded.isEmpty()); // NOTE: this is an un-restricted write so all should succeed DatasetWriter<StandardEvent> writer = null; try { writer = unbounded.newWriter(); writer.write(sepEvent); } finally { Closeables.close(writer, false); } Assert.assertFalse("Should not be empty after write", unbounded.isEmpty()); Assert.assertFalse("Should find event in September", unbounded.with("timestamp", sepEvent.getTimestamp()).isEmpty()); Assert.assertTrue("Should not find event in October", unbounded.with("timestamp", octEvent.getTimestamp()).isEmpty()); }