/**
 * Closes the underlying writer if any events were handled, translating
 * runtime failures into EventDeliveryException. The writer reference and
 * the event counter are reset whether or not close succeeds.
 */
@Override
public void close() throws EventDeliveryException {
  if (nEventsHandled <= 0) {
    return; // nothing was written; no writer to close
  }
  try {
    writer.close();
  } catch (RuntimeException ex) {
    throw new EventDeliveryException(ex);
  } finally {
    writer = null;
    nEventsHandled = 0;
  }
}
if (writer != null) { try { writer.close();
// Closes the wrapped dataset writer when the task attempt finishes; the
// TaskAttemptContext parameter is required by the RecordWriter contract but unused.
@Override public void close(TaskAttemptContext taskAttemptContext) { datasetWriter.close(); } }
// Closes the wrapped dataset writer when the task attempt finishes; the
// TaskAttemptContext parameter is required by the RecordWriter contract but unused.
@Override public void close(TaskAttemptContext taskAttemptContext) { datasetWriter.close(); } }
df.toJavaRDD().foreachPartition(new VoidFunction<Iterator<Row>>() { @Override public void call(Iterator<Row> iterator) throws Exception { final DatasetWriter writer = // obtain writer while (iterator.hasNext()) { // process the records; write to HDFS using writer } writer.close(); // this ensures that .avro.tmp is converted to .avro } });
/**
 * Closes and discards the current writer, if one exists. Safe to call when
 * no writer is open; always logs the writer at debug level first (even when
 * it is null), matching the original diagnostic behavior.
 */
@Override
public void close() throws IOException {
  if (log.isDebugEnabled()) {
    log.debug("Closing writer " + writer);
  }
  if (writer == null) {
    return;
  }
  writer.close();
  writer = null;
}
/**
 * Flushes this handler by closing its writer, but only when at least one
 * event was handled. RuntimeExceptions from the writer are rethrown as
 * EventDeliveryException; state is reset in all cases.
 */
@Override
public void close() throws EventDeliveryException {
  if (nEventsHandled <= 0) {
    return; // no events were handled, so there is nothing to close
  }
  try {
    writer.close();
  } catch (RuntimeException ex) {
    throw new EventDeliveryException(ex);
  } finally {
    writer = null;
    nEventsHandled = 0;
  }
}
/**
 * Closes every cached partition writer and marks this writer as closed.
 * Only acts when currently open, so repeated calls are no-ops.
 */
@Override
public void close() {
  // == is the idiomatic comparison for enum constants and, unlike equals(),
  // cannot NPE if state were ever left null
  if (state == ReaderWriterState.OPEN) {
    LOG.debug("Closing all cached writers for view:{}", view);
    for (DatasetWriter<E> writer : cachedWriters.asMap().values()) {
      LOG.debug("Closing partition writer:{}", writer);
      writer.close();
    }
    state = ReaderWriterState.CLOSED;
  }
}
/**
 * Cache removal listener: closes the partition writer that was evicted.
 * Guava's RemovalNotification may report a null value when the entry was
 * garbage-collected (weak/soft values), so guard before dereferencing —
 * the original would NPE in that case.
 */
@Override
public void onRemoval(
    RemovalNotification<StorageKey, DatasetWriter<E>> notification) {
  DatasetWriter<E> writer = notification.getValue();
  if (writer != null) {
    LOG.debug("Closing writer:{} for partition:{}", writer, notification.getKey());
    writer.close();
  }
}
// Stops this sink: halts the metrics counter, closes any open writer (recording
// the close time as the last roll), logs shutdown, then delegates to super.stop().
// Synchronized so stop cannot race with concurrent writes that use the writer.
// NOTE(review): assumes writer.close() throws only unchecked exceptions — confirm
// against the DatasetWriter contract.
@Override public synchronized void stop() { counter.stop(); if (writer != null) { // any write problems invalidate the writer, which is immediately closed
writer.close(); this.writer = null; this.lastRolledMs = System.currentTimeMillis(); } // signal that this sink has stopped
LOG.info("Stopped dataset sink: " + getName()); super.stop(); }
/**
 * Writes the shared USER record to the given view, guaranteeing that the
 * writer opened here is closed even if the write fails.
 */
public void writeUserToView(View<GenericRecord> dataset) {
  DatasetWriter<GenericRecord> userWriter = null;
  try {
    userWriter = dataset.newWriter();
    userWriter.write(USER);
  } finally {
    // newWriter() itself may have thrown, so the reference can still be null
    if (userWriter != null) {
      userWriter.close();
    }
  }
}
/**
 * Seeds the output dataset with a single stats record.
 * The writer is closed in a finally block so a failed write cannot leak
 * the open writer (and its partially written file) — the original skipped
 * close() whenever write() threw.
 */
private void populateOutputDataset() {
  DatasetWriter<GenericData.Record> writer = outputDataset.newWriter();
  try {
    writer.write(newStatsRecord(4, "date"));
  } finally {
    writer.close();
  }
}
/**
 * Populates the view with ten sequential TestRecord rows (ids 0 through 9,
 * data "test-<id>"), closing the writer in a finally block.
 */
private static void writeTestRecords(View<TestRecord> view) {
  DatasetWriter<TestRecord> recordWriter = null;
  try {
    recordWriter = view.newWriter();
    for (int id = 0; id < 10; id++) {
      TestRecord record = new TestRecord();
      record.id = id;
      record.data = "test-" + id;
      recordWriter.write(record);
    }
  } finally {
    if (recordWriter != null) {
      recordWriter.close();
    }
  }
} }
/**
 * Populates the view with ten sequential TestRecord rows (ids 0 through 9,
 * data "test/-<id>" — the embedded slash exercises path-unsafe values),
 * closing the writer in a finally block.
 */
private static void writeTestRecords(View<TestRecord> view) {
  DatasetWriter<TestRecord> recordWriter = null;
  try {
    recordWriter = view.newWriter();
    for (int id = 0; id < 10; id++) {
      TestRecord record = new TestRecord();
      record.id = id;
      record.data = "test/-" + id;
      recordWriter.write(record);
    }
  } finally {
    if (recordWriter != null) {
      recordWriter.close();
    }
  }
} }
/**
 * One-time fixture: creates a MyRecord dataset in a temp directory, writes
 * totalRecords rows, then loads it back as a GenericRecord dataset.
 * The writer is now closed in a finally block so a write failure cannot
 * leak the open writer — the original skipped close() on exception.
 */
@BeforeClass
public static void setup() throws IOException {
  fs = LocalFileSystem.getInstance();
  testDirectory = new Path(Files.createTempDir().getAbsolutePath());
  FileSystemDatasetRepository repo =
      new FileSystemDatasetRepository(fs.getConf(), testDirectory);
  Dataset<MyRecord> writerDataset = repo.create("ns", "test",
      new DatasetDescriptor.Builder()
          .schema(MyRecord.class)
          .build(), MyRecord.class);
  DatasetWriter<MyRecord> writer = writerDataset.newWriter();
  try {
    for (int i = 0; i < totalRecords; i++) {
      writer.write(new MyRecord(String.valueOf(i), i));
    }
  } finally {
    writer.close();
  }
  readerDataset = repo.load("ns", "test", GenericRecord.class);
}
/**
 * One-time fixture: creates a StandardEvent dataset in a temp directory,
 * writes totalRecords synthetic events, then loads it back as a
 * GenericData.Record dataset. The writer is now closed in a finally block
 * so a write failure cannot leak the open writer — the original skipped
 * close() on exception.
 */
@BeforeClass
public static void setup() throws IOException {
  fs = LocalFileSystem.getInstance();
  testDirectory = new Path(Files.createTempDir().getAbsolutePath());
  FileSystemDatasetRepository repo =
      new FileSystemDatasetRepository(fs.getConf(), testDirectory);
  Dataset<StandardEvent> writerDataset = repo.create("ns", "test",
      new DatasetDescriptor.Builder()
          .schema(StandardEvent.class)
          .build(), StandardEvent.class);
  DatasetWriter<StandardEvent> writer = writerDataset.newWriter();
  try {
    for (long i = 0; i < totalRecords; i++) {
      String text = String.valueOf(i);
      writer.write(new StandardEvent(text, text, i, text, text, i));
    }
  } finally {
    writer.close();
  }
  readerDataset = repo.load("ns", "test", GenericData.Record.class);
}
/**
 * One-time fixture: creates a MyRecord dataset in a temp directory, writes
 * totalRecords rows, then loads it back as a TestGenericRecord dataset.
 * The writer is now closed in a finally block so a write failure cannot
 * leak the open writer — the original skipped close() on exception.
 */
@BeforeClass
public static void setup() throws IOException {
  fs = LocalFileSystem.getInstance();
  testDirectory = new Path(Files.createTempDir().getAbsolutePath());
  FileSystemDatasetRepository repo =
      new FileSystemDatasetRepository(fs.getConf(), testDirectory);
  Dataset<MyRecord> writerDataset = repo.create("ns", "test",
      new DatasetDescriptor.Builder()
          .schema(MyRecord.class)
          .build(), MyRecord.class);
  DatasetWriter<MyRecord> writer = writerDataset.newWriter();
  try {
    for (int i = 0; i < totalRecords; i++) {
      writer.write(new MyRecord(String.valueOf(i), i));
    }
  } finally {
    writer.close();
  }
  readerDataset = repo.load("ns", "test", TestGenericRecord.class);
}
/**
 * Verifies that a writer obtained from a bounded range view accepts
 * records whose keys fall inside the (1, 5] range on both name fields.
 */
@Test
public void testLimitedWriter() {
  final View<TestEntity> range = ds
      .fromAfter(NAMES[0], "1").to(NAMES[0], "5")
      .fromAfter(NAMES[1], "1").to(NAMES[1], "5");
  DatasetWriter<TestEntity> rangeWriter = range.newWriter();
  try {
    rangeWriter.write(newTestEntity("3", "3"));
    rangeWriter.write(newTestEntity("5", "5"));
  } finally {
    rangeWriter.close();
  }
}
/**
 * Seeds the input dataset with a small word list (duplicates included, to
 * exercise aggregation downstream). The writer is closed in a finally block
 * so a failed write cannot leak the open writer — the original skipped
 * close() whenever a write threw.
 */
private void populateInputDataset() {
  DatasetWriter<GenericData.Record> writer = inputDataset.newWriter();
  try {
    writer.write(newStringRecord("apple"));
    writer.write(newStringRecord("banana"));
    writer.write(newStringRecord("banana"));
    writer.write(newStringRecord("carrot"));
    writer.write(newStringRecord("apple"));
    writer.write(newStringRecord("apple"));
  } finally {
    writer.close();
  }
}