@Override
public void onSuccess(Object value) {
    // Druid accepted the record; log the tranquility response for tracing.
    // Pass `value` directly (not value.toString()) so a null result cannot NPE here.
    log.debug(" FlowFile Processing Success: {}", new Object[]{value});
    try {
        // The writer is shared across async callbacks, so serialize access to it.
        synchronized (successfulRecordWriter) {
            successfulRecordWriter.write(record);
            successfulRecordWriter.flush();
            successfulFlowFileCount.incrementAndGet();
        }
    } catch (final IOException ioe) {
        // Fix: the original format string had no {} placeholder, so the passed
        // ioe.getMessage() argument was silently dropped from the log output.
        log.error("Error transferring record to success, this may result in data loss. "
                + "However the record was successfully processed by Druid: {}",
                new Object[]{ioe.getMessage()}, ioe);
        recordWriteErrors.incrementAndGet();
    }
}
});
@Override public void onFailure(Throwable cause) { if (cause instanceof MessageDroppedException) { // This happens when event timestamp targets a Druid Indexing task that has closed (Late Arriving Data) log.debug("Record Dropped due to MessageDroppedException: {}, transferring record to dropped.", new Object[]{cause.getMessage()}, cause); try { synchronized (droppedRecordWriter) { droppedRecordWriter.write(record); droppedRecordWriter.flush(); droppedFlowFileCount.incrementAndGet(); } } catch (final IOException ioe) { log.error("Error transferring record to dropped, this may result in data loss.", new Object[]{ioe.getMessage()}, ioe); recordWriteErrors.incrementAndGet(); } } else { log.error("FlowFile Processing Failed due to: {}", new Object[]{cause.getMessage()}, cause); try { synchronized (failedRecordWriter) { failedRecordWriter.write(record); failedRecordWriter.flush(); failedFlowFileCount.incrementAndGet(); } } catch (final IOException ioe) { log.error("Error transferring record to failure, this may result in data loss.", new Object[]{ioe.getMessage()}, ioe); recordWriteErrors.incrementAndGet(); } } }
@Override
public void process(final OutputStream out) throws IOException {
    // Fix: try-with-resources guarantees the writer is closed even when
    // write/flush throws — the original leaked the RecordSetWriter. This also
    // matches the try-with-resources form used elsewhere in this file.
    try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
        writer.write(recordSet);
        writer.flush();
        // Capture the writer's MIME type for the caller (appended before close).
        mimeType.append(writer.getMimeType());
    } catch (SchemaNotFoundException e) {
        throw new ProcessException("Could not parse Solr response", e);
    }
}
});
// Serialize the record, keep the attributes the writer reports (WriteResult),
// and flush so the serialized bytes reach the underlying stream.
// NOTE(review): `writeResult`/`additionalAttributes` may be read beyond this
// fragment — do not inline or remove the locals without checking the full scope.
final WriteResult writeResult = writer.write(record); additionalAttributes = writeResult.getAttributes(); writer.flush();
// Serialize the record, keep the writer-reported attributes, and flush.
// NOTE(review): locals likely used later in the enclosing scope — confirm before refactoring.
final WriteResult writeResult = writer.write(record); additionalAttributes = writeResult.getAttributes(); writer.flush();
// Serialize the record, keep the writer-reported attributes, and flush.
// NOTE(review): locals likely used later in the enclosing scope — confirm before refactoring.
final WriteResult writeResult = writer.write(record); additionalAttributes = writeResult.getAttributes(); writer.flush();
// Push any buffered output through to the underlying stream.
writer.flush();
// Flush buffered output, then capture the writer's MIME type
// (presumably used to set the flowfile's mime.type attribute — confirm with caller).
recordWriter.flush(); mimeType = recordWriter.getMimeType();
// Writer opened via try-with-resources so it is closed even if write/flush throws.
// The catch body continues beyond this fragment.
try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) { writer.write(recordSet); writer.flush(); mimeType.append(writer.getMimeType()); } catch (SchemaNotFoundException e) {
@Override
public void onSuccess(Object value) {
    // Druid accepted the record; log the tranquility response for tracing.
    // Pass `value` directly (not value.toString()) so a null result cannot NPE here.
    log.debug(" FlowFile Processing Success: {}", new Object[]{value});
    try {
        // The writer is shared across async callbacks, so serialize access to it.
        synchronized (successfulRecordWriter) {
            successfulRecordWriter.write(record);
            successfulRecordWriter.flush();
            successfulFlowFileCount.incrementAndGet();
        }
    } catch (final IOException ioe) {
        // Fix: the original format string had no {} placeholder, so the passed
        // ioe.getMessage() argument was silently dropped from the log output.
        log.error("Error transferring record to success, this may result in data loss. "
                + "However the record was successfully processed by Druid: {}",
                new Object[]{ioe.getMessage()}, ioe);
        recordWriteErrors.incrementAndGet();
    }
}
});
@Override public void onFailure(Throwable cause) { if (cause instanceof MessageDroppedException) { // This happens when event timestamp targets a Druid Indexing task that has closed (Late Arriving Data) log.debug("Record Dropped due to MessageDroppedException: {}, transferring record to dropped.", new Object[]{cause.getMessage()}, cause); try { synchronized (droppedRecordWriter) { droppedRecordWriter.write(record); droppedRecordWriter.flush(); droppedFlowFileCount.incrementAndGet(); } } catch (final IOException ioe) { log.error("Error transferring record to dropped, this may result in data loss.", new Object[]{ioe.getMessage()}, ioe); recordWriteErrors.incrementAndGet(); } } else { log.error("FlowFile Processing Failed due to: {}", new Object[]{cause.getMessage()}, cause); try { synchronized (failedRecordWriter) { failedRecordWriter.write(record); failedRecordWriter.flush(); failedFlowFileCount.incrementAndGet(); } } catch (final IOException ioe) { log.error("Error transferring record to failure, this may result in data loss.", new Object[]{ioe.getMessage()}, ioe); recordWriteErrors.incrementAndGet(); } } }
@Override
public void process(final OutputStream out) throws IOException {
    // Fix: try-with-resources guarantees the writer is closed even when
    // write/flush throws — the original leaked the RecordSetWriter. This also
    // matches the try-with-resources form used elsewhere in this file.
    try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
        writer.write(recordSet);
        writer.flush();
        // Capture the writer's MIME type for the caller (appended before close).
        mimeType.append(writer.getMimeType());
    } catch (SchemaNotFoundException e) {
        throw new ProcessException("Could not parse Solr response", e);
    }
}
});
// Serialize the record, keep the writer-reported attributes, and flush.
// NOTE(review): locals likely used later in the enclosing scope — confirm before refactoring.
final WriteResult writeResult = writer.write(record); additionalAttributes = writeResult.getAttributes(); writer.flush();
// Serialize the record, keep the writer-reported attributes, and flush.
// NOTE(review): locals likely used later in the enclosing scope — confirm before refactoring.
final WriteResult writeResult = writer.write(record); additionalAttributes = writeResult.getAttributes(); writer.flush();
// Flush buffered output, then capture the writer's MIME type
// (presumably used to set the flowfile's mime.type attribute — confirm with caller).
recordWriter.flush(); mimeType = recordWriter.getMimeType();
// Writer opened via try-with-resources so it is closed even if write/flush throws.
// The catch body continues beyond this fragment.
try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) { writer.write(recordSet); writer.flush(); mimeType.append(writer.getMimeType()); } catch (SchemaNotFoundException e) {