// Completes the in-flight record set: reports the running record count with no
// additional attributes. NOTE(review): the trailing "};" closes an enclosing
// anonymous class whose declaration is outside this view — confirm before moving.
@Override
public WriteResult finishRecordSet() throws IOException {
    return WriteResult.of(recordCount, Collections.emptyMap());
}
};
// Writes a single record via the concrete writer and reports the cumulative
// record count together with any attributes the writer produced.
@Override
public final WriteResult write(final Record record) throws IOException {
    final Map<String, String> recordAttributes = writeRecord(record);
    recordCount++;
    return WriteResult.of(recordCount, recordAttributes);
}
// Finishes the active record set, failing fast if none was ever begun.
// A null attribute map from the subclass hook is normalized to an empty map.
@Override
public final WriteResult finishRecordSet() throws IOException {
    if (!isActiveRecordSet()) {
        throw new IllegalStateException("Cannot finish RecordSet because no RecordSet has begun");
    }

    Map<String, String> attributes = onFinishRecordSet();
    if (attributes == null) {
        attributes = Collections.emptyMap();
    }
    return WriteResult.of(recordCount, attributes);
}
/**
 * Writes every record remaining in the given RecordSet, one at a time.
 *
 * @param recordSet the RecordSet to write
 * @return the result of writing the record set
 * @throws IOException if an I/O error happens reading from the RecordSet, or writing a Record
 */
default WriteResult write(final RecordSet recordSet) throws IOException {
    int written = 0;
    for (Record record = recordSet.next(); record != null; record = recordSet.next()) {
        write(record);
        written++;
    }
    return WriteResult.of(written, Collections.emptyMap());
}
// Test writer that emits a naive CSV line per record and can simulate a
// failure after a configured number of records.
@Override
public WriteResult write(Record record) throws IOException {
    // Simulate a write failure after failAfterN records (disabled when -1).
    if (++recordCount > failAfterN && failAfterN > -1) {
        throw new IOException("Unit Test intentionally throwing IOException after " + failAfterN + " records were written");
    }

    // Emit the header exactly once, before the first record.
    if (header != null && !headerWritten) {
        // Explicit charset: the no-arg getBytes() uses the platform default,
        // which makes the emitted bytes environment-dependent.
        out.write(header.getBytes("UTF-8"));
        out.write("\n".getBytes("UTF-8"));
        headerWritten = true;
    }

    final int numCols = record.getSchema().getFieldCount();
    int i = 0;
    for (final String fieldName : record.getSchema().getFieldNames()) {
        final String val = record.getAsString(fieldName);
        if (val != null) {
            if (quoteValues) {
                out.write("\"".getBytes("UTF-8"));
                out.write(val.getBytes("UTF-8"));
                out.write("\"".getBytes("UTF-8"));
            } else {
                out.write(val.getBytes("UTF-8"));
            }
        }
        // Comma between columns, none after the last one.
        if (i++ < numCols - 1) {
            out.write(",".getBytes("UTF-8"));
        }
    }
    out.write("\n".getBytes("UTF-8"));

    return WriteResult.of(1, Collections.emptyMap());
}
return WriteResult.of(recordCount, Collections.emptyMap());
// Writes a single record as XML without consulting the schema for field order.
@Override
public WriteResult writeRawRecord(Record record) throws IOException {
    // First record of a new record set: emit the schema header before any record content.
    if (!isActiveRecordSet()) {
        schemaAccess.writeHeader(recordSchema, getOutputStream());
    }

    checkWritingMultipleRecords();

    final Deque<String> tagsToOpen = new ArrayDeque<>();
    try {
        tagsToOpen.addLast(recordTagName);

        final boolean closingTagRequired = iterateThroughRecordWithoutSchema(tagsToOpen, record);
        if (closingTagRequired) {
            writer.writeEndElement();
            hasWrittenRecord = true;
        }
    } catch (XMLStreamException e) {
        // Preserve the original exception as the cause instead of discarding it;
        // the previous code threw new IOException(e.getMessage()) and lost the stack.
        throw new IOException(e.getMessage(), e);
    }

    final Map<String, String> attributes = schemaAccess.getAttributes(recordSchema);
    return WriteResult.of(incrementRecordCount(), attributes);
}
/** * @param recordSet the RecordSet to write * @return the result of writing the record set * @throws IOException if an I/O error happens reading from the RecordSet, or writing a Record */ public WriteResult write(final RecordSet recordSet) throws IOException { int recordCount = 0; Record record; while ((record = recordSet.next()) != null) { write(record); recordCount++; } // Add Hive DDL Attribute String hiveDDL = NiFiOrcUtils.generateHiveDDL(recordSchema, hiveTableName, hiveFieldNames); Map<String, String> attributes = new HashMap<String, String>() {{ put(HIVE_DDL_ATTRIBUTE, hiveDDL); }}; return WriteResult.of(recordCount, attributes); }
@Override public WriteResult write(final RecordSet rs) throws IOException { final int colCount = rs.getSchema().getFieldCount(); Assert.assertEquals(columnNames.size(), colCount); final List<String> colNames = new ArrayList<>(colCount); for (int i = 0; i < colCount; i++) { colNames.add(rs.getSchema().getField(i).getFieldName()); } Assert.assertEquals(columnNames, colNames); // Iterate over the rest of the records to ensure that we read the entire stream. If we don't // do this, we won't consume all of the data and as a result we will not close the stream properly Record record; while ((record = rs.next()) != null) { System.out.println(record); } return WriteResult.of(0, Collections.emptyMap()); }
@Override public WriteResult writeRawRecord(final Record record) throws IOException { // If we are not writing an active record set, then we need to ensure that we write the // schema information. if (!isActiveRecordSet()) { generator.flush(); schemaAccess.writeHeader(recordSchema, getOutputStream()); } writeRecord(record, recordSchema, generator, g -> g.writeStartObject(), g -> g.writeEndObject(), false); final Map<String, String> attributes = schemaAccess.getAttributes(recordSchema); return WriteResult.of(incrementRecordCount(), attributes); }
@Override public WriteResult writeRawRecord(final Record record) throws IOException { // If we are not writing an active record set, then we need to ensure that we write the // schema information. if (!isActiveRecordSet()) { schemaWriter.writeHeader(recordSchema, getOutputStream()); } includeHeaderIfNecessary(record, false); final String[] fieldNames = getFieldNames(record); // Avoid creating a new Object[] for every Record if we can. But if the record has a different number of columns than does our // schema, we don't have a lot of options here, so we just create a new Object[] in that case. final Object[] recordFieldValues = (fieldNames.length == this.fieldValues.length) ? this.fieldValues : new String[fieldNames.length]; int i = 0; for (final String fieldName : fieldNames) { final Optional<RecordField> recordField = recordSchema.getField(fieldName); if (recordField.isPresent()) { recordFieldValues[i++] = record.getAsString(fieldName, getFormat(recordField.get())); } else { recordFieldValues[i++] = record.getAsString(fieldName); } } printer.printRecord(recordFieldValues); final Map<String, String> attributes = schemaWriter.getAttributes(recordSchema); return WriteResult.of(incrementRecordCount(), attributes); }
// Delegates to the subclass writeRecord hook, then bumps and reports the
// running record count. The count is only incremented after a successful write.
@Override
public final WriteResult write(final Record record) throws IOException {
    final Map<String, String> attrs = writeRecord(record);
    recordCount++;
    return WriteResult.of(recordCount, attrs);
}
// Finalizes the current record set. Throws if finishRecordSet is called
// without a preceding beginRecordSet; a null hook result becomes an empty map.
@Override
public final WriteResult finishRecordSet() throws IOException {
    if (!isActiveRecordSet()) {
        throw new IllegalStateException("Cannot finish RecordSet because no RecordSet has begun");
    }

    final Map<String, String> finishAttributes = onFinishRecordSet();
    return WriteResult.of(recordCount, finishAttributes == null ? Collections.emptyMap() : finishAttributes);
}