/**
 * Returns the union of the record's raw field names and the schema's field names,
 * preserving first-seen order. The array is computed once and cached in the
 * {@code fieldNames} field; subsequent calls return the cached value.
 *
 * @param record the record whose raw field names are merged with the schema's
 * @return the cached, ordered union of field names
 */
private String[] getFieldNames(final Record record) {
    if (fieldNames == null) {
        // LinkedHashSet de-duplicates while keeping insertion order: raw names first,
        // then any schema-only names appended.
        final Set<String> union = new LinkedHashSet<>(record.getRawFieldNames());
        union.addAll(recordSchema.getFieldNames());
        fieldNames = union.toArray(new String[0]);
    }
    return fieldNames;
}
/**
 * Collects the schema's field names after normalizing each one via
 * {@code normalizeColumnName(String, boolean)}.
 *
 * @param schema the schema to read field names from; may be {@code null}
 * @param translateFieldNames passed through to the normalization routine
 * @return the normalized names, or an empty set when {@code schema} is {@code null}
 */
private Set<String> getNormalizedColumnNames(final RecordSchema schema, final boolean translateFieldNames) {
    final Set<String> normalized = new HashSet<>();
    if (schema == null) {
        return normalized;
    }
    for (final String fieldName : schema.getFieldNames()) {
        normalized.add(normalizeColumnName(fieldName, translateFieldNames));
    }
    return normalized;
}
/**
 * Writes the header line exactly once, and only when header output is enabled.
 * Column names come from the schema alone when {@code includeOnlySchemaFields} is
 * set; otherwise from the union of record and schema fields (see
 * {@code getFieldNames(Record)}).
 *
 * @param record the record whose field names may contribute header columns
 * @param includeOnlySchemaFields whether to restrict columns to the schema's fields
 * @throws IOException if the underlying printer fails to write
 */
private void includeHeaderIfNecessary(final Record record, final boolean includeOnlySchemaFields) throws IOException {
    if (headerWritten || !includeHeaderLine) {
        return;
    }
    final Object[] columns = includeOnlySchemaFields
            ? recordSchema.getFieldNames().toArray(new Object[0])
            : getFieldNames(record);
    printer.printRecord(columns);
    // Guard flag: ensures the header is emitted at most once per writer.
    headerWritten = true;
}
/**
 * Writes a single record as one comma-separated line, optionally quoting values and
 * emitting a one-time header first. Intentionally throws once {@code failAfterN}
 * records have been written (test hook).
 *
 * <p>Fix: all {@code String#getBytes()} calls previously used the platform-default
 * charset, making output non-deterministic across systems; bytes are now encoded
 * explicitly as UTF-8.</p>
 *
 * @param record the record to serialize
 * @return a WriteResult reporting one record written
 * @throws IOException on write failure, or deliberately after {@code failAfterN} records
 */
@Override
public WriteResult write(Record record) throws IOException {
    // Deliberate failure injection for unit tests; -1 (or any negative) disables it.
    if (++recordCount > failAfterN && failAfterN > -1) {
        throw new IOException("Unit Test intentionally throwing IOException after " + failAfterN + " records were written");
    }

    // Explicit charset: the no-arg getBytes() depends on the JVM's platform default
    // and can corrupt output on non-UTF-8 systems.
    final java.nio.charset.Charset charset = java.nio.charset.StandardCharsets.UTF_8;

    if (header != null && !headerWritten) {
        out.write(header.getBytes(charset));
        out.write("\n".getBytes(charset));
        headerWritten = true;
    }

    final int numCols = record.getSchema().getFieldCount();
    int i = 0;
    for (final String fieldName : record.getSchema().getFieldNames()) {
        final String val = record.getAsString(fieldName);
        if (val != null) {
            if (quoteValues) {
                // NOTE(review): embedded quotes/commas in val are not escaped —
                // presumably acceptable for this test writer; confirm before reuse.
                out.write("\"".getBytes(charset));
                out.write(val.getBytes(charset));
                out.write("\"".getBytes(charset));
            } else {
                out.write(val.getBytes(charset));
            }
        }
        // Separator after every column except the last.
        if (i++ < numCols - 1) {
            out.write(",".getBytes(charset));
        }
    }
    out.write("\n".getBytes(charset));

    return WriteResult.of(1, Collections.emptyMap());
}
// Copy every schema-defined field from the record into the map (values may be null).
for (final String recordFieldName : recordSchema.getFieldNames()) { map.put(recordFieldName, record.getValue(recordFieldName));
// Walk the record's schema fields; only non-null string values are handled below.
for (final String fieldName : record.getSchema().getFieldNames()) { final String val = record.getAsString(fieldName); if (val != null) {
public CSVRecordReader(final InputStream in, final ComponentLog logger, final RecordSchema schema, final CSVFormat csvFormat, final boolean hasHeader, final boolean ignoreHeader, final String dateFormat, final String timeFormat, final String timestampFormat, final String encoding) throws IOException { super(logger, schema, hasHeader, ignoreHeader, dateFormat, timeFormat, timestampFormat); final Reader reader = new InputStreamReader(new BOMInputStream(in), encoding); CSVFormat withHeader; if (hasHeader) { withHeader = csvFormat.withSkipHeaderRecord(); if (ignoreHeader) { withHeader = withHeader.withHeader(schema.getFieldNames().toArray(new String[0])); } else { withHeader = withHeader.withFirstRecordAsHeader(); } } else { withHeader = csvFormat.withHeader(schema.getFieldNames().toArray(new String[0])); } csvParser = new CSVParser(reader, withHeader); }
// Collect the column indexes to include, driven by the schema's field names.
List<String> fieldNames = recordSchema.getFieldNames(); final List<Integer> includedColumns = new ArrayList<>(); if (fieldNames != null) {
// NOTE(review): branch condition not visible in this fragment — one path takes the
// schema's field names, the other uses the raw csvRecord values as names.
rawFieldNames = schema.getFieldNames(); } else { rawFieldNames = Arrays.asList(csvRecord);
/**
 * Walks up the field hierarchy, copying each ancestor record's fields into
 * {@code recordToWrite} unless a field of the same name is already populated
 * (deeper values take priority). Recursion terminates when {@code fieldValue} has
 * no parent: {@code get()} (apparently on an Optional — confirm) then throws
 * NoSuchElementException, which is caught below.
 */
private void recursivelyAddParentFields(Record recordToWrite, FieldValue fieldValue) {
    try {
        // we get the parent data; get() throws NoSuchElementException when there is
        // no parent, which is what ends the recursion in the catch block
        FieldValue parentField = fieldValue.getParent().get();
        Record parentRecord = fieldValue.getParentRecord().get();
        // for each field of the parent
        for (String field : parentRecord.getSchema().getFieldNames()) {
            // if and only if there is not an already existing field with this name
            // (we want to give priority to the deeper existing fields)
            if(recordToWrite.getValue(field) == null) {
                // Updates the value of the field with the given name to the given value.
                // If the field specified is not present in the schema, will do nothing.
                recordToWrite.setValue(field, parentRecord.getValue(field));
            }
        }
        // recursive call
        recursivelyAddParentFields(recordToWrite, parentField);
    } catch (NoSuchElementException e) {
        return;
    }
} });
// Collect the column indexes to include, driven by the schema's field names.
List<String> fieldNames = recordSchema.getFieldNames(); final List<Integer> includedColumns = new ArrayList<>(); if (fieldNames != null) {
// Collect the column indexes to include, driven by the schema's field names.
List<String> fieldNames = recordSchema.getFieldNames(); final List<Integer> includedColumns = new ArrayList<>(); if (fieldNames != null) {
// Open the flow file content and create a record reader over it; both resources
// are closed automatically by try-with-resources.
try (final InputStream in = session.read(flowFile); final RecordReader recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger())) { final List<String> fieldNames = recordReader.getSchema().getFieldNames(); final RecordSet recordSet = recordReader.createRecordSet();
// Skip the row-id, timestamp, and (when configured) visibility fields.
for (String name : schema.getFieldNames()) { if (name.equals(rowFieldName) || name.equals(timestampFieldName) || (visField != null && name.equals(visField.getFieldName()))) { continue;
// Copy each schema field's value from the content map into the document
// (missing keys put null).
for (String name : schema.getFieldNames()) { document.put(name, contentMap.get(name));
// Build the INSERT, binding each schema field's value from the record content map.
insertQuery = QueryBuilder.insertInto(cassandraTable); for (String fieldName : schema.getFieldNames()) { insertQuery.value(fieldName, recordContentMap.get(fieldName));
/**
 * Collects the schema's field names after normalizing each one via
 * {@code normalizeColumnName(String, boolean)}.
 *
 * @param schema the schema to read field names from; may be {@code null}
 * @param translateFieldNames passed through to the normalization routine
 * @return the normalized names, or an empty set when {@code schema} is {@code null}
 */
private Set<String> getNormalizedColumnNames(final RecordSchema schema, final boolean translateFieldNames) {
    final Set<String> normalized = new HashSet<>();
    if (schema == null) {
        return normalized;
    }
    for (final String fieldName : schema.getFieldNames()) {
        normalized.add(normalizeColumnName(fieldName, translateFieldNames));
    }
    return normalized;
}
// Copy every schema-defined field from the record into the map (values may be null).
for (final String recordFieldName : recordSchema.getFieldNames()) { map.put(recordFieldName, record.getValue(recordFieldName));
/**
 * Walks up the field hierarchy, copying each ancestor record's fields into
 * {@code recordToWrite} unless a field of the same name is already populated
 * (deeper values take priority). Recursion terminates when {@code fieldValue} has
 * no parent: {@code get()} (apparently on an Optional — confirm) then throws
 * NoSuchElementException, which is caught below.
 */
private void recursivelyAddParentFields(Record recordToWrite, FieldValue fieldValue) {
    try {
        // we get the parent data; get() throws NoSuchElementException when there is
        // no parent, which is what ends the recursion in the catch block
        FieldValue parentField = fieldValue.getParent().get();
        Record parentRecord = fieldValue.getParentRecord().get();
        // for each field of the parent
        for (String field : parentRecord.getSchema().getFieldNames()) {
            // if and only if there is not an already existing field with this name
            // (we want to give priority to the deeper existing fields)
            if(recordToWrite.getValue(field) == null) {
                // Updates the value of the field with the given name to the given value.
                // If the field specified is not present in the schema, will do nothing.
                recordToWrite.setValue(field, parentRecord.getValue(field));
            }
        }
        // recursive call
        recursivelyAddParentFields(recordToWrite, parentField);
    } catch (NoSuchElementException e) {
        return;
    }
} });
// Build the INSERT, binding each schema field's value from the record content map.
insertQuery = QueryBuilder.insertInto(cassandraTable); for (String fieldName : schema.getFieldNames()) { insertQuery.value(fieldName, recordContentMap.get(fieldName));