/**
 * Streams the immediate children of the given FieldValue, skipping null child values.
 * Returns an empty stream when the value is null or is not a Record.
 */
private Stream<FieldValue> getChildren(final FieldValue fieldValue) {
    // Children only exist beneath a non-null Record value.
    if (fieldValue == null || fieldValue.getValue() == null || !Filters.isRecord(fieldValue)) {
        return Stream.empty();
    }

    final Record record = (Record) fieldValue.getValue();

    // Wrap each non-null child value as a FieldValue parented to this one;
    // null children become empty Optionals that presentValues() drops.
    final Stream<Optional<FieldValue>> candidates = record.getSchema().getFields().stream()
        .map(childField -> {
            final Object childValue = record.getValue(childField);
            return childValue == null
                ? Optional.<FieldValue>empty()
                : Optional.<FieldValue>of(new StandardFieldValue(childValue, childField, fieldValue));
        });

    return Filters.presentValues(candidates);
}
map.put(recordFieldName, record.getValue(recordFieldName));
private void recursivelyAddParentFields(Record recordToWrite, FieldValue fieldValue) { try { // we get the parent data FieldValue parentField = fieldValue.getParent().get(); Record parentRecord = fieldValue.getParentRecord().get(); // for each field of the parent for (String field : parentRecord.getSchema().getFieldNames()) { // if and only if there is not an already existing field with this name // (we want to give priority to the deeper existing fields) if(recordToWrite.getValue(field) == null) { // Updates the value of the field with the given name to the given value. // If the field specified is not present in the schema, will do nothing. recordToWrite.setValue(field, parentRecord.getValue(field)); } } // recursive call recursivelyAddParentFields(recordToWrite, parentField); } catch (NoSuchElementException e) { return; } } });
public static GenericRecord createAvroRecord(final Record record, final Schema avroSchema, final Charset charset) throws IOException { final GenericRecord rec = new GenericData.Record(avroSchema); final RecordSchema recordSchema = record.getSchema(); for (final RecordField recordField : recordSchema.getFields()) { final Object rawValue = record.getValue(recordField); Pair<String, Field> fieldPair = lookupField(avroSchema, recordField); final String fieldName = fieldPair.getLeft(); final Field field = fieldPair.getRight(); if (field == null) { continue; } final Object converted = convertToAvroObject(rawValue, field.schema(), fieldName, charset); rec.put(fieldName, converted); } // see if the Avro schema has any fields that aren't in the RecordSchema, and if those fields have a default // value then we want to populate it in the GenericRecord being produced for (final Field field : avroSchema.getFields()) { final Optional<RecordField> recordField = recordSchema.getField(field.name()); if (!recordField.isPresent() && rec.get(field.name()) == null && field.defaultVal() != null) { rec.put(field.name(), field.defaultVal()); } } return rec; }
/**
 * Converts each field of the record to its ORC representation into the reusable
 * working row, then appends the row via the ORC writer. Does nothing when no
 * record fields were configured.
 *
 * Fix: the error message previously used {@code o.toString()}, which throws a
 * NullPointerException when the field value is null and masks the original
 * ArrayIndexOutOfBoundsException; {@code String.valueOf(o)} is null-safe.
 *
 * @throws IOException if a value cannot be converted to an ORC object
 */
@Override
public void write(final Record record) throws IOException {
    if (recordFields != null) {
        for (int i = 0; i < numRecordFields; i++) {
            final RecordField field = recordFields.get(i);
            final DataType fieldType = field.getDataType();
            final String fieldName = field.getFieldName();
            final Object o = record.getValue(field);
            try {
                workingRow[i] = NiFiOrcUtils.convertToORCObject(NiFiOrcUtils.getOrcField(fieldType, hiveFieldNames), o, hiveFieldNames);
            } catch (final ArrayIndexOutOfBoundsException aioobe) {
                // String.valueOf() guards against a null value here.
                final String errorMsg = "Index out of bounds for column " + i + ", type " + fieldName + ", and object " + String.valueOf(o);
                throw new IOException(errorMsg, aioobe);
            }
        }
        orcWriter.addRow(NiFiOrcUtils.createOrcStruct(orcSchema, workingRow));
    }
}
/**
 * Serializes the record as a JSON object. Null field values are emitted as
 * explicit JSON nulls only when the suppression policy allows: always for
 * NEVER_SUPPRESS, and for SUPPRESS_MISSING only when the field was actually
 * present in the raw record.
 */
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator) throws IOException {
    final RecordSchema schema = record.getSchema();

    generator.writeStartObject();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        final RecordField field = schema.getField(i);
        final String fieldName = field.getFieldName();
        final Object value = record.getValue(field);

        if (value != null) {
            generator.writeFieldName(fieldName);
            final DataType dataType = schema.getDataType(fieldName).get();
            writeValue(generator, value, fieldName, dataType);
            continue;
        }

        // Null value: write it out only if the suppression policy says so.
        if (nullSuppression.equals(NEVER_SUPPRESS.getValue())
                || (nullSuppression.equals(SUPPRESS_MISSING.getValue()) && record.getRawFieldNames().contains(fieldName))) {
            generator.writeNullField(fieldName);
        }
    }
    generator.writeEndObject();
}
Object sql = currentRecord.getValue(sqlField); if (sql == null || StringUtils.isEmpty((String) sql)) { throw new MalformedRecordException(format("Record had no (or null) value for Field Containing SQL: %s, FlowFile %s", sqlField, flowFile));
/**
 * Writes every raw field of the record as XML without consulting a schema.
 * Null fields produce an empty element when the suppression policy permits.
 *
 * @return true if at least one element was written
 */
private boolean iterateThroughRecordWithoutSchema(Deque<String> tagsToOpen, Record record) throws XMLStreamException {
    boolean wroteAnything = false;

    for (final String fieldName : record.getRawFieldNames()) {
        final Object value = record.getValue(fieldName);

        if (value != null) {
            wroteAnything |= writeUnknownField(tagsToOpen, value, fieldName);
        } else if (nullSuppression.equals(NullSuppression.NEVER_SUPPRESS)
                || nullSuppression.equals(NullSuppression.SUPPRESS_MISSING)) {
            // Emit an empty element for the null field.
            writeAllTags(tagsToOpen, fieldName);
            writer.writeEndElement();
            wroteAnything = true;
        }
    }

    return wroteAnything;
}
/**
 * Recursively collects all descendant FieldValues beneath the given value,
 * depth-first, skipping null children. Returns an empty list when the value
 * is null or is not a Record.
 *
 * Fix: the recursive call previously allocated a second StandardFieldValue
 * identical to {@code descendantFieldValue}; the existing wrapper is reused.
 */
private List<FieldValue> findDescendants(final FieldValue fieldValue) {
    if (fieldValue == null || fieldValue.getValue() == null) {
        return Collections.emptyList();
    }
    if (!Filters.isRecord(fieldValue)) {
        return Collections.emptyList();
    }

    final Record record = (Record) fieldValue.getValue();
    final List<FieldValue> matchingValues = new ArrayList<>();

    for (final RecordField childField : record.getSchema().getFields()) {
        final Object value = record.getValue(childField);
        if (value == null) {
            continue;
        }

        final FieldValue descendantFieldValue = new StandardFieldValue(value, childField, fieldValue);
        matchingValues.add(descendantFieldValue);

        // Recurse into nested records, reusing the wrapper created above
        // instead of constructing an identical second one.
        if (Filters.isRecord(childField.getDataType(), value)) {
            matchingValues.addAll(findDescendants(descendantFieldValue));
        }
    }

    return matchingValues;
}
}
/** * Writes each Record as a SolrInputDocument. */ public static void writeRecord(final Record record, final SolrInputDocument inputDocument,final List<String> fieldsToIndex,String parentFieldName) throws IOException { RecordSchema schema = record.getSchema(); for (int i = 0; i < schema.getFieldCount(); i++) { final RecordField field = schema.getField(i); String fieldName; if(!StringUtils.isBlank(parentFieldName)) { // Prefixing parent field name fieldName = parentFieldName+"_"+field.getFieldName(); }else{ fieldName = field.getFieldName(); } final Object value = record.getValue(field); if (value == null) { continue; }else { final DataType dataType = schema.getDataType(field.getFieldName()).get(); writeValue(inputDocument, value, fieldName, dataType,fieldsToIndex); } } }
/**
 * Resolves the child named {@code childName} beneath the given FieldValue.
 * Returns the missing-child sentinel when the value is not a record, the
 * record is null, the child value is null, or the child is absent from the
 * record's schema.
 */
private FieldValue getChild(final FieldValue fieldValue) {
    // A child can only exist beneath a Record value.
    if (!Filters.isRecord(fieldValue)) {
        return missingChild(fieldValue);
    }

    final Record record = (Record) fieldValue.getValue();
    if (record == null) {
        return missingChild(fieldValue);
    }

    final Object childValue = record.getValue(childName);
    if (childValue == null) {
        return missingChild(fieldValue);
    }

    final Optional<RecordField> childField = record.getSchema().getField(childName);
    return childField.isPresent()
        ? new StandardFieldValue(childValue, childField.get(), fieldValue)
        : missingChild(fieldValue);
}
final DataType fieldDataType = field.getDataType(); final String fieldName = field.getFieldName(); Object fieldValue = record.getValue(fieldName); if (fieldValue == null) { recordMap.put(fieldName, null);
for (final RecordField field : writeSchema.getFields()) { final String fieldName = field.getFieldName(); final Object value = record.getValue(field); if (value == null) { if (nullSuppression == NullSuppression.NEVER_SUPPRESS || (nullSuppression == NullSuppression.SUPPRESS_MISSING) && isFieldPresent(field, record)) { final Object value = record.getValue(fieldName); if (value == null) { if (nullSuppression == NullSuppression.NEVER_SUPPRESS || (nullSuppression == NullSuppression.SUPPRESS_MISSING) && record.getRawFieldNames().contains(fieldName)) {
/**
 * Writes every schema field of the record as XML, coercing each value to its
 * declared type first. CHOICE fields are resolved to the member type matching
 * the actual value. Null coerced values produce an empty element only when the
 * suppression policy permits (for SUPPRESS_MISSING, only if the record
 * actually carries the field).
 *
 * @return true if at least one element was written
 */
private boolean iterateThroughRecordUsingSchema(Deque<String> tagsToOpen, Record record, RecordSchema schema) throws XMLStreamException {
    boolean wroteAnything = false;

    for (final RecordField field : schema.getFields()) {
        final String fieldName = field.getFieldName();
        final DataType declaredType = field.getDataType();
        final Object rawValue = record.getValue(field);

        // CHOICE fields resolve to whichever member type matches the value.
        final DataType resolvedType = declaredType.getFieldType() == RecordFieldType.CHOICE
            ? DataTypeUtils.chooseDataType(rawValue, (ChoiceDataType) declaredType)
            : declaredType;
        final Object coercedValue = DataTypeUtils.convertType(
            rawValue, resolvedType, LAZY_DATE_FORMAT, LAZY_TIME_FORMAT, LAZY_TIMESTAMP_FORMAT, fieldName);

        if (coercedValue != null) {
            wroteAnything |= writeFieldForType(tagsToOpen, coercedValue, resolvedType, fieldName);
        } else if (nullSuppression.equals(NullSuppression.NEVER_SUPPRESS)
                || (nullSuppression.equals(NullSuppression.SUPPRESS_MISSING) && recordHasField(field, record))) {
            // Emit an empty element for the null/missing field.
            writeAllTags(tagsToOpen, fieldName);
            writer.writeEndElement();
            wroteAnything = true;
        }
    }

    return wroteAnything;
}
Object val = record.getValue(name); final byte[] fieldValueBytes; if (val == null && nullStrategy.equals(NULL_FIELD_SKIP.getValue())) {
Type colType = colSchema.getType(); if (record.getValue(colName) == null) { row.setNull(colName); continue;
final Object rawValue = record.getValue(field);
final Object childValue = record.getValue(childField); if (childValue == null && !childField.isNullable()) { logger.debug("Value is not compatible with schema because field {} has a null value, which is not allowed in the schema", childField.getFieldName());