@Override
public RecordSchema getSchema() throws IOException {
    try {
        return RecordReader.this.getSchema();
    } catch (final MalformedRecordException e) {
        // This override's contract only permits IOException, so wrap the checked
        // MalformedRecordException rather than lose the underlying cause.
        throw new IOException(e);
    }
}
RecordSchema recordSchema = recordReader.getSchema(); for (RecordField field : recordSchema.getFields()) { String fieldName = field.getFieldName();
throws IllegalArgumentException, MalformedRecordException, IOException, SQLException { final RecordSchema recordSchema = recordParser.getSchema();
@Override
public RelDataType getRowType(final RelDataTypeFactory typeFactory) {
    // Return the previously computed row type, if any, so the FlowFile content
    // is only read once.
    if (relDataType != null) {
        return relDataType;
    }

    // Open the FlowFile content just long enough to discover the record schema.
    RecordSchema schema;
    try (final InputStream in = session.read(flowFile)) {
        final RecordReader recordParser = recordParserFactory.createRecordReader(flowFile, in, logger);
        schema = recordParser.getSchema();
    } catch (final Exception e) {
        throw new ProcessException("Failed to determine schema of data records for " + flowFile, e);
    }

    final List<String> names = new ArrayList<>();
    final List<RelDataType> types = new ArrayList<>();

    // Map each record field to a Calcite type, preserving per-field nullability.
    final JavaTypeFactory javaTypeFactory = (JavaTypeFactory) typeFactory;
    for (final RecordField field : schema.getFields()) {
        names.add(field.getFieldName());
        // Named 'fieldType' to avoid shadowing the instance field 'relDataType'.
        final RelDataType fieldType = getRelDataType(field.getDataType(), javaTypeFactory);
        types.add(javaTypeFactory.createTypeWithNullability(fieldType, field.isNullable()));
    }

    logger.debug("Found Schema: {}", new Object[] {schema});

    // Cache the record schema the first time it is discovered.
    if (recordSchema == null) {
        recordSchema = schema;
    }

    relDataType = typeFactory.createStructType(Pair.zip(names, types));
    return relDataType;
}
throws IllegalArgumentException, MalformedRecordException, IOException, SQLException { final RecordSchema recordSchema = recordParser.getSchema(); final ComponentLog log = getLogger();
/**
 * Resolves the schema to validate records against, according to the configured
 * Schema Access Strategy property.
 *
 * @param context  the process context providing the strategy and related properties
 * @param flowFile the FlowFile whose attributes are used for expression evaluation
 * @param reader   the record reader, used when the strategy is to use the reader's own schema
 * @return the resolved {@link RecordSchema}
 * @throws MalformedRecordException if the reader cannot derive its schema
 * @throws IOException              if reading the schema fails
 * @throws SchemaNotFoundException  if the registry does not contain the named schema
 */
protected RecordSchema getValidationSchema(final ProcessContext context, final FlowFile flowFile, final RecordReader reader)
        throws MalformedRecordException, IOException, SchemaNotFoundException {
    final String schemaAccessStrategy = context.getProperty(SCHEMA_ACCESS_STRATEGY).getValue();
    if (schemaAccessStrategy.equals(READER_SCHEMA.getValue())) {
        // Use whatever schema the reader itself derived from the incoming data.
        return reader.getSchema();
    } else if (schemaAccessStrategy.equals(SCHEMA_NAME_PROPERTY.getValue())) {
        // Look the schema up by name in the configured Schema Registry;
        // the schema name may contain Expression Language referencing FlowFile attributes.
        final SchemaRegistry schemaRegistry = context.getProperty(SCHEMA_REGISTRY).asControllerService(SchemaRegistry.class);
        final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final SchemaIdentifier schemaIdentifier = SchemaIdentifier.builder().name(schemaName).build();
        return schemaRegistry.retrieveSchema(schemaIdentifier);
    } else if (schemaAccessStrategy.equals(SCHEMA_TEXT_PROPERTY.getValue())) {
        // Parse an Avro schema directly from the property's (EL-evaluated) text.
        final String schemaText = context.getProperty(SCHEMA_TEXT).evaluateAttributeExpressions(flowFile).getValue();
        final Parser parser = new Schema.Parser();
        final Schema avroSchema = parser.parse(schemaText);
        return AvroTypeUtil.createSchema(avroSchema);
    } else {
        // Should be unreachable when the property's allowable values are enforced.
        throw new ProcessException("Invalid Schema Access Strategy: " + schemaAccessStrategy);
    }
}
}
// Streams every record from the input, transforms it via process(...), and writes the
// result as a single record set, capturing record count and MIME type into 'attributes'.
@Override
public void process(final InputStream in, final OutputStream out) throws IOException {
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        // Let the writer factory reconcile the reader's schema with its own configuration.
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
            writer.beginRecordSet();
            Record record;
            while ((record = reader.nextRecord()) != null) {
                // Qualified 'AbstractRecordProcessor.this' so the subclass hook is invoked,
                // not this anonymous class's own process(...) overload.
                final Record processed = AbstractRecordProcessor.this.process(record, writeSchema, original, context);
                writer.write(processed);
            }
            final WriteResult writeResult = writer.finishRecordSet();
            // Propagate results to the FlowFile attributes collected by the caller.
            attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
            recordCount.set(writeResult.getRecordCount());
        }
    } catch (final SchemaNotFoundException e) {
        throw new ProcessException(e.getLocalizedMessage(), e);
    } catch (final MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
} });
// Routes each incoming record to zero or more relationships, lazily creating one
// output FlowFile + writer per relationship on first use.
@Override
public void process(final InputStream in) throws IOException {
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        Record record;
        while ((record = reader.nextRecord()) != null) {
            // route(...) decides which relationship(s) receive this record.
            final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext);
            numRecords.incrementAndGet();
            for (final Relationship relationship : relationships) {
                final RecordSetWriter recordSetWriter;
                Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship);
                if (tuple == null) {
                    // First record for this relationship: create the output FlowFile
                    // and its writer. The writer/stream are intentionally NOT closed
                    // here — the caller finishes and closes them after processing
                    // completes (writers map outlives this callback).
                    FlowFile outFlowFile = session.create(original);
                    final OutputStream out = session.write(outFlowFile);
                    recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out);
                    recordSetWriter.beginRecordSet();
                    tuple = new Tuple<>(outFlowFile, recordSetWriter);
                    writers.put(relationship, tuple);
                } else {
                    recordSetWriter = tuple.getValue();
                }
                recordSetWriter.write(record);
            }
        }
    } catch (final SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
} });
try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema());
try (final InputStream in = session.read(flowFile); final RecordReader recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger())) { final List<String> fieldNames = recordReader.getSchema().getFieldNames(); final RecordSet recordSet = recordReader.createRecordSet();
/**
 * Assigns the given FlowFile to a record bin keyed by a group ID derived from its schema.
 *
 * @param context    the process context supplying the configured record reader factory
 * @param flowFile   the FlowFile to bin
 * @param session    the session that owns the FlowFile's content stream
 * @param binManager the manager that accumulates FlowFiles per group
 * @param block      whether binManager.add should block when bins are full
 */
private void binFlowFile(final ProcessContext context, final FlowFile flowFile, final ProcessSession session, final RecordBinManager binManager, final boolean block) {
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    try (final InputStream in = session.read(flowFile);
         final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
        final RecordSchema schema = reader.getSchema();
        final String groupId = getGroupId(context, flowFile, schema, session);
        getLogger().debug("Got Group ID {} for {}", new Object[] {groupId, flowFile});
        // NOTE(review): 'reader' is handed to the bin manager while still inside
        // try-with-resources — this presumes add(...) consumes the records before
        // this method returns (or otherwise tolerates a closed reader); confirm
        // against RecordBinManager's contract.
        binManager.add(groupId, flowFile, reader, session, block);
    } catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }
}
final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger()); final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
recordWriter = createHDFSRecordWriter(context, flowFile, configuration, tempFile, recordReader.getSchema()); writeResult.set(recordWriter.write(recordSet)); } catch (Exception e) {
final RecordReader reader = recordParserFactory.createRecordReader(flowFile, inStream, getLogger())) { final MongoCollection<Document> collection = getCollection(context, flowFile).withWriteConcern(writeConcern); RecordSchema schema = reader.getSchema(); Record record; while ((record = reader.nextRecord()) != null) {
PutFlowFile putFlowFile = createPut(context, record, reader.getSchema(), recordPath, flowFile, rowFieldName, columnFamily, timestampFieldName, fieldEncodingStrategy, rowEncodingStrategy, complexFieldStrategy); if (putFlowFile.getColumns().size() == 0) {
try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema()); final OutputStream out = session.write(outFlowFile);
final Map<String, String> originalAttributes = original.getAttributes(); final RecordReader reader = recordReaderFactory.createRecordReader(originalAttributes, rawIn, getLogger()); final RecordSchema inputSchema = reader.getSchema();
final RecordReader reader = recordParserFactory.createRecordReader(inputFlowFile, inputStream, getLogger())){ final RecordSchema schema = reader.getSchema(); Record record;
try (final InputStream in = session.read(original); final RecordReader reader = readerFactory.createRecordReader(original, in, getLogger())) { schema = writerFactory.getSchema(original.getAttributes(), reader.getSchema()); Record record;
final RecordReader recordReader = avroReader.createRecordReader(validFlowFile.getAttributes(), resultContentStream, runner.getLogger()); ) { final RecordSchema resultSchema = recordReader.getSchema(); assertEquals(3, resultSchema.getFieldCount());