/**
 * Records the failing line number and error message as attributes on the flow file.
 *
 * @param line    the line number at which the error occurred
 * @param msg     the error message; an empty string is stored when null
 * @param file    the flow file to annotate
 * @param session the session used to mutate the flow file
 * @return the flow file carrying the error attributes
 */
private FlowFile writeErrorAttributes(int line, String msg, FlowFile file, ProcessSession session) {
    final String safeMessage = (msg == null) ? "" : msg;
    FlowFile annotated = session.putAttribute(file, ERROR_LINE, String.valueOf(line));
    annotated = session.putAttribute(annotated, ERROR_MSG, safeMessage);
    return annotated;
}
/**
 * Annotates each flow file with the exception message so downstream processors can
 * inspect the failure cause, and collects the annotated flow files.
 *
 * @param session   the process session used to update attributes
 * @param flowFiles the flow files affected by the failure
 * @param exception the exception that caused the failure
 * @return the flow files with the error attribute applied
 */
protected List<FlowFile> processException(final ProcessSession session, List<FlowFile> flowFiles, Exception exception) {
    final List<FlowFile> failedFlowFiles = new ArrayList<>(flowFiles.size());
    // BUGFIX: Exception.getMessage() can be null (e.g. a bare NullPointerException) and
    // ProcessSession.putAttribute rejects null values; fall back to the exception class name.
    final String errorMessage = exception.getMessage() != null ? exception.getMessage() : exception.getClass().getName();
    for (FlowFile flowFile : flowFiles) {
        failedFlowFiles.add(session.putAttribute(flowFile, DYNAMODB_ERROR_EXCEPTION_MESSAGE, errorMessage));
    }
    return failedFlowFiles;
}
/**
 * Copies request details (matching headers, remote host, request URI, and the
 * authenticated subject DN) onto the flow file as attributes.
 *
 * @param request      the incoming HTTP servlet request
 * @param session      the process session used to update attributes
 * @param foundSubject the distinguished name of the authenticated remote user
 * @param flowFile     the flow file to annotate
 * @return the flow file carrying the request attributes
 */
protected FlowFile saveRequestDetailsAsAttributes(final HttpServletRequest request, final ProcessSession session, String foundSubject, FlowFile flowFile) {
    final Map<String, String> attributes = new HashMap<>();
    addMatchingRequestHeaders(request, attributes);
    attributes.put("restlistener.remote.source.host", request.getRemoteHost());
    attributes.put("restlistener.request.uri", request.getRequestURI());
    attributes.put("restlistener.remote.user.dn", foundSubject);
    return session.putAllAttributes(flowFile, attributes);
}
/**
 * Adds the current flow file to its group, stamping the "started at" attribute
 * with the current time if it has not been set yet.
 */
private void markFlowFileValid() {
    final List<FlowFile> groupedFlowFiles = flowFileGroups.computeIfAbsent(groupId, unused -> new ArrayList<>());
    FlowFile validFlowFile = flowFile;
    // Only stamp the start time on first sight of this flow file.
    if (isBlank(validFlowFile.getAttribute(ATTR_STARTED_AT))) {
        validFlowFile = processSession.putAttribute(validFlowFile, ATTR_STARTED_AT, String.valueOf(now));
    }
    groupedFlowFiles.add(validFlowFile);
}
/**
 * Annotates the flow file with the failure detail and routes it to the failure relationship.
 *
 * @param processSession the session used to mutate and transfer the flow file
 * @param flowfile       the flow file that failed
 * @param value          the failure detail to record
 * @return the transferred flow file
 */
private FlowFile transferToFailure(final ProcessSession processSession, FlowFile flowfile, final String value) {
    final FlowFile failed = processSession.putAttribute(flowfile, ATTR_WS_FAILURE_DETAIL, value);
    processSession.transfer(failed, REL_FAILURE);
    return failed;
}
/**
 * Serializes the HBase row and stores the result on the flow file, marking the row handled.
 */
@Override
public void handle(byte[] row, ResultCell[] resultCells) {
    flowFile = session.putAttribute(flowFile, HBASE_ROW_ATTR, rowSerializer.serialize(row, resultCells));
    handledRow = true;
}
/**
 * Writes the aggregation JSON as the flow file content and applies identifying attributes.
 *
 * @param name        optional aggregation name; stored as "aggregation.name" when non-null
 * @param json        the serialized aggregation payload to write
 * @param session     the process session
 * @param aggFlowFile the flow file to populate
 * @param attributes  additional attributes to apply
 * @return the updated flow file
 */
private FlowFile writeAggregationFlowFileContents(String name, String json, ProcessSession session, FlowFile aggFlowFile, Map<String, String> attributes) {
    // BUGFIX: specify UTF-8 explicitly — the no-arg getBytes() uses the platform default
    // charset, which can corrupt non-ASCII JSON depending on the JVM's environment.
    aggFlowFile = session.write(aggFlowFile, out -> out.write(json.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    if (name != null) {
        aggFlowFile = session.putAttribute(aggFlowFile, "aggregation.name", name);
    }
    return session.putAllAttributes(aggFlowFile, attributes);
}
/**
 * Creates a child flow file that will hold a JSON array of results: sets the table
 * and MIME-type attributes and writes the opening bracket of the array.
 *
 * @param session   the process session
 * @param origFF    the parent flow file
 * @param tableName the HBase table name being scanned
 * @return the initialized flow file
 * @throws IOException if writing the opening bracket fails
 */
private FlowFile initNewFlowFile(final ProcessSession session, final FlowFile origFF, final String tableName) throws IOException {
    FlowFile flowFile = session.create(origFF);
    flowFile = session.putAttribute(flowFile, HBASE_TABLE_ATTR, tableName);
    flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");
    // The write callback cannot throw a checked exception, so capture any IOException and rethrow.
    final AtomicReference<IOException> ioe = new AtomicReference<>(null);
    flowFile = session.write(flowFile, (out) -> {
        try {
            // Explicit charset avoids depending on the platform default encoding.
            out.write("[".getBytes(java.nio.charset.StandardCharsets.UTF_8));
        } catch (IOException e) {
            ioe.set(e);
        }
    });
    if (ioe.get() != null) {
        throw ioe.get();
    }
    return flowFile;
}
/**
 * Validates that the DynamoDB hash key value is present and non-blank. On failure the
 * flow file is annotated with the inconsistency detail and routed to REL_FAILURE.
 *
 * @param hashKeyName  the configured hash key name (used in the error detail)
 * @param hashKeyValue the hash key value to validate
 * @param session      the process session
 * @param flowFile     the flow file being processed
 * @return true when the hash key value is usable, false when the flow file was routed to failure
 */
protected boolean isHashKeyValueConsistent(String hashKeyName, Object hashKeyValue, ProcessSession session, FlowFile flowFile) {
    // Guard clause: a present, non-blank value is consistent.
    if (hashKeyValue != null && !StringUtils.isBlank(hashKeyValue.toString())) {
        return true;
    }
    getLogger().error("Hash key value '" + hashKeyValue + "' is required for flow file " + flowFile);
    flowFile = session.putAttribute(flowFile, DYNAMODB_HASH_KEY_VALUE_ERROR, "hash key " + hashKeyName + "/value '" + hashKeyValue + "' inconsistency error");
    session.transfer(flowFile, REL_FAILURE);
    return false;
}
/**
 * Finishes a results flow file: records the row count, appends the closing JSON bracket,
 * and routes to success (with a provenance receive event) or to failure when either the
 * scan raised an exception or the append failed.
 *
 * @param session            the process session
 * @param hBaseClientService used to build the transit URI for provenance
 * @param flowFile           the flow file being finalized
 * @param tableName          the scanned table name
 * @param rowsPulled         number of rows written into the flow file
 * @param e                  the scan exception, or null when the scan succeeded
 */
private void finalizeFlowFile(final ProcessSession session, final HBaseClientService hBaseClientService, FlowFile flowFile, final String tableName, Long rowsPulled, Exception e) {
    Relationship rel = REL_SUCCESS;
    flowFile = session.putAttribute(flowFile, HBASE_ROWS_COUNT_ATTR, rowsPulled.toString());
    // The append callback cannot throw a checked exception, so capture any IOException.
    final AtomicReference<IOException> ioe = new AtomicReference<>(null);
    flowFile = session.append(flowFile, (out) -> {
        try {
            // Explicit charset avoids depending on the platform default encoding.
            out.write("]".getBytes(java.nio.charset.StandardCharsets.UTF_8));
        } catch (IOException ei) {
            ioe.set(ei);
        }
    });
    if (e != null || ioe.get() != null) {
        // BUGFIX: the original used (e == null ? e : ioe.get()), which dereferences null
        // when only the append failed and discards e when the scan failed. Prefer the scan
        // exception, falling back to the append IOException.
        final Exception error = (e != null) ? e : ioe.get();
        flowFile = session.putAttribute(flowFile, "scanhbase.error", error.toString());
        rel = REL_FAILURE;
    } else {
        session.getProvenanceReporter().receive(flowFile, hBaseClientService.toTransitUri(tableName, "{ids}"));
    }
    session.transfer(flowFile, rel);
}
/**
 * Updates the given FlowFile's attributes with the given key/value pair, delegating to the
 * wrapped session. If the key is named {@code uuid}, this attribute will be ignored.
 *
 * @param flowFile the flow file to update
 * @param key      the attribute key
 * @param value    the attribute value
 * @return the updated FlowFile, wrapped as a SessionFile
 * @throws FlowFileHandlingException if the given FlowFile is already transferred or removed or
 *         doesn't belong to this session. Automatic rollback will occur.
 * @throws NullPointerException if an argument is null
 */
@Override
public SessionFile putAttribute(FlowFile flowFile, String key, String value) {
    final SessionFile sessionFile = wrap(flowFile);
    // Delegate to the underlying session and track the modification.
    sessionFile.flowFile = onMod(s.putAttribute(sessionFile.flowFile, key, value));
    return sessionFile;
}
/**
 * Emits the bytes of a bad chunk as a new flow file (child of the original) with the given
 * filename and a binary MIME type, routed to the bad-chunk relationship.
 *
 * @param original       the parent flow file
 * @param processSession the session used to create and route the flow file
 * @param chunkName      the filename to assign to the bad chunk
 * @param badChunk       the raw bytes of the bad chunk
 */
public void handle(FlowFile original, ProcessSession processSession, String chunkName, byte[] badChunk) {
    FlowFile badChunkFlowFile = processSession.create(original);
    badChunkFlowFile = processSession.write(badChunkFlowFile, out -> out.write(badChunk));
    badChunkFlowFile = processSession.putAttribute(badChunkFlowFile, CoreAttributes.FILENAME.key(), chunkName);
    badChunkFlowFile = processSession.putAttribute(badChunkFlowFile, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_BINARY.toString());
    processSession.transfer(badChunkFlowFile, badChunkRelationship);
}
}
/**
 * Names the flow file, tags it as XML, and routes it to success or — when an exception
 * was raised — logs the error and routes it to failure.
 *
 * @param session   the process session
 * @param logger    logger for reporting processing failures
 * @param updated   the flow file to annotate and route
 * @param exception the processing exception, or null on success
 * @param name      the filename to assign
 */
public void process(ProcessSession session, ComponentLog logger, FlowFile updated, Exception exception, String name) {
    updated = session.putAttribute(updated, CoreAttributes.FILENAME.key(), name);
    updated = session.putAttribute(updated, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_XML_UTF_8.toString());
    if (exception != null) {
        logger.error(UNABLE_TO_PROCESS_DUE_TO, new Object[]{name, exception}, exception);
        session.transfer(updated, failureRelationship);
    } else {
        session.transfer(updated, successRelationship);
    }
}
}
/**
 * Clones the incoming flow file the configured number of times, stamping each clone with
 * its 1-based copy index and the original with index "0"; all are routed to success.
 *
 * @param context the process context supplying the NUM_COPIES property
 * @param session the process session
 * @throws ProcessException if processing fails
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // Hoisted out of the loop: the original re-evaluated the property expression on every
    // iteration even though its value cannot change within a single trigger.
    final int numCopies = context.getProperty(NUM_COPIES).evaluateAttributeExpressions(flowFile).asInteger();
    for (int i = 1; i <= numCopies; i++) {
        FlowFile copy = session.clone(flowFile);
        copy = session.putAttribute(copy, COPY_INDEX_ATTRIBUTE, Integer.toString(i));
        session.transfer(copy, REL_SUCCESS);
    }
    // The original flow file carries index 0.
    flowFile = session.putAttribute(flowFile, COPY_INDEX_ATTRIBUTE, "0");
    session.transfer(flowFile, REL_SUCCESS);
}
/**
 * Sends unhandled items to the unprocessed relationship and removes the flow file
 * from the key-to-flow-file map.
 *
 * @param session           used for sending the flow file
 * @param keysToFlowFileMap ItemKeys to flow file map
 * @param hashKeyValue      the item's hash key value
 * @param rangeKeyValue     the item's range key value
 */
protected void sendUnprocessedToUnprocessedRelationship(final ProcessSession session, Map<ItemKeys, FlowFile> keysToFlowFileMap, Object hashKeyValue, Object rangeKeyValue) {
    final ItemKeys itemKeys = new ItemKeys(hashKeyValue, rangeKeyValue);
    FlowFile flowFile = keysToFlowFileMap.get(itemKeys);
    // ROBUSTNESS: the map may not contain this key; the original would NPE in putAttribute.
    if (flowFile == null) {
        getLogger().error("Unprocessed key " + itemKeys + " was not found in the keys-to-flow-file map");
        return;
    }
    flowFile = session.putAttribute(flowFile, DYNAMODB_KEY_ERROR_UNPROCESSED, itemKeys.toString());
    session.transfer(flowFile, REL_UNPROCESSED);
    getLogger().error("Unprocessed key " + itemKeys + " for flow file " + flowFile);
    keysToFlowFileMap.remove(itemKeys);
}
@Override protected FlowFile preprocessFlowFile(final ProcessContext context, final ProcessSession session, final FlowFile flowFile) { FlowFile processed = flowFile; // handle backward compatibility with old segment attributes if (processed.getAttribute(FRAGMENT_COUNT_ATTRIBUTE) == null && processed.getAttribute(SEGMENT_COUNT_ATTRIBUTE) != null) { processed = session.putAttribute(processed, FRAGMENT_COUNT_ATTRIBUTE, processed.getAttribute(SEGMENT_COUNT_ATTRIBUTE)); } if (processed.getAttribute(FRAGMENT_INDEX_ATTRIBUTE) == null && processed.getAttribute(SEGMENT_INDEX_ATTRIBUTE) != null) { processed = session.putAttribute(processed, FRAGMENT_INDEX_ATTRIBUTE, processed.getAttribute(SEGMENT_INDEX_ATTRIBUTE)); } if (processed.getAttribute(FRAGMENT_ID_ATTRIBUTE) == null && processed.getAttribute(SEGMENT_ID_ATTRIBUTE) != null) { processed = session.putAttribute(processed, FRAGMENT_ID_ATTRIBUTE, processed.getAttribute(SEGMENT_ID_ATTRIBUTE)); } return processed; }
/**
 * Routes a flow file that exceeds the maximum Kinesis record size to the failure
 * relationship, annotating it with a size-violation detail.
 *
 * @param session           the process session
 * @param flowFileCandidate the oversized flow file
 * @param message           the attribute KEY under which the size-violation detail is stored
 * @return the transferred flow file
 */
protected FlowFile handleFlowFileTooBig(final ProcessSession session, FlowFile flowFileCandidate, String message) {
    final String detail = "record too big " + flowFileCandidate.getSize() + " max allowed " + MAX_MESSAGE_SIZE;
    flowFileCandidate = session.putAttribute(flowFileCandidate, message, detail);
    session.transfer(flowFileCandidate, REL_FAILURE);
    getLogger().error("Failed to publish to kinesis records {} because the size was greater than {} bytes",
            new Object[]{flowFileCandidate, MAX_MESSAGE_SIZE});
    return flowFileCandidate;
}
/**
 * Converts a received JMS message into a flow file: writes the body as content, maps JMS
 * headers and properties to attributes, records the source destination, reports provenance,
 * and transfers to success before committing the session.
 */
@Override
public void accept(final JMSResponse response) {
    if (response == null) {
        return;
    }
    FlowFile flowFile = processSession.create();
    flowFile = processSession.write(flowFile, out -> out.write(response.getMessageBody()));
    // Headers first, then properties — matching the original attribute-application order.
    flowFile = ConsumeJMS.this.updateFlowFileAttributesWithJMSAttributes(response.getMessageHeaders(), flowFile, processSession);
    flowFile = ConsumeJMS.this.updateFlowFileAttributesWithJMSAttributes(response.getMessageProperties(), flowFile, processSession);
    flowFile = processSession.putAttribute(flowFile, JMS_SOURCE_DESTINATION_NAME, destinationName);
    processSession.getProvenanceReporter().receive(flowFile, destinationName);
    processSession.transfer(flowFile, REL_SUCCESS);
    processSession.commit();
}
});
/**
 * Verifies that with DESTINATION_ATTRIBUTE configured, a flow file's attributes are
 * serialized into the JSON attribute and the payload round-trips through Jackson.
 */
@Test
public void testAttributes_emptyListUserSpecifiedAttributes() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(new AttributesToJSON());
    runner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);

    final ProcessSession session = runner.getProcessSessionFactory().createSession();
    FlowFile flowFile = session.create();
    flowFile = session.putAttribute(flowFile, TEST_ATTRIBUTE_KEY, TEST_ATTRIBUTE_VALUE);

    runner.enqueue(flowFile);
    runner.run();

    runner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0)
            .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    runner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    runner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);

    final String json = runner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS)
            .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    final Map<String, String> parsed = new ObjectMapper().readValue(json, HashMap.class);
    assertTrue(parsed.get(TEST_ATTRIBUTE_KEY).equals(TEST_ATTRIBUTE_VALUE));
}
@Test public void testInvalidJSONValueInAttribute() throws Exception { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE); ProcessSession session = testRunner.getProcessSessionFactory().createSession(); FlowFile ff = session.create(); //Create attribute that contains an invalid JSON Character ff = session.putAttribute(ff, TEST_ATTRIBUTE_KEY, "'badjson'"); testRunner.enqueue(ff); testRunner.run(); //Expecting success transition because Jackson is taking care of escaping the bad JSON characters testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); }