protected boolean isHashKeyValueConsistent(String hashKeyName, Object hashKeyValue, ProcessSession session, FlowFile flowFile) {
    boolean isConsistent = true;

    if (hashKeyValue == null || StringUtils.isBlank(hashKeyValue.toString())) {
        getLogger().error("Hash key value '" + hashKeyValue + "' is required for flow file " + flowFile);
        flowFile = session.putAttribute(flowFile, DYNAMODB_HASH_KEY_VALUE_ERROR,
                "hash key '" + hashKeyName + "'/value '" + hashKeyValue + "' inconsistency error");
        session.transfer(flowFile, REL_FAILURE);
        isConsistent = false;
    }

    return isConsistent;
}
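// A hypothetical caller sketch: the guard above transfers the flow file to
// REL_FAILURE itself, so a caller iterating a batch only needs to skip the
// inconsistent record. HASH_KEY_NAME and HASH_KEY_VALUE are assumed property
// descriptors, not necessarily the processor's real ones.
for (FlowFile flowFile : flowFiles) {
    String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions(flowFile).getValue();
    Object hashKeyValue = context.getProperty(HASH_KEY_VALUE).evaluateAttributeExpressions(flowFile).getValue();
    if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
        continue; // already routed to failure by the guard
    }
    // ... build and send the DynamoDB request for this flow file ...
}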
/**
 * Helper method to create the InfluxDB instance.
 *
 * @return InfluxDB instance
 */
protected synchronized InfluxDB getInfluxDB(ProcessContext context) {
    if (influxDB.get() == null) {
        String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
        String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();
        long connectionTimeout = context.getProperty(INFLUX_DB_CONNECTION_TIMEOUT).asTimePeriod(TimeUnit.SECONDS);
        String influxDbUrl = context.getProperty(INFLUX_DB_URL).evaluateAttributeExpressions().getValue();
        try {
            influxDB.set(makeConnection(username, password, influxDbUrl, connectionTimeout));
        } catch (Exception e) {
            getLogger().error("Error while getting connection {}", new Object[]{e.getLocalizedMessage()}, e);
            throw new RuntimeException("Error while getting connection " + e.getLocalizedMessage(), e);
        }
        getLogger().info("InfluxDB connection created for host {}", new Object[]{influxDbUrl});
    }
    return influxDB.get();
}
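// A minimal sketch of makeConnection (an assumption -- the helper is not shown
// in the original), using influxdb-java's InfluxDBFactory. Wiring the timeout
// through an OkHttpClient.Builder is one plausible way to honor the configured
// connection timeout. Requires org.influxdb.InfluxDBFactory and okhttp3.OkHttpClient.
protected InfluxDB makeConnection(String username, String password, String influxDbUrl, long connectionTimeout) {
    OkHttpClient.Builder builder = new OkHttpClient.Builder()
            .connectTimeout(connectionTimeout, TimeUnit.SECONDS);
    if (StringUtils.isBlank(username) || StringUtils.isBlank(password)) {
        return InfluxDBFactory.connect(influxDbUrl, builder);
    }
    return InfluxDBFactory.connect(influxDbUrl, username, password, builder);
}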
protected void initializeResolver(final ProcessContext context) {
    final String dnsTimeout = context.getProperty(DNS_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).toString();
    final String dnsServer = context.getProperty(DNS_SERVER).getValue();
    final String dnsRetries = context.getProperty(DNS_RETRIES).getValue();

    String finalServer = "";

    Hashtable<String, String> env = new Hashtable<>();
    env.put("java.naming.factory.initial", contextFactory);
    env.put("com.sun.jndi.dns.timeout.initial", dnsTimeout);
    env.put("com.sun.jndi.dns.timeout.retries", dnsRetries);

    if (StringUtils.isNotEmpty(dnsServer)) {
        // Build a space-separated list of "dns://host/." provider URLs.
        for (String server : dnsServer.split(",")) {
            finalServer = finalServer + "dns://" + server + "/. ";
        }
        env.put(Context.PROVIDER_URL, finalServer);
    }

    try {
        initializeContext(env);
        initialized.set(true);
    } catch (NamingException e) {
        getLogger().error("Could not initialize JNDI context", e);
    }
}
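// A minimal sketch of initializeContext (the helper is not shown in the
// original): standard JNDI wiring via javax.naming.directory.InitialDirContext.
// The 'ictx' field name is an assumption.
protected void initializeContext(Hashtable<String, String> env) throws NamingException {
    ictx = new InitialDirContext(env);
}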
private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRecord, final ProcessSession session,
                                final Exception cause, final String message) {
    // If we are unable to parse the data, we need to transfer it to the 'parse failure' relationship.
    final Map<String, String> attributes = getAttributes(consumerRecord);
    attributes.put(KafkaProcessorUtils.KAFKA_OFFSET, String.valueOf(consumerRecord.offset()));
    attributes.put(KafkaProcessorUtils.KAFKA_PARTITION, String.valueOf(consumerRecord.partition()));
    attributes.put(KafkaProcessorUtils.KAFKA_TOPIC, consumerRecord.topic());

    FlowFile failureFlowFile = session.create();

    final byte[] value = consumerRecord.value();
    if (value != null) {
        failureFlowFile = session.write(failureFlowFile, out -> out.write(value));
    }
    failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

    final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, consumerRecord.topic());
    session.getProvenanceReporter().receive(failureFlowFile, transitUri);

    session.transfer(failureFlowFile, REL_PARSE_FAILURE);

    if (cause == null) {
        logger.error(message);
    } else {
        logger.error(message, cause);
    }

    session.adjustCounter("Parse Failures", 1, false);
}
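// A hedged sketch of getAttributes (the helper is not shown in the original):
// copies Kafka record headers into flow file attributes. Header support needs
// kafka-clients 0.11+ and a headers character set; both are assumptions here.
// Requires java.util.HashMap, java.nio.charset.StandardCharsets, and
// org.apache.kafka.common.header.Header.
private Map<String, String> getAttributes(final ConsumerRecord<byte[], byte[]> consumerRecord) {
    final Map<String, String> attributes = new HashMap<>();
    for (final Header header : consumerRecord.headers()) {
        if (header.value() != null) {
            attributes.put(header.key(), new String(header.value(), StandardCharsets.UTF_8));
        }
    }
    return attributes;
}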
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    try {
        flowFile = session.write(flowFile, new OutputStreamCallback() {
            @Override
            public void process(OutputStream out) throws IOException {
                IOUtils.write(resourceData, out, Charset.defaultCharset());
            }
        });
        session.transfer(flowFile, REL_SUCCESS);
    } catch (ProcessException ex) {
        getLogger().error("Unable to process", ex);
        session.transfer(flowFile, REL_FAILURE);
    }
}
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization.
    if (!context.getProperty(SQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
    dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
}
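// A hedged sketch (assumption) of how onTrigger would later use the DBCP
// controller service captured above. DBCPService#getConnection() is the
// standard NiFi DBCP API; 'selectQuery' stands in for the evaluated query.
// Requires java.sql.Connection, java.sql.Statement, java.sql.ResultSet.
try (final Connection con = dbcpService.getConnection();
     final Statement st = con.createStatement()) {
    final ResultSet resultSet = st.executeQuery(selectQuery);
    // ... stream the result set into the outgoing flow file ...
}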
/**
 * Report the registered metrics.
 *
 * @param context used for getting the most recent {@link ProcessGroupStatus}.
 */
@Override
public void onTrigger(ReportingContext context) {
    String groupId = context.getProperty(PROCESS_GROUP_ID).evaluateAttributeExpressions().getValue();
    ProcessGroupStatus statusToReport = groupId == null
            ? context.getEventAccess().getControllerStatus()
            : context.getEventAccess().getGroupStatus(groupId);
    if (statusToReport != null) {
        currentStatusReference.set(statusToReport);
        reporter.report();
    } else {
        getLogger().error("Process group with provided group id could not be found.");
    }
}
@OnEnabled
public void onEnabled(final ConfigurationContext context) throws InitializationException {
    try {
        setupClient(context);
        charset = Charset.forName(context.getProperty(CHARSET).getValue());
    } catch (Exception ex) {
        getLogger().error("Could not initialize ElasticSearch client.", ex);
        throw new InitializationException(ex);
    }
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // Reconstructed control flow: the original snippet was truncated and had
    // the failure branch fused into the null check. The computation of
    // 'missingKeys' and 'hashValue' is not shown and is assumed to happen
    // between the null check and this branch.
    if (!missingKeys.isEmpty()) {
        logger.error("routing {} to 'failure' because of missing attributes: {}", new Object[]{flowFile, missingKeys.toString()});
        session.transfer(flowFile, REL_FAILURE);
    } else {
        flowFile = session.putAttribute(flowFile, context.getProperty(HASH_VALUE_ATTRIBUTE).getValue(), hashValue);
        session.getProvenanceReporter().modifyAttributes(flowFile);
        session.transfer(flowFile, REL_SUCCESS);
    }
}
@OnScheduled
public void onScheduled(final ProcessContext context) {
    hostname = context.getProperty(DB_HOST).getValue();
    port = context.getProperty(DB_PORT).asInteger();
    username = context.getProperty(USERNAME).getValue();
    password = context.getProperty(PASSWORD).getValue();
    databaseName = context.getProperty(DB_NAME).getValue();
    tableName = context.getProperty(TABLE_NAME).getValue();

    try {
        rethinkDbConnection = makeConnection();
    } catch (Exception e) {
        getLogger().error("Error while getting connection " + e.getLocalizedMessage(), e);
        throw new RuntimeException("Error while getting connection " + e.getLocalizedMessage(), e);
    }
    getLogger().info("RethinkDB connection created for host {} port {} and db {}", new Object[]{hostname, port, databaseName});
}
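// A minimal sketch of makeConnection (the helper is not shown in the
// original), using the official RethinkDB Java driver's connection builder
// (com.rethinkdb.RethinkDB / com.rethinkdb.net.Connection).
protected Connection makeConnection() {
    return RethinkDB.r.connection()
            .hostname(hostname)
            .port(port)
            .user(username, password)
            .db(databaseName)
            .connect();
}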
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    try {
        Long currTime = System.currentTimeMillis();
        if (microBatchTime == null) {
            noMicroBatch(context, session, flowFile, currTime);
        } else {
            microBatch(context, session, flowFile, currTime);
        }
    } catch (Exception e) {
        getLogger().error("Ran into an error while processing {}.", new Object[]{flowFile}, e);
        session.transfer(flowFile, REL_FAILURE);
    }
}
protected boolean isRangeKeyValueConsistent(String rangeKeyName, Object rangeKeyValue, ProcessSession session, FlowFile flowFile) {
    boolean isRangeNameBlank = StringUtils.isBlank(rangeKeyName);
    boolean isRangeValueNull = rangeKeyValue == null;
    boolean isConsistent = true;

    // A range key name without a value is inconsistent.
    if (!isRangeNameBlank && (isRangeValueNull || StringUtils.isBlank(rangeKeyValue.toString()))) {
        isConsistent = false;
    }
    // A range key value without a name is also inconsistent.
    if (isRangeNameBlank && (!isRangeValueNull && !StringUtils.isBlank(rangeKeyValue.toString()))) {
        isConsistent = false;
    }

    if (!isConsistent) {
        getLogger().error("Range key name '" + rangeKeyName + "' was not consistent with range value '" + rangeKeyValue + "' for " + flowFile);
        flowFile = session.putAttribute(flowFile, DYNAMODB_RANGE_KEY_VALUE_ERROR,
                "range key '" + rangeKeyName + "'/value '" + rangeKeyValue + "' inconsistency error");
        session.transfer(flowFile, REL_FAILURE);
    }

    return isConsistent;
}
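// Consistency matrix for the check above (both-present and both-absent pass):
//   rangeKeyName blank, rangeKeyValue blank -> consistent (no range key used)
//   rangeKeyName set,   rangeKeyValue set   -> consistent
//   rangeKeyName set,   rangeKeyValue blank -> inconsistent, routed to failure
//   rangeKeyName blank, rangeKeyValue set   -> inconsistent, routed to failure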
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization.
    if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final String algorithm = context.getProperty(HASH_ALGORITHM).getValue();
    final MessageDigest digest;
    try {
        digest = MessageDigest.getInstance(algorithm);
    } catch (NoSuchAlgorithmException e) {
        logger.error("Failed to process {} due to {}; routing to failure", new Object[]{flowFile, e});
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    // Reconstructed section: the original snippet was truncated here. The
    // content read that fills 'hashValueHolder' is an assumption; hex encoding
    // via org.apache.commons.codec.binary.Hex is also assumed.
    final AtomicReference<String> hashValueHolder = new AtomicReference<>(null);
    try {
        session.read(flowFile, in -> {
            final byte[] buffer = new byte[8192];
            int len;
            while ((len = in.read(buffer)) > 0) {
                digest.update(buffer, 0, len);
            }
            hashValueHolder.set(Hex.encodeHexString(digest.digest()));
        });

        final String attributeName = context.getProperty(ATTRIBUTE_NAME).getValue();
        flowFile = session.putAttribute(flowFile, attributeName, hashValueHolder.get());
        logger.info("Successfully added attribute '{}' to {} with a value of {}; routing to success",
                new Object[]{attributeName, flowFile, hashValueHolder.get()});
        session.getProvenanceReporter().modifyAttributes(flowFile);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final ProcessException e) {
        logger.error("Failed to process {} due to {}; routing to failure", new Object[]{flowFile, e});
        session.transfer(flowFile, REL_FAILURE);
    }
}
@OnScheduled
public void onScheduled(final ProcessContext context) {
    try {
        channel = new NifiSinkSessionChannel(SUCCESS, FAILURE);
        channel.start();

        sink = SINK_FACTORY.create(context.getProperty(SOURCE_NAME).getValue(),
                context.getProperty(SINK_TYPE).getValue());
        sink.setChannel(channel);

        String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
        String agentName = context.getProperty(AGENT_NAME).getValue();
        String sinkName = context.getProperty(SOURCE_NAME).getValue();
        Configurables.configure(sink, getFlumeSinkContext(flumeConfig, agentName, sinkName));

        sink.start();
    } catch (Throwable th) {
        getLogger().error("Error creating sink", th);
        throw Throwables.propagate(th);
    }
}
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    try {
        Map<String, RecordPath> paths = getRecordPaths(context, input);
        Map<String, String> stats = getStats(input, paths, context, session);

        input = session.putAllAttributes(input, stats);
        session.transfer(input, REL_SUCCESS);
    } catch (Exception ex) {
        getLogger().error("Error processing stats.", ex);
        session.transfer(input, REL_FAILURE);
    }
}
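// A hedged sketch of getRecordPaths (the helper is not shown in the original):
// compiles each dynamic property value into a RecordPath, keyed by property
// name. Treating dynamic properties as record paths is an assumption about
// this processor's configuration model.
protected Map<String, RecordPath> getRecordPaths(ProcessContext context, FlowFile flowFile) {
    final Map<String, RecordPath> paths = new HashMap<>();
    for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
        if (descriptor.isDynamic()) {
            final String expression = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue();
            paths.put(descriptor.getName(), RecordPath.compile(expression));
        }
    }
    return paths;
}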
private void failFlowWithBlankEvaluatedProperty(ProcessSession session, FlowFile flowFile, PropertyDescriptor pd) {
    getLogger().error("{} value is blank after attribute expression language evaluation", new Object[]{pd.getName()});
    flowFile = session.penalize(flowFile);
    session.transfer(flowFile, REL_FAILURE);
}
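// A hypothetical caller sketch: evaluate the property against the flow file
// and bail out via the helper when the result is blank. MY_PROPERTY is an
// assumed descriptor, not one from the original processor.
final String evaluated = context.getProperty(MY_PROPERTY).evaluateAttributeExpressions(flowFile).getValue();
if (StringUtils.isBlank(evaluated)) {
    failFlowWithBlankEvaluatedProperty(session, flowFile, MY_PROPERTY);
    return;
}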