@TriggerWhenEmpty // trigger when empty so we have a chance to perform a Kerberos re-login @DefaultSettings(yieldDuration = "100 ms") // decrease the default yield since we are triggering when empty public abstract class AbstractFetchHDFSRecord extends AbstractHadoopProcessor {
@TriggerWhenEmpty // trigger when empty so we have a chance to perform a Kerberos re-login @DefaultSettings(yieldDuration = "100 ms") // decrease the default yield since we are triggering when empty public abstract class AbstractPutHDFSRecord extends AbstractHadoopProcessor {
@Deprecated
// NOTE(review): trailing space removed from the reason text so the message matches the
// otherwise-identical DeprecationNotice used by the topic-consuming sibling processor.
@DeprecationNotice(classNames = {"org.apache.nifi.jms.processors.ConsumeJMS"},
        reason = "This processor is deprecated and may be removed in future releases.")
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"jms", "queue", "listen", "get", "pull", "source", "consume", "consumer"})
@TriggerWhenEmpty
@Tags({ "Spring", "Message", "Get", "Put", "Integration" })
@CapabilityDescription("A Processor that supports sending and receiving data from application defined in "
@TriggerWhenEmpty
@Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "sequence file"})
@CapabilityDescription("Fetch sequence files from Hadoop Distributed File System (HDFS) into FlowFiles")
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @Tags({"splunk", "logs", "tcp", "udp"}) @TriggerWhenEmpty // because we have a queue of sessions that are ready to be committed @CapabilityDescription("Sends logs to Splunk Enterprise over TCP, TCP + TLS/SSL, or UDP. If a Message " + "Delimiter is provided, then this processor will read messages from the incoming FlowFile based on the " +
@SeeAlso(ListenUDP.class) @Tags({ "remote", "egress", "put", "udp" }) @TriggerWhenEmpty // trigger even when queue is empty so that the processor can check for idle senders to prune. public class PutUDP extends AbstractPutEventProcessor {
@SeeAlso(ListenTCP.class) @Tags({ "remote", "egress", "put", "tcp" }) @TriggerWhenEmpty // trigger even when queue is empty so that the processor can check for idle senders to prune. public class PutTCP extends AbstractPutEventProcessor {
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@TriggerWhenEmpty
@Tags({"syslog", "put", "udp", "tcp", "logs"})
@CapabilityDescription("Sends Syslog messages to a given host and port over TCP or UDP. Messages are constructed from the \"Message ___\" properties of the processor " +
@DeprecationNotice(classNames = {"org.apache.nifi.jms.processors.ConsumeJMS"},
        reason = "This processor is deprecated and may be removed in future releases.")
@TriggerSerially
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"jms", "topic", "subscription", "durable", "non-durable", "listen", "get", "pull", "source", "consume", "consumer"})
@TriggerWhenEmpty
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@Tags({"hadoop", "HDFS", "delete", "remove", "filesystem"})
@TriggerWhenEmpty
@TriggerSerially
@InputRequirement(InputRequirement.Requirement.INPUT_FORBIDDEN)
@SideEffectFree
@TriggerSerially
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"monitor", "flow", "active", "inactive", "activity", "detection"})
@TriggerSerially
@TriggerWhenEmpty
@Tags({"hadoop", "events", "inotify", "notifications", "filesystem"})
@WritesAttributes({
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
@TriggerWhenEmpty
@SupportsBatching
@InputRequirement(Requirement.INPUT_ALLOWED)
@SideEffectFree
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"merge", "record", "content", "correlation", "stream", "event"})
@PrimaryNodeOnly
@TriggerSerially
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"hadoop", "HDFS", "get", "list", "ingest", "source", "filesystem"})
@PrimaryNodeOnly
@TriggerSerially
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"Amazon", "S3", "AWS", "list"})
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"local", "files", "filesystem", "ingest", "ingress", "get", "source", "input"})