/**
 * Builds a placeholder Kafka transport protocol (host "kafka", port 0,
 * topic "test-topic") — presumably for tests or dummy wiring; TODO confirm.
 *
 * @return a {@link KafkaTransportProtocol} typed as {@link TransportProtocol}
 */
public static TransportProtocol makeDummyProtocol() {
  KafkaTransportProtocol dummy = new KafkaTransportProtocol();
  dummy.setBrokerHostname("kafka");
  dummy.setKafkaPort(0);
  dummy.setTopicDefinition(new SimpleTopicDefinition("test-topic"));
  return dummy;
}
}
/**
 * Opens a Kafka subscription: records the broker URL, topic, a fresh random
 * consumer group id, and starts the polling loop on a new thread (this class
 * is a Runnable).
 *
 * @param protocol       broker/topic settings to connect with
 * @param eventProcessor callback receiving raw message bytes
 * @throws SpRuntimeException declared by the interface contract
 */
@Override
public void connect(KafkaTransportProtocol protocol, InternalEventProcessor<byte[]> eventProcessor) throws SpRuntimeException {
  String topicName = protocol.getTopicDefinition().getActualTopicName();
  LOG.info("Kafka consumer: Connecting to " + topicName);
  // Wildcard topics are consumed via pattern subscription; flag is only ever set, never cleared here.
  if (protocol.getTopicDefinition() instanceof WildcardTopicDefinition) {
    this.patternTopic = true;
  }
  this.eventProcessor = eventProcessor;
  this.kafkaUrl = protocol.getBrokerHostname() + ":" + protocol.getKafkaPort();
  this.topic = topicName;
  // Random group id: each consumer instance reads independently of all others.
  this.groupId = UUID.randomUUID().toString();
  this.isRunning = true;
  new Thread(this).start();
}
/**
 * Assembles consumer configuration from the given protocol: ZooKeeper and
 * broker endpoints plus randomized group/client ids and fixed timeouts.
 *
 * @param protocol source of host/port settings
 * @return the populated {@link Properties} for a Kafka consumer
 */
protected Properties getProperties(KafkaTransportProtocol protocol) {
  Properties config = new Properties();
  config.put("zookeeper.connect", protocol.getZookeeperHost() + ":" + protocol.getZookeeperPort());
  config.put("bootstrap.servers", protocol.getBrokerHostname() + ":" + protocol.getKafkaPort());
  // Fresh ids per call — every consumer joins its own group.
  config.put("group.id", UUID.randomUUID().toString());
  config.put("client.id", UUID.randomUUID().toString());
  config.put("zookeeper.session.timeout.ms", "60000");
  config.put("zookeeper.sync.time.ms", "20000");
  config.put("auto.commit.interval.ms", "10000");
  return config;
}
/**
 * Copy constructor — delegates shared fields to the superclass copy
 * constructor, then clones the Kafka-specific port and ZooKeeper settings.
 *
 * @param other the instance to copy
 */
public KafkaTransportProtocol(KafkaTransportProtocol other) {
  super(other);
  this.zookeeperHost = other.getZookeeperHost();
  this.zookeeperPort = other.getZookeeperPort();
  this.kafkaPort = other.getKafkaPort();
}
/**
 * Builds minimal producer configuration: the broker endpoint and a random
 * client id.
 *
 * @param protocol source of broker host/port
 * @return the populated {@link Properties} for a Kafka producer
 */
protected Properties getProperties(KafkaTransportProtocol protocol) {
  Properties config = new Properties();
  config.put("bootstrap.servers", protocol.getBrokerHostname() + ":" + protocol.getKafkaPort());
  config.put("client.id", UUID.randomUUID().toString());
  return config;
}
}
/**
 * Defines the transport protocol Kafka used by a data stream at runtime using a
 * {@link org.streampipes.model.grounding.SimpleTopicDefinition}.
 * Note: the broker host/port pair is passed twice to the constructor — the
 * second pair presumably fills the ZooKeeper host/port fields; TODO confirm
 * against the KafkaTransportProtocol constructor.
 *
 * @param kafkaHost the hostname of any Kafka broker
 * @param kafkaPort the port of any Kafka broker
 * @param topic     the topic identifier
 * @return the {@link org.streampipes.model.grounding.KafkaTransportProtocol} containing URL and
 *         topic where data arrives
 */
public static KafkaTransportProtocol kafka(String kafkaHost, Integer kafkaPort, String topic) {
  return new KafkaTransportProtocol(kafkaHost, kafkaPort, topic, kafkaHost, kafkaPort);
}
protected String getKafkaUrl(SpDataStream stream) { // TODO add also jms support return protocol(stream).getBrokerHostname() + ":" + ((KafkaTransportProtocol) protocol(stream)).getKafkaPort(); }
/**
 * Creates the Flink Kafka source for the given protocol: a plain topic
 * subscription for simple topics, a regex-pattern subscription otherwise
 * (wildcard topic names are converted to regex form first).
 *
 * @param protocol broker/topic settings
 * @return a string-deserializing Kafka source function
 */
private SourceFunction<String> getKafkaConsumer(KafkaTransportProtocol protocol) {
  String topicName = protocol.getTopicDefinition().getActualTopicName();
  if (protocol.getTopicDefinition() instanceof SimpleTopicDefinition) {
    return new FlinkKafkaConsumer010<>(topicName, new SimpleStringSchema(), getProperties(protocol));
  }
  Pattern topicPattern = Pattern.compile(replaceWildcardWithPatternFormat(topicName));
  return new FlinkKafkaConsumer010<>(topicPattern, new SimpleStringSchema(), getProperties(protocol));
}
/**
 * Declares that a pipeline element (data processor or data sink) can consume
 * messages arriving from a Kafka broker; no host/port/topic is fixed yet.
 *
 * @return an empty {@link org.streampipes.model.grounding.KafkaTransportProtocol}
 */
public static KafkaTransportProtocol kafka() {
  return new KafkaTransportProtocol();
}
protected String getKafkaUrl(SpDataStream stream) { // TODO add also jms support return protocol(stream).getBrokerHostname() + ":" + ((KafkaTransportProtocol) protocol(stream)).getKafkaPort(); }
/** * This method takes the i's input stream and creates a source for the Spark streaming job * Currently just kafka is supported as a protocol * TODO Add also jms support * * @param i * @param streamingContext * @return */ private JavaInputDStream<ConsumerRecord<String, String>> getStreamSource(int i, JavaStreamingContext streamingContext) { if (graph.getInputStreams().size() - 1 >= i) { SpDataStream stream = graph.getInputStreams().get(i); if (stream != null) { KafkaTransportProtocol protocol = (KafkaTransportProtocol) stream.getEventGrounding().getTransportProtocol(); //System.out.println("Listening on Kafka topic '" + protocol.getTopicName() + "'"); return KafkaUtils.createDirectStream(streamingContext, LocationStrategies.PreferConsistent(), ConsumerStrategies.<String, String>Subscribe(Arrays.asList(protocol.getTopicDefinition().getActualTopicName()), kafkaParams)); } else { return null; } } else { return null; } } }
public SpKafkaConsumer(String kafkaUrl, String topic, InternalEventProcessor<byte[]> callback) { KafkaTransportProtocol protocol = new KafkaTransportProtocol(); protocol.setKafkaPort(Integer.parseInt(kafkaUrl.split(":")[1])); protocol.setBrokerHostname(kafkaUrl.split(":")[0]); protocol.setTopicDefinition(new SimpleTopicDefinition(topic)); try { this.connect(protocol, callback); } catch (SpRuntimeException e) { e.printStackTrace(); } }
/**
 * Defines the transport protocol Kafka used by a data stream at runtime using a
 * {@link org.streampipes.model.grounding.WildcardTopicDefinition}.
 *
 * @param kafkaHost               the hostname of any Kafka broker
 * @param kafkaPort               the port of any Kafka broker
 * @param wildcardTopicDefinition the wildcard topic definition
 * @return the {@link org.streampipes.model.grounding.KafkaTransportProtocol} containing URL and
 *         topic where data arrives
 */
public static KafkaTransportProtocol kafka(String kafkaHost, Integer kafkaPort,
                                           WildcardTopicDefinition wildcardTopicDefinition) {
  return new KafkaTransportProtocol(kafkaHost, kafkaPort, wildcardTopicDefinition);
}
/**
 * Connects the producer: stores the broker URL and topic, constructs the
 * underlying {@code KafkaProducer}, and marks this instance connected.
 *
 * @param protocolSettings broker/topic settings to connect with
 */
@Override
public void connect(KafkaTransportProtocol protocolSettings) {
  String topicName = protocolSettings.getTopicDefinition().getActualTopicName();
  LOG.info("Kafka producer: Connecting to " + topicName);
  this.brokerUrl = protocolSettings.getBrokerHostname() + ":" + protocolSettings.getKafkaPort();
  this.topic = topicName;
  this.producer = new KafkaProducer<>(getProperties());
  this.connected = true;
}
/**
 * Derives consumer configuration from the protocol: ZooKeeper and broker
 * connection strings, per-call random group/client ids, and fixed timeout
 * settings.
 *
 * @param protocol source of host/port settings
 * @return the populated {@link Properties} for a Kafka consumer
 */
protected Properties getProperties(KafkaTransportProtocol protocol) {
  String zookeeperAddress = protocol.getZookeeperHost() + ":" + protocol.getZookeeperPort();
  String brokerAddress = protocol.getBrokerHostname() + ":" + protocol.getKafkaPort();
  Properties props = new Properties();
  props.put("zookeeper.connect", zookeeperAddress);
  props.put("bootstrap.servers", brokerAddress);
  // Randomized per call: every consumer instance gets its own group and client id.
  props.put("group.id", UUID.randomUUID().toString());
  props.put("client.id", UUID.randomUUID().toString());
  props.put("zookeeper.session.timeout.ms", "60000");
  props.put("zookeeper.sync.time.ms", "20000");
  props.put("auto.commit.interval.ms", "10000");
  return props;
}
/**
 * Extracts the Kafka broker address ({@code host:port}) from an adapter
 * description's event grounding; the transport protocol is assumed to be
 * Kafka.
 *
 * @param adapterDescription the adapter description to inspect
 * @return broker address in {@code host:port} form
 */
public static String extractBroker(AdapterDescription adapterDescription) {
  EventGrounding grounding = getEventGrounding(adapterDescription);
  KafkaTransportProtocol kafkaProtocol = (KafkaTransportProtocol) grounding.getTransportProtocol();
  return kafkaProtocol.getBrokerHostname() + ":" + kafkaProtocol.getKafkaPort();
}
/**
 * Creates an event grounding backed by a Kafka transport protocol with a
 * freshly generated topic name under the {@code org.streampipes.connect.}
 * namespace.
 *
 * @param kafkaHost   broker hostname
 * @param kafkaPort   broker port
 * @param eventSchema currently unused by this method — kept for interface
 *                    compatibility; TODO confirm whether it can be dropped
 * @return the populated {@link EventGrounding}
 */
public static EventGrounding createEventGrounding(String kafkaHost, int kafkaPort, EventSchema eventSchema) {
  EventGrounding eventGrounding = new EventGrounding();
  KafkaTransportProtocol transportProtocol = new KafkaTransportProtocol();
  transportProtocol.setBrokerHostname(kafkaHost);
  transportProtocol.setKafkaPort(kafkaPort);
  // Random suffix makes each grounding's topic unique.
  String topic = "org.streampipes.connect." + UUID.randomUUID();
  // Removed leftover System.out.println debug output of the topic name.
  transportProtocol.setTopicDefinition(new SimpleTopicDefinition(topic));
  eventGrounding.setTransportProtocol(transportProtocol);
  return eventGrounding;
}
}
/**
 * Returns a copy of the given transport protocol using the matching copy
 * constructor.
 *
 * @param protocol the protocol to clone
 * @return a new {@link KafkaTransportProtocol} or {@link JmsTransportProtocol} copy
 */
public TransportProtocol protocol(TransportProtocol protocol) {
  if (protocol instanceof KafkaTransportProtocol) {
    return new KafkaTransportProtocol((KafkaTransportProtocol) protocol);
  }
  // Anything non-Kafka is assumed to be JMS — a third protocol type would
  // throw ClassCastException here, same as the original else branch.
  return new JmsTransportProtocol((JmsTransportProtocol) protocol);
}
/**
 * Establishes the producer connection: remembers topic and broker URL,
 * builds the {@code KafkaProducer}, then flips the connected flag.
 *
 * @param protocolSettings broker/topic settings to connect with
 */
@Override
public void connect(KafkaTransportProtocol protocolSettings) {
  this.topic = protocolSettings.getTopicDefinition().getActualTopicName();
  LOG.info("Kafka producer: Connecting to " + this.topic);
  this.brokerUrl = protocolSettings.getBrokerHostname() + ":" + protocolSettings.getKafkaPort();
  this.producer = new KafkaProducer<>(getProperties());
  this.connected = true;
}
/**
 * Reads the Kafka broker address ({@code host:port}) directly from the
 * adapter description's event grounding.
 *
 * @param adapterDescription the adapter description to inspect
 * @return broker address in {@code host:port} form
 */
public static String extractBroker(AdapterDescription adapterDescription) {
  // Hoist the shared call chain instead of repeating it for host and port.
  KafkaTransportProtocol kafkaProtocol =
      (KafkaTransportProtocol) adapterDescription.getEventGrounding().getTransportProtocol();
  return kafkaProtocol.getBrokerHostname() + ":" + kafkaProtocol.getKafkaPort();
}