/**
 * Registers a stream definition with this Siddhi app after validating it.
 *
 * @param streamDefinition the stream definition to add; must be non-null and carry a non-null id
 * @return this {@code SiddhiApp}, to allow fluent chaining
 * @throws SiddhiAppValidationException if the definition or its id is null
 */
public SiddhiApp defineStream(StreamDefinition streamDefinition) {
    if (streamDefinition == null) {
        throw new SiddhiAppValidationException("Stream Definition should not be null");
    }
    if (streamDefinition.getId() == null) {
        throw new SiddhiAppValidationException("Stream Id should not be null for Stream Definition",
                streamDefinition.getQueryContextStartIndex(),
                streamDefinition.getQueryContextEndIndex());
    }
    // Reject clashes with any previously registered definition before storing.
    checkDuplicateDefinition(streamDefinition);
    this.streamDefinitionMap.put(streamDefinition.getId(), streamDefinition);
    return this;
}
/**
 * Registers a trigger definition with this Siddhi app. Each trigger is backed by a
 * stream of the same id with a single long attribute holding the triggered time, which
 * is registered alongside the trigger itself.
 *
 * @param triggerDefinition the trigger definition to add; must be non-null with a non-null id
 * @return this {@code SiddhiApp}, to allow fluent chaining
 * @throws SiddhiAppValidationException if the definition or its id is null
 * @throws DuplicateDefinitionException if the id clashes with an existing definition
 */
public SiddhiApp defineTrigger(TriggerDefinition triggerDefinition) {
    if (triggerDefinition == null) {
        throw new SiddhiAppValidationException("Trigger Definition should not be null");
    }
    String triggerId = triggerDefinition.getId();
    if (triggerId == null) {
        throw new SiddhiAppValidationException("Trigger Id should not be null for Trigger Definition",
                triggerDefinition.getQueryContextStartIndex(),
                triggerDefinition.getQueryContextEndIndex());
    }
    // Backing stream: same id as the trigger, carrying the fire timestamp.
    StreamDefinition streamDefinition = StreamDefinition.id(triggerId)
            .attribute(SiddhiConstants.TRIGGERED_TIME, Attribute.Type.LONG);
    streamDefinition.setQueryContextStartIndex(triggerDefinition.getQueryContextStartIndex());
    streamDefinition.setQueryContextEndIndex(triggerDefinition.getQueryContextEndIndex());
    try {
        checkDuplicateDefinition(streamDefinition);
    } catch (DuplicateDefinitionException e) {
        // Re-wrap so the error is reported in terms of the trigger, not its backing stream.
        throw new DuplicateDefinitionException("Trigger '" + triggerId + "' cannot be defined as, "
                + e.getMessageWithOutContext(), e,
                triggerDefinition.getQueryContextStartIndex(),
                triggerDefinition.getQueryContextEndIndex());
    }
    if (triggerDefinitionMap.containsKey(triggerId)) {
        throw new DuplicateDefinitionException("Trigger Definition with same Id '" + triggerId
                + "' already exist '" + triggerDefinitionMap.get(triggerId)
                + "', hence cannot add '" + triggerDefinition + "'",
                triggerDefinition.getQueryContextStartIndex(),
                triggerDefinition.getQueryContextEndIndex());
    }
    this.triggerDefinitionMap.put(triggerId, triggerDefinition);
    this.streamDefinitionMap.put(streamDefinition.getId(), streamDefinition);
    return this;
}
/**
 * Registers a listener for the given Siddhi output stream and makes the stream
 * definition known to the TCP event server.
 *
 * @param siddhiStreamDefinition the output stream to listen on
 * @param outputStreamListener   callback invoked for events on that stream
 */
public void registerOutputStreamListener(StreamDefinition siddhiStreamDefinition,
                                         SiddhiOutputStreamListener outputStreamListener) {
    String streamId = siddhiStreamDefinition.getId();
    log.info(logPrefix + "Registering output stream listener for Siddhi stream : " + streamId);
    streamNameToOutputStreamListenerMap.put(streamId, outputStreamListener);
    tcpEventServer.addStreamDefinition(siddhiStreamDefinition);
}
@Override public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) { // Declaring all incoming streams as output streams because this spouts role is to pass through all the incoming events as tuples. for (StreamDefinition siddhiStreamDefinition : incomingStreamDefinitions) { List<String> attributeList = new ArrayList<>(Arrays.asList(siddhiStreamDefinition.getAttributeNameArray())); attributeList.add(0, "_timestamp"); Fields fields = new Fields(attributeList); outputFieldsDeclarer.declareStream(siddhiStreamDefinition.getId(), fields); incomingStreamIDs.add(siddhiStreamDefinition.getId()); log.info(logPrefix + "Declaring output fields for stream : " + siddhiStreamDefinition.getId()); } }
/**
 * Parses a SiddhiQL stream definition string and registers it as an input stream,
 * keyed by the id extracted from the parsed definition.
 *
 * @param streamDefinition a SiddhiQL "define stream" expression
 */
public void addInputStream(String streamDefinition) {
    StreamDefinition parsedDefinition = SiddhiCompiler.parseStreamDefinition(streamDefinition);
    inputStreams.put(parsedDefinition.getId(), parsedDefinition);
}
/**
 * Parses a SiddhiQL stream definition string and registers it as an output stream,
 * keyed by the id extracted from the parsed definition.
 *
 * @param streamDefinition a SiddhiQL "define stream" expression
 */
public void addOutputStream(String streamDefinition) {
    StreamDefinition parsedDefinition = SiddhiCompiler.parseStreamDefinition(streamDefinition);
    outputStreams.put(parsedDefinition.getId(), parsedDefinition);
}
/**
 * Builds a SiddhiQL "define stream" expression for the given definition by rendering
 * each attribute as "name type" and joining them into DEFINE_STREAM_TEMPLATE.
 *
 * @param streamDefinition the stream definition to render; must not be null
 * @return the formatted define-stream expression
 * @throws NullPointerException if {@code streamDefinition} is null
 */
public String getStreamDefinitionExpression(StreamDefinition streamDefinition) {
    // Validate the argument before doing any work (the original built the list first).
    Preconditions.checkNotNull(streamDefinition, "StreamDefinition is null");
    List<String> columns = new ArrayList<>();
    for (Attribute attribute : streamDefinition.getAttributeList()) {
        // Locale.ROOT keeps the lowercasing of type names locale-independent
        // (the default locale misbehaves e.g. for Turkish dotless-i).
        columns.add(String.format("%s %s", attribute.getName(),
                attribute.getType().toString().toLowerCase(java.util.Locale.ROOT)));
    }
    return String.format(DEFINE_STREAM_TEMPLATE, streamDefinition.getId(), StringUtils.join(columns, ","));
}
/**
 * Builds a SiddhiQL "define stream" expression for the given definition: each attribute
 * is rendered as "name type" (type lowercased) and substituted into DEFINE_STREAM_TEMPLATE.
 *
 * @param streamDefinition the stream definition to render; must not be null
 * @return the formatted define-stream expression
 * @throws NullPointerException if {@code streamDefinition} is null
 */
public String getStreamDefinitionExpression(StreamDefinition streamDefinition) {
    // Fail fast on a null argument before allocating anything.
    Preconditions.checkNotNull(streamDefinition, "StreamDefinition is null");
    List<String> columns = new ArrayList<>();
    for (Attribute attribute : streamDefinition.getAttributeList()) {
        // Locale.ROOT avoids locale-dependent lowercasing of the type name.
        columns.add(String.format("%s %s", attribute.getName(),
                attribute.getType().toString().toLowerCase(java.util.Locale.ROOT)));
    }
    return String.format(DEFINE_STREAM_TEMPLATE, streamDefinition.getId(), StringUtils.join(columns, ","));
}
/**
 * Adds a stream definition to this app, guarding against null input, a missing id,
 * and duplicate definitions.
 *
 * @param streamDefinition the stream definition to register
 * @return this {@code SiddhiApp} for method chaining
 * @throws SiddhiAppValidationException if the definition or its id is null
 */
public SiddhiApp defineStream(StreamDefinition streamDefinition) {
    if (streamDefinition == null) {
        throw new SiddhiAppValidationException("Stream Definition should not be null");
    }
    String streamId = streamDefinition.getId();
    if (streamId == null) {
        throw new SiddhiAppValidationException("Stream Id should not be null for Stream Definition",
                streamDefinition.getQueryContextStartIndex(),
                streamDefinition.getQueryContextEndIndex());
    }
    // Duplicate detection throws before the map is touched.
    checkDuplicateDefinition(streamDefinition);
    this.streamDefinitionMap.put(streamId, streamDefinition);
    return this;
}
// Receives batches of Siddhi output events and re-emits each one as a Storm tuple on
// the stream named by outputSiddhiDefinition, with the event timestamp appended as the
// last tuple field. The trailing "});" closes an enclosing anonymous class declared
// outside this chunk.
@Override public void receive(Event[] events) {
    for (Event event : events) {
        // Copy the payload and append the event timestamp as an extra trailing field.
        Object[] eventData = Arrays.copyOf(event.getData(), event.getData().length + 1);
        eventData[event.getData().length] = event.getTimestamp();
        collector.emit(outputSiddhiDefinition.getId(), Arrays.asList(eventData));
        if (log.isDebugEnabled()) {
            // Throughput accounting only runs while debug logging is enabled, so
            // eventCount/batchStartTime are untouched in production logging levels.
            if (++eventCount % 10000 == 0) {
                double timeSpentInSecs = (System.currentTimeMillis() - batchStartTime) / 1000.0D;
                double throughput = 10000 / timeSpentInSecs;
                log.debug(logPrefix + "Processed 10000 events in " + timeSpentInSecs + " " + "seconds, throughput : " + throughput + " events/sec. Stream : " + outputSiddhiDefinition.getId());
                // Reset the window for the next 10000-event batch.
                eventCount = 0;
                batchStartTime = System.currentTimeMillis();
            }
            log.debug(logPrefix + "Emitted Event:" + outputSiddhiDefinition.getId() + ":" + Arrays.deepToString(eventData) + "@" + event.getTimestamp());
        }
    }
} });
/**
 * Looks up the stream definition for the given id and renders it as a SiddhiQL
 * "define stream" expression via DEFINE_STREAM_TEMPLATE.
 *
 * @param streamId the id of a previously registered stream
 * @return the formatted define-stream expression
 * @throws NullPointerException if no definition exists for {@code streamId}
 */
public String getStreamDefinitionExpression(String streamId) {
    StreamDefinition streamDefinition = getStreamDefinition(streamId);
    // Validate the lookup result before doing any work (the original built the list first).
    Preconditions.checkNotNull(streamDefinition, "StreamDefinition is null");
    List<String> columns = new ArrayList<>();
    for (Attribute attribute : streamDefinition.getAttributeList()) {
        // Locale.ROOT keeps type-name lowercasing locale-independent.
        columns.add(String.format("%s %s", attribute.getName(),
                attribute.getType().toString().toLowerCase(java.util.Locale.ROOT)));
    }
    return String.format(DEFINE_STREAM_TEMPLATE, streamDefinition.getId(), StringUtils.join(columns, ","));
}
}
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { if (siddhiManager == null) { init(); } // Declaring output fields for each exported stream ID for (String outputStreamDefinition : outputStreamDefinitions) { StreamDefinition siddhiOutputDefinition = SiddhiCompiler.parseStreamDefinition(outputStreamDefinition); if (outputStreamDefinition == null) { throw new RuntimeException(logPrefix + "Cannot find exported stream : " + siddhiOutputDefinition.getId()); } List<String> list = new ArrayList<String>(); list.add(0,"_timestamp"); for (Attribute attribute : siddhiOutputDefinition.getAttributeList()) { list.add(attribute.getName()); } Fields fields = new Fields(list); declarer.declareStream(siddhiOutputDefinition.getId(), fields); log.info(logPrefix + "Declaring output field for stream :" + siddhiOutputDefinition.getId()); } } }
/**
 * Resolves the stream definition for {@code streamId} and renders it as a SiddhiQL
 * "define stream" expression using DEFINE_STREAM_TEMPLATE.
 *
 * @param streamId the id of a previously registered stream
 * @return the formatted define-stream expression
 * @throws NullPointerException if no definition exists for {@code streamId}
 */
public String getStreamDefinitionExpression(String streamId) {
    StreamDefinition streamDefinition = getStreamDefinition(streamId);
    // Fail fast if the lookup returned nothing, before building the column list.
    Preconditions.checkNotNull(streamDefinition, "StreamDefinition is null");
    List<String> columns = new ArrayList<>();
    for (Attribute attribute : streamDefinition.getAttributeList()) {
        // Locale.ROOT avoids locale-dependent lowercasing of the type name.
        columns.add(String.format("%s %s", attribute.getName(),
                attribute.getType().toString().toLowerCase(java.util.Locale.ROOT)));
    }
    return String.format(DEFINE_STREAM_TEMPLATE, streamDefinition.getId(), StringUtils.join(columns, ","));
}
}
@Override public void init(StreamDefinition streamDefinition, OptionHolder optionHolder, Map<String, TemplateBuilder> payloadTemplateBuilderMap, ConfigReader mapperConfigReader, SiddhiAppContext siddhiAppContext) { this.streamID = streamDefinition.getId(); this.attributeList = streamDefinition.getAttributeList(); this.eventGroupEnabled = Boolean.valueOf(optionHolder .validateAndGetStaticValue(OPTION_GROUP_EVENTS, DEFAULT_GROUP_EVENTS)); this.endOfLine = optionHolder.validateAndGetStaticValue(OPTION_NEW_LINE, DEFAULT_NEW_LINE); this.eventDelimiter = optionHolder.validateAndGetStaticValue(OPTION_GROUP_EVENTS_DELIMITER, DEFAULT_EVENTS_DELIMITER) + endOfLine; //if @payload() is added there must be at least 1 element in it, otherwise a SiddhiParserException raised if (payloadTemplateBuilderMap != null && payloadTemplateBuilderMap.size() != 1) { throw new SiddhiAppCreationException("Text sink-mapper does not support multiple @payload mappings, " + "error at the mapper of '" + streamDefinition.getId() + "'"); } if (payloadTemplateBuilderMap != null && payloadTemplateBuilderMap.get(payloadTemplateBuilderMap.keySet().iterator().next()).isObjectMessage()) { throw new SiddhiAppCreationException("Text sink-mapper does not support object @payload mappings, " + "error at the mapper of '" + streamDefinition.getId() + "'"); } }
// Receives output events from Siddhi and emits each as a Storm tuple on the stream
// named by outputSiddhiDefinition, appending the event timestamp as the last tuple
// field; the throughput probe is updated once per event. The trailing "});" closes an
// enclosing anonymous class declared outside this chunk.
@Override public void receive(Event[] events) {
    for (Event event : events) {
        // Copy the payload and append the event timestamp as an extra trailing field.
        Object[] eventData = Arrays.copyOf(event.getData(), event.getData().length + 1);
        eventData[event.getData().length] = event.getTimestamp();
        collector.emit(outputSiddhiDefinition.getId(), Arrays.asList(eventData));
        if (log.isDebugEnabled()) {
            log.debug(logPrefix + "Emitted Event:" + outputSiddhiDefinition.getId() + ":" + Arrays.deepToString(eventData) + "@" + event.getTimestamp());
        }
        emitThroughputProbe.update();
    }
} });
/** * Initialize the mapper and the mapping configurations. * * @param streamDefinition The stream definition * @param optionHolder Option holder containing static and dynamic options * @param payloadTemplateBuilderMap Unmapped list of payloads for reference */ @Override public void init(StreamDefinition streamDefinition, OptionHolder optionHolder, Map<String, TemplateBuilder> payloadTemplateBuilderMap, ConfigReader mapperConfigReader, SiddhiAppContext siddhiAppContext) { this.attributeNameArray = streamDefinition.getAttributeNameArray(); this.enclosingElement = optionHolder.validateAndGetStaticValue(ENCLOSING_ELEMENT_IDENTIFIER, null); this.isJsonValidationEnabled = Boolean.parseBoolean(optionHolder .validateAndGetStaticValue(JSON_VALIDATION_IDENTIFIER, "false")); //if @payload() is added there must be at least 1 element in it, otherwise a SiddhiParserException raised if (payloadTemplateBuilderMap != null && payloadTemplateBuilderMap.size() != 1) { throw new SiddhiAppCreationException("Json sink-mapper does not support multiple @payload mappings, " + "error at the mapper of '" + streamDefinition.getId() + "'"); } if (payloadTemplateBuilderMap != null && payloadTemplateBuilderMap.get(payloadTemplateBuilderMap.keySet().iterator().next()).isObjectMessage()) { throw new SiddhiAppCreationException("Json sink-mapper does not support object @payload mappings, " + "error at the mapper of '" + streamDefinition.getId() + "'"); } }
@Override public void connect(ConnectionCallback connectionCallback) throws ConnectionUnavailableException { //ConnectionCallback is not used as re-connection is handled by carbon transport. try { jmsServerConnector.start(); } catch (JMSConnectorException e) { //calling super class logs the exception and retry throw new ConnectionUnavailableException("Exception in starting the JMS receiver for stream: " + sourceEventListener.getStreamDefinition().getId(), e); } }
/**
 * Forwards a single event to the asynchronous publisher, tagged with this
 * component's Siddhi stream id.
 *
 * @param event the event whose data and timestamp are published
 * @throws InterruptedException if publishing is interrupted
 */
@Override
public void sendEvent(Event event) throws InterruptedException {
    String streamId = this.siddhiStreamDefinition.getId();
    asyncEventPublisher.sendEvent(event.getData(), event.getTimestamp(), streamId);
}
/**
 * Initializes the JMS source: stores the listener and options, builds the JMS
 * connection properties, and creates the server connector with a message processor
 * that forwards received messages to the listener.
 *
 * @throws JMSInputAdaptorRuntimeException if the JMS connector cannot be created
 */
@Override
public void init(SourceEventListener sourceEventListener, OptionHolder optionHolder,
                 String[] requestedTransportPropertyNames, ConfigReader configReader,
                 SiddhiAppContext siddhiAppContext) {
    this.sourceEventListener = sourceEventListener;
    this.optionHolder = optionHolder;
    Map<String, String> properties = initJMSProperties();
    jmsMessageProcessor = new JMSMessageProcessor(sourceEventListener, siddhiAppContext,
            requestedTransportPropertyNames);
    try {
        jmsServerConnector = new JMSServerConnectorImpl(null, properties, jmsMessageProcessor);
    } catch (JMSConnectorException e) {
        // Fix: attach the cause to the log call so the stack trace is not lost
        // (the original logged only the message), then rethrow wrapped.
        log.error("Error occurred in initializing the JMS receiver for stream: "
                + sourceEventListener.getStreamDefinition().getId(), e);
        throw new JMSInputAdaptorRuntimeException("Error occurred in initializing the JMS receiver for stream: "
                + sourceEventListener.getStreamDefinition().getId(), e);
    }
}
/**
 * Adding stream partitioned fields: walks every &lt;stream&gt; child of the given
 * element and, for each one carrying a "partition" attribute, verifies the named
 * attribute exists on the parsed stream definition before recording the
 * (stream id, partition field) pair on the component info holder.
 *
 * @throws StormQueryConstructionException if a partitioned stream lacks the
 *                                         declared partitioning attribute
 */
private static void addPartitionFields(OMElement streamsElement, ComponentInfoHolder componentInfoHolder)
        throws StormQueryConstructionException {
    Iterator<OMElement> streamIterator =
            streamsElement.getChildrenWithName(new QName(EventProcessorConstants.STREAM));
    while (streamIterator.hasNext()) {
        OMElement streamElement = streamIterator.next();
        OMAttribute partitionAttribute = streamElement.getAttribute(new QName("partition"));
        if (partitionAttribute == null) {
            // Stream is not partitioned; nothing to record.
            continue;
        }
        String partitionField = partitionAttribute.getAttributeValue();
        StreamDefinition streamDefinition = SiddhiCompiler.parseStreamDefinition(streamElement.getText());
        List<String> attributeNames = Arrays.asList(streamDefinition.getAttributeNameArray());
        if (!attributeNames.contains(partitionField)) {
            throw new StormQueryConstructionException("All input streams of the partition should have the "
                    + "partitioning attribute.");
        }
        componentInfoHolder.addStreamPartitioningField(streamDefinition.getId(), partitionField);
    }
}
}