/**
 * Creates the event-receiver element of the topology configuration. Assumes that
 * {@code importedStreams} contains definitions for every stream the receiver must expose.
 *
 * @param document         DOM document used as the element factory
 * @param queryExpressions Siddhi execution plan source, parsed to read the
 *                         receiver-parallelism annotation
 * @param importedStreams  Siddhi stream definition strings to attach as child streams
 * @return the populated receiver element
 * @throws EventStreamConfigurationException if a stream element cannot be built
 */
private static Element constructReceiverElement(Document document, String queryExpressions,
                                                List<String> importedStreams)
        throws EventStreamConfigurationException {
    Element receiverElement = document.createElement(EventProcessorConstants.EVENT_RECEIVER);
    receiverElement.setAttribute(EventProcessorConstants.NAME, EventProcessorConstants.EVENT_RECEIVER_SPOUT);
    // Parallelism comes from a plan-level annotation, not from the stream list.
    ExecutionPlan executionPlan = SiddhiCompiler.parse(queryExpressions);
    String parallelism = String.valueOf(
            getParallelism(executionPlan.getAnnotations(), EventProcessorConstants.RECEIVER_PARALLELISM));
    receiverElement.setAttribute(EventProcessorConstants.PARALLEL, parallelism);
    Element streamsElement = document.createElement(EventProcessorConstants.STREAMS);
    for (String streamDefinition : importedStreams) {
        streamsElement.appendChild(getStreamElement(document, streamDefinition));
    }
    receiverElement.appendChild(streamsElement);
    return receiverElement;
}
/**
 * Creates a spout that receives events from the CEP Receiver over Thrift (data bridge)
 * and passes them to a downstream component as tuples.
 *
 * @param stormDeploymentConfig     distributed (Storm) deployment configuration
 * @param incomingStreamDefinitions incoming Siddhi stream definitions as source strings
 * @param executionPlanName         name of the execution plan this spout serves
 * @param tenantId                  tenant owning the execution plan
 * @param heartbeatInterval         heartbeat interval used by the receiver
 */
public EventReceiverSpout(DistributedConfiguration stormDeploymentConfig,
                          List<String> incomingStreamDefinitions, String executionPlanName,
                          int tenantId, int heartbeatInterval) {
    this.stormDeploymentConfig = stormDeploymentConfig;
    this.executionPlanName = executionPlanName;
    this.tenantId = tenantId;
    this.heartbeatInterval = heartbeatInterval;
    this.logPrefix = "[" + tenantId + ":" + executionPlanName + ":" + "EventReceiverSpout] ";
    // Parse every definition string up front so the spout only ever works with
    // compiled StreamDefinition objects.
    this.incomingStreamDefinitions = new ArrayList<StreamDefinition>(incomingStreamDefinitions.size());
    for (String streamDefinition : incomingStreamDefinitions) {
        this.incomingStreamDefinitions.add(SiddhiCompiler.parseStreamDefinition(streamDefinition));
    }
}
// Initializes this data provider from the JSON configuration: deserializes the
// SiddhiDataProviderConfig, copies the raw STORE_QUERY element and the SIDDHI_APP
// string out of the JSON object, records the (upper-cased, comma-split) time columns,
// starts a SiddhiAppRuntime with purging disabled, and builds DataSetMetadata from
// the store query's output attributes. Returns this provider for chaining.
// NOTE(review): assumes jsonElement is a JsonObject containing STORE_QUERY and
// SIDDHI_APP members and that getTimeColumns() returns non-null — an NPE is thrown
// otherwise; confirm against callers.
@Override public DataProvider init(String topic, String sessionId, JsonElement jsonElement) throws DataProviderException { this.siddhiDataProviderConfig = new Gson().fromJson(jsonElement, SiddhiDataProviderConfig.class); siddhiDataProviderConfig.setQueryData(((JsonObject) jsonElement).get(STORE_QUERY)); siddhiDataProviderConfig.setSiddhiAppContext(((JsonObject) jsonElement).get(SIDDHI_APP).getAsString()); this.timeColumns = Arrays.asList(this.siddhiDataProviderConfig.getTimeColumns().toUpperCase(Locale.ENGLISH) .split(",")); super.init(topic, sessionId, siddhiDataProviderConfig); SiddhiAppRuntime siddhiAppRuntime = getSiddhiAppRuntime(); siddhiAppRuntime.setPurgingEnabled(false); siddhiAppRuntime.start(); StoreQuery storeQuery = SiddhiCompiler.parseStoreQuery(siddhiDataProviderConfig.getQueryData() .getAsJsonObject().get(QUERY).getAsString()); Attribute[] outputAttributeList = siddhiAppRuntime.getStoreQueryOutputAttributes(storeQuery); metadata = new DataSetMetadata(outputAttributeList.length); Attribute outputAttribute; for (int i = 0; i < outputAttributeList.length; i++) { outputAttribute = outputAttributeList[i]; metadata.put(i, outputAttribute.getName(), getMetadataTypes(outputAttribute.getName(), outputAttribute.getType().toString())); } return this; }
/**
 * Creates the event-publisher element of the topology configuration. Assumes that
 * {@code exportedStreams} contains all publisher streams.
 *
 * @param document         DOM document used as the element factory
 * @param queryExpressions Siddhi execution plan source, parsed to read the
 *                         publisher-parallelism annotation
 * @param exportedStreams  stream definition strings exposed by the publisher
 * @return the populated publisher element
 * @throws EventStreamConfigurationException if a stream element cannot be built
 */
private static Element constructPublisherElement(Document document, String queryExpressions,
                                                 List<String> exportedStreams)
        throws EventStreamConfigurationException {
    Element publisherElement = document.createElement(EventProcessorConstants.EVENT_PUBLISHER);
    Element inputStreamsElement = document.createElement(EventProcessorConstants.INPUT_STREAMS);
    Element outputStreamsElement = document.createElement(EventProcessorConstants.OUTPUT_STREAMS);
    publisherElement.setAttribute(EventProcessorConstants.NAME, EventProcessorConstants.EVENT_PUBLISHER_BOLT);
    ExecutionPlan executionPlan = SiddhiCompiler.parse(queryExpressions);
    publisherElement.setAttribute(EventProcessorConstants.PARALLEL,
            String.valueOf(getParallelism(executionPlan.getAnnotations(),
                    EventProcessorConstants.PUBLISHER_PARALLELISM)));
    // Every exported stream is both an input and an output of the publisher; a DOM
    // node has a single parent, so clone the element for the second list.
    for (String streamDefinition : exportedStreams) {
        Element streamElement = getStreamElement(document, streamDefinition);
        outputStreamsElement.appendChild(streamElement);
        inputStreamsElement.appendChild((Element) streamElement.cloneNode(true));
    }
    publisherElement.appendChild(inputStreamsElement);
    publisherElement.appendChild(outputStreamsElement);
    return publisherElement;
}
/**
 * Parses the given Siddhi stream definition and registers it as an input stream,
 * keyed by its stream id.
 *
 * @param streamDefinition Siddhi stream definition source string
 */
public void addInputStream(String streamDefinition) {
    StreamDefinition parsedDefinition = SiddhiCompiler.parseStreamDefinition(streamDefinition);
    inputStreams.put(parsedDefinition.getId(), parsedDefinition);
}
/**
 * Builds one trigger element per trigger defined in the execution plan. Each element
 * carries the trigger's definition string and the output stream definition it produces.
 *
 * @param document        DOM document used as the element factory
 * @param queryExpression Siddhi execution plan source
 * @return one element per trigger definition (empty list when the plan has none)
 * @throws StormQueryConstructionException declared for attribute construction failures
 */
private static List<Element> constructTriggerElement(Document document, String queryExpression)
        throws StormQueryConstructionException {
    // A transient runtime is created only to resolve the stream definition each
    // trigger emits; it is shut down immediately after the map is captured.
    ExecutionPlanRuntime executionPlanRuntime =
            EventProcessorValueHolder.getSiddhiManager().createExecutionPlanRuntime(queryExpression);
    Map<String, AbstractDefinition> streamDefinitionMap = executionPlanRuntime.getStreamDefinitionMap();
    executionPlanRuntime.shutdown();
    ExecutionPlan executionPlan = SiddhiCompiler.parse(queryExpression);
    List<Element> triggerElementList = new ArrayList<Element>();
    // Typed entries instead of the raw Map.Entry used before: removes the unchecked
    // (String)/(TriggerDefinition) casts on key and value.
    for (Map.Entry<String, TriggerDefinition> entry : executionPlan.getTriggerDefinitionMap().entrySet()) {
        Element triggerElement = document.createElement(EventProcessorConstants.TRIGGER_TAG);
        // Triggers are never parallelised: fixed parallelism of 1, not overridable.
        ParallelismInfoHolder holder = new ParallelismInfoHolder(1, false);
        setAttributes(triggerElement, entry.getKey(), holder);
        String triggerDefinition = EventProcessorUtil.getTriggerDefinitionString(entry.getValue());
        Element triggerDefinitionElement = document.createElement(EventProcessorConstants.TRIGGER_DEFINITION);
        triggerDefinitionElement.setTextContent(triggerDefinition);
        triggerElement.appendChild(triggerDefinitionElement);
        String outputStreamDefinition =
                EventProcessorUtil.getDefinitionString(streamDefinitionMap.get(entry.getKey()));
        Element outputStreamElement = document.createElement(EventProcessorConstants.OUTPUT_STREAM);
        outputStreamElement.setTextContent(outputStreamDefinition);
        triggerElement.appendChild(outputStreamElement);
        triggerElementList.add(triggerElement);
    }
    return triggerElementList;
}
/**
 * Parses the given Siddhi stream definition and registers it as an output stream,
 * keyed by its stream id.
 *
 * @param streamDefinition Siddhi stream definition source string
 */
public void addOutputStream(String streamDefinition) {
    StreamDefinition parsedDefinition = SiddhiCompiler.parseStreamDefinition(streamDefinition);
    outputStreams.put(parsedDefinition.getId(), parsedDefinition);
}
/**
 * Returns the execution plan name.
 *
 * @param executionPlanAsString executionPlan (taken from code mirror) as a string
 * @return execution plan name as given in @Plan:name('MyPlanName'). Returns null in
 *         the absence of @Plan:name('MyPlanName')
 */
public static String getExecutionPlanName(String executionPlanAsString) {
    ExecutionPlan executionPlan = SiddhiCompiler.parse(executionPlanAsString);
    org.wso2.siddhi.query.api.annotation.Element nameElement = AnnotationHelper.getAnnotationElement(
            EventProcessorConstants.ANNOTATION_NAME_NAME, null, executionPlan.getAnnotations());
    // Bug fix: getAnnotationElement() returns null when the annotation is absent; the
    // previous code dereferenced it unconditionally and threw an NPE instead of
    // returning null as the javadoc promises.
    return nameElement == null ? null : nameElement.getValue();
}
// Validates and records the partitioning field of every <stream partition="..."> child
// element: a partitioned input stream must actually contain the named attribute,
// otherwise a StormQueryConstructionException is thrown. Streams without a partition
// attribute are skipped. NOTE(review): the final trailing '}' appears to close the
// enclosing class — this looks like the last method of its file.
/** * Adding stream partitioned fields */ private static void addPartitionFields(OMElement streamsElement, ComponentInfoHolder componentInfoHolder) throws StormQueryConstructionException { Iterator<OMElement> streamIterator = streamsElement.getChildrenWithName(new QName(EventProcessorConstants.STREAM)); while (streamIterator.hasNext()) { OMElement streamElement = streamIterator.next(); OMAttribute partitionAttribute = streamElement.getAttribute(new QName("partition")); if (partitionAttribute != null) { StreamDefinition streamDefinition = SiddhiCompiler.parseStreamDefinition(streamElement.getText()); if (!Arrays.asList(streamDefinition.getAttributeNameArray()).contains(partitionAttribute .getAttributeValue())) { throw new StormQueryConstructionException("All input streams of the partition should have the " + "partitioning attribute."); } componentInfoHolder.addStreamPartitioningField(streamDefinition.getId(), partitionAttribute.getAttributeValue()); } } } }
/**
 * Returns the constituent elements (queries, partitions, aggregations, sources, sinks)
 * of the named deployed Siddhi app.
 *
 * @param appName name of a deployed Siddhi app
 * @return 200 with the list of SiddhiAppElements, or 404 with an ApiResponseMessage
 *         when no app with that name is deployed
 * @throws NotFoundException declared by the API contract
 */
public Response siddhiAppElementsGet(String appName) throws NotFoundException {
    Map<String, SiddhiAppData> siddhiAppDataMap = StreamProcessorDataHolder.getStreamProcessorService()
            .getSiddhiAppMap();
    if (siddhiAppDataMap.containsKey(appName)) {
        String siddhiAppString = siddhiAppDataMap.get(appName).getSiddhiApp();
        // Fix: String.valueOf() on a String was redundant (and would have converted a
        // null app into the literal "null"); parse the source directly.
        SiddhiApp siddhiApp = SiddhiCompiler.parse(siddhiAppString);
        SiddhiAppRuntime siddhiAppRuntime = new SiddhiManager().createSiddhiAppRuntime(siddhiApp);
        try {
            List<SiddhiAppElements> listOfSiddhiAppElements = new ArrayList<>();
            for (ExecutionElement executionElement : siddhiApp.getExecutionElementList()) {
                if (executionElement instanceof Query) {
                    loadQueryExecutionElements(siddhiApp, siddhiAppRuntime, executionElement, siddhiAppString,
                            listOfSiddhiAppElements);
                } else if (executionElement instanceof Partition) {
                    loadPartitionExecutionElements(siddhiApp, siddhiAppRuntime, executionElement, siddhiAppString,
                            listOfSiddhiAppElements);
                }
            }
            loadAggregarionData(siddhiApp, siddhiAppRuntime, listOfSiddhiAppElements, siddhiAppString);
            loadSources(siddhiApp, siddhiAppRuntime, listOfSiddhiAppElements, siddhiAppString);
            loadSinks(siddhiApp, siddhiAppRuntime, listOfSiddhiAppElements, siddhiAppString);
            return Response.ok().entity(listOfSiddhiAppElements).build();
        } finally {
            // Fix: the runtime exists only to introspect the app for this request;
            // shut it down so the per-request SiddhiManager does not leak resources.
            siddhiAppRuntime.shutdown();
        }
    }
    String jsonString = new Gson().toJson(new ApiResponseMessage(ApiResponseMessage.NOT_FOUND,
            "There is no Siddhi App exist with provided name : " + appName));
    return Response.status(Response.Status.NOT_FOUND).entity(jsonString).build();
}
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { if (siddhiManager == null) { init(); } // Declaring output fields for each exported stream ID for (String outputStreamDefinition : outputStreamDefinitions) { StreamDefinition siddhiOutputDefinition = SiddhiCompiler.parseStreamDefinition(outputStreamDefinition); if (outputStreamDefinition == null) { throw new RuntimeException(logPrefix + "Cannot find exported stream : " + siddhiOutputDefinition.getId()); } List<String> list = new ArrayList<String>(); list.add(0,"_timestamp"); for (Attribute attribute : siddhiOutputDefinition.getAttributeList()) { list.add(attribute.getName()); } Fields fields = new Fields(list); declarer.declareStream(siddhiOutputDefinition.getId(), fields); log.info(logPrefix + "Declaring output field for stream :" + siddhiOutputDefinition.getId()); } } }
// Fragment (enclosing method not visible): parses the held Siddhi app, then begins
// iterating every stream definition's annotations — presumably to collect @source and
// @sink entries into sourceList/sinkList; the loop bodies lie outside this view, so
// confirm against the full source.
List<String> sourceList = new ArrayList<>(); List<String> sinkList = new ArrayList<>(); SiddhiApp siddhiApp = SiddhiCompiler.parse(siddhiAppHolder.getSiddhiApp()); for (Map.Entry<String, StreamDefinition> sourceStream : siddhiApp.getStreamDefinitionMap().entrySet()) { for (Annotation annotation : sourceStream.getValue().getAnnotations()) {
// Fragment (enclosing method(s) not visible): logs the registration of a callback for
// one parsed output stream, then indexes every exported stream definition by id into
// streamIdToDefinitionMap. NOTE(review): this looks like two separate snippets fused —
// the for-loop redeclares outputStreamDefinition, which would not compile if the first
// statement were in the same scope; verify against the complete source file.
final StreamDefinition outputSiddhiDefinition = SiddhiCompiler.parseStreamDefinition (outputStreamDefinition); log.info(logPrefix + "Adding callback for stream:" + outputSiddhiDefinition.getId()); StreamDefinition siddhiDefinition; for (String outputStreamDefinition : outputStreamDefinitions) { siddhiDefinition = SiddhiCompiler.parseStreamDefinition(outputStreamDefinition); streamIdToDefinitionMap.put(siddhiDefinition.getId(), siddhiDefinition);
/**
 * Extracts the Siddhi app name declared via @App:name('name').
 *
 * @param siddhiApp Siddhi app source string
 * @return the declared app name
 * @throws SiddhiAppConfigurationException when the app cannot be parsed, or when the
 *                                         name annotation is absent or empty
 */
public String getSiddhiAppName(String siddhiApp) throws SiddhiAppConfigurationException {
    SiddhiApp parsedSiddhiApp;
    try {
        parsedSiddhiApp = SiddhiCompiler.parse(siddhiApp);
    } catch (Throwable e) {
        // The compiler throws unchecked parser exceptions (and ANTLR can surface
        // Errors); wrap anything from the parse step with context, preserving the cause.
        throw new SiddhiAppConfigurationException("Exception occurred when retrieving Siddhi App Name ", e);
    }
    Element nameAnnotation = AnnotationHelper.
            getAnnotationElement(SiddhiAppProcessorConstants.ANNOTATION_NAME_NAME,
                    null, parsedSiddhiApp.getAnnotations());
    // Bug fix: this exception was previously thrown inside the try block and caught by
    // the method's own catch (Throwable), which re-wrapped it and masked the actionable
    // "name must be provided" message behind the generic one.
    if (nameAnnotation == null || nameAnnotation.getValue().isEmpty()) {
        throw new SiddhiAppConfigurationException("Siddhi App name must " +
                "be provided as @App:name('name').");
    }
    return nameAnnotation.getValue();
}
// Fragment (enclosing method not visible, for-loop not closed in this view): collects
// the stream id of every exported stream definition by parsing each definition string.
List<String> exportedStreamIds = new ArrayList<String>(exportedStreams.size()); for (String definitionString : exportedStreams) { StreamDefinition definition = SiddhiCompiler.parseStreamDefinition(definitionString); exportedStreamIds.add(definition.getId());
// Fragment (method body cut off mid-loop in this view): parses the enriched execution
// plan and iterates its execution elements, entering a branch for every element that is
// NOT a Query — presumably to reject or specially handle partitions; the branch body is
// outside this view, so confirm against the full source.
private void parse() throws Exception { SiddhiApp siddhiApp = SiddhiCompiler.parse(enrichedExecutionPlan); for (ExecutionElement executionElement : siddhiApp.getExecutionElementList()) { if (!(executionElement instanceof Query)) {
// Fragment (enclosing method not visible, if-block not closed in this view): parses one
// exported stream definition and, when debug logging is enabled, logs that a callback
// is being added for that stream.
final StreamDefinition outputSiddhiDefinition = SiddhiCompiler.parseStreamDefinition(outputStreamDefinition); if (log.isDebugEnabled()) { log.debug(logPrefix + " Adding callback for stream: " + outputSiddhiDefinition.getId());
// Fragment (enclosing method not visible): splits the plan source into individual
// queries and event-table definitions via SiddhiQLStormQuerySplitter, parses the full
// plan, and captures its execution elements plus the set of declared table ids —
// presumably as inputs to subsequent Storm component construction.
List<String> stringQueryList = SiddhiQLStormQuerySplitter.split(queryExpressions); List<String> eventTableDefinitionList = SiddhiQLStormQuerySplitter.getEventTableList(queryExpressions); ExecutionPlan executionPlan = SiddhiCompiler.parse(queryExpressions); List<ExecutionElement> executionElements = executionPlan.getExecutionElementList(); Set<String> eventTableIdSet = executionPlan.getTableDefinitionMap().keySet();
// Fragment (enclosing method not visible): parses the execution plan into a field and
// reads the @Plan:name annotation value. NOTE(review): getAnnotationElement() can
// return null when the annotation is absent, making .getValue() throw an NPE — confirm
// whether an earlier validation step in the full method guarantees the name exists.
parsedExecutionPlan = SiddhiCompiler.parse(executionPlan); String executionPlanName = AnnotationHelper.getAnnotationElement(EventProcessorConstants.ANNOTATION_NAME_NAME, null, parsedExecutionPlan.getAnnotations()).getValue();
/**
 * Replaces an inactive execution plan stored in {@code filename} with the given plan
 * content. The content is validated first, then the old file is deleted and the new
 * content is saved under the plan name declared in its @Plan:name annotation.
 *
 * @param executionPlan new execution plan content
 * @param filename      configuration file currently holding the inactive plan
 * @throws ExecutionPlanConfigurationException        on validation or file-system failure
 * @throws ExecutionPlanDependencyValidationException when plan dependencies are unmet
 */
public void editInactiveExecutionPlan(String executionPlan, String filename)
        throws ExecutionPlanConfigurationException, ExecutionPlanDependencyValidationException {
    // Validate before touching the file system so an invalid plan never deletes
    // the existing configuration.
    EventProcessorHelper.validateExecutionPlan(executionPlan);
    org.wso2.siddhi.query.api.ExecutionPlan parsedPlan = SiddhiCompiler.parse(executionPlan);
    String planName = AnnotationHelper
            .getAnnotationElement(EventProcessorConstants.ANNOTATION_NAME_NAME, null,
                    parsedPlan.getAnnotations())
            .getValue();
    EventProcessorConfigurationFilesystemInvoker.delete(filename);
    EventProcessorConfigurationFilesystemInvoker.save(executionPlan, planName, filename);
}