private void checkDuplicateDefinition(AbstractDefinition definition) { TableDefinition existingTableDefinition = tableDefinitionMap.get(definition.getId()); if (existingTableDefinition != null && (!existingTableDefinition.equals(definition) || definition instanceof StreamDefinition)) { throw new DuplicateDefinitionException("Table Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingTableDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex()); StreamDefinition existingStreamDefinition = streamDefinitionMap.get(definition.getId()); if (existingStreamDefinition != null && (!existingStreamDefinition.equals(definition) || definition instanceof TableDefinition)) { throw new DuplicateDefinitionException("Stream Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingStreamDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex()); WindowDefinition existingWindowDefinition = windowDefinitionMap.get(definition.getId()); if (existingWindowDefinition != null && (!existingWindowDefinition.equals(definition) || definition instanceof WindowDefinition)) { throw new DuplicateDefinitionException("Stream Definition with same Window Id '" + definition.getId() + "' already exist : " + existingWindowDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex()); AggregationDefinition existingAggregationDefinition = aggregationDefinitionMap.get(definition.getId()); if (existingAggregationDefinition != null && (!existingAggregationDefinition.equals(definition) || definition instanceof AggregationDefinition)) { throw new DuplicateDefinitionException("Aggregate Definition with same Aggregate Id '" + definition.getId() + "' already exist : " + existingAggregationDefinition + ", hence cannot add " + definition,
/**
 * Converts a Siddhi {@code Event} into an attribute-name -> value map. Entry order
 * follows the attribute declaration order of {@code definition} (LinkedHashMap
 * preserves insertion order); event data is read positionally.
 *
 * @param event event whose data array is positionally aligned with the definition's attributes
 * @return ordered map of attribute name to the corresponding event value
 */
private Map<String, Object> toMap(Event event) {
    // Hoist the attribute-name array: the original re-fetched it twice per loop iteration.
    String[] attributeNames = definition.getAttributeNameArray();
    Map<String, Object> map = new LinkedHashMap<>();
    for (int i = 0; i < attributeNames.length; i++) {
        map.put(attributeNames[i], event.getData(i));
    }
    return map;
}
/**
 * Construct Stream Definition query string for a given Siddhi Stream Definition,
 * e.g. {@code define stream Foo (a string, b int)}.
 *
 * @param siddhiStreamDefinition the Siddhi definition whose id and attributes are rendered
 * @return the "define stream ..." query string for the given definition
 */
public static String getDefinitionString(org.wso2.siddhi.query.api.definition.AbstractDefinition siddhiStreamDefinition) {
    StringBuilder builder = new StringBuilder();
    builder.append(EventProcessorConstants.DEFINE_STREAM);
    builder.append(siddhiStreamDefinition.getId());
    builder.append(EventProcessorConstants.OPENING_BRACKETS);
    // Render each attribute as "<name> <lowercase type><COMMA>".
    for (Attribute attribute : siddhiStreamDefinition.getAttributeList()) {
        builder.append(attribute.getName() + EventProcessorConstants.SPACE + attribute.getType().toString().toLowerCase() + EventProcessorConstants.COMMA);
    }
    // NOTE(review): deleteCharAt(length - 2) assumes COMMA is a two-character separator
    // (e.g. ", ") so the comma sits one position before the end; if COMMA is a single ','
    // this removes the wrong character — confirm against EventProcessorConstants.
    // NOTE(review): throws StringIndexOutOfBoundsException when the attribute list is
    // empty — presumably definitions always have at least one attribute; verify upstream.
    builder.deleteCharAt(builder.length() - 2); //remove last comma
    builder.append(EventProcessorConstants.CLOSING_BRACKETS);
    return builder.toString();
}
/**
 * Builds a databridge {@code StreamDefinition} mirroring the given Siddhi definition:
 * same stream id, one column per Siddhi attribute (name plus mapped type), flagged as
 * a time series with an auto-generated description.
 *
 * @param siddhiDefinition the Siddhi definition to convert
 * @return the equivalent databridge stream definition
 */
public static StreamDefinition convertFromSiddiDefinition(AbstractDefinition siddhiDefinition) {
    // Pre-size from the attribute count; one column is produced per attribute.
    List<StreamColumn> columns = new ArrayList<>(siddhiDefinition.getAttributeNameArray().length);
    for (Attribute siddhiAttribute : siddhiDefinition.getAttributeList()) {
        StreamColumn streamColumn = new StreamColumn();
        streamColumn.setName(siddhiAttribute.getName());
        streamColumn.setType(convertFromSiddhiAttributeType(siddhiAttribute.getType()));
        columns.add(streamColumn);
    }
    StreamDefinition streamDefinition = new StreamDefinition();
    streamDefinition.setStreamId(siddhiDefinition.getId());
    streamDefinition.setColumns(columns);
    streamDefinition.setTimeseries(true);
    streamDefinition.setDescription("Auto-generated stream schema from siddhi for " + siddhiDefinition.getId());
    return streamDefinition;
}
}
/**
 * Derives Flink tuple type information from a Siddhi definition: one
 * {@code TypeInformation} entry per attribute, in declaration order.
 *
 * @param definition the Siddhi definition supplying the attribute list
 * @return tuple type information covering every attribute
 * @throws IllegalArgumentException if the tuple type cannot be constructed
 */
public static <T extends Tuple> TypeInformation<T> getTupleTypeInformation(AbstractDefinition definition) {
    List<Attribute> attributeList = definition.getAttributeList();
    TypeInformation[] fieldTypes = new TypeInformation[attributeList.size()];
    try {
        int position = 0;
        for (Attribute attribute : attributeList) {
            // Map each Siddhi attribute type onto its Java class, then wrap it.
            fieldTypes[position++] = TypeInformation.of(getJavaType(attribute.getType()));
        }
        return Types.TUPLE(fieldTypes);
    } catch (IllegalArgumentException ex) {
        throw new IllegalArgumentException("Failed to get Type Information.", ex);
    }
}
/**
 * Decides whether the given expression is a stream-qualified variable that points at
 * the candidate table's indexed attribute. Unqualified variables (no stream id) are
 * never index variables. When no meta stream event exists for the candidate, falls
 * back to comparing the variable's stream id against the candidate definition id.
 *
 * @param matchingMetaStateHolder holder exposing the candidate meta event/definition
 * @param expression              the expression under inspection
 * @param indexAttribute          the name of the indexed attribute
 * @return {@code true} when the expression refers to the candidate's index attribute
 */
private static boolean isTableIndexVariable(MatchingMetaStateHolder matchingMetaStateHolder, Expression expression, String indexAttribute) {
    if (!(expression instanceof Variable)) {
        return false;
    }
    String streamId = ((Variable) expression).getStreamId();
    if (streamId == null) {
        return false;
    }
    MetaStreamEvent tableStreamEvent = matchingMetaStateHolder.getMetaStateEvent()
            .getMetaStreamEvent(matchingMetaStateHolder.getCandidateEventIndex());
    if (tableStreamEvent == null) {
        // No meta event for the candidate: match directly against the candidate definition id.
        return matchingMetaStateHolder.getCandsidateDefinition().getId().equals(streamId);
    }
    // The variable must name either the input reference alias or the definition id...
    boolean referencesCandidate =
            (tableStreamEvent.getInputReferenceId() != null
                    && streamId.equals(tableStreamEvent.getInputReferenceId()))
            || tableStreamEvent.getLastInputDefinition().getId().equals(streamId);
    // ...and the candidate definition must actually declare the index attribute.
    return referencesCandidate && Arrays.asList(
            tableStreamEvent.getLastInputDefinition().getAttributeNameArray()).contains(indexAttribute);
}
/**
 * Parses the given execution plan into a transient runtime and returns the databridge
 * stream definitions for every Siddhi stream the plan declares.
 *
 * @param executionPlan the Siddhi execution plan source
 * @return databridge stream definitions for all streams declared by the plan
 */
public List<StreamDefinition> getSiddhiStreams(String executionPlan) {
    SiddhiManager siddhiManager = EventProcessorValueHolder.getSiddhiManager();
    EventProcessorHelper.loadDataSourceConfiguration(siddhiManager);
    ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan);
    try {
        Collection<AbstractDefinition> streamDefinitions = executionPlanRuntime.getStreamDefinitionMap().values();
        List<StreamDefinition> databridgeStreamDefinitions = new ArrayList<StreamDefinition>(streamDefinitions.size());
        for (AbstractDefinition siddhiStreamDef : streamDefinitions) {
            StreamConfiguration streamConfig = new StreamConfiguration(siddhiStreamDef.getId());
            StreamDefinition databridgeStreamDef = EventProcessorUtil.convertToDatabridgeStreamDefinition(
                    (org.wso2.siddhi.query.api.definition.StreamDefinition) siddhiStreamDef, streamConfig);
            databridgeStreamDefinitions.add(databridgeStreamDef);
        }
        return databridgeStreamDefinitions;
    } finally {
        // BUG FIX: the original only shut the runtime down on the success path,
        // leaking it whenever a conversion threw. Always release it.
        executionPlanRuntime.shutdown();
    }
}
@Override protected void init(Expression[] parameters, QueryPostProcessingElement nextProcessor, AbstractDefinition streamDefinition, String elementId, boolean async, SiddhiContext siddhiContext) { if (parameters[0] instanceof IntConstant) { timeToKeep = ((IntConstant) parameters[0]).getValue(); } else { timeToKeep = ((LongConstant) parameters[0]).getValue(); } String subjectedAttr = ((Variable)parameters[1]).getAttributeName(); subjectedAttrIndex = streamDefinition.getAttributePosition(subjectedAttr); subjectedAttrType = streamDefinition.getAttributeType(subjectedAttr); oldEventList = new ArrayList<RemoveEvent>(); if (this.siddhiContext.isDistributedProcessingEnabled()) { newEventList = this.siddhiContext.getHazelcastInstance().getList(elementId + "-newEventList"); } else { newEventList = new ArrayList<InEvent>(); } if (this.siddhiContext.isDistributedProcessingEnabled()) { window = new SchedulerSiddhiQueueGrid<StreamEvent>(elementId, this, this.siddhiContext, this.async); } else { window = new SchedulerSiddhiQueue<StreamEvent>(this); } //Ordinary scheduling window.schedule(); }
// Cache the positional index of the member-id attribute so per-event access can use
// the array position instead of a by-name lookup.
// NOTE(review): getAttributePosition fails if the attribute is absent from the
// definition — assumes memberIdAttrName was validated upstream; confirm.
memberIdAttrIndex = streamDefinition.getAttributePosition(memberIdAttrName);
/**
 * Collects the ids (and, when present, the reference aliases) of every event table
 * participating in the meta state event into {@code eventTableRefs}, captures the
 * matching stream's attributes as the output attributes, and finally registers the
 * primary table definition itself when it is a table.
 */
private void initMetaStateEvent() {
    this.outputAttrs = matchingMetaStateHolder.getMatchingStreamDefinition().getAttributeList();
    for (MetaStreamEvent metaStreamEvent : matchingMetaStateHolder.getMetaStateEvent().getMetaStreamEvents()) {
        AbstractDefinition lastInputDefinition = metaStreamEvent.getLastInputDefinition();
        // Skip anonymous definitions and anything that is not an event table.
        if (lastInputDefinition.getId().trim().isEmpty() || !(lastInputDefinition instanceof TableDefinition)) {
            continue;
        }
        this.eventTableRefs.add(lastInputDefinition.getId());
        String referenceId = metaStreamEvent.getInputReferenceId();
        if (referenceId != null) {
            this.eventTableRefs.add(referenceId);
        }
    }
    if (tableDefinition instanceof TableDefinition) {
        this.eventTableRefs.add(tableDefinition.getId());
    }
}
/**
 * Derives Flink tuple type information from a Siddhi definition, one
 * {@code TypeInformation} entry per attribute in declaration order.
 *
 * @param definition the Siddhi definition supplying the attribute list
 * @return tuple type information covering every attribute
 * @throws IllegalArgumentException if the tuple type cannot be constructed
 */
public static <T extends Tuple> TypeInformation<T> getTupleTypeInformation(AbstractDefinition definition) {
    List<Attribute> attributes = definition.getAttributeList();
    // Fill a fixed-size array directly rather than growing a list and copying out.
    TypeInformation[] fieldTypes = new TypeInformation[attributes.size()];
    for (int i = 0; i < attributes.size(); i++) {
        fieldTypes[i] = TypeInformation.of(getJavaType(attributes.get(i).getType()));
    }
    try {
        return Types.TUPLE(fieldTypes);
    } catch (IllegalArgumentException ex) {
        throw new IllegalArgumentException("Unable to parse ", ex);
    }
}
@Override protected void init(Expression[] parameters, QueryPostProcessingElement nextProcessor, AbstractDefinition streamDefinition, String elementId, boolean async, SiddhiContext siddhiContext) { if (parameters[0] instanceof IntConstant) { timeToKeep = ((IntConstant) parameters[0]).getValue(); } else { timeToKeep = ((LongConstant) parameters[0]).getValue(); } String subjectedAttr = ((Variable)parameters[1]).getAttributeName(); subjectedAttrIndex = streamDefinition.getAttributePosition(subjectedAttr); subjectedAttrType = streamDefinition.getAttributeType(subjectedAttr); oldEventList = new ArrayList<RemoveEvent>(); if (this.siddhiContext.isDistributedProcessingEnabled()) { newEventList = this.siddhiContext.getHazelcastInstance().getList(elementId + "-newEventList"); } else { newEventList = new ArrayList<InEvent>(); } if (this.siddhiContext.isDistributedProcessingEnabled()) { window = new SchedulerSiddhiQueueGrid<StreamEvent>(elementId, this, this.siddhiContext, this.async); } else { window = new SchedulerSiddhiQueue<StreamEvent>(this); } //Ordinary scheduling window.schedule(); }
// Cache the positional index of the member-id attribute so per-event access can use
// the array position instead of a by-name lookup.
// NOTE(review): getAttributePosition fails if the attribute is absent from the
// definition — assumes memberIdAttrName was validated upstream; confirm.
memberIdAttrIndex = streamDefinition.getAttributePosition(memberIdAttrName);
// Record this definition's id and its attribute list side by side; the two lists stay
// index-aligned (entry i of streamMapId pairs with entry i of attributeList).
// NOTE(review): fragment — 'ab' is declared outside this chunk; presumably a Siddhi
// AbstractDefinition from an enclosing loop. Confirm against the full source.
streamMapId.add(ab.getId());
attributeList.add(ab.getAttributeList());
/**
 * Starts the siddhi execution plan: creates the runtime, registers one stream callback
 * per configured output topic, then starts the runtime.
 *
 * @param siddhiManager  The manager that will manage the execution plan
 * @param siddhiCallback The callback to be called when the execution plan creates a new message
 * @throws ExecutionPlanException if a configured output stream is not declared in the plan
 */
public void start(SiddhiManager siddhiManager, SiddhiCallback siddhiCallback) throws ExecutionPlanException {
    this.executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(getFullExecutionPlan());
    for (Map.Entry<String, String> entry : outputTopics.entrySet()) {
        String streamName = entry.getKey();
        String topic = entry.getValue();
        AbstractDefinition abstractDefinition = executionPlanRuntime.getStreamDefinitionMap().get(streamName);
        if (abstractDefinition == null) {
            // BUG FIX: the original message omitted which output was missing,
            // making the failure hard to act on.
            throw new InvalidExecutionPlanException(
                    "You specified a output that is not present on the execution plan: " + streamName);
        }
        List<Attribute> attributes = abstractDefinition.getAttributeList();
        StreamCallback streamCallback = siddhiCallback.getCallback(streamName, topic, attributes);
        executionPlanRuntime.addCallback(streamName, streamCallback);
    }
    executionPlanRuntime.start();
    // BUG FIX: the original format string had two placeholders but three arguments,
    // so fullExecutionPlan was silently dropped from the log line.
    log.info("Started execution plan with id {} version {} plan {}", id, version, fullExecutionPlan);
}
private void checkDuplicateDefinition(AbstractDefinition definition) { TableDefinition existingTableDefinition = tableDefinitionMap.get(definition.getId()); if (existingTableDefinition != null && (!existingTableDefinition.equals(definition) || definition instanceof StreamDefinition)) { throw new DuplicateDefinitionException("Table Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingTableDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex()); StreamDefinition existingStreamDefinition = streamDefinitionMap.get(definition.getId()); if (existingStreamDefinition != null && (!existingStreamDefinition.equals(definition) || definition instanceof TableDefinition)) { throw new DuplicateDefinitionException("Stream Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingStreamDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex()); WindowDefinition existingWindowDefinition = windowDefinitionMap.get(definition.getId()); if (existingWindowDefinition != null && (!existingWindowDefinition.equals(definition) || definition instanceof WindowDefinition)) { throw new DuplicateDefinitionException("Stream Definition with same Window Id '" + definition.getId() + "' already exist : " + existingWindowDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex()); AggregationDefinition existingAggregationDefinition = aggregationDefinitionMap.get(definition.getId()); if (existingAggregationDefinition != null && (!existingAggregationDefinition.equals(definition) || definition instanceof AggregationDefinition)) { throw new DuplicateDefinitionException("Aggregate Definition with same Aggregate Id '" + definition.getId() + "' already exist : " + existingAggregationDefinition + ", hence cannot add " + definition,
/**
 * Converts a Siddhi {@code Event} into a map of attribute name to value. TreeMap
 * orders entries by attribute name, not by declaration order; event data is read
 * positionally against the definition's attribute array.
 *
 * @param event event whose data array is positionally aligned with the definition's attributes
 * @return name-sorted map of attribute name to the corresponding event value
 */
private TreeMap<String, Object> buildMap(Event event) {
    // Hoist the attribute-name array: the original re-fetched it twice per loop iteration.
    String[] attributeNames = definition.getAttributeNameArray();
    TreeMap<String, Object> map = new TreeMap<>();
    for (int i = 0; i < attributeNames.length; i++) {
        map.put(attributeNames[i], event.getData(i));
    }
    return map;
}
@Override protected void init(Expression[] parameters, QueryPostProcessingElement nextProcessor, AbstractDefinition streamDefinition, String elementId, boolean async, SiddhiContext siddhiContext) { if (parameters[0] instanceof IntConstant) { timeToKeep = ((IntConstant) parameters[0]).getValue(); } else { timeToKeep = ((LongConstant) parameters[0]).getValue(); } String subjectedAttr = ((Variable)parameters[1]).getAttributeName(); subjectedAttrIndex = streamDefinition.getAttributePosition(subjectedAttr); subjectedAttrType = streamDefinition.getAttributeType(subjectedAttr); oldEventList = new ArrayList<RemoveEvent>(); if (this.siddhiContext.isDistributedProcessingEnabled()) { newEventList = this.siddhiContext.getHazelcastInstance().getList(elementId + "-newEventList"); } else { newEventList = new ArrayList<InEvent>(); } if (this.siddhiContext.isDistributedProcessingEnabled()) { window = new SchedulerSiddhiQueueGrid<StreamEvent>(elementId, this, this.siddhiContext, this.async); } else { window = new SchedulerSiddhiQueue<StreamEvent>(this); } //Ordinary scheduling window.schedule(); }
// NOTE(review): garbled chunk — this span fuses fragments of at least two different
// methods (update-condition attribute collection, table/stream flag mapping, and an
// RDBMSOperator construction) with unbalanced braces; recover the original source
// before making code changes. Comments below annotate only the visible pieces.
updateConditionAttributeList.addAll(matchingMetaStateHolder.getMatchingStreamDefinition().getAttributeList());
String referenceId = metaStreamEvent.getInputReferenceId();
AbstractDefinition abstractDefinition = metaStreamEvent.getLastInputDefinition();
if (!abstractDefinition.getId().trim().equals("")) {
    if (abstractDefinition instanceof TableDefinition) {
        // Flag the definition id (and its alias, when present) as referring to a table.
        isTableStreamMap.put(abstractDefinition.getId(), true);
        if (referenceId != null) {
            isTableStreamMap.put(referenceId, true);
            // NOTE(review): the following looks like the non-table else-branch whose
            // guarding condition was lost during extraction — confirm.
            isTableStreamMap.put(abstractDefinition.getId(), false);
            if (referenceId != null) {
                isTableStreamMap.put(referenceId, false);
                // NOTE(review): orphaned argument tail and return from a different
                // method (RDBMS operator factory) — missing its enclosing call/signature.
                executionPlanContext, variableExpressionExecutors, eventTableMap, queryName);
                return new RDBMSOperator(executionInfo, expressionExecutorList, dbHandler, inMemoryEventTableOperator,
                        matchingMetaStateHolder.getMatchingStreamDefinition().getAttributeList().size());
/**
 * Converts a Siddhi {@code Event} into an attribute-name -> value map. Entry order
 * follows the attribute declaration order of {@code definition} (LinkedHashMap
 * preserves insertion order); event data is read positionally.
 *
 * @param event event whose data array is positionally aligned with the definition's attributes
 * @return ordered map of attribute name to the corresponding event value
 */
private Map<String, Object> toMap(Event event) {
    // Hoist the attribute-name array: the original re-fetched it twice per loop iteration.
    String[] attributeNames = definition.getAttributeNameArray();
    Map<String, Object> map = new LinkedHashMap<>();
    for (int i = 0; i < attributeNames.length; i++) {
        map.put(attributeNames[i], event.getData(i));
    }
    return map;
}