/**
 * Receives an event on a named input port and relays it onward through the
 * output, prefixing the port's numeric index onto the token's index array.
 *
 * @param token the incoming data token
 * @param portName name of the input port the token arrived on
 * @throws WorkflowStructureException if no input port with the given name
 *             exists
 */
protected void receiveEvent(WorkflowDataToken token, String portName) {
	int portIndex = inputPortNameToIndex(portName);
	if (portIndex == -1) {
		throw new WorkflowStructureException(
				"Received event on a port that doesn't exist, huh?");
	}
	int[] currentIndex = token.getIndex();
	// New index = [portIndex, ...currentIndex]; System.arraycopy replaces
	// the original element-by-element copy loop.
	int[] newIndex = new int[currentIndex.length + 1];
	newIndex[0] = portIndex;
	System.arraycopy(currentIndex, 0, newIndex, 1, currentIndex.length);
	output.sendEvent(new WorkflowDataToken(token.getOwningProcess(),
			newIndex, token.getData(), token.getContext()));
}
/**
 * Builds a copy of this token whose owning-process identifier has its
 * innermost element removed.
 *
 * @return a new token with the popped owning process; index, data and
 *         context are carried over unchanged
 * @throws ProcessIdentifierException if the owner cannot be popped
 */
@Override
public WorkflowDataToken popOwningProcess() throws ProcessIdentifierException {
	String poppedOwner = popOwner();
	return new WorkflowDataToken(poppedOwner, index, dataRef, context);
}
public void resultTokenProduced(WorkflowDataToken token, String portName) { if (instanceOwningProcessId.equals(token.getOwningProcess())) { synchronized (this) { if (token.getIndex().length == 0) { portsToComplete--; } if (portsToComplete == 0) { // Received complete events on all ports, can // un-register this node from the monitor MonitorManager.getInstance().deregisterNode( instanceOwningProcessId.split(":")); } } for (ResultListener resultListener : resultListeners .toArray(new ResultListener[resultListeners.size()])) { resultListener.resultTokenProduced(token .popOwningProcess(), portName); } } }
/**
 * Builds a copy of this token with the given local process name pushed onto
 * its owning-process identifier.
 *
 * @param localProcessName the name to append to the owning process
 * @return a new token with the extended owner; index, data and context are
 *         carried over unchanged
 * @throws ProcessIdentifierException if the name cannot be pushed
 */
@Override
public WorkflowDataToken pushOwningProcess(String localProcessName)
		throws ProcessIdentifierException {
	String extendedOwner = pushOwner(localProcessName);
	return new WorkflowDataToken(extendedOwner, index, dataRef, context);
}
// NOTE(review): this block appears truncated/garbled in the source — the
// guard's closing '}' is missing (leaving the statements below syntactically
// inside the early-return branch) and the method is cut off mid-statement at
// the logger.warn call. Code left byte-identical; reconcile against the
// original file before editing.
public void resultTokenProduced(WorkflowDataToken token, String portName) {
	// Ignore tokens owned by a different process instance.
	if (!instanceOwningProcessId.equals(token.getOwningProcess())) {
		return;
	// Populate the provenance item describing this output-port token.
	workflowDataProvenanceItem.setPortName(portName);
	workflowDataProvenanceItem.setInputPort(false);
	workflowDataProvenanceItem.setData(token.getData());
	workflowDataProvenanceItem.setReferenceService(context.getReferenceService());
	workflowDataProvenanceItem.setParentId(workflowItem.getIdentifier());
	// NOTE(review): setParentId is called twice — the second call overwrites
	// the first; verify which parent id is actually intended.
	workflowDataProvenanceItem.setParentId(instanceOwningProcessId);
	workflowDataProvenanceItem.setProcessId(instanceOwningProcessId);
	workflowDataProvenanceItem.setIndex(token.getIndex());
	workflowDataProvenanceItem.setFinal(token.isFinal());
	context.getProvenanceReporter().addProvenanceItem(
			workflowDataProvenanceItem);
	// An empty index means the final token for this port has arrived.
	if (token.getIndex().length == 0) {
		synchronized (WorkflowInstanceFacadeImpl.this) {
			portsToComplete--;
			try {
				resultListener.resultTokenProduced(
						token.popOwningProcess(), portName);
			} catch (RuntimeException ex) {
				logger.warn("Could not notify result listener "
/**
 * Notification that a job carrying output data has been created; wraps each
 * named output value in a WorkflowDataToken and delivers it to the matching
 * output port on the parent.
 */
public void jobCreated(Job outputJob) {
	for (String portName : outputJob.getData().keySet()) {
		WorkflowDataToken dataToken = new WorkflowDataToken(
				outputJob.getOwningProcess(), outputJob.getIndex(),
				outputJob.getData().get(portName), outputJob.getContext());
		parent.getOutputPortWithName(portName).receiveEvent(dataToken);
	}
}
// NOTE(review): fragment — this snippet starts and ends mid-method; code is
// left byte-identical.
// Remember the context of the most recent token seen.
setContext(token.getContext());
// An empty index marks the final (complete) token for this port.
if (token.getIndex().length == 0){
	// NOTE(review): new Boolean(...) is deprecated; Boolean.TRUE could be
	// stored directly — confirm before changing.
	receivedAllResultsForPort.put(portName, new Boolean(Boolean.TRUE));
	T2Reference resultsRef = token.getData();
/** * Receive an input event, relay it through the internal output port to all * connected entities */ public void receiveEvent(WorkflowDataToken t) { WorkflowDataToken transformedToken = t.pushOwningProcess(dataflow.getLocalName()); // I'd rather avoid casting to the implementation but in this // case we're in the same package - the only reason to do this // is to allow dummy implementations of parts of this // infrastructure during testing, in 'real' use this should // always be a dataflowimpl if (dataflow instanceof DataflowImpl) { ((DataflowImpl) dataflow).tokenReceived(transformedToken .getOwningProcess(), t.getContext()); } for (Datalink dl : internalOutput.getOutgoingLinks()) { dl.getSink().receiveEvent(transformedToken); } }
// NOTE(review): fragment — snippet starts and ends mid-method, and the
// closing '}' of the duplicate-push guard below is missing. Code left
// byte-identical; confirm structure against the original file.
if (token.getIndex().length == 0) {
	// Reject a second top-level push for the same port.
	if (pushedDataMap.containsKey(portName)) {
		throw new IllegalStateException("Already pushed for port " + portName);
	pushedDataMap.put(portName, token.getData());
	// Populate the provenance item describing this input-port token.
	workflowDataProvenanceItem.setPortName(portName);
	workflowDataProvenanceItem.setInputPort(true);
	workflowDataProvenanceItem.setData(token.getData());
	workflowDataProvenanceItem.setReferenceService(context.getReferenceService());
	workflowDataProvenanceItem.setParentId(workflowItem.getIdentifier());
	// NOTE(review): setParentId called twice — the second call overwrites
	// the first; verify which parent id is actually intended.
	workflowDataProvenanceItem.setParentId(instanceOwningProcessId);
	workflowDataProvenanceItem.setProcessId(instanceOwningProcessId);
	workflowDataProvenanceItem.setIndex(token.getIndex());
	workflowDataProvenanceItem.setFinal(token.isFinal());
	context.getProvenanceReporter().addProvenanceItem(
			workflowDataProvenanceItem);
	port.receiveEvent(token.pushOwningProcess(localName));
/**
 * Called when a workflow output produces a token; only the final token
 * (empty index) is handled. Resolves the data reference — printing any
 * errors it contains — stores the rendered value in the result map and
 * decrements the count of outstanding outputs.
 */
public void resultTokenProduced(WorkflowDataToken dataToken, String portname) {
	if (dataToken.getIndex().length != 0) {
		return;
	}
	T2Reference ref = dataToken.getData();
	System.out.println("Output reference made = " + ref);
	Object rendered = ref;
	if (ref.containsErrors()) {
		System.out.println("Contains errors!");
		printAllErrors(context.getReferenceService().resolveIdentifier(
				ref, null, context));
	} else {
		try {
			rendered = context.getReferenceService().renderIdentifier(
					ref, Object.class, context);
		} catch (ReferenceServiceException ex) {
			ex.printStackTrace();
		}
	}
	resultMap.put(portname, rendered);
	synchronized (this) {
		outputCount--;
	}
}
public void tokenReceived(WorkflowDataToken token, String portName) { if (rootOutputDirectory != null) { //only save individual results if a directory is specified if (portsAndDepth.containsKey(portName)) { int[] index = token.getIndex(); if (depthSeen.get(portName) == -1) depthSeen.put(portName, index.length); if (index.length >= depthSeen.get(portName)) { //storeToken(token, portName); } } else { logger .error("Result recieved for unexpected Port: " + portName); } } }
// Records the owning process of the most recently received token; the
// token's data itself is discarded.
@Override
public void receiveEvent(WorkflowDataToken t) {
	tokenOwningProcess = t.getOwningProcess();
}
// NOTE(review): the brace below closes an enclosing declaration that is not
// visible in this snippet.
}
/**
 * Receives a data token: bumps the event counter, renders the token's data
 * reference to a plain object into the result field, and prints the token
 * to standard output.
 */
public void receiveEvent(WorkflowDataToken token) {
	eventCount = eventCount + 1;
	result = referenceService.renderIdentifier(
			token.getData(), Object.class, null);
	System.out.println(token);
}
/**
 * Strips the innermost identifier from the token's owning-process stack (it
 * was pushed there when the token entered the processor) and relays the
 * resulting token to the targets.
 */
protected void receiveEvent(WorkflowDataToken token) {
	WorkflowDataToken popped = token.popOwningProcess();
	sendEvent(popped);
}
public void pushData(WorkflowDataToken token, String portName) throws TokenOrderException { // TODO: throw TokenOrderException when token stream is violates order // constraints. for (DataflowInputPort port : dataflow.getInputPorts()) { if (portName.equals(port.getName())) { port.receiveEvent(token.pushOwningProcess(localName)); } } pushDataCalled = true; }
// NOTE(review): fragment — this snippet is truncated and garbled: the
// guard's closing '}' is missing, the statement beginning
// "portName, token.getData()..." has lost its opening line (it looks like
// the tail of a constructor call), and the method is cut off mid-string at
// logger.warn. Code left byte-identical; reconcile against the original
// file before editing.
public void resultTokenProduced(WorkflowDataToken token, String portName) {
	// Ignore tokens owned by a different process instance.
	if (!instanceOwningProcessId.equals(token.getOwningProcess())) {
		return;
	portName, token.getData(), context.getReferenceService());
	workflowDataProvenanceItem.setParentId(workflowItem.getIdentifier());
	workflowDataProvenanceItem.setIdentifier(UUID.randomUUID().toString());
	// An empty index marks the final token for this port.
	if (token.getIndex().length == 0) {
		portsToComplete--;
		try {
			resultListener.resultTokenProduced(
					token.popOwningProcess(), portName);
		} catch (RuntimeException ex) {
			logger.warn("Could not notify result listener "
/**
 * Receives notification of a newly created output job; for every named
 * entry in the job's data map, builds a token and delivers it to the
 * parent's output port of the same name.
 */
public void jobCreated(Job outputJob) {
	for (String name : outputJob.getData().keySet()) {
		WorkflowDataToken token = new WorkflowDataToken(
				outputJob.getOwningProcess(),
				outputJob.getIndex(),
				outputJob.getData().get(name),
				outputJob.getContext());
		parent.getOutputPortWithName(name).receiveEvent(token);
	}
}
// NOTE(review): fragment — this snippet starts and ends mid-method; code is
// left byte-identical.
// Remember the invocation context of the most recent token.
context = token.getContext();
// An empty index marks the final (complete) token for this port.
if (token.getIndex().length == 0){
	// NOTE(review): new Boolean(...) is deprecated; Boolean.TRUE could be
	// stored directly — confirm before changing.
	receivedAllResultsForPort.put(portName, new Boolean(Boolean.TRUE));
	T2Reference resultsRef = token.getData();
/** * Receive an input event, relay it through the internal output port to all * connected entities */ public void receiveEvent(WorkflowDataToken t) { WorkflowDataToken transformedToken = t.pushOwningProcess(dataflow.getLocalName()); // I'd rather avoid casting to the implementation but in this // case we're in the same package - the only reason to do this // is to allow dummy implementations of parts of this // infrastructure during testing, in 'real' use this should // always be a dataflowimpl if (dataflow instanceof DataflowImpl) { ((DataflowImpl) dataflow).tokenReceived(transformedToken .getOwningProcess(), t.getContext()); } for (Datalink dl : internalOutput.getOutgoingLinks()) { dl.getSink().receiveEvent(transformedToken); } }
/**
 * Returns a new token identical to this one except that the supplied local
 * process name has been appended to the owning-process identifier.
 *
 * @param localProcessName the name to push onto the owning process
 * @throws ProcessIdentifierException if the name cannot be pushed
 */
@Override
public WorkflowDataToken pushOwningProcess(String localProcessName)
		throws ProcessIdentifierException {
	String newOwner = pushOwner(localProcessName);
	return new WorkflowDataToken(newOwner, index, dataRef, context);
}