/**
 * Builds the initializer event list for this input: a single
 * {@code InputConfigureVertexTasksEvent} followed by one
 * {@code InputDataInformationEvent} per split.
 *
 * @param sendSerializedEvents if true, split payloads are shipped as
 *     read-only serialized proto bytes; otherwise as live split objects
 * @param inputSplitInfo in-memory split information to convert into events
 * @return events list of size numTasks + 1, configure event first
 */
private List<Event> createEventList(boolean sendSerializedEvents,
    InputSplitInfoMem inputSplitInfo) {
  List<Event> events = Lists.newArrayListWithCapacity(inputSplitInfo.getNumTasks() + 1);

  // The vertex-level configure event always precedes the per-split events.
  events.add(InputConfigureVertexTasksEvent.create(
      inputSplitInfo.getNumTasks(),
      VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()),
      InputSpecUpdate.getDefaultSinglePhysicalInputSpecUpdate()));

  int taskIndex = 0;
  if (sendSerializedEvents) {
    // Serialized path: one event per proto split, payload as read-only bytes.
    for (MRSplitProto splitProto : inputSplitInfo.getSplitsProto().getSplitsList()) {
      events.add(InputDataInformationEvent.createWithSerializedPayload(
          taskIndex++, splitProto.toByteString().asReadOnlyByteBuffer()));
    }
  } else {
    // Object path: one event per old-format InputSplit, shipped directly.
    for (org.apache.hadoop.mapred.InputSplit split : inputSplitInfo.getOldFormatSplits()) {
      events.add(InputDataInformationEvent.createWithObjectPayload(taskIndex++, split));
    }
  }
  return events;
}
+ configureVertexTaskEvent.getNumTasks());
= (InputConfigureVertexTasksEvent) eventList.get(0); List<TaskLocationHint> hints = configureEvent.getLocationHint().getTaskLocationHints();
rootInputSpecUpdate.put( inputName, cEvent.getInputSpecUpdate() == null ? InputSpecUpdate .getDefaultSinglePhysicalInputSpecUpdate() : cEvent.getInputSpecUpdate()); getContext().reconfigureVertex(rootInputSpecUpdate, cEvent.getLocationHint(), cEvent.getNumTasks());
/**
 * Static factory for an {@code InputConfigureVertexTasksEvent} carrying a
 * task count, location hints, and an input-spec update.
 *
 * @param numTasks number of tasks for the vertex
 * @param locationHint per-task location hints (wrapped as given)
 * @param inputSpecUpdate the input spec update to attach (passed through as-is)
 * @return a new event wrapping the three supplied values
 */
public static InputConfigureVertexTasksEvent create(int numTasks,
    VertexLocationHint locationHint,
    InputSpecUpdate inputSpecUpdate) {
  return new InputConfigureVertexTasksEvent(numTasks, locationHint, inputSpecUpdate);
}
/**
 * Produces the events this input sends at initialization time: first an
 * {@code InputConfigureVertexTasksEvent} describing the vertex, then one
 * {@code InputDataInformationEvent} per split.
 *
 * @param sendSerializedEvents true to emit serialized proto payloads,
 *     false to emit the old-format split objects themselves
 * @param inputSplitInfo source of split counts, hints, and split data
 * @return list containing numTasks + 1 events
 */
private List<Event> createEventList(boolean sendSerializedEvents,
    InputSplitInfoMem inputSplitInfo) {
  List<Event> result = Lists.newArrayListWithCapacity(inputSplitInfo.getNumTasks() + 1);

  // Configure event goes first so the vertex is set up before split data arrives.
  InputConfigureVertexTasksEvent configureEvent = InputConfigureVertexTasksEvent.create(
      inputSplitInfo.getNumTasks(),
      VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()),
      InputSpecUpdate.getDefaultSinglePhysicalInputSpecUpdate());
  result.add(configureEvent);

  int splitIndex = 0;
  if (sendSerializedEvents) {
    // Serialized form: read-only byte buffers taken from each split proto.
    MRSplitsProto allSplits = inputSplitInfo.getSplitsProto();
    for (MRSplitProto proto : allSplits.getSplitsList()) {
      result.add(InputDataInformationEvent.createWithSerializedPayload(
          splitIndex, proto.toByteString().asReadOnlyByteBuffer()));
      splitIndex++;
    }
  } else {
    // Object form: the old-format InputSplit instances are attached directly.
    for (org.apache.hadoop.mapred.InputSplit split : inputSplitInfo.getOldFormatSplits()) {
      result.add(InputDataInformationEvent.createWithObjectPayload(splitIndex, split));
      splitIndex++;
    }
  }
  return result;
}
+ configureVertexTaskEvent.getNumTasks());
= (InputConfigureVertexTasksEvent) eventList.get(0); List<TaskLocationHint> hints = configureEvent.getLocationHint().getTaskLocationHints();
/**
 * Initializer used for testing: emits a configure event declaring
 * {@code srcParallelism} tasks (with null location hint and null spec
 * update) followed by one data-information event per task index, each
 * with a null payload.
 *
 * @return list of srcParallelism + 1 events, configure event first
 */
@Override
public List<Event> initialize() throws Exception {
  List<Event> events = new ArrayList<>(srcParallelism + 1);
  events.add(InputConfigureVertexTasksEvent.create(srcParallelism, null, null));
  int index = 0;
  while (index < srcParallelism) {
    events.add(InputDataInformationEvent.createWithObjectPayload(index, null));
    index++;
  }
  return events;
}
+ configureVertexTaskEvent.getNumTasks());
/**
 * Converts in-memory split information into the initializer event list:
 * one {@code InputConfigureVertexTasksEvent} followed by a
 * {@code InputDataInformationEvent} for every split.
 *
 * @param sendSerializedEvents whether per-split payloads are serialized
 *     proto bytes (true) or live old-format split objects (false)
 * @param inputSplitInfo split metadata backing the generated events
 * @return events sized numTasks + 1, with the configure event at index 0
 */
private List<Event> createEventList(boolean sendSerializedEvents,
    InputSplitInfoMem inputSplitInfo) {
  int numTasks = inputSplitInfo.getNumTasks();
  List<Event> events = Lists.newArrayListWithCapacity(numTasks + 1);

  // Emit the vertex configuration before any per-split data events.
  events.add(InputConfigureVertexTasksEvent.create(
      numTasks,
      VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()),
      InputSpecUpdate.getDefaultSinglePhysicalInputSpecUpdate()));

  int idx = 0;
  if (sendSerializedEvents) {
    // Serialized payloads: wrap each proto split's bytes read-only.
    for (MRSplitProto mrSplit : inputSplitInfo.getSplitsProto().getSplitsList()) {
      events.add(InputDataInformationEvent.createWithSerializedPayload(
          idx++, mrSplit.toByteString().asReadOnlyByteBuffer()));
    }
  } else {
    // Object payloads: attach the old-format InputSplit instances as-is.
    for (org.apache.hadoop.mapred.InputSplit oldSplit : inputSplitInfo.getOldFormatSplits()) {
      events.add(InputDataInformationEvent.createWithObjectPayload(idx++, oldSplit));
    }
  }
  return events;
}
.getNumTasks() + 1); InputConfigureVertexTasksEvent configureVertexEvent = InputConfigureVertexTasksEvent.create( inputSplitInfo.getNumTasks(), VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()),