private List<Event> createEventList(boolean sendSerializedEvents, InputSplitInfoMem inputSplitInfo) {
  List<Event> events = Lists.newArrayListWithCapacity(inputSplitInfo.getNumTasks() + 1);
  InputConfigureVertexTasksEvent configureVertexEvent =
      InputConfigureVertexTasksEvent.create(inputSplitInfo.getNumTasks(),
          VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()),
          InputSpecUpdate.getDefaultSinglePhysicalInputSpecUpdate());
  events.add(configureVertexEvent);
  if (sendSerializedEvents) {
    MRSplitsProto splitsProto = inputSplitInfo.getSplitsProto();
    int count = 0;
    for (MRSplitProto mrSplit : splitsProto.getSplitsList()) {
      InputDataInformationEvent diEvent = InputDataInformationEvent.createWithSerializedPayload(
          count++, mrSplit.toByteString().asReadOnlyByteBuffer());
      events.add(diEvent);
    }
  } else {
    int count = 0;
    for (org.apache.hadoop.mapred.InputSplit split : inputSplitInfo.getOldFormatSplits()) {
      InputDataInformationEvent diEvent = InputDataInformationEvent.createWithObjectPayload(
          count++, split);
      events.add(diEvent);
    }
  }
  return events;
}
private FileSplit getFileSplitFromEvent(InputDataInformationEvent event) throws IOException {
  InputSplit inputSplit = null;
  if (event.getDeserializedUserPayload() != null) {
    inputSplit = (InputSplit) event.getDeserializedUserPayload();
  } else {
    MRSplitProto splitProto = MRSplitProto.parseFrom(ByteString.copyFrom(event.getUserPayload()));
    SerializationFactory serializationFactory = new SerializationFactory(new Configuration());
    inputSplit = MRInputHelpers.createOldFormatSplitFromUserPayload(splitProto, serializationFactory);
  }
  if (!(inputSplit instanceof FileSplit)) {
    throw new UnsupportedOperationException(
        "Cannot handle splits other than FileSplit for the moment. Current input split type: "
            + inputSplit.getClass().getSimpleName());
  }
  return (FileSplit) inputSplit;
}
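A minimal sketch of how these two helpers fit together, assuming an existing org.apache.hadoop.mapred.FileSplit named split and the enclosing class above; the split is serialized into an event payload and then recovered. The variable names are illustrative only:

// Hedged sketch: "split" is a hypothetical FileSplit; createSplitProto and the
// event factory call are the same ones used in the snippets in this section.
MRSplitProto proto = MRInputHelpers.createSplitProto(split);
InputDataInformationEvent event = InputDataInformationEvent.createWithSerializedPayload(
    0, proto.toByteString().asReadOnlyByteBuffer());
// No deserialized payload was set, so this takes the parseFrom branch above.
FileSplit recovered = getFileSplitFromEvent(event);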
count++;
InputDataInformationEvent diEvent = InputDataInformationEvent.createWithSerializedPayload(count, buf);
diEvent.setTargetIndex(task);
taskEvents.add(diEvent);
public static EventProtos.RootInputDataInformationEventProto convertRootInputDataInformationEventToProto(
    InputDataInformationEvent event) {
  EventProtos.RootInputDataInformationEventProto.Builder builder =
      EventProtos.RootInputDataInformationEventProto.newBuilder();
  builder.setSourceIndex(event.getSourceIndex());
  builder.setTargetIndex(event.getTargetIndex());
  if (event.getUserPayload() != null) {
    builder.setUserPayload(ByteString.copyFrom(event.getUserPayload()));
  }
  return builder.build();
}
rEvent.setTargetIndex(rEvent.getSourceIndex()); // 1:1 routing
riEvents.add(rEvent);
MRSplitProto splitProto = MRSplitProto.parseFrom(ByteString.copyFrom(initEvent.getUserPayload()));
Object splitObj = null;
long splitLength = -1;
@Override
public List<Event> initialize() throws Exception {
  List<Event> list = new ArrayList<>();
  list.add(InputConfigureVertexTasksEvent.create(srcParallelism, null, null));
  for (int i = 0; i < srcParallelism; i++) {
    list.add(InputDataInformationEvent.createWithObjectPayload(i, null));
  }
  return list;
}
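A minimal sketch of how an initializer like the one above pairs with the 1:1 routing fragment earlier in this section; the loop bounds and names are illustrative, not from the original source:

// Hedged sketch: route each data event to the task with the same index,
// mirroring the setTargetIndex(getSourceIndex()) fragment above.
List<Event> events = initialize();
for (Event e : events.subList(1, events.size())) { // skip the configure event at index 0
  InputDataInformationEvent diEvent = (InputDataInformationEvent) e;
  diEvent.setTargetIndex(diEvent.getSourceIndex()); // 1:1 routing
}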
if (riEvent.getTargetIndex() == taskIndex) {
  events.add(tezEvent);
LOG.debug(getContext().getSourceVertexName() + " initializing Reader: " + eventCount.get());
MRSplitProto splitProto = MRSplitProto.parseFrom(ByteString.copyFrom(event.getUserPayload()));
MRReader reader = null;
JobConf localJobConf = new JobConf(jobConf);
} else {
  InputDataInformationEvent riEvent = (InputDataInformationEvent) tezEvent.getEvent();
  Task targetTask = getTask(riEvent.getTargetIndex());
  targetTask.registerTezEvent(tezEvent);
MRSplitProto serializedSplit = MRInputHelpers.createSplitProto(inputSplit);
InputDataInformationEvent diEvent = InputDataInformationEvent.createWithSerializedPayload(count,
    serializedSplit.toByteString().asReadOnlyByteBuffer());
diEvent.setTargetIndex(count);
taskEvents.add(diEvent);
if (sendSerializedEvents) {
  diEvent = InputDataInformationEvent.createWithSerializedPayload(count++,
      mrSplit.toByteString().asReadOnlyByteBuffer());
} else {
  if (useNewApi) {
    org.apache.hadoop.mapreduce.InputSplit newInputSplit = MRInputUtils
        .getNewSplitDetailsFromEvent(mrSplit, conf);
    diEvent = InputDataInformationEvent.createWithObjectPayload(count++, newInputSplit);
  } else {
    org.apache.hadoop.mapred.InputSplit oldInputSplit = MRInputUtils
        .getOldSplitDetailsFromEvent(mrSplit, conf);
    diEvent = InputDataInformationEvent.createWithObjectPayload(count++, oldInputSplit);
  }
}
public static InputDataInformationEvent convertRootInputDataInformationEventFromProto(
    EventProtos.RootInputDataInformationEventProto proto) {
  InputDataInformationEvent diEvent = InputDataInformationEvent.createWithSerializedPayload(
      proto.getSourceIndex(),
      proto.hasUserPayload() ? proto.getUserPayload().asReadOnlyByteBuffer() : null);
  diEvent.setTargetIndex(proto.getTargetIndex());
  return diEvent;
}
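A minimal round-trip sketch through the proto form, assuming an existing InputDataInformationEvent named diEvent; the source index, target index, and payload should survive the conversion pair above:

// Hedged sketch: convert to proto and back using the two helpers above.
EventProtos.RootInputDataInformationEventProto proto =
    convertRootInputDataInformationEventToProto(diEvent);
InputDataInformationEvent restored = convertRootInputDataInformationEventFromProto(proto);
// restored.getSourceIndex() == diEvent.getSourceIndex()
// restored.getTargetIndex() == diEvent.getTargetIndex()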
for (MRSplitProto mrSplit : splitsProto.getSplitsList()) {
  InputDataInformationEvent diEvent = InputDataInformationEvent.createWithSerializedPayload(
      count++, mrSplit.toByteString().asReadOnlyByteBuffer());
  events.add(diEvent);
}

if (inputSplitInfo.holdsNewFormatSplits()) {
  for (org.apache.hadoop.mapreduce.InputSplit split : inputSplitInfo.getNewFormatSplits()) {
    InputDataInformationEvent diEvent = InputDataInformationEvent.createWithObjectPayload(
        count++, split);
    events.add(diEvent);
  }
}