/**
 * Serializes a {@link TaskAttemptId} to the stream as four ints, in order:
 * operator descriptor id, activity local id, task partition, attempt number.
 *
 * @param dos  stream to write to
 * @param taId attempt id to serialize
 * @throws IOException if the underlying stream fails
 */
private static void writeTaskAttemptId(DataOutputStream dos, TaskAttemptId taId) throws IOException {
    TaskId taskId = taId.getTaskId();
    ActivityId activityId = taskId.getActivityId();
    dos.writeInt(activityId.getOperatorDescriptorId().getId());
    dos.writeInt(activityId.getLocalId());
    dos.writeInt(taskId.getPartition());
    dos.writeInt(taId.getAttempt());
}
/**
 * Parses the textual form {@code "TID:<activity-id>:<partition>"}.
 *
 * @param str textual task id
 * @return the parsed TaskId
 * @throws IllegalArgumentException if {@code str} does not start with "TID:"
 */
public static TaskId parse(String str) {
    if (!str.startsWith("TID:")) {
        throw new IllegalArgumentException("Unable to parse: " + str);
    }
    String body = str.substring(4);
    // The activity id itself contains ':' separators, so split on the LAST colon.
    int sep = body.lastIndexOf(':');
    ActivityId activityId = ActivityId.parse(body.substring(0, sep));
    int partition = Integer.parseInt(body.substring(sep + 1));
    return new TaskId(activityId, partition);
}
/**
 * Resolves the TaskCluster owning the given task by walking the current
 * job run's activity-cluster plan down to the task's partition slot.
 */
private TaskCluster getTaskCluster(TaskId tid) {
    JobRun jobRun = executor.getJobRun();
    ActivityCluster cluster = jobRun.getActivityClusterGraph().getActivityMap().get(tid.getActivityId());
    ActivityClusterPlan plan = jobRun.getActivityClusterPlanMap().get(cluster.getId());
    Task task = plan.getActivityPlanMap().get(tid.getActivityId()).getTasks()[tid.getPartition()];
    // The plan indexes tasks by partition, so the slot must hold exactly this task id.
    assert task.getTaskId().equals(tid);
    return task.getTaskCluster();
}
/**
 * Deserialization factory: reads a TaskId previously written with the
 * matching write/readFields wire format.
 *
 * @param dis input to read from
 * @return the reconstructed TaskId
 * @throws IOException if the underlying input fails
 */
public static TaskId create(DataInput dis) throws IOException {
    TaskId id = new TaskId();
    id.readFields(dis);
    return id;
}
private void run() throws HyracksDataException { // Start by getting the partition number from the manager LOGGER.info("Starting ingestion for partition:" + ctx.getTaskAttemptId().getTaskId().getPartition()); try { doRun(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw HyracksDataException.create(e); } catch (Exception e) { LOGGER.log(Level.WARN, "Unhandled Exception", e); throw HyracksDataException.create(e); } }
// NOTE(review): fragment — the method body is truncated mid-loop in this view and the braces do not
// balance here, so the code is left byte-identical.
// Intent visible so far: when the activity is blocked on other activities, try to co-locate this task
// with a blocker's task for the same partition; `nodeId` presumably gets a default assignment in the
// unseen remainder — TODO confirm against the full method.
private String assignLocation(ActivityClusterGraph acg, Map<TaskId, LValueConstraintExpression> locationMap, TaskId tid, TaskAttempt taskAttempt) throws HyracksException { ActivityId aid = tid.getActivityId(); ActivityCluster ac = acg.getActivityMap().get(aid); Set<ActivityId> blockers = ac.getBlocked2BlockerMap().get(aid); if (blockers != null) { for (ActivityId blocker : blockers) { nodeId = findTaskLocation(new TaskId(blocker, tid.getPartition())); if (nodeId != null) { break;
/**
 * Two attempt ids are equal when they name the same task and the same attempt number.
 */
@Override
public boolean equals(Object o) {
    if (o instanceof TaskAttemptId) {
        TaskAttemptId that = (TaskAttemptId) o;
        return attempt == that.attempt && taskId.equals(that.taskId);
    }
    return false;
}
/**
 * Reconstructs this attempt id from its wire form: the embedded TaskId
 * followed by the attempt counter, matching the order written on serialization.
 */
@Override public void readFields(DataInput input) throws IOException { taskId = TaskId.create(input); attempt = input.readInt(); } }
// Creates an evaluator producing monotonically increasing unique binary ids. The 12-byte payload
// seeds its first 4 bytes with this task's partition, which gives cross-partition uniqueness; each
// evaluate() call then increments the payload as a counter with ripple carry (rightmost byte first).
// NOTE(review): the carry loop runs down to PAYLOAD_START inclusive, so an overflow of the 8-byte
// local-id would spill into the partition-id bytes — practically unreachable (2^64 increments per
// task), but worth confirming that this is acceptable.
// NOTE(review): uidBytes is mutable state captured by the returned evaluator; this assumes
// single-threaded evaluation within a task — TODO confirm.
@Override public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException { // Format: |TypeTag | PayloadLength | Payload | // TypeTag: 1 byte // PayloadLength: 1 byte // Payload: 12 bytes: |partition-id (4 bytes) | local-id (8 bytes) | byte[] uidBytes = new byte[BINARY_LENGTH]; // Writes the type tag. uidBytes[0] = ATypeTag.SERIALIZED_BINARY_TYPE_TAG; // Writes the payload size. uidBytes[1] = BINARY_LENGTH - PAYLOAD_START; // Writes the 4 byte partition id. IntegerPointable.setInteger(uidBytes, PAYLOAD_START, ctx.getTaskAttemptId().getTaskId().getPartition()); return new IScalarEvaluator() { @Override public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException { // Increments the Unique ID value. for (int i = BINARY_LENGTH - 1; i >= PAYLOAD_START; i--) { if (++uidBytes[i] != 0) { break; } } result.set(uidBytes, 0, BINARY_LENGTH); } }; } };
// NOTE(review): this span interleaves several disjoint fragments — task-attempt construction and
// registration, a truncated TaskAttemptDescriptor call, and producer partition-location resolution —
// and is not syntactically complete (e.g. "new TaskAttemptDescriptor(..., int attempt = ..." is a
// mid-extraction splice). Left byte-identical pending a view of the full enclosing method(s).
TaskId tid = ts.getTaskId(); TaskAttempt taskAttempt = new TaskAttempt(tcAttempt, new TaskAttemptId(new TaskId(tid.getActivityId(), tid.getPartition()), attempts), ts); taskAttempt.setStatus(TaskAttempt.TaskStatus.INITIALIZED, null); locationMap.put(tid, new PartitionLocationExpression(tid.getActivityId().getOperatorDescriptorId(), tid.getPartition())); taskAttempts.put(tid, taskAttempt); taskAttemptMap.put(nodeId, tads); OperatorDescriptorId opId = tid.getActivityId().getOperatorDescriptorId(); jobRun.registerOperatorLocation(opId, tid.getPartition(), nodeId); ActivityPartitionDetails apd = ts.getActivityPlan().getActivityPartitionDetails(); TaskAttemptDescriptor tad = new TaskAttemptDescriptor(taskAttempt.getTaskAttemptId(), int attempt = taid.getAttempt(); TaskId tid = taid.getTaskId(); ActivityId aid = tid.getActivityId(); List<IConnectorDescriptor> inConnectors = acg.getActivityInputs(aid); int[] inPartitionCounts = tad.getInputPartitionCounts(); partitionLocations[i] = new NetworkAddress[inPartitionCounts[i]]; for (int j = 0; j < inPartitionCounts[i]; ++j) { TaskId producerTaskId = new TaskId(producerAid, j); String nodeId = findTaskLocation(producerTaskId); partitionLocations[i][j] = nodeManager.getNodeControllerState(nodeId).getDataPort();
/**
 * Allocates fresh per-task collection state, keyed by this activity's
 * task id within the current job, with an empty tuple buffer.
 */
@Override
public void open() throws HyracksDataException {
    TaskId taskId = new TaskId(getActivityId(), partition);
    state = new CollectTaskState(ctx.getJobletContext().getJobId(), taskId);
    state.buffer = new ArrayList<Object[]>();
}
/**
 * Returns the node on which the latest attempt of the given task ran,
 * or null when the task's cluster has no attempts yet or the task was
 * not part of the last attempt.
 */
private String findTaskLocation(TaskId tid) {
    ActivityId activityId = tid.getActivityId();
    ActivityCluster cluster = jobRun.getActivityClusterGraph().getActivityMap().get(activityId);
    Task task = getActivityClusterPlan(cluster).getActivityPlanMap().get(activityId).getTasks()[tid.getPartition()];
    List<TaskClusterAttempt> attempts = task.getTaskCluster().getAttempts();
    if (attempts == null || attempts.isEmpty()) {
        return null;
    }
    // Only the most recent cluster attempt is authoritative for placement.
    TaskAttempt attempt = attempts.get(attempts.size() - 1).getTaskAttempts().get(tid);
    return attempt == null ? null : attempt.getNodeId();
}
// NOTE(review): fragment — cut off mid-constructor-call (NetworkInputChannel argument list is
// incomplete), so it is left byte-identical. Visible intent: build the PartitionId for input
// connector i / sender partition j / this task's receiver partition, then open a network channel to it.
NetworkAddress networkAddress = inputAddresses[i][j]; PartitionId pid = new PartitionId(jobId, inputs.get(i).getConnectorId(), j, td.getTaskAttemptId().getTaskId().getPartition()); PartitionChannel channel = new PartitionChannel(pid, new NetworkInputChannel(ncs.getNetworkManager(),
/**
 * Builds the push runtime for the group-build activity. Spilled state is
 * keyed by this activity's task id so the downstream write activity can
 * locate it for the same partition.
 */
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
        throws HyracksDataException {
    TaskId stateId = new TaskId(getActivityId(), partition);
    return new ExternalGroupBuildOperatorNodePushable(ctx, stateId, tableSize, fileSize, keyFields, framesLimit,
            comparatorFactories, firstNormalizerFactory, partialAggregatorFactory,
            recordDescProvider.getInputRecordDescriptor(getActivityId(), 0), outRecDescs[0],
            spillableTableFactory);
}
}
// NOTE(review): fragment — two loops from task-cluster construction spliced together with unbalanced
// braces (`tcSet.add(tc)` sits between them and `tc` is created in unseen code). Left byte-identical.
// Visible intent: first collect the Task objects for every TaskId in the cluster, then point each of
// those tasks back at the TaskCluster `tc`.
List<Task> taskStates = new ArrayList<>(); for (TaskId tid : cluster) { taskStates.add(activityPlanMap.get(tid.getActivityId()).getTasks()[tid.getPartition()]); tcSet.add(tc); for (TaskId tid : cluster) { activityPlanMap.get(tid.getActivityId()).getTasks()[tid.getPartition()].setTaskCluster(tc);
private void doRun() throws HyracksDataException, InterruptedException { while (true) { try { // Start the adapter adapter.start(ctx.getTaskAttemptId().getTaskId().getPartition(), writer); // Adapter has completed execution return; } catch (InterruptedException e) { throw e; } catch (Exception e) { LOGGER.log(Level.WARN, "Exception during feed ingestion ", e); throw HyracksDataException.create(e); } } }
/**
 * Opens per-task materializer state for this activity/partition within
 * the current job.
 */
@Override
public void open() throws HyracksDataException {
    TaskId taskId = new TaskId(getActivityId(), partition);
    state = new MaterializerTaskState(ctx.getJobletContext().getJobId(), taskId);
    state.open(ctx);
}
/**
 * Locates the TaskAttempt named by taId inside the job run's plan and, when
 * every lookup along the way succeeds, dispatches it to performEvent. Any
 * missing link (job gone, unknown activity, out-of-range partition or attempt,
 * absent attempt) makes this a silent no-op.
 */
@Override
public final void runWork() {
    IJobManager jobManager = ccs.getJobManager();
    JobRun run = jobManager.get(jobId);
    if (run == null) {
        return;
    }
    TaskId tid = taId.getTaskId();
    ActivityCluster ac = run.getActivityClusterGraph().getActivityMap().get(tid.getActivityId());
    if (ac == null) {
        return;
    }
    Task[] taskStates =
            run.getActivityClusterPlanMap().get(ac.getId()).getActivityPlanMap().get(tid.getActivityId()).getTasks();
    if (taskStates == null || taskStates.length <= tid.getPartition()) {
        return;
    }
    List<TaskClusterAttempt> tcAttempts = taskStates[tid.getPartition()].getTaskCluster().getAttempts();
    if (tcAttempts == null || tcAttempts.size() <= taId.getAttempt()) {
        return;
    }
    TaskAttempt ta = tcAttempts.get(taId.getAttempt()).getTaskAttempts().get(tid);
    if (ta != null) {
        performEvent(ta);
    }
}
/**
 * Builds the commit runtime for this partition, binding it to the transaction
 * id supplied by the joblet's event listener factory.
 */
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    IJobletEventListenerFactory fact = ctx.getJobletContext().getJobletEventListenerFactory();
    int datasetPartition = datasetPartitions[ctx.getTaskAttemptId().getTaskId().getPartition()];
    CommitRuntime commit = new CommitRuntime(ctx, ((IJobEventListenerFactory) fact).getTxnId(datasetId), datasetId,
            primaryKeyFields, isWriteTransaction, datasetPartition, isSink);
    return new IPushRuntime[] { commit };
}
}
/**
 * Builds the group-write push runtime, keyed by the aggregate activity's task
 * id so it can locate the state spilled by the build activity for the same
 * partition.
 */
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
        throws HyracksDataException {
    TaskId aggregateStateId = new TaskId(new ActivityId(getOperatorId(), AGGREGATE_ACTIVITY_ID), partition);
    return new ExternalGroupWriteOperatorNodePushable(ctx, aggregateStateId, spillableTableFactory, partialRecDesc,
            outRecDesc, framesLimit, keyFields, firstNormalizerFactory, comparatorFactories,
            intermediateAggregateFactory);
}