@Override
public JobId getJobId() {
    return ctx.getJobletContext().getJobId();
}
@Override
public void sendApplicationMessageToCC(byte[] message, DeploymentId deploymentId) throws Exception {
    // Route the message to the CC that owns this job.
    this.ncs.sendApplicationMessageToCC(getJobletContext().getJobId().getCcId(), message, deploymentId);
}
@Override
public void open() throws HyracksDataException {
    // Allocate per-task state, keyed by (job id, task id), to buffer the collected tuples.
    state = new CollectTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
    state.buffer = new ArrayList<Object[]>();
}
@Override
public void open() throws HyracksDataException {
    // Register the partition with the CC that owns the job before establishing the connection.
    manager.registerPartition(pid, ctx.getJobletContext().getJobId().getCcId(), taId, this, PartitionState.STARTED,
            false);
    pendingConnection = true;
    ensureConnected();
}
@Override
public void sendApplicationMessageToCC(Serializable message, DeploymentId deploymentId) throws Exception {
    // Serialize the message, then route it to the CC that owns this job.
    this.ncs.sendApplicationMessageToCC(getJobletContext().getJobId().getCcId(),
            JavaSerializationUtils.serialize(message), deploymentId);
}
@Override
public void close() throws HyracksDataException {
    closeTime = System.currentTimeMillis();
    try {
        // Record the send-side profile for this partition before closing the writer.
        ((Task) ctx).setPartitionSendProfile(new PartitionProfile(
                new PartitionId(ctx.getJobletContext().getJobId(), cd.getConnectorId(), senderIndex, receiverIndex),
                openTime, closeTime, mrep));
    } finally {
        writer.close();
    }
}
@Override
public void open() throws HyracksDataException {
    if (LOGGER.isEnabled(openCloseLevel)) {
        LOGGER.log(openCloseLevel, "open(" + pid + ") by " + taId);
    }
    size = 0;
    eos = false;
    failed = false;
    deallocated = false;
    manager.registerPartition(pid, ctx.getJobletContext().getJobId().getCcId(), taId, this, PartitionState.STARTED,
            false);
}
@Override
public void open() throws HyracksDataException {
    state = new MaterializerTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
    state.open(ctx);
}
@Override
public void close() throws HyracksDataException {
    SortTaskState state = new SortTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
    runGen.close();
    state.generatedRunFileReaders = runGen.getRuns();
    state.sorter = runGen.getSorter();
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("InitialNumberOfRuns:" + runGen.getRuns().size());
    }
    // Hand the generated runs and the in-memory sorter to the merge activity via the task state.
    ctx.setStateObject(state);
}
@Override
public void close() throws HyracksDataException {
    // Expecting a range map by the time the input is fully consumed.
    if (numFields <= 0 || splitValues == null || splitValuesEndOffsets == null) {
        throw HyracksDataException.create(ErrorCode.NO_RANGEMAP_PRODUCED, sourceLoc);
    }
    // Store the range map in the task state of ctx so that the next activity (forward) can retrieve it.
    TaskId rangeMapReaderTaskId = new TaskId(activityId, partition);
    RangeMapState rangeMapState = new RangeMapState(ctx.getJobletContext().getJobId(), rangeMapReaderTaskId);
    rangeMapState.rangeMap = new RangeMap(numFields, splitValues, splitValuesEndOffsets);
    ctx.setStateObject(rangeMapState);
}
@Override
public final void deinitialize() throws HyracksDataException {
    activeManager.deregisterRuntime(runtimeId);
    try {
        ctx.sendApplicationMessageToCC(new ActivePartitionMessage(runtimeId, ctx.getJobletContext().getJobId(),
                Event.RUNTIME_DEREGISTERED, null), null);
    } catch (Exception e) {
        LOGGER.log(Level.INFO, "deinitialize() failed on ActiveSourceOperatorNodePushable", e);
        throw HyracksDataException.create(e);
    } finally {
        LOGGER.log(Level.INFO, "deinitialize() returning on ActiveSourceOperatorNodePushable");
    }
}
@Override
public void open() throws HyracksDataException {
    if (requiresMaterialization) {
        state = new MaterializerTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition),
                numberOfMaterializedOutputs);
        state.open(ctx);
    }
    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
        isOpen[i] = true;
        writers[i].open();
    }
}
@Override
public void run() {
    TaskProfile taskProfile =
            new TaskProfile(task.getTaskAttemptId(), task.getPartitionSendProfile(), task.getStatsCollector());
    try {
        // Report completion, along with the collected profile, to the CC that owns the job.
        ncs.getClusterController(task.getJobletContext().getJobId().getCcId()).notifyTaskComplete(
                task.getJobletContext().getJobId(), task.getTaskAttemptId(), ncs.getId(), taskProfile);
    } catch (Exception e) {
        LOGGER.log(Level.ERROR, "Failed notifying task complete for " + task.getTaskAttemptId(), e);
    }
    task.getJoblet().removeTask(task);
}
@Override
public void open() throws HyracksDataException {
    state = new ExternalGroupState(ctx.getJobletContext().getJobId(), stateId);
    ISpillableTable table = spillableTableFactory.buildSpillableTable(ctx, tableSize, fileSize, keyFields, comparators,
            firstNormalizerComputer, aggregatorFactory, inRecordDescriptor, outRecordDescriptor, framesLimit, 0);
    RunFileWriter[] runFileWriters = new RunFileWriter[table.getNumPartitions()];
    this.externalGroupBy = new ExternalHashGroupBy(this, table, runFileWriters, inRecordDescriptor);
    // Expose the table, spill files, and spill counts to the merge activity through the task state.
    state.setSpillableTable(table);
    state.setRuns(runFileWriters);
    state.setSpilledNumTuples(externalGroupBy.getSpilledNumTuples());
}
@Override
public void open() throws HyracksDataException {
    state = new JoinCacheTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
    state.joiner = new NestedLoopJoin(ctx, new FrameTupleAccessor(rd0), new FrameTupleAccessor(rd1), comparator,
            memSize, predEvaluator, isLeftOuter, nullWriters1);
}
@Override
public void open() throws HyracksDataException {
    super.open();
    deletedTupleCounter = new DeletedTupleCounter(ctx.getJobletContext().getJobId(), partition);
    ctx.setStateObject(deletedTupleCounter);
    try {
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}
@Override
public void open() throws HyracksDataException {
    state = new SortTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
    IFrameBufferManager frameBufferManager = new VariableFrameMemoryManager(
            new VariableFramePool(ctx, VariableFramePool.UNLIMITED_MEMORY),
            FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT));
    state.frameSorter = new FrameSorterMergeSort(ctx, frameBufferManager, VariableFramePool.UNLIMITED_MEMORY,
            sortFields, keyNormalizerFactories, comparatorFactories, outRecDescs[0]);
    state.frameSorter.reset();
}
@Override
public void open() throws HyracksDataException {
    ITuplePartitionComputer hpc0 =
            new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner(ctx);
    ITuplePartitionComputer hpc1 =
            new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner(ctx);
    state = new HashBuildTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
    ISerializableTable table = new SerializableHashTable(tableSize, ctx, bufferManager);
    state.joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0), hpc0, new FrameTupleAccessor(rd1), rd1, hpc1,
            new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter, nullWriters1, table, predEvaluator,
            bufferManager);
}
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    // Derive the transaction id from the job id so the commit is scoped to this job.
    return new IPushRuntime[] { new CommitRuntime(ctx, new TxnId(ctx.getJobletContext().getJobId().getId()),
            getDatasetId(), primaryKeyFieldPermutation, true, ctx.getTaskAttemptId().getTaskId().getPartition(),
            true) };
}
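// Counterpart sketch, assumed rather than taken from the usages above: several of the open()/close()
// methods in this listing publish per-partition state with ctx.setStateObject(...), keyed by the pair
// (job id, task id). The consuming activity of the same operator retrieves that state with
// ctx.getStateObject(...) under an identical key; getStateObject resolves within the current job, so
// no explicit JobId argument is needed. MATERIALIZER_ACTIVITY_ID, the enclosing activity node, and
// the exact writeOut(...) signature are illustrative assumptions, not one of the call sites above.
@Override
public void initialize() throws HyracksDataException {
    // Rebuild the key the producer activity used when it registered its materialized state.
    MaterializerTaskState state = (MaterializerTaskState) ctx
            .getStateObject(new TaskId(new ActivityId(getOperatorId(), MATERIALIZER_ACTIVITY_ID), partition));
    // Replay the materialized frames into this activity's writer.
    state.writeOut(writer, new VSizeFrame(ctx), false);
}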