@Override
public ByteBuffer allocateFrame() throws HyracksDataException {
    // Frame memory is owned and accounted for by the enclosing joblet;
    // this task-level context simply forwards the request.
    return joblet.allocateFrame();
}
@Override
public void deallocateFrames(int bytes) {
    // Return the given number of bytes to the joblet's frame budget.
    joblet.deallocateFrames(bytes);
}
/**
 * Creates the runtime context for a single task attempt inside the given joblet.
 *
 * @param joblet                      owning joblet; supplies the IO manager, environment, and frame budget
 * @param jobFlags                    flags controlling job-level behavior for this attempt
 * @param taskId                      identifier of this task attempt
 * @param displayName                 human-readable name used for diagnostics
 * @param executor                    executor on which this task's threads run
 * @param ncs                         the node controller service hosting this task
 * @param inputChannelsFromConnectors pre-resolved input channels, one list per connector
 */
public Task(Joblet joblet, Set<JobFlag> jobFlags, TaskAttemptId taskId, String displayName,
        ExecutorService executor, NodeControllerService ncs,
        List<List<PartitionChannel>> inputChannelsFromConnectors) {
    this.joblet = joblet;
    this.jobFlags = jobFlags;
    this.taskAttemptId = taskId;
    this.displayName = displayName;
    this.executorService = executor;
    // Workspace files created through this task are tracked for cleanup via the deallocatable registry.
    fileFactory = new WorkspaceFileFactory(this, joblet.getIOManager());
    deallocatableRegistry = new DefaultDeallocatableRegistry();
    counterMap = new HashMap<>();
    opEnv = joblet.getEnvironment();
    // NOTE(review): legacy synchronized Hashtable — presumably chosen for thread-safe
    // access from multiple task threads; confirm before replacing with ConcurrentHashMap
    // (the field's declared type is not visible here).
    partitionSendProfile = new Hashtable<>();
    pendingThreads = new LinkedHashSet<>();
    exceptions = new CopyOnWriteArrayList<>(); // Multiple threads could add exceptions to this list.
    this.ncs = ncs;
    this.inputChannelsFromConnectors = inputChannelsFromConnectors;
    statsCollector = new StatsCollector();
}
/**
 * Completes joblet teardown: removes this joblet from the node controller's map,
 * fires the joblet-finish listener event, closes local resources, and then
 * notifies the cluster controller (best effort) that cleanup is done.
 */
private void performCleanup() {
    nodeController.getJobletMap().remove(jobId);
    IJobletEventListener listener = getJobletEventListener();
    if (listener != null) {
        listener.jobletFinish(cleanupStatus);
    }
    close();
    cleanupPending = false;
    try {
        // Best-effort notification to the CC that owns this job; failure here is
        // intentionally not propagated so local cleanup still completes.
        nodeController.getClusterController(jobId.getCcId()).notifyJobletCleanup(jobId, nodeController.getId());
    } catch (Exception e) {
        // NOTE(review): printStackTrace() is an anti-pattern — route this through the
        // class logger instead (no logger field is visible in this excerpt; confirm one exists).
        e.printStackTrace();
    }
}
@Override
protected void doRun() throws Exception {
    Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
    // First pass: one empty JobProfile per joblet belonging to the requesting CC.
    List<JobProfile> profiles = new ArrayList<>();
    for (Joblet joblet : jobletMap.values()) {
        if (joblet.getJobId().getCcId().equals(ccId)) {
            profiles.add(new JobProfile(joblet.getJobId()));
        }
    }
    // Second pass: populate each profile; the joblet is re-looked-up so a
    // concurrently removed job simply yields an empty profile.
    for (JobProfile profile : profiles) {
        JobletProfile jobletProfile = new JobletProfile(ncs.getId());
        Joblet joblet = jobletMap.get(profile.getJobId());
        if (joblet != null) {
            joblet.dumpProfile(jobletProfile);
            profile.getJobletProfiles().put(ncs.getId(), jobletProfile);
        }
    }
    // Deliver the collected profiles through the future value.
    fv.setValue(profiles);
}
}
// NOTE(review): this line is a collapsed fragment of a larger method (task start-up).
// Visible pieces: reject the request when the node is not active; obtain the job's
// ActivityClusterGraph from the joblet; open an anonymous IRecordDescriptorProvider
// (its body is truncated here); then register the task with the joblet, start it,
// and advance the task index. Do not edit this span without the full surrounding method.
throw HyracksException.create(ErrorCode.NODE_IS_NOT_ACTIVE, ncs.getId()); final ActivityClusterGraph acg = joblet.getActivityClusterGraph(); IRecordDescriptorProvider rdp = new IRecordDescriptorProvider() { @Override joblet.addTask(task); task.start(); taskIndex++;
@Override
public void run() {
    LOGGER.debug("cleaning up after job: {}", jobId);
    // Drop per-job state held by the node controller.
    ncs.removeJobParameterByteStore(jobId);
    ncs.getPartitionManager().jobCompleted(jobId, status);
    // Detach the joblet and, if it was still registered, run its cleanup with the final status.
    Joblet removed = ncs.getJobletMap().remove(jobId);
    if (removed != null) {
        removed.cleanup(status);
    }
}
}
/**
 * Returns the joblet for {@code jobId}, creating and registering it on first use.
 * <p>
 * For deployed (pre-distributed) job specs the cached {@link ActivityClusterGraph} is
 * reused; otherwise the ACG is deserialized from {@code acgBytes}. Job parameters are
 * installed into the job's parameter byte store, and the joblet event listener factory
 * (if any) is given those parameters before the joblet is constructed.
 *
 * @param deploymentId deployment under which {@code acgBytes} should be deserialized
 * @param appCtx       NC service context used for deserialization
 * @param acgBytes     serialized ACG; ignored when a deployed job spec id is present
 * @return the existing or newly created joblet for this job
 * @throws HyracksException if ACG deserialization or joblet construction fails
 */
private Joblet getOrCreateLocalJoblet(DeploymentId deploymentId, INCServiceContext appCtx, byte[] acgBytes)
        throws HyracksException {
    Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
    Joblet ji = jobletMap.get(jobId);
    if (ji == null) {
        ActivityClusterGraph acg = (deployedJobSpecId != null) ? ncs.getActivityClusterGraph(deployedJobSpecId)
                : (ActivityClusterGraph) DeploymentUtils.deserialize(acgBytes, deploymentId, appCtx);
        ncs.createOrGetJobParameterByteStore(jobId).setParameters(jobParameters);
        IJobletEventListenerFactory listenerFactory = acg.getJobletEventListenerFactory();
        if (listenerFactory != null) {
            if (deployedJobSpecId != null) {
                // The cached ACG (and its factory) is shared across runs of a deployed spec,
                // so copy the factory instead of mutating the shared instance. Reuse the
                // reference already in hand rather than re-fetching it from the ACG.
                listenerFactory = listenerFactory.copyFactory();
            }
            listenerFactory.updateListenerJobParameters(ncs.createOrGetJobParameterByteStore(jobId));
        }
        ji = new Joblet(ncs, deploymentId, jobId, appCtx, acg, listenerFactory, jobStartTime);
        jobletMap.put(jobId, ji);
    }
    return ji;
}
// NOTE(review): collapsed fragment — the opening of a try block that decides how a
// task's input partitions are obtained. When no input channels were pre-resolved,
// the joblet advertises a partition request (registering the collector in STARTED
// state) to pull the data; the else branch, which handles already-known channels,
// continues beyond this excerpt.
try { if (inputChannels.isEmpty()) { joblet.advertisePartitionRequest(taskAttemptId, collector.getRequiredPartitionIds(), collector, PartitionState.STARTED); } else {
@Override
public ByteBuffer allocateFrame(int bytes) throws HyracksDataException {
    // Sized allocation is likewise delegated to the joblet's frame manager.
    return joblet.allocateFrame(bytes);
}