/**
 * Gets job configuration of the task.
 *
 * @return Job configuration.
 */
public JobConf jobConf() {
    return jobCtx.getJobConf();
}
/**
 * Cleans up job staging directory.
 * <p>
 * Best-effort: any failure is logged and swallowed so that cleanup never
 * propagates an exception to the caller.
 */
public void cleanupStagingDirectory() {
    if (stagingDir == null)
        return;

    try {
        FileSystem fs = job.fileSystem(stagingDir.toUri(), ctx.getJobConf());

        fs.delete(stagingDir, true);
    }
    catch (Exception e) {
        log.error("Failed to remove job staging directory [path=" + stagingDir + ", jobId=" + jobId + ']' , e);
    }
}
/**
 * Set working directory in local file system.
 *
 * @param dir Working directory.
 * @throws IOException If fails.
 */
private void setLocalFSWorkingDirectory(File dir) throws IOException {
    JobConf cfg = ctx.getJobConf();

    // Resolve file systems against the job's class loader while configuring.
    ClassLoader prevLdr = HadoopCommonUtils.setContextClassLoader(cfg.getClassLoader());

    try {
        String dirPath = dir.getAbsolutePath();

        // Property must be set before the local file system is obtained below.
        cfg.set(HadoopFileSystemsUtils.LOC_FS_WORK_DIR_PROP, dirPath);

        boolean cachingDisabled = cfg.getBoolean(FILE_DISABLE_CACHING_PROPERTY_NAME, false);

        if (!cachingDisabled)
            FileSystem.getLocal(cfg).setWorkingDirectory(new Path(dirPath));
    }
    finally {
        HadoopCommonUtils.restoreContextClassLoader(prevLdr);
    }
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public PartiallyOffheapRawComparatorEx<Object> partialRawSortComparator() {
    // Consistency fix: use the jobConf() accessor throughout (the original mixed
    // jobCtx.getJobConf() and jobConf() within the same method); also use Class<?>
    // instead of a raw Class.
    Class<?> cls = jobConf().getClass(HadoopJobProperty.JOB_PARTIALLY_RAW_COMPARATOR.propertyName(), null);

    // No comparator configured for the job.
    if (cls == null)
        return null;

    Object res = ReflectionUtils.newInstance(cls, jobConf());

    if (res instanceof PartiallyOffheapRawComparatorEx)
        return (PartiallyOffheapRawComparatorEx<Object>)res;

    // Adapt a plain PartiallyRawComparator to the extended off-heap interface.
    return new HadoopV2DelegatingPartiallyOffheapRawComparator<>((PartiallyRawComparator)res);
}
continue; JobConf cfg = ctx.getJobConf(); ctx.getJobConf().setStrings(rsrcNameProp, res.toArray(new String[res.size()]));
throw new IgniteCheckedException("Local job directory already exists: " + jobLocDir.getAbsolutePath()); JobConf cfg = ctx.getJobConf();