// NOTE(review): anonymous-class fragments — the enclosing expression (presumably a
// `new Callable<Object>() { ... }` submitted to an executor/cache loader) begins outside
// this view; the trailing `}));` closes scopes opened elsewhere. Each call() simply
// loads the merge work for the captured prefix from the captured jconf.
@Override public Object call() { return Utilities.getMergeWork(jconf, prefix); } }));
@Override public Object call() { return Utilities.getMergeWork(jconf, prefix); } }));
@Override public Object call() { return Utilities.getMergeWork(jconf, prefix); } }));
@Override public Object call() { return Utilities.getMergeWork(jconf, prefix); } }));
/**
 * Returns the merge work registered under the configuration's current merge-file
 * prefix, or {@code null} when no such prefix is configured.
 *
 * @param jconf configuration holding {@code DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX}
 * @return the {@code BaseWork} for the current merge prefix, or {@code null} if the
 *         prefix is unset or empty
 */
public static BaseWork getMergeWork(Configuration jconf) {
  String currentMergePrefix = jconf.get(DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX);
  if (StringUtils.isEmpty(currentMergePrefix)) {
    return null;
  }
  // Reuse the prefix already fetched above instead of re-reading the Configuration.
  return getMergeWork(jconf, currentMergePrefix);
}
/**
 * Returns the merge work registered under the configuration's current merge-file
 * prefix, or {@code null} when no such prefix is configured.
 *
 * @param jconf configuration holding {@code DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX}
 * @return the {@code BaseWork} for the current merge prefix, or {@code null} if the
 *         prefix is unset or empty
 */
public static BaseWork getMergeWork(Configuration jconf) {
  // Read the prefix once instead of hitting the Configuration three separate times.
  String currentMergePrefix = jconf.get(DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX);
  if (currentMergePrefix == null || currentMergePrefix.isEmpty()) {
    return null;
  }
  return getMergeWork(jconf, currentMergePrefix);
}
private static MapWork populateMapWork(JobConf jobConf, String inputName) { MapWork work = null; if (inputName != null) { work = (MapWork) Utilities.getMergeWork(jobConf, inputName); // work can still be null if there is no merge work for this input } if (work == null) { work = Utilities.getMapWork(jobConf); } return work; }
/**
 * Resolves the MapWork for the given input, preferring input-specific merge work
 * over the job's regular map work.
 *
 * @param jobConf   job configuration to look the work up in
 * @param inputName logical input name, or {@code null} when not applicable
 * @return the resolved MapWork
 */
private static MapWork populateMapWork(JobConf jobConf, String inputName) {
  MapWork work = null;
  if (inputName != null) {
    work = (MapWork) Utilities.getMergeWork(jobConf, inputName);
    // work can still be null if there is no merge work for this input
  }
  if (work == null) {
    // Fall back to the plain map work for the job.
    work = Utilities.getMapWork(jobConf);
  }
  return work;
}
/**
 * Resolves the MapWork to initialize for this job. Normally this is the plain map
 * work; for SMB joins there may be a "fake" merge work that is really another
 * MapWork (HIVE-16985), but that path is currently disabled by the early return.
 *
 * @param job the job configuration
 * @return the resolved MapWork, or {@code null} when merge-work prefixes are present
 *         (signals the caller that the LLAP IO elevator cannot be used — see HIVE-16985)
 * @throws HiveException propagated from the underlying work lookups
 */
private static MapWork findMapWork(JobConf job) throws HiveException {
  String inputName = job.get(Utilities.INPUT_NAME, null);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Initializing for input " + inputName);
  }
  String prefixes = job.get(DagUtils.TEZ_MERGE_WORK_FILE_PREFIXES);
  if (prefixes != null && !StringUtils.isBlank(prefixes)) {
    // Currently SMB is broken, so we cannot check if it's compatible with IO elevator.
    // So, we don't use the below code that would get the correct MapWork. See HIVE-16985.
    return null;
  }
  BaseWork work = null;
  // HIVE-16985: try to find the fake merge work for SMB join, that is really another MapWork.
  // NOTE(review): after the early return above, prefixes here is null or blank, so the
  // contains() check always resets inputName to null and the merge lookup never fires;
  // this code is intentionally kept for when HIVE-16985 is fixed.
  if (inputName != null) {
    if (prefixes == null || !Lists.newArrayList(prefixes.split(",")).contains(inputName)) {
      inputName = null;
    }
  }
  if (inputName != null) {
    work = Utilities.getMergeWork(job, inputName);
  }
  if (work == null || !(work instanceof MapWork)) {
    // Fall back to the regular map work when no (MapWork-typed) merge work was found.
    work = Utilities.getMapWork(job);
  }
  return (MapWork) work;
}
/**
 * Lazily initializes {@code mrwork} and {@code pathToPartitionInfo} from the job
 * configuration. Under Tez, a merge work (when present) takes precedence over the
 * regular map work.
 *
 * @param job the job configuration to resolve the work from
 */
protected void init(JobConf job) {
  if (mrwork != null && pathToPartitionInfo != null) {
    return; // already initialized
  }
  MapWork resolved = null;
  if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
    // Tez: a merge-file task publishes its work separately; prefer it when present.
    resolved = (MapWork) Utilities.getMergeWork(job);
  }
  if (resolved == null) {
    resolved = Utilities.getMapWork(job);
  }
  mrwork = resolved;
  pathToPartitionInfo = mrwork.getPathToPartitionInfo();
}
/**
 * Lazily initializes {@code mrwork} and {@code pathToPartitionInfo} from the job
 * configuration. Under Tez a merge work (if present) takes precedence over the
 * regular map work; under Spark, dynamic partition pruning is applied before the
 * partition map is captured.
 *
 * @param job the job configuration to resolve the work from
 */
protected void init(JobConf job) {
  if (mrwork == null || pathToPartitionInfo == null) {
    if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
      // Tez: a merge-file task publishes its work separately; prefer it when present.
      mrwork = (MapWork) Utilities.getMergeWork(job);
      if (mrwork == null) {
        mrwork = Utilities.getMapWork(job);
      }
    } else {
      mrwork = Utilities.getMapWork(job);
    }
    // Prune partitions
    if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")
        && HiveConf.getBoolVar(job, HiveConf.ConfVars.SPARK_DYNAMIC_PARTITION_PRUNING)) {
      SparkDynamicPartitionPruner pruner = new SparkDynamicPartitionPruner();
      try {
        // Must run before pathToPartitionInfo is captured below, so pruned
        // partitions are not visible to the reader.
        pruner.prune(mrwork, job);
      } catch (Exception e) {
        // NOTE(review): pruning failures are escalated as unchecked; init is treated as fatal.
        throw new RuntimeException(e);
      }
    }
    pathToPartitionInfo = mrwork.getPathToPartitionInfo();
  }
}
// NOTE(review): truncated fragments — the closing braces and the non-Tez else-branch
// (if any) lie outside this view; do not restructure from here. Each resolves the
// MapWork under Tez by preferring the merge work and falling back to the map work.
MapWork mrwork; if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) { mrwork = (MapWork) Utilities.getMergeWork(jobConf); if (mrwork == null) { mrwork = Utilities.getMapWork(jobConf);
MapWork mrwork; if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) { mrwork = (MapWork) Utilities.getMergeWork(jobConf); if (mrwork == null) { mrwork = Utilities.getMapWork(jobConf);
// NOTE(review): anonymous-class fragment — the enclosing expression (presumably a
// `new Callable<Object>() { ... }` submitted to an executor/cache loader) begins
// outside this view. call() loads the merge work for the captured prefix.
@Override public Object call() { return Utilities.getMergeWork(jconf, prefix); } }));
/**
 * Returns the merge work registered under the job configuration's current
 * merge-file prefix, or {@code null} when no such prefix is configured.
 *
 * @param jconf job configuration holding {@code DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX}
 * @return the {@code BaseWork} for the current merge prefix, or {@code null} if the
 *         prefix is unset or empty
 */
public static BaseWork getMergeWork(JobConf jconf) {
  // Read the prefix once instead of hitting the JobConf three separate times.
  String currentMergePrefix = jconf.get(DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX);
  if (currentMergePrefix == null || currentMergePrefix.isEmpty()) {
    return null;
  }
  return getMergeWork(jconf, currentMergePrefix);
}
private static MapWork populateMapWork(JobConf jobConf, String inputName) { MapWork work = null; if (inputName != null) { work = (MapWork) Utilities.getMergeWork(jobConf, inputName); // work can still be null if there is no merge work for this input } if (work == null) { work = Utilities.getMapWork(jobConf); } return work; }
/**
 * Resolves the MapWork to initialize for this job. Normally this is the plain map
 * work; for SMB joins there may be a "fake" merge work that is really another
 * MapWork (HIVE-16985), but that path is currently disabled by the early return.
 *
 * @param job the job configuration
 * @return the resolved MapWork, or {@code null} when merge-work prefixes are present
 *         (signals the caller that the LLAP IO elevator cannot be used — see HIVE-16985)
 * @throws HiveException propagated from the underlying work lookups
 */
private static MapWork findMapWork(JobConf job) throws HiveException {
  String inputName = job.get(Utilities.INPUT_NAME, null);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Initializing for input " + inputName);
  }
  String prefixes = job.get(DagUtils.TEZ_MERGE_WORK_FILE_PREFIXES);
  if (prefixes != null && !StringUtils.isBlank(prefixes)) {
    // Currently SMB is broken, so we cannot check if it's compatible with IO elevator.
    // So, we don't use the below code that would get the correct MapWork. See HIVE-16985.
    return null;
  }
  BaseWork work = null;
  // HIVE-16985: try to find the fake merge work for SMB join, that is really another MapWork.
  // NOTE(review): after the early return above, prefixes here is null or blank, so the
  // contains() check always resets inputName to null and the merge lookup never fires;
  // this code is intentionally kept for when HIVE-16985 is fixed.
  if (inputName != null) {
    if (prefixes == null || !Lists.newArrayList(prefixes.split(",")).contains(inputName)) {
      inputName = null;
    }
  }
  if (inputName != null) {
    work = Utilities.getMergeWork(job, inputName);
  }
  if (work == null || !(work instanceof MapWork)) {
    // Fall back to the regular map work when no (MapWork-typed) merge work was found.
    work = Utilities.getMapWork(job);
  }
  return (MapWork) work;
}
/**
 * Lazily initializes {@code mrwork} and {@code pathToPartitionInfo} from the job
 * configuration. Under Tez, a merge work (when present) takes precedence over the
 * regular map work.
 *
 * @param job the job configuration to resolve the work from
 */
protected void init(JobConf job) {
  if (mrwork == null || pathToPartitionInfo == null) {
    if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
      // Tez: a merge-file task publishes its work separately; prefer it when present.
      mrwork = (MapWork) Utilities.getMergeWork(job);
      if (mrwork == null) {
        mrwork = Utilities.getMapWork(job);
      }
    } else {
      mrwork = Utilities.getMapWork(job);
    }
    pathToPartitionInfo = mrwork.getPathToPartitionInfo();
  }
}