/**
 * Entry point for this Tez processor: builds the merge-file record processor
 * for the task and hands control to {@code initializeAndRunProcessor}.
 *
 * @param inputs  logical inputs keyed by source name
 * @param outputs logical outputs keyed by sink name
 * @throws Exception if processor construction or record processing fails
 */
@Override
public void run(Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs)
    throws Exception {
  MergeFileRecordProcessor processor = new MergeFileRecordProcessor(jobConf, getContext());
  rproc = processor;
  initializeAndRunProcessor(inputs, outputs);
}
}
@Override void run() throws Exception { KeyValueReader reader = mrInput.getReader(); //process records until done while (reader.next()) { boolean needMore = processRow(reader.getCurrentKey(), reader.getCurrentValue()); if (!needMore || isAborted()) { break; } } }
@Override void close() { if (cache != null && cacheKey != null) { cache.release(cacheKey); } // check if there are IOExceptions if (!isAborted()) { setAborted(execContext.getIoCxt().getIOExceptions()); } // detecting failed executions by exceptions thrown by the operator tree try { if (mergeOp == null || mfWork == null) { return; } boolean abort = isAborted(); mergeOp.close(abort); ExecMapper.ReportStats rps = new ExecMapper.ReportStats(reporter, jconf); mergeOp.preorderMap(rps); } catch (Exception e) { if (!isAborted()) { // signal new failure to map-reduce l4j.error("Hit error while closing operators - failing tree"); throw new RuntimeException("Hive Runtime Error while closing operators", e); } } finally { Utilities.clearWorkMap(jconf); MapredContext.close(); } }
// Initialization fragment: fetch the MR input and, when the input supplies
// configuration updates, begin wiring outputs.
// NOTE(review): this span is cut off mid-`if` — the body of the
// `updatedConf != null` branch continues outside this view; verify the
// closing brace and the use of updatedConf downstream.
mrInput = getMRInput(inputs); Configuration updatedConf = mrInput.getConfigUpdates(); if (updatedConf != null) { createOutputMap();
@Override void run() throws Exception { KeyValueReader reader = mrInput.getReader(); //process records until done while (reader.next()) { boolean needMore = processRow(reader.getCurrentKey(), reader.getCurrentValue()); if (!needMore) { break; } } }
/** * @param key key to process * @param value value to process * @return true if it is not done and can take more inputs */ private boolean processRow(Object key, Object value) { // reset the execContext for each new row execContext.resetRow(); try { if (mergeOp.getDone()) { return false; //done } else { row[0] = key; row[1] = value; mergeOp.process(row, 0); } } catch (Throwable e) { setAborted(true); if (e instanceof OutOfMemoryError) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } return true; //give me more }
// Initialization fragment: fetch the MR input and, when the input supplies
// configuration updates, begin wiring outputs.
// NOTE(review): this span is cut off mid-`if` — the body of the
// `updatedConf != null` branch continues outside this view; verify the
// closing brace and the use of updatedConf downstream.
mrInput = getMRInput(inputs); Configuration updatedConf = mrInput.getConfigUpdates(); if (updatedConf != null) { createOutputMap();
/** * @param key key to process * @param value value to process * @return true if it is not done and can take more inputs */ private boolean processRow(Object key, Object value) { // reset the execContext for each new row execContext.resetRow(); try { if (mergeOp.getDone()) { return false; //done } else { row[0] = key; row[1] = value; mergeOp.process(row, 0); } } catch (Throwable e) { setAborted(true); if (e instanceof OutOfMemoryError) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } return true; //give me more }
// Initialization fragment: fetch the MR input and, when the input supplies
// configuration updates, begin wiring outputs.
// NOTE(review): this span is cut off mid-`if` — the body of the
// `updatedConf != null` branch continues outside this view; verify the
// closing brace and the use of updatedConf downstream.
mrInput = getMRInput(inputs); Configuration updatedConf = mrInput.getConfigUpdates(); if (updatedConf != null) { createOutputMap();
@Override void run() throws Exception { KeyValueReader reader = mrInput.getReader(); //process records until done while (reader.next()) { boolean needMore = processRow(reader.getCurrentKey(), reader.getCurrentValue()); if (!needMore || isAborted()) { break; } } }
@Override void close() { if (cache != null && cacheKey != null) { cache.release(cacheKey); } // check if there are IOExceptions if (!isAborted()) { setAborted(execContext.getIoCxt().getIOExceptions()); } // detecting failed executions by exceptions thrown by the operator tree try { if (mergeOp == null || mfWork == null) { return; } boolean abort = isAborted(); mergeOp.close(abort); ExecMapper.ReportStats rps = new ExecMapper.ReportStats(reporter, jconf); mergeOp.preorderMap(rps); } catch (Exception e) { if (!isAborted()) { // signal new failure to map-reduce l4j.error("Hit error while closing operators - failing tree"); throw new RuntimeException("Hive Runtime Error while closing operators", e); } } finally { Utilities.clearWorkMap(jconf); MapredContext.close(); } }
/**
 * Entry point for this Tez processor: builds the merge-file record processor
 * for the task and hands control to {@code initializeAndRunProcessor}.
 *
 * @param inputs  logical inputs keyed by source name
 * @param outputs logical outputs keyed by sink name
 * @throws Exception if processor construction or record processing fails
 */
@Override
public void run(Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs)
    throws Exception {
  MergeFileRecordProcessor processor = new MergeFileRecordProcessor(jobConf, getContext());
  rproc = processor;
  initializeAndRunProcessor(inputs, outputs);
}
}
/**
 * Entry point for this Tez processor: builds the merge-file record processor
 * for the task and hands control to {@code initializeAndRunProcessor}.
 *
 * @param inputs  logical inputs keyed by source name
 * @param outputs logical outputs keyed by sink name
 * @throws Exception if processor construction or record processing fails
 */
@Override
public void run(Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs)
    throws Exception {
  MergeFileRecordProcessor processor = new MergeFileRecordProcessor(jobConf, getContext());
  rproc = processor;
  initializeAndRunProcessor(inputs, outputs);
}
}