/**
 * Computes the combined size of every stored input split.
 *
 * @return total length, in bytes, of all stored input splits
 * @throws IOException if any underlying split fails to report its length
 */
public long getLength() throws IOException {
  long total = 0L;
  for (InputSplit split : inputSplits) {
    total += split.getLength();
  }
  return total;
}
/**
 * Delegates to the wrapped split.
 *
 * @return the length, in bytes, reported by the wrapped input split
 * @throws IOException if the wrapped split cannot report its length
 */
public long getLength() throws IOException {
  final long wrappedLength = inputSplit.getLength();
  return wrappedLength;
}
/**
 * Returns the length of the split at the given index.
 *
 * @param idx index into the stored splits
 * @return length of the idx-th split, or -1 when no splits are stored
 */
public long getLength(int idx) {
  // Guard clause: no splits available.
  if (inputSplits == null) {
    return -1;
  }
  try {
    return inputSplits[idx].getLength();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * {@inheritDoc}
 *
 * <p>Checked exceptions from the wrapped split are rethrown unchecked.
 */
@Override
public long getLength() {
  try {
    return inputSplit.getLength();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
@Override public long getLength() { try { return baseMapRedSplit.getLength(); } catch (IOException e) { LOG.warn("Exception in HCatSplit", e); } return 0; // we errored }
/**
 * Sums the lengths of all wrapped splits; 0 when there are none.
 */
@Override
public long getLength() {
  // Guard clause: nothing stored yet.
  if (inputSplits == null) {
    return 0L;
  }
  long total = 0L;
  try {
    for (InputSplit split : inputSplits) {
      total += split.getLength();
    }
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return total;
}
/**
 * {@inheritDoc}
 *
 * <p>Any failure while querying the wrapped split is rethrown unchecked.
 */
@Override
public long getLength() {
  final long wrappedLength;
  try {
    wrappedLength = inputSplit.getLength();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return wrappedLength;
}
/**
 * Looks up the length of one stored split.
 *
 * @param idx index of the split to query
 * @return the split's length in bytes, or -1 if there are no stored splits
 */
public long getLength(int idx) {
  if (inputSplits == null) {
    return -1; // no splits to index into
  }
  try {
    return inputSplits[idx].getLength();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * Total size of all wrapped splits, in bytes; 0 when none are present.
 */
@Override
public long getLength() {
  long sum = 0L;
  if (inputSplits != null) {
    try {
      for (InputSplit each : inputSplits) {
        sum += each.getLength();
      }
    } catch (Exception e) {
      // Surface split failures as unchecked so the interface stays clean.
      throw new RuntimeException(e);
    }
  }
  return sum;
}
/**
 * Returns the length of whichever underlying split is active (table split or
 * snapshot split), or 0 when the length cannot be determined.
 *
 * <p>The original implementation used {@code return} inside a {@code finally}
 * block, which silently discards ANY exception raised in the {@code try}
 * (javac warns: "finally block does not complete normally"). The best-effort
 * semantics are preserved here, but the swallow is now explicit.
 */
@Override
public long getLength() {
  try {
    return isTableSplit ? tableSplit.getLength() : snapshotSplit.getLength();
  } catch (Exception e) {
    // Length unavailable — fall back to 0, matching the previous behavior.
    return 0L;
  }
}
/**
 * Returns a string representation of a {@link InputSplit}.
 *
 * @param is Hadoop {@link InputSplit}
 * @return its string representation
 */
public static String toStringHadoopInputSplit(InputSplit is) {
  StringBuilder sb = new StringBuilder("HadoopInputSplit: ");
  try {
    sb.append(" Length: ").append(is.getLength());
    sb.append(" , Locations: ");
    for (String loc : is.getLocations()) {
      sb.append(loc).append(" ; ");
    }
  } catch (IOException e) {
    // Pass the throwable itself so the stack trace is preserved; logging
    // only e.getMessage() loses the cause (and can log "null").
    LOG.error(e.getMessage(), e);
  }
  return sb.toString();
}
/**
 * Estimates the size of a split, preferring the columnar projection size when
 * the split — or the split wrapped inside a HiveInputSplit — is a
 * {@link ColumnarSplit}.
 *
 * @param inputSplit the split to size
 * @return estimated size in bytes; {@code Integer.MAX_VALUE} when unknown
 * @throws IOException if the raw split length cannot be read
 */
@Override
public long getEstimatedSize(InputSplit inputSplit) throws IOException {
  long colProjSize = inputSplit.getLength(); // default: raw split length
  if (inputSplit instanceof ColumnarSplit) {
    colProjSize = projectionSize((ColumnarSplit) inputSplit);
  } else if (inputSplit instanceof HiveInputFormat.HiveInputSplit) {
    InputSplit wrapped = ((HiveInputFormat.HiveInputSplit) inputSplit).getInputSplit();
    if (wrapped instanceof ColumnarSplit) {
      colProjSize = projectionSize((ColumnarSplit) wrapped);
    }
  }
  if (colProjSize <= 0) {
    /* columnar splits of unknown size - estimate worst-case */
    return Integer.MAX_VALUE;
  }
  return colProjSize;
}

/** Reads a columnar split's projection size, debug-logging the value. */
private long projectionSize(ColumnarSplit split) {
  long projSize = split.getColumnarProjectionSize();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Estimated column projection size: " + projSize);
  }
  return projSize;
}
}
/**
 * Estimates the size of a split. A {@link ColumnarSplit} (possibly nested
 * inside a HiveInputSplit) reports its columnar projection size; otherwise
 * the raw split length is used.
 *
 * @param inputSplit the split to size
 * @return estimated size in bytes; {@code Integer.MAX_VALUE} when unknown
 * @throws IOException if the raw split length cannot be read
 */
@Override
public long getEstimatedSize(InputSplit inputSplit) throws IOException {
  long estimate = inputSplit.getLength(); // fallback: raw split length
  if (inputSplit instanceof ColumnarSplit) {
    estimate = loggedProjectionSize((ColumnarSplit) inputSplit);
  } else if (inputSplit instanceof HiveInputFormat.HiveInputSplit) {
    InputSplit inner = ((HiveInputFormat.HiveInputSplit) inputSplit).getInputSplit();
    if (inner instanceof ColumnarSplit) {
      estimate = loggedProjectionSize((ColumnarSplit) inner);
    }
  }
  if (estimate <= 0) {
    /* columnar splits of unknown size - estimate worst-case */
    return Integer.MAX_VALUE;
  }
  return estimate;
}

/** Reads a columnar split's projection size, debug-logging the value. */
private long loggedProjectionSize(ColumnarSplit split) {
  long projSize = split.getColumnarProjectionSize();
  if (isDebugEnabled) {
    LOG.debug("Estimated column projection size: " + projSize);
  }
  return projSize;
}
}
final ProjectionPusher pusher) throws IOException, InterruptedException { this.splitLen = oldSplit.getLength(); this.projectionPusher = pusher; this.serDeStats = new SerDeStats();
final ProjectionPusher pusher) throws IOException, InterruptedException { this.splitLen = oldSplit.getLength(); this.projectionPusher = pusher; this.serDeStats = new SerDeStats();
/**
 * {@inheritDoc}
 *
 * <p>Delegates to the wrapped split; failures are rethrown unchecked.
 */
@Override
public long getLength() {
  long result;
  try {
    result = inputSplit.getLength();
  } catch (Exception cause) {
    throw new RuntimeException(cause);
  }
  return result;
}
/**
 * Length of the split at position {@code idx}.
 *
 * @param idx position of the split to query
 * @return that split's length in bytes, or -1 if no splits are stored
 */
public long getLength(int idx) {
  if (inputSplits == null) {
    return -1;
  }
  try {
    return inputSplits[idx].getLength();
  } catch (Exception e) {
    // Wrap checked exceptions (and index errors) for the caller.
    throw new RuntimeException(e);
  }
}
/** * test DBInputFormat class. Class should split result for chunks * @throws Exception */ @Test(timeout = 10000) public void testDBInputFormat() throws Exception { JobConf configuration = new JobConf(); setupDriver(configuration); DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>(); format.setConf(configuration); format.setConf(configuration); DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10); Reporter reporter = mock(Reporter.class); RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader( splitter, configuration, reporter); configuration.setInt(MRJobConfig.NUM_MAPS, 3); InputSplit[] lSplits = format.getSplits(configuration, 3); assertEquals(5, lSplits[0].getLength()); assertEquals(3, lSplits.length); // test reader .Some simple tests assertEquals(LongWritable.class, reader.createKey().getClass()); assertEquals(0, reader.getPos()); assertEquals(0, reader.getProgress(), 0.001); reader.close(); }