/** Returns the hosts holding this split's data; empty on lookup failure (best effort). */
@Override
public String[] getHostnames() {
    try {
        return this.hadoopInputSplit.getLocations();
    } catch (IOException ignored) {
        // Locality is only a hint — an I/O failure degrades to "no preferred hosts".
        return new String[0];
    }
}
/**
 * Reports storage locations using the first wrapped split.
 * NOTE(review): presumably all wrapped splits share locations, making the
 * first one representative — confirm against the splitting logic.
 */
@Override
public String[] getLocations() throws IOException {
    assert inputSplits != null && inputSplits.length > 0;
    return inputSplits[0].getLocations();
}
/** Delegates the host-location lookup to the wrapped split. */
@Override
public String[] getLocations() throws IOException {
    return inputSplit.getLocations();
}
/**
 * Exposes the locations of the first underlying split.
 * NOTE(review): assumes every wrapped split reports the same locations —
 * verify where {@code inputSplits} is populated.
 */
@Override
public String[] getLocations() throws IOException {
    assert inputSplits != null && inputSplits.length > 0;
    return inputSplits[0].getLocations();
}
/** Forwards location resolution to the underlying Hadoop split. */
@Override
public String[] getLocations() throws IOException {
    return inputSplit.getLocations();
}
/** Pass-through: the delegate split owns the location information. */
public String[] getLocations() throws IOException {
    return delegate.getLocations();
}
/** Returns the wrapped split's storage locations unchanged. */
public String[] getLocations() throws IOException {
    return inputSplit.getLocations();
}
@Override public String[] getLocations() { try { return baseMapRedSplit.getLocations(); } catch (IOException e) { LOG.warn("Exception in HCatSplit", e); } return new String[0]; // we errored }
/** Filters out a lone "localhost" entry, which carries no useful locality hint. */
@Override
public String[] getLocations(InputSplit split) throws IOException {
    if (split == null) {
        return null;
    }
    String[] hosts = split.getLocations();
    boolean onlyLocalhost = hosts != null && hosts.length == 1 && "localhost".equals(hosts[0]);
    return onlyLocalhost ? ArrayUtils.EMPTY_STRING_ARRAY : hosts;
}
};
/** Treats a single "localhost" location as "no locality information". */
@Override
public String[] getLocations(InputSplit split) throws IOException {
    if (split == null) {
        return null;
    }
    String[] hosts = split.getLocations();
    if (hosts != null && hosts.length == 1 && "localhost".equals(hosts[0])) {
        return ArrayUtils.EMPTY_STRING_ARRAY;
    }
    return hosts;
}
};
/**
 * Gathers the distinct storage locations reported across all input splits.
 *
 * @return collection of unique locations where input splits are stored
 */
public Collection<String> getLocations() throws IOException {
    Set<String> unique = new HashSet<>();
    for (InputSplit split : inputSplits) {
        for (String location : split.getLocations()) {
            unique.add(location);
        }
    }
    return unique;
}
/** Routes the location lookup to whichever underlying split this instance wraps. */
@Override
public String[] getLocations() throws IOException {
    if (isTableSplit) {
        return tableSplit.getLocations();
    }
    return snapshotSplit.getLocations();
}
}
/**
 * Returns a string representation of a {@link InputSplit}.
 *
 * @param is Hadoop {@link InputSplit}
 * @return its string representation
 */
public static String toStringHadoopInputSplit(InputSplit is) {
    StringBuilder sb = new StringBuilder("HadoopInputSplit: ");
    try {
        sb.append(" Length: ").append(is.getLength());
        sb.append(" , Locations: ");
        for (String loc : is.getLocations()) {
            sb.append(loc).append(" ; ");
        }
    } catch (IOException e) {
        // Log the throwable itself: e.getMessage() may be null and always drops the stack trace.
        LOG.error("Failed to read length/locations of input split", e);
    }
    return sb.toString();
}
/** Builds a Mockito mock split whose getLocations() yields the given hosts. */
private InputSplit createMockInputSplit(String[] locations) throws IOException {
    InputSplit split = mock(InputSplit.class);
    doReturn(locations).when(split).getLocations();
    return split;
}
// Streams the hosts of a legacy mapred InputSplit. NOTE(review): this lambda is
// truncated in the visible chunk — the catch/close of the try is outside this view.
(final org.apache.hadoop.mapred.InputSplit split) -> { try { return Arrays.stream(split.getLocations());
/**
 * Resolves a single preferred host for a FileSplit via the location table;
 * non-file splits fall back to their own reported locations.
 */
@Override
public String[] getLocations(InputSplit split) throws IOException {
    if (!(split instanceof FileSplit)) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Split: " + split + " is not a FileSplit. Using default locations");
        }
        return split.getLocations();
    }
    FileSplit fileSplit = (FileSplit) split;
    // Description string passed through to determineLocation — kept byte-identical.
    String splitDesc = "Split at " + fileSplit.getPath() + " with offset= "
        + fileSplit.getStart() + ", length=" + fileSplit.getLength();
    String host = locations.get(determineLocation(
        locations, fileSplit.getPath().toString(), fileSplit.getStart(), splitDesc));
    if (host == null) {
        return null;
    }
    return new String[] { host };
}
// Prefer a rack recorded on a TezGroupedSplit; otherwise fall back to host-based hints.
// NOTE(review): split.getLocations() is invoked twice (null-check and hint creation);
// the closing braces of these if-blocks lie outside the visible chunk.
String rack = (split instanceof TezGroupedSplit) ? ((TezGroupedSplit) split).getRack() : null; if (rack == null) { String [] locations = split.getLocations(); if (locations != null && locations.length > 0) { locationHints.add(TaskLocationHint .createTaskLocationHint(new LinkedHashSet<String>(Arrays.asList(split .getLocations())), null));
/**
 * Maps a FileSplit to its single preferred host from the location table;
 * other split types keep their default locations.
 */
@Override
public String[] getLocations(InputSplit split) throws IOException {
    if (!(split instanceof FileSplit)) {
        // Uses the cached debug flag rather than querying the logger each call.
        if (isDebugEnabled) {
            LOG.debug("Split: " + split + " is not a FileSplit. Using default locations");
        }
        return split.getLocations();
    }
    FileSplit fsplit = (FileSplit) split;
    // Runtime string kept byte-identical; it is forwarded to determineLocation.
    String splitDesc = "Split at " + fsplit.getPath() + " with offset= "
        + fsplit.getStart() + ", length=" + fsplit.getLength();
    String location = locations.get(determineLocation(
        locations, fsplit.getPath().toString(), fsplit.getStart(), splitDesc));
    return (location != null) ? new String[] { location } : null;
}
/**
 * Computes the job's input splits and wraps each as a HadoopInputSplit.
 *
 * @param jobConf Job configuration.
 * @return Collection of mapped splits.
 * @throws IgniteCheckedException If mapping failed.
 */
public static Collection<HadoopInputSplit> splitJob(JobConf jobConf) throws IgniteCheckedException {
    try {
        InputFormat<?, ?> format = jobConf.getInputFormat();

        assert format != null;

        InputSplit[] nativeSplits = format.getSplits(jobConf, 0);

        Collection<HadoopInputSplit> mapped = new ArrayList<>(nativeSplits.length);

        int idx = 0;

        for (InputSplit nativeSplit : nativeSplits) {
            if (nativeSplit instanceof FileSplit) {
                // File splits carry path/offset/length directly into a file block.
                FileSplit fileSplit = (FileSplit) nativeSplit;

                mapped.add(new HadoopFileBlock(fileSplit.getLocations(), fileSplit.getPath().toUri(),
                    fileSplit.getStart(), fileSplit.getLength()));
            }
            else
                mapped.add(HadoopUtils.wrapSplit(idx, nativeSplit, nativeSplit.getLocations()));

            idx++;
        }

        return mapped;
    }
    catch (IOException e) {
        throw new IgniteCheckedException(e);
    }
}
// Test assertion fragment: both branches expect non-null locations; the else
// branch additionally expects an empty array. Enclosing method is outside this chunk.
Assert.assertTrue(split.getLocations() != null); } else { Assert.assertTrue(split.getLocations() != null && split.getLocations().length == 0);