/**
 * Serializes the wrapped range through its Writable form; the {@code @JsonValue}
 * annotation makes this byte array the JSON representation of this object.
 *
 * @return the range serialized to a byte array
 * @throws IOException if the range fails to serialize
 */
@JsonValue
public byte[] toBytes() throws IOException {
  ByteArrayDataOutput buffer = ByteStreams.newDataOutput();
  range.write(buffer);
  return buffer.toByteArray();
}
/**
 * Serializes this object: parent state first, then a count-prefixed list of ranges.
 *
 * @param out the sink to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  out.writeInt(ranges.size());
  for (Range range : ranges) {
    range.write(out);
  }
}
/**
 * Sets the input ranges to scan on all input tables for this job. If not set, the entire table
 * will be scanned.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param ranges
 *          the ranges that will be mapped over
 * @throws IllegalArgumentException
 *           if the ranges cannot be encoded into base 64
 * @since 1.6.0
 */
public static void setRanges(Class<?> implementingClass, Configuration conf,
    Collection<Range> ranges) {
  checkArgument(ranges != null, "ranges is null");
  ArrayList<String> encoded = new ArrayList<>(ranges.size());
  try {
    for (Range range : ranges) {
      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      range.write(new DataOutputStream(buffer));
      encoded.add(Base64.getEncoder().encodeToString(buffer.toByteArray()));
    }
    conf.setStrings(enumToConfKey(implementingClass, ScanOpts.RANGES),
        encoded.toArray(new String[0]));
  } catch (IOException ex) {
    throw new IllegalArgumentException("Unable to encode ranges to Base64", ex);
  }
}
// NOTE(review): truncated match — serializes the range, then the table name and table id.
@Override public void write(DataOutput out) throws IOException { range.write(out); out.writeUTF(tableName); out.writeUTF(tableId);
// Fragment: when ranges exist, write a count-prefixed list of them; the else-branch writes 0.
dataOutput.writeInt(ranges.size()); for (Range range : ranges) range.write(dataOutput); } else { dataOutput.writeInt(0);
/**
 * Serializes this split: a count-prefixed list of ranges followed by a count-prefixed
 * list of hosting locations.
 *
 * @param out the sink to write to
 * @throws IOException if the underlying stream fails
 */
public void write(DataOutput out) throws IOException {
  out.writeInt(ranges.size());
  for (Range range : ranges) {
    range.write(out);
  }
  out.writeInt(locations.length);
  // Enhanced for-loop replaces the manual index loop; iteration order is unchanged.
  for (String location : locations) {
    out.writeUTF(location);
  }
}
/**
 * Writes the superclass state, then each range preceded by the total range count.
 *
 * @param out the sink to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  out.writeInt(ranges.size());
  for (Range current : ranges) {
    current.write(out);
  }
}
/**
 * Serializes inherited state followed by the number of ranges and each range in turn.
 *
 * @param out the sink to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  final int count = ranges.size();
  out.writeInt(count);
  for (Range element : ranges) {
    element.write(out);
  }
}
/**
 * Serializes a range through its Writable form and returns the bytes as a Base64 string.
 *
 * @param range the range to encode
 * @return the Base64 encoding of the range's serialized form
 * @throws IOException if serialization fails for any reason
 */
public static String encodeRange(Range range) throws IOException {
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  // try-with-resources replaces the manual finally-close, so a failure inside
  // write() can no longer be masked by an exception thrown while closing.
  // (Closing the ByteArrayOutputStream itself is a documented no-op, so it is dropped.)
  try (DataOutputStream data = new DataOutputStream(bytes)) {
    range.write(data);
  } catch (Exception e) {
    // Preserve the original contract: any failure surfaces as an IOException.
    throw new IOException(e);
  }
  // encodeBase64String avoids new String(byte[]), which used the platform default
  // charset; Base64 output is ASCII, so this also removes the charset dependency.
  return Base64.encodeBase64String(bytes.toByteArray());
}
/**
 * Emits the parent serialization, then the range count and every range, in order.
 *
 * @param out the sink to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  out.writeInt(ranges.size());
  for (Range next : ranges) {
    next.write(out);
  }
}
/**
 * Serializes this split: the file path as a Text string, the start offset and length,
 * then a count-prefixed list of ranges.
 *
 * @param out the sink to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  Text.writeString(out, file.toString());
  out.writeLong(start);
  out.writeLong(length);
  final int rangeCount = ranges.size();
  out.writeInt(rangeCount);
  for (Range r : ranges) {
    r.write(out);
  }
}
/**
 * Sets the input ranges to scan on all input tables for this job. If not set, the entire table
 * will be scanned.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param ranges
 *          the ranges that will be mapped over
 * @throws IllegalArgumentException
 *           if the ranges cannot be encoded into base 64
 * @since 1.6.0
 */
public static void setRanges(Class<?> implementingClass, Configuration conf,
    Collection<Range> ranges) {
  checkArgument(ranges != null, "ranges is null");
  ArrayList<String> serialized = new ArrayList<>(ranges.size());
  try {
    for (Range current : ranges) {
      ByteArrayOutputStream sink = new ByteArrayOutputStream();
      current.write(new DataOutputStream(sink));
      serialized.add(Base64.getEncoder().encodeToString(sink.toByteArray()));
    }
    String key = enumToConfKey(implementingClass, ScanOpts.RANGES);
    conf.setStrings(key, serialized.toArray(new String[0]));
  } catch (IOException ex) {
    throw new IllegalArgumentException("Unable to encode ranges to Base64", ex);
  }
}
/**
 * Sets the input ranges to scan on all input tables for this job. If not set, the entire table
 * will be scanned.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param ranges
 *          the ranges that will be mapped over
 * @throws IllegalArgumentException
 *           if the ranges cannot be encoded into base 64
 * @since 1.6.0
 */
public static void setRanges(Class<?> implementingClass, Configuration conf,
    Collection<Range> ranges) {
  checkArgument(ranges != null, "ranges is null");
  ArrayList<String> base64Ranges = new ArrayList<>(ranges.size());
  Base64.Encoder encoder = Base64.getEncoder();
  try {
    for (Range range : ranges) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      range.write(new DataOutputStream(out));
      base64Ranges.add(encoder.encodeToString(out.toByteArray()));
    }
    conf.setStrings(enumToConfKey(implementingClass, ScanOpts.RANGES),
        base64Ranges.toArray(new String[0]));
  } catch (IOException ex) {
    throw new IllegalArgumentException("Unable to encode ranges to Base64", ex);
  }
}
/**
 * Sets the input ranges to scan on all input tables for this job. If not set, the entire table
 * will be scanned.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param ranges
 *          the ranges that will be mapped over
 * @throws IllegalArgumentException
 *           if the ranges cannot be encoded into base 64
 * @since 1.6.0
 */
public static void setRanges(Class<?> implementingClass, Configuration conf,
    Collection<Range> ranges) {
  checkArgument(ranges != null, "ranges is null");
  ArrayList<String> encodedRanges = new ArrayList<>(ranges.size());
  try {
    for (Range range : ranges) {
      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      range.write(new DataOutputStream(buffer));
      encodedRanges.add(Base64.encodeBase64String(buffer.toByteArray()));
    }
    conf.setStrings(enumToConfKey(implementingClass, ScanOpts.RANGES),
        encodedRanges.toArray(new String[0]));
  } catch (IOException ex) {
    throw new IllegalArgumentException("Unable to encode ranges to Base64", ex);
  }
}
// NOTE(review): truncated match — serializes the range, then the table name and table id.
@Override public void write(DataOutput out) throws IOException { range.write(out); out.writeUTF(tableName); out.writeUTF(tableId);
// Serialize this range into the output stream via its Writable-style write method.
r.write(outStream);
// NOTE(review): truncated match — serializes the range, then the table name and table id.
@Override public void write(DataOutput out) throws IOException { range.write(out); out.writeUTF(tableName); out.writeUTF(tableId);
// Fragment: when ranges exist, write a count-prefixed list of them; the else-branch writes 0.
dataOutput.writeInt(ranges.size()); for (Range range : ranges) range.write(dataOutput); } else { dataOutput.writeInt(0);
// Fragment: when ranges exist, write a count-prefixed list of them; the else-branch writes 0.
dataOutput.writeInt(ranges.size()); for (Range range : ranges) range.write(dataOutput); } else { dataOutput.writeInt(0);
// Fragment: when ranges exist, write a count-prefixed list of them; the else-branch writes 0.
dataOutput.writeInt(ranges.size()); for (Range range : ranges) range.write(dataOutput); } else { dataOutput.writeInt(0);