@Override
public List<Split> getSplits() {
  // delegate split computation to the underlying Table
  return table.getSplits();
}
@Override
public List<Split> getSplits() {
  // delegate split computation to the underlying KeyValueTable
  return kvTable.getSplits();
}
@Override
public List<Split> getSplits(int numSplits, byte[] start, byte[] stop) {
  // delegate, limiting the number of splits and the key range
  return kvTable.getSplits(numSplits, start, stop);
}
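// The delegating overrides above typically live in a custom dataset that
// embeds a Table or KeyValueTable and exposes it for batch reads. A minimal
// sketch of that pattern follows; the class and field names are assumptions,
// while BatchReadable, AbstractDataset, and @EmbeddedDataset are real CDAP API:
public class WrappedKeyValueDataset extends AbstractDataset
  implements BatchReadable<byte[], byte[]> {

  private final KeyValueTable kvTable;

  public WrappedKeyValueDataset(DatasetSpecification spec,
                                @EmbeddedDataset("kv") KeyValueTable kvTable) {
    super(spec.getName(), kvTable);
    this.kvTable = kvTable;
  }

  @Override
  public List<Split> getSplits() {
    // delegate split computation to the embedded table
    return kvTable.getSplits();
  }

  @Override
  public SplitReader<byte[], byte[]> createSplitReader(Split split) {
    return kvTable.createSplitReader(split);
  }
}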
@Override
public void apply() throws Exception {
  // get the splits for the table
  List<Split> splits = t.getSplits();
  // read each split and verify the keys
  SortedSet<Long> keysToVerify = Sets.newTreeSet(keysWritten);
  verifySplits(t, splits, keysToVerify);
}
});
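// The trailing "});" above closes an anonymous TransactionExecutor.Subroutine,
// so the verification runs inside a transaction. A hedged sketch of the
// surrounding call (the executor variable is an assumption, not shown here):
txExecutor.execute(new TransactionExecutor.Subroutine() {
  @Override
  public void apply() throws Exception {
    // ... verification body as above ...
  }
});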
@Override
public List<Split> getSplits() {
  track(getName(), "split");
  // return a single split for testing
  return t.getSplits(1, null, null);
}
}
@Override
public void apply() throws Exception {
  // get a specific number of splits for a subrange of the keys
  SortedSet<Long> keysToVerify = Sets.newTreeSet(keysWritten.subSet(0x10000000L, 0x40000000L));
  List<Split> splits = t.getSplits(5, Bytes.toBytes(0x10000000L), Bytes.toBytes(0x40000000L));
  Assert.assertTrue(splits.size() <= 5);
  // read each split and verify the keys
  verifySplits(t, splits, keysToVerify);
}
});
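// Hedged sketch of the verifySplits helper used by the two checks above; its
// real implementation is not shown in this section, so the signature and the
// SplitReader loop are assumptions. It reads every split and checks off each
// expected key, asserting that no key is seen twice and none are missed:
private void verifySplits(KeyValueTable t, List<Split> splits,
                          SortedSet<Long> keysToVerify) throws InterruptedException {
  for (Split split : splits) {
    SplitReader<byte[], byte[]> reader = t.createSplitReader(split);
    reader.initialize(split);
    while (reader.nextKeyValue()) {
      long key = Bytes.toLong(reader.getCurrentKey());
      // a key may appear in exactly one split
      Assert.assertTrue(keysToVerify.remove(key));
    }
    reader.close();
  }
  // every expected key must be covered by some split
  Assert.assertTrue(keysToVerify.isEmpty());
}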
/**
 * Define a MapReduce job.
 *
 * @throws Exception if the job cannot be configured
 */
@Override
public void initialize() throws Exception {
  MapReduceContext context = getContext();
  LOG.info("Hello " + loadTestClasses());
  Job job = context.getHadoopJob();
  job.setMapperClass(SimpleMapper.class);
  job.setReducerClass(SimpleReducer.class);
  context.addInput(Input.ofDataset("simpleInputDataset", input.getSplits()));
  context.addOutput(Output.ofDataset("simpleOutputDataset"));
}
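// SimpleMapper and SimpleReducer are wired into the job above but not shown
// in this section. A hedged identity-style sketch, assuming a byte[]-keyed
// input dataset; the generic types and method bodies are assumptions:
public static class SimpleMapper extends Mapper<byte[], byte[], Text, Text> {
  @Override
  protected void map(byte[] key, byte[] value, Context context)
      throws IOException, InterruptedException {
    // pass each record through unchanged, as text
    context.write(new Text(Bytes.toString(key)), new Text(Bytes.toString(value)));
  }
}

public static class SimpleReducer extends Reducer<Text, Text, byte[], byte[]> {
  @Override
  protected void reduce(Text key, Iterable<Text> values, Context context)
      throws IOException, InterruptedException {
    // write each value back out under its key
    for (Text value : values) {
      context.write(Bytes.toBytes(key.toString()), Bytes.toBytes(value.toString()));
    }
  }
}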
@Override
public void initialize() throws Exception {
  MapReduceContext context = getContext();
  Job job = context.getHadoopJob();
  job.setMapperClass(TokenMapper.class);
  job.setNumReduceTasks(0);

  // seed the input dataset with the text passed in as a runtime argument,
  // then use it (as a single split) as the job's input
  String inputText = getContext().getRuntimeArguments().get("input.text");
  Preconditions.checkNotNull(inputText);
  KeyValueTable kvTable = getContext().getDataset(INPUT);
  kvTable.write("key", inputText);
  context.addInput(Input.ofDataset(INPUT, kvTable.getSplits(1, null, null)));

  // each requested output dataset either writes to a fixed partition
  // (if "<name>.output.partition" is given) or is partitioned dynamically
  String outputDatasets = getContext().getRuntimeArguments().get("output.datasets");
  outputDatasets = outputDatasets != null ? outputDatasets : PFS;
  for (String outputName : outputDatasets.split(",")) {
    String outputPartition = getContext().getRuntimeArguments().get(outputName + ".output.partition");
    PartitionKey outputPartitionKey = outputPartition == null
      ? null
      : PartitionKey.builder().addField("number", Integer.parseInt(outputPartition)).build();
    Map<String, String> outputArguments = new HashMap<>();
    if (outputPartitionKey != null) {
      PartitionedFileSetArguments.setOutputPartitionKey(outputArguments, outputPartitionKey);
    } else {
      PartitionedFileSetArguments.setDynamicPartitioner(outputArguments, KeyPartitioner.class);
    }
    context.addOutput(Output.ofDataset(outputName, outputArguments));
  }
}
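// Hedged sketch of the KeyPartitioner referenced above for the dynamic-
// partitioning case: a CDAP DynamicPartitioner that derives the output
// partition from each record. The key/value types and the rule for computing
// "number" are assumptions; the field name must match the PartitionedFileSet's
// partitioning:
public static final class KeyPartitioner extends DynamicPartitioner<String, Integer> {
  @Override
  public PartitionKey getPartitionKey(String key, Integer value) {
    // example rule: partition by the first character of the key
    return PartitionKey.builder()
      .addField("number", (int) key.charAt(0))
      .build();
  }
}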