when(context.nextKey()).thenAnswer(new Answer<Boolean>() { @Override public Boolean answer(final InvocationOnMock invocation) {
@Override protected void setup(Context context) throws IOException, InterruptedException { if (!context.nextKey() || context.getCurrentKey().getType() != GridmixKey.REDUCE_SPEC) { throw new IOException("Missing reduce spec"); } for (NullWritable ignored : context.getValues()) { final GridmixKey spec = context.getCurrentKey(); duration += spec.getReduceOutputBytes(); } long sleepInterval = context.getConfiguration().getLong(GRIDMIX_SLEEP_INTERVAL, 5); final long RINTERVAL = TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS); //This is to stop accumulating deviation from expected sleep time //over a period of time. long start = System.currentTimeMillis(); long slept = 0L; long sleep = 0L; while (slept < duration) { final long rem = duration - slept; sleep = Math.min(rem, RINTERVAL); context.setStatus("Sleeping... " + rem + " ms left"); TimeUnit.MILLISECONDS.sleep(sleep); slept = System.currentTimeMillis() - start; } }
@Override
public void run(Context context) throws IOException, InterruptedException {
  // Single pass over the input: total every key and every associated value.
  double totalOfKeys = 0.0;
  double totalOfValues = 0.0;
  while (context.nextKey()) {
    totalOfKeys += context.getCurrentKey().get();
    for (DoubleWritable v : context.getValues()) {
      totalOfValues += v.get();
    }
  }
  // Emit exactly one record carrying both aggregate sums.
  outKey.set(totalOfKeys);
  outValue.set(totalOfValues);
  context.write(outKey, outValue);
}
}
@Override protected void setup(Context context) throws IOException, InterruptedException { if (!context.nextKey() || context.getCurrentKey().getType() != GridmixKey.REDUCE_SPEC) { throw new IOException("Missing reduce spec");
/**
 * Drives this reduce task: forwards each key group to {@code reduce(...)},
 * then — only if at least one group was seen — fires close-group callbacks
 * for every nesting depth, deepest first, before cleanup.
 * NOTE(review): relies on fields declared elsewhere in this class
 * (firstRun, handler, groupSchema, maxDepth, minDepth, this.context,
 * collector) — semantics assumed from usage, confirm against the class.
 */
@Override public final void run(Context context) throws IOException, InterruptedException {
  try {
    setup(context);
    firstRun = true;
    boolean anyValue = false;
    while(context.nextKey()) {
      anyValue = true;
      reduce(context.getCurrentKey(), context.getValues(), context);
    }
    // close last group. Only if there was any value.
    if (anyValue) {
      // Depths are closed from maxDepth down to minDepth so inner groups
      // are closed before the outer groups that contain them.
      for(int i = maxDepth; i >= minDepth; i--) {
        handler.onCloseGroup(i, groupSchema.getField(i).getName(), context
            .getCurrentKey().datum(), this.context, collector);
      }
    }
    cleanup(context);
  } catch(TupleMRException e) {
    // Checked framework exception is rethrown unchecked with cause preserved.
    throw new RuntimeException(e);
  }
}
/** * Advanced application writers can use the * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to * control how the reduce task works. */ public void run(Context context) throws IOException, InterruptedException { setup(context); try { while (context.nextKey()) { reduce(context.getCurrentKey(), context.getValues(), context); // If a back up store is used, reset it Iterator<VALUEIN> iter = context.getValues().iterator(); if(iter instanceof ReduceContext.ValueIterator) { ((ReduceContext.ValueIterator<VALUEIN>)iter).resetBackupStore(); } } } finally { cleanup(context); } } }
/** * Advanced application writers can use the * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to * control how the reduce task works. */ public void run(Context context) throws IOException, InterruptedException { setup(context); try { while (context.nextKey()) { reduce(context.getCurrentKey(), context.getValues(), context); // If a back up store is used, reset it Iterator<VALUEIN> iter = context.getValues().iterator(); if(iter instanceof ReduceContext.ValueIterator) { ((ReduceContext.ValueIterator<VALUEIN>)iter).resetBackupStore(); } } } finally { cleanup(context); } } }
/** * Advanced application writers can use the * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to * control how the reduce task works. */ public void run(Context context) throws IOException, InterruptedException { setup(context); try { while (context.nextKey()) { reduce(context.getCurrentKey(), context.getValues(), context); // If a back up store is used, reset it Iterator<VALUEIN> iter = context.getValues().iterator(); if(iter instanceof ReduceContext.ValueIterator) { ((ReduceContext.ValueIterator<VALUEIN>)iter).resetBackupStore(); } } } finally { cleanup(context); } } }
@Override public final void run(Context context) throws IOException, InterruptedException { try { setup(context); firstRun = true; boolean anyValue = false; while(context.nextKey()) { anyValue = true; reduce(context.getCurrentKey(), context.getValues(), context); } // close last group. Only if there was any value. if (anyValue) { for(int i = maxDepth; i >= minDepth; i--) { handler.onCloseGroup(i, groupSchema.getField(i).getName(), context .getCurrentKey().datum(), this.context, collector); } } cleanup(context); } catch(TupleMRException e) { throw new RuntimeException(e); } }
/** * Advanced application writers can use the * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to * control how the reduce task works. */ public void run(Context context) throws IOException, InterruptedException { setup(context); try { while (context.nextKey()) { reduce(context.getCurrentKey(), context.getValues(), context); // If a back up store is used, reset it Iterator<VALUEIN> iter = context.getValues().iterator(); if(iter instanceof ReduceContext.ValueIterator) { ((ReduceContext.ValueIterator<VALUEIN>)iter).resetBackupStore(); } } } finally { cleanup(context); } } }
/** * Advanced application writers can use the * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to * control how the reduce task works. */ @SuppressWarnings("unchecked") public void run(Context context) throws IOException, InterruptedException { setup(context); while (context.nextKey()) { reduce(context.getCurrentKey(), context.getValues(), context); // If a back up store is used, reset it ((ReduceContext.ValueIterator) (context.getValues().iterator())).resetBackupStore(); } cleanup(context); } }
@Override
public void run(Context context) throws IOException, InterruptedException {
  // Walk all key groups once, accumulating the key total and value total.
  double sumOfKeys = 0.0;
  double sumOfValues = 0.0;
  while (context.nextKey()) {
    sumOfKeys += context.getCurrentKey().get();
    for (DoubleWritable val : context.getValues()) {
      sumOfValues += val.get();
    }
  }
  // Write a single output record containing both totals.
  outKey.set(sumOfKeys);
  outValue.set(sumOfValues);
  context.write(outKey, outValue);
}
}
@Override
public void run(Context context) throws IOException, InterruptedException {
  // Aggregate every key and every value in one traversal of the input.
  double keyTotal = 0.0;
  double valueTotal = 0.0;
  while (context.nextKey()) {
    keyTotal += context.getCurrentKey().get();
    for (DoubleWritable each : context.getValues()) {
      valueTotal += each.get();
    }
  }
  // Emit the pair of totals as the reducer's sole output record.
  outKey.set(keyTotal);
  outValue.set(valueTotal);
  context.write(outKey, outValue);
}
}
/**
 * Drives the reduce phase, reporting progress before each key group.
 * Advanced application writers can override
 * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} to control
 * how the reduce task works.
 *
 * <p>Fix: {@code cleanup} is now invoked from a {@code finally} block so it
 * runs even when {@code reduce} throws, matching the standard
 * {@code Reducer.run} contract used elsewhere.
 */
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
    while (context.nextKey()) {
      // Signal liveness to the framework so long-running groups are not
      // mistaken for a hung task.
      context.progress();
      reduce(context.getCurrentKey(), context.getValues(), context);
    }
  } finally {
    cleanup(context);
  }
}
}