/**
 * Adds {@code incr} to the backing accumulator.
 *
 * @param incr the amount to add (may be negative)
 */
public void increment(long incr) {
    accumulator.add(incr);
}
// Count matching lines on the executors via the shared accumulator,
// then read the total back on the driver. Lambda form of the original
// anonymous VoidFunction — identical behavior.
rdd.foreach(line -> {
    if (line.contains("KK6JKQ")) {
        count.add(1);
    }
});
System.out.println("Lines with 'KK6JKQ': " + count.value());
// NOTE(review): fragment — the flatMap(...) call that this anonymous
// FlatMapFunction is passed to opens before this excerpt (hence the
// unmatched `});`). The function splits each line on single spaces and,
// as a side effect, bumps the blankLines accumulator for empty lines;
// the result RDD is then saved under outputDir + "/callsigns".
new FlatMapFunction<String, String>() { public Iterable<String> call(String line) { if (line.equals("")) { blankLines.add(1); } return Arrays.asList(line.split(" ")); }}); callSigns.saveAsTextFile(outputDir + "/callsigns");
/**
 * Validates and appends {@code value} to the per-key accumulator.
 * Adds are only legal while the vertex program is executing; outside
 * that window the memory is read-only and this method throws.
 *
 * @param key   the memory key (validated by {@code checkKeyValue})
 * @param value the value to accumulate, wrapped in an {@code ObjectWritable}
 * @throws IllegalArgumentException if the key/value pair is rejected
 * @throws IllegalStateException    if called outside vertex-program execution
 */
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    // Braced for consistency with the sibling implementation of this method.
    if (this.inExecute) {
        this.sparkMemory.get(key).add(new ObjectWritable<>(value));
    } else {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
}
/** Bumps the output-record counter by one. */
@Override
public void incrementOutputRecordCount() {
    outputRecordCounter.add(1.0);
}
/** Bumps the error-record counter by one. */
@Override
public void incrementErrorRecordCount() {
    errorRecordCounter.add(1.0);
}
/** Bumps the input-record counter by one. */
@Override
public void incrementInputRecordCount() {
    inputRecordCounter.add(1.0);
}
/**
 * Adds {@code incr} to the backing accumulator.
 *
 * @param incr the value to accumulate
 */
public void increment(T incr) {
    accumulator.add(incr);
}
/**
 * Adds {@code inc} to the local value and forwards the same delta to the
 * distributed accumulator as a nested {@code {group -> {name -> inc}}} map.
 *
 * @param inc the amount to add
 */
@Override
public void increment(long inc) {
    this.value += inc;
    final ImmutableMap<String, Long> namedDelta = ImmutableMap.of(name, inc);
    accum.add(ImmutableMap.<String, Map<String, Long>>of(group, namedDelta));
}
/**
 * Sets the counter to {@code newValue}, reporting only the difference from
 * the previously recorded value to the distributed accumulator.
 *
 * @param newValue the absolute value to set
 */
@Override
public void setValue(long newValue) {
    // Accumulators are additive, so publish the delta rather than the total.
    final long delta = newValue - value;
    accum.add(ImmutableMap.<String, Map<String, Long>>of(group, ImmutableMap.of(name, delta)));
    this.value = newValue;
}
}
/**
 * Sets the counter to {@code newValue}, reporting only the difference from
 * the previously recorded value to the distributed accumulator.
 *
 * @param newValue the absolute value to set
 */
@Override
public void setValue(long newValue) {
    // Accumulators are additive, so publish the delta rather than the total.
    final long delta = newValue - value;
    accum.add(ImmutableMap.<String, Map<String, Long>>of(group, ImmutableMap.of(name, delta)));
    this.value = newValue;
}
}
/**
 * Adds {@code inc} to the local value and forwards the same delta to the
 * distributed accumulator as a nested {@code {group -> {name -> inc}}} map.
 *
 * @param inc the amount to add
 */
@Override
public void increment(long inc) {
    this.value += inc;
    final ImmutableMap<String, Long> namedDelta = ImmutableMap.of(name, inc);
    accum.add(ImmutableMap.<String, Map<String, Long>>of(group, namedDelta));
}
/**
 * Persists one record via the writer, logs progress every 1000 records,
 * and bumps the distributed total counter.
 *
 * @param arg0 the record to persist
 * @throws Exception if the writer fails
 */
@Override
public void call(Record arg0) throws Exception {
    dbWriter.putSample(arg0);
    count++;
    if (count % 1000 == 0) {
        System.out.println(String.format("%d images saved...", count));
    }
    totalCount.add(1);
}
/**
 * Validates and appends {@code value} to the per-key accumulator.
 * Adds are only legal while the vertex program is executing; outside
 * that window the memory is read-only and this method throws.
 *
 * @param key   the memory key (validated by {@code checkKeyValue})
 * @param value the value to accumulate, wrapped in an {@code ObjectWritable}
 * @throws IllegalArgumentException if the key/value pair is rejected
 * @throws IllegalStateException    if called outside vertex-program execution
 */
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    // Braced for consistency with the sibling implementation of this method.
    if (this.inExecute) {
        this.sparkMemory.get(key).add(new ObjectWritable<>(value));
    } else {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
}
/**
 * Validates and appends {@code value} to the per-key accumulator.
 * Adds are only legal while the vertex program is executing.
 *
 * @param key   the memory key (validated by {@code checkKeyValue})
 * @param value the value to accumulate, wrapped in an {@code ObjectWritable}
 */
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    // Guard clause: reject adds outside vertex-program execution up front.
    if (!this.inExecute) {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
    this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}
/**
 * Accepts the RDD produced upstream. When this instance is marked for
 * instrumentation and the RDD is not already cached, wraps it in a
 * pass-through filter that counts every data quantum via a Spark
 * accumulator; otherwise stores the RDD unchanged.
 *
 * @param rdd           the incoming RDD
 * @param sparkExecutor provides the SparkContext used to create the accumulator
 * @throws RheemException declared by the interface; not thrown here directly
 */
public void accept(JavaRDD<?> rdd, SparkExecutor sparkExecutor) throws RheemException {
    if (!this.isMarkedForInstrumentation() || this.isRddCached()) {
        this.rdd = rdd;
        return;
    }
    final Accumulator<Integer> counter = sparkExecutor.sc.accumulator(0);
    // Identity filter whose only effect is counting elements as they flow by.
    this.rdd = rdd.filter(dataQuantum -> {
        counter.add(1);
        return true;
    });
    this.accumulator = counter;
}