// Distribute the patent-archive download across the cluster: each executor runs
// DownloadToAzureFn against the entries in its partition.
// NOTE(review): the meaning of the `true` flag is not visible here — presumably an
// overwrite/force option; confirm against DownloadToAzureFn's constructor.
rdd.foreach(new DownloadToAzureFn(baseDirZips, true));
long end = System.currentTimeMillis();
// Elapsed wall-clock time in whole seconds (integer division truncates).
log.info("*** Completed download of patent data in {} sec ***", (end-start)/1000);
/**
 * Verifies that foreach visits every element of the RDD.
 *
 * <p>Fixed: the original incremented a driver-side field ({@code foreachCalls})
 * from inside the foreach closure. Spark serializes closures and ships them to
 * executors, so mutations of captured driver state are never propagated back —
 * the test only passed in local mode. A {@code LongAccumulator} is the supported
 * way to aggregate executor-side updates on the driver.
 */
@Test
public void foreach() {
  final org.apache.spark.util.LongAccumulator accum = sc.sc().longAccumulator();
  JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello", "World"));
  rdd.foreach(x -> accum.add(1));
  Assert.assertEquals(2, accum.value().intValue());
}
/**
 * Verifies that foreach visits every element of the RDD.
 *
 * <p>Fixed: the original incremented a driver-side field ({@code foreachCalls})
 * from inside the foreach closure — executor-side mutations of captured driver
 * state never reach the driver on a real cluster. Use a {@code LongAccumulator},
 * the supported mechanism for aggregating updates from executors.
 */
@Test
public void foreachWithAnonymousClass() {
  final org.apache.spark.util.LongAccumulator accum = sc.sc().longAccumulator();
  JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello", "World"));
  rdd.foreach(s -> accum.add(1));
  Assert.assertEquals(2, accum.value().intValue());
}
/**
 * Verifies that foreach visits every element of the RDD.
 *
 * <p>Fixed: counting via a captured driver-side field only works in local mode —
 * Spark serializes the closure to executors and field updates are lost. A
 * {@code LongAccumulator} correctly aggregates the per-element increments.
 */
@Test
public void foreach() {
  final org.apache.spark.util.LongAccumulator accum = sc.sc().longAccumulator();
  JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello", "World"));
  rdd.foreach(x -> accum.add(1));
  Assert.assertEquals(2, accum.value().intValue());
}
/**
 * Verifies that foreach visits every element of the RDD.
 *
 * <p>Fixed: mutating a driver-side counter field from an RDD closure is unsafe —
 * the closure runs on executors and the mutation never returns to the driver.
 * Replaced with a {@code LongAccumulator}.
 */
@Test
public void foreachWithAnonymousClass() {
  final org.apache.spark.util.LongAccumulator accum = sc.sc().longAccumulator();
  JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello", "World"));
  rdd.foreach(s -> accum.add(1));
  Assert.assertEquals(2, accum.value().intValue());
}
/**
 * Verifies that foreach visits every element of the RDD.
 *
 * <p>Fixed: incrementing a driver field inside the foreach lambda relies on the
 * closure executing in-process (local mode only). A {@code LongAccumulator} is
 * the cluster-safe way to count elements from executor closures.
 */
@Test
public void foreachWithAnonymousClass() {
  final org.apache.spark.util.LongAccumulator accum = sc.sc().longAccumulator();
  JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello", "World"));
  rdd.foreach(s -> accum.add(1));
  Assert.assertEquals(2, accum.value().intValue());
}
/**
 * Verifies that foreach visits every element of the RDD.
 *
 * <p>Fixed: the captured-field counter pattern silently yields 0 on a real
 * cluster because closure state is serialized to executors and discarded.
 * Replaced with a {@code LongAccumulator}, which merges executor-side adds
 * back on the driver.
 */
@Test
public void foreach() {
  final org.apache.spark.util.LongAccumulator accum = sc.sc().longAccumulator();
  JavaRDD<String> rdd = sc.parallelize(Arrays.asList("Hello", "World"));
  rdd.foreach(x -> accum.add(1));
  Assert.assertEquals(2, accum.value().intValue());
}
/**
 * Confirms foreach touches every element by counting through a Spark
 * accumulator, which (unlike a captured local) is safely merged back to the
 * driver from executor closures.
 */
@Test
public void foreach() {
  final LongAccumulator hits = sc.sc().longAccumulator();
  JavaRDD<String> words = sc.parallelize(Arrays.asList("Hello", "World"));
  words.foreach(element -> hits.add(1));
  assertEquals(2, hits.value().intValue());
}
/**
 * Counts foreach invocations via a driver-registered accumulator and asserts
 * one invocation per RDD element.
 */
@Test
public void foreach() {
  final LongAccumulator visits = sc.sc().longAccumulator();
  JavaRDD<String> input = sc.parallelize(Arrays.asList("Hello", "World"));
  input.foreach(item -> visits.add(1));
  assertEquals(2, visits.value().intValue());
}
/**
 * Exercises JavaRDD#foreach: the accumulator must end up equal to the number
 * of parallelized elements.
 */
@Test
public void foreach() {
  final LongAccumulator counter = sc.sc().longAccumulator();
  JavaRDD<String> greetings = sc.parallelize(Arrays.asList("Hello", "World"));
  greetings.foreach(greeting -> counter.add(1));
  assertEquals(2, counter.value().intValue());
}
rdd.foreach(new VoidFunction<String>(){ public void call(String line) { if (line.contains("KK6JKQ")) { count.add(1);
@Override public void call(JavaRDD<Map<String, Object>> javaRDD) throws Exception { //System.out.println("Sending Kafka output"); javaRDD.foreach(new VoidFunction<Map<String, Object>>() { private static final long serialVersionUID = 1L; private final ObjectMapper objectMapper = new ObjectMapper(); @Override public void call(Map<String, Object> map) throws Exception { KafkaProducer<String, String> producer = new KafkaProducer<String, String>(kafkaParams); producer.send(new ProducerRecord<>(topic, objectMapper.writeValueAsString(map))); } }); }
// Forces evaluation of the lazily-defined RDD: foreach is a Spark action, and the
// no-op VoidFunction from TranslationUtils discards each element, so the only
// effect is that the RDD's lineage actually executes.
@Override public void action() { // Empty function to force computation of RDD. rdd.foreach(TranslationUtils.emptyVoidFunction()); }
// Forces evaluation of the DStream: for each micro-batch RDD, run the foreach
// action with a no-op consumer so the pipeline executes without producing output.
@Override public void action() { // Force computation of DStream. dStream.foreachRDD(rdd -> rdd.foreach(TranslationUtils.<WindowedValue<T>>emptyVoidFunction())); }
/**
 * Applies {@code consumer} to every element of the distributed collection.
 *
 * <p>Before each element is handed over, the broadcast configuration is
 * re-applied on the executor so the consumer observes the driver's settings.
 *
 * @param consumer serializable callback invoked once per element
 */
@Override
public void forEach(@NonNull SerializableConsumer<? super T> consumer) {
  rdd.foreach(element -> {
    Configurator.INSTANCE.configure(configBroadcast.value());
    consumer.accept(element);
  });
}
/**
 * Aggregates per-write error and total record counts via Spark accumulators and
 * reports them as data-feed metrics. A no-op unless both the write statuses and
 * the metrics sink are present.
 *
 * @param writesStatuses optional RDD of per-write status objects
 */
private void logWriteMetrics(final Optional<JavaRDD<WriteStatus>> writesStatuses) {
  // Guard clause: nothing to measure or nowhere to report it.
  if (!writesStatuses.isPresent() || !this.dataFeedMetrics.isPresent()) {
    return;
  }
  final JavaRDD<WriteStatus> statuses = writesStatuses.get();
  final LongAccumulator totalCount = statuses.rdd().sparkContext().longAccumulator();
  final LongAccumulator errorCount = statuses.rdd().sparkContext().longAccumulator();
  statuses.foreach(writeStatus -> {
    errorCount.add(writeStatus.getFailedRecords().size());
    totalCount.add(writeStatus.getTotalRecords());
  });
  this.dataFeedMetrics.get().createLongMetric(
      DataFeedMetricNames.ERROR_ROWCOUNT, errorCount.value(), this.dataFeedMetricsTags);
  // Successful rows = total minus failures.
  this.dataFeedMetrics.get().createLongMetric(
      DataFeedMetricNames.OUTPUT_ROWCOUNT,
      totalCount.value() - errorCount.value(),
      this.dataFeedMetricsTags);
}
/**
 * Applies the planned mutations; only {@code INSERT} mutations are handled
 * here, and each inserted row is emitted to the log at the configured level.
 *
 * @param planned ordered (mutation type, dataset) pairs to apply
 */
@Override
public void applyBulkMutations(List<Tuple2<MutationType, Dataset<Row>>> planned) {
  for (final Tuple2<MutationType, Dataset<Row>> entry : planned) {
    // Non-INSERT mutation types are intentionally ignored by this sink.
    if (entry._1().equals(MutationType.INSERT)) {
      entry._2().javaRDD().foreach(new SendRowToLogFunction(delimiter, logLevel));
    }
  }
}