/**
 * Persists every element of the wrapped RDD into a Riak TS (timeseries) table.
 *
 * <p>Builds a timeseries-mode {@code RiakWriter} from the supplied connection and
 * mapping configuration, then schedules it as a Spark job over all partitions of
 * {@code rdd}. The job produces no result value, hence the {@code Void} class tag.
 *
 * @param connector  connection provider for the target Riak cluster
 * @param bucketType Riak bucket type holding the TS table
 * @param bucketName name of the TS table/bucket to write into
 * @param writeConf  write-tuning options (batch size, quorum, etc. — defined by WriteConf)
 * @param factory    maps RDD elements of type {@code T} into TS rows ({@code RowDef})
 */
public void saveToRiakTS( RiakConnector connector, String bucketType, String bucketName, WriteConf writeConf, WriteDataMapperFactory<T, RowDef> factory ) {
    // One writer instance is shared by the job function; JobFunc applies it per partition.
    final RiakWriter<T, RowDef> tsWriter =
            RiakWriter.tsWriter(connector, bucketType, bucketName, writeConf, factory);

    // runJob blocks until all partitions have been written (side-effect-only job).
    rdd.sparkContext()
       .runJob(rdd, new JobFunc<>(tsWriter), JavaApiHelper.getClassTag(Void.class));
}
/**
 * Persists every element of the wrapped RDD into a Riak TS (timeseries) table:
 * constructs a timeseries-mode {@code RiakWriter} and runs it as a side-effect-only
 * Spark job ({@code Void} class tag) over all partitions of {@code rdd}.
 *
 * <p>NOTE(review): this is byte-for-byte identical to the {@code saveToRiakTS}
 * definition immediately above — presumably the two lines come from two different
 * source files; if they are in the same class this is a compile-error duplicate
 * and one copy should be removed.
 *
 * @param connector  connection provider for the target Riak cluster
 * @param bucketType Riak bucket type holding the TS table
 * @param bucketName name of the TS table/bucket to write into
 * @param writeConf  write-tuning options (semantics defined by WriteConf)
 * @param factory    maps elements of type {@code T} into TS rows ({@code RowDef})
 */
public void saveToRiakTS( RiakConnector connector, String bucketType, String bucketName, WriteConf writeConf, WriteDataMapperFactory<T, RowDef> factory ) { RiakWriter<T, RowDef> writer = RiakWriter.tsWriter(connector, bucketType, bucketName, writeConf, factory); rdd.sparkContext().runJob(rdd, new JobFunc<>(writer), JavaApiHelper.getClassTag(Void.class)); }
// Fragment of a partition-sampling routine (enclosing method not visible in this view).
// Picks a random start offset inside one partition, then runs two targeted Spark jobs
// restricted (via the singleton partition list) to just that partition:
//   1) PartitionSampleFunction over the index window [tid, tid + sampleSize)
//   2) PartitionSampleListFunction over an externally supplied `list`
// NOTE(review): this line is truncated mid-expression — both runJob(...) calls are
// left unclosed (trailing commas), and `rand`, `partitionSize`, `pid`, `sampleSize`,
// `list`, `inputRdd`, `sparkContext` are all declared outside this view. The raw
// ClassTag-less/raw-Seq casts suggest the full call sites pass a ClassTag as the
// missing final argument — confirm against the original file before editing.
int tid = rand.nextInt(partitionSize); // sample item inside the partition List<Integer> partitions = Collections.singletonList(pid); Object samples = sparkContext.runJob(inputRdd.rdd(), new PartitionSampleFunction(tid, ((tid + sampleSize))), (scala.collection.Seq) JavaConversions.asScalaBuffer(partitions), sparkContext.runJob(inputRdd.rdd(), new PartitionSampleListFunction(list), (scala.collection.Seq) JavaConversions.asScalaBuffer(partitions),
// Fragment: opening of a runJob call over `mappedRDD` whose per-partition work is an
// anonymous AbstractSerializableFunction2<TaskContext, Iterator<Tuple2<Cells, Cells>>, Integer>
// (i.e. it receives the task context plus the partition's (Cells, Cells) tuples and
// yields an Integer per partition).
// NOTE(review): the anonymous class body and the call's closing arguments are cut off
// at the edge of this view — the function body and any ClassTag argument are not
// visible here; do not edit this line without the surrounding file.
mappedRDD.context().runJob(mappedRDD, new AbstractSerializableFunction2<TaskContext, Iterator<Tuple2<Cells, Cells>>, Integer>() {
// Fragment of a sampling routine (enclosing method not visible in this view):
// registers the target partition id, then runs a Spark job restricted to just that
// partition (`pars` converted to a Scala Seq) executing TakeSampleFunction over the
// element window [tupleID, tupleID + sampleSize), with allowLocal=true and a
// ClassTag for List as the job's result element type.
// NOTE(review): `pars`, `partitionID`, `tupleID`, `sampleSize`, `shuffledRDD` and
// `sparkContext` are all declared outside this view; the raw `scala.collection.Seq`
// cast silences an unchecked warning rather than fixing the generic type — verify
// against the full file before changing it.
pars.add(partitionID); Object samples = sparkContext.runJob(shuffledRDD.rdd(), new TakeSampleFunction(tupleID, tupleID + sampleSize), (scala.collection.Seq) JavaConversions.asScalaBuffer(pars), true, scala.reflect.ClassTag$.MODULE$.apply(List.class));
/**
 * Writes the given DataFrame into the analytics data layer (DAL).
 *
 * <p>Two execution strategies, selected by {@code this.preserveOrder}:
 * <ul>
 *   <li><b>Order preserved</b> — partitions are written one at a time, each as its own
 *       Spark job restricted to a single partition index, so partition {@code i} is
 *       fully persisted before partition {@code i + 1} starts.</li>
 *   <li><b>Order not required</b> — a single {@code foreachPartition} pass writes all
 *       partitions concurrently.</li>
 * </ul>
 *
 * @param data the DataFrame whose rows are persisted via {@code AnalyticsWritingFunction}
 */
private void writeDataFrameToDAL(DataFrame data) {
    if (!this.preserveOrder) {
        // Unordered path: let Spark write every partition in parallel.
        data.foreachPartition(createWritingFunction(data));
        return;
    }

    logDebug("Inserting data with order preserved! Each partition will be written using separate jobs.");
    final int partitionCount = data.rdd().partitions().length;
    for (int partition = 0; partition < partitionCount; partition++) {
        // One job per partition index [partition, partition + 1): serializes the writes.
        data.sqlContext().sparkContext().runJob(
                data.rdd(),
                createWritingFunction(data),
                CarbonScalaUtils.getNumberSeq(partition, partition + 1),
                false,
                ClassTag$.MODULE$.Unit());
    }
}

/** Builds the per-partition writer carrying this instance's table/store configuration. */
private AnalyticsWritingFunction createWritingFunction(DataFrame data) {
    return new AnalyticsWritingFunction(this.tenantId, this.tableName, data.schema(),
            this.globalTenantAccess, this.schemaString, this.primaryKeys, this.mergeFlag,
            this.recordStore, this.recordBatchSize);
}