/**
 * Runs the given {@link TxRunnable} in a transaction by delegating to the
 * wrapped execution context {@code sec}.
 *
 * @param runnable the work to perform transactionally
 * @throws TransactionFailureException if the underlying transaction fails
 */
@Override
public void execute(TxRunnable runnable) throws TransactionFailureException {
  sec.execute(runnable);
}
/**
 * Runs the given {@link TxRunnable} in a transaction with an explicit timeout,
 * delegating to the wrapped execution context {@code sec}.
 *
 * @param timeout  transaction timeout (units determined by the delegate's contract)
 * @param runnable the work to perform transactionally
 * @throws TransactionFailureException if the underlying transaction fails
 */
@Override
public void execute(int timeout, TxRunnable runnable) throws TransactionFailureException {
  sec.execute(timeout, runnable);
}
/**
 * Records the name of the cluster this Spark program ran on, inside a
 * transaction. The name is written to the {@code CLUSTER_NAME_TABLE}
 * key-value dataset under the key {@code "spark.cluster.name"}, prefixed
 * with {@code "<workflowName>."} when this run is part of a workflow.
 *
 * @param sec the Spark execution context used for transactions and run metadata
 * @throws Exception if the transactional write fails
 */
@Override
public void run(final JavaSparkExecutionContext sec) throws Exception {
  sec.execute(new TxRunnable() {
    @Override
    public void run(DatasetContext datasetContext) throws Exception {
      KeyValueTable clusterNameTable = datasetContext.getDataset(CLUSTER_NAME_TABLE);
      // Prefix the key with the workflow name only when launched from a workflow.
      WorkflowInfo workflowInfo = sec.getWorkflowInfo();
      String prefix;
      if (workflowInfo == null) {
        prefix = "";
      } else {
        prefix = workflowInfo.getName() + ".";
      }
      clusterNameTable.write(prefix + "spark.cluster.name", sec.getClusterName());
    }
  });
}
}
// NOTE(review): the lines below are syntactically incomplete fragments —
// braces do not balance and several bodies are cut off mid-expression
// (e.g. a stray "} finally {" with no enclosing try). They appear to be
// excerpts of multiple sec.execute(TxRunnable) call sites concatenated
// from other files. Left byte-identical; they cannot be safely rewritten
// or reformatted without the missing surrounding code — TODO: recover the
// original files before editing.
sec.execute(new TxRunnable() { @Override public void run(DatasetContext context) throws Exception {
// NOTE(review): fragment — caches an RDD wrapped in a metrics-counting map
// ("records.in"), then opens nested transactions; truncated mid-body.
sec.execute(new TxRunnable() { @Override public void run(DatasetContext datasetContext) throws Exception { final JavaRDD<T> countedRDD = data.map(new CountingFunction<T>(stageName, sec.getMetrics(), "records.in", null)).cache(); sec.execute(new TxRunnable() { @Override public void run(DatasetContext context) throws Exception { sec.execute(new TxRunnable() { @Override public void run(DatasetContext datasetContext) throws Exception { } finally { if (isPrepared && !isDone) { sec.execute(new TxRunnable() { @Override public void run(DatasetContext datasetContext) throws Exception {
sec.execute(new TxRunnable() { @Override public void run(DatasetContext context) throws Exception {
// NOTE(review): fragment — presumably part of a prepare/commit/abort
// lifecycle (isPrepared/isDone flags with a cleanup transaction in a
// finally block); truncated, cannot be verified from here.
sec.execute(new TxRunnable() { @Override public void run(DatasetContext datasetContext) throws Exception { Object.class, Object.class); isDone = true; sec.execute(new TxRunnable() { @Override public void run(DatasetContext datasetContext) throws Exception { } finally { if (isPrepared && !isDone) { sec.execute(new TxRunnable() { @Override public void run(DatasetContext datasetContext) throws Exception {
sec.execute(new TxRunnable() { @Override public void run(DatasetContext context) throws Exception {