@Override
public String getSparkUiWebUrl() {
  return getOrCreate().sparkContext().uiWebUrl().get();
}
private String stageLink(int stageId) {
  if (getSparkSession().sparkContext().uiWebUrl().isDefined()) {
    return getSparkSession().sparkContext().uiWebUrl().get()
        + "/stages/stage/?id=" + stageId + "&attempt=0";
  } else {
    return "";
  }
}

private String jobLink(int jobId) {
  if (getSparkSession().sparkContext().uiWebUrl().isDefined()) {
    return getSparkSession().sparkContext().uiWebUrl().get()
        + "/jobs/job/?id=" + jobId;
  } else {
    return "";
  }
}
public void cancelAllJobs() { getSparkSession().sparkContext().cancelAllJobs(); }
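The link helpers above assume the caller already has a stage or job id. A minimal sketch of how those ids could be obtained by registering a listener on the same SparkContext; the listener callbacks (onJobStart, onStageSubmitted) are standard Spark scheduler APIs, but the method name and the "publish" step here are illustrative, not part of the original code:

import org.apache.spark.scheduler.SparkListener;
import org.apache.spark.scheduler.SparkListenerJobStart;
import org.apache.spark.scheduler.SparkListenerStageSubmitted;

// Hypothetical wiring: turn scheduler events into UI links via the helpers above.
private void addUiLinkListener() {
  getSparkSession().sparkContext().addSparkListener(new SparkListener() {
    @Override
    public void onJobStart(SparkListenerJobStart jobStart) {
      String link = jobLink(jobStart.jobId());   // <ui>/jobs/job/?id=<jobId>
      // publish the link to the front end here (illustrative)
    }

    @Override
    public void onStageSubmitted(SparkListenerStageSubmitted stageSubmitted) {
      String link = stageLink(stageSubmitted.stageInfo().stageId());
      // publish the link to the front end here (illustrative)
    }
  });
}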
private void configureRESTMapping() {
  KernelFunctionality kernel = KernelManager.get();
  BeakerXServer beakerXServer = kernel.getBeakerXServer();
  beakerXServer.addPostMapping(PUT_SPARK_JOBS_IN_THE_BACKGROUND,
      (Context ctx) -> kernel.putEvaluationInToBackground());
  beakerXServer.addPostMapping(CANCELLED_SPARK_JOBS + "/:stageid",
      (Context ctx) -> getSparkSession().sparkContext().cancelStage(Integer.parseInt(ctx.param("stageid"))));
}
private void applicationStart() {
  this.statusPanel = new SparkUIStatus(() -> getSparkSession().sparkContext().stop());
  this.sparkUIForm.setDomClasses(new ArrayList<>(asList("bx-disabled")));
  add(0, this.statusPanel);
  sendUpdate(SPARK_APP_ID, sparkEngine.getSparkAppId());
  sendUpdate("sparkUiWebUrl", sparkEngine.getSparkUiWebUrl());
  sendUpdate("sparkMasterUrl", sparkEngine.getSparkMasterUrl());
}
@Override
public TryResult configure(KernelFunctionality kernel, SparkUIApi sparkUI, Message parentMessage) {
  SparkConf sparkConf = createSparkConf(sparkUI.getAdvancedOptions(), getSparkConfBasedOn(this.sparkSessionBuilder));
  sparkConf = configureSparkConf(sparkConf, sparkUI);
  this.sparkSessionBuilder = SparkSession.builder().config(sparkConf);
  if (sparkUI.getHiveSupport()) {
    this.sparkSessionBuilder.enableHiveSupport();
  }
  TryResult sparkSessionTry = createSparkSession(sparkUI, parentMessage);
  if (sparkSessionTry.isError()) {
    return sparkSessionTry;
  }
  addListener(getOrCreate().sparkContext(), sparkUI);
  SparkVariable.putSparkSession(getOrCreate());
  TryResult tryResultSparkContext = initSparkContextInShell(kernel, parentMessage);
  if (!tryResultSparkContext.isError()) {
    kernel.registerCancelHook(SparkVariable::cancelAllJobs);
  }
  return tryResultSparkContext;
}
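Stripped of the BeakerX plumbing, the session construction inside configure reduces to the standard Spark builder pattern. A minimal sketch, assuming a local master and a placeholder application name (both are illustrative values, not taken from the original code):

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

// Build a SparkConf, then a SparkSession, optionally with Hive support.
SparkConf sparkConf = new SparkConf()
    .setMaster("local[*]")            // placeholder master
    .setAppName("beakerx-spark");     // placeholder app name

SparkSession.Builder builder = SparkSession.builder().config(sparkConf);
boolean hiveSupport = false;          // assumption: toggled from the UI form
if (hiveSupport) {
  builder.enableHiveSupport();
}
SparkSession session = builder.getOrCreate();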
    .getOrCreate();
JavaSparkContext context = JavaSparkContext.fromSparkContext(session.sparkContext());
@Before public void setUp() { spark = SparkSession.builder() .master("local[*]") .appName("testing") .getOrCreate(); jsc = new JavaSparkContext(spark.sparkContext()); }
@Before public void setUp() { spark = SparkSession.builder() .master("local[*]") .appName("testing") .getOrCreate(); jsc = new JavaSparkContext(spark.sparkContext()); }
@Before public void setUp() { spark = SparkSession.builder() .master("local[*]") .appName("testing") .getOrCreate(); jsc = new JavaSparkContext(spark.sparkContext()); }
    .getOrCreate();
JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
int neighborWindow = Integer.parseInt(args[0]);
String input = args[1];
protected Configuration lazyConf() {
  if (lazyConf == null) {
    this.lazyConf = lazySparkSession().sparkContext().hadoopConfiguration();
  }
  return lazyConf;
}
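The Hadoop Configuration obtained this way is the usual place to set filesystem properties before reading or writing data. A small sketch; the property key and value are illustrative, not something the original code sets:

import org.apache.hadoop.conf.Configuration;

Configuration hadoopConf = lazyConf();
// Illustrative only: point Hadoop's S3A connector at a custom endpoint.
hadoopConf.set("fs.s3a.endpoint", "http://localhost:9000");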
/** * Pushes an "in_valueset" UDF that uses the given {@link BroadcastableValueSets} for its content. * * @param spark the spark session * @param valueSets the valuesets to use in the UDF */ public static synchronized void pushUdf(SparkSession spark, BroadcastableValueSets valueSets) { JavaSparkContext ctx = new JavaSparkContext(spark.sparkContext()); Broadcast<BroadcastableValueSets> broadcast = ctx.broadcast(valueSets); pushUdf(spark, broadcast); }
@Before
public void setupTest() {
  final SparkConf sparkConf = SparkTestUtil.getSparkConf(TestCassandraDataFrameConverter.class.getName());
  this.spark = Optional.of(SparkTestUtil.getSparkSession(sparkConf));
  this.jsc = Optional.of(new JavaSparkContext(spark.get().sparkContext()));
}
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
      .master("local[2]")
      .appName(getClass().getSimpleName())
      .getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}