/**
 * Reports whether the shared Spark context is currently unusable.
 *
 * @return {@code true} if the context was never created, or was created but has since been stopped
 */
public static boolean isContextNullOrStopped() {
    if (CONTEXT == null) {
        return true;
    }
    return CONTEXT.isStopped();
}
/**
 * Worker body for a two-thread concurrency test of Hive-on-Spark session lifecycle.
 * Each thread opens its own SparkSession, runs a trivial query, then closes the session;
 * the threads coordinate through the shared {@code barrier} so that closing order is fixed:
 * thread 2 closes first (context must survive), thread 1 closes last (context must stop).
 *
 * NOTE(review): assumes both sessions share one underlying SparkContext and that
 * {@code barrier} has parties == 2 — confirm against the test's setup (not visible here).
 *
 * @param conf     per-thread Hive configuration (mutated: short session timeouts)
 * @param threadId 1 for the "closes last" thread, any other value for the "closes first" thread
 * @throws Exception on driver, session, or barrier failure
 */
private void runSparkTestSession(HiveConf conf, int threadId) throws Exception {
    // Short timeout + frequent check period so session-timeout machinery is active during the test.
    conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT, "10s");
    conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT_PERIOD, "1s");
    Driver driver = null;
    try {
        driver = new Driver(new QueryState.Builder()
            .withGenerateNewQueryId(true)
            .withHiveConf(conf).build(), null, null);
        SparkSession sparkSession = SparkUtilities.getSparkSession(conf, SparkSessionManagerImpl.getInstance());
        // A successful trivial query proves the session is live (response code 0 == success).
        Assert.assertEquals(0, driver.run("show tables").getResponseCode());
        // Rendezvous: both threads have a live session before any stop-state assertions run.
        barrier.await();
        SparkContext sparkContext = getSparkContext(sparkSession);
        Assert.assertFalse(sparkContext.isStopped());
        if(threadId == 1) {
            // Wait until the other thread has closed its session and re-hit the barrier,
            // then close ours — as the last user, closing should stop the shared context.
            barrier.await();
            closeSparkSession(sparkSession);
            Assert.assertTrue(sparkContext.isStopped());
        } else {
            // Close first: the context must stay alive while the other thread still holds it.
            closeSparkSession(sparkSession);
            Assert.assertFalse(sparkContext.isStopped());
            // Release the other thread only after asserting the context survived our close.
            barrier.await();
        }
    } finally {
        // Always tear down the driver, even when an assertion above fails.
        if (driver != null) {
            driver.destroy();
        }
    }
}
/**
 * Returns the shared Spark context, transparently recreating it when it has been stopped.
 * Returns {@code null} if no context has ever been created.
 */
public static SparkContext getContext() {
    boolean stoppedButPresent = (CONTEXT != null) && CONTEXT.isStopped();
    if (stoppedButPresent) {
        recreateStopped();
    }
    return CONTEXT;
}
public static SparkContext recreateStopped() { if (null == CONTEXT) throw new IllegalStateException("The Spark context has not been created."); if (!CONTEXT.isStopped()) throw new IllegalStateException("The Spark context is not stopped."); CONTEXT = SparkContext.getOrCreate(CONTEXT.getConf()); return CONTEXT; } public static SparkContext getContext() {
/**
 * Registers the given Spark context as the shared context.
 * Fails fast when a different, still-running context is already registered and
 * multiple contexts are not explicitly allowed by configuration.
 *
 * @param sparkContext the context to register (re-registering the same object is a no-op check)
 * @return the registered context
 * @throws IllegalStateException if a distinct active context already exists
 */
public static SparkContext create(final SparkContext sparkContext) {
    final boolean haveLiveContext = (null != CONTEXT) && !CONTEXT.isStopped();
    // Short-circuit order preserved: getConf() is only consulted for a *different* incoming object.
    if (haveLiveContext
        && sparkContext != CONTEXT /*exact the same object => NOP*/
        && !sparkContext.getConf().getBoolean("spark.driver.allowMultipleContexts", false)) {
        throw new IllegalStateException(
            "Active Spark context exists. Call Spark.close() to close it before creating a new one");
    }
    CONTEXT = sparkContext;
    return CONTEXT;
}
public static void refresh() { if (null == CONTEXT) throw new IllegalStateException("The Spark context has not been created."); if (CONTEXT.isStopped()) recreateStopped(); final Set<String> keepNames = new HashSet<>(); for (final RDD<?> rdd : JavaConversions.asJavaIterable(CONTEXT.persistentRdds().values())) { if (null != rdd.name()) { keepNames.add(rdd.name()); NAME_TO_RDD.put(rdd.name(), rdd); } } // remove all stale names in the NAME_TO_RDD map NAME_TO_RDD.keySet().stream().filter(key -> !keepNames.contains(key)).collect(Collectors.toList()).forEach(NAME_TO_RDD::remove); }
/**
 * Indicates that no live Spark context is available.
 *
 * @return {@code true} when the shared context is absent or already stopped
 */
public static boolean isContextNullOrStopped() {
    final boolean missing = (null == CONTEXT);
    return missing || CONTEXT.isStopped();
}
/**
 * Accessor for the shared Spark context; a stopped context is recreated on the fly.
 * Yields {@code null} when no context was ever created.
 */
public static SparkContext getContext() {
    if (CONTEXT == null) {
        return null;
    }
    if (CONTEXT.isStopped()) {
        recreateStopped();
    }
    return CONTEXT;
}
/**
 * Lazily (re)wraps the given SparkContext in a JavaSparkContext, then returns the
 * streaming-context singleton.
 *
 * NOTE(review): this is double-checked locking on {@code context}; for it to be safe
 * under the Java memory model the field must be declared {@code volatile} — confirm
 * the declaration, which is not visible in this chunk.
 * NOTE(review): the freshly built JavaSparkContext is stored in {@code context}, yet the
 * method unconditionally returns {@code SparkStreamingContext.INSTANCE} — verify that
 * INSTANCE actually observes/uses the new context, otherwise the parameter is ignored
 * on the return path.
 *
 * @param sparkContext the raw context to wrap when the cached one is absent or stopped
 * @return the streaming-context singleton
 */
private static SparkStreamingContext contextOf(SparkContext sparkContext) {
    if (context == null || context.sc().isStopped()) {
        synchronized (SparkStreamingContext.class) {
            // Re-check under the lock: another thread may have rebuilt it meanwhile.
            if (context == null || context.sc().isStopped()) {
                context = new JavaSparkContext(sparkContext);
            }
        }
    }
    return SparkStreamingContext.INSTANCE;
}
public static SparkContext recreateStopped() { if (null == CONTEXT) throw new IllegalStateException("The Spark context has not been created."); if (!CONTEXT.isStopped()) throw new IllegalStateException("The Spark context is not stopped."); CONTEXT = SparkContext.getOrCreate(CONTEXT.getConf()); return CONTEXT; } public static SparkContext getContext() {
/**
 * Installs {@code sparkContext} as the shared context, rejecting the call when a
 * different context is still running and multiple contexts are not permitted.
 *
 * @param sparkContext context to install; passing the already-installed object is harmless
 * @return the installed context
 * @throws IllegalStateException if a distinct active context is already installed
 */
public static SparkContext create(final SparkContext sparkContext) {
    if (null != CONTEXT && !CONTEXT.isStopped()) {
        // Re-registering the exact same object is a no-op; only a *different* live
        // context needs the allowMultipleContexts escape hatch.
        if (sparkContext != CONTEXT
            && !sparkContext.getConf().getBoolean("spark.driver.allowMultipleContexts", false)) {
            throw new IllegalStateException(
                "Active Spark context exists. Call Spark.close() to close it before creating a new one");
        }
    }
    CONTEXT = sparkContext;
    return CONTEXT;
}
public static void refresh() { if (null == CONTEXT) throw new IllegalStateException("The Spark context has not been created."); if (CONTEXT.isStopped()) recreateStopped(); final Set<String> keepNames = new HashSet<>(); for (final RDD<?> rdd : JavaConversions.asJavaIterable(CONTEXT.persistentRdds().values())) { if (null != rdd.name()) { keepNames.add(rdd.name()); NAME_TO_RDD.put(rdd.name(), rdd); } } // remove all stale names in the NAME_TO_RDD map NAME_TO_RDD.keySet().stream().filter(key -> !keepNames.contains(key)).collect(Collectors.toList()).forEach(NAME_TO_RDD::remove); }
public static synchronized JavaSparkContext getSparkContext(SparkPipelineOptions options) { SparkContextOptions contextOptions = options.as(SparkContextOptions.class); usesProvidedSparkContext = contextOptions.getUsesProvidedSparkContext(); // reuse should be ignored if the context is provided. if (Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT) && !usesProvidedSparkContext) { // if the context is null or stopped for some reason, re-create it. if (sparkContext == null || sparkContext.sc().isStopped()) { sparkContext = createSparkContext(contextOptions); sparkMaster = options.getSparkMaster(); } else if (!options.getSparkMaster().equals(sparkMaster)) { throw new IllegalArgumentException( String.format( "Cannot reuse spark context " + "with different spark master URL. Existing: %s, requested: %s.", sparkMaster, options.getSparkMaster())); } return sparkContext; } else { return createSparkContext(contextOptions); } }
/**
 * Lazily builds and caches a JavaSparkContext configured from application Config:
 * app name from SPARK_APPNAME (random hex fallback) and, when present, the master
 * URL from SPARK_MASTER (defaulting to "local[*]").
 *
 * NOTE(review): double-checked locking on {@code context} — the field must be declared
 * {@code volatile} to be safe under the Java memory model; its declaration is not
 * visible here, confirm it.
 * NOTE(review): the lock object is {@code SparkStreamingContext.class}; verify every
 * writer of {@code context} synchronizes on the same monitor.
 *
 * @return the cached (or newly created) JavaSparkContext
 */
private static JavaSparkContext getSparkContext() {
    if (context == null || context.sc().isStopped()) {
        synchronized (SparkStreamingContext.class) {
            // Re-check under the lock in case another thread built it first.
            if (context == null || context.sc().isStopped()) {
                SparkConf conf = new SparkConf()
                    .setAppName(Config.get(SPARK_APPNAME).asString(StringUtils.randomHexString(20)));
                if (Config.hasProperty(SPARK_MASTER)) {
                    // Master is only set when explicitly configured; "local[*]" is the asString fallback.
                    conf = conf.setMaster(Config.get(SPARK_MASTER).asString("local[*]"));
                }
                context = new JavaSparkContext(conf);
            }
        }
    }
    return context;
}
private static JavaSparkContext createSparkContext(SparkContextOptions contextOptions) { if (usesProvidedSparkContext) { LOG.info("Using a provided Spark Context"); JavaSparkContext jsc = contextOptions.getProvidedSparkContext(); if (jsc == null || jsc.sc().isStopped()) { LOG.error("The provided Spark context " + jsc + " was not created or was stopped"); throw new RuntimeException("The provided Spark context was not created or was stopped"); } return jsc; } else { LOG.info("Creating a brand new Spark Context."); SparkConf conf = new SparkConf(); if (!conf.contains("spark.master")) { // set master if not set. conf.setMaster(contextOptions.getSparkMaster()); } if (contextOptions.getFilesToStage() != null && !contextOptions.getFilesToStage().isEmpty()) { conf.setJars(contextOptions.getFilesToStage().toArray(new String[0])); } conf.setAppName(contextOptions.getAppName()); // register immutable collections serializers because the SDK uses them. conf.set("spark.kryo.registrator", BeamSparkRunnerRegistrator.class.getName()); return new JavaSparkContext(conf); } } }