@Test
public void scalaSparkContext() {
  List<String> jars = List$.MODULE$.empty();
  Map<String, String> environment = Map$.MODULE$.empty();
  // Exercise each Scala SparkContext constructor overload from Java,
  // stopping every context immediately so only one is alive at a time.
  new SparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
  new SparkContext("local", "name", new SparkConf()).stop();
  new SparkContext("local", "name").stop();
  new SparkContext("local", "name", "sparkHome").stop();
  new SparkContext("local", "name", "sparkHome", jars).stop();
  new SparkContext("local", "name", "sparkHome", jars, environment).stop();
}
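A minimal companion sketch (not part of the test above): the same context can be created through the Java-friendly JavaSparkContext wrapper, with try/finally guaranteeing stop() even when the body throws. The class name and app name are illustrative.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class JavaContextSketch {
  public static void main(String[] args) {
    // JavaSparkContext wraps a Scala SparkContext behind a Java-friendly API.
    JavaSparkContext jsc = new JavaSparkContext(
        new SparkConf().setMaster("local").setAppName("name"));
    try {
      // ... run jobs against jsc ...
    } finally {
      jsc.stop(); // releases executors and driver-side resources
    }
  }
}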
private void applicationStart() {
  this.statusPanel = new SparkUIStatus(() -> getSparkSession().sparkContext().stop());
  this.sparkUIForm.setDomClasses(new ArrayList<>(asList("bx-disabled")));
  add(0, this.statusPanel);
  sendUpdate(SPARK_APP_ID, sparkEngine.getSparkAppId());
  sendUpdate("sparkUiWebUrl", sparkEngine.getSparkUiWebUrl());
  sendUpdate("sparkMasterUrl", sparkEngine.getSparkMasterUrl());
}
public static void main(String[] args) throws Exception {
  assertNotEquals(0, args.length);
  assertEquals(args[0], "hello");
  new SparkContext().stop();
  // Wake the test thread that is waiting on LOCK for this process to finish.
  synchronized (LOCK) {
    LOCK.notifyAll();
  }
}
public static void close() {
  NAME_TO_RDD.clear();
  if (null != CONTEXT) {
    CONTEXT.stop();
  }
  CONTEXT = null;
}
@Override
public void destroy() {
  indexManager.close();
  if (schemaManager != null) {
    schemaManager.dropSchema();
  }
  if (sparkContext != null) {
    logger.info("Closing connection to spark.");
    sparkContext.stop();
    logger.info("Closed connection to spark.");
  } else {
    logger.warn("Can't close connection to Spark, it was already disconnected");
  }
  externalProperties = null;
  schemaManager = null;
}
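Several snippets above guard stop() with a null check while others call it unconditionally. A minimal sketch of a reusable guard (SparkShutdown is a hypothetical helper, not taken from any snippet here); recent Spark versions already tolerate repeated stop() calls, but a guard keeps the shutdown logic and its logging in one place.

import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.spark.SparkContext;

class SparkShutdown {
  private final AtomicBoolean stopped = new AtomicBoolean(false);
  private final SparkContext sparkContext;

  SparkShutdown(SparkContext sparkContext) {
    this.sparkContext = sparkContext;
  }

  // Safe to call from multiple code paths; only the first call stops the context.
  void close() {
    if (stopped.compareAndSet(false, true)) {
      sparkContext.stop();
    }
  }
}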
public void stop() {
  if (this.sqlCtx != null) {
    this.sqlCtx.sparkContext().stop();
  }
}
@After
public void tearDown() {
  final SparkArgs sparkArgs = getSampleMarmaraySparkArgs();
  // gets existing sc
  this.sparkFactory.get().getSparkContext(sparkArgs).sc().stop();
  this.sparkFactory = Optional.absent();
}
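An alternative to a hand-written @After, assuming JUnit 4: an ExternalResource rule that owns the context, so teardown runs even when a test fails. SparkContextResource is a hypothetical name, and the sketch manages its own local context rather than a shared factory.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.rules.ExternalResource;

public class SparkContextResource extends ExternalResource {
  private JavaSparkContext jsc;

  @Override
  protected void before() {
    jsc = new JavaSparkContext(new SparkConf().setMaster("local").setAppName("test"));
  }

  @Override
  protected void after() {
    if (jsc != null) {
      jsc.stop(); // runs after every test, pass or fail
      jsc = null;
    }
  }

  public JavaSparkContext context() {
    return jsc;
  }
}

Declared in a suite as @Rule public SparkContextResource spark = new SparkContextResource();, the rule replaces the manual tearDown above.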
public void run() {
  long microsLower = day * 1000;
  long microsUpper = (day * 1000) + TimeUnit.DAYS.toMicros(1) - 1;
  log.info("Running Dependencies job for {}: {} ≤ Span.timestamp {}", dateStamp, microsLower, microsUpper);
  SparkContext sc = new SparkContext(conf);
  List<DependencyLink> links = javaFunctions(sc)
      .cassandraTable(keyspace, "traces")
      .spanBy(ROW_TRACE_ID, Long.class)
      .flatMapValues(new CassandraRowsToDependencyLinks(logInitializer, microsLower, microsUpper))
      .values()
      .mapToPair(LINK_TO_PAIR)
      .reduceByKey(MERGE_LINK)
      .values()
      .collect();
  sc.stop();
  saveToCassandra(links);
}
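In the job above, stop() runs before saveToCassandra, which is safe because collect() has already materialized the links on the driver; a failure earlier in the pipeline, however, would leave the context running. A minimal sketch of the same shape with try/finally (computeLinks and saveLinks are hypothetical stand-ins for the RDD pipeline and the Cassandra write):

import java.util.Collections;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;

public class DependenciesJobSketch {
  public void run(SparkConf conf) {
    SparkContext sc = new SparkContext(conf);
    List<?> links;
    try {
      links = computeLinks(sc); // stands in for the spanBy/reduceByKey pipeline
    } finally {
      sc.stop(); // collected results already live on the driver
    }
    saveLinks(links);
  }

  private List<?> computeLinks(SparkContext sc) {
    return Collections.emptyList();
  }

  private void saveLinks(List<?> links) {
    // stands in for the Cassandra write
  }
}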