Refine search
// NOTE(review): method is named createConf but actually builds and returns a
// JavaSparkContext; a rename (e.g. createContext) would be clearer, but the
// signature is kept as-is for callers. The trailing extra '}' closes an
// enclosing class that is outside this view.
/** Creates a JavaSparkContext whose SparkConf has the app name "animalClass". */
public static JavaSparkContext createConf() {
    SparkConf sparkConf = new SparkConf();
    sparkConf.setAppName("animalClass");
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
    return sc;
}
}
/**
 * Builds the Spark configuration for this component: a single-threaded local
 * master ("local[1]") and the application name "appName1".
 *
 * @return a freshly constructed, fully configured SparkConf
 */
@Override
public SparkConf getSparkConf() {
    return new SparkConf()
            .setMaster("local[1]")
            .setAppName("appName1");
}
protected void entryPoint(String[] args) throws Exception { JCommanderUtils.parseArgs(this, args); SparkConf conf = new SparkConf(); conf.setAppName("DL4JTinyImageNetSparkPreproc"); JavaSparkContext sc = new JavaSparkContext(conf); //Create training set JavaRDD<String> filePathsTrain = SparkUtils.listPaths(sc, sourceDir + "/train", true, NativeImageLoader.ALLOWED_FORMATS); SparkDataUtils.createFileBatchesSpark(filePathsTrain, saveDir, batchSize, sc); //Create test set JavaRDD<String> filePathsTest = SparkUtils.listPaths(sc, sourceDir + "/test", true, NativeImageLoader.ALLOWED_FORMATS); SparkDataUtils.createFileBatchesSpark(filePathsTest, saveDir, batchSize, sc); System.out.println("----- Data Preprocessing Complete -----"); }
/**
 * Main function will be triggered via spark-submit: runs the Alluxio/Spark
 * integration check, writes user-facing output to a report file, and exits
 * with 0 on success or 1 on failure.
 *
 * @param args optional argument mPartitions may be passed in
 */
public static void main(String[] args) throws Exception {
    AlluxioConfiguration alluxioConf = new InstancedConfiguration(ConfigurationUtils.defaults());
    SparkIntegrationChecker checker = new SparkIntegrationChecker();
    JCommander jCommander = new JCommander(checker, args);
    jCommander.setProgramName("SparkIntegrationChecker");
    // Create a file to save user-facing messages; try-with-resources guarantees
    // the writer is closed even though System.exit is called inside the block
    // only after the body completes.
    try (PrintWriter reportWriter = CheckerUtils.initReportFile()) {
        // Start the Java Spark Context
        SparkConf conf = new SparkConf().setAppName(SparkIntegrationChecker.class.getName());
        JavaSparkContext sc = new JavaSparkContext(conf);
        checker.printConfigInfo(conf, reportWriter);
        // Run the actual integration check and report its outcome.
        Status resultStatus = checker.run(sc, reportWriter, alluxioConf);
        checker.printResultInfo(resultStatus, reportWriter);
        reportWriter.flush();
        // Exit code mirrors the check result so scripts/CI can consume it.
        System.exit(resultStatus.equals(Status.SUCCESS) ? 0 : 1);
    }
}
}
public static void main(String[] args) throws Exception { String zkQuorum = args[0]; String group = args[1]; SparkConf conf = new SparkConf().setAppName("KafkaInput"); // Create a StreamingContext with a 1 second batch size JavaStreamingContext jssc = new JavaStreamingContext(conf, new Duration(1000)); Map<String, Integer> topics = new HashMap<String, Integer>(); topics.put("pandas", 1); JavaPairDStream<String, String> input = KafkaUtils.createStream(jssc, zkQuorum, group, topics); input.print(); // start our streaming context and wait for it to "finish" jssc.start(); // Wait for 10 seconds then exit. To run forever call without a timeout jssc.awaitTermination(10000); // Stop the streaming context jssc.stop(); } }
/**
 * Builds a local-mode test SparkConf with the supplied executor-plugin list.
 *
 * @param pluginNames value stored under EXECUTOR_PLUGIN_CONF_NAME
 * @return the configured SparkConf
 */
private SparkConf initializeSparkConf(String pluginNames) {
    SparkConf conf = new SparkConf();
    conf.setMaster("local");
    conf.setAppName("test");
    conf.set(EXECUTOR_PLUGIN_CONF_NAME, pluginNames);
    return conf;
}
// Fix: sparkConf (with the app name applied) was built but never handed to the
// context — the no-arg JavaSparkContext constructor ignores it, silently
// dropping the configuration. Pass it explicitly.
SparkConf sparkConf = new SparkConf();
sparkConf.setAppName(sparkAppName);
JavaSparkContext sc = new JavaSparkContext(sparkConf);
/** Returns a "local"-master SparkConf named "test" carrying the plugin list. */
private SparkConf initializeSparkConf(String pluginNames) {
    final SparkConf conf = new SparkConf().setMaster("local").setAppName("test");
    return conf.set(EXECUTOR_PLUGIN_CONF_NAME, pluginNames);
}
protected final JavaStreamingContext buildStreamingContext() { log.info("Starting SparkContext with interval {} seconds", generationIntervalSec); SparkConf sparkConf = new SparkConf(); // Only for tests, really if (sparkConf.getOption("spark.master").isEmpty()) { log.info("Overriding master to {} for tests", streamingMaster); sparkConf.setMaster(streamingMaster); } // Only for tests, really if (sparkConf.getOption("spark.app.name").isEmpty()) { String appName = "Oryx" + getLayerName(); if (id != null) { appName = appName + "-" + id; } log.info("Overriding app name to {} for tests", appName); sparkConf.setAppName(appName); } extraSparkConfig.forEach((key, value) -> sparkConf.setIfMissing(key, value.toString())); // Turn this down to prevent long blocking at shutdown sparkConf.setIfMissing( "spark.streaming.gracefulStopTimeout", Long.toString(TimeUnit.MILLISECONDS.convert(generationIntervalSec, TimeUnit.SECONDS))); sparkConf.setIfMissing("spark.cleaner.ttl", Integer.toString(20 * generationIntervalSec)); long generationIntervalMS = TimeUnit.MILLISECONDS.convert(generationIntervalSec, TimeUnit.SECONDS); JavaSparkContext jsc = JavaSparkContext.fromSparkContext(SparkContext.getOrCreate(sparkConf)); return new JavaStreamingContext(jsc, new Duration(generationIntervalMS)); }
SparkConf conf = new SparkConf().setMaster(master).setAppName("basicavgwithkyro"); conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); conf.set("spark.kryo.registrator", AvgRegistrator.class.getName());
// NOTE(review): snippet is truncated — the if block opened here is not closed
// within this view, and both setAppName(...) and the JavaSparkContext creation
// sit inside the useSparkLocal branch. Presumably they belong after the branch
// so non-local runs also get an app name and a context — TODO confirm upstream.
SparkConf sparkConf = new SparkConf();
if (useSparkLocal) {
    sparkConf.setMaster("local[*]");
    log.info("Using Spark Local");
    sparkConf.setAppName("DL4J Spark Stats Example");
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
// Driver entry point for the JavaBookExample job. Snippet is truncated — the
// method body continues past this view, so the closing brace is not visible.
public static void main(String[] args) {
    SparkConf sparkConf = new SparkConf().setAppName("JavaBookExample");
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
// NOTE(review): dumping all of System.getProperties() to stdout can leak
// sensitive values (e.g. credentials passed as -D flags) into logs — confirm
// this debug line is intended to ship.
System.out.println("Properties: " + System.getProperties());
SparkConf sparkConf = new SparkConf().setAppName("GroupActionsJob");
SparkConf sparkConf = new SparkConf();
sparkConf.setAppName(sparkAppName);
// Fix: the configured SparkConf was never passed to the context, so the app
// name set above was silently dropped by the no-arg constructor.
JavaSparkContext sc = new JavaSparkContext(sparkConf);
// Total worker count across the cluster.
int numWorkers = this.numNodes * this.numWorkersPerNode;
SparkConf conf = new SparkConf(); conf.setMaster("local[*]"); conf.setAppName("DataVec Example");
/** One-time test fixture: obtains (or reuses) a local[*] Spark context. */
@BeforeClass
public static void setUp() {
    SparkConf sparkConf = new SparkConf();
    sparkConf.setMaster("local[*]");
    sparkConf.setAppName("SparkIT");
    javaSparkContext = JavaSparkContext.fromSparkContext(SparkContext.getOrCreate(sparkConf));
}
// NOTE(review): snippet is truncated — the leading class literals close an
// array initializer whose start is outside this view.
KeyValue.class, RowKeyWritable.class };
SparkConf conf = new SparkConf().setAppName("Converting HFile for:" + cubeName + " segment " + segmentId);
SparkConf conf = new SparkConf(); conf.setMaster("local[*]"); conf.setAppName("DataVec Join Example"); JavaSparkContext sc = new JavaSparkContext(conf);
SparkConf conf = new SparkConf(); conf.setMaster("local[*]"); conf.setAppName("DataVec Example");
// NOTE(review): snippet is truncated — ".build()" terminates a builder chain
// that begins outside this view.
.build();
SparkConf conf = new SparkConf();
conf.setMaster("local[*]");
conf.setAppName("DataVec Example");