private static void initializeStreamingJob() {
    // Read the batch interval from configuration and build a streaming context
    // on top of the existing SparkSession's SparkContext.
    int batchMilliseconds = INSTANCE.config.getInt(BATCH_MILLISECONDS_PROPERTY);
    final Duration batchDuration = Durations.milliseconds(batchMilliseconds);
    JavaStreamingContext jsc = new JavaStreamingContext(
        new JavaSparkContext(getSparkSession().sparkContext()), batchDuration);
    INSTANCE.jsc = jsc;
}
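A context initialized this way still has to be started and awaited on the driver. A minimal sketch, assuming a helper named runStreamingJob (the helper name is illustrative, not from the snippet above; start() and awaitTermination() are the standard JavaStreamingContext API):

import org.apache.spark.streaming.api.java.JavaStreamingContext;

// Hypothetical driver loop: start the context built by initializeStreamingJob()
// and block until the job is stopped externally or fails.
static void runStreamingJob(JavaStreamingContext jsc) throws InterruptedException {
    jsc.start();            // begin scheduling micro-batches at the configured interval
    jsc.awaitTermination(); // block until stop() is called or an error occurs
}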
@Before
public void setUp() {
    kafkaTestUtils = new KafkaTestUtils();
    kafkaTestUtils.setup();
    SparkConf sparkConf = new SparkConf()
        .setMaster("local[4]").setAppName(this.getClass().getSimpleName());
    ssc = new JavaStreamingContext(sparkConf, Durations.milliseconds(200));
}
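A setup like this implies a matching teardown so the local context and the Kafka test fixture are released between tests. A minimal sketch, assuming the same ssc and kafkaTestUtils fields and that the test utility exposes a teardown() counterpart to setup():

@After
public void tearDown() {
    if (ssc != null) {
        ssc.stop();              // stops streaming and the underlying SparkContext
        ssc = null;
    }
    if (kafkaTestUtils != null) {
        kafkaTestUtils.teardown();
        kafkaTestUtils = null;
    }
}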
@Test
public void testMilliseconds() {
    Assert.assertEquals(new Duration(100), Durations.milliseconds(100));
}
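The same pattern extends to the other Durations factory methods, which are defined in terms of milliseconds. A minimal sketch of companion tests (testSeconds and testMinutes are illustrative names, not from the snippet above; Durations.seconds and Durations.minutes are the standard API):

@Test
public void testSeconds() {
    // Durations.seconds(n) is equivalent to a Duration of n * 1000 ms.
    Assert.assertEquals(new Duration(30 * 1000), Durations.seconds(30));
}

@Test
public void testMinutes() {
    // Durations.minutes(n) is equivalent to a Duration of n * 60 * 1000 ms.
    Assert.assertEquals(new Duration(2 * 60 * 1000), Durations.minutes(2));
}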
@Override
public JavaStreamingContext call() throws Exception {
    // Build the streaming context with the batch interval taken from the pipeline spec.
    JavaStreamingContext jssc = new JavaStreamingContext(
        new JavaSparkContext(),
        Durations.milliseconds(pipelineSpec.getBatchIntervalMillis()));
    SparkStreamingPipelineRunner runner = new SparkStreamingPipelineRunner(
        sec, jssc, pipelineSpec, pipelineSpec.isCheckpointsDisabled());
    PipelinePluginContext pluginContext = new PipelinePluginContext(
        sec.getPluginContext(), sec.getMetrics(),
        pipelineSpec.isStageLoggingEnabled(), pipelineSpec.isProcessTimingEnabled());
    // TODO: figure out how to get partitions to use for aggregators and joiners.
    // Seems like they should be set at configure time instead of runtime, but that requires an API change.
    try {
        runner.runPipeline(pipelinePhase, StreamingSource.PLUGIN_TYPE, sec,
                           new HashMap<String, Integer>(), pluginContext,
                           new HashMap<String, StageStatisticsCollector>());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    // Enable checkpointing only when a checkpoint directory was supplied.
    if (checkpointDir != null) {
        jssc.checkpoint(checkpointDir);
    }
    return jssc;
}
};
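A call() like this acts as a context factory, which is the shape Spark expects for checkpoint-based recovery. A minimal sketch of how such a factory is commonly wired up, assuming a Function0<JavaStreamingContext> named contextFactory wrapping the method above (getOrCreate is the standard JavaStreamingContext API; the surrounding names are illustrative):

import org.apache.spark.api.java.function.Function0;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

static JavaStreamingContext startPipeline(
        String checkpointDir,
        Function0<JavaStreamingContext> contextFactory) throws Exception {
    // Recover the context from the checkpoint directory if one exists;
    // otherwise invoke the factory to build a fresh context.
    JavaStreamingContext jssc = (checkpointDir != null)
        ? JavaStreamingContext.getOrCreate(checkpointDir, contextFactory)
        : contextFactory.call();
    jssc.start();
    return jssc;
}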