@Override public void write(@NonNull final JavaRDD<AvroPayload> data) { final Configuration hadoopConf = this.conf.getHadoopConf();
@Override public void write(@NonNull final JavaRDD<AvroPayload> data) { final Configuration hadoopConf = this.conf.getHadoopConf();
@Test(expected = MissingPropertyException.class)
public void testMissingDataCenter() {
    // Build a configuration with the datacenter property removed; resolving the
    // Hadoop configuration from it must raise MissingPropertyException.
    final Configuration withoutDataCenter = getConfig(CassandraSinkConfiguration.DATACENTER);
    final CassandraSinkConfiguration sinkConf = new CassandraSinkConfiguration(withoutDataCenter);
    sinkConf.getHadoopConf();
    // Only reachable if the expected exception was not thrown.
    Assert.fail();
}
@Test
public void testSetHadoopSparkProperties() {
    // Spark settings written under HADOOP_COMMON_PREFIX should surface in the
    // resulting Hadoop configuration with the prefix stripped and values intact.
    final String executorMemKey = "spark.executor.memory";
    final String driverMemKey = "spark.driver.memory";
    final String networkTimeoutKey = "spark.network.timeout";
    final String fetchTimeoutKey = "spark.files.fetchTimeout";
    final String numExecutorsKey = "spark.executor.instances";

    // Seed the raw configuration with prefixed copies of each property.
    final Configuration rawConf = getConfig(StringTypes.EMPTY);
    rawConf.setProperty(CassandraSinkConfiguration.HADOOP_COMMON_PREFIX + executorMemKey, "6g");
    rawConf.setProperty(CassandraSinkConfiguration.HADOOP_COMMON_PREFIX + driverMemKey, "7g");
    rawConf.setProperty(CassandraSinkConfiguration.HADOOP_COMMON_PREFIX + networkTimeoutKey, "1234s");
    rawConf.setProperty(CassandraSinkConfiguration.HADOOP_COMMON_PREFIX + fetchTimeoutKey, "1111s");
    rawConf.setProperty(CassandraSinkConfiguration.HADOOP_COMMON_PREFIX + numExecutorsKey, "32");

    final CassandraSinkConfiguration sinkConf = new CassandraSinkConfiguration(rawConf);
    final org.apache.hadoop.conf.Configuration hadoopConf = sinkConf.getHadoopConf();

    // Each value must be retrievable under the unprefixed key.
    Assert.assertEquals("6g", hadoopConf.get(executorMemKey));
    Assert.assertEquals("7g", hadoopConf.get(driverMemKey));
    Assert.assertEquals("1234s", hadoopConf.get(networkTimeoutKey));
    Assert.assertEquals("1111s", hadoopConf.get(fetchTimeoutKey));
    Assert.assertEquals("32", hadoopConf.get(numExecutorsKey));
}