/**
 * Lazily initializes the shared {@code HoodieCLI.conf} Hadoop configuration.
 *
 * @return {@code true} if this call created the configuration, {@code false} if it already existed
 */
public static boolean initConf() {
  // Guard clause: nothing to do when the configuration is already present.
  if (HoodieCLI.conf != null) {
    return false;
  }
  HoodieCLI.conf = FSUtils.prepareHadoopConf(new Configuration());
  return true;
}
/**
 * Resolves the {@link FileSystem} instance backing the given path.
 *
 * @param path path whose scheme/authority selects the file system
 * @param conf Hadoop configuration; Hoodie env-var overrides are applied first
 * @return the file system serving {@code path}
 * @throws HoodieIOException if the file system cannot be instantiated
 */
public static FileSystem getFs(String path, Configuration conf) {
  conf = prepareHadoopConf(conf);
  final FileSystem fs;
  try {
    fs = new Path(path).getFileSystem(conf);
  } catch (IOException e) {
    // Wrap the checked IOException in Hoodie's runtime exception, preserving the cause.
    throw new HoodieIOException("Failed to get instance of " + FileSystem.class.getName(), e);
  }
  LOG.info(String.format("Hadoop Configuration: fs.defaultFS: [%s], Config:[%s], FileSystem: [%s]",
      conf.getRaw("fs.defaultFS"), conf.toString(), fs.toString()));
  return fs;
}
/**
 * Verifies that HOODIE_ENV_* environment variables are translated into Hadoop
 * configuration keys (with {@code _DOT_} mapped to {@code .}), and that explicit
 * {@code set} calls afterwards take precedence over the env-derived values.
 */
@Test
public void testEnvVarVariablesPickedup() {
  environmentVariables.set("HOODIE_ENV_fs_DOT_key1", "value1");
  Configuration hadoopConf = FSUtils.prepareHadoopConf(HoodieTestUtils.getDefaultHadoopConf());
  // Env var HOODIE_ENV_fs_DOT_key1 should surface as fs.key1.
  assertEquals("value1", hadoopConf.get("fs.key1"));
  // Explicit sets override the env-derived value and can add new keys.
  hadoopConf.set("fs.key1", "value11");
  hadoopConf.set("fs.key2", "value2");
  assertEquals("value11", hadoopConf.get("fs.key1"));
  assertEquals("value2", hadoopConf.get("fs.key2"));
}
}
public static JavaSparkContext initJavaSparkConf(String name) { SparkConf sparkConf = new SparkConf().setAppName(name); String defMasterFromEnv = sparkConf.get("spark.master"); if ((null == defMasterFromEnv) || (defMasterFromEnv.isEmpty())) { sparkConf.setMaster(DEFUALT_SPARK_MASTER); } sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); sparkConf.set("spark.driver.maxResultSize", "2g"); sparkConf.set("spark.eventLog.overwrite", "true"); sparkConf.set("spark.eventLog.enabled", "true"); // Configure hadoop conf sparkConf.set("spark.hadoop.mapred.output.compress", "true"); sparkConf.set("spark.hadoop.mapred.output.compression.codec", "true"); sparkConf.set("spark.hadoop.mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); sparkConf.set("spark.hadoop.mapred.output.compression.type", "BLOCK"); sparkConf = HoodieWriteClient.registerClasses(sparkConf); JavaSparkContext jsc = new JavaSparkContext(sparkConf); jsc.hadoopConfiguration().setBoolean("parquet.enable.summary-metadata", false); FSUtils.prepareHadoopConf(jsc.hadoopConfiguration()); return jsc; } }