/**
 * Returns the value of {@code property}, or {@code defaultValue} when the key is absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return the stored value, or the supplied default
 */
public String getString(String property, String defaultValue) {
  if (containsKey(property)) {
    return getProperty(property);
  }
  return defaultValue;
}
/**
 * Builds a Kafka source: copies every supplied property into the Kafka client
 * parameter map and resolves the mandatory topic name.
 *
 * @param props          configuration; must contain {@code Config.KAFKA_TOPIC_NAME}
 * @param sparkContext   Spark context handed to the superclass
 * @param schemaProvider schema provider handed to the superclass
 */
public KafkaSource(TypedProperties props, JavaSparkContext sparkContext, SchemaProvider schemaProvider) {
  super(props, sparkContext, schemaProvider);
  kafkaParams = new HashMap<>();
  // Pass every property through to the Kafka client config as strings.
  for (Object prop : props.keySet()) {
    kafkaParams.put(prop.toString(), props.getString(prop.toString()));
  }
  // Single required key: use singletonList, consistent with the sibling constructor variant.
  DataSourceUtils.checkRequiredProperties(props, Collections.singletonList(Config.KAFKA_TOPIC_NAME));
  topicName = props.getString(Config.KAFKA_TOPIC_NAME);
}
/**
 * Loads key/value pairs from the given stream into a fresh {@link TypedProperties}.
 *
 * @param in stream in {@code java.util.Properties} format; caller closes it
 * @return the populated properties object
 * @throws IOException if reading from the stream fails
 */
public static TypedProperties readConfig(InputStream in) throws IOException {
  TypedProperties config = new TypedProperties();
  config.load(in);
  return config;
}
}
/**
 * Returns the value of {@code property}.
 *
 * @param property key to look up; must be present
 * @return the stored value
 * @throws IllegalArgumentException if the key is absent (via {@code checkKey})
 */
public String getString(String property) {
  checkKey(property);
  return getProperty(property);
}
@Test
public void testIncludes() {
  // t3.props pulls values in via include chains; verify the merged result.
  Path includingFile = new Path(dfsBasePath + "/t3.props");
  TypedProperties props = new DFSPropertiesConfiguration(dfs, includingFile).getConfig();
  assertEquals(123, props.getInteger("int.prop"));
  assertEquals(243.4, props.getDouble("double.prop"), 0.001);
  assertEquals(true, props.getBoolean("boolean.prop"));
  assertEquals("t3.value", props.getString("string.prop"));
  assertEquals(1354354354, props.getLong("long.prop"));
  // A file that includes itself must be rejected.
  try {
    new DFSPropertiesConfiguration(dfs, new Path(dfsBasePath + "/t4.props"));
    fail("Should error out on a self-included file.");
  } catch (IllegalStateException ignored) {
    // expected: self-inclusion is an error
  }
}
}
public FilebasedSchemaProvider(TypedProperties props, JavaSparkContext jssc) { super(props, jssc); DataSourceUtils.checkRequiredProperties(props, Collections.singletonList(Config.SOURCE_SCHEMA_FILE_PROP)); this.fs = FSUtils.getFs(props.getString(Config.SOURCE_SCHEMA_FILE_PROP), jssc.hadoopConfiguration()); try { this.sourceSchema = new Schema.Parser().parse( fs.open(new Path(props.getString(Config.SOURCE_SCHEMA_FILE_PROP)))); if (props.containsKey(Config.TARGET_SCHEMA_FILE_PROP)) { this.targetSchema = new Schema.Parser().parse( fs.open(new Path(props.getString(Config.TARGET_SCHEMA_FILE_PROP)))); } } catch (IOException ioe) { throw new HoodieIOException("Error reading schema", ioe); } }
/**
 * Validates that {@code property} exists, failing fast otherwise.
 *
 * @param property key that must be present
 * @throws IllegalArgumentException if the key is absent
 */
private void checkKey(String property) {
  if (containsKey(property)) {
    return;
  }
  throw new IllegalArgumentException("Property " + property + " not found");
}
/**
 * Convenience constructor: parses {@code rootFile} starting from an empty
 * set of default properties.
 *
 * @param fs       filesystem used to read the configuration file
 * @param rootFile root properties file to parse
 */
public DFSPropertiesConfiguration(FileSystem fs, Path rootFile) {
  this(fs, rootFile, new TypedProperties());
}
@Test
public void testParsing() throws IOException {
  DFSPropertiesConfiguration configuration =
      new DFSPropertiesConfiguration(dfs, new Path(dfsBasePath + "/t1.props"));
  TypedProperties props = configuration.getConfig();
  assertEquals(5, props.size());
  // A missing key without a default must throw.
  try {
    props.getString("invalid.key");
    fail("Should error out here.");
  } catch (IllegalArgumentException ignored) {
    // expected: key is absent
  }
  // Typed getters on present keys.
  assertEquals(123, props.getInteger("int.prop"));
  assertEquals(113.4, props.getDouble("double.prop"), 0.001);
  assertEquals(true, props.getBoolean("boolean.prop"));
  assertEquals("str", props.getString("string.prop"));
  assertEquals(1354354354, props.getLong("long.prop"));
  // Present keys win over supplied defaults.
  assertEquals(123, props.getInteger("int.prop", 456));
  assertEquals(113.4, props.getDouble("double.prop", 223.4), 0.001);
  assertEquals(true, props.getBoolean("boolean.prop", false));
  assertEquals("str", props.getString("string.prop", "default"));
  assertEquals(1354354354, props.getLong("long.prop", 8578494434L));
  // Absent keys fall back to the supplied defaults.
  assertEquals(456, props.getInteger("bad.int.prop", 456));
  assertEquals(223.4, props.getDouble("bad.double.prop", 223.4), 0.001);
  assertEquals(false, props.getBoolean("bad.boolean.prop", false));
  assertEquals("default", props.getString("bad.string.prop", "default"));
  assertEquals(8578494434L, props.getLong("bad.long.prop", 8578494434L));
}
/**
 * Reads the source Avro schema (and, when configured, the target schema)
 * from files resolved through a Hadoop {@code FileSystem}.
 *
 * @param props configuration; must contain {@code Config.SOURCE_SCHEMA_FILE_PROP}
 * @param jssc  Spark context providing the Hadoop configuration
 * @throws HoodieIOException if either schema file cannot be read
 */
public FilebasedSchemaProvider(TypedProperties props, JavaSparkContext jssc) {
  super(props, jssc);
  // Single required key: singletonList, consistent with the sibling constructor variant.
  DataSourceUtils.checkRequiredProperties(props, Collections.singletonList(Config.SOURCE_SCHEMA_FILE_PROP));
  this.fs = FSUtils.getFs(props.getString(Config.SOURCE_SCHEMA_FILE_PROP), jssc.hadoopConfiguration());
  try {
    this.sourceSchema = new Schema.Parser().parse(
        fs.open(new Path(props.getString(Config.SOURCE_SCHEMA_FILE_PROP))));
    // Target schema is optional; only parse it when configured.
    if (props.containsKey(Config.TARGET_SCHEMA_FILE_PROP)) {
      this.targetSchema = new Schema.Parser().parse(
          fs.open(new Path(props.getString(Config.TARGET_SCHEMA_FILE_PROP))));
    }
  } catch (IOException ioe) {
    throw new HoodieIOException("Error reading schema", ioe);
  }
}
/**
 * Verifies that every name in {@code checkPropNames} is present in {@code props}.
 *
 * @param props          properties to inspect
 * @param checkPropNames required property names
 * @throws HoodieNotSupportedException if any required property is missing
 */
public static void checkRequiredProperties(TypedProperties props, List<String> checkPropNames) {
  // Iterable.forEach directly — opening a stream just to iterate is redundant.
  checkPropNames.forEach(prop -> {
    if (!props.containsKey(prop)) {
      throw new HoodieNotSupportedException("Required property " + prop + " is missing");
    }
  });
}
/**
 * Returns the value of {@code property} parsed as a boolean.
 *
 * @param property key to look up; must be present
 * @return {@code true} iff the stored value equals "true" (case-insensitive)
 * @throws IllegalArgumentException if the key is absent (via {@code checkKey})
 */
public boolean getBoolean(String property) {
  checkKey(property);
  // parseBoolean returns the primitive directly; valueOf would box then unbox.
  return Boolean.parseBoolean(getProperty(property));
}
/**
 * Key generator that formats a timestamp field for partitioning.
 * Requires the timestamp type and output date format; additionally requires an
 * input date format when the type is DATE_STRING or MIXED.
 *
 * @param config configuration; must contain {@code Config.TIMESTAMP_TYPE_FIELD_PROP}
 *               and {@code Config.TIMESTAMP_OUTPUT_DATE_FORMAT_PROP}
 */
public TimestampBasedKeyGenerator(TypedProperties config) {
  super(config);
  DataSourceUtils.checkRequiredProperties(config,
      Arrays.asList(Config.TIMESTAMP_TYPE_FIELD_PROP, Config.TIMESTAMP_OUTPUT_DATE_FORMAT_PROP));
  this.timestampType = TimestampType.valueOf(config.getString(Config.TIMESTAMP_TYPE_FIELD_PROP));
  this.outputDateFormat = config.getString(Config.TIMESTAMP_OUTPUT_DATE_FORMAT_PROP);
  if (timestampType == TimestampType.DATE_STRING || timestampType == TimestampType.MIXED) {
    // Single required key: singletonList, consistent with other single-key checks in this file.
    DataSourceUtils.checkRequiredProperties(config,
        Collections.singletonList(Config.TIMESTAMP_INPUT_DATE_FORMAT_PROP));
    // NOTE(review): SimpleDateFormat is not thread-safe — confirm this generator
    // instance is never shared across threads before reuse.
    this.inputDateFormat = new SimpleDateFormat(config.getString(Config.TIMESTAMP_INPUT_DATE_FORMAT_PROP));
    // Input timestamps are interpreted in GMT regardless of JVM default zone.
    this.inputDateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
  }
}
/**
 * Returns the value of {@code property} parsed as a boolean, or
 * {@code defaultValue} when the key is absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return the parsed value, or the supplied default
 */
public boolean getBoolean(String property, boolean defaultValue) {
  // parseBoolean returns the primitive directly; valueOf would box then unbox.
  return containsKey(property) ? Boolean.parseBoolean(getProperty(property)) : defaultValue;
}
/**
 * Builds a Kafka source: copies every supplied property into the Kafka client
 * parameter map and resolves the mandatory topic name.
 *
 * @param props          configuration; must contain {@code Config.KAFKA_TOPIC_NAME}
 * @param sparkContext   Spark context handed to the superclass
 * @param schemaProvider schema provider handed to the superclass
 */
public KafkaSource(TypedProperties props, JavaSparkContext sparkContext, SchemaProvider schemaProvider) {
  super(props, sparkContext, schemaProvider);
  kafkaParams = new HashMap<>();
  // Every property is passed through to the Kafka client config as a string.
  for (Object key : props.keySet()) {
    String name = key.toString();
    kafkaParams.put(name, props.getString(name));
  }
  DataSourceUtils.checkRequiredProperties(props, Collections.singletonList(Config.KAFKA_TOPIC_NAME));
  topicName = props.getString(Config.KAFKA_TOPIC_NAME);
}
/**
 * Verifies that every name in {@code checkPropNames} is present in {@code props}.
 *
 * @param props          properties to inspect
 * @param checkPropNames required property names
 * @throws HoodieNotSupportedException if any required property is missing
 */
public static void checkRequiredProperties(TypedProperties props, List<String> checkPropNames) {
  // Iterable.forEach directly — opening a stream just to iterate is redundant.
  checkPropNames.forEach(prop -> {
    if (!props.containsKey(prop)) {
      throw new HoodieNotSupportedException("Required property " + prop + " is missing");
    }
  });
}
/**
 * Returns the value of {@code property} parsed as a long.
 *
 * @param property key to look up; must be present
 * @return the parsed long value
 * @throws IllegalArgumentException if the key is absent (via {@code checkKey})
 * @throws NumberFormatException    if the stored value is not a valid long
 */
public long getLong(String property) {
  checkKey(property);
  // parseLong returns the primitive directly; valueOf would box then unbox.
  return Long.parseLong(getProperty(property));
}
/**
 * Loads key/value pairs from the given stream into a fresh {@link TypedProperties}.
 *
 * @param in stream in {@code java.util.Properties} format; caller closes it
 * @return the populated properties object
 * @throws IOException if reading from the stream fails
 */
public static TypedProperties readConfig(InputStream in) throws IOException {
  TypedProperties loaded = new TypedProperties();
  loaded.load(in);
  return loaded;
}
}