void load(Properties properties) {
    if (properties.containsKey(PURGE_ENABLED_KEY)) {
        purgeEnable = Boolean.parseBoolean(properties.getProperty(PURGE_ENABLED_KEY));
    } else {
        purgeEnable = true;
    }
    if (purgeEnable && properties.containsKey(PURGE_PERIOD_SECONDS_KEY)) {
        try {
            purgePeriod = Integer.parseInt(properties.getProperty(PURGE_PERIOD_SECONDS_KEY));
        } catch (NumberFormatException ex) {
            purgePeriod = 1;
        }
    } else {
        purgePeriod = 1;
    }
}
@Override
public void checkProperties(Properties properties) {
    if (!properties.containsKey("log4j.shutdownCallbackRegistry")) {
        properties.setProperty("log4j.shutdownCallbackRegistry", "org.apache.druid.common.config.Log4jShutdown");
    }
    if (!properties.containsKey("log4j.shutdownHookEnabled")) {
        properties.setProperty("log4j.shutdownHookEnabled", "true");
    }
}
public String getProperties() {
    Properties properties = new Properties();
    properties.putAll(connectProperties);
    if (properties.containsKey("password")) {
        properties.put("password", "******");
    }
    return properties.toString();
}
public static Properties getCredentials(Properties securityProps) {
    Properties credentials = null;
    if (securityProps.containsKey(USER_NAME) && securityProps.containsKey(PASSWORD)) {
        credentials = new Properties();
        credentials.setProperty(USER_NAME, securityProps.getProperty(USER_NAME));
        credentials.setProperty(PASSWORD, securityProps.getProperty(PASSWORD));
    }
    return credentials;
}
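// A minimal usage sketch for getCredentials (the "admin"/"secret" values are illustrative;
// USER_NAME and PASSWORD are the key constants from the enclosing class, not shown above).
Properties securityProps = new Properties();
securityProps.setProperty(USER_NAME, "admin");
securityProps.setProperty(PASSWORD, "secret");
Properties credentials = getCredentials(securityProps); // non-null only when both keys are present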
// Fragment from a version-metadata scanner. The snippet was truncated: the resource-loading
// loop, the surrounding for loop over artifactIds, and the destination map are reconstructed
// here as a sketch; only the containsKey guard and the Version constructor arguments appear
// verbatim in the original.
Properties props = new Properties();
try {
    Enumeration<URL> resources = classLoader.getResources("META-INF/io.netty.versions.properties");
    while (resources.hasMoreElements()) {
        try (InputStream in = resources.nextElement().openStream()) {
            props.load(in);
        }
    }
} catch (IOException ignore) {
    // version metadata is optional; ignore missing or unreadable descriptors
}

for (String artifactId : artifactIds) {
    // Skip artifacts whose descriptor is incomplete.
    if (!props.containsKey(artifactId + PROP_VERSION)
            || !props.containsKey(artifactId + PROP_BUILD_DATE)
            || !props.containsKey(artifactId + PROP_COMMIT_DATE)
            || !props.containsKey(artifactId + PROP_SHORT_COMMIT_HASH)
            || !props.containsKey(artifactId + PROP_LONG_COMMIT_HASH)
            || !props.containsKey(artifactId + PROP_REPO_STATUS)) {
        continue;
    }
    versions.put(artifactId, new Version(
            artifactId,
            props.getProperty(artifactId + PROP_VERSION),
            parseIso8601(props.getProperty(artifactId + PROP_BUILD_DATE)),
            parseIso8601(props.getProperty(artifactId + PROP_COMMIT_DATE)),
            props.getProperty(artifactId + PROP_SHORT_COMMIT_HASH),
            props.getProperty(artifactId + PROP_LONG_COMMIT_HASH),
            props.getProperty(artifactId + PROP_REPO_STATUS)));
}
public static Properties noClobberWriteProperties(Properties bp, Properties ovp) {
    for (String propertyName : ovp.stringPropertyNames()) {
        if (bp.containsKey(propertyName)) {
            continue;
        }
        bp.setProperty(propertyName, ovp.getProperty(propertyName));
    }
    return bp;
}
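// A minimal usage sketch: keys already present in the base properties win, new keys are copied in.
Properties base = new Properties();
base.setProperty("timeout", "30");
Properties overrides = new Properties();
overrides.setProperty("timeout", "60"); // ignored: "timeout" already exists in base
overrides.setProperty("retries", "3");  // copied: key is new
noClobberWriteProperties(base, overrides);
// base now holds timeout=30, retries=3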
@SuppressWarnings("OptionalUsedAsFieldOrParameterType") private static Properties loadProperties(final Optional<String> propertiesFile) { final Properties properties = new Properties(); if (!propertiesFile.isPresent()) { return properties; } try (InputStream input = new FileInputStream(propertiesFile.get())) { properties.load(input); if (properties.containsKey(KsqlConfig.KSQL_SERVICE_ID_CONFIG)) { properties.put( StreamsConfig.APPLICATION_ID_CONFIG, properties.getProperty(KsqlConfig.KSQL_SERVICE_ID_CONFIG) ); } return properties; } catch (final IOException e) { throw new KsqlException("failed to load properties file: " + propertiesFile.get(), e); } } }
/**
 * Store all properties, those local and also those in parent props.
 *
 * @param out
 *            The stream to write to
 * @throws IOException
 *             If there is an error writing
 */
public void storeFlattened(OutputStream out) throws IOException {
    Properties p = new Properties();
    for (String key : keySet()) {
        if (!p.containsKey(key)) {
            p.setProperty(key, get(key));
        }
    }
    p.store(out, null);
}
private Properties mailProperties() {
    Properties props = new Properties();
    props.put("mail.from", from);
    if (!System.getProperties().containsKey("mail.smtp.connectiontimeout")) {
        props.put("mail.smtp.connectiontimeout", DEFAULT_TIMEOUT);
    }
    if (!System.getProperties().containsKey("mail.smtp.timeout")) {
        props.put("mail.smtp.timeout", DEFAULT_TIMEOUT);
    }
    if (System.getProperties().containsKey("mail.smtp.starttls.enable")) {
        props.put("mail.smtp.starttls.enable", "true");
    }
    String mailProtocol = tls ? "smtps" : "smtp";
    props.put("mail.transport.protocol", mailProtocol);
    return props;
}
/**
 * (non-Javadoc)
 *
 * @see org.geoserver.security.GeoServerRoleService#personalizeRoleParams(java.lang.String,
 *     java.util.Properties, java.lang.String, java.util.Properties)
 *     <p>Default implementation: if a user property name equals a role property name, take the
 *     value from the user property and use it for the role property.
 */
public Properties personalizeRoleParams(
        String roleName, Properties roleParams, String userName, Properties userProps)
        throws IOException {
    Properties props = new Properties();
    // personalized becomes true if the set is modified, i.e. common property names exist
    boolean personalized = false;
    for (Object key : roleParams.keySet()) {
        if (userProps.containsKey(key)) {
            props.put(key, userProps.get(key));
            personalized = true;
        } else {
            props.put(key, roleParams.get(key));
        }
    }
    return personalized ? props : null;
}
Assert.assertEquals("myInput", props.getProperty("input")); final String pathToFile = tmp.newFile().getAbsolutePath(); parameter.createPropertiesFile(pathToFile); final Properties defaultProps = new Properties(); try (FileInputStream fis = new FileInputStream(pathToFile)) { defaultProps.load(fis); Assert.assertEquals("myDefaultValue", defaultProps.get("output")); Assert.assertEquals("-1", defaultProps.get("expectedCount")); Assert.assertTrue(defaultProps.containsKey("input"));
public State(State otherState) {
    this.commonProperties = otherState.getCommonProperties();
    this.specProperties = new Properties();
    this.specProperties.putAll(otherState.getProperties());
    for (Object key : this.commonProperties.keySet()) {
        if (this.specProperties.containsKey(key)
                && this.commonProperties.get(key).equals(this.specProperties.get(key))) {
            this.specProperties.remove(key);
        }
    }
}
static Properties putIncludes(Properties props) {
    String includes = props.getProperty("includes");
    if (includes == null) {
        return props;
    }
    for (String include : COMMA.split(includes)) {
        final Properties p;
        try {
            p = propsFrom(include.trim());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        for (Object key : p.keySet()) {
            if (!props.containsKey(key)) {
                props.put(key, p.get(key));
            }
        }
    }
    return props;
}
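// A hypothetical usage sketch for putIncludes (the file names are illustrative; propsFrom and
// COMMA belong to the enclosing class and are assumed to load a properties file and split on commas).
Properties props = new Properties();
props.setProperty("includes", "base.properties, db.properties");
props.setProperty("db.url", "jdbc:h2:mem:test"); // local value, never overwritten by an include
props = putIncludes(props);
// keys from the included files are added only where props has no value yet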
@Override
public String getProperty(String name) {
    String propertyValue;
    if (orgPropertyNameValid(name)) {
        if (orgProperties.containsKey(name)) {
            // return from org-specific properties
            propertyValue = orgProperties.get(name);
        } else if (properties.containsKey(name)) {
            // return from properties file
            propertyValue = (String) properties.get(name);
        } else {
            // return the default
            propertyValue = defaultProperties.get(name);
        }
    } else {
        // not an org config item, return from properties
        propertyValue = properties.getProperty(name);
    }
    return !isBlank(propertyValue) ? propertyValue : null;
}
public static void main(String[] args) throws Exception {
    Properties props;
    if (args.length > 0) {
        props = StringUtils.argsToProperties(args);
    } else {
        props = new Properties();
    }
    if (!props.containsKey("dcoref.conll2011")) {
        log.info("-dcoref.conll2011 [input_CoNLL_corpus]: was not specified");
        return;
    }
    if (!props.containsKey("singleton.predictor.output")) {
        log.info("-singleton.predictor.output [output_model_file]: was not specified");
        return;
    }

    GeneralDataset<String, String> data = SingletonPredictor.generateFeatureVectors(props);
    LogisticClassifier<String, String> classifier = SingletonPredictor.train(data);
    SingletonPredictor.saveToSerialized(classifier, getPathSingletonPredictor(props));
}
/**
 * Remove a property if it exists.
 *
 * @param key property key
 */
public void removeProp(String key) {
    this.specProperties.remove(key);
    if (this.commonProperties.containsKey(key)) {
        // This case should not happen.
        Properties commonPropsCopy = new Properties();
        commonPropsCopy.putAll(this.commonProperties);
        commonPropsCopy.remove(key);
        this.commonProperties = commonPropsCopy;
    }
}
/**
 * Creates an instance of this class as well as the instance of the
 * corresponding Kafka {@link KafkaProducer} using provided Kafka
 * configuration properties.
 *
 * @param kafkaProperties
 *            instance of {@link Properties} used to bootstrap
 *            {@link KafkaProducer}
 */
KafkaPublisher(Properties kafkaProperties, int ackCheckSize, ComponentLog componentLog) {
    kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    this.kafkaProducer = new KafkaProducer<>(kafkaProperties);
    this.ackCheckSize = ackCheckSize;
    try {
        if (kafkaProperties.containsKey("partitioner.class")) {
            this.partitioner = (Partitioner) Class.forName(kafkaProperties.getProperty("partitioner.class")).newInstance();
        } else {
            this.partitioner = null;
        }
    } catch (Exception e) {
        throw new IllegalStateException("Failed to create partitioner", e);
    }
    this.componentLog = componentLog;
}
private void mergeIfExist(Properties from, Properties to) {
    for (Object key : from.keySet()) {
        if (!to.containsKey(key)) {
            continue;
        }
        Object fromObj = from.get(key);
        Object toObj = to.get(key);
        if (toObj != null && !toObj.equals(fromObj)) {
            log.info("Replace, key: {}, value: {} -> {}", key, toObj, fromObj);
        }
        to.put(key, fromObj);
    }
}
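// A minimal usage sketch for mergeIfExist: only keys that already exist in "to" are overwritten,
// and a replacement is logged when the value actually changes (key names and values are illustrative).
Properties defaults = new Properties();
defaults.setProperty("compression", "lz4");
defaults.setProperty("linger.ms", "5");
Properties overrides = new Properties();
overrides.setProperty("compression", "zstd");
overrides.setProperty("acks", "all"); // skipped: "acks" is absent from the target properties
mergeIfExist(overrides, defaults);
// defaults now holds compression=zstd (replacement logged), linger.ms=5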
/**
 * Appends the log-file property to the Properties object if the set of properties does not
 * already define the log-file property or the gemfire.agent.log-file property.
 * <p/>
 *
 * @param props the <code>Properties</code> to append the log-file property to if the property
 *              does not exist.
 */
private static void appendLogFileProperty(final Properties props) {
    if (!(props.containsKey(DistributedSystemConfig.LOG_FILE_NAME)
            || props.containsKey(SYSTEM_PROPERTY_PREFIX + DistributedSystemConfig.LOG_FILE_NAME))) {
        props.put(DistributedSystemConfig.LOG_FILE_NAME, DEFAULT_LOG_FILE);
    }
}
@Override
public IRichBolt getConsumer() {
    Preconditions.checkArgument(!props.isEmpty(),
            "Writable Kafka table " + topic + " must contain producer config");
    HashMap<String, Object> producerConfig = (HashMap<String, Object>) props.get(CONFIG_KEY_PRODUCER);
    props.putAll(producerConfig);
    Preconditions.checkState(!props.containsKey(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG),
            "Writable Kafka table " + topic
                    + " must not contain \"bootstrap.servers\" config, set it in the kafka URL instead");
    Preconditions.checkState(!props.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG),
            "Writable Kafka table " + topic + " must not contain "
                    + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG
                    + ", it will be hardcoded to be " + ByteBufferSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteBufferSerializer.class);
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    TupleToKafkaMapper<Object, ByteBuffer> mapper = new SqlKafkaMapper(serializer);
    return new KafkaBolt<Object, ByteBuffer>()
            .withTopicSelector(new DefaultTopicSelector(topic))
            .withProducerProperties(props)
            .withTupleToKafkaMapper(mapper);
}