/**
 * Reads the given file in java.util.Properties format and returns its entries
 * as a string-to-string map.
 *
 * @param file properties file to read
 * @return map view of the loaded properties
 * @throws IOException if the file cannot be opened or read
 */
public static Map<String, String> loadProperties(File file) throws IOException {
  Properties loaded = new Properties();
  // try-with-resources guarantees the stream is closed even if load() throws.
  try (InputStream stream = new FileInputStream(file)) {
    loaded.load(stream);
  }
  return fromProperties(loaded);
}
}
/**
 * Exposes the server's runtime properties as JSON, excluding every key that
 * {@code druidServerConfig} marks as hidden.
 *
 * @return the non-hidden properties as a string map
 */
@GET
@Path("/properties")
@ResourceFilters(ConfigResourceFilter.class)
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> getProperties() {
  Set<String> hiddenProperties = druidServerConfig.getHiddenProperties();
  Map<String, String> allProperties = Maps.fromProperties(properties);
  // Filter out each entry whose key is configured as hidden.
  return Maps.filterEntries(allProperties, (entry) -> !hiddenProperties.contains(entry.getKey()));
}
/**
 * Loads the given file as Java properties and returns them as a string map.
 *
 * @param file configuration file to read
 * @return all properties from the file as a string-to-string map
 * @throws FlowTriggerDependencyPluginException if the file cannot be read or parsed
 */
private Map<String, String> readConfig(final File file)
    throws FlowTriggerDependencyPluginException {
  final Properties props = new Properties();
  // try-with-resources replaces the previous manual close-in-finally
  // bookkeeping and closes the stream on every path. A failure while closing
  // is caught below like any other read failure and wrapped in the plugin
  // exception, instead of being logged and swallowed.
  try (final InputStream input = new BufferedInputStream(new FileInputStream(file))) {
    props.load(input);
  } catch (final Exception e) {
    logger.debug("unable to read the file " + file, e);
    throw new FlowTriggerDependencyPluginException(e);
  }
  return Maps.fromProperties(props);
}
public static Map<String,String> getHCatKeyHiveConf(JobConf conf) { try { Properties properties = null; if (! StringUtils.isBlank(conf.get(HCatConstants.HCAT_KEY_HIVE_CONF))) { properties = (Properties) HCatUtil.deserialize( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF)); LOG.info(HCatConstants.HCAT_KEY_HIVE_CONF + " is set. Using differences=" + properties); } else { LOG.info(HCatConstants.HCAT_KEY_HIVE_CONF + " not set. Generating configuration differences."); properties = getHiveSiteOverrides(conf); } // This method may not be safe as it can throw an NPE if a key or value is null. return Maps.fromProperties(properties); } catch (IOException e) { throw new IllegalStateException("Failed to deserialize hive conf", e); } }
Map<String, String> map = Maps.fromProperties(properties); for (Map.Entry<String, String> e : map.entrySet()) { Class<?> key;
@GwtIncompatible // Maps.fromProperties @SuppressWarnings("serial") // never serialized public void testFromPropertiesNullKey() { Properties properties = new Properties() { @Override public Enumeration<?> propertyNames() { return Iterators.asEnumeration(Arrays.asList(null, "first", "second").iterator()); } }; properties.setProperty("first", "true"); properties.setProperty("second", "null"); try { Maps.fromProperties(properties); fail(); } catch (NullPointerException expected) { } }
@GwtIncompatible // Maps.fromProperties @SuppressWarnings("serial") // never serialized public void testFromPropertiesNonStringKeys() { Properties properties = new Properties() { @Override public Enumeration<?> propertyNames() { return Iterators.asEnumeration( Arrays.<Object>asList(Integer.valueOf(123), "first").iterator()); } }; try { Maps.fromProperties(properties); fail(); } catch (ClassCastException expected) { } }
// Exercises Maps.fromProperties against a growing Properties instance.
Properties testProp = new Properties();
// An empty Properties yields an empty map.
Map<String, String> result = Maps.fromProperties(testProp);
assertTrue(result.isEmpty());
testProp.setProperty("first", "true");
result = Maps.fromProperties(testProp);
assertEquals("true", result.get("first"));
assertEquals(1, result.size());
testProp.setProperty("second", "null");
result = Maps.fromProperties(testProp);
assertEquals("true", result.get("first"));
// The String "null" is an ordinary value, not a null reference.
assertEquals("null", result.get("second"));
result = Maps.fromProperties(testProp);
// NOTE(review): only two properties have been set at this point, so a size of
// 4 looks wrong -- this chunk likely omits earlier setProperty calls (e.g.
// "third"/"fourth"); confirm against the full test before relying on it.
assertEquals(4, result.size());
assertEquals("true", result.get("first"));
// System properties always include java.version.
result = Maps.fromProperties(System.getProperties());
assertTrue(result.containsKey("java.version"));
result = Maps.fromProperties(testProp);
assertTrue(result.size() > 2);
// NOTE(review): no "test" key is set in this visible span; presumably an
// earlier testProp.setProperty("test", "") exists outside this chunk -- verify.
assertEquals("", result.get("test"));
/**
 * Reconstructs a FlowConfig from its serialized Properties form: the
 * well-known reserved keys become typed fields, and every remaining entry is
 * passed through as the config's property map.
 *
 * @param serialized serialized properties produced by the matching serializer
 * @return the deserialized flow configuration
 * @throws IOException if the serialized form cannot be parsed
 */
public static FlowConfig deserializeFlowConfig(String serialized) throws IOException {
  Properties props = PropertiesUtils.deserialize(serialized);

  FlowId flowId = new FlowId()
      .setFlowName(props.getProperty(FLOWCONFIG_ID_NAME))
      .setFlowGroup(props.getProperty(FLOWCONFIG_ID_GROUP));
  FlowConfig flowConfig = new FlowConfig().setId(flowId);

  if (props.containsKey(FLOWCONFIG_SCHEDULE_CRON)) {
    flowConfig.setSchedule(new Schedule()
        .setCronSchedule(props.getProperty(FLOWCONFIG_SCHEDULE_CRON))
        .setRunImmediately(Boolean.valueOf(props.getProperty(FLOWCONFIG_SCHEDULE_RUN_IMMEDIATELY))));
  }
  if (props.containsKey(FLOWCONFIG_TEMPLATEURIS)) {
    flowConfig.setTemplateUris(props.getProperty(FLOWCONFIG_TEMPLATEURIS));
  }

  // Strip the reserved keys so only user-level properties remain in the map.
  props.remove(FLOWCONFIG_ID_NAME);
  props.remove(FLOWCONFIG_ID_GROUP);
  props.remove(FLOWCONFIG_SCHEDULE_CRON);
  props.remove(FLOWCONFIG_SCHEDULE_RUN_IMMEDIATELY);
  props.remove(FLOWCONFIG_TEMPLATEURIS);

  flowConfig.setProperties(new StringMap(Maps.fromProperties(props)));
  return flowConfig;
}
}
/**
 * Starts an embedded Kafka broker backed by the given embedded ZooKeeper.
 * The default broker settings below may be overridden or extended by
 * {@code overrideProperties}.
 *
 * @param zookeeper running embedded ZooKeeper to connect to
 * @param overrideProperties extra broker settings layered over the defaults
 * @throws IOException if a free port or temp data directory cannot be obtained
 */
EmbeddedKafka(EmbeddedZookeeper zookeeper, Properties overrideProperties) throws IOException {
  this.zookeeper = requireNonNull(zookeeper, "zookeeper is null");
  requireNonNull(overrideProperties, "overrideProperties is null");

  this.port = findUnusedPort();
  this.kafkaDataDir = Files.createTempDir();

  ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
  builder.put("broker.id", "0");
  builder.put("host.name", "localhost");
  builder.put("num.partitions", "2");
  builder.put("log.flush.interval.messages", "10000");
  builder.put("log.flush.interval.ms", "1000");
  builder.put("log.retention.minutes", "60");
  builder.put("log.segment.bytes", "1048576");
  builder.put("auto.create.topics.enable", "false");
  builder.put("zookeeper.connection.timeout.ms", "1000000");
  builder.put("port", Integer.toString(port));
  builder.put("log.dirs", kafkaDataDir.getAbsolutePath());
  builder.put("zookeeper.connect", zookeeper.getConnectString());
  // Overrides go last; ImmutableMap.Builder rejects duplicate keys, so
  // overrides may only ADD settings, never replace a default above.
  builder.putAll(Maps.fromProperties(overrideProperties));
  Map<String, String> properties = builder.build();

  this.kafka = new KafkaServerStartable(new KafkaConfig(toProperties(properties)));
}
/**
 * Publishes a DELETE message for the given spec URI to Kafka.
 *
 * @param deletedSpecURI URI of the spec being deleted
 * @param headers extra properties carried along with the delete request
 * @return future tracking the asynchronous Kafka write
 */
@Override
public Future<?> deleteSpec(URI deletedSpecURI, Properties headers) {
  AvroJobSpec avroJobSpec = AvroJobSpec.newBuilder()
      .setUri(deletedSpecURI.toString())
      .setMetadata(ImmutableMap.of(VERB_KEY, SpecExecutor.Verb.DELETE.name()))
      .setProperties(Maps.fromProperties(headers))
      .build();
  log.info("Deleting Spec: " + deletedSpecURI + " using Kafka.");
  return getKafkaProducer().write(_serializer.serializeRecord(avroJobSpec), WriteCallback.EMPTY);
}
/**
 * Converts a JobSpec into its Avro wire representation, tagging it with the
 * given verb in the metadata.
 *
 * @param spec spec to convert; must be a JobSpec
 * @param verb operation to record in the message metadata
 * @return the equivalent AvroJobSpec
 * @throws RuntimeException if the spec is not a JobSpec
 */
private AvroJobSpec convertToAvroJobSpec(Spec spec, SpecExecutor.Verb verb) {
  // Guard clause: only JobSpec instances are supported.
  if (!(spec instanceof JobSpec)) {
    throw new RuntimeException("Unsupported spec type " + spec.getClass());
  }
  JobSpec jobSpec = (JobSpec) spec;

  AvroJobSpec.Builder builder = AvroJobSpec.newBuilder();
  builder.setUri(jobSpec.getUri().toString())
      .setVersion(jobSpec.getVersion())
      .setDescription(jobSpec.getDescription())
      .setProperties(Maps.fromProperties(jobSpec.getConfigAsProperties()))
      .setMetadata(ImmutableMap.of(VERB_KEY, verb.name()));
  if (jobSpec.getTemplateURI().isPresent()) {
    builder.setTemplateUri(jobSpec.getTemplateURI().get().toString());
  }
  return builder.build();
}
}
/**
 * CLI entry point for the mount command: with no positional arguments prints
 * the current mount table, otherwise mounts the given UFS path at the given
 * Alluxio path with the requested options.
 *
 * @param cl parsed command line
 * @return 0 on success
 * @throws AlluxioException if the mount operation fails
 * @throws IOException on I/O failure talking to the cluster
 */
@Override
public int run(CommandLine cl) throws AlluxioException, IOException {
  String[] args = cl.getArgs();
  if (args.length == 0) {
    // No positional args: list existing mount points instead of mounting.
    Map<String, MountPointInfo> mountTable = mFileSystem.getMountTable();
    UfsCommand.printMountInfo(mountTable);
    return 0;
  }
  // NOTE(review): args[1] assumes the CLI framework has already validated that
  // two positional arguments are present -- confirm against the command's
  // declared argument count.
  AlluxioURI alluxioPath = new AlluxioURI(args[0]);
  AlluxioURI ufsPath = new AlluxioURI(args[1]);

  MountPOptions.Builder options = MountPOptions.newBuilder();
  if (cl.hasOption(READONLY_OPTION.getLongOpt())) {
    options.setReadOnly(true);
  }
  if (cl.hasOption(SHARED_OPTION.getLongOpt())) {
    options.setShared(true);
  }
  if (cl.hasOption(OPTION_OPTION.getLongOpt())) {
    Properties mountProps = cl.getOptionProperties(OPTION_OPTION.getLongOpt());
    options.putAllProperties(Maps.fromProperties(mountProps));
  }

  mFileSystem.mount(alluxioPath, ufsPath, options.build());
  System.out.println("Mounted " + ufsPath + " at " + alluxioPath);
  return 0;
}
// Record the Parquet schema, then copy every table property into the job properties.
jobProperties.put(DataWritableWriteSupport.PARQUET_HIVE_SCHEMA, parquetSchema);
// NOTE(review): Maps.fromProperties throws NullPointerException on null
// keys/values and ClassCastException on non-String keys -- assumes
// tblProperties holds only non-null String entries; verify at the call site.
jobProperties.putAll(Maps.fromProperties(tblProperties));
// Copy all flow properties into the string map. Maps.fromProperties requires
// every key and value in flowProps to be a non-null String.
flowPropsAsStringMap.putAll(Maps.fromProperties(flowProps));
/** A FlowConfig with a template URI but no schedule round-trips correctly. */
public void testFlowConfigWithoutSchedule() {
  FlowId flowId = new FlowId()
      .setFlowName("SN_CRMSYNC")
      .setFlowGroup("DYNAMICS-USER-123456789");
  FlowConfig flowConfig = new FlowConfig().setId(flowId);
  flowConfig.setTemplateUris("FS:///my.template");

  Properties flowProps = new Properties();
  flowProps.put("gobblin.flow.sourceIdentifier", "dynamicsCrm");
  flowProps.put("gobblin.flow.destinationIdentifier", "espresso");
  flowConfig.setProperties(new StringMap(Maps.fromProperties(flowProps)));

  testFlowSpec(flowConfig);
  testSerDer(flowConfig);
}
/** A FlowConfig missing its template URI must be rejected by spec creation. */
public void testFlowConfigWithoutTemplateUri() {
  FlowId flowId = new FlowId()
      .setFlowName("SN_CRMSYNC")
      .setFlowGroup("DYNAMICS-USER-123456789");
  FlowConfig flowConfig = new FlowConfig().setId(flowId);
  flowConfig.setSchedule(new Schedule().setCronSchedule("0 58 2/12 ? * * *"));

  Properties flowProps = new Properties();
  flowProps.put("gobblin.flow.sourceIdentifier", "dynamicsCrm");
  flowProps.put("gobblin.flow.destinationIdentifier", "espresso");
  flowConfig.setProperties(new StringMap(Maps.fromProperties(flowProps)));

  try {
    FlowConfigResourceLocalHandler.createFlowSpecForConfig(flowConfig);
    Assert.fail("Should not get to here");
  } catch (RequiredFieldNotPresentException e) {
    // NOTE(review): assertTrue(true, ...) is vacuous; it only documents why
    // the exception is expected here.
    Assert.assertTrue(true, "templateUri cannot be empty");
  }
  testSerDer(flowConfig);
}
}
/** A fully-populated FlowConfig (schedule + template + properties) round-trips. */
public void testFullFlowConfig() {
  FlowId flowId = new FlowId()
      .setFlowName("SN_CRMSYNC")
      .setFlowGroup("DYNAMICS-USER-123456789");
  FlowConfig flowConfig = new FlowConfig().setId(flowId);
  flowConfig.setSchedule(new Schedule()
      .setCronSchedule("0 58 2/12 ? * * *")
      .setRunImmediately(Boolean.valueOf("true")));
  flowConfig.setTemplateUris("FS:///my.template");

  Properties flowProps = new Properties();
  flowProps.put("gobblin.flow.sourceIdentifier", "dynamicsCrm");
  flowProps.put("gobblin.flow.destinationIdentifier", "espresso");
  flowConfig.setProperties(new StringMap(Maps.fromProperties(flowProps)));

  testFlowSpec(flowConfig);
  testSerDer(flowConfig);
}
/** A FlowConfig whose schedule omits runImmediately still round-trips. */
public void testFlowConfigWithDefaultRunImmediately() {
  FlowId flowId = new FlowId()
      .setFlowName("SN_CRMSYNC")
      .setFlowGroup("DYNAMICS-USER-123456789");
  FlowConfig flowConfig = new FlowConfig().setId(flowId);
  flowConfig.setSchedule(new Schedule().setCronSchedule("0 58 2/12 ? * * *"));
  flowConfig.setTemplateUris("FS:///my.template");

  Properties flowProps = new Properties();
  flowProps.put("gobblin.flow.sourceIdentifier", "dynamicsCrm");
  flowProps.put("gobblin.flow.destinationIdentifier", "espresso");
  flowConfig.setProperties(new StringMap(Maps.fromProperties(flowProps)));

  testFlowSpec(flowConfig);
  testSerDer(flowConfig);
}
/**
 * Loads telemetry as a Dataset of strings from the configured input path,
 * lazily resolving the input format from the profiler properties on first use.
 *
 * @param profilerProps profiler settings (input path and, lazily, input format)
 * @param readerProps passed through verbatim as Spark reader options
 * @return the telemetry records as strings
 */
@Override
public Dataset<String> read(SparkSession spark, Properties profilerProps, Properties readerProps) {
  String inputPath = TELEMETRY_INPUT_PATH.get(profilerProps, String.class);
  if (inputFormat == null) {
    // Resolve the format once and cache it on the instance.
    inputFormat = TELEMETRY_INPUT_FORMAT.get(profilerProps, String.class);
  }
  LOG.debug("Loading telemetry; inputPath={}, inputFormat={}", inputPath, inputFormat);
  return spark
      .read()
      .format(inputFormat)
      .options(Maps.fromProperties(readerProps))
      .load(inputPath)
      .as(Encoders.STRING());
}
}