/**
 * Builds the Zookeeper bootstrap, loading the default Hadoop-Unit configuration on first use.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ZookeeperBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (zookeeperLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Hive metastore bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public HiveMetastoreBootstrap(URL url) {
    // Already bootstrapped once — nothing to (re)load.
    if (hiveLocalMetaStore != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the HBase bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public HBaseBootstrap(URL url) {
    // Already bootstrapped once — nothing to (re)load.
    if (hbaseLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the YARN bootstrap, loading the default Hadoop-Unit configuration on first use.
 * Both configuration and service-lookup failures are logged and leave the component
 * unconfigured rather than failing fast.
 */
public YarnBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (yarnLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException | NotFoundServiceException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Confluent KSQL REST bootstrap with the default Hadoop-Unit configuration.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ConfluentKsqlRestBootstrap() {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Redis bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 *
 * @throws IllegalArgumentException when running on Windows, where Redis is unsupported
 */
public RedisBootstrap(URL url) {
    // Guard first: Redis has no Windows support, so refuse to bootstrap there.
    if (System.getProperty("os.name").startsWith("Windows")) {
        throw new IllegalArgumentException("Sorry redis is not supported on windows...");
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the SolrCloud bootstrap, loading the default Hadoop-Unit configuration on first use.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public SolrCloudBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (solrServer != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Kafka bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public KafkaBootstrap(URL url) {
    // Already bootstrapped once — nothing to (re)load.
    if (kafkaLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the HBase bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public HBaseBootstrap(URL url) {
    // Already bootstrapped once — nothing to (re)load.
    if (hbaseLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the MongoDB bootstrap, loading the default Hadoop-Unit configuration on first use.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public MongoDbBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (mongodbLocalServer != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Confluent Kafka bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ConfluentKafkaBootstrap(URL url) {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the YARN bootstrap, loading the default Hadoop-Unit configuration on first use.
 * Both configuration and service-lookup failures are logged and leave the component
 * unconfigured rather than failing fast.
 */
public YarnBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (yarnLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException | NotFoundServiceException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Elasticsearch bootstrap with the default Hadoop-Unit configuration.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ElasticSearchBootstrap() {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the HBase bootstrap, loading the default Hadoop-Unit configuration on first use.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public HBaseBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (hbaseLocalCluster != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Hive metastore bootstrap, loading the default Hadoop-Unit configuration on first use.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public HiveMetastoreBootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (hiveLocalMetaStore != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the HiveServer2 bootstrap, loading the default Hadoop-Unit configuration on first use.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public HiveServer2Bootstrap() {
    // Already bootstrapped once — nothing to (re)load.
    if (hiveLocalServer2 != null) {
        return;
    }
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Cassandra bootstrap with the default Hadoop-Unit configuration.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public CassandraBootstrap() {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Confluent Schema Registry bootstrap with the default Hadoop-Unit configuration.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ConfluentSchemaRegistryBootstrap() {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Confluent KSQL REST bootstrap with the default Hadoop-Unit configuration.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ConfluentKsqlRestBootstrap() {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(null);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}
/**
 * Builds the Confluent KSQL REST bootstrap from the given configuration URL.
 * A failed load is logged and leaves the component unconfigured rather than failing fast.
 */
public ConfluentKsqlRestBootstrap(URL url) {
    try {
        configuration = HadoopUtils.INSTANCE.loadConfigFile(url);
        loadConfig();
    } catch (BootstrapException bootstrapException) {
        LOGGER.error("unable to load configuration", bootstrapException);
    }
}