/**
 * Creates a catalog holding the given servers. If no server is registered under
 * {@link HadoopResource#DEFAULT_CLUSTERREFERENCE}, an environment-based Hadoop
 * cluster entry is prepended — but only when the standard Hadoop environment
 * variables actually resolve to at least one configuration directory.
 */
public ServerInformationCatalogImpl(final ServerInformation... servers) {
    final List<ServerInformation> catalog = new ArrayList<>();
    Collections.addAll(catalog, servers);
    _servers = catalog;
    try {
        if (!containsServer(HadoopResource.DEFAULT_CLUSTERREFERENCE)) {
            final EnvironmentBasedHadoopClusterInformation defaultCluster =
                    new EnvironmentBasedHadoopClusterInformation(HadoopResource.DEFAULT_CLUSTERREFERENCE, null);
            // Only advertise the default cluster when it has usable conf directories.
            if (defaultCluster.getDirectories().length > 0) {
                catalog.add(0, defaultCluster);
            }
        }
    } catch (final IllegalStateException e) {
        logger.info("No Hadoop environment variables, skipping default server");
    }
}
/**
 * Builds the Hadoop {@link Configuration} for this resource. When the
 * {@code HADOOP_CONF_DIR}-enabled system property is set, the parent's
 * directory-based configuration is used as the base; otherwise a fresh,
 * empty configuration is created. In both cases {@code fs.defaultFS} is
 * forced to this resource's name node URI.
 */
@Override
public Configuration getConfiguration() {
    final boolean useConfDir =
            SystemProperties.getBoolean(HdfsResource.SYSTEM_PROPERTY_HADOOP_CONF_DIR_ENABLED, false);
    final Configuration configuration = useConfDir ? super.getConfiguration() : new Configuration();
    configuration.set("fs.defaultFS", _nameNodeUri.toString());
    return configuration;
}
/**
 * Determines if the configuration directories specified by YARN_CONF_DIR
 * and/or HADOOP_CONF_DIR are set or not.
 *
 * @return {@code true} if at least one configuration directory could be
 *         resolved from the environment variables, {@code false} otherwise
 */
public static boolean isConfigurationDirectoriesSpecified() {
    return getConfigurationDirectories().length > 0;
}
throw new DCUserInputException("The Hadoop path does not exist");
// NOTE(review): fragment appears mid-method; braces are unbalanced in this view.
final EnvironmentBasedHadoopClusterInformation environmentBasedHadoopClusterInformation =
        new EnvironmentBasedHadoopClusterInformation(
                "default", HadoopResource.DEFAULT_CLUSTERREFERENCE);
if (!EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
    throw new DCUserInputException("HADOOP_CONF_DIR or/and SPARK_CONF_DIR are not defined");
    // NOTE(review): the statements below follow an unconditional throw and look
    // unreachable — a closing brace after the throw may have been lost; confirm
    // against the original file.
    // NOTE(review): this SLF4J call has no "{}" placeholder, so the second
    // argument is never rendered; likely intended: "Environment variable is {}".
    logger.debug("Environment variable is", environmentBasedHadoopClusterInformation.getDescription());
    resource = new HadoopResource(uri, environmentBasedHadoopClusterInformation.getConfiguration(),
            HadoopResource.DEFAULT_CLUSTERREFERENCE);
} else {
// Registers one environment-variable-based and one directory-based cluster.
servers.add(new EnvironmentBasedHadoopClusterInformation("default", "hadoop conf dir"));
// NOTE(review): hard-coded, machine-specific Windows path (and "directopry"
// typo in the description string) — this test fixture should load the conf
// directory from a test resource or system property instead.
servers.add(new DirectoryBasedHadoopClusterInformation("directory", "directopry set up",
        "C:\\Users\\claudiap\\git\\vagrant-vms\\bigdatavm\\hadoop_conf"));
// Default to the first known server when the Hadoop environment variables
// provide configuration directories. NOTE(review): fragment — the closing
// brace / else-branch is outside this view.
if (EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
    selectedServer = serverNames[0];
/**
 * Returns the Hadoop configuration loaded by the parent implementation. If
 * loading fails because no configuration directory is available, the generic
 * failure is rethrown with a message naming the environment variables the
 * user is expected to set; any other failure is propagated unchanged.
 */
@Override
public Configuration getConfiguration() {
    try {
        return super.getConfiguration();
    } catch (final IllegalStateException e) {
        if (getDirectories().length != 0) {
            // Directories were resolved, so the failure has another cause.
            throw e;
        }
        throw new IllegalStateException(
                "None of the standard Hadoop environment variables (HADOOP_CONF_DIR, YARN_CONF_DIR) has been set.",
                e);
    }
}
}
private ServerInformation createHadoopClusterInformation(final HadoopClusterType hadoopClusterType, final String name, final String description) { final ServerInformation serverInformation; if (hadoopClusterType.getEnvironmentConfigured() != null) { serverInformation = new EnvironmentBasedHadoopClusterInformation(name, description); } else if (hadoopClusterType.getDirectories() != null) { final List<String> directoryList = hadoopClusterType.getDirectories().getDirectory(); // TODO: Variable-thingy final String[] directories = directoryList.toArray(new String[directoryList.size()]); serverInformation = new DirectoryBasedHadoopClusterInformation(name, description, directories); } else if (hadoopClusterType.getNamenodeUrl() != null) { serverInformation = new DirectConnectionHadoopClusterInformation(name, description, URI.create(hadoopClusterType.getNamenodeUrl())); } else { throw new UnsupportedOperationException("Unsupported hadoop cluster configuration method"); } return serverInformation; }
/**
 * Builds the {@link ServerInformationCatalog} from the {@code <servers>}
 * configuration section. When the section is absent, the temporary
 * configuration's existing catalog is returned. A default environment-based
 * Hadoop server is additionally registered when the Hadoop environment
 * variables are available; otherwise it is skipped with an info log.
 */
private ServerInformationCatalog createServerInformationCatalog(final ServersType serversType,
        final DataCleanerConfigurationImpl temporaryConfiguration,
        final TemporaryMutableDataCleanerEnvironment temporaryEnvironment) {
    if (serversType == null) {
        return temporaryConfiguration.getServerInformationCatalog();
    }

    // NOTE(review): HashMap does not preserve the declaration order of the
    // configured clusters; if catalog ordering matters, consider LinkedHashMap.
    final Map<String, ServerInformation> servers = new HashMap<>();
    for (final HadoopClusterType hadoopClusterType : serversType.getHadoopClusters().getHadoopCluster()) {
        final String name = hadoopClusterType.getName();
        checkName(name, ServerInformation.class, servers);
        servers.put(name, createHadoopClusterInformation(hadoopClusterType, name,
                hadoopClusterType.getDescription()));
    }

    try {
        final ServerInformation defaultServer = new EnvironmentBasedHadoopClusterInformation(
                HadoopResource.DEFAULT_CLUSTERREFERENCE, null);
        servers.put(HadoopResource.DEFAULT_CLUSTERREFERENCE, defaultServer);
    } catch (final IllegalStateException e) {
        logger.info("No Hadoop environment variables, skipping default server");
    }

    return new ServerInformationCatalogImpl(servers.values());
}
/**
 * Creates cluster information whose configuration directories are resolved
 * from the standard Hadoop environment variables (via
 * {@code getConfigurationDirectories()}).
 *
 * @param name unique server name
 * @param description human-readable description (may be {@code null})
 */
public EnvironmentBasedHadoopClusterInformation(final String name, final String description) {
    super(name, description, getConfigurationDirectories());
}