/**
 * Creates a catalog backed by the given servers. If no server is registered
 * under {@code HadoopResource.DEFAULT_CLUSTERREFERENCE}, an environment-based
 * default Hadoop cluster is prepended — but only when it can actually resolve
 * at least one configuration directory.
 *
 * @param servers the servers to include in the catalog
 */
public ServerInformationCatalogImpl(final ServerInformation... servers) {
    // Snapshot the varargs into a mutable list that backs this catalog.
    final List<ServerInformation> catalogServers = new ArrayList<>();
    Collections.addAll(catalogServers, servers);
    _servers = catalogServers;

    try {
        if (containsServer(HadoopResource.DEFAULT_CLUSTERREFERENCE)) {
            // Caller already supplied a default cluster — nothing to add.
            return;
        }
        final EnvironmentBasedHadoopClusterInformation defaultCluster =
                new EnvironmentBasedHadoopClusterInformation(HadoopResource.DEFAULT_CLUSTERREFERENCE, null);
        if (defaultCluster.getDirectories().length > 0) {
            // Give the environment-derived default highest lookup priority.
            catalogServers.add(0, defaultCluster);
        }
    } catch (final IllegalStateException e) {
        // No usable Hadoop configuration in the environment — run without a default.
        logger.info("No Hadoop environment variables, skipping default server");
    }
}
// Register an environment-based cluster under the name "default".
servers.add(new EnvironmentBasedHadoopClusterInformation("default", "hadoop conf dir"));
// NOTE(review): "directopry" looks like a typo in the description text, and the
// absolute path below is a developer-specific Windows checkout — presumably this
// is test/bootstrap code; the path should come from configuration or a test
// resource rather than being hard-coded. TODO confirm and parameterize.
servers.add(new DirectoryBasedHadoopClusterInformation("directory", "directopry set up",
        "C:\\Users\\claudiap\\git\\vagrant-vms\\bigdatavm\\hadoop_conf"));
private ServerInformation createHadoopClusterInformation(final HadoopClusterType hadoopClusterType, final String name, final String description) { final ServerInformation serverInformation; if (hadoopClusterType.getEnvironmentConfigured() != null) { serverInformation = new EnvironmentBasedHadoopClusterInformation(name, description); } else if (hadoopClusterType.getDirectories() != null) { final List<String> directoryList = hadoopClusterType.getDirectories().getDirectory(); // TODO: Variable-thingy final String[] directories = directoryList.toArray(new String[directoryList.size()]); serverInformation = new DirectoryBasedHadoopClusterInformation(name, description, directories); } else if (hadoopClusterType.getNamenodeUrl() != null) { serverInformation = new DirectConnectionHadoopClusterInformation(name, description, URI.create(hadoopClusterType.getNamenodeUrl())); } else { throw new UnsupportedOperationException("Unsupported hadoop cluster configuration method"); } return serverInformation; }
/**
 * Builds the server catalog from the parsed {@code <servers>} configuration
 * element, falling back to the temporary configuration's catalog when no
 * servers element is present. An environment-based default Hadoop cluster is
 * added under {@code HadoopResource.DEFAULT_CLUSTERREFERENCE} only when the
 * configuration did not already declare a server under that name — previously
 * a user-configured default was silently overwritten, which was inconsistent
 * with the constructor of ServerInformationCatalogImpl (it checks
 * containsServer first).
 *
 * @param serversType the parsed servers element, or null if absent
 * @param temporaryConfiguration source of the fallback catalog
 * @param temporaryEnvironment the environment being built (currently unused here)
 * @return the assembled catalog
 */
private ServerInformationCatalog createServerInformationCatalog(final ServersType serversType,
        final DataCleanerConfigurationImpl temporaryConfiguration,
        final TemporaryMutableDataCleanerEnvironment temporaryEnvironment) {
    if (serversType == null) {
        return temporaryConfiguration.getServerInformationCatalog();
    }
    final Map<String, ServerInformation> servers = new HashMap<>();
    // Guard against a servers element with no hadoop-clusters child (avoids NPE).
    if (serversType.getHadoopClusters() != null) {
        final List<HadoopClusterType> hadoopClusterTypes = serversType.getHadoopClusters().getHadoopCluster();
        for (final HadoopClusterType hadoopClusterType : hadoopClusterTypes) {
            final String name = hadoopClusterType.getName();
            checkName(name, ServerInformation.class, servers);
            final String description = hadoopClusterType.getDescription();
            final ServerInformation serverInformation =
                    createHadoopClusterInformation(hadoopClusterType, name, description);
            servers.put(name, serverInformation);
        }
    }
    try {
        // Only register the environment-based default when the configuration
        // did not already declare a server under the default reference.
        if (!servers.containsKey(HadoopResource.DEFAULT_CLUSTERREFERENCE)) {
            servers.put(HadoopResource.DEFAULT_CLUSTERREFERENCE,
                    new EnvironmentBasedHadoopClusterInformation(HadoopResource.DEFAULT_CLUSTERREFERENCE, null));
        }
    } catch (final IllegalStateException e) {
        logger.info("No Hadoop environment variables, skipping default server");
    }
    return new ServerInformationCatalogImpl(servers.values());
}
// NOTE(review): fragment — this throw appears to terminate a validation branch
// whose condition lies outside the visible region.
throw new DCUserInputException("The Hadoop path does not exist");
final EnvironmentBasedHadoopClusterInformation environmentBasedHadoopClusterInformation =
        new EnvironmentBasedHadoopClusterInformation(
                "default", HadoopResource.DEFAULT_CLUSTERREFERENCE);
// Presumably handles the case where no Hadoop configuration directories are set
// in the environment; the body of this condition continues beyond this view —
// TODO confirm against the full method.
if (!EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {