/**
 * Determines whether the configuration directories specified by YARN_CONF_DIR
 * and/or HADOOP_CONF_DIR are set.
 *
 * @return true if at least one of the environment variables points to a
 *         configuration directory, false otherwise
 */
public static boolean isConfigurationDirectoriesSpecified() {
    return getConfigurationDirectories().length > 0;
}
public HadoopResource(final URI uri, final HadoopClusterInformation defaultCluster) {
    this(uri, defaultCluster.getConfiguration(), defaultCluster.getName());
}
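// Usage sketch (not from the original source; the HDFS path is hypothetical):
// wrap a file on the default cluster as a HadoopResource, inheriting the
// cluster's Configuration and name via the constructor above.
public static HadoopResource exampleDefaultClusterResource(final ServerInformationCatalog catalog) {
    final HadoopClusterInformation defaultCluster =
            (HadoopClusterInformation) catalog.getServer(HadoopResource.DEFAULT_CLUSTERREFERENCE);
    return new HadoopResource(URI.create("hdfs://localhost:9000/datastore/input.csv"), defaultCluster);
}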
@Override
public Configuration getConfiguration() {
    try {
        return super.getConfiguration();
    } catch (final IllegalStateException e) {
        if (getDirectories().length == 0) {
            throw new IllegalStateException(
                    "None of the standard Hadoop environment variables (HADOOP_CONF_DIR, YARN_CONF_DIR) has been set.",
                    e);
        } else {
            throw e;
        }
    }
}
private ServerInformation createHadoopClusterInformation(final HadoopClusterType hadoopClusterType,
        final String name, final String description) {
    final ServerInformation serverInformation;
    if (hadoopClusterType.getEnvironmentConfigured() != null) {
        serverInformation = new EnvironmentBasedHadoopClusterInformation(name, description);
    } else if (hadoopClusterType.getDirectories() != null) {
        final List<String> directoryList = hadoopClusterType.getDirectories().getDirectory();
        // TODO: Apply variable substitution to the configured directory paths
        final String[] directories = directoryList.toArray(new String[directoryList.size()]);
        serverInformation = new DirectoryBasedHadoopClusterInformation(name, description, directories);
    } else if (hadoopClusterType.getNamenodeUrl() != null) {
        serverInformation = new DirectConnectionHadoopClusterInformation(name, description,
                URI.create(hadoopClusterType.getNamenodeUrl()));
    } else {
        throw new UnsupportedOperationException("Unsupported Hadoop cluster configuration method");
    }
    return serverInformation;
}
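// Sketch of the configuration XML this factory consumes (element names inferred
// from the JAXB accessors above and the serializer elsewhere in this section;
// treat the exact tag names, especially "namenode-url", as assumptions):
//
// <hadoop-clusters>
//   <hadoop-cluster name="environment" description="uses HADOOP_CONF_DIR/YARN_CONF_DIR">
//     <environment-configured/>
//   </hadoop-cluster>
//   <hadoop-cluster name="conf-dirs">
//     <directories>
//       <directory>/etc/hadoop/conf</directory>
//     </directories>
//   </hadoop-cluster>
//   <hadoop-cluster name="namenode">
//     <namenode-url>hdfs://localhost:9000/</namenode-url>
//   </hadoop-cluster>
// </hadoop-clusters>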
// (tail of a validation branch earlier in the surrounding method)
throw new DCUserInputException("The Hadoop path does not exist");

final EnvironmentBasedHadoopClusterInformation environmentBasedHadoopClusterInformation =
        new EnvironmentBasedHadoopClusterInformation("default", HadoopResource.DEFAULT_CLUSTERREFERENCE);
if (!EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
    throw new DCUserInputException("HADOOP_CONF_DIR and/or YARN_CONF_DIR are not defined");
}
logger.debug("Environment-based cluster: {}", environmentBasedHadoopClusterInformation.getDescription());
resource = new HadoopResource(uri, environmentBasedHadoopClusterInformation.getConfiguration(),
        HadoopResource.DEFAULT_CLUSTERREFERENCE);
final Element hadoopClusterElement = getDocument().createElement("hadoop-cluster");
hadoopClusterElement.setAttribute("name", hadoopClusterInformation.getName());
final String description = hadoopClusterInformation.getDescription();
if (!Strings.isNullOrEmpty(description)) {
    hadoopClusterElement.setAttribute("description", description);
}
if (hadoopClusterInformation instanceof DirectConnectionHadoopClusterInformation) {
    appendElement(hadoopClusterElement, "namenode-url",
            ((DirectConnectionHadoopClusterInformation) hadoopClusterInformation).getNameNodeUri().toString());
} else if (hadoopClusterInformation instanceof EnvironmentBasedHadoopClusterInformation) {
    appendElement(hadoopClusterElement, "environment-configured", "");
} else if (hadoopClusterInformation instanceof DirectoryBasedHadoopClusterInformation) {
    final DirectoryBasedHadoopClusterInformation directoryBasedHadoopClusterInformation =
            (DirectoryBasedHadoopClusterInformation) hadoopClusterInformation;
    final Element directoriesElement = getDocument().createElement("directories");
    hadoopClusterElement.appendChild(directoriesElement);
    for (final String directory : directoryBasedHadoopClusterInformation.getDirectories()) {
        appendElement(directoriesElement, "directory", directory);
    }
}
// Saving the dialog input as a new direct-connection server:
final URI nameNodeUri = new URI(_fileSystemURITextField.getText().trim());
final DirectConnectionHadoopClusterInformation newServer = new DirectConnectionHadoopClusterInformation(
        _nameTextField.getText(), _descriptionTextField.getText(), nameNodeUri);
_savedServer = newServer;

// Populating the dialog fields from an existing connection:
_nameTextField.setText(directConnection.getName());
_nameTextField.setEnabled(false);
_fileSystemURITextField.setText(directConnection.getNameNodeUri().toString());
final String description = directConnection.getDescription();
if (description != null) {
    _descriptionTextField.setText(description);
}
public ServerInformationCatalogImpl(final ServerInformation... servers) {
    final List<ServerInformation> serversList = new ArrayList<>();
    Collections.addAll(serversList, servers);
    _servers = serversList;
    try {
        if (!containsServer(HadoopResource.DEFAULT_CLUSTERREFERENCE)) {
            final EnvironmentBasedHadoopClusterInformation environmentBasedHadoopClusterInformation =
                    new EnvironmentBasedHadoopClusterInformation(HadoopResource.DEFAULT_CLUSTERREFERENCE, null);
            if (environmentBasedHadoopClusterInformation.getDirectories().length > 0) {
                serversList.add(0, environmentBasedHadoopClusterInformation);
            }
        }
    } catch (final IllegalStateException e) {
        logger.info("No Hadoop environment variables, skipping default server");
    }
}
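// Usage sketch (illustrative names and URI): constructing a catalog; when
// HADOOP_CONF_DIR or YARN_CONF_DIR is set, the constructor above prepends an
// environment-based default entry automatically.
public static void exampleCatalogWithDefault() throws URISyntaxException {
    final ServerInformation namenode = new DirectConnectionHadoopClusterInformation(
            "namenode", null, new URI("hdfs://localhost:9000/"));
    final ServerInformationCatalog catalog = new ServerInformationCatalogImpl(namenode);
    // True when the environment-based default was added:
    final boolean hasDefault = catalog.getServer(HadoopResource.DEFAULT_CLUSTERREFERENCE) != null;
}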
private List<DirectoryPathPanel> getDirectoriesListPanel(final JPanel parent) {
    _pathPanels = new ArrayList<>();
    if (_server != null) {
        final String[] directories = _server.getDirectories();
        if (directories != null) {
            for (final String directory : directories) {
                final DirectoryPathPanel directoryPanel = new DirectoryPathPanel(new File(directory), parent);
                _pathPanels.add(directoryPanel);
            }
        } else {
            _pathPanels.add(new DirectoryPathPanel(null, parent));
        }
    } else {
        _pathPanels.add(new DirectoryPathPanel(null, parent));
    }
    return _pathPanels;
}
if (EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
    selectedServer = serverNames[0];
}
final HadoopClusterInformation server =
        (HadoopClusterInformation) serverInformationCatalog.getServer(selectedServer);
final HdfsResource resource = new HadoopResource(selectedFile, server.getConfiguration(), selectedServer);
final Datastore datastore = createAndAddDatastore(resource);
_datastoreSelectListener.datastoreSelected(datastore);
private ServerInformationCatalog createServerInformationCatalog(final ServersType serversType,
        final DataCleanerConfigurationImpl temporaryConfiguration,
        final TemporaryMutableDataCleanerEnvironment temporaryEnvironment) {
    if (serversType == null) {
        return temporaryConfiguration.getServerInformationCatalog();
    }
    final Map<String, ServerInformation> servers = new HashMap<>();
    final List<HadoopClusterType> hadoopClusterTypes = serversType.getHadoopClusters().getHadoopCluster();
    for (final HadoopClusterType hadoopClusterType : hadoopClusterTypes) {
        final String name = hadoopClusterType.getName();
        checkName(name, ServerInformation.class, servers);
        final String description = hadoopClusterType.getDescription();
        final ServerInformation serverInformation =
                createHadoopClusterInformation(hadoopClusterType, name, description);
        servers.put(name, serverInformation);
    }
    try {
        servers.put(HadoopResource.DEFAULT_CLUSTERREFERENCE, new EnvironmentBasedHadoopClusterInformation(
                HadoopResource.DEFAULT_CLUSTERREFERENCE, null));
    } catch (final IllegalStateException e) {
        logger.info("No Hadoop environment variables, skipping default server");
    }
    return new ServerInformationCatalogImpl(servers.values());
}
// Populating the dialog fields from an existing server:
final String serverName = _server.getName();
if (serverName != null) {
    _nameTextField.setText(serverName);
    _nameTextField.setEnabled(false);
}
final String description = _server.getDescription();
if (description != null) {
    _descriptionTextField.setText(description);
}

// Saving the dialog input as a new directory-based server:
final DirectoryBasedHadoopClusterInformation newServer = new DirectoryBasedHadoopClusterInformation(
        _nameTextField.getText(), _descriptionTextField.getText(), paths.toArray(new String[paths.size()]));
_serverInformationCatalog.addServerInformation(newServer);
public static Configuration getHadoopConfigurationWithTimeout(final HadoopClusterInformation clusterInformation) {
    final Configuration configuration;
    if (clusterInformation == null) {
        configuration = new Configuration();
    } else {
        configuration = clusterInformation.getConfiguration();
    }
    // Fail fast on unreachable clusters: allow only a single retry on socket timeout.
    configuration.set(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY,
            String.valueOf(1));
    return configuration;
}
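// Usage sketch (assumes it runs in the same utility class that declares
// getHadoopConfigurationWithTimeout): open a FileSystem handle with the
// fail-fast configuration so connection problems surface quickly.
public static FileSystem exampleOpenFileSystem(final HadoopClusterInformation cluster) throws IOException {
    final Configuration configuration = getHadoopConfigurationWithTimeout(cluster);
    return FileSystem.get(configuration);
}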
@Override
public Configuration getConfiguration() {
    final Configuration configuration;
    if (SystemProperties.getBoolean(HdfsResource.SYSTEM_PROPERTY_HADOOP_CONF_DIR_ENABLED, false)) {
        configuration = super.getConfiguration();
    } else {
        configuration = new Configuration();
    }
    configuration.set("fs.defaultFS", _nameNodeUri.toString());
    return configuration;
}
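// Usage sketch: merging HADOOP_CONF_DIR/YARN_CONF_DIR files into a direct
// connection is opt-in via the system property referenced above; without it,
// only fs.defaultFS is set on a fresh Configuration.
public static Configuration exampleDirectConnectionConfiguration(
        final DirectConnectionHadoopClusterInformation directConnection) {
    System.setProperty(HdfsResource.SYSTEM_PROPERTY_HADOOP_CONF_DIR_ENABLED, "true");
    return directConnection.getConfiguration();
}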
servers.add(new EnvironmentBasedHadoopClusterInformation("default", "hadoop conf dir"));
servers.add(new DirectoryBasedHadoopClusterInformation("directory", "directory set up",
        "C:\\Users\\claudiap\\git\\vagrant-vms\\bigdatavm\\hadoop_conf"));
servers.add(new DirectConnectionHadoopClusterInformation("namenode", "directconnection",
        new URI("hdfs://192.168.0.255:9000/")));
final ServerInformationCatalog serverInformationCatalog = new ServerInformationCatalogImpl(servers);
@Override
public Configuration getConfiguration() {
    final Configuration configuration = new Configuration();
    final Map<String, File> configurationFiles = new HashMap<>();
    // Collect *.xml files; the first file found with a given name wins, so
    // earlier directories shadow later ones.
    Arrays.stream(getDirectories()).map(File::new).filter(File::isDirectory).forEach(dir -> {
        final File[] files = dir.listFiles();
        assert files != null;
        Arrays.stream(files).filter(File::isFile)
                .filter(f -> !configurationFiles.containsKey(f.getName()))
                .filter(f -> FilenameUtils.getExtension(f.getName()).equalsIgnoreCase("xml"))
                .forEach(f -> configurationFiles.put(f.getName(), f));
    });
    if (configurationFiles.isEmpty()) {
        throw new IllegalStateException("Specified directories do not contain any Hadoop configuration files");
    }
    configurationFiles.values().stream().map(File::toURI).map(Path::new).forEach(configuration::addResource);
    return configuration;
}
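// Behavior sketch (the directories are illustrative): only *.xml files are
// loaded, and a core-site.xml in /etc/hadoop/conf shadows one in
// /opt/hadoop/conf, because only the first file of each name is kept.
public static Configuration exampleDirectoryBasedConfiguration() {
    final DirectoryBasedHadoopClusterInformation cluster = new DirectoryBasedHadoopClusterInformation(
            "conf-dirs", "illustrative directories", "/etc/hadoop/conf", "/opt/hadoop/conf");
    return cluster.getConfiguration();
}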
public HadoopResourceBuilder(final ServerInformationCatalog catalog, final String templatedUri) {
    final Matcher matcher = RESOURCE_SCHEME_PATTERN.matcher(templatedUri);
    if (!matcher.matches()) {
        // Plain URI: fall back to the default cluster (if registered) and pin
        // fs.defaultFS to the given location.
        _clusterReferenceName = null;
        final String fixedUri = templatedUri.replace(" ", "%20");
        final HadoopClusterInformation hadoopClusterInformation =
                (HadoopClusterInformation) catalog.getServer(HadoopResource.DEFAULT_CLUSTERREFERENCE);
        if (hadoopClusterInformation != null) {
            _configuration = hadoopClusterInformation.getConfiguration();
        } else {
            _configuration = new Configuration();
        }
        _configuration.set("fs.defaultFS", fixedUri);
        _uri = URI.create(fixedUri);
    } else {
        // Templated URI: resolve the named cluster from the catalog.
        _clusterReferenceName = matcher.group(2);
        final HadoopClusterInformation hadoopClusterInformation =
                (HadoopClusterInformation) catalog.getServer(_clusterReferenceName);
        _configuration = hadoopClusterInformation.getConfiguration();
        _uri = URI.create(matcher.group(3).replace(" ", "%20"));
    }
}
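// Usage sketch (the file path is hypothetical): a plain, non-templated URI does
// not match RESOURCE_SCHEME_PATTERN, so the builder above falls back to the
// default cluster, pins fs.defaultFS, and escapes spaces as %20.
public static HadoopResourceBuilder examplePlainUriBuilder(final ServerInformationCatalog catalog) {
    return new HadoopResourceBuilder(catalog, "hdfs://localhost:9000/data/input file.csv");
}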
public EnvironmentBasedHadoopClusterInformation(final String name, final String description) {
    super(name, description, getConfigurationDirectories());
}
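// Usage sketch: the constructor resolves HADOOP_CONF_DIR/YARN_CONF_DIR at
// construction time, so guard with isConfigurationDirectoriesSpecified() to
// avoid the IllegalStateException path shown in getConfiguration() above.
public static EnvironmentBasedHadoopClusterInformation exampleEnvironmentCluster() {
    if (!EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
        throw new IllegalStateException("Set HADOOP_CONF_DIR or YARN_CONF_DIR first");
    }
    return new EnvironmentBasedHadoopClusterInformation("environment", "from environment variables");
}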