EnvironmentBasedHadoopClusterInformation

How to use EnvironmentBasedHadoopClusterInformation in org.datacleaner.server
Best Java code snippets using org.datacleaner.server.EnvironmentBasedHadoopClusterInformation (Showing top 10 results out of 315)

origin: datacleaner/DataCleaner

public ServerInformationCatalogImpl(final ServerInformation... servers) {
  final List<ServerInformation> serversList = new ArrayList<>();
  Collections.addAll(serversList, servers);
  _servers = serversList;
  try {
    if (!containsServer(HadoopResource.DEFAULT_CLUSTERREFERENCE)) {
      final EnvironmentBasedHadoopClusterInformation environmentBasedHadoopClusterInformation =
          new EnvironmentBasedHadoopClusterInformation(HadoopResource.DEFAULT_CLUSTERREFERENCE, null);
      if (environmentBasedHadoopClusterInformation.getDirectories().length > 0) {
        serversList.add(0, environmentBasedHadoopClusterInformation);
      }
    }
  } catch (final IllegalStateException e) {
    logger.info("No Hadoop environment variables, skipping default server");
  }
}
origin: datacleaner/DataCleaner

@Override
public Configuration getConfiguration() {
  final Configuration configuration;
  if (SystemProperties.getBoolean(HdfsResource.SYSTEM_PROPERTY_HADOOP_CONF_DIR_ENABLED, false)) {
    configuration = super.getConfiguration();
  } else {
    configuration = new Configuration();
  }
  configuration.set("fs.defaultFS", _nameNodeUri.toString());
  return configuration;
}
origin: datacleaner/DataCleaner

/**
 * Determines if the configuration directories specified by YARN_CONF_DIR
 * and/or HADOOP_CONF_DIR are set or not.
 *
 * @return true if at least one configuration directory is specified via the
 *         environment, false otherwise
 */
public static boolean isConfigurationDirectoriesSpecified() {
  return getConfigurationDirectories().length > 0;
}
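
For context, a minimal guard sketch (not taken from the DataCleaner sources; the logger field and messages are illustrative, and org.apache.hadoop.conf.Configuration is assumed on the classpath): when the check returns false, calling getConfiguration() on an environment-based instance fails with the IllegalStateException shown in the getConfiguration() override further down.

if (EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
  // At least one of YARN_CONF_DIR / HADOOP_CONF_DIR is set, so getConfiguration() can resolve files.
  final EnvironmentBasedHadoopClusterInformation cluster =
      new EnvironmentBasedHadoopClusterInformation("default", "Hadoop cluster resolved from environment variables");
  final Configuration configuration = cluster.getConfiguration();
  logger.debug("Resolved Hadoop configuration from {} directories", cluster.getDirectories().length);
} else {
  // Without the environment variables, getConfiguration() would throw IllegalStateException.
  logger.warn("HADOOP_CONF_DIR/YARN_CONF_DIR are not set; configure a Hadoop cluster explicitly instead");
}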
origin: org.eobjects.datacleaner/DataCleaner-monitor-csv-datastore-wizard

    throw new DCUserInputException("The Hadoop path does not exist");
  }
  final EnvironmentBasedHadoopClusterInformation environmentBasedHadoopClusterInformation =
      new EnvironmentBasedHadoopClusterInformation("default", HadoopResource.DEFAULT_CLUSTERREFERENCE);
  if (!EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
    throw new DCUserInputException("HADOOP_CONF_DIR or/and SPARK_CONF_DIR are not defined");
  }
  // SLF4J "{}" placeholder added so the description is actually logged
  logger.debug("Environment variable is {}", environmentBasedHadoopClusterInformation.getDescription());
  resource = new HadoopResource(uri, environmentBasedHadoopClusterInformation.getConfiguration(),
      HadoopResource.DEFAULT_CLUSTERREFERENCE);
  // … (an else branch follows in the original wizard code)
origin: datacleaner/DataCleaner

servers.add(new EnvironmentBasedHadoopClusterInformation("default", "hadoop conf dir"));
servers.add(new DirectoryBasedHadoopClusterInformation("directory", "directory set up",
    "C:\\Users\\claudiap\\git\\vagrant-vms\\bigdatavm\\hadoop_conf"));
origin: datacleaner/DataCleaner

if (EnvironmentBasedHadoopClusterInformation.isConfigurationDirectoriesSpecified()) {
  selectedServer = serverNames[0];
}
origin: datacleaner/DataCleaner

  @Override
  public Configuration getConfiguration() {
    try {
      return super.getConfiguration();
    } catch (final IllegalStateException e) {
      if (getDirectories().length == 0) {
        throw new IllegalStateException(
            "None of the standard Hadoop environment variables (HADOOP_CONF_DIR, YARN_CONF_DIR) has been set.",
            e);
      } else {
        throw e;
      }
    }
  }
}
origin: datacleaner/DataCleaner

private ServerInformation createHadoopClusterInformation(final HadoopClusterType hadoopClusterType,
    final String name, final String description) {
  final ServerInformation serverInformation;
  if (hadoopClusterType.getEnvironmentConfigured() != null) {
    serverInformation = new EnvironmentBasedHadoopClusterInformation(name, description);
  } else if (hadoopClusterType.getDirectories() != null) {
    final List<String> directoryList = hadoopClusterType.getDirectories().getDirectory();
    // TODO: Variable-thingy
    final String[] directories = directoryList.toArray(new String[directoryList.size()]);
    serverInformation = new DirectoryBasedHadoopClusterInformation(name, description, directories);
  } else if (hadoopClusterType.getNamenodeUrl() != null) {
    serverInformation = new DirectConnectionHadoopClusterInformation(name, description,
        URI.create(hadoopClusterType.getNamenodeUrl()));
  } else {
    throw new UnsupportedOperationException("Unsupported hadoop cluster configuration method");
  }
  return serverInformation;
}
origin: datacleaner/DataCleaner

private ServerInformationCatalog createServerInformationCatalog(final ServersType serversType,
    final DataCleanerConfigurationImpl temporaryConfiguration,
    final TemporaryMutableDataCleanerEnvironment temporaryEnvironment) {
  if (serversType == null) {
    return temporaryConfiguration.getServerInformationCatalog();
  }
  final Map<String, ServerInformation> servers = new HashMap<>();
  final List<HadoopClusterType> hadoopClusterTypes = serversType.getHadoopClusters().getHadoopCluster();
  for (final HadoopClusterType hadoopClusterType : hadoopClusterTypes) {
    final String name = hadoopClusterType.getName();
    checkName(name, ServerInformation.class, servers);
    final String description = hadoopClusterType.getDescription();
    final ServerInformation serverInformation =
        createHadoopClusterInformation(hadoopClusterType, name, description);
    servers.put(name, serverInformation);
  }
  try {
    servers.put(HadoopResource.DEFAULT_CLUSTERREFERENCE,
        new EnvironmentBasedHadoopClusterInformation(HadoopResource.DEFAULT_CLUSTERREFERENCE, null));
  } catch (final IllegalStateException e) {
    logger.info("No Hadoop environment variables, skipping default server");
  }
  return new ServerInformationCatalogImpl(servers.values());
}
origin: datacleaner/DataCleaner

public EnvironmentBasedHadoopClusterInformation(final String name, final String description) {
  super(name, description, getConfigurationDirectories());
}
org.datacleaner.server.EnvironmentBasedHadoopClusterInformation

Javadoc

Environment-based configuration: configuration directories are resolved from the YARN_CONF_DIR and/or HADOOP_CONF_DIR environment variables (see the usage sketch after the method list below).

Most used methods

  • <init>
  • getConfiguration
  • isConfigurationDirectoriesSpecified
  • getConfigurationDirectories
  • getDescription
  • getDirectories
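
A minimal sketch tying the most used methods together, assuming the try/catch fallback seen in the ServerInformationCatalogImpl snippet above (variable names and printed output are illustrative, not part of DataCleaner):

try {
  // <init>: resolve a cluster reference from HADOOP_CONF_DIR / YARN_CONF_DIR
  final EnvironmentBasedHadoopClusterInformation cluster =
      new EnvironmentBasedHadoopClusterInformation("default", "resolved from environment variables");
  System.out.println(cluster.getDescription());                    // getDescription
  if (cluster.getDirectories().length > 0) {                       // getDirectories
    final org.apache.hadoop.conf.Configuration configuration = cluster.getConfiguration(); // getConfiguration
    System.out.println(configuration.get("fs.defaultFS"));
  }
} catch (final IllegalStateException e) {
  // Mirrors the snippets above: no Hadoop environment variables are set, so skip the default server.
}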
