WindowsLibsUtils

How to use WindowsLibsUtils in com.github.sakserv.minicluster.util

Best Java code snippets using com.github.sakserv.minicluster.util.WindowsLibsUtils (Showing top 14 results out of 315)

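Before walking through the individual results, note the common pattern: a single static call, WindowsLibsUtils.setHadoopHome(), placed early in a component's configure() step so that Hadoop's native Windows binaries are loaded before the mini-cluster starts. A minimal, self-contained sketch (the MyMiniClusterComponent class is hypothetical; only the WindowsLibsUtils call is taken from the results below):

import com.github.sakserv.minicluster.util.WindowsLibsUtils;

// Hypothetical wrapper class for illustration; only the WindowsLibsUtils
// call is drawn from the snippets on this page.
public class MyMiniClusterComponent {

  public void configure() throws Exception {
    // Effectively a no-op on Linux/macOS; on Windows this sets hadoop.home.dir
    // and loads the native hadoop.dll and hdfs.dll libraries.
    WindowsLibsUtils.setHadoopHome();

    // ... component-specific configuration follows ...
  }

  public static void main(String[] args) throws Exception {
    new MyMiniClusterComponent().configure();
  }
}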
origin: sakserv/hadoop-mini-clusters

@Override
public void configure() throws Exception {
  if(null != hdfsEnableRunningUserAsProxyUser && hdfsEnableRunningUserAsProxyUser) {
    hdfsConfig.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
    hdfsConfig.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
  }
  hdfsConfig.setBoolean("dfs.permissions", hdfsEnablePermissions);
  System.setProperty("test.build.data", hdfsTempDir);
  // Handle Windows
  WindowsLibsUtils.setHadoopHome();
}
origin: sakserv/hadoop-mini-clusters

public static void setHadoopHome() {
  // Set hadoop.home.dir to point to the windows lib dir
  if (System.getProperty("os.name").startsWith("Windows")) {
    String windowsLibDir = getHadoopHome();
    LOG.info("WINDOWS: Setting hadoop.home.dir: {}", windowsLibDir);
    System.setProperty("hadoop.home.dir", windowsLibDir);
    System.load(new File(windowsLibDir + Path.SEPARATOR + "lib" + Path.SEPARATOR + "hadoop.dll").getAbsolutePath());
    System.load(new File(windowsLibDir + Path.SEPARATOR + "lib" + Path.SEPARATOR + "hdfs.dll").getAbsolutePath());
  }
}
origin: com.github.sakserv/hadoop-mini-clusters-hbase

@Override
public void configure() throws Exception {
  configure(hbaseConfiguration);
  // Handle Windows
  WindowsLibsUtils.setHadoopHome();
}
origin: com.github.sakserv/hadoop-mini-clusters-common

public static void setHadoopHome() {
  // Set hadoop.home.dir to point to the windows lib dir
  if (System.getProperty("os.name").startsWith("Windows")) {
    String windowsLibDir = getHadoopHome();
    LOG.info("WINDOWS: Setting hadoop.home.dir: {}", windowsLibDir);
    System.setProperty("hadoop.home.dir", windowsLibDir);
    System.load(new File(windowsLibDir + Path.SEPARATOR + "lib" + Path.SEPARATOR + "hadoop.dll").getAbsolutePath());
    System.load(new File(windowsLibDir + Path.SEPARATOR + "lib" + Path.SEPARATOR + "hdfs.dll").getAbsolutePath());
  }
}
origin: sakserv/hadoop-mini-clusters

@Override
public void configure() throws Exception {
  // Handle Windows
  WindowsLibsUtils.setHadoopHome();
  configuration.set(YarnConfiguration.RM_ADDRESS, resourceManagerAddress);
  configuration.set(YarnConfiguration.RM_HOSTNAME, resourceManagerHostname);
  configuration.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, resourceManagerSchedulerAddress);
  configuration.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, resourceManagerResourceTrackerAddress);
  configuration.set(YarnConfiguration.RM_WEBAPP_ADDRESS, resourceManagerWebappAddress);
  configuration.set(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, "true");
  if (getUseInJvmContainerExecutor()) {
    configuration.set(YarnConfiguration.NM_CONTAINER_EXECUTOR, inJvmContainerExecutorClass);
    configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
  }
}
origin: jetoile/hadoop-unit

@Override
public void configure() throws Exception {
  // Handle Windows
  WindowsLibsUtils.setHadoopHome();
  // Oozie has very particular naming conventions for these directories, don't change
  fullOozieHomeDir = oozieTestDir + "/" + oozieHomeDir;
  fullOozieConfDir = fullOozieHomeDir + "/conf";
  fullOozieHadoopConfDir = fullOozieConfDir + "/hadoop-conf";
  fullOozieActionDir = fullOozieConfDir + "/action-conf";
  //set system properties
  System.setProperty(Services.OOZIE_HOME_DIR, new File(fullOozieHomeDir).getAbsolutePath());
  System.setProperty(ConfigurationService.OOZIE_CONFIG_DIR, fullOozieConfDir);
  System.setProperty("oozielocal.log", fullOozieHomeDir + "/oozielocal.log");
  System.setProperty(XTestCase.OOZIE_TEST_JOB_TRACKER, oozieYarnResourceManagerAddress);
  System.setProperty(XTestCase.OOZIE_TEST_NAME_NODE, oozieHdfsDefaultFs);
  System.setProperty("oozie.test.db.host", "localhost");
  System.setProperty(ConfigurationService.OOZIE_DATA_DIR, fullOozieHomeDir);
  System.setProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
  if (oozieShareLibCreate) {
    oozieConf.set("oozie.service.WorkflowAppService.system.libpath",
        oozieHdfsDefaultFs + oozieHdfsShareLibDir);
    oozieConf.set("use.system.libpath.for.mapreduce.and.pig.jobs", "true");
  }
  oozieConf.set("oozie.service.JPAService.jdbc.driver", "org.hsqldb.jdbcDriver");
  oozieConf.set("oozie.service.JPAService.jdbc.url", "jdbc:hsqldb:mem:oozie-db;create=true");
  oozieConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
}
origin: fr.jetoile.hadoop/hadoop-unit-hive

  private HiveConf buildHiveConf() {
    // Handle Windows
    WindowsLibsUtils.setHadoopHome();

    HiveConf hiveConf = new HiveConf();
    hiveConf.set("fs.defaultFS", "hdfs://" + configuration.getString(HdfsConfig.HDFS_NAMENODE_HOST_KEY) + ":" + configuration.getInt(HdfsConfig.HDFS_NAMENODE_PORT_KEY));
//        hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
//        hiveConf.set("hive.root.logger", "DEBUG,console");
//        hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
//        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
//        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
//        System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
//        System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");

    return hiveConf;
  }

origin: jetoile/hadoop-unit

  private HiveConf buildHiveConf() {
    // Handle Windows
    WindowsLibsUtils.setHadoopHome();

    HiveConf hiveConf = new HiveConf();
    hiveConf.set("fs.defaultFS", "hdfs://" + configuration.getString(HdfsConfig.HDFS_NAMENODE_HOST_KEY) + ":" + configuration.getInt(HdfsConfig.HDFS_NAMENODE_PORT_KEY));
//        hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
//        hiveConf.set("hive.root.logger", "DEBUG,console");
//        hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
//        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
//        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
//        System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
//        System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");

    return hiveConf;
  }

origin: fr.jetoile.hadoop/hadoop-unit-hive

  private HiveConf buildHiveConf() {
    // Handle Windows
    WindowsLibsUtils.setHadoopHome();

    HiveConf hiveConf = new HiveConf();
    hiveConf.set("fs.defaultFS", hdfsUri);
//        hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
//        hiveConf.set("hive.root.logger", "DEBUG,console");
//        hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
//        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
//        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
//        System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
//        System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");

    return hiveConf;
  }

origin: jetoile/hadoop-unit

  private HiveConf buildHiveConf() {
    // Handle Windows
    WindowsLibsUtils.setHadoopHome();

    HiveConf hiveConf = new HiveConf();
    hiveConf.set("fs.defaultFS", hdfsUri);
//        hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
//        hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
//        hiveConf.set("hive.root.logger", "DEBUG,console");
//        hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
//        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
//        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
//        System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
//        System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");

    return hiveConf;
  }

origin: sakserv/hadoop-mini-clusters

@Override
public void configure() throws Exception {
  hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
      "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
  hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
  hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
      "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
  hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
  hiveConf.set("datanucleus.schema.autoCreateTables", "true");
  hiveConf.set("hive.metastore.schema.verification", "false");
  // Handle Windows
  WindowsLibsUtils.setHadoopHome();
}
origin: com.github.sakserv/hadoop-mini-clusters-hiveserver2

@Override
public void configure() throws Exception {
  // Handle Windows
  WindowsLibsUtils.setHadoopHome();
  hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
      "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
  hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
  hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
      "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
  hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, String.valueOf(hiveServer2Hostname));
  hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, hiveServer2Port);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, zookeeperConnectionString);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, Boolean.TRUE);
}
origin: fr.jetoile.hadoop/hadoop-unit-hive

  @Override
  public void configure() throws Exception {
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
        "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
    hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
    hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
        "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
//        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, hiveWarehouseDir);
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
    hiveConf.set("datanucleus.schema.autoCreateTables", "true");
    hiveConf.set("hive.metastore.schema.verification", "false");

    // Handle Windows
    WindowsLibsUtils.setHadoopHome();
  }

origin: jetoile/hadoop-unit

  @Override
  public void configure() throws Exception {
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
        "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
    hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
    hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
        "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
//        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, hiveWarehouseDir);
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
    hiveConf.set("datanucleus.schema.autoCreateTables", "true");
    hiveConf.set("hive.metastore.schema.verification", "false");

    // Handle Windows
    WindowsLibsUtils.setHadoopHome();
  }

com.github.sakserv.minicluster.util.WindowsLibsUtils

Most used methods (see the sketch after this list)

  • setHadoopHome
  • getHadoopHome
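A rough sketch of how these two methods fit together, inferred from the snippets above; the visibility and exact return value of getHadoopHome are assumptions, since only its use inside setHadoopHome is shown on this page:

import java.io.File;

import com.github.sakserv.minicluster.util.WindowsLibsUtils;

public class WindowsLibsUtilsSketch {

  public static void main(String[] args) {
    // Usual entry point: on Windows this resolves the bundled lib directory,
    // sets hadoop.home.dir, and loads hadoop.dll and hdfs.dll; on other
    // operating systems it does nothing.
    WindowsLibsUtils.setHadoopHome();

    // Assumed to return the resolved Windows lib directory as a String,
    // as used inside setHadoopHome in the snippets above.
    String windowsLibDir = WindowsLibsUtils.getHadoopHome();
    System.out.println("Windows lib dir: " + new File(windowsLibDir).getAbsolutePath());
  }
}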
