StringUtils.join

How to use the join method in org.apache.hadoop.util.StringUtils

Best Java code snippets using org.apache.hadoop.util.StringUtils.join (Showing top 20 results out of 585)

origin: apache/hive

public static String joinWithSeparator(Iterable<?> strings) {
 return org.apache.hadoop.util.StringUtils.join(TXN_WRITE_EVENT_FILE_SEPARATOR, strings);
}
origin: org.apache.hadoop/hadoop-common

public static String join(char separator, String[] strings) {
 return join(separator + "", strings);
}
origin: org.apache.hadoop/hadoop-common

public static String join(char separator, Iterable<?> strings) {
 return join(separator + "", strings);
}
origin: apache/hive

@Override
public String toString() {
 return String.format("FixedRegistry hosts=%s", StringUtils.join(",", this.hosts));
}
origin: apache/hbase

@Override
public String prettyPrintJournal() {
 return StringUtils.join("\n\t", getStatusJournal());
}
origin: org.apache.hadoop/hadoop-common

/**
 * Checks if a given command (String[]) fits in the Windows maximum command
 * line length. Note that the input is expected to already include space
 * delimiters; no extra count will be added for delimiters.
 *
 * @param commands command parts, including any space delimiters
 */
public static void checkWindowsCommandLineLength(String...commands)
  throws IOException {
 int len = 0;
 for (String s: commands) {
  len += s.length();
 }
 if (len > WINDOWS_MAX_SHELL_LENGTH) {
  throw new IOException(String.format(
   "The command line has a length of %d exceeds maximum allowed length" +
     " of %d. Command starts with: %s",
   len, WINDOWS_MAX_SHELL_LENGTH,
   StringUtils.join("", commands).substring(0, 100)));
 }
}
origin: org.apache.hadoop/hadoop-common

/**
 * Execute the shell command.
 * @throws IOException if the command fails, or if the command is
 * not well constructed.
 */
public void execute() throws IOException {
 for (String s : command) {
  if (s == null) {
   throw new IOException("(null) entry in command string: "
     + StringUtils.join(" ", command));
  }
 }
 this.run();
}
origin: apache/hive

/**
 * Appends the paths of read nested columns. Once a read nested column path
 * is included in the list, an underlying record reader of a columnar file format
 * (e.g. Parquet or ORC) can know which columns are needed.
 */
public static void appendNestedColumnPaths(
 Configuration conf,
 List<String> paths) {
 if (paths == null || paths.isEmpty()) {
  return;
 }
 String pathsStr = StringUtils.join(StringUtils.COMMA_STR,
  paths.toArray(new String[paths.size()]));
 String old = conf.get(READ_NESTED_COLUMN_PATH_CONF_STR, null);
 String newConfStr = pathsStr;
 if (old != null && !old.isEmpty()) {
  newConfStr = newConfStr + StringUtils.COMMA_STR + old;
 }
 setReadNestedColumnPathConf(conf, newConfStr);
}
origin: apache/hive

public static boolean distCp(FileSystem srcFS, List<Path> srcPaths, Path dst,
  boolean deleteSource, String doAsUser,
  HiveConf conf, HadoopShims shims) throws IOException {
 LOG.debug("copying srcPaths : {}, to DestPath :{} ,with doAs: {}",
   StringUtils.join(",", srcPaths), dst.toString(), doAsUser);
 boolean copied = false;
 if (doAsUser == null){
  copied = shims.runDistCp(srcPaths, dst, conf);
 } else {
  copied = shims.runDistCpAs(srcPaths, dst, conf, doAsUser);
 }
 if (copied && deleteSource) {
  if (doAsUser != null) {
   // if distcp is done using doAsUser, delete also should be done using same user.
   //TODO : Need to change the delete execution within doAs if doAsUser is given.
   throw new IOException("Distcp is called with doAsUser and delete source set as true");
  }
  for (Path path : srcPaths) {
   srcFS.delete(path, true);
  }
 }
 return copied;
}
origin: org.apache.hadoop/hadoop-common

String filter = StringUtils.join(" and ", new String[] {
  "Name LIKE '%java.exe%'",
  "CommandLine LIKE '%" + processName+ "%'"});
origin: org.apache.hadoop/hadoop-common

/**
 * Creates an SSLFactory.
 *
 * @param mode SSLFactory mode, client or server.
 * @param conf Hadoop configuration from where the SSLFactory configuration
 * will be read.
 */
public SSLFactory(Mode mode, Configuration conf) {
 this.conf = conf;
 if (mode == null) {
  throw new IllegalArgumentException("mode cannot be NULL");
 }
 this.mode = mode;
 Configuration sslConf = readSSLConfiguration(conf, mode);
 requireClientCert = sslConf.getBoolean(SSL_REQUIRE_CLIENT_CERT_KEY,
   SSL_REQUIRE_CLIENT_CERT_DEFAULT);
 Class<? extends KeyStoresFactory> klass
  = conf.getClass(KEYSTORES_FACTORY_CLASS_KEY,
          FileBasedKeyStoresFactory.class, KeyStoresFactory.class);
 keystoresFactory = ReflectionUtils.newInstance(klass, sslConf);
 enabledProtocols = conf.getStrings(SSL_ENABLED_PROTOCOLS_KEY,
   SSL_ENABLED_PROTOCOLS_DEFAULT);
 excludeCiphers = Arrays.asList(
   sslConf.getTrimmedStrings(SSL_SERVER_EXCLUDE_CIPHER_LIST));
 if (LOG.isDebugEnabled()) {
  LOG.debug("will exclude cipher suites: {}",
    StringUtils.join(",", excludeCiphers));
 }
}
origin: org.apache.hadoop/hadoop-common

 /**
  * A util function to retrieve specific additional sasl property from config.
  * Used by subclasses to read sasl properties used by themselves.
  * @param conf the configuration
  * @param configKey the config key to look for
  * @param defaultQOP the default QOP if the key is missing
  * @return sasl property associated with the given key
  */
 static Map<String, String> getSaslProperties(Configuration conf,
   String configKey, QualityOfProtection defaultQOP) {
  Map<String, String> saslProps = new TreeMap<>();
  String[] qop = conf.getStrings(configKey, defaultQOP.toString());

  for (int i=0; i < qop.length; i++) {
   qop[i] = QualityOfProtection.valueOf(
     StringUtils.toUpperCase(qop[i])).getSaslQop();
  }

  saslProps.put(Sasl.QOP, StringUtils.join(",", qop));
  saslProps.put(Sasl.SERVER_AUTH, "true");

  return saslProps;
 }
}
origin: apache/hbase

private String getColumnFamiliesAsString() {
 return StringUtils.join(",", getColumnFamilies());
}
origin: org.apache.hadoop/hadoop-common

@Override
public void setConf(Configuration conf) {
 this.conf = conf;
 properties = new TreeMap<String,String>();
 String[] qop = conf.getTrimmedStrings(
   CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
   QualityOfProtection.AUTHENTICATION.toString());
 for (int i=0; i < qop.length; i++) {
  qop[i] = QualityOfProtection.valueOf(
    StringUtils.toUpperCase(qop[i])).getSaslQop();
 }
 properties.put(Sasl.QOP, StringUtils.join(",", qop));
 properties.put(Sasl.SERVER_AUTH, "true");
}
origin: apache/hive

  " mrl_txn_id IN(" + StringUtils.join(",", txnIds) + ") ";
LOG.debug("Going to execute update <" + deleteQ + ">");
cnt = stmt.executeUpdate(deleteQ);
origin: apache/hbase

@Override
public void setConf(Configuration conf) {
 conf.setIfUnset(
  String.format("%s.%s", TEST_NAME, LoadTestTool.OPT_REGION_REPLICATION),
  String.valueOf(DEFAULT_REGION_REPLICATION));
 conf.setIfUnset(
  String.format("%s.%s", TEST_NAME, LoadTestTool.OPT_COLUMN_FAMILIES),
  StringUtils.join(",", DEFAULT_COLUMN_FAMILIES));
 conf.setBoolean("hbase.table.sanity.checks", true);
 // enable async wal replication to region replicas for unit tests
 conf.setBoolean(ServerRegionReplicaUtil.REGION_REPLICA_REPLICATION_CONF_KEY, true);
 conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024L * 1024 * 4); // flush every 4 MB
 conf.setInt("hbase.hstore.blockingStoreFiles", 100);
 super.setConf(conf);
}
origin: apache/hive

query.append("CLUSTERED BY (").append(StringUtils.join(",", buckCols)).append(") ");
List<Order> sortCols = t.getSd().getSortCols();
if (sortCols.size() > 0) {
if (skewedInfo != null && !skewedInfo.getSkewedColNames().isEmpty()) {
 query.append(" SKEWED BY (").append(
   StringUtils.join(", ", skewedInfo.getSkewedColNames())).append(") ON ");
 isFirst = true;
 for (List<String> colValues : skewedInfo.getSkewedColValues()) {
  query.append("('").append(StringUtils.join("','", colValues)).append("')");
origin: org.apache.hadoop/hadoop-common

   + "This behavior can be changed in the Local Security Policy management console");
} else if (returnVal != 0) {
 LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
   + returnVal + " with: " + ec.getMessage());
origin: apache/hbase

   org.apache.hadoop.util.StringUtils.join(",", missingIds);
throw new IOException(errMsg);
origin: org.apache.hadoop/hadoop-common

String args = StringUtils.join(" ", argv);
if (args.length() > 2048) {
 args = args.substring(0, 2048);
org.apache.hadoop.util.StringUtils.join

Javadoc

Concatenates strings, using a separator.
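The overloads take the separator first and then the values to join, either as an Iterable or as a String[]; the char-separator variants simply delegate to the CharSequence ones, as the hadoop-common snippets above show. A minimal, self-contained sketch (the host names and values are illustrative only):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.util.StringUtils;

public class JoinDemo {
 public static void main(String[] args) {
  List<String> hosts = Arrays.asList("node1", "node2", "node3");

  // CharSequence separator first, then the Iterable to join
  String csv = StringUtils.join(",", hosts);                           // "node1,node2,node3"

  // char-separator overload with a String[]; delegates to the CharSequence version
  String spaced = StringUtils.join(' ', new String[] {"a", "b", "c"}); // "a b c"

  System.out.println(csv);
  System.out.println(spaced);
 }
}

Note that the separator-first argument order is the opposite of Apache Commons Lang's StringUtils.join, which takes the values first, so keep the imports straight when both classes are on the classpath.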

Popular methods of StringUtils

  • stringifyException
    Make a string representation of the exception.
  • split
  • arrayToString
  • toLowerCase
    Converts all of the characters in this String to lower case with Locale.ENGLISH.
  • escapeString
  • startupShutdownMessage
    Prints a log message for starting up and shutting down.
  • getStrings
    Returns an array of strings.
  • toUpperCase
    Converts all of the characters in this String to upper case with Locale.ENGLISH.
  • byteToHexString
    Converts an array of bytes to its hex string representation.
  • formatTime
    Given the time in long milliseconds, returns a String in the format Xhrs, Ymins, Z sec.
  • unEscapeString
  • getStringCollection
    Returns a collection of strings.
  • byteDesc
  • formatPercent
  • getTrimmedStrings
  • equalsIgnoreCase
  • format
  • formatTimeDiff
  • getTrimmedStringCollection
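A few of the helpers listed above can be exercised together. A short sketch, assuming a recent hadoop-common on the classpath; the exact output strings depend on the version, so the inline comments show only the general shape of the results:

import org.apache.hadoop.util.StringUtils;

public class StringUtilsTour {
 public static void main(String[] args) {
  // stringifyException: render a Throwable's stack trace as a String
  String trace = StringUtils.stringifyException(new IllegalStateException("boom"));

  // byteToHexString: hex representation of raw bytes
  String hex = StringUtils.byteToHexString(new byte[] {0x0a, 0x1b}); // "0a1b"

  // formatTime: human-readable duration from milliseconds
  String took = StringUtils.formatTime(90_000L); // roughly "1mins, 30sec"

  // getStrings: split a comma-separated value into an array
  String[] parts = StringUtils.getStrings("a,b,c"); // ["a", "b", "c"]

  System.out.println(hex + " | " + took + " | " + parts.length);
  System.out.println(trace);
 }
}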
