public static String joinWithSeparator(Iterable<?> strings) {
  return org.apache.hadoop.util.StringUtils.join(TXN_WRITE_EVENT_FILE_SEPARATOR, strings);
}

public static String join(char separator, String[] strings) {
  return join(separator + "", strings);
}

public static String join(char separator, Iterable<?> strings) {
  return join(separator + "", strings);
}

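// Hedged usage sketch, not from the source: both char-separator overloads above
// simply box the char to a String and delegate to the CharSequence-based join,
// so char and String separators are assumed to produce identical results.
String[] parts = {"a", "b", "c"};
String joinedArray = join(',', parts);                             // "a,b,c"
String joinedIterable = join(',', java.util.Arrays.asList(parts)); // "a,b,c"
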
@Override
public String toString() {
  return String.format("FixedRegistry hosts=%s", StringUtils.join(",", this.hosts));
}

@Override
public String prettyPrintJournal() {
  return StringUtils.join("\n\t", getStatusJournal());
}

/**
 * Checks if a given command (String[]) fits within the Windows maximum
 * command line length. Note that the input is expected to already include
 * space delimiters; no extra count is added for delimiters.
 *
 * @param commands command parts, including any space delimiters
 * @throws IOException if the combined length exceeds the Windows maximum
 */
public static void checkWindowsCommandLineLength(String... commands)
    throws IOException {
  int len = 0;
  for (String s : commands) {
    len += s.length();
  }
  if (len > WINDOWS_MAX_SHELL_LENGTH) {
    throw new IOException(String.format(
        "The command line has a length of %d that exceeds the maximum allowed"
        + " length of %d. Command starts with: %s",
        len, WINDOWS_MAX_SHELL_LENGTH,
        StringUtils.join("", commands).substring(0, 100)));
  }
}

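// Hedged usage sketch, not from the source; the launcher name is an assumption.
// It shows the intended call pattern: validate the assembled command line before
// handing it to cmd.exe, which rejects lines longer than the limit checked above.
private static void launchOnWindows(String... cmdParts) throws IOException {
  checkWindowsCommandLineLength(cmdParts); // throws if the joined length exceeds the max
  // ... start the process with cmdParts ...
}
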
/**
 * Execute the shell command.
 *
 * @throws IOException if the command fails, or if the command is
 *         not well constructed.
 */
public void execute() throws IOException {
  for (String s : command) {
    if (s == null) {
      throw new IOException(
          "(null) entry in command string: " + StringUtils.join(" ", command));
    }
  }
  this.run();
}

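// Hedged usage sketch, not from the source: assumes Hadoop's ShellCommandExecutor,
// whose execute() runs the null-entry check above before launching the process.
ShellCommandExecutor exec = new ShellCommandExecutor(new String[] {"ls", "-l"});
exec.execute();                   // IOException on a null entry or a failed command
String stdout = exec.getOutput(); // captured standard output of the command
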
/**
 * Appends the given nested column paths to the configured read list. Once a
 * nested column path is included in the list, an underlying record reader of
 * a columnar file format (e.g., Parquet or ORC) knows which columns are needed.
 */
public static void appendNestedColumnPaths(Configuration conf, List<String> paths) {
  if (paths == null || paths.isEmpty()) {
    return;
  }
  String pathsStr = StringUtils.join(StringUtils.COMMA_STR,
      paths.toArray(new String[paths.size()]));
  String old = conf.get(READ_NESTED_COLUMN_PATH_CONF_STR, null);
  String newConfStr = pathsStr;
  if (old != null && !old.isEmpty()) {
    newConfStr = newConfStr + StringUtils.COMMA_STR + old;
  }
  setReadNestedColumnPathConf(conf, newConfStr);
}

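// Hedged usage sketch, not from the source; the demo method is hypothetical. Note
// that new paths are prepended to any existing value, so the most recent call's
// paths come first in the configured list.
static void nestedColumnPathsDemo(Configuration conf) {
  appendNestedColumnPaths(conf, Arrays.asList("s.a", "s.b")); // value: "s.a,s.b"
  appendNestedColumnPaths(conf, Arrays.asList("t.c"));        // value: "t.c,s.a,s.b"
}
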
public static boolean distCp(FileSystem srcFS, List<Path> srcPaths, Path dst,
    boolean deleteSource, String doAsUser, HiveConf conf, HadoopShims shims)
    throws IOException {
  LOG.debug("Copying srcPaths: {} to dstPath: {} with doAs: {}",
      StringUtils.join(",", srcPaths), dst.toString(), doAsUser);
  boolean copied = false;
  if (doAsUser == null) {
    copied = shims.runDistCp(srcPaths, dst, conf);
  } else {
    copied = shims.runDistCpAs(srcPaths, dst, conf, doAsUser);
  }
  if (copied && deleteSource) {
    if (doAsUser != null) {
      // If distcp ran as doAsUser, the delete should also run as that user.
      // TODO: execute the delete within doAs when doAsUser is given.
      throw new IOException("Distcp is called with doAsUser and delete source set as true");
    }
    for (Path path : srcPaths) {
      srcFS.delete(path, true);
    }
  }
  return copied;
}

String filter = StringUtils.join(" and ", new String[] {
    "Name LIKE '%java.exe%'",
    "CommandLine LIKE '%" + processName + "%'" });

/**
 * Creates an SSLFactory.
 *
 * @param mode SSLFactory mode, client or server.
 * @param conf Hadoop configuration from which the SSLFactory configuration
 *             will be read.
 */
public SSLFactory(Mode mode, Configuration conf) {
  this.conf = conf;
  if (mode == null) {
    throw new IllegalArgumentException("mode cannot be NULL");
  }
  this.mode = mode;
  Configuration sslConf = readSSLConfiguration(conf, mode);
  requireClientCert = sslConf.getBoolean(SSL_REQUIRE_CLIENT_CERT_KEY,
      SSL_REQUIRE_CLIENT_CERT_DEFAULT);
  Class<? extends KeyStoresFactory> klass =
      conf.getClass(KEYSTORES_FACTORY_CLASS_KEY,
          FileBasedKeyStoresFactory.class, KeyStoresFactory.class);
  keystoresFactory = ReflectionUtils.newInstance(klass, sslConf);
  enabledProtocols = conf.getStrings(SSL_ENABLED_PROTOCOLS_KEY,
      SSL_ENABLED_PROTOCOLS_DEFAULT);
  excludeCiphers = Arrays.asList(
      sslConf.getTrimmedStrings(SSL_SERVER_EXCLUDE_CIPHER_LIST));
  if (LOG.isDebugEnabled()) {
    LOG.debug("will exclude cipher suites: {}",
        StringUtils.join(",", excludeCiphers));
  }
}

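// Hedged usage sketch, not from the source; the helper name is an assumption. The
// init()/destroy() lifecycle is the standard Hadoop SSLFactory pattern: init()
// loads the configured keystores, destroy() releases them.
static SSLFactory newClientSslFactory(Configuration conf)
    throws IOException, GeneralSecurityException {
  SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
  factory.init(); // loads keystores via the configured KeyStoresFactory
  return factory; // callers should call factory.destroy() when finished
}
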
/**
 * A utility function to retrieve a specific additional SASL property from
 * the config. Used by subclasses to read the SASL properties they need.
 *
 * @param conf the configuration
 * @param configKey the config key to look for
 * @param defaultQOP the default QOP if the key is missing
 * @return the SASL properties associated with the given key
 */
static Map<String, String> getSaslProperties(Configuration conf,
    String configKey, QualityOfProtection defaultQOP) {
  Map<String, String> saslProps = new TreeMap<>();
  String[] qop = conf.getStrings(configKey, defaultQOP.toString());
  for (int i = 0; i < qop.length; i++) {
    qop[i] = QualityOfProtection.valueOf(
        StringUtils.toUpperCase(qop[i])).getSaslQop();
  }
  saslProps.put(Sasl.QOP, StringUtils.join(",", qop));
  saslProps.put(Sasl.SERVER_AUTH, "true");
  return saslProps;
}

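// Hedged usage sketch, not from the source; the config key is hypothetical. With
// Hadoop's QualityOfProtection enum, "authentication" maps to the SASL QOP "auth"
// and "privacy" to "auth-conf", so Sasl.QOP becomes "auth,auth-conf" here.
static Map<String, String> saslPropsDemo(Configuration conf) {
  conf.set("example.sasl.qop", "authentication,privacy"); // hypothetical key
  return getSaslProperties(conf, "example.sasl.qop",
      QualityOfProtection.AUTHENTICATION);
}
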
private String getColumnFamiliesAsString() {
  return StringUtils.join(",", getColumnFamilies());
}

@Override
public void setConf(Configuration conf) {
  this.conf = conf;
  properties = new TreeMap<String, String>();
  String[] qop = conf.getTrimmedStrings(
      CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
      QualityOfProtection.AUTHENTICATION.toString());
  for (int i = 0; i < qop.length; i++) {
    qop[i] = QualityOfProtection.valueOf(
        StringUtils.toUpperCase(qop[i])).getSaslQop();
  }
  properties.put(Sasl.QOP, StringUtils.join(",", qop));
  properties.put(Sasl.SERVER_AUTH, "true");
}

" mrl_txn_id IN(" + StringUtils.join(",", txnIds) + ") "; LOG.debug("Going to execute update <" + deleteQ + ">"); cnt = stmt.executeUpdate(deleteQ);
@Override
public void setConf(Configuration conf) {
  conf.setIfUnset(
      String.format("%s.%s", TEST_NAME, LoadTestTool.OPT_REGION_REPLICATION),
      String.valueOf(DEFAULT_REGION_REPLICATION));
  conf.setIfUnset(
      String.format("%s.%s", TEST_NAME, LoadTestTool.OPT_COLUMN_FAMILIES),
      StringUtils.join(",", DEFAULT_COLUMN_FAMILIES));
  conf.setBoolean("hbase.table.sanity.checks", true);
  // Enable async WAL replication to region replicas for unit tests.
  conf.setBoolean(ServerRegionReplicaUtil.REGION_REPLICA_REPLICATION_CONF_KEY, true);
  // Flush every 4 MB.
  conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024L * 1024 * 4);
  conf.setInt("hbase.hstore.blockingStoreFiles", 100);
  super.setConf(conf);
}

query.append("CLUSTERED BY (").append(StringUtils.join(",", buckCols)).append(") "); List<Order> sortCols = t.getSd().getSortCols(); if (sortCols.size() > 0) { if (skewedInfo != null && !skewedInfo.getSkewedColNames().isEmpty()) { query.append(" SKEWED BY (").append( StringUtils.join(", ", skewedInfo.getSkewedColNames())).append(") ON "); isFirst = true; for (List<String> colValues : skewedInfo.getSkewedColValues()) { query.append("('").append(StringUtils.join("','", colValues)).append("')");
+ "This behavior can be changed in the Local Security Policy management console"); } else if (returnVal != 0) { LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed " + returnVal + " with: " + ec.getMessage());
    org.apache.hadoop.util.StringUtils.join(",", missingIds);
throw new IOException(errMsg);

String args = StringUtils.join(" ", argv);
if (args.length() > 2048) {
  args = args.substring(0, 2048);