StringUtils

How to use StringUtils in org.apache.hadoop.util

Best Java code snippets using org.apache.hadoop.util.StringUtils (Showing top 20 results out of 2,304)

origin: apache/hbase

/**
 * Checks that a cluster ID file exists in the HBase root directory
 * @param fs the root directory FileSystem
 * @param rootdir the HBase root directory in HDFS
 * @param wait how long to wait between retries, in milliseconds
 * @return <code>true</code> if the file exists, otherwise <code>false</code>
 * @throws IOException if checking the FileSystem fails
 */
public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,
  int wait) throws IOException {
 while (true) {
  try {
   Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);
   return fs.exists(filePath);
  } catch (IOException ioe) {
   if (wait > 0) {
    LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +
      ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));
    try {
     Thread.sleep(wait);
    } catch (InterruptedException e) {
     throw (InterruptedIOException)new InterruptedIOException().initCause(e);
    }
   } else {
    throw ioe;
   }
  }
 }
}
origin: apache/hive

public static String joinWithSeparator(Iterable<?> strings) {
 return org.apache.hadoop.util.StringUtils.join(TXN_WRITE_EVENT_FILE_SEPARATOR, strings);
}
origin: apache/hive

/**
 * Get the list of input {@link Path}s for the map-reduce job.
 *
 * @param conf The configuration of the job
 * @return the list of input {@link Path}s for the map-reduce job.
 */
static Path[] getInputPaths(Configuration conf) throws IOException {
 String dirs = conf.get("mapred.input.dir");
 if (dirs == null) {
  throw new IOException("Configuration mapred.input.dir is not defined.");
 }
 String[] list = StringUtils.split(dirs);
 Path[] result = new Path[list.length];
 for (int i = 0; i < list.length; i++) {
  result[i] = new Path(StringUtils.unEscapeString(list[i]));
 }
 return result;
}
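
For context, a hedged sketch of how such a comma-joined path list is typically produced before getInputPaths reads it back. The property name comes from the snippet above; the paths are hypothetical, and the sketch assumes the getInputPaths method above is in scope:

Configuration conf = new Configuration();
// Paths are comma-joined; escapeString protects a comma inside a single path.
String dirs = StringUtils.escapeString("/data/a,b") + "," + StringUtils.escapeString("/data/c");
conf.set("mapred.input.dir", dirs);
Path[] paths = getInputPaths(conf); // two paths: "/data/a,b" and "/data/c"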
origin: apache/hive

/**
 * Take an encoded string and decode it into an array of strings.
 */
public static String[] decodeArray(String s) {
 if (s == null)
  return null;
 String[] escaped = StringUtils.split(s);
 String[] plain = new String[escaped.length];
 for (int i = 0; i < escaped.length; ++i)
  plain[i] = StringUtils.unEscapeString(escaped[i]);
 return plain;
}
origin: apache/hive

/**
 * Take an array of strings and encode it into one string.
 */
public static String encodeArray(String[] plain) {
 if (plain == null)
  return null;
 String[] escaped = new String[plain.length];
 for (int i = 0; i < plain.length; ++i) {
  if (plain[i] == null) {
   plain[i] = "";
  }
  escaped[i] = StringUtils.escapeString(plain[i]);
 }
 return StringUtils.arrayToString(escaped);
}
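
Taken together, encodeArray and decodeArray above form a reversible encoding: escapeString protects embedded commas before arrayToString joins on commas, and split/unEscapeString undo it. A minimal, self-contained sketch of the round trip (the array contents are made up for illustration):

import org.apache.hadoop.util.StringUtils;

public class ArrayCodecDemo {
 public static void main(String[] args) {
  // An element containing a comma would break a naive join/split.
  String[] plain = {"a,b", "c"};

  // Encode: escape each element, then comma-join.
  String[] escaped = new String[plain.length];
  for (int i = 0; i < plain.length; i++) {
   escaped[i] = StringUtils.escapeString(plain[i]); // "a,b" -> "a\,b"
  }
  String encoded = StringUtils.arrayToString(escaped); // "a\,b,c"

  // Decode: split on unescaped commas, then unescape each element.
  String[] roundTripped = StringUtils.split(encoded);
  for (int i = 0; i < roundTripped.length; i++) {
   roundTripped[i] = StringUtils.unEscapeString(roundTripped[i]);
  }

  System.out.println(roundTripped[0]); // prints: a,b
  System.out.println(roundTripped[1]); // prints: c
 }
}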
origin: Qihoo360/XLearning

if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY,
   XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
 buildInputStreamFileStatus();
} else {
 // ... (non-stream input handling elided in this excerpt) ...
}
// ... (excerpted from the surrounding ApplicationMaster logic) ...
rmCallbackHandler.setNeededWorkerContainersCount(workerNum);
int allocateInterval = conf.getInt(XLearningConfiguration.XLEARNING_ALLOCATE_INTERVAL,
   XLearningConfiguration.DEFAULT_XLEARNING_ALLOCATE_INTERVAL);
amrmAsync.setHeartbeatInterval(allocateInterval);
startAllocatedTimeStamp = System.currentTimeMillis();
if (startAllocatedContainer && (System.currentTimeMillis() - startAllocatedTimeStamp)
   > conf.getInt(YarnConfiguration.RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS,
     YarnConfiguration.DEFAULT_RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS)) {
 this.appendMessage(failMessage, true);
 this.appendMessage("Unregister Application", true);
}
// ...
if (fs.exists(tmpResultPath)) {
 LOG.info("Move from " + tmpResultPath.toString() + " to " + finalResultPath.toString());
 fs.rename(tmpResultPath, finalResultPath);
}
// ... (on failure) ...
} catch (Exception e) {
 finalSuccess = false;
 this.appendMessage("Some error occurs"
   + org.apache.hadoop.util.StringUtils.stringifyException(e), true);
 diagnostics = e.getMessage();
}
origin: apache/hive

// ... (excerpted; surrounding task body elided) ...
if (!destFs.exists(destPath.getParent())) {
 destFs.mkdirs(destPath.getParent());
}
dc = new DataContainer(table.getTTable());
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
 Utilities.FILE_OP_LOGGER.trace("loadTable called from " + tbd.getSourcePath());
}
tableCols = table.getCols();
// ...
} catch (HiveException he) {
 int errorCode = 1;
 if (he.getCanonicalErrorMsg() != ErrorMsg.GENERIC_ERROR) {
  errorCode = he.getCanonicalErrorMsg().getErrorCode();
  console.printError("Failed with exception " + he.getMessage(), "\n"
    + StringUtils.stringifyException(he));
 }
 console.printInfo("\n", StringUtils.stringifyException(he), false);
 return errorCode;
} catch (Exception e) {
 console.printError("Failed with exception " + e.getMessage(), "\n"
   + StringUtils.stringifyException(e));
 setException(e);
 return (1);
}
origin: apache/hive

success = true;
HiveFileFormatUtils.prepareJobOutput(job);
job.setOutputFormat(HiveOutputFormatImpl.class);
job.setMapperClass(work.getMapperClass());
job.setMapOutputKeyClass(NullWritable.class);
job.setMapOutputValueClass(NullWritable.class);
if (work.getNumMapTasks() != null) {
 job.setNumMapTasks(work.getNumMapTasks());
}
// ...
Path tempOutPath = Utilities.toTempPath(outputPath);
try {
 FileSystem fs = tempOutPath.getFileSystem(job);
 if (!fs.exists(tempOutPath)) {
  fs.mkdirs(tempOutPath);
 }
} catch (Exception e) {
 LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
 setException(e);
 return 5;
}
origin: ml.shifu/guagua-yarn

public static void addInputPath(Configuration conf, Path path) throws IOException {
  path = path.getFileSystem(conf).makeQualified(path);
  String dirStr = org.apache.hadoop.util.StringUtils.escapeString(path.toString());
  String dirs = conf.get(GuaguaYarnConstants.GUAGUA_YARN_INPUT_DIR);
  conf.set(GuaguaYarnConstants.GUAGUA_YARN_INPUT_DIR, dirs == null ? dirStr : dirs + "," + dirStr);
}
origin: apache/hive

@SuppressWarnings("SameParameterValue") static void addDependencyJars(Configuration conf, Class<?>... classes)
  throws IOException {
 FileSystem localFs = FileSystem.getLocal(conf);
 Set<String> jars = new HashSet<>(conf.getStringCollection("tmpjars"));
 for (Class<?> clazz : classes) {
  if (clazz == null) {
   continue;
  }
  final String path = Utilities.jarFinderGetJar(clazz);
  if (path == null) {
   throw new RuntimeException("Could not find jar for class " + clazz + " in order to ship it to the cluster.");
  }
  if (!localFs.exists(new Path(path))) {
   throw new RuntimeException("Could not validate jar file " + path + " for class " + clazz);
  }
  jars.add(path);
 }
 if (jars.isEmpty()) {
  return;
 }
 //noinspection ToArrayCallWithZeroLengthArrayArgument
 conf.set("tmpjars", StringUtils.arrayToString(jars.toArray(new String[jars.size()])));
}
origin: apache/hive

try {
 FileSystem fs = emptyScratchDir.getFileSystem(job);
 fs.mkdirs(emptyScratchDir);
} catch (IOException e) {
 e.printStackTrace();
 console.printError("Error launching map-reduce job", "\n"
   + org.apache.hadoop.util.StringUtils.stringifyException(e));
 return 5;
}
job.setOutputFormat(HiveOutputFormatImpl.class);
job.setMapRunnerClass(ExecMapRunner.class);
job.setMapperClass(ExecMapper.class);
// ... (sampling setup elided in this excerpt) ...
try {
 Path hdfsPath = mWork.getTmpHDFSPath();
 hdfs.copyFromLocalFile(archivePath, hdfsFilePath);
} catch (Exception e) {
 LOG.error("Sampling error", e);
 console.printError(e.toString(),
   "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
}
// ... (job completion) ...
} catch (Exception e) {
 String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'";
 console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
 returnVal = 3;
}
origin: apache/hive

StringBuilder str = new StringBuilder();
String separator = "";
boolean ignoreInvalidPath = jobConf.getBoolean(HCatConstants.HCAT_INPUT_IGNORE_INVALID_PATH_KEY,
  HCatConstants.HCAT_INPUT_IGNORE_INVALID_PATH_DEFAULT);
Iterator<String> pathIterator = pathStrings.iterator();
while (pathIterator.hasNext()) {
 String pathString = pathIterator.next();
 // ... (blank-path check with pathIterator.remove()/continue elided) ...
 Path path = new Path(pathString);
 FileSystem fs = path.getFileSystem(jobConf);
 if (ignoreInvalidPath && !fs.exists(path)) {
  pathIterator.remove();
  continue;
 }
 final String qualifiedPath = fs.makeQualified(path).toString();
 str.append(separator)
  .append(StringUtils.escapeString(qualifiedPath));
 separator = StringUtils.COMMA_STR;
}
jobConf.set("mapred.input.dir", str.toString());
origin: apache/sqoop

@Before
public void setUp() {
 super.setUp();
 Path p = new Path(getWarehouseDir());
 try {
  FileSystem fs = FileSystem.get(new Configuration());
   fs.delete(p, true);
 } catch (IOException e) {
  LOG.error("Setup fail with IOException: " + StringUtils.stringifyException(e));
  fail("Setup fail with IOException: " + StringUtils.stringifyException(e));
 }
}
origin: intel-hadoop/HiBench

try {
 Configuration fsConfig = new Configuration(getConf());
 fsConfig.setInt("test.io.file.buffer.size", bufferSize);
 fsConfig.setInt("test.io.sampling.interval", tputSampleInterval);
 FileSystem fs = FileSystem.get(fsConfig);
 JobConf dummyConf = new JobConf(fsConfig, TestDFSIOEnh.class);
 JobClient jc = new JobClient(dummyConf);
 int maxreduces = jc.getDefaultReduces();
 // ... (benchmark run elided in this excerpt) ...
} catch (Exception e) {
 System.err.print(StringUtils.stringifyException(e));
 return -1;
}
origin: apache/hbase

 Class<?>... classes) throws IOException {
 FileSystem localFs = FileSystem.getLocal(conf);
 Set<String> jars = new HashSet<>();
 jars.addAll(conf.getStringCollection("tmpjars"));
 for (Class<?> clazz : classes) {
  if (clazz == null) {
   continue;
  }
  // ... (jar lookup for clazz elided in this excerpt; it yields 'path') ...
  if (!localFs.exists(path)) {
   LOG.warn("Could not validate jar file " + path + " for class " + clazz);
   continue;
  }
  jars.add(path.toString());
 }
 conf.set("tmpjars", StringUtils.arrayToString(jars.toArray(new String[jars.size()])));
}
origin: apache/hbase

@Override
protected void initialize(JobContext context) throws IOException {
 // Do we have to worry about mis-matches between the Configuration from setConf and the one
 // in this context?
 TableName tableName = TableName.valueOf(conf.get(INPUT_TABLE));
 try {
  initializeTable(ConnectionFactory.createConnection(new Configuration(conf)), tableName);
 } catch (Exception e) {
  LOG.error(StringUtils.stringifyException(e));
 }
}
origin: apache/hive

assert tbl.getPath() != null : "null==getPath() for " + tbl.getTableName();
boolean isTxnTable = AcidUtils.isTransactionalTable(tbl);
boolean isMmTable = AcidUtils.isInsertOnlyTable(tbl);
boolean isFullAcidTable = AcidUtils.isFullAcidTable(tbl);
if (conf.getBoolVar(ConfVars.FIRE_EVENTS_FOR_DML) && !tbl.isTemporary()) {
 newFiles = Collections.synchronizedList(new ArrayList<Path>());
}
if (((isMmTable || isFullAcidTable) && loadPath.equals(tbl.getPath()))
  || (loadFileType == LoadFileType.IGNORE)) {
 assert !isAcidIUDoperation;
 destPath = new Path(destPath, isInsertOverwrite
   ? AcidUtils.baseDir(writeId) : AcidUtils.deltaSubdir(writeId, writeId, stmtId));
} else {
 try {
  FileSystem fs = tbl.getDataLocation().getFileSystem(conf);
  copyFiles(conf, loadPath, destPath, fs, isSrcLocal, isAcidIUDoperation,
    loadFileType == LoadFileType.OVERWRITE_EXISTING, newFiles,
    tbl.getNumBuckets() > 0, isFullAcidTable, isManaged);
 } catch (IOException e) {
  LOG.error(StringUtils.stringifyException(e));
  throw new HiveException("addFiles: filesystem error in check phase", e);
 }
}
origin: apache/hive

try {
 if (!fs.exists(destf)) {
  FileUtils.mkdir(fs, destf, conf);
 }
} catch (IOException e) {
 throw new HiveException(
   "copyFiles: error while checking/creating destination directory!!!",
   e);
}

FileStatus[] srcs;
FileSystem srcFs;
try {
 srcFs = srcf.getFileSystem(conf);
 srcs = srcFs.globStatus(srcf);
} catch (IOException e) {
 LOG.error(StringUtils.stringifyException(e));
 throw new HiveException("addFiles: filesystem error in check phase. " + e.getMessage(), e);
}
origin: voldemort/voldemort

@Override
protected FileStatus[] listStatus(JobConf job) throws IOException {
  String dirs = job.get("mapred.input.dir", "");
  String[] list = StringUtils.split(dirs);
  List<FileStatus> status = new ArrayList<FileStatus>();
  for(int i = 0; i < list.length; i++) {
    status.addAll(getAllSubFileStatus(job, new Path(list[i])));
  }
  return status.toArray(new FileStatus[0]);
}
origin: apache/flink

/**
 * Set the array of string values for the <code>name</code> property as
 * comma delimited values.
 *
 * @param name property name.
 * @param values The values
 */
public void setStrings(String name, String... values) {
 set(name, StringUtils.arrayToString(values));
}
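
A quick usage sketch, assuming the Hadoop Configuration API that this snippet mirrors (the property name "demo.keys" is hypothetical). Note that arrayToString joins with plain commas and does not escape, so values that themselves contain commas will not survive the round trip through getStrings:

Configuration conf = new Configuration();
conf.setStrings("demo.keys", "alpha", "beta", "gamma");
String[] back = conf.getStrings("demo.keys"); // ["alpha", "beta", "gamma"]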
org.apache.hadoop.util.StringUtils

Javadoc

General string utils

Most used methods

  • stringifyException
    Make a string representation of the exception.
  • join
    Concatenates strings, using a separator.
  • split
  • arrayToString
  • toLowerCase
    Converts all of the characters in this String to lower case with Locale.ENGLISH.
  • escapeString
  • startupShutdownMessage
    Print a log message for starting up and shutting down.
  • getStrings
    Returns an arraylist of strings.
  • toUpperCase
    Converts all of the characters in this String to upper case with Locale.ENGLISH.
  • byteToHexString
    Given an array of bytes it will convert the bytes to a hex string representation of the bytes.
  • formatTime
    Given the time in long milliseconds, returns a String in the format Xhrs, Ymins, Z sec.
  • unEscapeString
  • getStringCollection
  • byteDesc
  • formatPercent
  • getTrimmedStrings
  • equalsIgnoreCase
  • format
  • formatTimeDiff
  • getTrimmedStringCollection
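
A small, self-contained sketch exercising several of the methods above (all values are illustrative, and the formatted output shown in comments is approximate):

import org.apache.hadoop.util.StringUtils;

public class StringUtilsTour {
 public static void main(String[] args) {
  // stringifyException: the full stack trace as a String, handy for log messages.
  try {
   throw new IllegalStateException("boom");
  } catch (Exception e) {
   System.err.println(StringUtils.stringifyException(e));
  }

  // join: concatenate values with a separator.
  System.out.println(StringUtils.join(";", java.util.Arrays.asList("a", "b"))); // a;b

  // byteToHexString: hex rendering of a byte array.
  System.out.println(StringUtils.byteToHexString(new byte[] {0x0f, 0x10})); // 0f10

  // formatTime: human-readable duration from milliseconds.
  System.out.println(StringUtils.formatTime(3725000L)); // roughly "1hrs, 2mins, 5sec"

  // toUpperCase: locale-stable case conversion using Locale.ENGLISH.
  System.out.println(StringUtils.toUpperCase("title")); // TITLE
 }
}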
