
How to use the escapeString method in org.apache.hadoop.util.StringUtils

Best Java code snippets using org.apache.hadoop.util.StringUtils.escapeString (Showing top 20 results out of 378)

origin: org.apache.hadoop/hadoop-common

/**
 * Escape commas in the string using the default escape char
 * @param str a string
 * @return an escaped string
 */
public static String escapeString(String str) {
 return escapeString(str, ESCAPE_CHAR, COMMA);
}
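A minimal round-trip sketch (the class name and the sample input are illustrative, not from the snippets above): escapeString prefixes each comma with the default escape char '\', and unEscapeString reverses the operation.

import org.apache.hadoop.util.StringUtils;

public class EscapeStringDemo {
 public static void main(String[] args) {
  // Commas are prefixed with the default escape char '\'
  String escaped = StringUtils.escapeString("a,b");      // "a\,b"
  // unEscapeString undoes the escaping
  String restored = StringUtils.unEscapeString(escaped); // "a,b"
  System.out.println(escaped + " -> " + restored);
 }
}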

origin: org.apache.hadoop/hadoop-common

/**
 * Escape <code>charToEscape</code> in the string 
 * with the escape char <code>escapeChar</code>
 * 
 * @param str string
 * @param escapeChar escape char
 * @param charToEscape the char to be escaped
 * @return an escaped string
 */
public static String escapeString(
  String str, char escapeChar, char charToEscape) {
 return escapeString(str, escapeChar, new char[] {charToEscape});
}
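A small sketch of the single-character overload with an illustrative input: here '=' is escaped with '\' instead of the default comma.

import org.apache.hadoop.util.StringUtils;

public class EscapeCustomCharDemo {
 public static void main(String[] args) {
  // Escape '=' (instead of the default comma) using '\' as the escape char
  String escaped = StringUtils.escapeString("key=value", '\\', '=');
  System.out.println(escaped); // key\=value
 }
}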

origin: apache/hive

private String makeInputString(List<Path> dirs) {
 if (dirs == null || dirs.isEmpty()) return "";
 StringBuffer str = new StringBuffer(StringUtils.escapeString(dirs.get(0).toString()));
 for(int i = 1; i < dirs.size(); i++) {
  str.append(",").append(StringUtils.escapeString(dirs.get(i).toString()));
 }
 return str.toString();
}
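A standalone sketch of the join-then-split round trip this pattern relies on (the class name and paths are made up): StringUtils.split only splits on unescaped commas, so a comma inside a path name survives the trip.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;

public class JoinAndSplitPathsDemo {
 public static void main(String[] args) {
  List<Path> dirs = Arrays.asList(new Path("/data/a,b"), new Path("/data/c"));
  // Join as makeInputString does: escape each path, separate with commas
  StringBuilder joined = new StringBuilder(StringUtils.escapeString(dirs.get(0).toString()));
  for (int i = 1; i < dirs.size(); i++) {
   joined.append(",").append(StringUtils.escapeString(dirs.get(i).toString()));
  }
  // split only breaks on unescaped commas, so "/data/a,b" stays one path
  for (String piece : StringUtils.split(joined.toString())) {
   System.out.println(new Path(StringUtils.unEscapeString(piece)));
  }
 }
}
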
origin: OryxProject/oryx

/**
 * @return paths from {@link FileStatus}es into one comma-separated String
 * @see FileInputFormat#addInputPath(org.apache.hadoop.mapreduce.Job, Path)
 */
private static String joinFSPaths(FileSystem fs, FileStatus[] statuses) {
 StringBuilder joined = new StringBuilder();
 for (FileStatus status : statuses) {
  if (joined.length() > 0) {
   joined.append(',');
  }
  Path path = fs.makeQualified(status.getPath());
  joined.append(StringUtils.escapeString(path.toString()));
 }
 return joined.toString();
}
origin: apache/hive

/**
 * Take an array of strings and encode it into one string.
 */
public static String encodeArray(String[] plain) {
 if (plain == null)
  return null;
 String[] escaped = new String[plain.length];
 for (int i = 0; i < plain.length; ++i) {
  if (plain[i] == null) {
   plain[i] = "";
  }
  escaped[i] = StringUtils.escapeString(plain[i]);
 }
 return StringUtils.arrayToString(escaped);
}
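The decoding counterpart is not shown in the snippet above; a plausible sketch, assuming it splits on unescaped commas and then un-escapes each element (class name and data are illustrative):

import java.util.Arrays;
import org.apache.hadoop.util.StringUtils;

public class EncodeDecodeArrayDemo {
 public static void main(String[] args) {
  String[] plain = {"a,b", "c"};
  // Encode as encodeArray does: escape each element, then join with commas
  String[] escaped = new String[plain.length];
  for (int i = 0; i < plain.length; i++) {
   escaped[i] = StringUtils.escapeString(plain[i]);
  }
  String encoded = StringUtils.arrayToString(escaped);   // "a\,b,c"
  // Decoding sketch: split on unescaped commas, then un-escape each element
  String[] decoded = StringUtils.split(encoded);
  for (int i = 0; i < decoded.length; i++) {
   decoded[i] = StringUtils.unEscapeString(decoded[i]);
  }
  System.out.println(encoded + " -> " + Arrays.toString(decoded));
 }
}
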
origin: apache/hive

 .append(StringUtils.escapeString(qualifiedPath));
separator = StringUtils.COMMA_STR;
origin: apache/hive

String escapedQuery = StringUtils.escapeString(query, ESCAPE_CHAR, escapedChars);
String sql = String.format(SPLIT_QUERY, escapedQuery, numSplits);
try {
origin: apache/hive

for(String whProp : webhcatHiveProps) {
 hiveProps.append(hiveProps.length() > 0 ? "," : "").append(StringUtils.escapeString(whProp));
origin: apache/hive

job.set("mapred.input.dir", StringUtils.escapeString(currPath.toString()));
origin: apache/drill

protected FetchInputFormatSplit[] getNextSplits() throws Exception {
 while (getNextPath()) {
  // not using FileInputFormat.setInputPaths() here because it forces a connection to the
  // default file system - which may or may not be online during pure metadata operations
  job.set("mapred.input.dir", StringUtils.escapeString(currPath.toString()));
  // Fetch operator is not vectorized and as such turn vectorization flag off so that
  // non-vectorized record reader is created below.
  HiveConf.setBoolVar(job, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  Class<? extends InputFormat> formatter = currDesc.getInputFileFormatClass();
  Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job);
  InputFormat inputFormat = getInputFormatFromCache(formatter, job);
  InputSplit[] splits = inputFormat.getSplits(job, 1);
  FetchInputFormatSplit[] inputSplits = new FetchInputFormatSplit[splits.length];
  for (int i = 0; i < splits.length; i++) {
   inputSplits[i] = new FetchInputFormatSplit(splits[i], inputFormat);
  }
  if (work.getSplitSample() != null) {
   inputSplits = splitSampling(work.getSplitSample(), inputSplits);
  }
  if (inputSplits.length > 0) {
   return inputSplits;
  }
 }
 return null;
}
origin: apache/hive

  org.apache.hadoop.util.StringUtils.escapeString(parentDir.getAbsolutePath()));
inputSplits = inputFormat.getSplits(localJc, 1);
actualSplitNum = inputSplits.length;
origin: apache/accumulo

private void addField(List<String> fields, String name, Object value) {
 String key = StringUtils.escapeString(name, '\\', new char[] {',', '='});
 String val = StringUtils.escapeString(String.valueOf(value), '\\', new char[] {',', '='});
 fields.add(key + '=' + val);
}
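A runnable sketch of the multi-character overload used here (the sample values are illustrative): both ',' and '=' are escaped, so the resulting key=value pairs can still be parsed unambiguously.

import org.apache.hadoop.util.StringUtils;

public class EscapeMultipleCharsDemo {
 public static void main(String[] args) {
  char[] special = {',', '='};
  // Both ',' and '=' are escaped with '\'
  String key = StringUtils.escapeString("row,id", '\\', special); // "row\,id"
  String val = StringUtils.escapeString("a=b", '\\', special);    // "a\=b"
  System.out.println(key + '=' + val);
 }
}
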
origin: ml.shifu/guagua-yarn

public static void addInputPath(Configuration conf, Path path) throws IOException {
  path = path.getFileSystem(conf).makeQualified(path);
  String dirStr = org.apache.hadoop.util.StringUtils.escapeString(path.toString());
  String dirs = conf.get(GuaguaYarnConstants.GUAGUA_YARN_INPUT_DIR);
  conf.set(GuaguaYarnConstants.GUAGUA_YARN_INPUT_DIR, dirs == null ? dirStr : dirs + "," + dirStr);
}
origin: cwensel/cascading

public static void addInputPath( Configuration conf, Path path )
 {
 Path workingDirectory = getWorkingDirectory( conf );
 path = new Path( workingDirectory, path );
 String dirStr = StringUtils.escapeString( path.toString() );
 String dirs = conf.get( "mapred.input.dir" );
 conf.set( "mapred.input.dir", dirs == null ? dirStr :
  dirs + StringUtils.COMMA_STR + dirStr );
 }
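A simplified, standalone sketch of the same append-and-read-back pattern (it skips the working-directory and path-qualification steps from the snippets above, and the sample paths are made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;

public class AddInputPathDemo {
 public static void main(String[] args) {
  Configuration conf = new Configuration();
  // Append each path to "mapred.input.dir", escaping commas so a comma
  // inside a path name is not read as a separator
  for (String p : new String[] {"/in/a,b", "/in/c"}) {
   String dirStr = StringUtils.escapeString(new Path(p).toString());
   String dirs = conf.get("mapred.input.dir");
   conf.set("mapred.input.dir", dirs == null ? dirStr : dirs + StringUtils.COMMA_STR + dirStr);
  }
  // Read back: split on unescaped commas, then un-escape each entry
  for (String piece : StringUtils.split(conf.get("mapred.input.dir"))) {
   System.out.println(new Path(StringUtils.unEscapeString(piece)));
  }
 }
}
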
origin: org.apache.hadoop/hadoop-mapreduce-client-app

<T> SummaryBuilder add(String key, T value) {
 String escapedString = StringUtils.escapeString(String.valueOf(value), 
   StringUtils.ESCAPE_CHAR, charsToEscape).replaceAll("\n", "\\\\n")
                       .replaceAll("\r", "\\\\r");
 return _add(key, escapedString);
}
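A runnable sketch of this pattern; note that charsToEscape is defined by the surrounding class, so the delimiter set used here is only an assumption, as is the sample value.

import org.apache.hadoop.util.StringUtils;

public class SummaryEscapeDemo {
 public static void main(String[] args) {
  // Assumed delimiter set; the real charsToEscape is defined by the caller
  char[] charsToEscape = {StringUtils.COMMA, '=', '[', ']'};
  String value = "queue=default\nstate=RUNNING";
  String escaped = StringUtils.escapeString(String.valueOf(value),
    StringUtils.ESCAPE_CHAR, charsToEscape)
    .replaceAll("\n", "\\\\n")
    .replaceAll("\r", "\\\\r");
  // The result stays on one line and the '=' / ',' delimiters remain unambiguous
  System.out.println("appState=" + escaped);
 }
}
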
org.apache.hadoop.util.StringUtils.escapeString

Javadoc

Escape commas in the string using the default escape char

Popular methods of StringUtils

  • stringifyException
    Make a string representation of the exception.
  • join
    Concatenates strings, using a separator.
  • split
  • arrayToString
  • toLowerCase
    Converts all of the characters in this String to lower case with Locale.ENGLISH.
  • startupShutdownMessage
    Print a log message for starting up and shutting down
  • getStrings
    Returns an arraylist of strings.
  • toUpperCase
    Converts all of the characters in this String to upper case with Locale.ENGLISH.
  • byteToHexString
    Given an array of bytes it will convert the bytes to a hex string representation of the bytes
  • formatTime
    Given the time in long milliseconds, returns a String in the format Xhrs, Ymins, Z sec.
  • unEscapeString
  • getStringCollection
    Returns a collection of strings.
  • byteDesc
  • formatPercent
  • getTrimmedStrings
  • equalsIgnoreCase
  • format
  • formatTimeDiff
  • getTrimmedStringCollection
