/**
 * Returns a debug string representation of this tokenizer.
 *
 * <p>NOTE(review): the previous Javadoc described a content getter; this is
 * {@code toString()} and returns a diagnostic string, not the parsed input.</p>
 *
 * @return {@code "StrTokenizer[not tokenized yet]"} if the input has not been
 *         tokenized yet, otherwise {@code "StrTokenizer"} followed by the token list
 */
@Override public String toString() { if (tokens == null) { return "StrTokenizer[not tokenized yet]"; } return "StrTokenizer" + getTokenList(); }
/**
 * Checks that the token array and the token list expose the same tokens,
 * and that a three-word input yields exactly three tokens.
 */
@Test
public void testListArray() {
    final StrTokenizer tokenizer = new StrTokenizer("a b c");

    final String[] tokenArray = tokenizer.getTokenArray();
    final List<?> tokenList = tokenizer.getTokenList();

    assertEquals(Arrays.asList(tokenArray), tokenList);
    assertEquals(3, tokenList.size());
}
/**
 * Returns a string representation of this tokenizer, intended for debugging.
 *
 * <p>NOTE(review): the former Javadoc ("Gets the String content being parsed")
 * did not match the behavior — the token list, not the raw content, is shown.</p>
 *
 * @return {@code "StrTokenizer[not tokenized yet]"} before tokenization,
 *         otherwise {@code "StrTokenizer"} concatenated with the token list
 */
@Override public String toString() { if (tokens == null) { return "StrTokenizer[not tokenized yet]"; } return "StrTokenizer" + getTokenList(); }
/**
 * Returns a diagnostic string form of this tokenizer.
 *
 * <p>NOTE(review): the original Javadoc described a content accessor; this
 * override actually renders the parsed tokens for debugging.</p>
 *
 * @return {@code "StrTokenizer[not tokenized yet]"} if {@code tokens} is null,
 *         otherwise {@code "StrTokenizer"} followed by the token list
 */
@Override public String toString() { if (tokens == null) { return "StrTokenizer[not tokenized yet]"; } return "StrTokenizer" + getTokenList(); }
/**
 * Returns a human-readable representation of this tokenizer for debugging.
 *
 * <p>NOTE(review): corrected Javadoc — this method does not return the string
 * content being parsed; it reports tokenization state and the token list.</p>
 *
 * @return {@code "StrTokenizer[not tokenized yet]"} when nothing has been
 *         tokenized, otherwise {@code "StrTokenizer"} plus the token list
 */
@Override public String toString() { if (tokens == null) { return "StrTokenizer[not tokenized yet]"; } return "StrTokenizer" + getTokenList(); }
// NOTE(review): snippet/pseudocode — the "..." placeholders are not valid Java.
// Pattern shown: reuse one CSV tokenizer across many data lines by calling
// reset(dataLine) per iteration, collecting all tokens into a single list.
StrTokenizer tokenizer = StrTokenizer.getCSVInstance();
List<String> allTokens = new ArrayList<String>();
while (...) {
    tokenizer.reset(dataLine);
    allTokens.addAll(tokenizer.getTokenList());
    ...
}
/**
 * Tokenizes the given input string using the enclosing class's shared tokenizer.
 *
 * <p>NOTE(review): the shared {@code tokenizer} is reset on every call, so this
 * function is presumably not thread-safe — confirm callers are single-threaded.</p>
 *
 * @param input the string to tokenize; may be null
 * @return the list of tokens, or null when the input is null
 */
@Nullable
@Override
public List<String> apply(@Nullable String input) {
    if (input == null) {
        return null;
    }
    tokenizer.reset(input);
    return tokenizer.getTokenList();
}
// Closes the enclosing anonymous class (its opening is outside this view).
}
/**
 * Builds the full command: the configured base arguments followed by any
 * tokens parsed from the supplied command line.
 *
 * @param commandLine comma-delimited, double-quote-escaped extra arguments; may be empty
 * @return a new mutable list holding the base arguments plus the parsed tokens
 */
private List<String> processCommand(String commandLine) {
    final List<String> command = new ArrayList<>(arguments);
    if (!isEmpty(commandLine)) {
        final StrTokenizer tokenizer = new StrTokenizer(commandLine, ',', '"');
        command.addAll(tokenizer.getTokenList());
    }
    return command;
}
/**
 * Parses a fully qualified dotted schema path into a list of strings.
 * <pre>
 * a.b.`c.json` -&gt; [a, b, c.json]
 * a.b.`c-1`    -&gt; [a, b, c-1]
 * a.b.c        -&gt; [a, b, c]
 * </pre>
 *
 * @param path dotted schema path
 * @return list of path components
 */
public static List<String> parseFullPath(final String path) {
    // Quoted segments (SqlUtils.QUOTE) keep embedded delimiters intact;
    // empty tokens (e.g. from "a..b") are dropped.
    final StrTokenizer tokenizer = new StrTokenizer(path, PATH_DELIMITER, SqlUtils.QUOTE).setIgnoreEmptyTokens(true);
    return tokenizer.getTokenList();
}
/**
 * Parses a dot-delimited schema path into its component entries, honoring
 * quoted segments and discarding empty tokens.
 *
 * @param schemaPath the dot-delimited schema path to parse
 * @return the list of schema path entries
 */
public static List<String> parseSchemaPath(String schemaPath) {
    final StrTokenizer tokenizer = new StrTokenizer(schemaPath, '.', SqlUtils.QUOTE);
    tokenizer.setIgnoreEmptyTokens(true);
    return tokenizer.getTokenList();
}
/**
 * Converts a filesystem path into its component names.
 * <pre>
 * /a/b/c -&gt; [a, b, c]
 * </pre>
 *
 * @param fsPath a slash-delimited path string; may be null
 * @return list of path components, or {@code EMPTY_SCHEMA_PATHS} when fsPath is null
 */
public static List<String> toPathComponents(String fsPath) {
    if (fsPath == null) {
        return EMPTY_SCHEMA_PATHS;
    }
    // Quoted segments are preserved verbatim; empty components (leading
    // slash, doubled slashes) are dropped.
    return new StrTokenizer(fsPath, SLASH_CHAR, SqlUtils.QUOTE)
        .setIgnoreEmptyTokens(true)
        .getTokenList();
}
/** Split string x in tokens. Effectively just a friendly wrapper around StrTokenizer. * Use *single* quotes for avoiding splitting. */ public static ArrayList<String> tokenize(String x, String delimiterString){ if(x == null){ return null; } // This is a hack to allow empty tokens to be passed at the command line. // An empty x= x.replace("''", "' '"); // See also http://stackoverflow.com/questions/38161437/inconsistent-behaviour-of-strtokenizer-to-split-string StrTokenizer str= new StrTokenizer(x); str.setTrimmerMatcher(StrMatcher.spaceMatcher()); str.setDelimiterString(delimiterString); str.setQuoteChar('\''); // str.setIgnoreEmptyTokens(false); ArrayList<String> tokens= (ArrayList<String>) str.getTokenList(); for(int i= 0; i < tokens.size(); i++){ String tok= tokens.get(i).trim(); tokens.set(i, tok); } return tokens; }
// NOTE(review): fragment — the enclosing method and the if/for blocks opened
// here are closed outside this view.
if (arguments != null) {
    if (logger.isTraceEnabled()) {
        logger.trace("Arguments found for {}, Tokens: {}", action, strTokenizer.getTokenList());
        logger.trace("Arguments for {}: {}", action, arguments);
        // NOTE(review): getTokenList() is invoked a second time below; if
        // tokenization is expensive, consider caching the list once.
        for (String token : strTokenizer.getTokenList()) {
            boolean argContains = arguments.contains(token);
            // Flags (tokens starting with "-") absent from the known arguments
            // are handled in the branch that continues past this view.
            if (token.startsWith("-") && !argContains) {
// NOTE(review): fragment — this expression's result is consumed by surrounding
// code not visible here. Splits the dot-delimited store table path, using the
// configured quote character and dropping empty segments.
new StrTokenizer(storeTablePath, '.', quoting.string.charAt(0))
    .setIgnoreEmptyTokens(true)
    .getTokenList();
// NOTE(review): fragment — result consumed by surrounding code not visible
// here. Same pattern as above but the quote character comes from the config's
// quoting settings; empty segments are dropped.
new StrTokenizer(storeTablePath, '.', config.quoting().string.charAt(0))
    .setIgnoreEmptyTokens(true)
    .getTokenList();