@Test
public void testTrimMatcher() {
    // trimMatcher() returns a cached singleton, so two calls yield the same instance.
    final StrMatcher matcher = StrMatcher.trimMatcher();
    assertSame(matcher, StrMatcher.trimMatcher());
    // Expected match lengths at BUFFER1 positions 2 through 10:
    // 0 = no trimmable char at that position, 1 = a single trimmable char matched.
    final int[] expectedLengths = {0, 1, 0, 1, 0, 1, 1, 1, 1};
    for (int pos = 2; pos <= 10; pos++) {
        assertEquals(expectedLengths[pos - 2], matcher.isMatch(BUFFER1, pos));
    }
}
@Test
public void test4() {
    // With ignoreEmptyTokens=true, whitespace-only and empty tokens are dropped entirely.
    final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
    final StrTokenizer tokenizer = new StrTokenizer(input);
    tokenizer.setDelimiterChar(';');
    tokenizer.setQuoteChar('"');
    tokenizer.setIgnoredMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(true);
    final String[] actual = tokenizer.getTokenArray();
    final String[] expected = {"a", "b", "c", "d;\"e", "f"};
    assertEquals(ArrayUtils.toString(actual), expected.length, actual.length);
    for (int i = 0; i < expected.length; i++) {
        assertEquals("token[" + i + "] was '" + actual[i] + "' but was expected to be '" + expected[i] + "'",
                expected[i], actual[i]);
    }
}
@Test
public void test1() {
    // With ignoreEmptyTokens=false, trimmed-away and empty fields survive as "" tokens.
    final String input = "a;b;c;\"d;\"\"e\";f; ; ; ";
    final StrTokenizer tokenizer = new StrTokenizer(input);
    tokenizer.setDelimiterChar(';');
    tokenizer.setQuoteChar('"');
    tokenizer.setIgnoredMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    final String[] actual = tokenizer.getTokenArray();
    final String[] expected = {"a", "b", "c", "d;\"e", "f", "", "", ""};
    assertEquals(ArrayUtils.toString(actual), expected.length, actual.length);
    for (int i = 0; i < expected.length; i++) {
        assertEquals("token[" + i + "] was '" + actual[i] + "' but was expected to be '" + expected[i] + "'",
                expected[i], actual[i]);
    }
}
@Test
public void testBasicQuoted4() {
    // Two adjacent quoted sections within one field are joined ("'b' 'c'" -> "b c").
    final StrTokenizer tokenizer = new StrTokenizer("a: 'b' 'c' :d", ':', '\'');
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("b c", tokenizer.next());
    assertEquals("d", tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test
public void testBasicTrimmed1() {
    // Trailing whitespace-only field is trimmed to empty and then reported as null
    // because emptyTokenAsNull is enabled.
    final StrTokenizer tokenizer = new StrTokenizer("a: b : ", ':');
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("b", tokenizer.next());
    assertNull(tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test
public void testBasicQuoted5() {
    // Quoted and unquoted runs inside the same field concatenate ("'b'x'c'" -> "bxc").
    final StrTokenizer tokenizer = new StrTokenizer("a: 'b'x'c' :d", ':', '\'');
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("bxc", tokenizer.next());
    assertEquals("d", tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test
public void testBasicQuotedTrimmed1() {
    // Whitespace around a quoted token is trimmed; the empty final field becomes null.
    final StrTokenizer tokenizer = new StrTokenizer("a: 'b' :", ':', '\'');
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("b", tokenizer.next());
    assertNull(tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test
public void test5() {
    // Empty fields are retained but converted to null by setEmptyTokenAsNull(true).
    final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
    final StrTokenizer tokenizer = new StrTokenizer(input);
    tokenizer.setDelimiterChar(';');
    tokenizer.setQuoteChar('"');
    tokenizer.setIgnoredMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    final String[] actual = tokenizer.getTokenArray();
    final String[] expected = {"a", "b", "c", "d;\"e", "f", null, null, null};
    assertEquals(ArrayUtils.toString(actual), expected.length, actual.length);
    for (int i = 0; i < expected.length; i++) {
        assertEquals("token[" + i + "] was '" + actual[i] + "' but was expected to be '" + expected[i] + "'",
                expected[i], actual[i]);
    }
}
@Test
public void testBasicIgnoreTrimmed4() {
    // The ignored matcher strips "IGNORE" outside quotes only; inside quotes it is kept.
    final StrTokenizer tokenizer =
            new StrTokenizer("IGNOREaIGNORE: IGNORE 'bIGNOREc'IGNORE'd' IGNORE : IGNORE ", ':', '\'');
    tokenizer.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("bIGNOREcd", tokenizer.next());
    assertNull(tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test
public void testBasicIgnoreTrimmed1() {
    // "IGNORE" is removed from within an unquoted token ("bIGNOREc" -> "bc").
    final StrTokenizer tokenizer = new StrTokenizer("a: bIGNOREc : ", ':');
    tokenizer.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("bc", tokenizer.next());
    assertNull(tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test
public void testBasicIgnoreTrimmed2() {
    // "IGNORE" occurrences at the edges and interior of unquoted tokens are all stripped.
    final StrTokenizer tokenizer =
            new StrTokenizer("IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ", ':');
    tokenizer.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    tokenizer.setIgnoreEmptyTokens(false);
    tokenizer.setEmptyTokenAsNull(true);
    assertEquals("a", tokenizer.next());
    assertEquals("bc", tokenizer.next());
    assertNull(tokenizer.next());
    assertFalse(tokenizer.hasNext());
}
@Test public void test6() { final String input = "a;b; c;\"d;\"\"e\";f; ; ;"; final StrTokenizer tok = new StrTokenizer(input); tok.setDelimiterChar(';'); tok.setQuoteChar('"'); tok.setIgnoredMatcher(StrMatcher.trimMatcher()); tok.setIgnoreEmptyTokens(false); // tok.setTreatingEmptyAsNull(true); final String tokens[] = tok.getTokenArray(); final String expected[] = new String[]{"a", "b", " c", "d;\"e", "f", null, null, null,}; int nextCount = 0; while (tok.hasNext()) { tok.next(); nextCount++; } int prevCount = 0; while (tok.hasPrevious()) { tok.previous(); prevCount++; } assertEquals(ArrayUtils.toString(tokens), expected.length, tokens.length); assertTrue("could not cycle through entire token list" + " using the 'hasNext' and 'next' methods", nextCount == expected.length); assertTrue("could not cycle through entire token list" + " using the 'hasPrevious' and 'previous' methods", prevCount == expected.length); }
// Splits a comma-delimited list into tokens, honouring double-quoted sections
// and trimming surrounding whitespace from each token.
private String[] splitList(String commaDelimitedList) {
    final StrTokenizer tokenizer = new StrTokenizer(
            commaDelimitedList, StrMatcher.commaMatcher(), StrMatcher.quoteMatcher());
    tokenizer.setTrimmerMatcher(StrMatcher.trimMatcher());
    return tokenizer.getTokenArray();
}