/**
 * A line that matches none of the registered patterns must be rejected
 * with an {@link IllegalStateException}.
 */
@Test(expected = IllegalStateException.class)
public void testNoMatch() throws Exception {
	Map<String, LineTokenizer> single = Collections.singletonMap("foo", (LineTokenizer) new DelimitedLineTokenizer());
	tokenizer.setTokenizers(single);
	tokenizer.afterPropertiesSet();
	tokenizer.tokenize("nomatch");
}
/**
 * Validating the composite tokenizer before any tokenizers are registered
 * must fail with an {@link IllegalArgumentException}.
 */
@Test(expected = IllegalArgumentException.class)
public void testNoTokenizers() throws Exception {
	tokenizer.afterPropertiesSet();
	tokenizer.tokenize("a line");
}
/**
 * Maps a line by selecting a field-set mapper via the pattern matcher and
 * feeding it the field set produced by the composite tokenizer.
 * NOTE(review): presumably {@code patternMatcher.match} throws when no
 * pattern applies — confirm against the PatternMatcher contract.
 */
@Override
public T mapLine(String line, int lineNumber) throws Exception {
	return this.patternMatcher
			.match(line)
			.mapFieldSet(this.tokenizer.tokenize(line));
}
/**
 * Validates this mapper's configuration: delegates to the composite
 * tokenizer's own validation, then checks that a pattern matcher was set.
 *
 * @throws Exception if the delegate tokenizer's validation fails
 * @throws IllegalArgumentException if no {@code patternMatcher} was configured
 */
@Override
public void afterPropertiesSet() throws Exception {
	this.tokenizer.afterPropertiesSet();
	// Assert.notNull is the idiomatic form of isTrue(x != null, msg) and
	// throws the same IllegalArgumentException with the same message.
	Assert.notNull(this.patternMatcher, "The 'patternMatcher' property must be non-null");
}
/**
 * Registers the pattern-to-tokenizer map on the delegate composite tokenizer.
 *
 * @param tokenizers map from line pattern to the tokenizer used for matching lines
 */
public void setTokenizers(Map<String, LineTokenizer> tokenizers) {
	this.tokenizer.setTokenizers(tokenizers);
}
/**
 * Maps a line: the pattern matcher picks the field-set mapper, which then
 * consumes the field set the composite tokenizer extracts from the line.
 * NOTE(review): presumably {@code patternMatcher.match} throws when no
 * pattern applies — confirm against the PatternMatcher contract.
 */
@Override
public T mapLine(String line, int lineNumber) throws Exception {
	return this.patternMatcher
			.match(line)
			.mapFieldSet(this.tokenizer.tokenize(line));
}
/**
 * Validates this mapper's configuration: delegates to the composite
 * tokenizer's own validation, then checks that a pattern matcher was set.
 *
 * @throws Exception if the delegate tokenizer's validation fails
 * @throws IllegalArgumentException if no {@code patternMatcher} was configured
 */
@Override
public void afterPropertiesSet() throws Exception {
	this.tokenizer.afterPropertiesSet();
	// Assert.notNull is the idiomatic form of isTrue(x != null, msg) and
	// throws the same IllegalArgumentException with the same message.
	Assert.notNull(this.patternMatcher, "The 'patternMatcher' property must be non-null");
}
/**
 * Registers the pattern-to-tokenizer map on the delegate composite tokenizer.
 *
 * @param tokenizers map from line pattern to the tokenizer used for matching lines
 */
public void setTokenizers(Map<String, LineTokenizer> tokenizers) {
	this.tokenizer.setTokenizers(tokenizers);
}
/**
 * When a more specific pattern ("foo*") is registered alongside the
 * wildcard "*", a line starting with "foo" must be routed to the specific
 * tokenizer. LinkedHashMap is used deliberately so "*" is inserted first.
 */
@Test
public void testEmptyKeyDoesNotMatchWhenAlternativeAvailable() throws Exception {
	LineTokenizer returnsNull = new LineTokenizer() {
		@Override
		public FieldSet tokenize(String line) {
			return null;
		}
	};
	Map<String, LineTokenizer> tokenizers = new LinkedHashMap<>();
	tokenizers.put("*", returnsNull);
	tokenizers.put("foo*", new DelimitedLineTokenizer());
	tokenizer.setTokenizers(tokenizers);
	tokenizer.afterPropertiesSet();
	FieldSet fields = tokenizer.tokenize("foo,bar");
	assertEquals("bar", fields.readString(1));
}
/**
 * The wildcard pattern "*" must match a line that no other registered
 * pattern matches, so "abc" is tokenized by the delimited tokenizer.
 */
@Test
public void testEmptyKeyMatchesAnyLine() throws Exception {
	LineTokenizer returnsNull = new LineTokenizer() {
		@Override
		public FieldSet tokenize(String line) {
			return null;
		}
	};
	Map<String, LineTokenizer> tokenizers = new HashMap<>();
	tokenizers.put("*", new DelimitedLineTokenizer());
	tokenizers.put("foo", returnsNull);
	tokenizer.setTokenizers(tokenizers);
	tokenizer.afterPropertiesSet();
	FieldSet fields = tokenizer.tokenize("abc");
	assertEquals(1, fields.getFieldCount());
}
/**
 * A pattern with a trailing '*' ("foo*") must match any line starting with
 * that prefix; the matched tokenizer echoes the whole line as one field.
 */
@Test
public void testMatchWithPrefix() throws Exception {
	LineTokenizer echo = new LineTokenizer() {
		@Override
		public FieldSet tokenize(String line) {
			return new DefaultFieldSet(new String[] { line });
		}
	};
	tokenizer.setTokenizers(Collections.singletonMap("foo*", echo));
	tokenizer.afterPropertiesSet();
	FieldSet fields = tokenizer.tokenize("foo bar");
	assertEquals(1, fields.getFieldCount());
	assertEquals("foo bar", fields.readString(0));
}

}