/** Return the token image if this token is a keyword; otherwise {@code null}. */
public String asWord() {
    return hasType(TokenType.KEYWORD) ? tokenImage : null;
}
/** Return the current token without advancing, or {@code null} when exhausted. */
@Override
public final Token peek() {
    return hasNext() ? token : null;
}
/**
 * Create a tokenizer over the content of a single line.
 *
 * @param line Content
 * @return Tokenizer
 */
protected Tokenizer getTokenizer(String line) {
    return TokenizerFactory.makeTokenizerString(line);
}
@Test public void tokenizer_BOM_1() { // BOM ByteArrayInputStream in = bytes("\uFEFF'abc'") ; Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(in) ; assertTrue(tokenizer.hasNext()) ; Token token = tokenizer.next() ; assertNotNull(token) ; assertEquals(TokenType.STRING, token.getType()) ; assertEquals("abc", token.getImage()) ; assertFalse(tokenizer.hasNext()) ; }
/**
 * Consume the next token and check its type and images.
 * <p>
 * Fix: the original contained a copy-paste error — when {@code tokenImage2}
 * was non-null it re-asserted {@code tokenImage1} against {@code getImage()}
 * (an exact duplicate of the preceding check) instead of testing anything
 * about image2. The image2 check is the unconditional assertion below, so
 * the duplicated guard is simply removed.
 *
 * @param tokenizer   source of tokens; must have a next token
 * @param tokenType   expected token type
 * @param tokenImage1 expected primary image, or null to skip the check
 * @param tokenImage2 expected secondary image (may be null, in which case
 *                    the token's image2 must also be null)
 * @return the consumed token
 */
private static Token testNextToken(Tokenizer tokenizer, TokenType tokenType, String tokenImage1, String tokenImage2) {
    assertTrue(tokenizer.hasNext());
    Token token = tokenizer.next();
    assertNotNull(token);
    assertEquals(tokenType, token.getType());
    if ( tokenImage1 != null )
        assertEquals(tokenImage1, token.getImage());
    assertEquals(tokenImage2, token.getImage2());
    return token;
}
@Test
public void tokenUnit_pname9() {
    // "a123:" tokenizes as a prefixed name with an empty local part;
    // the following "-456" is a separate integer token.
    Tokenizer tokenizer = tokenizeAndTestFirst("a123:-456", TokenType.PREFIXED_NAME, "a123", "");
    assertTrue(tokenizer.hasNext());
    Token next = tokenizer.next();
    assertEquals(TokenType.INTEGER, next.getType());
    assertEquals("-456", next.getImage());
}
/**
 * Tokenize {@code input} into a single token and check every facet of it:
 * type, both images and both sub-tokens.
 *
 * @return the token for further inspection
 */
private static Token tokenizeAndTest(String input, TokenType tokenType, String tokenImage1, String tokenImage2, Token subToken1, Token subToken2) {
    Token actual = tokenFor(input);
    assertNotNull(actual);
    assertEquals(tokenType, actual.getType());
    assertEquals(tokenImage1, actual.getImage());
    assertEquals(tokenImage2, actual.getImage2());
    assertEquals(subToken1, actual.getSubToken1());
    assertEquals(subToken2, actual.getSubToken2());
    return actual;
}
/**
 * Convert {@code node} to a token (using the class-level {@code base} and
 * {@code prefixMap}) and check its type, images and sub-tokens.
 */
private static void test(Node node, TokenType type, String image, String image2, Token subToken1, Token subToken2) {
    Token actual = Token.tokenForNode(node, base, prefixMap);
    assertEquals(type, actual.getType());
    assertEquals(image, actual.getImage());
    assertEquals(image2, actual.getImage2());
    assertEquals(subToken1, actual.getSubToken1());
    assertEquals(subToken2, actual.getSubToken2());
}
}
/**
 * An ASCII tokenizer must reject non-ASCII input ("é") with a
 * {@link RiotParseException}.
 * <p>
 * Cleanup: the original assigned {@code tokenizer.next()} to an unused
 * local {@code t}; the call is kept for its side effect (it throws).
 */
@Test(expected = RiotParseException.class)
public void tokenizer_charset_2() {
    ByteArrayInputStream in = bytes("'abcdé'");
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in);
    tokenizer.next();   // Expected to throw on the non-ASCII character.
    assertFalse(tokenizer.hasNext());
}
/**
 * Tokenize {@code string}, asserting that it yields exactly one token.
 *
 * @return that single token
 */
private static Token tokenFor(String string) {
    Tokenizer tokenizer = tokenizer(string);
    assertTrue(tokenizer.hasNext());
    Token only = tokenizer.next();
    assertFalse(tokenizer.hasNext());
    return only;
}
private static void tokenFirst(String string) { Tokenizer tokenizer = tokenizer(string) ; assertTrue(tokenizer.hasNext()) ; assertNotNull(tokenizer.next()) ; // Maybe more. // assertFalse(tokenizer.hasNext()) ; }
/**
 * Two newlines separated by a space yield two NL tokens (newline mode on).
 * <p>
 * Fix: the method was missing the {@code @Test} annotation, so JUnit 4
 * never executed it; every sibling test in this file carries the annotation.
 */
@Test
public void token_newlines_6() {
    Tokenizer tokenizer = tokenizer("\n \n", true);
    testNextToken(tokenizer, TokenType.NL);
    testNextToken(tokenizer, TokenType.NL);
    assertFalse(tokenizer.hasNext());
}
}
/** Assert that {@code string} tokenizes to exactly one token of the expected symbol type. */
private static void testSymbol(String string, TokenType expected) {
    Tokenizer tokenizer = tokenizeAndTestFirst(string, expected, null);
    assertFalse(tokenizer.hasNext());
}
/**
 * Tokenize {@code input} into a single STRING token and additionally
 * check its string (quoting) subtype.
 *
 * @return the token
 */
private static Token tokenizeAndTestExact(String input, StringType stringType, String tokenImage) {
    Token result = tokenizeAndTestExact(input, TokenType.STRING, tokenImage, null);
    assertEquals(stringType, result.getStringType());
    return result;
}
@Test
public void tokenUnit_pname2() {
    // A trailing "." is not part of the prefixed name "a:b" — it becomes
    // a separate DOT token.
    Tokenizer tokenizer = tokenizeAndTestFirst("a:b.", TokenType.PREFIXED_NAME, "a", "b");
    assertTrue(tokenizer.hasNext());
    Token next = tokenizer.next();
    assertEquals(TokenType.DOT, next.getType());
}
/**
 * Tokenize {@code input} into a single token and check type, images and
 * sub-tokens.
 * <p>
 * Fix: added the {@code assertNotNull(token)} guard present in the
 * parallel helper {@code tokenizeAndTest}, so a null token fails with a
 * clear assertion instead of a NullPointerException.
 *
 * @return the token
 */
private static Token tokenizeAndTestExact(String input, TokenType tokenType, String tokenImage1, String tokenImage2, Token subToken1, Token subToken2) {
    Token token = tokenFor(input);
    assertNotNull(token);
    assertEquals(tokenType, token.getType());
    assertEquals(tokenImage1, token.getImage());
    assertEquals(tokenImage2, token.getImage2());
    assertEquals(subToken1, token.getSubToken1());
    assertEquals(subToken2, token.getSubToken2());
    return token;
}
/**
 * Pure-ASCII input is accepted by the ASCII tokenizer.
 * <p>
 * Fix: the local {@code t} was assigned and never used; assert on it so
 * the test actually checks that a token was produced.
 */
@Test
public void tokenizer_charset_1() {
    ByteArrayInputStream in = bytes("'abc'");
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in);
    Token t = tokenizer.next();
    assertNotNull(t);
    assertFalse(tokenizer.hasNext());
}
/**
 * Tokenize {@code input}, check the first token's type and images, and
 * require that it is the only token.
 *
 * @return the token
 */
private static Token tokenizeAndTestExact(String input, TokenType tokenType, String tokenImage1, String tokenImage2) {
    Tokenizer tokenizer = tokenizer(input);
    Token first = testNextToken(tokenizer, tokenType, tokenImage1, tokenImage2);
    assertFalse("Excess tokens", tokenizer.hasNext());
    return first;
}
/**
 * An ASCII tokenizer must reject an IRI containing a non-ASCII character
 * with a {@link RiotParseException}.
 * <p>
 * Cleanup: removed the unused local {@code t}; {@code next()} is kept for
 * its side effect (it throws).
 */
@Test(expected = RiotParseException.class)
public void tokenizer_charset_3() {
    ByteArrayInputStream in = bytes("<http://example/abcdé>");
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in);
    tokenizer.next();   // Expected to throw on the non-ASCII character.
    assertFalse(tokenizer.hasNext());
}
/**
 * A keyword followed by two newlines yields KEYWORD then a single NL token
 * (consecutive newlines collapse; newline mode on).
 * <p>
 * Fix: the method was missing the {@code @Test} annotation, so JUnit 4
 * never executed it; every sibling test in this file carries the annotation.
 */
@Test
public void token_newlines_5() {
    Tokenizer tokenizer = tokenizer("abc\n\n", true);
    testNextToken(tokenizer, TokenType.KEYWORD, "abc");
    testNextToken(tokenizer, TokenType.NL);
    assertFalse(tokenizer.hasNext());
}