/** Tokenizes {@code string}, checks the single resulting token is of {@code expected} type, and verifies no tokens follow. */
private static void testSymbol(String string, TokenType expected) {
    Tokenizer tok = tokenizeAndTestFirst(string, expected, null);
    // Exactly one token: nothing may remain after the first.
    assertFalse(tok.hasNext());
}
/**
 * Tokenizes {@code input} and asserts it yields exactly one token with the given
 * type and images.
 *
 * @return the single token produced, for further inspection by the caller
 */
private static Token tokenizeAndTestExact(String input, TokenType tokenType, String tokenImage1, String tokenImage2) {
    Tokenizer tok = tokenizer(input);
    Token result = testNextToken(tok, tokenType, tokenImage1, tokenImage2);
    // The whole input must be consumed by that one token.
    assertFalse("Excess tokens", tok.hasNext());
    return result;
}
/** Blank node label followed by DOT: "_:x. " tokenizes as BNODE("x") then DOT. */
@Test
public void tokenUnit_bNode5() {
    Tokenizer tok = tokenizeAndTestFirst("_:x. ", TokenType.BNODE, "x");
    testNextToken(tok, TokenType.DOT);
    assertFalse(tok.hasNext());
}
/** With line-mode enabled, a lone newline produces a single NL token. */
@Test
public void token_newlines_1() {
    Tokenizer tok = tokenizer("\n", true);
    testNextToken(tok, TokenType.NL);
    assertFalse(tok.hasNext());
}
/** Two newlines separated by a space produce two NL tokens (the space is skipped). */
// FIX: method was missing @Test, so JUnit 4 never executed this test.
@Test
public void token_newlines_6() {
    Tokenizer tokenizer = tokenizer("\n \n", true);
    testNextToken(tokenizer, TokenType.NL);
    testNextToken(tokenizer, TokenType.NL);
    assertFalse(tokenizer.hasNext());
}
}
/** A keyword followed by two newlines: consecutive newlines collapse to one NL token. */
// FIX: method was missing @Test, so JUnit 4 never executed this test.
@Test
public void token_newlines_5() {
    Tokenizer tokenizer = tokenizer("abc\n\n", true);
    testNextToken(tokenizer, TokenType.KEYWORD, "abc");
    testNextToken(tokenizer, TokenType.NL);
    assertFalse(tokenizer.hasNext());
}
/** An ASCII-only tokenizer must reject a string literal containing a non-ASCII character ('é'). */
// FIX: dropped the unused local `t` and the unreachable assertFalse — next() is
// expected to throw, so nothing after it ever runs.
@Test(expected = RiotParseException.class)
public void tokenizer_charset_2() {
    ByteArrayInputStream in = bytes("'abcdé'");
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in);
    tokenizer.next(); // Throws RiotParseException: 'é' is outside ASCII.
}
/** Blank node label stops at ':': "_:x:a. " splits into BNODE("x"), prefixed name ":a", then DOT. */
@Test
public void tokenUnit_bNode6() {
    Tokenizer tok = tokenizeAndTestFirst("_:x:a. ", TokenType.BNODE, "x");
    testNextToken(tok, TokenType.PREFIXED_NAME, "", "a");
    testNextToken(tok, TokenType.DOT);
    assertFalse(tok.hasNext());
}
/**
 * Tokenizes {@code string}, asserting it contains exactly one token.
 *
 * @return that single token
 */
private static Token tokenFor(String string) {
    Tokenizer tok = tokenizer(string);
    assertTrue(tok.hasNext());
    Token first = tok.next();
    // Input must be fully consumed by the single token.
    assertFalse(tok.hasNext());
    return first;
}
/** An ASCII-only tokenizer accepts a pure-ASCII string literal. */
// FIX: the token was captured but never inspected, so the test only counted
// tokens; now also asserts the type and image (same pattern as tokenizer_BOM_1).
@Test
public void tokenizer_charset_1() {
    ByteArrayInputStream in = bytes("'abc'");
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in);
    Token t = tokenizer.next();
    assertNotNull(t);
    assertEquals(TokenType.STRING, t.getType());
    assertEquals("abc", t.getImage());
    assertFalse(tokenizer.hasNext());
}
/** An ASCII-only tokenizer must reject an IRI containing a non-ASCII character ('é'). */
// FIX: dropped the unused local `t` and the unreachable assertFalse — next() is
// expected to throw, so nothing after it ever runs.
@Test(expected = RiotParseException.class)
public void tokenizer_charset_3() {
    ByteArrayInputStream in = bytes("<http://example/abcdé>");
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in);
    tokenizer.next(); // Throws RiotParseException: 'é' is outside ASCII.
}
/** "\n\r" between keywords collapses to a single NL token in line-mode. */
@Test
public void token_newlines_4() {
    Tokenizer tok = tokenizer("abc\n\rdef", true);
    testNextToken(tok, TokenType.KEYWORD, "abc");
    testNextToken(tok, TokenType.NL);
    testNextToken(tok, TokenType.KEYWORD, "def");
    assertFalse(tok.hasNext());
}
/** "|&amp;/" tokenizes to the three single-character symbol tokens VBAR, AMPHERSAND, SLASH. */
@Test
public void tokenUnit_symbol_16() {
    Tokenizer tok = tokenizer("|&/");
    // Note: AMPHERSAND is the enum constant's spelling in the project.
    testNextToken(tok, TokenType.VBAR);
    testNextToken(tok, TokenType.AMPHERSAND);
    testNextToken(tok, TokenType.SLASH);
    assertFalse(tok.hasNext());
}
/** A single '\n' between keywords yields one NL token in line-mode. */
@Test
public void token_newlines_2() {
    Tokenizer tok = tokenizer("abc\ndef", true);
    testNextToken(tok, TokenType.KEYWORD, "abc");
    testNextToken(tok, TokenType.NL);
    testNextToken(tok, TokenType.KEYWORD, "def");
    assertFalse(tok.hasNext());
}
/** ".;," tokenizes to the three punctuation tokens DOT, SEMICOLON, COMMA. */
@Test
public void tokenUnit_syntax2() {
    Tokenizer tok = tokenizer(".;,");
    testNextToken(tok, TokenType.DOT);
    testNextToken(tok, TokenType.SEMICOLON);
    testNextToken(tok, TokenType.COMMA);
    assertFalse(tok.hasNext());
}
/** "\n\n" between keywords collapses to a single NL token in line-mode. */
@Test
public void token_newlines_3() {
    Tokenizer tok = tokenizer("abc\n\ndef", true);
    testNextToken(tok, TokenType.KEYWORD, "abc");
    testNextToken(tok, TokenType.NL);
    testNextToken(tok, TokenType.KEYWORD, "def");
    assertFalse(tok.hasNext());
}
@Test public void tokenizer_BOM_1() { // BOM ByteArrayInputStream in = bytes("\uFEFF'abc'") ; Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(in) ; assertTrue(tokenizer.hasNext()) ; Token token = tokenizer.next() ; assertNotNull(token) ; assertEquals(TokenType.STRING, token.getType()) ; assertEquals("abc", token.getImage()) ; assertFalse(tokenizer.hasNext()) ; }
/** Two adjacent IRIs "&lt;x&gt;&lt;y&gt;" tokenize into exactly two IRI tokens, in order. */
@Test
public void token_multiple() {
    Tokenizer tok = tokenizer("<x><y>");

    assertTrue(tok.hasNext());
    Token first = tok.next();
    assertNotNull(first);
    assertEquals(TokenType.IRI, first.getType());
    assertEquals("x", first.getImage());

    assertTrue(tok.hasNext());
    Token second = tok.next();
    assertNotNull(second);
    assertEquals(TokenType.IRI, second.getType());
    assertEquals("y", second.getImage());

    assertFalse(tok.hasNext());
}