/**
 * Create an ASCII tokenizer over the UTF-8 byte form of a string.
 * Delegates to {@code makeTokenizerASCII(InputStream)}.
 */
public static Tokenizer makeTokenizerASCII(String string) {
    return makeTokenizerASCII(new ByteArrayInputStream(StrUtils.asUTF8bytes(string))) ;
}
/**
 * Create a parser for N-Triples.
 * @deprecated use an RDFDataMgr operation with argument Lang.NTRIPLES
 */
@Deprecated
public static LangNTriples createParserNTriples(InputStream input, StreamRDF dest) {
    return createParserNTriples(TokenizerFactory.makeTokenizerASCII(input), dest) ;
}
/**
 * Create a parser for N-Quads.
 * @deprecated use an RDFDataMgr operation with argument Lang.NQUADS
 */
@Deprecated
public static LangNQuads createParserNQuads(InputStream input, StreamRDF dest) {
    return createParserNQuads(TokenizerFactory.makeTokenizerASCII(input), dest) ;
}
/** Create a parser for N-Triples, with default behaviour */
public static LangNTriples createParserNTriples(InputStream input, StreamRDF dest) {
    Tokenizer tokens = TokenizerFactory.makeTokenizerASCII(input) ;
    return createParserNTriples(tokens, dest) ;
}
/**
 * Make an ASCII tokenizer reading the given string.
 * The string is converted to its UTF-8 bytes and streamed into the
 * {@code InputStream}-based factory method.
 */
public static Tokenizer makeTokenizerASCII(String string) {
    byte[] bytes = StrUtils.asUTF8bytes(string) ;
    ByteArrayInputStream input = new ByteArrayInputStream(bytes) ;
    return makeTokenizerASCII(input) ;
}
/** Create a parser for NQuads, with default behaviour */
public static LangNQuads createParserNQuads(InputStream input, StreamRDF dest) {
    Tokenizer tokens = TokenizerFactory.makeTokenizerASCII(input) ;
    return createParserNQuads(tokens, dest) ;
}
/**
 * Parse a file of N-Triples, producing one {@code Triple} per non-blank line.
 * <p>
 * Each line is tokenized and parsed independently, so the input must contain
 * exactly one triple per non-blank line (standard N-Triples layout).
 * <p>
 * The returned stream wraps {@link Files#lines}, which holds the file open:
 * callers MUST close the returned stream (e.g. try-with-resources) to release
 * the file handle.
 *
 * @param file the N-Triples file to read
 * @return a lazily-evaluated stream of triples, one per non-blank line
 * @throws IOException if the file cannot be opened
 */
public static Stream<Triple> parseLineRiot2(File file) throws IOException {
    return Files.lines(file.toPath())
        // Skip blank lines: calling next() on an empty line would throw
        // NoSuchElementException instead of yielding a triple.
        .filter(line -> !line.trim().isEmpty())
        // NOTE(review): parserProfile is presumably a field of the enclosing
        // class — confirm it is safe to share across per-line parsers.
        .map(line -> RiotParsers.createParserNTriples(
                TokenizerFactory.makeTokenizerASCII(line), null, parserProfile)
            .next());
}
}
/**
 * Build a tokenizer over the UTF-8 bytes of a string, selecting the
 * tokenizer implementation by the requested character space.
 */
static protected Tokenizer tokenizer(CharSpace charSpace, String string) {
    ByteArrayInputStream input = new ByteArrayInputStream(StrUtils.asUTF8bytes(string));
    if ( charSpace == CharSpace.ASCII )
        return TokenizerFactory.makeTokenizerASCII(input);
    return TokenizerFactory.makeTokenizerUTF8(input);
}
/** Build an N-Quads deserializer over the input stream. */
@Override
public Iterator<Quad> createDeserializer(InputStream in) {
    // Encoded-label policy; warnings suppressed; IRIs taken as-is (no resolution).
    ParserProfile profile = RiotLib.createParserProfile(
            RiotLib.factoryRDF(LabelToNode.createUseLabelEncoded()),
            ErrorHandlerFactory.errorHandlerNoWarnings,
            IRIResolver.createNoResolve(),
            false);
    return new LangNQuads(TokenizerFactory.makeTokenizerASCII(in), profile, null);
}
/** Build an N-Triples deserializer over the input stream. */
@Override
public Iterator<Triple> createDeserializer(InputStream in) {
    // Encoded-label policy; warnings suppressed; IRIs taken as-is (no resolution).
    ParserProfile profile = RiotLib.createParserProfile(
            RiotLib.factoryRDF(LabelToNode.createUseLabelEncoded()),
            ErrorHandlerFactory.errorHandlerNoWarnings,
            IRIResolver.createNoResolve(),
            false);
    return new LangNTriples(TokenizerFactory.makeTokenizerASCII(in), profile, null);
}
/** Build an N-Triples deserializer over the input stream. */
@Override
public Iterator<Triple> createDeserializer(InputStream in) {
    // No base URI, no IRI resolution; blank node labels use the encoded-label scheme.
    Prologue prologue = new Prologue(null, IRIResolver.createNoResolve()) ;
    ParserProfileBase profile =
        new ParserProfileBase(prologue, null, LabelToNode.createUseLabelEncoded()) ;
    Tokenizer tokens = TokenizerFactory.makeTokenizerASCII(in) ;
    return new LangNTriples(tokens, profile, null) ;
}
/** Build an N-Quads deserializer over the input stream. */
@Override
public Iterator<Quad> createDeserializer(InputStream in) {
    // No base URI, no IRI resolution; blank node labels use the encoded-label scheme.
    Prologue prologue = new Prologue(null, IRIResolver.createNoResolve()) ;
    ParserProfileBase profile =
        new ParserProfileBase(prologue, null, LabelToNode.createUseLabelEncoded()) ;
    Tokenizer tokens = TokenizerFactory.makeTokenizerASCII(in) ;
    return new LangNQuads(tokens, profile, null) ;
}
@Test(expected = RiotParseException.class)
public void tokenizer_charset_2() {
    // Non-ASCII character inside a string literal: the ASCII tokenizer must reject it.
    ByteArrayInputStream in = bytes("'abcdé'") ;
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in) ;
    tokenizer.next() ;
    assertFalse(tokenizer.hasNext()) ;
}
@Test
public void tokenizer_charset_1() {
    // Pure-ASCII string literal: tokenizes as exactly one token.
    ByteArrayInputStream in = bytes("'abc'") ;
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in) ;
    tokenizer.next() ;
    assertFalse(tokenizer.hasNext()) ;
}
@Test(expected = RiotParseException.class)
public void tokenizer_charset_3() {
    // Non-ASCII character inside an IRI: the ASCII tokenizer must reject it.
    ByteArrayInputStream in = bytes("<http://example/abcdé>") ;
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in) ;
    tokenizer.next() ;
    assertFalse(tokenizer.hasNext()) ;
}