/**
 * Reads a SPARQL result set from a CSV-encoded byte stream.
 * Delegates to {@link CSVInput#fromCSV}; the {@code context} is not consulted.
 */
@Override
public ResultSet read(InputStream in, Context context) {
    return CSVInput.fromCSV(in);
}

/**
 * Character-stream input is not supported for CSV results; always throws.
 *
 * @throws NotImplemented always
 */
@Override
public ResultSet read(Reader in, Context context) {
    throw new NotImplemented("Reader");
}
// NOTE(review): this fragment appears truncated/garbled — the method header is missing its
// opening brace, 'parser' is used without ever being created from 'in', and the body is cut
// off mid-statement (no return, unbalanced braces). It will not compile as-is.
// Presumably it mirrors fromCSV(): create a CSVParser over 'in', read the header variables,
// require exactly one variable, then parse a single boolean row — TODO recover the complete
// method from version control before relying on this code.
public static boolean booleanFromCSV(InputStream in) final List<Var> vars = vars(parser) ; if ( vars.size() != 1 ) { throw new ARQException("CSV Boolean Results malformed: variables line='"+vars+"'") ;
public static ResultSet fromCSV(InputStream in) { CSVParser parser = CSVParser.create(in) ; final List<Var> vars = vars(parser) ; List<String> varNames = Var.varNames(vars) ; Function<List<String>, Binding> transform = new Function<List<String>, Binding>(){ private int count = 1 ; @Override public Binding apply(List<String> row) { if ( row.size() != vars.size() ) FmtLog.warn(log, "Row %d: Length=%d: expected=%d", count, row.size(), vars.size()) ; BindingMap binding = BindingFactory.create() ; // Check. for (int i = 0 ; i < vars.size() ; i++ ) { Var v = vars.get(i) ; String field = (i<row.size()) ? row.get(i) : "" ; Node n = NodeFactory.createLiteral(field) ; binding.add(v, n); } count++ ; return binding ; }} ; Iterator<Binding> bindings = Iter.map(parser.iterator(), transform) ; //Generate an instance of ResultSetStream using TSVInputIterator //This will parse actual result rows as needed thus minimising memory usage return new ResultSetStream(varNames, null, bindings); }
/**
 * Round-trip helper: encodes {@code x} as UTF-8 bytes and drains the CSV-parsed
 * result set, forcing every row to be materialised (any parse error surfaces here).
 */
private void parseCSV(String x) {
    ByteArrayInputStream input = new ByteArrayInputStream(StrUtils.asUTF8bytes(x));
    ResultSet results = CSVInput.fromCSV(input);
    while (results.hasNext())
        results.nextBinding();
}
@Override public ResultSetRewindable readFrom(Class<ResultSetRewindable> type, Type type1, Annotation[] antns, javax.ws.rs.core.MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream in) throws IOException { if (log.isTraceEnabled()) log.trace("Reading ResultSet with HTTP headers: {} MediaType: {}", httpHeaders, mediaType); // result set needs to be rewindable because results might be processed multiple times, e.g. to calculate hash and write response // TO-DO: construct Jena's ResultFormat and then pass to ResultSet.load(in, format) if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_XML_TYPE)) return ResultSetFactory.makeRewindable(ResultSetFactory.fromXML(in)); if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_JSON_TYPE)) return ResultSetFactory.makeRewindable(ResultSetFactory.fromJSON(in)); if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_CSV_TYPE)) return ResultSetFactory.makeRewindable(CSVInput.fromCSV(in)); if (mediaType.isCompatible(com.atomgraph.core.MediaType.APPLICATION_SPARQL_RESULTS_CSV_TYPE)) return ResultSetFactory.makeRewindable(TSVInput.fromTSV(in)); throw new IllegalStateException("ResultSet MediaType should be readable but no Jena reader matched"); }