tmpVars.clear(); tmpVars.addAll(vars); QuerySolution q = rsw.nextSolution();
private int[] colWidths(ResultSetRewindable rs) { int numCols = rs.getResultVars().size() ; int numRows = 0 ; int[] colWidths = new int[numCols] ; // Widths at least that of the variable name. Assumes we will print col headings. for ( int i = 0 ; i < numCols ; i++ ) colWidths[i] = (rs.getResultVars().get(i)).length() ; // Preparation pass : find the maximum width for each column for ( ; rs.hasNext() ; ) { numRows++ ; QuerySolution rBind = rs.nextSolution() ; int col = -1 ; for ( String s1 : rs.getResultVars() ) { col++; String rVar = s1; String s = getVarValueAsString( rBind, rVar ); if ( colWidths[col] < s.length() ) { colWidths[col] = s.length(); } } } rs.reset() ; return colWidths ; }
QuerySolution rBind = resultSetRewindable.nextSolution(); for ( int col = 0 ; col < numCols ; col++ ) { String rVar = resultSet.getResultVars().get(col);
/**
 * Scans every solution in the result set. For each solution whose {@code ?o0}
 * binding is a blank node, all variables {@code o0..oDepth} are checked with
 * {@link #testOneQuerySolution}; solutions whose {@code ?o0} is not anonymous
 * are skipped. The result set is reset before returning on every path.
 *
 * @param rsw rewindable result set to scan
 * @param depth highest variable index, giving variables {@code o0..oDepth}
 * @return true if there are more blanknodes
 */
public static boolean testResultSet(ResultSetRewindable rsw, int depth) {
    // Build the variable names o0, o1, ..., oDepth.
    List<String> vars = new ArrayList<>();
    for (int i = 0; i <= depth; i++) {
        vars.add("o" + i);
    }
    while (rsw.hasNext()) {
        QuerySolution solution = rsw.nextSolution();
        // Only anonymous (blank-node) o0 bindings are subject to the check.
        if (solution.get("o0").isAnon() && !testOneQuerySolution(vars, solution)) {
            rsw.reset();
            return false;
        }
    }
    rsw.reset();
    return true;
}
/**
 * Logs a result set at WARNING (one line, solutions appended) and/or FINE
 * (full formatted table), depending on the logger's level. Resets the
 * result set before each dump so both views see every solution; the set is
 * left exhausted afterwards (callers must reset before reusing it).
 *
 * @param name label prefixed to the WARNING line
 * @param results rewindable result set to dump
 */
private static void logResults(final String name, final ResultSetRewindable results) {
    if (_logger.isLoggable(Level.WARNING)) {
        results.reset();
        final StringBuilder sb = new StringBuilder(name + " (" + results.size() + ")=");
        while (results.hasNext()) {
            final QuerySolution result = results.nextSolution();
            sb.append(result);
        }
        _logger.warning(sb.toString());
    }
    if (_logger.isLoggable(Level.FINE)) {
        // Fix: reset before formatting. The WARNING branch above (or a
        // previous caller) may have consumed the result set, which made
        // the formatted dump come out empty.
        results.reset();
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        ResultSetFormatter.out(out, results);
        _logger.fine(out.toString());
    }
}
/**
 * Converts a result set into a collection of variable-to-node maps, one map
 * per solution, suitable for order-independent comparison. Resets the result
 * set before iterating and leaves it exhausted.
 *
 * @param rs rewindable result set to convert
 * @return a set of {var -> bound node} maps, one per solution
 */
public static Collection<Map<String, RDFNode>> results(final ResultSetRewindable rs) {
    rs.reset();
    final List<String> vars = rs.getResultVars();
    final Set<Map<String, RDFNode>> results = new HashSet<>();
    while (rs.hasNext()) {
        // Fix: the original also built an identical map inline each
        // iteration and then discarded it (dead code) — solutionMap is the
        // single source of the per-solution map.
        results.add(solutionMap(rs.nextSolution(), vars));
    }
    return results;
}
/** * variable must be ?count * @param sparqlQueryString the SPARQL query * @return -1 on failure count on success */ public int queryAsCount(String sparqlQueryString) { SparqlQuery sq = new SparqlQuery(sparqlQueryString, sparqlEndpoint); ResultSetRewindable rsw = null; if(cache == null) { rsw = sq.send(); } else { // get JSON from cache and convert to result set String json = cache.executeSparqlQuery(sq); rsw = SparqlQuery.convertJSONtoResultSet(json); } int ret = -1; while(rsw.hasNext()){ QuerySolution qs = rsw.nextSolution(); ret = qs.getLiteral("count").getInt(); } return ret; }
/**
 * Asserts that two result sets contain the same solutions, ignoring order.
 * Fails the JUnit assertion when the sizes differ or when a solution from
 * the expected set is missing from the computed one; both sets are logged
 * before failing.
 *
 * @param expectedResults the reference result set
 * @param computedResults the result set under test
 * @return true when the sets match (a mismatch throws via Assert.fail)
 */
public static boolean assertEquals(final ResultSet expectedResults, final ResultSet computedResults) {
    final ResultSetRewindable expected = ResultSetFactory.makeRewindable(expectedResults);
    final ResultSetRewindable computed = ResultSetFactory.makeRewindable(computedResults);
    if (expected.size() != computed.size()) {
        logResults("Expected", expected);
        logResults("Real", computed);
        Assert.fail("Expected " + expected.size() + " but got " + computed.size());
    }
    final List<String> vars = expected.getResultVars();
    final Collection<Map<String, RDFNode>> results = results(computed);
    for (expected.reset(); expected.hasNext();) {
        final QuerySolution qs = expected.nextSolution();
        final Map<String, RDFNode> map = solutionMap(qs, vars);
        if (!results.contains(map)) {
            logResults("Expected", expected);
            logResults("Real", computed);
            // Fix: Assert.fail throws AssertionError, so the second
            // Assert.fail("Expected ... but got ...") that followed here
            // was unreachable dead code and has been removed.
            Assert.fail("Solution not found : " + map + "\tin results : " + results);
        }
    }
    return true;
}