/**
 * Evaluates the question's SPARQL query against the endpoint and stores the
 * extracted answer strings on the question as its golden answers.
 * A {@code null} result set is recorded as an empty answer set.
 *
 * @param sp endpoint wrapper used to run the query
 * @param q  question supplying the query and receiving the golden answers
 * @throws ExecutionException if query execution fails
 */
private static void answer(final ThreadedSPARQL sp, final IQuestion q) throws ExecutionException {
    final Set<RDFNode> result = sp.sparql(q.getSparqlQuery());
    if (result == null) {
        q.setGoldenAnswers(Sets.newHashSet());
    } else {
        q.setGoldenAnswers(SPARQL.extractAnswerStrings(result));
    }
}
public String process(String uri) throws JsonGenerationException, IOException { String query = PREFIXES + "SELECT (GROUP_CONCAT(distinct ?type;separator=' ') as ?types) (GROUP_CONCAT(distinct ?property;separator=' ') as ?properties) WHERE {\n" + "<" + uri + "> rdf:type ?type . FILTER(STRSTARTS(STR(?type), 'http://dbpedia.org/ontology')) . \n" + "<" + uri + "> ?property ?value . FILTER(STRSTARTS(STR(?property), 'http://dbpedia.org/ontology')) . \n" + "}"; QueryExecution queryExecution = executeQuery(query); // Get all Ontology Classes and Properties for given entity Iterator<QuerySolution> results = queryExecution.execSelect(); LinkedHashSet<Field> fields = new LinkedHashSet<>(); while (results.hasNext()) { QuerySolution solution = results.next(); if (solution.get("types") != null && solution.get("properties") != null) { List<String> types = Arrays.asList(solution.get("types").asLiteral().getString().split(" ")); String[] split = solution.get("properties").asLiteral().getString().split(" "); HashSet<String> properties = Sets .newHashSet(split); // Get Relevant Properties based on CouchDB List<Field> relevantProperties = getRelevantProperties(uri, types, properties); fields.addAll(relevantProperties); } } queryExecution.close(); return JsonUtils.toPrettyString(fields); }
/** Verifies that streaming a persistent set yields exactly the elements added to it. */
@Test
public void streaming() {
    final Object first = new Object();
    final Object second = new Object();
    final PersistentSet<Object> underTest = PSet.empty().plus(first).plus(second);
    assertEquals(newHashSet(first, second), underTest.stream().collect(toSet()));
}
}
Set<String> skipServices = Sets.newHashSet(); Map<String, FindMyStuffHistoryEntity> existingHistories = Maps.newHashMap(); if (history != null){