/**
 * Creates a new prepared statement.
 *
 * @param sparql
 *            SPARQL command text to prepare
 * @param connection
 *            Connection
 * @param type
 *            Result Set type for result sets produced by this statement
 * @param fetchDir
 *            Fetch Direction
 * @param fetchSize
 *            Fetch Size
 * @param holdability
 *            Result Set holdability
 * @param autoCommit
 *            Auto-commit
 * @param transactionLevel
 *            Transaction Isolation level
 * @throws SQLException
 *             Thrown if there is a problem preparing the statement
 */
public JenaPreparedStatement(String sparql, JenaConnection connection, int type, int fetchDir,
        int fetchSize, int holdability, boolean autoCommit, int transactionLevel)
        throws SQLException {
    super(connection, type, fetchDir, fetchSize, holdability, autoCommit, transactionLevel);
    // The SPARQL text is stored in the parameterized string; parameter
    // metadata is then derived from that same object.
    this.sparqlStr.setCommandText(sparql);
    this.paramMetadata = new JenaParameterMetadata(this.sparqlStr);
}
/**
 * Creates a new prepared statement.
 *
 * NOTE(review): this constructor appears to be a byte-identical duplicate of the
 * constructor above — confirm only one copy exists in the actual source file.
 *
 * @param sparql
 *            SPARQL command text to prepare
 * @param connection
 *            Connection
 * @param type
 *            Result Set type for result sets produced by this statement
 * @param fetchDir
 *            Fetch Direction
 * @param fetchSize
 *            Fetch Size
 * @param holdability
 *            Result Set holdability
 * @param autoCommit
 *            Auto-commit
 * @param transactionLevel
 *            Transaction Isolation level
 * @throws SQLException
 *             Thrown if there is a problem preparing the statement
 */
public JenaPreparedStatement(String sparql, JenaConnection connection, int type, int fetchDir,
        int fetchSize, int holdability, boolean autoCommit, int transactionLevel)
        throws SQLException {
    super(connection, type, fetchDir, fetchSize, holdability, autoCommit, transactionLevel);
    // The SPARQL text is stored in the parameterized string; parameter
    // metadata is then derived from that same object.
    this.sparqlStr.setCommandText(sparql);
    this.paramMetadata = new JenaParameterMetadata(this.sparqlStr);
}
/**
 * Update a named graph by first deleting it and afterwards inserting the
 * triples of the new model.
 *
 * @param graph named graph to be updated
 * @param model model that holds triples to set
 * @return SPARQL update string (CLEAR followed by INSERT DATA) with the
 *         graph IRI already substituted
 */
public String createUpdateNamedGraphQuery(String graph, Model model) {
    // Serialize the model as N-Triples so it can be embedded in INSERT DATA.
    StringWriter triples = new StringWriter();
    RDFDataMgr.write(triples, model, Lang.NTRIPLES);

    StringBuilder command = new StringBuilder();
    command.append("\nCLEAR GRAPH ?g;\n");
    command.append("\nINSERT DATA { GRAPH ?g { ");
    command.append(triples.toString());
    command.append("}};\n");

    // Bind ?g via ParameterizedSparqlString so the graph IRI is escaped safely.
    ParameterizedSparqlString parameterized = new ParameterizedSparqlString();
    parameterized.setCommandText(command.toString());
    parameterized.setIri("g", graph);
    return parameterized.toString();
}
/**
 * Retrieves the contents of a single named graph from the SPARQL endpoint as a model.
 *
 * @param graphName URI of the named graph to fetch
 * @return model containing all triples of that named graph
 */
public Model retrieveModel(String graphName) {
    String queryTemplate = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH ?g { ?s ?p ?o } . }";
    ParameterizedSparqlString pps = new ParameterizedSparqlString();
    pps.setCommandText(queryTemplate);
    // Bind ?g as an IRI so the graph name is escaped safely.
    pps.setIri("g", graphName);
    Query query = QueryFactory.create(pps.toString());
    QueryExecution qexec = QueryExecutionFactory.sparqlService(sparqlEndpoint, query);
    try {
        return qexec.execConstruct();
    } finally {
        // Fix: the original never closed the QueryExecution (resource leak).
        qexec.close();
    }
}
// Fragment of a larger query-building method (enclosing definition not visible here).
// Load the assembled SPARQL text, register the "won" namespace prefix, and bind
// ?msgUri to the message URI — setIri escapes it, avoiding SPARQL injection.
pss.setCommandText(builder.toString());
pss.setNsPrefix("won", "http://purl.org/webofneeds/model#");
pss.setIri("msgUri", msg.getUri());
// Fragment: install the SPARQL template into the parameterized string
// (placeholders are presumably bound by neighbouring lines not visible here).
pps.setCommandText(queryString);
/**
 * Retrieve resource data of all known won nodes that are saved in the Sparql endpoint.
 *
 * @return Set of all known won node resource data
 */
public Set<WonNodeInfo> retrieveAllWonNodeInfo() {
    Set<WonNodeInfo> wonNodeInfos = new HashSet<>();
    String queryString = "SELECT ?graphUri ?nodeUri WHERE { GRAPH ?graphUri {?nodeUri won:hasUriPrefixSpecification ?c} }";
    ParameterizedSparqlString pps = new ParameterizedSparqlString();
    pps.setCommandText(queryString);
    pps.setNsPrefix("won", "http://purl.org/webofneeds/model#");
    log.debug("Query SPARQL Endpoint: {}", sparqlEndpoint);
    log.debug("Execute query: {}", pps.toString());
    QueryExecution qexec = QueryExecutionFactory.sparqlService(sparqlEndpoint, pps.asQuery());
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution qs = results.nextSolution();
            RDFNode rdfNode = qs.get("graphUri");
            // Only rows with a bound graph URI identify a won node's data graph.
            if (rdfNode != null) {
                String graphUri = rdfNode.asResource().getURI();
                Dataset ds = retrieveDataset(graphUri);
                WonNodeInfo nodeInfo = getWonNodeInfoFromDataset(ds);
                wonNodeInfos.add(nodeInfo);
            }
        }
    } finally {
        // Fix: close the QueryExecution even if result processing throws
        // (the original only closed it on the success path).
        qexec.close();
    }
    return wonNodeInfos;
}
// Fragment: install the query template and bind ?status as a plain literal.
// NOTE(review): status.toString() presumably yields an enum/status name — the
// query must compare against the same lexical form; verify against the template.
pps.setCommandText(queryString);
pps.setLiteral("status", status.toString());
// Fragment: install the query template and bind the date-range parameters.
// NOTE(review): the literal datatype depends on the runtime type of
// fromDate/toDate (not visible here) — confirm it matches what the endpoint
// stores (e.g. xsd:dateTime) or range filters may silently match nothing.
pps.setCommandText(queryTemplate);
pps.setLiteral("fromDate", fromDate);
pps.setLiteral("toDate", toDate);
// Fragment (loop body truncated in this view): install the query text, then
// bind every entry of parameterBindings into the parameterized string.
// NOTE(review): iterating entrySet() would avoid a per-key map lookup if the
// loop body reads parameterBindings.get(key) — confirm against the full loop.
pss.setCommandText(query);
for (String key : parameterBindings.keySet()) {
/**
 * Retrieves all named graphs in which the given need occurs and assembles them
 * into a single dataset.
 *
 * @param uri need URI to look up (bound as an IRI, so it is escaped safely)
 * @return dataset holding one named model per graph that mentions the need
 */
public Dataset retrieveNeedDataset(String uri) {
    String queryString = "prefix won: <http://purl.org/webofneeds/model#> select distinct ?g where { "
            + "GRAPH ?g { ?uri a won:Need. ?a ?b ?c. } }";
    ParameterizedSparqlString pps = new ParameterizedSparqlString();
    pps.setCommandText(queryString);
    pps.setIri("uri", uri);
    Query query = QueryFactory.create(pps.toString());
    QueryExecution qexec = QueryExecutionFactory.sparqlService(sparqlEndpoint, query);
    Dataset ds = DatasetFactory.createGeneral();
    try {
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution qs = results.next();
            String graphUri = qs.getResource("g").getURI();
            // Fetch each graph's triples individually and add them under their graph name.
            Model model = retrieveModel(graphUri);
            ds.addNamedModel(graphUri, model);
        }
    } finally {
        // Fix: the original never closed the QueryExecution (resource leak).
        qexec.close();
    }
    return ds;
}
/**
 * To start crawling (http modification query) from a certain point in time, take last
 * modification date from a connection known in the database that is in status 'DONE' which means
 * it has been crawled.
 *
 * @param wonNodeUri won node uri for which connection modification dates should be retrieved
 * @return modification date to start crawling from or null if none exists
 */
public String retrieveConnectionModificationDateForCrawling(String wonNodeUri) {
    // ORDER BY DESC + LIMIT 1 picks the most recent crawled ('DONE') connection.
    String queryString = "SELECT ?modificationDate WHERE {\n"
            + " ?connectionUri a won:Connection.\n"
            + " ?connectionUri won:hasWonNode ?wonNodeUri. \n"
            + " ?connectionUri dcterms:modified ?modificationDate. \n"
            + " ?connectionUri won:crawlStatus 'DONE'. \n"
            + "} ORDER BY DESC(?modificationDate) LIMIT 1\n";
    ParameterizedSparqlString pps = new ParameterizedSparqlString();
    pps.setNsPrefix("won", "http://purl.org/webofneeds/model#");
    pps.setNsPrefix("dcterms", "http://purl.org/dc/terms/");
    pps.setCommandText(queryString);
    pps.setIri("wonNodeUri", wonNodeUri);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(sparqlEndpoint, pps.asQuery());
    String modificationDate = null;
    try {
        ResultSet results = qexec.execSelect();
        if (results.hasNext()) {
            QuerySolution qs = results.nextSolution();
            modificationDate = qs.get("modificationDate").asLiteral().getString();
        }
    } finally {
        // Fix: close the QueryExecution even if result processing throws
        // (the original only closed it on the success path).
        qexec.close();
    }
    return modificationDate;
}
/**
 * To start crawling (http modification query) from a certain point in time, take last
 * modification date from a need known in the database that is in status 'DONE' which means
 * it has been crawled.
 *
 * @param wonNodeUri won node uri for which need modification dates should be retrieved
 * @return modification date to start crawling from or null if none exists
 */
public String retrieveNeedModificationDateForCrawling(String wonNodeUri) {
    // ORDER BY DESC + LIMIT 1 picks the most recent crawled ('DONE') need.
    String queryString = "SELECT ?modificationDate WHERE {\n"
            + " ?needUri a won:Need.\n"
            + " ?needUri won:hasWonNode ?wonNodeUri. \n"
            + " ?needUri dcterms:modified ?modificationDate. \n"
            + " ?needUri won:crawlStatus 'DONE'. \n"
            + "} ORDER BY DESC(?modificationDate) LIMIT 1\n";
    ParameterizedSparqlString pps = new ParameterizedSparqlString();
    pps.setNsPrefix("won", "http://purl.org/webofneeds/model#");
    pps.setNsPrefix("dcterms", "http://purl.org/dc/terms/");
    pps.setCommandText(queryString);
    pps.setIri("wonNodeUri", wonNodeUri);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(sparqlEndpoint, pps.asQuery());
    String modificationDate = null;
    try {
        ResultSet results = qexec.execSelect();
        if (results.hasNext()) {
            QuerySolution qs = results.nextSolution();
            modificationDate = qs.get("modificationDate").asLiteral().getString();
        }
    } finally {
        // Fix: close the QueryExecution even if result processing throws
        // (the original only closed it on the success path).
        qexec.close();
    }
    return modificationDate;
}
// Fragment (enclosing definition not visible): binds a blank-node label as an
// IRI-style parameter into both a SELECT query and an INSERT/WHERE update.
// NOTE(review): "_:" + id is the <_:label> syntax Jena uses to round-trip blank
// nodes through ParameterizedSparqlString — confirm this is the intent (likely
// test code exercising blank-node injection handling).
pq.setCommandText("SELECT * WHERE { ?s ?p ?o }");
pq.setIri("s", "_:" + bnode.getId());
Query q = pq.asQuery();
s.setCommandText("INSERT { ?o ?p ?s } WHERE { ?s ?p ?o }");
s.setIri("s", "_:" + bnode.getId());
UpdateRequest query = s.asUpdate();