Refine search
/** Delegates to the wrapped executor's dataset graph. */
@Override
public DatasetGraph getGraphStore() {
    return delegate.getDatasetGraph();
}
/** Returns the execution context of the underlying delegate. */
@Override
public Context getContext() {
    return delegate.getContext();
}
/** Runs the update by forwarding to the wrapped processor. */
@Override
public void execute() {
    delegate.execute();
}
}
/**
 * Executes the given SPARQL update request via the configured factory.
 *
 * @param updateRequest the parsed SPARQL update to run
 */
@Override
public void update(UpdateRequest updateRequest) {
    updateExecutionFactory.createUpdateProcessor(updateRequest).execute();
}
/**
 * Sends the update request to a remote SPARQL update service and runs it.
 *
 * @param endpoint the remote SPARQL update service URL
 * @param req      the update request to execute
 */
private void exec(String endpoint, UpdateRequest req) {
    UpdateExecutionFactory.createRemote(req, endpoint).execute();
}
/**
 * The dataset against which the query will execute.
 * May be null, implying there isn't a local GraphStore target for this UpdateProcessor.
 *
 * @deprecated Use {@link #getDatasetGraph()}
 */
@Deprecated
public default DatasetGraph getGraphStore() { return getDatasetGraph() ; }
/**
 * Executes a SPARQL update (e.g. an INSERT) against the remote triplestore.
 *
 * <p>TODO: needs to be extracted.
 *
 * @param sparqlQuery  the SPARQL update string to run
 * @param loadEndpoint the remote SPARQL update endpoint the request is sent to
 */
public static void loadTripleStore(final String sparqlQuery, final URI loadEndpoint) {
    final UpdateRequest request = UpdateFactory.create(sparqlQuery);
    final UpdateProcessor proc = UpdateExecutionFactory.createRemote(request, loadEndpoint.toString());
    proc.execute();
}
/**
 * Builds a remote update processor for the endpoint and fires the request.
 *
 * @param endpoint target SPARQL update service URL
 * @param req      the update to send
 */
private void exec(String endpoint, UpdateRequest req) {
    final UpdateProcessor remote = UpdateExecutionFactory.createRemote(req, endpoint);
    remote.execute();
}
/**
 * Spring Batch step: runs the configured SPARQL update request once.
 *
 * @return {@link RepeatStatus#FINISHED} — the step never repeats
 */
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    uef.createUpdateProcessor(updateRequest).execute();
    return RepeatStatus.FINISHED;
}
@Override public Dataset apply(Dataset dataset) { // creates an update request by grabbing the queryString variable, populated by the Acknowledged Section method UpdateRequest update = UpdateFactory.create(sparql); // create a clone of the input datast ... See the RdfUtils class comments Dataset copy = RdfUtils.cloneDataset(dataset); // perform a sparql update with the clone and the acknowledgement/query.sq UpdateProcessor updateProcessor = UpdateExecutionFactory.create(update,copy); updateProcessor.execute(); return copy; }
Model m = ModelFactory.createDefaultModel();
m.read("example.owl", "RDF/XML");
// Prepare your update...
// Create an UpdateExecution on the local model
UpdateProcessor processor = UpdateExecutionFactory.create(update, GraphStoreFactory.create(m));
processor.execute();
// Save the updated model. The original snippet wrote an undefined variable
// `updated` — the only model in scope is `m`, which the update mutated in
// place. try-with-resources guarantees the stream is closed even on failure.
try (FileOutputStream out = new FileOutputStream("example.owl")) {
    m.write(out, "RDF/XML");
}
/** * insert data into triplestore */ public void updateTripleStore(String sparqlQuery, String endpoint) { logger.debug("updateTripleStore on {}: {}", endpoint, sparqlQuery); long start = getTime(); UpdateRequest request = UpdateFactory.create(sparqlQuery); UpdateProcessor proc; try { proc = UpdateExecutionFactory.createRemote(request, endpoint); proc.execute(); } catch (Exception e) { // problem might be the Stardog v5+ infamous endpoint distinction for update and select queries logger.warn("update query failed on {}: {}", endpoint, e.getMessage() ); // re-try with extended endpoint URL String endpointForStardogToBeTested = endpoint.concat("/update"); logger.info("try update query on {}", endpointForStardogToBeTested); proc = UpdateExecutionFactory.createRemote(request, endpointForStardogToBeTested); proc.execute(); } this.logTime(getTime() - start, "updateTripleStore: " + sparqlQuery); }
/**
 * Creates an update processor for the request (optionally seeded with an
 * initial binding) and executes it against the dataset.
 *
 * @throws ARQException if no registered engine can handle the request
 */
private static void execute$(UpdateRequest request, DatasetGraph datasetGraph, Binding inputBinding) {
    UpdateProcessor uProc = UpdateExecutionFactory.create(request, datasetGraph, inputBinding);
    if (uProc == null)
        // Fixed typo in the user-facing message ("procesors" -> "processors").
        throw new ARQException("No suitable update processors are registered/able to execute your updates");
    uProc.execute();
}
/**
 * Executes the update request, substituting a placeholder update when the
 * request contains no operations, so callers always receive an already-executed
 * {@link UpdateProcessor}.
 *
 * @param uef           factory used to create processors for non-empty requests
 * @param updateRequest the request to execute; may contain zero operations
 * @return the processor that was executed (for an empty request, the processor
 *         of the placeholder update against a fresh in-memory dataset)
 * @throws RuntimeException a human-friendly translation of any failure raised
 *         while executing a non-empty request
 */
public static UpdateProcessor executeUnlessEmpty(UpdateExecutionFactory uef, UpdateRequest updateRequest) {
    UpdateProcessor result;
    if(updateRequest.getOperations().isEmpty()) {
        // Create a fake update request. NOTE(review): the INSERT's WHERE
        // clause matches against a brand-new empty dataset, so presumably
        // nothing is ever inserted — this just yields an executed processor
        // without contacting `uef`'s real target. Confirm that is intended.
        UpdateRequest update = UpdateFactory.create("PREFIX ex: <http://example.org/> INSERT { ex:_s ex:_p ex:_o } WHERE { ex:_s ex:_p ex:_o }");
        result = org.apache.jena.update.UpdateExecutionFactory.create(update, DatasetGraphFactory.createGeneral());
        result.execute();
    } else {
        result = uef.createUpdateProcessor(updateRequest);
        try {
            result.execute();
        } catch(Exception e) {
            // Translate low-level (HTTP) failures into a friendlier runtime
            // exception for callers; the original cause handling is delegated
            // to HttpExceptionUtils.
            RuntimeException f = HttpExceptionUtils.makeHumanFriendly(e);
            throw f;
        }
    }
    return result;
}