/**
 * Add a quad to the dataset.
 *
 * @param quad an RDF Quad
 */
public void add(final Quad quad) {
    // Delegate straight to the backing dataset.
    this.dataset.add(quad);
}
@Override
public void add(final BlankNodeOrIRI subject, final IRI predicate, final RDFTerm object) {
    // Store the triple as a quad in the configured named graph.
    this.dataset.add(namedGraph, subject, predicate, object);
}
/**
 * Create a test resource populated with the supplied quads.
 *
 * @param id the resource identifier
 * @param quads zero or more quads to load into the backing dataset
 */
public TestResource(final IRI id, final Quad... quads) {
    this.id = id;
    for (final Quad quad : quads) {
        this.dataset.add(quad);
    }
}
/**
 * Build a dataset.
 *
 * @param resource the resource IRI
 * @param title a title
 * @param subject a subject
 * @return a new dataset
 */
default Dataset buildDataset(final IRI resource, final String title, final String subject) {
    final Dataset data = getInstance().createDataset();
    // All triples go into the trellis:PreferUserManaged graph.
    data.add(Trellis.PreferUserManaged, resource, DC.title, getInstance().createLiteral(title));
    data.add(Trellis.PreferUserManaged, resource, DC.subject, getInstance().createIRI(subject));
    data.add(Trellis.PreferUserManaged, resource, type, SKOS.Concept);
    return data;
}
}
/**
 * Build a dataset.
 *
 * @param resource the resource IRI
 * @param title a title
 * @param subject a subject
 * @return a new dataset
 */
default Dataset buildDataset(final IRI resource, final String title, final String subject) {
    final Dataset data = getInstance().createDataset();
    // Every statement is written to the trellis:PreferUserManaged graph.
    data.add(Trellis.PreferUserManaged, resource, DC.title, getInstance().createLiteral(title));
    data.add(Trellis.PreferUserManaged, resource, DC.subject, getInstance().createIRI(subject));
    data.add(Trellis.PreferUserManaged, resource, type, SKOS.Concept);
    return data;
}
}
/**
 * Build a Kafka record describing a containment change on the parent.
 *
 * @param topic the Kafka topic
 * @param resource the contained resource IRI
 * @param parent the parent (container) resource
 * @param dataset the source dataset whose audit quads are copied
 * @return a producer record keyed by the parent identifier
 * @throws Exception on serialization or dataset errors
 */
private ProducerRecord<String, String> buildContainmentMessage(final String topic, final IRI resource,
        final Resource parent, final Dataset dataset) throws Exception {
    // Collect the audit quads into a fresh dataset, then record the containment triple.
    try (final Dataset auditData = dataset.stream(of(PreferAudit), null, null, null)
            .map(auditTypeMapper).collect(toDataset())) {
        auditData.add(PreferContainment, parent.getIdentifier(), contains, resource);
        final String key = parent.getIdentifier().getIRIString();
        return new ProducerRecord<>(topic, key, serialize(auditData));
    }
}
@Override
public CompletionStage<Void> delete(final Metadata metadata) {
    LOGGER.debug("Deleting: {}", metadata.getIdentifier());
    return runAsync(() -> {
        try (final Dataset ds = rdf.createDataset()) {
            final Instant time = now();
            // Mark the resource deleted while retaining its ldp:Resource type.
            ds.add(PreferServerManaged, metadata.getIdentifier(), DC.type, DeletedResource);
            ds.add(PreferServerManaged, metadata.getIdentifier(), RDF.type, LDP.Resource);
            storeResource(metadata.getIdentifier(), ds, time, OperationType.DELETE);
        } catch (final Exception ex) {
            throw new RuntimeTrellisException("Error deleting resource: " + metadata.getIdentifier(), ex);
        }
    });
}
@Override
public CompletionStage<Void> delete(final Metadata metadata) {
    LOGGER.debug("Deleting: {}", metadata.getIdentifier());
    return runAsync(() -> {
        try (final Dataset ds = rdf.createDataset()) {
            final Instant deletionTime = now();
            // Record the deletion: type the resource as deleted but keep ldp:Resource.
            ds.add(PreferServerManaged, metadata.getIdentifier(), DC.type, DeletedResource);
            ds.add(PreferServerManaged, metadata.getIdentifier(), RDF.type, LDP.Resource);
            storeResource(metadata.getIdentifier(), ds, deletionTime, OperationType.DELETE);
        } catch (final Exception ex) {
            throw new RuntimeTrellisException("Error deleting resource: " + metadata.getIdentifier(), ex);
        }
    });
}
// Add a triple to the dataset, expanding it into a quad in the configured named graph.
@Override public void add(final Triple triple) { dataset.add(namedGraph, triple.getSubject(), triple.getPredicate(), triple.getObject()); }
// Smoke test: adding a quad to the dataset should not throw.
@Test public void add() throws Exception { dataset.add(new DummyQuad()); }
@Override
public CompletionStage<Void> add(final IRI id, final Dataset dataset) {
    return runAsync(() -> {
        // Audit quads for a resource live in a dedicated ?ext=audit named graph.
        final IRI graphName = rdf.createIRI(id.getIRIString() + "?ext=audit");
        try (final Dataset data = rdf.createDataset()) {
            dataset.getGraph(PreferAudit).ifPresent(graph ->
                graph.stream().forEach(triple ->
                    data.add(graphName, triple.getSubject(), triple.getPredicate(), triple.getObject())));
            executeWrite(rdfConnection, () -> rdfConnection.loadDataset(asJenaDataset(data)));
        } catch (final Exception ex) {
            throw new RuntimeTrellisException("Error storing audit dataset for " + id, ex);
        }
    });
}
@Override
public CompletionStage<Void> add(final IRI id, final Dataset dataset) {
    return runAsync(() -> {
        // Copy the audit graph into a named graph keyed by the resource id + ?ext=audit.
        final IRI auditGraph = rdf.createIRI(id.getIRIString() + "?ext=audit");
        try (final Dataset data = rdf.createDataset()) {
            dataset.getGraph(PreferAudit).ifPresent(g ->
                g.stream().forEach(t ->
                    data.add(auditGraph, t.getSubject(), t.getPredicate(), t.getObject())));
            executeWrite(rdfConnection, () -> rdfConnection.loadDataset(asJenaDataset(data)));
        } catch (final Exception ex) {
            throw new RuntimeTrellisException("Error storing audit dataset for " + id, ex);
        }
    });
}
private Dataset createDataset2() { final RDF factory2 = createFactory(); final IRI name = factory2.createIRI("http://xmlns.com/foaf/0.1/name"); final Dataset g2 = factory2.createDataset(); final BlankNode b1 = createOwnBlankNode("b1", "bc8d3e45-a08f-421d-85b3-c25b373abf87"); g2.add(b1, b1, name, factory2.createLiteral("Charlie")); final BlankNode b2 = createOwnBlankNode("b2", "2209097a-5078-4b03-801a-6a2d2f50d739"); g2.add(b2, b2, name, factory2.createLiteral("Dave")); final IRI hasChild = factory2.createIRI("http://example.com/hasChild"); // NOTE: Opposite direction of loadDataset1 g2.add(b2, b2, hasChild, b1); return g2; }
/** * Add all quads from the source to the target. * <p> * The quads may be copied in any order. No special conversion or * adaptation of {@link BlankNode}s are performed. * * @param source * Source Dataset to copy quads from * @param target * Target Dataset where quads will be added */ private void addAllQuads(final Dataset source, final Dataset target) { // unordered() as we don't need to preserve quad order // sequential() as we don't (currently) require target Dataset to be // thread-safe try (Stream<? extends Quad> stream = source.stream()) { stream.unordered().sequential().forEach(t -> target.add(t)); } }
/**
 * Make a new dataset with two BlankNodes - each with a different
 * uniqueReference
 */
private Dataset createDataset1() {
    final RDF factory1 = createFactory();
    final IRI name = factory1.createIRI("http://xmlns.com/foaf/0.1/name");
    final IRI hasChild = factory1.createIRI("http://example.com/hasChild");
    final BlankNode b1 = createOwnBlankNode("b1", "0240eaaa-d33e-4fc0-a4f1-169d6ced3680");
    final BlankNode b2 = createOwnBlankNode("b2", "9de7db45-0ce7-4b0f-a1ce-c9680ffcfd9f");
    final Dataset g1 = factory1.createDataset();
    g1.add(b1, b1, name, factory1.createLiteral("Alice"));
    g1.add(b2, b2, name, factory1.createLiteral("Bob"));
    // The hasChild link goes into the default graph (null graph name).
    g1.add(null, b1, hasChild, b2);
    return g1;
}
// Smoke test: adding a triple to the default graph (null graph name) should not throw.
@Test public void addSPO() throws Exception { dataset.add(null, new DummyIRI(1), new DummyIRI(2), new DummyIRI(3)); }
// A dataset created by the Jena factory needs no conversion, so converting it twice
// should yield equal DatasetGraphs (contrast with testDatasetConversion, which
// expects distinct results for a non-Jena dataset).
@Test public void testDatasetNoConversion() { final Dataset dataset = jenaRdf.createDataset(); dataset.add(jenaRdf.createQuad(PreferUserManaged, subject, SKOS.prefLabel, literal)); dataset.add(jenaRdf.createQuad(PreferUserManaged, subject, type, SKOS.Concept)); dataset.add(jenaRdf.createQuad(PreferUserManaged, subject, DC.subject, AS.Activity)); assertEquals(3L, dataset.size(), "Confirm dataset size"); assertTrue(TriplestoreUtils.asJenaDataset(dataset).containsNamedModel(PreferUserManaged.getIRIString()), "Confirm presence of trellis:PreferUserManaged named graph"); assertEquals(TriplestoreUtils.asJenaDataset(dataset).asDatasetGraph(), TriplestoreUtils.asJenaDataset(dataset).asDatasetGraph(), "Confirm datasets are equal"); }
// Create an LDP-RS (with one user-managed and one audit quad) in a triplestore-backed
// service, then verify the resource and its updated root container asynchronously.
// NOTE(review): meanwhile() presumably produces an Instant strictly after the prior
// modification for the checkResource/checkRoot time comparison — confirm its contract.
@Test public void testPutLdpRsWithoutBaseUrl() throws Exception { final TriplestoreResourceService svc = new TriplestoreResourceService( connect(wrap(rdf.createDataset().asJenaDatasetGraph()))); svc.initialize(); final Dataset dataset = rdf.createDataset(); dataset.add(Trellis.PreferUserManaged, resource, DC.title, rdf.createLiteral("title")); dataset.add(Trellis.PreferAudit, rdf.createBlankNode(), RDF.type, AS.Create); final Instant later = meanwhile(); assertDoesNotThrow(() -> allOf( svc.create(builder(resource).interactionModel(LDP.RDFSource).container(root).build(), dataset) .toCompletableFuture(), svc.touch(root).toCompletableFuture()).join(), "Unsuccessful create operation!"); allOf( svc.get(resource).thenAccept(checkResource(later, LDP.RDFSource, 1L, 1L, 0L)).toCompletableFuture(), svc.get(root).thenAccept(checkRoot(later, 1L)).toCompletableFuture()).join(); }
// A dataset from the simple (non-Jena) factory must be converted, and each call to
// asJenaDataset produces a fresh DatasetGraph, so two conversions are not equal
// (contrast with testDatasetNoConversion for a Jena-backed dataset).
@Test public void testDatasetConversion() { final Dataset dataset = simpleRdf.createDataset(); dataset.add(simpleRdf.createQuad(PreferUserManaged, subject, SKOS.prefLabel, literal)); dataset.add(simpleRdf.createQuad(PreferUserManaged, subject, type, SKOS.Concept)); dataset.add(simpleRdf.createQuad(PreferUserManaged, subject, DC.subject, AS.Activity)); assertEquals(3L, dataset.size(), "Confirm dataset size"); assertTrue(TriplestoreUtils.asJenaDataset(dataset).containsNamedModel(PreferUserManaged.getIRIString()), "Confirm presence of trellis:PreferUserManaged named graph"); assertNotEquals(TriplestoreUtils.asJenaDataset(dataset).asDatasetGraph(), TriplestoreUtils.asJenaDataset(dataset).asDatasetGraph(), "Confirm dataset has been converted"); }
// A quad added with one casing of the language tag ("EN-GB") must be found by
// contains() using any other casing, whether matched as a full Quad or as a pattern.
@Test public void containsLanguageTagsCaseInsensitive() { // COMMONSRDF-51: Ensure we can add/contains/remove with any casing // of literal language tag final Literal lower = factory.createLiteral("Hello there", "en-gb"); final Literal upper = factory.createLiteral("Hello there", "EN-GB"); final Literal mixed = factory.createLiteral("Hello there", "en-GB"); final IRI example1 = factory.createIRI("http://example.com/s1"); final IRI greeting = factory.createIRI("http://example.com/greeting"); dataset.add(null, example1, greeting, upper); // any kind of Triple should match assertTrue(dataset.contains(factory.createQuad(null, example1, greeting, upper))); assertTrue(dataset.contains(factory.createQuad(null, example1, greeting, lower))); assertTrue(dataset.contains(factory.createQuad(null, example1, greeting, mixed))); // or as patterns assertTrue(dataset.contains(null, null, null, upper)); assertTrue(dataset.contains(null, null, null, lower)); assertTrue(dataset.contains(null, null, null, mixed)); }