/**
 * Translate an identifier IRI into its storage key.
 *
 * @param identifier the resource identifier; expected to use the internal {@code PREFIX} scheme
 * @return the storage path corresponding to the identifier
 * @throws RuntimeTrellisException if the identifier does not start with the internal prefix
 */
private String getKey(final IRI identifier) {
    final String id = identifier.getIRIString();
    if (!id.startsWith(PREFIX)) {
        throw new RuntimeTrellisException("Invalid identifier: " + identifier);
    }
    // Replace the internal prefix with the configured path prefix.
    return pathPrefix + id.substring(PREFIX.length());
}
}
/**
 * Read an entity into the provided {@link TrellisDataset}.
 *
 * <p>Server-managed statements are filtered out before the quads are added:
 * any rdf:type whose object is in the LDP namespace, and any ldp:contains triple.
 *
 * @param graphName the target graph
 * @param syntax the entity syntax
 * @param dataset the dataset
 * @throws BadRequestException if the entity contains invalid RDF
 * @throws WebApplicationException if an I/O error occurs while reading the entity
 */
protected void readEntityIntoDataset(final IRI graphName, final RDFSyntax syntax,
        final TrellisDataset dataset) {
    try (final InputStream input = entity) {
        getServices().getIOService().read(input, syntax, getIdentifier())
            .map(skolemizeTriples(getServices().getResourceService(), getBaseUrl()))
            // Reject client-supplied rdf:type values in the LDP namespace (server-managed).
            .filter(triple -> !RDF.type.equals(triple.getPredicate())
                    || !triple.getObject().ntriplesString().startsWith("<" + LDP.getNamespace()))
            // ldp:contains is likewise server-managed and may not be set by clients.
            .filter(triple -> !LDP.contains.equals(triple.getPredicate()))
            .map(toQuad(graphName)).forEachOrdered(dataset::add);
    } catch (final RuntimeTrellisException ex) {
        // Chain the cause so the underlying parse failure isn't lost from the trace.
        throw new BadRequestException("Invalid RDF content: " + ex.getMessage(), ex);
    } catch (final IOException ex) {
        throw new WebApplicationException("Error processing input: " + ex.getMessage(), ex);
    }
}
triples = updateGraph(syntax, graphName); } catch (final RuntimeTrellisException ex) { throw new BadRequestException("Invalid RDF: " + ex.getMessage());
/**
 * Close the backing dataset, rethrowing any failure as an
 * unchecked {@link RuntimeTrellisException}.
 */
@Override
public void close() {
    try {
        dataset.close();
    } catch (final Exception e) {
        throw new RuntimeTrellisException("Error closing dataset", e);
    }
}
/**
 * Close the wrapped graph, rethrowing any failure as an
 * unchecked {@link RuntimeTrellisException}.
 */
@Override
public void close() {
    try {
        innerGraph.close();
    } catch (final Exception e) {
        throw new RuntimeTrellisException("Error closing graph", e);
    }
}
/**
 * Release the underlying dataset; a checked failure is surfaced
 * as a {@link RuntimeTrellisException}.
 */
@Override
public void close() {
    try {
        dataset.close();
    } catch (final Exception e) {
        throw new RuntimeTrellisException("Error closing dataset", e);
    }
}
/**
 * Release the underlying graph; a checked failure is surfaced
 * as a {@link RuntimeTrellisException}.
 */
@Override
public void close() {
    try {
        graph.close();
    } catch (final Exception e) {
        throw new RuntimeTrellisException("Error closing graph", e);
    }
}
/**
 * Asynchronously update the modification time of the given resource.
 *
 * @param identifier the resource identifier
 * @return a stage that completes when the modification time has been written,
 *         or fails with a {@link RuntimeTrellisException} on error
 */
@Override
public CompletionStage<Void> touch(final IRI identifier) {
    final Literal modified = rdf.createLiteral(now().toString(), XSD.dateTime);
    return runAsync(() -> {
        try {
            rdfConnection.update(buildUpdateModificationRequest(identifier, modified));
        } catch (final Exception e) {
            throw new RuntimeTrellisException("Could not update data for " + identifier, e);
        }
    });
}
/**
 * Produce a {@link CompletionStage} that always completes exceptionally.
 *
 * @return a stage that fails with a {@link RuntimeTrellisException}
 */
protected static CompletionStage<Void> asyncException() {
    final Runnable alwaysThrow = () -> {
        throw new RuntimeTrellisException("Expected exception");
    };
    return runAsync(alwaysThrow);
}
/**
 * Create an AbstractResourceService with the given producer.
 *
 * @param baseUrl the base URL
 * @param producer the kafka producer
 * @param curator the zookeeper curator
 * @param notifications the event service
 * @param idSupplier a supplier of new identifiers
 * @param async write cached resources asynchronously if true, synchronously if false
 * @throws RuntimeTrellisException if the zookeeper coordination node cannot be created
 */
public AbstractResourceService(final String baseUrl, final Producer<String, String> producer,
        final CuratorFramework curator, final EventService notifications,
        final Supplier<String> idSupplier, final Boolean async) {
    this.baseUrl = baseUrl;
    this.notifications = notifications;
    this.async = async;
    this.idSupplier = idSupplier;
    this.producer = producer;
    this.curator = curator;
    try {
        this.curator.createContainers(ZNODE_COORDINATION);
    } catch (final Exception ex) {
        // Log the full exception (not just the message) so the stack trace is retained.
        LOGGER.error("Could not create zk session node: {}", ex.getMessage(), ex);
        throw new RuntimeTrellisException(ex);
    }
}
/**
 * Asynchronously record a new modification time for the identified resource.
 *
 * @param identifier the resource identifier
 * @return a stage that completes when the timestamp update has been applied,
 *         or fails with a {@link RuntimeTrellisException} on error
 */
@Override
public CompletionStage<Void> touch(final IRI identifier) {
    final Literal timestamp = rdf.createLiteral(now().toString(), XSD.dateTime);
    return runAsync(() -> {
        try {
            rdfConnection.update(buildUpdateModificationRequest(identifier, timestamp));
        } catch (final Exception err) {
            throw new RuntimeTrellisException("Could not update data for " + identifier, err);
        }
    });
}
/**
 * Serialize a list of quads as N-Quads.
 *
 * @param quads the quads
 * @return a string representation of the quads
 * @throws RuntimeTrellisException if an error is encountered while serializing the dataset
 */
public static String serialize(final List<Quad> quads) {
    try (final StringWriter str = new StringWriter()) {
        final DatasetGraph datasetGraph = create();
        try {
            quads.stream().map(rdf::asJenaQuad).forEach(datasetGraph::add);
            write(str, datasetGraph, NQUADS);
        } finally {
            // Release the dataset graph even when write() throws.
            datasetGraph.close();
        }
        return str.toString();
    } catch (final IOException ex) {
        // Original log call had no "{}" placeholder, so the message argument was silently dropped.
        LOGGER.error("Error processing dataset in quad serializer: {}", ex.getMessage());
        throw new RuntimeTrellisException("Error processing dataset", ex);
    }
}
/**
 * Apply a SPARQL-Update command to the given graph in place.
 *
 * @param graph the graph to modify
 * @param update the SPARQL-Update command
 * @param syntax the update syntax; only SPARQL-Update is supported
 * @param base the base URI for resolving relative IRIs
 * @throws RuntimeTrellisException if the syntax is unsupported or the update fails to parse/apply
 */
@Override
public void update(final Graph graph, final String update, final RDFSyntax syntax,
        final String base) {
    requireNonNull(graph, "The input graph may not be null");
    requireNonNull(update, "The update command may not be null");
    requireNonNull(syntax, "The RDF syntax may not be null");
    if (!SPARQL_UPDATE.equals(syntax)) {
        throw new RuntimeTrellisException("The syntax " + syntax + " is not supported for updates.");
    }
    try {
        // Execute directly against the Jena view of the commons-rdf graph.
        execute(create(update, base), rdf.asJenaGraph(graph));
    } catch (final UpdateException | QueryParseException ex) {
        throw new RuntimeTrellisException(ex);
    }
}
/**
 * Execute a SPARQL-Update command against the supplied graph.
 *
 * @param graph the graph to modify
 * @param update the SPARQL-Update command
 * @param syntax the update syntax; must be SPARQL-Update
 * @param base the base URI for resolving relative IRIs
 * @throws RuntimeTrellisException if the syntax is unsupported or the update cannot be parsed or applied
 */
@Override
public void update(final Graph graph, final String update, final RDFSyntax syntax,
        final String base) {
    requireNonNull(graph, "The input graph may not be null");
    requireNonNull(update, "The update command may not be null");
    requireNonNull(syntax, "The RDF syntax may not be null");
    if (!SPARQL_UPDATE.equals(syntax)) {
        throw new RuntimeTrellisException("The syntax " + syntax + " is not supported for updates.");
    }
    try {
        final org.apache.jena.graph.Graph jenaGraph = rdf.asJenaGraph(graph);
        execute(create(update, base), jenaGraph);
    } catch (final UpdateException | QueryParseException ex) {
        throw new RuntimeTrellisException(ex);
    }
}
/**
 * Read the state of the resource data at a particular point in time.
 *
 * @param directory the directory
 * @param identifier the identifier
 * @param time the time
 * @return the resource data, if it exists
 */
public static Optional<ResourceData> read(final File directory, final IRI identifier, final Instant time) {
    LOGGER.debug("Reading journal to generate the resource data");
    // Only proceed when the resource journal file exists in the directory.
    return of(new File(directory, RESOURCE_JOURNAL)).filter(File::exists).flatMap(file -> {
        // Build the memento timestamps from the journal's version ranges:
        // the start of the first range, then the end of every range.
        final List<Instant> mementos = new ArrayList<>();
        final List<VersionRange> ranges = asTimeMap(file);
        ranges.stream().map(VersionRange::getFrom).findFirst().ifPresent(mementos::add);
        ranges.stream().map(VersionRange::getUntil).forEachOrdered(mementos::add);
        // Stream the journal quads for this identifier at the requested time,
        // keeping only resource triples, and materialize them into a dataset.
        try (final Stream<Quad> stream = asStream(rdf, file, identifier, time)) {
            try (final Dataset dataset = stream.filter(isResourceTriple).collect(toDataset())) {
                LOGGER.debug("Creating resource: {} at {}", identifier, time);
                // NOTE(review): `from` presumably assembles the ResourceData from the
                // dataset and mementos — confirm against its definition elsewhere.
                return from(identifier, dataset, mementos);
            } catch (final Exception ex) {
                throw new RuntimeTrellisException("Error processing dataset", ex);
            }
        }
    });
}
/**
 * Persist a resource modification to the triplestore.
 *
 * @param identifier the resource identifier
 * @param dataset the resource data
 * @param eventTime the modification time to record
 * @param type the kind of operation being stored
 * @throws RuntimeTrellisException if the update cannot be applied
 */
private void storeResource(final IRI identifier, final Dataset dataset, final Instant eventTime,
        final OperationType type) {
    final Literal moment = rdf.createLiteral(eventTime.toString(), XSD.dateTime);
    try {
        rdfConnection.update(buildUpdateRequest(identifier, moment, dataset, type));
    } catch (final Exception e) {
        throw new RuntimeTrellisException("Could not update data for " + identifier, e);
    }
}
/**
 * Write a resource operation and its timestamp to the triplestore.
 *
 * @param identifier the resource identifier
 * @param dataset the resource data
 * @param eventTime the modification time to record
 * @param type the kind of operation being stored
 * @throws RuntimeTrellisException if the update cannot be applied
 */
private void storeResource(final IRI identifier, final Dataset dataset, final Instant eventTime,
        final OperationType type) {
    final Literal eventLiteral = rdf.createLiteral(eventTime.toString(), XSD.dateTime);
    try {
        rdfConnection.update(buildUpdateRequest(identifier, eventLiteral, dataset, type));
    } catch (final Exception err) {
        throw new RuntimeTrellisException("Could not update data for " + identifier, err);
    }
}
/**
 * Asynchronously verify that the identifier is not the known bad identifier.
 *
 * @param identifier the identifier to check
 * @return a stage that completes exceptionally when the identifier equals {@code badId}
 */
protected CompletionStage<Void> isntBadId(final IRI identifier) {
    return runAsync(() -> {
        // Keep the equals() receiver order to preserve original null-handling behavior.
        if (identifier.equals(badId)) {
            throw new RuntimeTrellisException("Expected Exception");
        }
    });
}
@Override public Stream<Triple> read(final InputStream input, final RDFSyntax syntax, final String base) { requireNonNull(input, "The input stream may not be null!"); requireNonNull(syntax, "The syntax value may not be null!"); try { final org.apache.jena.graph.Graph graph = createDefaultGraph(); final Lang lang = rdf.asJenaLang(syntax).orElseThrow(() -> new RuntimeTrellisException("Unsupported RDF Syntax: " + syntax.mediaType())); RDFParser.source(input).lang(lang).base(base).parse(graph); // Check the graph for any new namespace definitions final Set<String> namespaces = nsService.getNamespaces().entrySet().stream().map(Map.Entry::getValue) .collect(toSet()); graph.getPrefixMapping().getNsPrefixMap().forEach((prefix, namespace) -> { if (!namespaces.contains(namespace)) { LOGGER.debug("Setting prefix ({}) for namespace {}", prefix, namespace); nsService.setPrefix(prefix, namespace); } }); return rdf.asGraph(graph).stream().map(Triple.class::cast); } catch (final RiotException | AtlasException | IllegalArgumentException ex) { throw new RuntimeTrellisException(ex); } }
@Override public Stream<Triple> read(final InputStream input, final RDFSyntax syntax, final String base) { requireNonNull(input, "The input stream may not be null!"); requireNonNull(syntax, "The syntax value may not be null!"); try { final org.apache.jena.graph.Graph graph = createDefaultGraph(); final Lang lang = rdf.asJenaLang(syntax).orElseThrow(() -> new RuntimeTrellisException("Unsupported RDF Syntax: " + syntax.mediaType())); RDFParser.source(input).lang(lang).base(base).parse(graph); // Check the graph for any new namespace definitions final Set<String> namespaces = nsService.getNamespaces().entrySet().stream().map(Map.Entry::getValue) .collect(toSet()); graph.getPrefixMapping().getNsPrefixMap().forEach((prefix, namespace) -> { if (!namespaces.contains(namespace)) { LOGGER.debug("Setting prefix ({}) for namespace {}", prefix, namespace); nsService.setPrefix(prefix, namespace); } }); return rdf.asGraph(graph).stream().map(Triple.class::cast); } catch (final RiotException | AtlasException | IllegalArgumentException ex) { throw new RuntimeTrellisException(ex); } }