/**
 * Includes all imports and loads them into a single translated merge.
 *
 * <p>Works in two passes over the imports closure: first the vocabulary
 * (entities) of every ontology is registered with the builder, then every
 * axiom is translated via the visitor. The two-pass order matters: axiom
 * translation assumes the vocabulary is already declared.
 *
 * @param owl the root ontology; its manager supplies the imports closure
 * @return the translated (and classified, via {@code builder.build()}) merged ontology
 */
public Ontology translateAndClassify(OWLOntology owl) {
    Set<OWLOntology> owls = owl.getOWLOntologyManager().getImportsClosure(owl);
    log.debug("Load ontologies called. Translating {} ontologies.", owls.size());

    OntologyBuilder builder = OntologyBuilderImpl.builder(rdfFactory);

    // Pass 1: declare the vocabulary of every ontology in the closure.
    for (OWLOntology o : owls) {
        extractOntoloyVocabulary(o, builder);
    }

    // Pass 2: translate the axioms, one visitor per ontology.
    for (OWLOntology o : owls) {
        OWLAxiomVisitorImpl visitor = new OWLAxiomVisitorImpl(o, builder);
        for (OWLAxiom axiom : o.getAxioms()) {
            axiom.accept(visitor);
        }
    }

    Ontology onto = builder.build();
    log.debug("Ontology loaded: {}", onto);
    return onto;
}
/**
 * Builds the OBDA specification, folding the ontology's ABox facts into the
 * mapping when an ontology is available; otherwise classifies an empty TBox.
 */
@Override
public OBDASpecification transform(Mapping mapping, DBMetadata dbMetadata, Optional<Ontology> ontology) {
    if (!ontology.isPresent()) {
        // No ontology supplied: downstream stages still need a ClassifiedTBox,
        // so classify an empty one.
        ClassifiedTBox emptyTBox = OntologyBuilderImpl.builder(rdfFactory).build().tbox();
        return createSpecification(mapping, dbMetadata, emptyTBox);
    }

    Ontology onto = ontology.get();
    // Turn the ABox assertions into mapping entries and merge them with the user mapping.
    Mapping factMapping = factConverter.convert(onto.abox(),
            settings.isOntologyAnnotationQueryingEnabled(),
            mapping.getMetadata().getUriTemplateMatcher());
    Mapping enrichedMapping = mappingMerger.merge(mapping, factMapping);
    return createSpecification(enrichedMapping, dbMetadata, onto.tbox());
}
/**
 * Builds the OBDA specification, folding the ontology's ABox facts into the
 * mapping when an ontology is available; otherwise classifies an empty TBox.
 */
@Override
public OBDASpecification transform(Mapping mapping, DBMetadata dbMetadata, Optional<Ontology> ontology) {
    if (!ontology.isPresent()) {
        // No ontology supplied: downstream stages still need a ClassifiedTBox,
        // so classify an empty one.
        ClassifiedTBox emptyTBox = OntologyBuilderImpl.builder().build().tbox();
        return createSpecification(mapping, dbMetadata, emptyTBox);
    }

    Ontology onto = ontology.get();
    // Turn the ABox assertions into mapping entries and merge them with the user mapping.
    Mapping factMapping = factConverter.convert(onto.abox(),
            mapping.getExecutorRegistry(),
            settings.isOntologyAnnotationQueryingEnabled(),
            mapping.getMetadata().getUriTemplateMatcher());
    Mapping enrichedMapping = mappingMerger.merge(mapping, factMapping);
    return createSpecification(enrichedMapping, dbMetadata, onto.tbox());
}
public static OntopSemanticIndexLoader loadRDFGraph(Dataset dataset, Properties properties) throws SemanticIndexException { // Merge default and named graphs to filter duplicates Set<IRI> graphURLs = new HashSet<>(); graphURLs.addAll(dataset.getDefaultGraphs()); graphURLs.addAll(dataset.getNamedGraphs()); OntopModelConfiguration defaultConfiguration = OntopModelConfiguration.defaultBuilder().build(); Injector injector = defaultConfiguration.getInjector(); RDF rdfFactory = injector.getInstance(RDF.class); CollectRDFVocabulary collectVocabulary = new CollectRDFVocabulary(rdfFactory); for (IRI graphURL : graphURLs) { processRDF(collectVocabulary, graphURL); } Ontology vocabulary = collectVocabulary.vb.build(); SIRepository repo = new SIRepository(vocabulary.tbox(), defaultConfiguration.getTermFactory(), defaultConfiguration.getTypeFactory(), injector.getInstance(TargetAtomFactory.class)); Connection connection = repo.createConnection(); // Load the data SemanticIndexRDFHandler insertData = new SemanticIndexRDFHandler(repo, connection, defaultConfiguration.getTypeFactory(), defaultConfiguration.getTermFactory(), injector.getInstance(RDF.class)); for (IRI graphURL : graphURLs) { processRDF(insertData, graphURL); } LOG.info("Inserted {} triples", insertData.count); return new OntopSemanticIndexLoaderImpl(repo, connection, properties, Optional.empty() /* no tbox */); }