Refine search
/**
 * Counts the statements (arcs) in the resource's model that point to
 * {@code resource} via the predicate {@code prop}.
 *
 * @param prop     the predicate the arcs must use
 * @param resource the object resource the arcs must point to
 * @return the number of matching statements
 */
protected int countArcsTo(Property prop, Resource resource) {
    int numArcs = 0;
    StmtIterator sIter = resource.getModel().listStatements(null, prop, resource);
    try {
        while (sIter.hasNext()) {
            sIter.nextStatement();
            numArcs++;
        }
    } finally {
        // Release the iterator even if iteration throws; the original
        // only closed it on the success path.
        sIter.close();
    }
    return numArcs;
}
/**
 * Builds a copy of {@code m} in which every statement has been passed
 * through the per-statement {@code normaliseLiterals(Statement)} overload.
 *
 * @param m the model whose statements are to be normalised
 * @return a fresh default model holding the normalised statements
 */
private Model normaliseLiterals(Model m) {
    Model normalised = ModelFactory.createDefaultModel();
    StmtIterator statements = m.listStatements();
    while (statements.hasNext()) {
        normalised.add(normaliseLiterals(statements.next()));
    }
    return normalised;
}
private void restitchItemLists(Model given, Model recon) { Statement G = given.listStatements(ANY, API.items, ANY).toList().get(0); Resource page = G.getSubject(); // Statement S = recon.listStatements(ANY, API.items, ANY).toList().get(0); Resource items = S.getObject().asResource(); S.remove(); recon.add(page, API.items, items); recon.removeAll(ANY, others, ANY); }
/**
 * Counts all properties (outgoing statements) of the given resource.
 *
 * @param r the resource whose properties are counted
 * @return the number of statements with {@code r} as subject
 */
protected int countProperties(Resource r) {
    int numProp = 0;
    StmtIterator sIter = r.listProperties();
    try {
        while (sIter.hasNext()) {
            sIter.nextStatement();
            numProp++;
        }
    } finally {
        // Release the iterator even if iteration throws; the original
        // only closed it on the success path.
        sIter.close();
    }
    return numProp;
}
while (stmtIter.hasNext()) { Statement stmt = stmtIter.next(); Resource type = stmt.getSubject().getPropertyResourceValue(RDF.type); if (type == null || !type.equals(RDF.Statement)) { statementsToRemove.add(stmt); stmtIter.close(); while (iter.hasNext()) { Statement stmt = iter.next(); iter.close(); while (stmtIter.hasNext()) { Statement st = stmtIter.nextStatement(); stmtIter.close(); while (stmtExistingIter.hasNext()) { Statement stmt = stmtExistingIter.nextStatement(); stmtExistingIter.close(); log.info("Will remove " + statementsToRemove.size() + " statements."); while (stmtDiffIter.hasNext()) { Statement stmt = stmtDiffIter.nextStatement(); statementsToAdd.add(stmt); stmtDiffIter.close();
/**
 * Collects all values of property {@code p} on resource {@code r}.
 *
 * @param r the subject resource
 * @param p the property to look up
 * @return the objects of all matching statements, possibly empty
 */
public static List<RDFNode> multiValue(Resource r, Property p) {
    List<RDFNode> values = new ArrayList<RDFNode>();
    StmtIterator sIter = r.listProperties(p);
    try {
        while (sIter.hasNext()) {
            values.add(sIter.nextStatement().getObject());
        }
    } finally {
        // The original never closed this iterator; close it so the
        // underlying model resources are always released.
        sIter.close();
    }
    return values;
}
thisMetaPage.addProperty( API.definition, uriForDefinition ); URI emv_uri = URIUtils.replaceQueryParam( URIUtils.newURI(thisMetaPage.getURI()), "_metadata", "all" ); thisMetaPage.addProperty( API.extendedMetadataVersion, metaModel.createResource( emv_uri.toString() ) ); thisMetaPage.addProperty( RDF.type, API.Page ); Resource l = licence.inModel(thisMetaPage.getModel()); thisMetaPage.addProperty(DCTerms.license, l); thisMetaPage.getModel().add(ResourceUtils.reachableClosure(l)); for (Resource d: notices) { thisMetaPage.addProperty(ELDA_API.notice, d); thisMetaPage.getModel().add(ResourceUtils.reachableClosure(d)); Model toRemove = ModelFactory.createDefaultModel(); for (Statement s: thisMetaPage.listProperties().toList()) { if (mc.drop(s.getPredicate())) { boolean isItems = s.getPredicate().equals(API.items); boolean isPrimaryTopic = s.getPredicate().equals(FOAF.primaryTopic); boolean isTypePage = s.getPredicate().equals(RDF.type) && s.getObject().equals(API.Page); boolean keep = isItems || isPrimaryTopic || (demandPage && isTypePage);
/**
 * For every rdfs:range declaration in {@code schema}, types the object
 * of each matching statement in {@code result} with the declared range.
 * Additions are staged in a scratch model so {@code result} is not
 * mutated while its statements are being iterated.
 *
 * @param result the model to enrich with rdf:type statements
 * @param schema the model supplying rdfs:range declarations
 */
protected static void addRangeTypes(Model result, Model schema) {
    Model pending = ModelFactory.createDefaultModel();
    StmtIterator ranges = schema.listStatements(ANY, RDFS.range, ANY);
    while (ranges.hasNext()) {
        Statement rangeStatement = ranges.nextStatement();
        RDFNode rangeType = rangeStatement.getObject();
        Property predicate = rangeStatement.getSubject().as(Property.class);
        StmtIterator uses = result.listStatements(ANY, predicate, ANY);
        while (uses.hasNext()) {
            RDFNode object = uses.nextStatement().getObject();
            if (object.isResource()) {
                pending.add(object.asResource(), RDF.type, rangeType);
            }
        }
    }
    result.add(pending);
}
try { Model model = ModelFactory.createDefaultModel(); model.read((in), ""); StmtIterator iter; Statement stmt; iter = model.listStatements(); while (iter.hasNext()) { stmt = (Statement) iter.next(); String sub = stmt.getSubject().getURI(); String obj = stmt.getObject().toString(); if (sub != null) { try { if (stmt.getPredicate().getURI().indexOf("subClassOf") > 0) {
/**
 * Returns a pair of models: index 0 holds every statement whose subject
 * or object is a blank node; index 1 holds all remaining statements.
 *
 * @param g the graph whose statements are partitioned
 * @return a two-element array: [blank-node statements, other statements]
 */
private Model[] separateStatementsWithBlankNodes(Graph g) {
    Model source = ModelFactory.createModelForGraph(g);
    Model withBlanks = ModelFactory.createDefaultModel();
    Model withoutBlanks = ModelFactory.createDefaultModel();
    for (StmtIterator it = source.listStatements(); it.hasNext();) {
        Statement statement = it.nextStatement();
        boolean hasBlank = statement.getSubject().isAnon() || statement.getObject().isAnon();
        (hasBlank ? withBlanks : withoutBlanks).add(statement);
    }
    return new Model[] { withBlanks, withoutBlanks };
}
protected static void addSupertypes( Model result ) { Model temp = ModelFactory.createDefaultModel(); for (StmtIterator it = result.listStatements( ANY, RDF.type, ANY ); it.hasNext();) { Statement s = it.nextStatement(); Resource c = AssemblerHelp.getResource( s ); for (StmtIterator subclasses = result.listStatements( c, RDFS.subClassOf, ANY ); subclasses.hasNext();) { RDFNode type = subclasses.nextStatement().getObject(); // System.err.println( ">> adding super type: subject " + s.getSubject() + ", type " + type ); temp.add( s.getSubject(), RDF.type, type ); } } result.add( temp ); }
ProvenanceVocab.EXECUTED_PROCESS_RUN.getURI(), "urn:processrun"); Model model = ModelFactory.createDefaultModel(); Resource workflowRun = model.createResource("urn:workflowrun"); String workFlowRunUri = workflowRun.getURI(); Resource processRun = model.createResource("urn:processrun"); Model retrievedGraphModel = metadataService .retrieveGraphModel(workFlowRunUri); retrievedGraphModel.write(System.out, "N3"); retrievedGraphModel.contains(workflowRun, ProvenanceVocab.EXECUTED_PROCESS_RUN, processRun); StmtIterator listStatements = retrievedGraphModel.listStatements(); int statementsCount = 0; while (listStatements.hasNext()) { listStatements.next(); statementsCount++; listStatements.close(); assertEquals(2, statementsCount); metadataService.removeGraph(workFlowRunUri);
private void verbModel(Model model, String verb) { Model m = ModelFactory.createDefaultModel(); int testLimit = 1000; StmtIterator stmtIt = model.listStatements(); int count = 0; try { while (stmtIt.hasNext()) { count++; m.add(stmtIt.nextStatement()); if (count % testLimit == 0 || !stmtIt.hasNext()) { StringWriter sw = new StringWriter(); m.write(sw, "N-TRIPLE"); StringBuffer updateStringBuff = new StringBuffer(); String graphURI = graph.getGraphURI(); updateStringBuff.append(verb + " DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "")); updateStringBuff.append(sw); updateStringBuff.append(((graphURI != null) ? " } " : "") + " }"); String updateString = updateStringBuff.toString(); //log.info(updateString); graph.executeUpdate(updateString); m.removeAll(); } } } finally { stmtIt.close(); } }
new SimpleSelector(null, ResourceMapConstants.IS_DESCRIBED_BY_PROPERTY, (Object) null); StmtIterator sItr = m.listStatements(isDescribedBySelector); while (sItr.hasNext()) { final Statement stmt = sItr.next(); final String rdfUri = stmt.getObject().toString(); if (loadedRems.contains(rdfUri)) { LOG.trace("Already loaded ReM from {}", rdfUri); Model loaded = ModelFactory.createDefaultModel(); RDFReader reader = loaded.getReader(); reader.setProperty(ResourceMapConstants.JENA_ERROR_MODE_URI, ResourceMapConstants.JENA_ERROR_MODE_STRICT); ResourceMapUtil.loadRDF(reader, loaded, baseDir, rdfUri); loadedRems.add(rdfUri); m = load(m.union(loaded), baseDir, loadedRems);
/**
 * Copies into {@code result} every rdfs:subClassOf statement from
 * {@code schema} whose subject and object are both named (URI)
 * resources; statements involving blank nodes are ignored.
 *
 * @param result the model receiving the subclass statements
 * @param schema the model supplying rdfs:subClassOf declarations
 */
protected static void addSubclassesFrom(Model result, Model schema) {
    StmtIterator subclassings = schema.listStatements(ANY, RDFS.subClassOf, ANY);
    while (subclassings.hasNext()) {
        Statement s = subclassings.nextStatement();
        boolean bothNamed = s.getSubject().isURIResource() && s.getObject().isURIResource();
        if (bothNamed) {
            result.add(s);
        }
    }
}
/**
 * For every rdfs:domain declaration in {@code schema}, types the subject
 * of each {@code result} statement that uses the declared property with
 * the domain type.
 *
 * Additions are staged in a scratch model and merged at the end: the
 * original added directly to {@code result} while iterating over
 * {@code result}'s statements, which risks a
 * ConcurrentModificationException. The sibling addRangeTypes and
 * addSupertypes already stage their additions this way.
 *
 * @param result the model to enrich with rdf:type statements
 * @param schema the model supplying rdfs:domain declarations
 */
protected static void addDomainTypes( Model result, Model schema ) {
    Model toAdd = ModelFactory.createDefaultModel();
    for (StmtIterator it = schema.listStatements( ANY, RDFS.domain, ANY ); it.hasNext();) {
        Statement s = it.nextStatement();
        Property property = s.getSubject().as( Property.class );
        RDFNode type = s.getObject();
        for (StmtIterator x = result.listStatements( ANY, property, ANY ); x.hasNext();) {
            Statement t = x.nextStatement();
            toAdd.add( t.getSubject(), RDF.type, type );
        }
    }
    result.add( toAdd );
}
Model m = ModelFactory.createDefaultModel(); for (StmtIterator i = r.listProperties(); i.hasNext(); ) { Statement s = i.nextStatement(); m.add( s ); if (s.getObject() instanceof Resource) { queue.add( s.getObject() );
/**
 * Fills the mapping's graph-node-id to triples-map-id table: for each
 * km-dev nodeId statement in {@code model}, finds every triples map
 * whose rr:subjectMap points at that statement's subject and records
 * the (nodeId, triplesMapId) pair.
 *
 * @param kr2rmlMapping the mapping whose auxiliary info is populated
 * @throws FileNotFoundException        declared for interface compatibility
 * @throws UnsupportedEncodingException declared for interface compatibility
 */
private void createGraphNodeToTriplesNodeMap(KR2RMLMapping kr2rmlMapping)
        throws FileNotFoundException, UnsupportedEncodingException {
    Map<String, String> nodeToTriplesMap =
            kr2rmlMapping.getAuxInfo().getGraphNodeIdToTriplesMapIdMap();
    StmtIterator nodeIdStatements =
            model.listStatements(null, model.getProperty(Uris.KM_NODE_ID_URI), (RDFNode) null);
    while (nodeIdStatements.hasNext()) {
        Statement nodeIdStatement = nodeIdStatements.next();
        String nodeId = nodeIdStatement.getObject().toString();
        Resource subject = nodeIdStatement.getSubject();
        if (subject == null) {
            continue; // guard clause replaces the original's if-wrap
        }
        StmtIterator subjectMapStatements =
                model.listStatements(null, model.getProperty(Uris.RR_SUBJECTMAP_URI), subject);
        while (subjectMapStatements.hasNext()) {
            String triplesMapId = subjectMapStatements.next().getSubject().toString();
            nodeToTriplesMap.put(nodeId, triplesMapId);
        }
    }
}
/** Converts a model to a set of nodes with edges to other nodes. */
void convert(Model m) {
    StmtIterator statements = m.listStatements();
    while (statements.hasNext()) {
        Statement statement = statements.next();
        RDFNode object = statement.getObject();
        if (object.isResource()) {
            convert(statement.getSubject(), object.asResource());
        }
    }
}