// Releases the underlying traversal resources by delegating to the wrapped
// `paths` resource; `paths` is presumably a ResourceIterator/stream owned by
// this object — TODO confirm against the enclosing class (not visible here).
@Override public void close() { paths.close(); } }
/**
 * Verifies that apoc.refactor.rename.nodeProperty renames the property only on the
 * explicitly supplied subset of nodes: 3 nodes end up with "surname", the other 7 keep "name".
 */
@Test public void testRenamePropertyForSomeNodes() throws Exception {
    // Create 10 Foo nodes with id 0..9 and name "name<id>".
    List<Node> nodes = db.execute("UNWIND range(0,9) as id CREATE (f:Foo {id: id, name: 'name'+id}) RETURN f").<Node>columnAs("f").stream().collect(Collectors.toList());
    db.execute("Create constraint on (n:Foo) assert n.name is UNIQUE");
    // Rename only on the first three nodes.
    testCall(db, "CALL apoc.refactor.rename.nodeProperty({oldName},{newName},{nodes})",
            map("oldName", "name", "newName", "surname", "nodes", nodes.subList(0, 3)),
            (r) -> {});
    // Long.valueOf replaces the deprecated new Long(...) boxing constructor.
    assertEquals(Long.valueOf(3), resultNodesMatches(null, "surname"));
    assertEquals(Long.valueOf(7), resultNodesMatches(null, "name"));
}
if (labels.remove("*")) labels.addAll(db.getAllLabels().stream().map(Label::name).collect(Collectors.toSet())); Label[] singleLabel = {label}; try (ResourceIterator<Node> nodes = (labelName.equals("*")) ? db.getAllNodes().iterator() : db.findNodes(label)) { while (nodes.hasNext()) { List<Node> batch = Util.take(nodes, BATCHSIZE); futures.add(Util.inTxFuture(pool, db, () -> { aggregate(vn, nodeAggNames, nodeAggKeys.length > 0 ? node.getProperties(nodeAggKeys) : Collections.emptyMap()); NodeKey startKey = entry.getKey(); VirtualNode v1 = virtualNodes.get(startKey); for (Relationship rel : node.getRelationships(Direction.OUTGOING)) { if (includeRels != null && !includeRels.contains(rel.getType().name())) continue; Node endNode = rel.getEndNode(); Stream<VirtualNode> stream = fixAggregates(virtualNodes.values()).stream(); if (filter != null) stream = stream.filter(n -> filter(n.getLabels(), n.getAllProperties(), filter)); if (limitNodes > -1) stream = stream.limit(limitNodes); Stream<GroupResult> groupResultStream = stream.map(n -> new GroupResult(n, getRelationships(n, filter, (int) relsPerNode))); if (!orphans) groupResultStream = groupResultStream.filter(g -> g.relationships!=null && !g.relationships.isEmpty() && g.node.getDegree() > 0); groupResultStream = groupResultStream.flatMap(GroupResult::spread);
public Stream<CCResult> wcc() { List<List<CCVar>> results = new LinkedList<List<CCVar>>(); ResourceIterator<Node> nodes = db.getAllNodes().iterator(); PrimitiveLongSet allNodes = Primitive.longSet(0); while (nodes.hasNext()) { Node node = nodes.next(); if (node.getDegree() == 0) { List<CCVar> result = new LinkedList<CCVar>(); result.add(new CCVar(node.getId()+"",node.getLabels().iterator().next().name())); results.add(result); } else { nodes.close(); long n = it.next(); List<CCVar> result = new LinkedList<CCVar>(); PrimitiveLongIterator reachableIDs = go(db.getNodeById(n), Direction.BOTH,result).iterator(); while (reachableIDs.hasNext()) { long id = (long) reachableIDs.next(); return results.stream().map((x) ->new CCResult( x.stream().map((z) -> new Long(z.getId())).collect(Collectors.toList()), x.stream().collect(Collectors.groupingBy(CCVar::getType)).entrySet().stream().collect(Collectors.toMap( e -> e.getKey(), e -> e.getValue().size()))
private static List<Double> getWeightVectorForClass(Map<String, List<LinkedHashMap<String, Object>>> documents, String key, List<Integer> featureIndexList, GraphDatabaseService db) { List<Double> weightVector; Transaction tx = db.beginTx(); // Get class id Long classId = db.findNodesByLabelAndProperty(DynamicLabel.label("Class"), "name", key).iterator().next().getId(); // Get weight vector for class List<Long> longs = documents.get(key) .stream() .map(a -> ((Integer)a.get("feature")).longValue()) .collect(Collectors.toList()); weightVector = featureIndexList.stream().map(i -> longs.contains(i.longValue()) ? tfidf(db, i.longValue(), classId) : 0.0) .collect(Collectors.toList()); tx.success(); tx.close(); return weightVector; }
/**
 * Locates the single store file whose name matches the one named in the request.
 * The listing iterator is opened in try-with-resources so it is always released;
 * onlyOne(...) enforces that exactly one file matched.
 */
@Override
ResourceIterator<StoreFileMetadata> files( GetStoreFileRequest request, NeoStoreDataSource neoStoreDataSource ) throws IOException
{
    try ( ResourceIterator<StoreFileMetadata> storeFiles = neoStoreDataSource.listStoreFiles( false ) )
    {
        String requestedName = request.file().getName();
        List<StoreFileMetadata> matching = storeFiles.stream()
                .filter( matchesRequested( requestedName ) )
                .collect( Collectors.toList() );
        return Iterators.asResourceIterator( onlyOne( matching, requestedName ) );
    }
} }
/**
 * Runs a Cypher statement and returns its first column: every row as a List when
 * {@code expectMultipleValues} is true, otherwise only the first row's value (or null).
 * Deprecated in favour of runFirstColumnMany / runFirstColumnSingle.
 */
@UserFunction
@Deprecated
@Description("use either apoc.cypher.runFirstColumnMany for a list return or apoc.cypher.runFirstColumnSingle for returning the first row of the first column")
public Object runFirstColumn(@Name("cypher") String statement, @Name("params") Map<String, Object> params, @Name(value = "expectMultipleValues",defaultValue = "true") boolean expectMultipleValues) {
    // Guard against a missing parameter map.
    Map<String, Object> parameters = (params == null) ? Collections.emptyMap() : params;
    String resolvedStatement = withParamMapping(statement, parameters.keySet());
    // Force the slotted runtime unless the statement already pins one.
    if (!resolvedStatement.contains(" runtime")) {
        resolvedStatement = "cypher runtime=slotted " + resolvedStatement;
    }
    try (Result result = db.execute(resolvedStatement, parameters)) {
        String firstColumn = result.columns().get(0);
        try (ResourceIterator<Object> values = result.columnAs(firstColumn)) {
            if (!expectMultipleValues) {
                return values.hasNext() ? values.next() : null;
            }
            return values.stream().collect(Collectors.toList());
        }
    }
}
// Verifies that index snapshot files survive checkpoints while their snapshot is
// held open, and are deleted after the snapshots are released and a checkpoint runs.
// The exact interleaving of generateData/snapshot/forceCheckpoint calls is what is
// under test, so the statement order must not be changed.
@Test public void snapshotFilesDeletedWhenSnapshotReleased() throws IOException {
    Label label = Label.label( "testLabel" );
    prepareDatabase( label );
    // Take three snapshots, inserting fresh data between each so they cover
    // different index states.
    ResourceIterator<File> firstCheckpointSnapshot = indexingService.snapshotIndexFiles();
    generateData( label );
    ResourceIterator<File> secondCheckpointSnapshot = indexingService.snapshotIndexFiles();
    generateData( label );
    ResourceIterator<File> thirdCheckpointSnapshot = indexingService.snapshotIndexFiles();
    Set<String> firstSnapshotFileNames = getFileNames( firstCheckpointSnapshot );
    Set<String> secondSnapshotFileNames = getFileNames( secondCheckpointSnapshot );
    Set<String> thirdSnapshotFileNames = getFileNames( thirdCheckpointSnapshot );
    generateData( label );
    forceCheckpoint( checkPointer );
    // While the snapshots are still open, a checkpoint must NOT delete their files.
    assertTrue( firstSnapshotFileNames.stream().map( File::new ).allMatch( fileSystem::fileExists ) );
    assertTrue( secondSnapshotFileNames.stream().map( File::new ).allMatch( fileSystem::fileExists ) );
    assertTrue( thirdSnapshotFileNames.stream().map( File::new ).allMatch( fileSystem::fileExists ) );
    // Release all three snapshots.
    firstCheckpointSnapshot.close();
    secondCheckpointSnapshot.close();
    thirdCheckpointSnapshot.close();
    generateData( label );
    forceCheckpoint( checkPointer );
    // After release + checkpoint, every snapshotted file must be gone.
    assertFalse( firstSnapshotFileNames.stream().map( File::new ).anyMatch( fileSystem::fileExists ) );
    assertFalse( secondSnapshotFileNames.stream().map( File::new ).anyMatch( fileSystem::fileExists ) );
    assertFalse( thirdSnapshotFileNames.stream().map( File::new ).anyMatch( fileSystem::fileExists ) );
}
/**
 * Doubles the numeric property on every node carrying {@code label} and refreshes
 * the corresponding entry in the explicit index so its numeric value stays in sync.
 *
 * @param index           explicit index to update alongside the property change
 * @param numericProperty name of the Integer property to double
 * @param label           label selecting the nodes to update
 */
private void doubleNumericPropertyValueForAllNodesWithLabel( Index<Node> index, String numericProperty, Label label )
{
    // The node iterator is a ResourceIterator and must be closed; the original
    // never closed it, leaking the underlying store resources.
    try ( Transaction transaction = graphDb.beginTx();
          ResourceIterator<Node> nodes = graphDb.findNodes( label ) )
    {
        nodes.stream().forEach( node ->
        {
            node.setProperty( numericProperty, (Integer) node.getProperty( numericProperty ) * 2 );
            // Re-index: remove the stale entry, then add the new value numerically.
            index.remove( node, numericProperty );
            index.add( node, numericProperty, new ValueContext( node.getProperty( numericProperty ) ).indexNumeric() );
        } );
        transaction.success();
    }
}
/**
 * Registering the same store-file provider twice must not duplicate its files:
 * the marker file should appear exactly once in the listing.
 */
@Test public void doNotListFilesFromAdditionalProviderThatRegisterTwice() throws IOException {
    NeoStoreFileListing neoStoreFileListing = neoStoreDataSource.getNeoStoreFileListing();
    MarkerFileProvider provider = new MarkerFileProvider();
    neoStoreFileListing.registerStoreFileProvider( provider );
    neoStoreFileListing.registerStoreFileProvider( provider );
    // try-with-resources: the original never closed this ResourceIterator, leaking it.
    try ( ResourceIterator<StoreFileMetadata> metadataResourceIterator = neoStoreFileListing.builder().build() )
    {
        assertEquals( 1, metadataResourceIterator.stream().filter( metadata -> "marker".equals( metadata.file().getName() ) ).count() );
    }
}
InputIterator relationships = relationships( relationshipRandomSeed, relationshipCount, config.batchSize(), inputIdGenerator, groups ).iterator(); ResourceIterator<Node> dbNodes = db.getAllNodes().iterator() ) while ( dbNodes.hasNext() ) Node node = dbNodes.next(); String id = (String) node.getProperty( "id" ); assertNull( nodeByInputId.put( id, node ) ); verifiedNodes++; assertDegrees( node ); allNodesScanLabelCount += Iterables.count( node.getLabels() ); long labelScanStoreEntryCount = db.getAllLabels().stream() .flatMap( l -> db.findNodes( l ).stream() ) .count();