if (index[indices.get(u)] == 0) { first = u; iter.doBreak(); break; // NOTE(review): fragment cut mid-scope — selects the first node whose index entry is 0 and exits the iteration early via doBreak(); confirm against the full source
if (color[indices.get(next)] == 0) { Q.add(next); iter.doBreak(); break; // NOTE(review): fragment cut mid-scope — enqueues the first uncolored neighbor and exits the iteration early via doBreak(); confirm against the full source
nodesIterable.doBreak(); return metrics; // NOTE(review): fragment — early return of metrics; doBreak() presumably releases the iterable before leaving the loop — TODO confirm
private double calculateR(Graph graph, double[] pagerankValues, HashMap<Node, Integer> indicies, boolean directed, double prob) { int N = graph.getNodeCount(); double r = (1.0 - prob) / N;//Initialize to damping factor //Calculate dangling nodes (nodes without out edges) contribution to all other nodes. //Necessary for all nodes page rank values sum to be 1 NodeIterable nodesIterable = graph.getNodes(); double danglingNodesRankContrib = 0; for (Node s : graph.getNodes()) { int s_index = indicies.get(s); int outDegree; if (directed) { outDegree = ((DirectedGraph) graph).getOutDegree(s); } else { outDegree = graph.getDegree(s); } if (outDegree == 0) { danglingNodesRankContrib += pagerankValues[s_index]; } if (isCanceled) { nodesIterable.doBreak(); break; } } danglingNodesRankContrib *= prob / N; r += danglingNodesRankContrib; return r; }
/** * See https://github.com/gephi/gephi/issues/603 Nodes position to NaN on applied layout * * @param graphModel */ public static void ensureSafeLayoutNodePositions(GraphModel graphModel) { Graph graph = graphModel.getGraph(); NodeIterable nodesIterable = graph.getNodes(); for (Node node : nodesIterable) { if (node.x() != 0 || node.y() != 0) { nodesIterable.doBreak(); return; } } //All at 0.0, init some random positions nodesIterable = graph.getNodes(); for (Node node : nodesIterable) { node.setX((float) ((0.01 + Math.random()) * 1000) - 500); node.setY((float) ((0.01 + Math.random()) * 1000) - 500); } } }
/**
 * Builds, for every node, the set of its in-neighbors, skipping self-loops.
 * For undirected graphs every incident edge is treated as incoming. Honors
 * the {@code isCanceled} flag by releasing the iterators and returning the
 * partial result accumulated so far.
 *
 * @param graph    graph to scan
 * @param directed true to restrict to incoming edges of a directed graph
 * @return map from each visited node to its set of in-neighbors
 */
private Map<Node, Set<Node>> calculateInNeighborsPerNode(Graph graph, boolean directed) {
    Map<Node, Set<Node>> result = new Object2ObjectOpenHashMap<>();
    NodeIterable nodes = graph.getNodes();
    for (Node node : nodes) {
        Set<Node> neighbors = new ObjectOpenHashSet<>();
        EdgeIterable edges = directed
            ? ((DirectedGraph) graph).getInEdges(node)
            : graph.getEdges(node);
        for (Edge edge : edges) {
            if (!edge.isSelfLoop()) {
                neighbors.add(graph.getOpposite(node, edge));
            }
            if (isCanceled) {
                edges.doBreak();
                break;
            }
        }
        result.put(node, neighbors);
        if (isCanceled) {
            nodes.doBreak();
            break;
        }
    }
    return result;
}
nodesIterable.doBreak(); return pagerankValues; // NOTE(review): fragment — early return of partial rank values; doBreak() presumably releases the iterable before leaving the loop — TODO confirm
nodesIterable.doBreak(); return resultValues; // NOTE(review): fragment — early return of partial results; doBreak() presumably releases the iterable before leaving the loop — TODO confirm
nodesIterable.doBreak(); break; // NOTE(review): fragment — early loop exit after doBreak(); enclosing loop not visible here
private double bruteForce(Graph graph) { //The atrributes computed by the statistics Column clusteringColumn = initializeAttributeColunms(graph.getModel()); float totalCC = 0; graph.readLock(); try { Progress.start(progress, graph.getNodeCount()); int node_count = 0; NodeIterable nodesIterable = graph.getNodes(); for (Node node : nodesIterable) { float nodeClusteringCoefficient = computeNodeClusteringCoefficient(graph, node, isDirected); if (nodeClusteringCoefficient > -1) { saveCalculatedValue(node, clusteringColumn, nodeClusteringCoefficient); totalCC += nodeClusteringCoefficient; } if (isCanceled) { nodesIterable.doBreak(); break; } node_count++; Progress.progress(progress, node_count); } double clusteringCoeff = totalCC / graph.getNodeCount(); return clusteringCoeff; } finally { graph.readUnlockAll(); } }
nodesIterable.doBreak(); break; // NOTE(review): fragment — early loop exit after doBreak(); enclosing loop not visible here
nodesIterable.doBreak(); break; // NOTE(review): fragment — early loop exit after doBreak(); enclosing loop not visible here
nodesIterable.doBreak(); break; // NOTE(review): fragment — early loop exit after doBreak(); enclosing loop not visible here