/**
 * Add an OutPipe to the end of the Pipeline.
 * Emit the adjacent outgoing vertices of the incoming vertex.
 *
 * @param labels the edge labels to traverse
 * @return the extended Pipeline
 */
public GremlinPipeline<S, Vertex> out(final String... labels) {
    // Delegate to the bounded overload, using MAX_VALUE as "no branch limit".
    return out(Integer.MAX_VALUE, labels);
}
@Override
public void query(GraphRewrite event, GremlinPipeline<Vertex, Vertex> pipeline) {
    // Predicate matching a candidate value (first arg) against a regex (second arg).
    final Predicate matchesRegex = new Predicate() {
        @Override
        public boolean evaluate(Object candidate, Object pattern) {
            return ((String) candidate).matches((String) pattern);
        }
    };
    // Mark the current step, hop file -> java class, filter by qualified name
    // regex, then rewind to the marked step so the original vertices are emitted.
    pipeline.as("result")
            .out(FileReferenceModel.FILE_MODEL)
            .out(JavaSourceFileModel.JAVA_CLASS_MODEL)
            .has(JavaClassModel.QUALIFIED_NAME, matchesRegex, compiledTypeFilterPattern.pattern())
            .back("result");
}
}
@Override
public void run() {
    for (int trial = 1; trial <= trials; trial++) {
        final TitanTransaction tx = graph.newTransaction();
        // Pick a random start vertex for this trial.
        final TitanVertex startVertex = tx.getVertex(vids[random.nextInt(numV)]);
        for (int rep = 0; rep < repetitions; rep++) {
            // A directed two-hop out() traversal must yield exactly numE^2 results.
            assertEquals((int) Math.pow(numE, 2), Iterables.size(
                    new GremlinPipeline<Vertex, Vertex>(startVertex).out(label).out(label)));
        }
        tx.commit();
    }
}
});
/** Returns the out-degree of {@code vertex}, counted over outgoing SIMILAR edges. */
public double getNodeOutDegree(Vertex vertex) {
    final long outgoing = new GremlinPipeline<String, Vertex>(vertex).out(SIMILAR).count();
    return (double) outgoing;
}
@Override
public Set<Integer> getNeighborsIds(int nodeId) {
    // Resolve the vertex carrying this NODE_ID (assumes exactly one match).
    final Vertex vertex = titanGraph.getVertices(NODE_ID, nodeId).iterator().next();
    final Set<Integer> neighbors = new HashSet<Integer>();
    // Collect the NODE_ID of every vertex reachable over an outgoing SIMILAR edge.
    for (Vertex neighbor : new GremlinPipeline<String, Vertex>(vertex).out(SIMILAR)) {
        Integer neighborId = neighbor.getProperty(NODE_ID);
        neighbors.add(neighborId);
    }
    return neighbors;
}
@Override
public Set<Integer> getCommunitiesConnectedToNodeCommunities(int nodeCommunities) {
    final Set<Integer> communities = new HashSet<Integer>();
    // Each member of the node community contributes the COMMUNITY id of every
    // vertex it reaches over an outgoing SIMILAR edge.
    for (Vertex member : titanGraph.getVertices(NODE_COMMUNITY, nodeCommunities)) {
        for (Vertex neighbor : new GremlinPipeline<String, Vertex>(member).out(SIMILAR)) {
            int community = neighbor.getProperty(COMMUNITY);
            communities.add(community);
        }
    }
    return communities;
}
/**
 * Get a map of the date when comments were added to each issue.
 *
 * In the case that sys:comments_added is not set, null is inserted into the map.
 *
 * @param reponame the name of the repository to mine
 * @return a Map that maps issue numbers to the date that the comments were downloaded
 */
public Map<Integer, Date> getIssueCommentsAddedAt(final String reponame) {
    final Vertex node = getOrCreateRepository(reponame);
    final HashMap<Integer, Date> map = new HashMap<Integer, Date>();
    final GremlinPipeline<Vertex, Vertex> pipe = new GremlinPipeline<Vertex, Vertex>();
    // Pass the edge label as a String for consistency with the sibling accessors
    // (getIssueEventsAddedAt, getPullRequestDiscussionsAddedAt), which use
    // EdgeType.X.toString() rather than the bare constant.
    pipe.start(node).out(EdgeType.ISSUE.toString());
    addValuesFromIterable(pipe, map, PropertyName.NUMBER, PropertyName.SYS_COMMENTS_ADDED);
    // NOTE(review): warn looks high for a routine progress count — consider debug/info.
    log.warn("number of issues: {}", map.size());
    return map;
}
/**
 * Return a mapping between pull requests and the date they were augmented
 * with discussions.
 *
 * @param reponame the name of the repository to mine
 * @return a Map from pull request number to the date discussions were added
 */
public Map<Integer, Date> getPullRequestDiscussionsAddedAt(final String reponame) {
    final Vertex repository = getOrCreateRepository(reponame);
    final HashMap<Integer, Date> discussionsAddedAt = new HashMap<Integer, Date>();
    final GremlinPipeline<Vertex, Vertex> pipe = new GremlinPipeline<Vertex, Vertex>();
    // Walk repository -> PULLREQUEST edges; the helper extracts the
    // (NUMBER, SYS_DISCUSSIONS_ADDED) pairs into the map.
    pipe.start(repository).out(EdgeType.PULLREQUEST.toString());
    addValuesFromIterable(pipe, discussionsAddedAt, PropertyName.NUMBER, PropertyName.SYS_DISCUSSIONS_ADDED);
    return discussionsAddedAt;
}
@Override
protected Pipe getQueryPipe() {
    // "*" means any taxonomy: only require that a Taxonomy.name property exists.
    final boolean anyTaxonomy = termPath.getTaxonomyName().equals("*");
    final GremlinPipeline pipe;
    if (anyTaxonomy) {
        pipe = new GremlinPipeline().has("Taxonomy.name").out();
    } else {
        // Match the named taxonomy, then filter traversed vertices whose entity
        // type starts with the term's fully qualified name.
        pipe = new GremlinPipeline()
                .has("Taxonomy.name", termPath.getTaxonomyName())
                .out()
                .has(Constants.ENTITY_TYPE_PROPERTY_KEY, Text.PREFIX, termPath.getFullyQualifiedName());
    }
    return pipe;
}
}
@Override public void shortestPath(final Vertex fromNode, Integer node) { final Vertex v2 = titanGraph.getVertices(NODE_ID, node).iterator().next(); @SuppressWarnings("rawtypes") final GremlinPipeline<String, List> pathPipe = new GremlinPipeline<String, List>(fromNode).as(SIMILAR) .out(SIMILAR).loop(SIMILAR, new PipeFunction<LoopBundle<Vertex>, Boolean>() { // @Override public Boolean compute(LoopBundle<Vertex> bundle) { return bundle.getLoops() < 5 && !bundle.getObject().equals(v2); } }).path(); }
/**
 * Get a map of the date when events were added to each issue.
 *
 * In the case that sys_events_added is not set, null is inserted into the map.
 *
 * @param repo provider of the repository id to mine
 * @return a Map that maps issue numbers to the date that the events were downloaded
 */
public Map<Integer, Date> getIssueEventsAddedAt(final IRepositoryIdProvider repo) {
    final Vertex repository = getOrCreateRepository(repo.generateId());
    final HashMap<Integer, Date> eventsAddedAt = new HashMap<Integer, Date>();
    final GremlinPipeline<Vertex, Vertex> pipe = new GremlinPipeline<Vertex, Vertex>();
    // Walk repository -> ISSUE edges; the helper extracts the
    // (NUMBER, SYS_EVENTS_ADDED) pairs into the map.
    pipe.start(repository).out(EdgeType.ISSUE.toString());
    addValuesFromIterable(pipe, eventsAddedAt, PropertyName.NUMBER, PropertyName.SYS_EVENTS_ADDED);
    return eventsAddedAt;
}
@Test public void testSingleTxDeepTraversals() throws Exception { //1) Write random graph (copied from above) final int numV = 5000; final int numE = 50; final String label = "knows"; final Random random = new Random(); final long[] vids = writeGraph(numV, numE, label); final int repetitions = 1000; long start = System.currentTimeMillis(); TitanTransaction tx = graph.buildTransaction().readOnly().start(); for (int r = 0; r < repetitions; r++) { TitanVertex v = tx.getVertex(vids[random.nextInt(numV)]); assertTrue((int) Math.pow(numE, 2) <= Iterables.size( new GremlinPipeline<Vertex, Vertex>(v) .both(label).both(label) )); assertEquals((int) Math.pow(numE, 2), Iterables.size( new GremlinPipeline<Vertex, Vertex>(v) .out(label).out(label) )); } tx.commit(); System.out.println("Time in ms for [" + (repetitions) + "] traversals in single tx: " + (System.currentTimeMillis() - start)); }
// Issue owners: repository -> ISSUE, then reverse along ISSUEOWNER edges.
userList = pipe.start(repo).out(EdgeType.ISSUE).in(EdgeType.ISSUEOWNER).dedup().toList();
log.info("Issue owners: {}", userList.size());
users.addAll(userList);
// Issue comment owners: repository -> ISSUE -> ISSUECOMMENT, then reverse
// along ISSUECOMMENTOWNER edges.
userList = pipe.start(repo).out(EdgeType.ISSUE).out(EdgeType.ISSUECOMMENT).in(EdgeType.ISSUECOMMENTOWNER).dedup().toList();
log.info("Issue Comment owners: {}", userList.size());
users.addAll(userList);
// Pull request owners: repository -> PULLREQUEST, then reverse along
// PULLREQUESTOWNER edges.
userList = pipe.start(repo).out(EdgeType.PULLREQUEST).in(EdgeType.PULLREQUESTOWNER).dedup().toList();
log.info("Pull Request owners: {}", userList.size());
users.addAll(userList);
// Discussion users: anything pointing into a PULLREQUESTDISCUSSION vertex,
// filtered to vertices whose TYPE property marks them as USER.
// NOTE(review): unlike the three queries above, this userList is not added to
// `users` within the visible span — verify an addAll follows, otherwise
// discussion users are computed, logged, and dropped.
userList = pipe.start(repo).out(EdgeType.PULLREQUEST).
        out(EdgeType.PULLREQUESTDISCUSSION).in().
        has(PropertyName.TYPE, VertexType.USER).dedup().toList();
log.info("Discussion users: {}", userList.size());
// Traverse hint -> file-location reference and keep only vertices whose
// TYPE_PROP full-text-CONTAINS the Java type-reference discriminator.
pipeline.out(InlineHintModel.FILE_LOCATION_REFERENCE).has(WindupVertexFrame.TYPE_PROP, Text.CONTAINS, JavaTypeReferenceModel.TYPE);
// Rewind to the step labeled "inlineHintVertex" so the matched hint vertices
// themselves are emitted (label presumably set via as("inlineHintVertex")
// earlier in the pipeline — confirm against the surrounding code).
pipeline.back("inlineHintVertex");