/**
 * Returns the number of edges of this tree.
 *
 * <p>Assumes the tree is connected: instead of counting edges explicitly,
 * it derives the count as {@link #vertexCount()} - 1.
 *
 * @return the edge count under the connectedness assumption
 */
public int edgeCount() {
    final int vertices = vertexCount();
    return vertices - 1;
}
/**
 * Returns the total number of vertices in the consensus tree of the given pair.
 *
 * @param pair the merger whose consensus tree is inspected
 * @return the vertex count of {@code pair.consensus}
 */
int getNumOfConsensusVertices(TreePairMerger pair) {
    return pair.consensus.vertexCount();
}
/**
 * Counts the clades of the consensus tree that are neither backbone clades
 * nor taxa — i.e. the unique clades that still remain.
 *
 * @param pair the merger holding the consensus tree and its counters
 * @return consensus vertices minus backbone clades minus consensus taxa
 */
int getNumRemainingUniqueClades(TreePairMerger pair) {
    final int consensusVertices = pair.consensus.vertexCount();
    final int nonUnique = pair.backboneClades + pair.consensusNumOfTaxa;
    return consensusVertices - nonUnique;
}
// Returns the vertex count of the pair's consensus tree.
// NOTE(review): this body is byte-identical to getNumOfConsensusVertices();
// the name suggests it should count only backbone vertices (e.g. exclude
// reinserted single taxa or non-backbone clades). Verify against callers
// whether this duplication is intentional.
int getNumOfConsensusBackboneVertices(TreePairMerger pair) { return pair.consensus.vertexCount(); }
void pruneToCommonLeafes() { singleTaxa = new ArrayList<>(t1pruned.vertexCount() + t2pruned.vertexCount()); // is an upper bound for the list --> no resizing pruneLeafes(t1pruned); pruneLeafes(t2pruned); t1prunedVertexCount = t1pruned.vertexCount(); t2prunedVertexCount = t2pruned.vertexCount(); }
/**
 * Counts the clades unique to either input tree, i.e. inner vertices that are
 * not part of the shared (pruned) backbone.
 *
 * @param pair the merger holding both trees and their pruned vertex counts
 * @return the combined number of unique clades of t1 and t2
 */
int getNumUniqueClades(TreePairMerger pair) {
    // Inner vertices of each full tree: total vertices minus that tree's taxa.
    final int t1AllClades = pair.t1.vertexCount() - treeToTaxa.get(pair.t1).size();
    final int t2AllClades = pair.t2.vertexCount() - treeToTaxa.get(pair.t2).size();
    // Inner vertices of each pruned tree: shared backbone clades.
    final int sharedLeafes = pair.commonLeafes.size();
    final int t1Backbone = pair.t1prunedVertexCount - sharedLeafes;
    final int t2Backbone = pair.t2prunedVertexCount - sharedLeafes;
    // Two's-complement int addition is associative, so this regrouping is exact.
    return t1AllClades + t2AllClades - t1Backbone - t2Backbone;
}
/**
 * Computes the normalized, symmetrized BCN partition distance between two
 * trees: the sum of the directed distances in both directions, divided by the
 * total vertex count of both trees.
 *
 * @param t1 first tree
 * @param t2 second tree
 * @return the normalized symmetric distance
 */
public static double getNormalizedBCNPartitionDist(Tree t1, Tree t2) {
    final int symmetricDist = bcnPartitionDist(t1, t2) + bcnPartitionDist(t2, t1);
    final int totalVertices = t1.vertexCount() + t2.vertexCount();
    return (double) symmetricDist / totalVertices;
}
/**
 * Builds the consensus of the two pruned trees with the given method.
 *
 * <p>Only runs when more than two leaves are shared; otherwise a warning is
 * logged and the consensus-related fields are left untouched.
 *
 * @param consensusMethod the consensus strategy to apply
 */
public void calculateConsensus(Consensus.ConsensusMethod consensusMethod) {
    // Guard clause: with <= 2 shared leaves there is no meaningful consensus.
    if (commonLeafes.size() <= 2) {
        Logger.getGlobal().warning("Trees have nothing in common!");
        return;
    }
    pruneToCommonLeafes();
    consensus = Consensus.getConsensus(Arrays.asList(t1pruned, t2pruned), consensusMethod);
    // Inner vertices of the consensus form the shared backbone clades.
    backboneClades = consensus.vertexCount() - commonLeafes.size();
    consensusNumOfTaxa = reinsertSingleTaxa(consensus);
}
/**
 * Builds a map from leaf-label sets to tree nodes by descending into each
 * subtree hanging off the tree's root via {@code addLeafesFromChildren}.
 *
 * @param tree                 the tree to decompose
 * @param setEdgeweightsToZero forwarded to the traversal; when set, edge
 *                             weights are zeroed along the way
 * @return the populated map, pre-sized to the tree's vertex count
 */
private static Map<Set<String>, TreeNode> getTreeAsLeafMap(final Tree tree, final boolean setEdgeweightsToZero) {
    final Map<Set<String>, TreeNode> cladeToNode = new HashMap<>(tree.vertexCount());
    for (final TreeNode child : tree.getRoot().children()) {
        // Fresh accumulator set for each top-level subtree.
        final Set<String> taxaAccumulator = new HashSet<String>();
        addLeafesFromChildren(cladeToNode, child, taxaAccumulator, setEdgeweightsToZero);
    }
    return cladeToNode;
}
/**
 * Computes the resolution of the given tree via
 * {@code TreeUtils.calculateTreeResolution}.
 *
 * <p>NOTE(review): the method name contains a typo ("caclulate"); it is kept
 * as-is because callers outside this view may reference it.
 *
 * @param tree the tree to measure
 * @return the tree's resolution
 */
private double caclulateTreeResolution(Tree tree) {
    final int taxa = tree.getNumTaxa();
    final int vertices = tree.vertexCount();
    return TreeUtils.calculateTreeResolution(taxa, vertices);
}
/**
 * Checks whether two trees are equal.
 *
 * <p>Fast paths: object equality, differing vertex counts, differing leaf
 * label sets. Otherwise falls back to FN/FP rate comparison — the trees are
 * equal iff both rates at indices 2 and 3 are zero.
 *
 * @param t1 first tree
 * @param t2 second tree
 * @return {@code true} iff the trees are equal under the checks above
 */
public static boolean treeEquals(Tree t1, Tree t2) {
    if (t1.equals(t2)) {
        return true;
    }
    if (t1.vertexCount() != t2.vertexCount()) {
        return false;
    }
    final Set<String> leaves1 = TreeUtils.getLeafLabels(t1.getRoot());
    final Set<String> leaves2 = TreeUtils.getLeafLabels(t2.getRoot());
    if (!leaves1.equals(leaves2)) {
        return false;
    }
    final double[] rates = FN_FP_RateComputer.calculateRates(t1, t2, false);
    return rates[2] == 0d && rates[3] == 0d;
}
// Fragment (enclosing method starts outside this view).
// Builds bipartition sets and LCA maps for the strict consensus and for
// `tree`, then removes the strict-consensus partitions so only the partitions
// exclusive to `tree` remain.
Tree strictConsensus = strict.getResult();
// -2 is the no-entry value of the Trove map; vertex count pre-sizes it.
allLeafes = new TObjectIntHashMap<>(strictConsensus.vertexCount(), Constants.DEFAULT_LOAD_FACTOR, -2);
numOfTaxa = -1;
indexToLeaf1 = new TIntObjectHashMap<>();
Map<TreeNode, Bipartition> lcaToLabelsStrict = new HashMap<>(strictConsensus.vertexCount());
Set<Bipartition> partitionsStrict = new HashSet<>(strictConsensus.vertexCount());
createPartitionsAndLCAMap(strictConsensus, partitionsStrict, lcaToLabelsStrict);
// NOTE(review): the maps for `tree` are sized by the consensus' vertex count,
// not tree.vertexCount() — presumably a capacity heuristic; confirm.
Map<TreeNode, Bipartition> lcaToLabels = new HashMap<>(strictConsensus.vertexCount());
Set<Bipartition> partitions = new HashSet<>(strictConsensus.vertexCount());
createPartitionsAndLCAMap(tree, partitions, lcaToLabels);
// Keep only partitions of `tree` that are NOT in the strict consensus.
partitions.removeAll(partitionsStrict);
// Fragment (enclosing method starts outside this view).
// Builds bipartition sets and LCA maps for the first two trees of `trees`.
Tree t2 = trees.get(1);
// -2 is the no-entry value of the Trove map; t1's vertex count pre-sizes it.
allLeafes = new TObjectIntHashMap<>(t1.vertexCount(), Constants.DEFAULT_LOAD_FACTOR, -2);
numOfTaxa = -1;
indexToLeaf1 = new TIntObjectHashMap<>();
Map<TreeNode, Bipartition> lcaToLabels1 = new HashMap<>(t1.vertexCount());
Set<Bipartition> partitions1 = new HashSet<>(t1.vertexCount());
createPartitionsAndLCAMap(t1, partitions1, lcaToLabels1);
Map<TreeNode, Bipartition> lcaToLabels2 = new HashMap<>(t2.vertexCount());
Set<Bipartition> partitions2 = new HashSet<>(t2.vertexCount());
createPartitionsAndLCAMap(t2, partitions2, lcaToLabels2);
// Incomplete in this view: the method body continues past the visible lines.
public void reduceUnsupportedClades(Tree supertree) {
    int modifiedCharacters = 0;
    int deletedCharacters = 0;
    // Inner-node count = total vertices minus taxa (leaves).
    int innernodesBefore = supertree.vertexCount() - supertree.getNumTaxa();
    // NOTE(review): computed identically to innernodesBefore with no reduction
    // in between — the reduction presumably happens in code not visible here,
    // or this line belongs after it; verify against the full method.
    int innernodesAfter = supertree.vertexCount() - supertree.getNumTaxa();
    // NOTE(review): both messages are missing a space before "clades"
    // (runtime strings left untouched in this documentation pass).
    System.out.println("--------------> " + modifiedCharacters + "clades modified");
    System.out.println("--------------> " + deletedCharacters + "clades deleted (counted)");
// Incomplete in this view: the method body continues past the visible lines.
private void pruneLeafes(Tree t) {
    // Maps insertion-point leaf sets to the single taxa associated with them
    // (exact semantics continue below this view).
    Map<Set<String>, Set<SingleTaxon>> commenInsertionPointTaxa;
    commenInsertionPointTaxa = new THashMap<>(t.vertexCount());
    // Stack for an iterative traversal, pre-sized to the vertex count.
    Deque<TreeNode> stack = new ArrayDeque<>(t.vertexCount());
/**
 * Maps each non-root inner clade (as its leaf-label set) of the tree to its
 * node. The root itself is never added.
 *
 * <p>When {@code setEdgeweightsToZero} is set, the parent edge of every
 * non-root node visited (inner node or leaf) is reset to weight 0.
 *
 * @param tree                 the tree whose clades are collected
 * @param setEdgeweightsToZero whether to reset parent-edge weights to 0
 * @return a map from leaf-label sets to the corresponding inner nodes
 */
public static Map<Set<String>, TreeNode> getChildrenMap(final Tree tree, final boolean setEdgeweightsToZero) {
    final Map<Set<String>, TreeNode> cladeToNode = new HashMap<>(tree.vertexCount());
    final TreeNode root = tree.getRoot();
    for (final TreeNode node : tree.vertices()) {
        if (!node.isInnerNode()) {
            // Leaf: only the optional edge-weight reset applies.
            if (setEdgeweightsToZero) {
                node.getEdgeToParent().setWeight(0d);
            }
            continue;
        }
        if (node == root) {
            continue; // the root has no parent edge and is not a clade entry
        }
        if (setEdgeweightsToZero) {
            node.getEdgeToParent().setWeight(0d);
        }
        cladeToNode.put(getLeafLabels(node), node);
    }
    return cladeToNode;
}
// Fragment (enclosing method/loop starts outside this view).
TreeNode root = (TreeNode) t.getRoot();
pruneTraverse(nt, root, null, labels);
// Keep the pruned tree only if it is non-empty and its root still has children.
if (nt.vertexCount() > 0 && ((TreeNode) nt.getRoot()).childCount() > 0) {
    newTrees.add(nt);
/**
 * Collects the labels of all leaves of the given tree.
 *
 * @param tree1 the tree to scan
 * @return the set of leaf labels, pre-sized to the tree's vertex count
 */
THashSet<String> getLeafLabels(Tree tree1) {
    final THashSet<String> labels = new THashSet<>(tree1.vertexCount());
    for (final TreeNode node : tree1.getRoot().depthFirstIterator()) {
        if (node.isLeaf()) {
            labels.add(node.getLabel());
        }
    }
    return labels;
}
// Fragment (enclosing scope starts outside this view).
// Parse the stored Newick string of this single taxon into a tree.
Tree subtree = Newick.getTreeFromString(singleTaxon.subtree);
// Presumably 2 vertices = root plus one leaf, i.e. a trivial single-taxon
// subtree — TODO confirm against Newick.getTreeFromString's root handling.
if (subtree.vertexCount() == 2) {
// Fragment (enclosing scope starts outside this view); braces do not balance
// within the visible lines. NOTE(review): this looks like two disjoint
// snippets fused together — the inner vertexCount() == 0 / == 1 checks cannot
// both be reachable inside the outer vertexCount() == 1 branch, and `root` is
// declared twice; verify against the original file.
if (n != null) {
    if (tree.vertexCount() == 1) {
        TreeNode oldRoot = tree.getRoot();
        TreeNode root = new TreeNode();
        if (tree.vertexCount() == 0) {
            // Empty tree: create a fresh root and attach nn to it.
            TreeNode root = new TreeNode();
            tree.addVertex(root);
            tree.addEdge(root, nn);
        } else if (tree.vertexCount() == 1) {