/** Combined entry count of the int-keyed and long-keyed backing maps. */
public int size() {
    int total = this.ints.size();
    total += this.longs.size();
    return total;
}
/** Total points held by the bucket: loose singles plus de-duplicated entries. */
@Override
protected int bucketSize( Bucket bucket )
{
    int singleCount = bucket.singles.n;
    int dupeCount = bucket.dupes.size( );
    return singleCount + dupeCount;
}
/** Point count for this bucket: each single and each duplicate key counts once. */
@Override
protected int bucketSize( Bucket bucket )
{
    return bucket.dupes.size( ) + bucket.singles.n;
}
/** Diagnostic accessor: number of entries in the first-kmer edge lookup. */
public int tracking_pathNodeEdgeLookupSize() { return firstKmerEdgeLookup.size(); }
// NOTE(review): the body of the following method continues beyond this chunk.
public int tracking_edgeLookupMaxKmerNodeCount() {
/** Diagnostic accessor: number of kmers currently held in {@code lookup}. */
public int tracking_kmerCount() { return lookup.size(); }
// NOTE(review): the body of the following method continues beyond this chunk.
public int tracking_supportNodeCount() {
/**
 * Total points held by the bucket. Singles are stored two array slots per
 * point (see the paired-coordinate layout in chooseDividers), hence n / 2;
 * each duplicate key contributes one point.
 */
@Override
protected int bucketSize( Bucket bucket )
{
    int pairedSingles = bucket.singles.n / 2;
    return pairedSingles + bucket.dupes.size( );
}
/** Delegates to the backing map. */
@Override
public int size() {
    return this.map.size();
}
/** Size of the backing map, exposed to subclasses. */
protected int mapSize() {
    return this.map.size();
}
}
/** Diagnostic accessor: number of kmers currently tracked in {@code byKmer}. */
public int tracking_kmerCount() { return byKmer.size(); }
// NOTE(review): the body of the following method continues beyond this chunk.
public int tracking_aggregatorQueueSize() {
/** Diagnostic accessor: number of entries in {@code edgeLookup}. */
public int tracking_edgeLookupSize() { return edgeLookup.size(); }
// NOTE(review): the body of the following method continues beyond this chunk.
public int tracking_pathNodeEdgeLookupSize() {
/** Sum of the sizes of the two backing maps (int-keyed and long-keyed). */
public int size() {
    final int intCount = this.ints.size();
    final int longCount = this.longs.size();
    return intCount + longCount;
}
@Override public void putAll(Map<? extends Long, ? extends V> m) { if (f <= .5) ensureCapacity(m.size()); // The resulting map will be sized for m.size() elements else tryCapacity(size() + m.size()); // The resulting map will be tentatively sized for size() + m.size() // elements super.putAll(m); }
/**
 * Collects the destination vertex ids stored for one partition.
 * Each long key in the partition's map becomes a fresh LongWritable.
 */
@Override
public Iterable<LongWritable> getPartitionDestinationVertices(
    int partitionId) {
  Long2ObjectOpenHashMap<T> partitionMap = map.get(partitionId);
  List<LongWritable> destinations =
      Lists.newArrayListWithCapacity(partitionMap.size());
  LongIterator keys = partitionMap.keySet().iterator();
  while (keys.hasNext()) {
    destinations.add(new LongWritable(keys.nextLong()));
  }
  return destinations;
}
}
/**
 * Re-materializes a MapResult's packed long keys into their string group
 * keys, producing a name-keyed view of the grouped values.
 */
private HashMap<String, GroupedValue> convert(MapResult mapResult) {
    HashMap<String, GroupedValue> decoded =
        new HashMap<String, GroupedValue>(mapResult.results.size());
    String[] scratch = new String[mapResult.dictionaries.length];
    // Per-column bit width, derived from each column's dictionary size.
    int[] bitsPerColumn = new int[columns.length];
    for (int col = 0; col < columns.length; col++) {
        bitsPerColumn[col] = BitHacks.findLogBase2(mapResult.dictionaries[col].size()) + 1;
    }
    for (long packedKey : mapResult.results.keySet()) {
        String groupKey = decodeKey(scratch, mapResult.dictionaries, bitsPerColumn, packedKey);
        decoded.put(groupKey, mapResult.results.get(packedKey));
    }
    return decoded;
}
/**
 * Serializes the map as: entry count, then a (long key, value) pair per
 * entry. Uses fastutil's fastIterator, which reuses the entry object to
 * avoid per-entry allocation.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeInt(map.size());
  ObjectIterator<Long2ObjectMap.Entry<V>> entries =
      map.long2ObjectEntrySet().fastIterator();
  while (entries.hasNext()) {
    Long2ObjectMap.Entry<V> entry = entries.next();
    out.writeLong(entry.getLongKey());
    valueWriter.write(out, entry.getValue());
  }
}
/**
 * Serializes one partition's map as: entry count, then (long key, value)
 * pairs. fastIterator reuses the entry object to avoid per-entry garbage.
 *
 * @param out         stream to write to
 * @param partitionId which partition's map to serialize
 * @throws IOException if the underlying stream fails
 */
@Override
public void writePartition(DataOutput out, int partitionId)
    throws IOException {
  Long2ObjectOpenHashMap<DataInputOutput> partitionMap = map.get(partitionId);
  out.writeInt(partitionMap.size());
  ObjectIterator<Long2ObjectMap.Entry<DataInputOutput>> entries =
      partitionMap.long2ObjectEntrySet().fastIterator();
  while (entries.hasNext()) {
    Long2ObjectMap.Entry<DataInputOutput> entry = entries.next();
    out.writeLong(entry.getLongKey());
    entry.getValue().write(out);
  }
}
/**
 * Invariant checks over the aggregator state. Always returns {@code true}
 * so it can be invoked as {@code assert sanityCheck();} — failures surface
 * as AssertionErrors and the whole check is free when assertions are off.
 */
private boolean sanityCheck() {
    // kmer lookup is correct
    assert(byKmer.long2ObjectEntrySet().stream().allMatch(kvp -> kvp.getLongKey() == kvp.getValue().kmer));
    // empty aggregators have been removed
    assert(byKmer.values().stream().allMatch(ag -> !ag.active.isEmpty()));
    // could have many start position entries, but only one position is valid (and even that could have duplicate entries)
    assert(byEnd.size() >= byKmer.size());
    assert(byEnd.stream().allMatch(snapshot -> !snapshot.isValid() || byKmer.containsKey(snapshot.aggregator().kmer)));
    assert(byKmer.values().stream().allMatch(ag -> ag.end() >= inputPosition));
    if (outputSortBuffer.isEmpty()) {
        // nothing buffered for output implies the whole pipeline is drained
        assert(byKmer.isEmpty());
        assert(byEnd.isEmpty());
        assert(!underlying.hasNext());
    }
    return true;
}
// NOTE(review): the body of the following method continues beyond this chunk.
public int tracking_processedSize() {
/**
 * Chooses the divider point as the mean (x, y) over every point in the
 * bucket: each packed single plus one representative per duplicate key.
 * Coordinates pass through truncInf before being folded into the mean.
 * Results are written into result[0] (x) and result[1] (y).
 */
@Override
protected void chooseDividers( float xMin, float xMax, float yMin, float yMax, Bucket bucket, float[] result )
{
    Long2ObjectOpenHashMap<IntsArray> dupes = bucket.dupes;
    IntsArray singles = bucket.singles;
    int[] packed = singles.a;
    int singleCount = singles.n;

    // Each point contributes 1/(total points) of its coordinates to the mean.
    double weight = 1.0 / ( singleCount + dupes.size( ) );
    double meanX = 0;
    double meanY = 0;

    for ( int i = 0; i < singleCount; i++ )
    {
        int point = packed[i];
        meanX += truncInf( x( point ) ) * weight;
        meanY += truncInf( y( point ) ) * weight;
    }

    for ( Entry<IntsArray> dupe : dupes.long2ObjectEntrySet( ) )
    {
        long xyKey = dupe.getLongKey( );
        meanX += truncInf( xFromKey( xyKey ) ) * weight;
        meanY += truncInf( yFromKey( xyKey ) ) * weight;
    }

    result[0] = truncInf( ( float ) meanX );
    result[1] = truncInf( ( float ) meanY );
}
/**
 * Chooses the divider point as the mean (x, y) over every point in the
 * bucket: each long-packed single plus one representative per duplicate
 * key. Coordinates pass through truncInf before being folded into the
 * mean. Results go to result[0] (x) and result[1] (y).
 */
@Override
protected void chooseDividers( float xMin, float xMax, float yMin, float yMax, Bucket bucket, float[] result )
{
    Long2ObjectOpenHashMap<LongsArray> dupes = bucket.dupes;
    LongsArray singles = bucket.singles;
    long[] packed = singles.a;
    int singleCount = singles.n;

    // Each point contributes 1/(total points) of its coordinates to the mean.
    double weight = 1.0 / ( singleCount + dupes.size( ) );
    double meanX = 0;
    double meanY = 0;

    for ( int i = 0; i < singleCount; i++ )
    {
        long point = packed[i];
        meanX += truncInf( x( point ) ) * weight;
        meanY += truncInf( y( point ) ) * weight;
    }

    for ( Entry<LongsArray> dupe : dupes.long2ObjectEntrySet( ) )
    {
        long xyKey = dupe.getLongKey( );
        meanX += truncInf( xFromKey( xyKey ) ) * weight;
        meanY += truncInf( yFromKey( xyKey ) ) * weight;
    }

    result[0] = truncInf( ( float ) meanX );
    result[1] = truncInf( ( float ) meanY );
}
/**
 * Chooses the divider point as the mean (x, y) over every point in the
 * bucket. Singles are stored as pairs of longs (two array slots per point,
 * hence n / 2 points and the i += 2 stride); each duplicate key adds one
 * representative. Coordinates pass through truncInf before averaging, and
 * the result lands in result[0] (x) and result[1] (y).
 */
@Override
protected void chooseDividers( float xMin, float xMax, float yMin, float yMax, Bucket bucket, float[] result )
{
    Long2ObjectOpenHashMap<LongsArray> dupes = bucket.dupes;
    LongsArray singles = bucket.singles;
    long[] packed = singles.a;
    int slotCount = singles.n;

    // Each point contributes 1/(total points) of its coordinates to the mean.
    double weight = 1.0 / ( slotCount / 2 + dupes.size( ) );
    double meanX = 0;
    double meanY = 0;

    for ( int i = 0; i < slotCount; i += 2 )
    {
        long lo = packed[i];
        long hi = packed[i + 1];
        meanX += truncInf( x( lo, hi ) ) * weight;
        meanY += truncInf( y( lo, hi ) ) * weight;
    }

    for ( Entry<LongsArray> dupe : dupes.long2ObjectEntrySet( ) )
    {
        long xyKey = dupe.getLongKey( );
        meanX += truncInf( xFromKey( xyKey ) ) * weight;
        meanY += truncInf( yFromKey( xyKey ) ) * weight;
    }

    result[0] = truncInf( ( float ) meanX );
    result[1] = truncInf( ( float ) meanY );
}