/**
 * Adds {@code value} to the stat named {@code statName} for the given tier,
 * lazily creating the per-tier map for that stat on first use.
 */
public void addToTieredStat(final String statName, final String tier, final long value)
{
  final Object2LongOpenHashMap<String> tierMap =
      perTierStats.computeIfAbsent(statName, ignored -> new Object2LongOpenHashMap<>());
  // fastutil addTo treats a missing tier as 0 and accumulates in place.
  tierMap.addTo(tier, value);
}
private List<LocalRegion> getAllSortedRegionList() { List<LocalRegion> allRegionList = getAllRegionList(); // Capture the sizes so that they do not change while sorting final Object2LongOpenHashMap<LocalRegion> sizes = new Object2LongOpenHashMap<>(allRegionList.size()); for (LocalRegion region : allRegionList) { long size = region instanceof BucketRegion ? ((BucketRegion) region).getSizeForEviction() : region.size(); sizes.put(region, size); } // Sort with respect to other PR buckets also in case of multiple PRs allRegionList.sort((region1, region2) -> { long numEntries1 = sizes.get(region1); long numEntries2 = sizes.get(region2); if (numEntries1 > numEntries2) { return -1; } else if (numEntries1 < numEntries2) { return 1; } return 0; }); return allRegionList; }
/**
 * Returns the accumulated value of the named global stat (the map's default
 * return value when the stat was never recorded).
 */
public long getGlobalStat(final String statName)
{
  final long value = globalStats.getLong(statName);
  return value;
}
name -> new Object2LongOpenHashMap<>() ); for (final Object2LongMap.Entry<String> entry : urStat.object2LongEntrySet()) { myStat.addTo(entry.getKey(), entry.getLongValue()); final Object2LongOpenHashMap<String> myStat = perDataSourceStats.computeIfAbsent( statName, k -> new Object2LongOpenHashMap<>() ); for (Entry<String> entry : urStat.object2LongEntrySet()) { myStat.addTo(entry.getKey(), entry.getLongValue()); for (final Object2LongMap.Entry<String> entry : stats.globalStats.object2LongEntrySet()) { globalStats.addTo(entry.getKey(), entry.getLongValue());
/**
 * Computes, per datasource queued for processing, the total byte size of all
 * segments from the start of that datasource's timeline up to the queue
 * entry's interval end. Datasources not in the queue report
 * {@code UNKNOWN_REMAINING_SEGMENT_SIZE} via the map's default return value.
 */
@Override
public Object2LongOpenHashMap<String> remainingSegmentSizeBytes()
{
  final Object2LongOpenHashMap<String> remaining = new Object2LongOpenHashMap<>();
  remaining.defaultReturnValue(UNKNOWN_REMAINING_SEGMENT_SIZE);
  for (QueueEntry entry : queue) {
    final VersionedIntervalTimeline<String, DataSegment> timeline =
        dataSources.get(entry.getDataSource());
    // Everything from the earliest segment in the timeline through this entry's end.
    final Interval searchInterval =
        new Interval(timeline.first().getInterval().getStart(), entry.interval.getEnd());
    long totalBytes = 0L;
    for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(searchInterval)) {
      totalBytes += StreamSupport.stream(holder.getObject().spliterator(), false)
                                 .mapToLong(chunk -> chunk.getObject().getSize())
                                 .sum();
    }
    remaining.put(entry.getDataSource(), totalBytes);
  }
  return remaining;
}
Object2LongOpenHashMap<String> userIdMap = new Object2LongOpenHashMap<>(); AtomicLong counter = new AtomicLong(); if (!userIdMap.containsKey(s[0])) { userIdMap.put(s[0], counter.incrementAndGet()); t.userId = userIdMap.getLong(s[0]); t.timestamp = new Date(Long.parseLong(s[2]) + timestampOffset); tList.add(t);
private List<LocalRegion> getAllSortedRegionList(){ List<LocalRegion> allRegionList = getAllRegionList(); //Capture the sizes so that they do not change while sorting final Object2LongOpenHashMap sizes = new Object2LongOpenHashMap(allRegionList.size()); for(LocalRegion r : allRegionList) { long size = r instanceof BucketRegion ?((BucketRegion)r).getSizeForEviction() : r.size(); sizes.put(r, size); } //Sort with respect to other PR buckets also in case of multiple PRs Collections.sort(allRegionList, new Comparator<LocalRegion>() { public int compare(LocalRegion r1, LocalRegion r2) { long numEntries1 = sizes.get(r1); long numEntries2 = sizes.get(r2); if (numEntries1 > numEntries2) { return -1; } else if (numEntries1 < numEntries2) { return 1; } return 0; } }); return allRegionList; }
/**
 * Creates an empty stats holder: the per-tier and per-datasource maps are
 * keyed by stat name, while global stats accumulate directly in a
 * primitive-long map.
 */
public CoordinatorStats()
{
  globalStats = new Object2LongOpenHashMap<>();
  perDataSourceStats = new HashMap<>();
  perTierStats = new HashMap<>();
}
/**
 * Increases the count for a given <code>sample</code> by <code>number</code>,
 * updating the overall sample total and tracking the most frequent sample
 * seen so far.
 *
 * @param sample
 *          the sample to increase the count for
 * @param number
 *          the number to increase by
 */
public void addSample(T sample, long number) {
  this.n = this.n + number;
  long sampleFreq = number;
  if (freqDist.containsKey(sample)) {
    // getLong avoids the Long autoboxing that the deprecated Map.get incurs
    // on this primitive-specialized fastutil map.
    sampleFreq = freqDist.getLong(sample) + number;
  }
  freqDist.put(sample, sampleFreq);
  if (sampleFreq > maxFreq) {
    maxFreq = sampleFreq;
    maxSample = sample;
  }
}
/**
 * Associates {@code v} with key {@code k}, keeping the running sum of all
 * counts in step, and returns the value previously stored for {@code k}.
 */
@Override
public long set(K k, long v) {
  final long previous = counts.put(k, v);
  // Remove the old contribution and add the new one in a single adjustment.
  sumOfCounts += v - previous;
  return previous;
}
public void load(File file) throws IOException, ClassNotFoundException { ObjectInputStream in = new ObjectInputStream(new FileInputStream(file)); freqDist = (Object2LongOpenHashMap<T>) in.readObject(); in.close(); int samples = 0; LongIterator sampleIter = freqDist.values().iterator(); // determine total frequency while (sampleIter.hasNext()) { long count = sampleIter.next(); samples += count; } n = samples; // determine max sample for (T key : freqDist.keySet()) { Long freq = freqDist.get(key); if (freq > maxFreq) { maxFreq = freq; maxSample = key; } } }
/**
 * Returns an unmodifiable view of the datasource names recorded under
 * {@code statName}, or the empty set when the stat is unknown.
 */
public Set<String> getDataSources(String statName)
{
  final Object2LongOpenHashMap<String> stat = perDataSourceStats.get(statName);
  return stat == null
         ? Collections.emptySet()
         : Collections.unmodifiableSet(stat.keySet());
}
/**
 * Invokes {@code consumer} once per (tier, value) pair recorded under
 * {@code statName}; does nothing when the stat is unknown.
 */
public void forEachTieredStat(final String statName, final ObjLongConsumer<String> consumer)
{
  final Object2LongOpenHashMap<String> tierValues = perTierStats.get(statName);
  if (tierValues == null) {
    return;
  }
  // object2LongEntrySet + getLongValue keep the iteration boxing-free.
  for (final Object2LongMap.Entry<String> entry : tierValues.object2LongEntrySet()) {
    consumer.accept(entry.getKey(), entry.getLongValue());
  }
}
/**
 * Returns the count for a given <code>sample</code>. If no such samples have
 * been recorded yet, <code>0</code> will be returned.
 *
 * @param sample
 *          the sample to get the count for
 * @return the count for a given sample
 */
public long getCount(T sample) {
  if (freqDist.containsKey(sample)) {
    // getLong avoids boxing the count into a Long; the containsKey guard is
    // kept so behavior does not depend on the map's defaultReturnValue.
    return freqDist.getLong(sample);
  }
  return 0;
}
// View method on the enclosing Object2LongOpenHashMap: clearing a view
// (key set / entry set / values) clears the backing map itself, as the
// Map view contracts require.
@Override
public void clear() {
    Object2LongOpenHashMap.this.clear();
}

/** {@inheritDoc} */
/**
 * Copies all mappings from {@code m} into this map, pre-sizing the table to
 * avoid intermediate rehashes where possible. With a load factor of .5 or
 * less the table is grown up front for m.size() elements; with a larger load
 * factor it is only tentatively grown for size() + m.size(), since overlap
 * between the key sets may make that an overestimate.
 */
@Override
public void putAll(Map<? extends K, ? extends Long> m) {
    if (f <= .5)
        ensureCapacity(m.size()); // The resulting map will be sized for m.size() elements
    else
        tryCapacity(size() + m.size()); // The resulting map will be tentatively sized for size() + m.size() elements
    super.putAll(m);
}

@SuppressWarnings("unchecked")
/**
 * Returns the total number of sample values (or bins) that have counts
 * greater than zero, i.e. the number of distinct samples recorded.
 *
 * @return the total number of bins
 */
public long getB() {
  return freqDist.size();
}
/**
 * Tells whether this distribution contains any recorded outcomes for the
 * given <code>sample</code>.
 *
 * @param sample
 *          the sample to look up
 * @return true if samples exist
 */
public boolean contains(T sample) {
  return freqDist.containsKey(sample);
}