/**
 * Adds a value {@code x} with weight {@code w}, without recording per-point data.
 */
@Override
public void add(double x, int w) {
  // Delegate to the three-argument overload; a null list means "no history kept".
  final List<Double> noData = null;
  add(x, w, noData);
}
/**
 * Creates a digest with the supplied compression factor and scheduled dispatch time.
 * Sizes for the merged-centroid arrays and the insertion buffer are derived from
 * the compression setting.
 *
 * @param compression        t-digest compression factor; must be in [20, 1000]
 * @param dispatchTimeMillis wall-clock time (epoch millis) at which this digest is due for dispatch
 * @throws IllegalArgumentException if {@code compression} is outside [20, 1000]
 */
public AgentDigest(short compression, long dispatchTimeMillis) {
  // Fail fast with a diagnostic instead of Guava's bare IllegalArgumentException.
  Preconditions.checkArgument(compression >= 20D, "compression must be at least 20, got %s", compression);
  Preconditions.checkArgument(compression <= 1000D, "compression must be at most 1000, got %s", compression);
  int numCentroids = defaultSizeForCompression(compression);
  int numBuffered = bufferSizeForCompression(compression);
  this.compression = compression;
  weight = new double[numCentroids];
  mean = new double[numCentroids];
  mergeWeight = new double[numCentroids];
  mergeMean = new double[numCentroids];
  tempWeight = new double[numBuffered];
  tempMean = new double[numBuffered];
  order = new int[numBuffered];
  lastUsedCell = 0;
  this.dispatchTimeMillis = dispatchTimeMillis;
}
/**
 * Serialized size in bytes: a fixed header plus a per-centroid payload.
 * Note: {@link #centroidCount()} may trigger a compression pass.
 */
private int encodedSize() {
  int centroidBytes = centroidCount() * PER_CENTROID_SIZE;
  return FIXED_SIZE + centroidBytes;
}
/**
 * Merges a double value into the {@code AgentDigest} cached under {@code key};
 * if no digest exists for the key, one is created with the supplied compression
 * and ttlMillis settings.
 *
 * @param key histogram key
 * @param value value to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis default time-to-dispatch for new bins
 */
public void put(HistogramKey key, double value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sighting of this key: count the new bin and start a fresh digest.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    final long dispatchTime = digest.getDispatchTimeMillis();
    // Keep the earliest dispatch time on record for this key.
    keyIndex.compute(key, (k1, prior) -> prior == null ? dispatchTime : Math.min(prior, dispatchTime));
    digest.add(value);
    return digest;
  });
}
/**
 * Merges another {@code AgentDigest} into the one cached under {@code key};
 * if none exists, the supplied digest is stored as-is.
 *
 * @param key histogram key
 * @param value {@code AgentDigest} to be merged
 */
public void put(HistogramKey key, @Nonnull AgentDigest value) {
  cache.asMap().compute(key, (k, existing) -> {
    if (existing == null) {
      // New entry: adopt the incoming digest and index its dispatch time directly.
      keyIndex.put(key, value.getDispatchTimeMillis());
      return value;
    }
    final long dispatchTime = existing.getDispatchTimeMillis();
    // Keep the earliest dispatch time on record for this key.
    keyIndex.compute(key, (k1, prior) -> prior == null ? dispatchTime : Math.min(prior, dispatchTime));
    existing.add(value);
    return existing;
  });
}
/**
 * Merges a {@code Histogram} into the {@code AgentDigest} cached under {@code key};
 * if no digest exists for the key, one is created with the supplied compression
 * and ttlMillis settings.
 *
 * @param key histogram key
 * @param value a {@code Histogram} to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis default time-to-dispatch in milliseconds for new bins
 */
public void put(HistogramKey key, Histogram value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sighting of this key: count the new bin and start a fresh digest.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    final long dispatchTime = digest.getDispatchTimeMillis();
    // Keep the earliest dispatch time on record for this key.
    keyIndex.compute(key, (k1, prior) -> prior == null ? dispatchTime : Math.min(prior, dispatchTime));
    mergeHistogram(digest, value);
    return digest;
  });
}
// NOTE(review): fragment — the enclosing method and its closing braces are outside this view.
// When both digests are present, fold the smaller one into the digest that already has
// more centroids (the cheaper merge direction) and return the combined digest.
if (digestA != null && digestB != null) {
  if (digestA.centroidCount() >= digestB.centroidCount()) {
    digestA.add(digestB);
    return digestA;
  } else {
    digestB.add(digestA);
    return digestB;
/**
 * Number of centroids of this AgentDigest; compresses any buffered,
 * unmerged values first.
 */
public int centroidCount() {
  mergeNewValues();
  // lastUsedCell is an index; it only counts as a centroid when occupied.
  if (weight[lastUsedCell] == 0) {
    return lastUsedCell;
  }
  return lastUsedCell + 1;
}
// NOTE(review): fragment of a dispatch loop's failure path — the enclosing try/catch and
// loop are not fully visible from here.
logger.log(Level.SEVERE, "Failed dispatching entry " + k, e);
// Record how late (relative to its scheduled dispatch time) this entry was handled.
dispatchLagMillis.update(System.currentTimeMillis() - v.getDispatchTimeMillis());
// index.remove() presumably drops the current key via an iterator over the key index —
// TODO confirm against the enclosing loop.
index.remove();
dispatchedCount.incrementAndGet();
/**
 * Folds a single centroid (weight {@code w}, mean {@code m}) into the merge-target
 * arrays ({@code mergeWeight}/{@code mergeMean}) during a compression pass.
 *
 * @param wSoFar  cumulative weight including this centroid (the new-centroid branch
 *                subtracts {@code w} back out to locate the centroid's start)
 * @param k1      k-scale location at the start of the current merge target
 * @param w       weight of the centroid being merged
 * @param m       mean of the centroid being merged
 * @param newData per-point samples carried with this centroid; may be null
 * @return the (possibly advanced) k-scale start location for the merge target
 */
private double mergeCentroid(double wSoFar, double k1, double w, double m, List<Double> newData) {
  // k2 is the k-scale location of the cumulative weight after adding this centroid.
  double k2 = integratedLocation(wSoFar / totalWeight);
  if (k2 - k1 <= 1 || mergeWeight[lastUsedCell] == 0) {
    // merge into existing centroid: accumulate weight and update the mean incrementally
    mergeWeight[lastUsedCell] += w;
    mergeMean[lastUsedCell] = mergeMean[lastUsedCell] + (m - mergeMean[lastUsedCell]) * w / mergeWeight[lastUsedCell];
  } else {
    // create new centroid
    lastUsedCell++;
    mergeMean[lastUsedCell] = m;
    mergeWeight[lastUsedCell] = w;
    // restart k1 at the k-scale location where the new centroid begins
    k1 = integratedLocation((wSoFar - w) / totalWeight);
  }
  if (mergeData != null) {
    // grow the per-centroid sample lists to cover the current cell, then append
    while (mergeData.size() <= lastUsedCell) {
      mergeData.add(new ArrayList<>());
    }
    mergeData.get(lastUsedCell).addAll(newData);
  }
  return k1;
}
/**
 * Exposed for testing: runs the weight-invariant check against this digest's
 * merged centroid state.
 */
int checkWeights() {
  return checkWeights(weight, totalWeight, lastUsedCell);
}
@NotNull @Override public AgentDigest read(Bytes in, long size, @Nullable AgentDigest using) { Preconditions.checkArgument(size >= FIXED_SIZE); short compression = in.readShort(); if (using == null || using.compression != compression) { using = new AgentDigest(compression, in.readLong()); } else { using.dispatchTimeMillis = in.readLong(); } using.totalWeight = 0d; using.lastUsedCell = (int) ((size - FIXED_SIZE) / PER_CENTROID_SIZE); using.tempUsed = 0; using.unmergedWeight = 0D; // need explicit nulling of weight past lastUsedCell Arrays.fill(using.weight, using.lastUsedCell, using.weight.length, 0D); for (int i = 0; i < using.lastUsedCell; ++i) { float weight = in.readFloat(); using.weight[i] = weight; using.mean[i] = in.readFloat(); using.totalWeight += weight; } return using; }
// NOTE(review): fragment of the two-way merge inside mergeNewValues — the enclosing
// loops and brace structure are not fully visible; the tokens after the else-branch
// appear to belong to the post-loop drain phases of the merge.
if (tempMean[ix] <= mean[j]) {
  // buffered point sorts first: fold it into the merge target, advance buffer cursor
  wSoFar += tempWeight[ix];
  k1 = mergeCentroid(wSoFar, k1, tempWeight[ix], tempMean[ix], tempData != null ? tempData.get(ix) : null);
  i++;
} else {
  // existing centroid sorts first: fold it in, advance centroid cursor
  wSoFar += weight[j];
  k1 = mergeCentroid(wSoFar, k1, weight[j], mean[j], data != null ? data.get(j) : null);
  j++;
  // drain remaining buffered points in sorted order
  int ix = order[i];
  wSoFar += tempWeight[ix];
  k1 = mergeCentroid(wSoFar, k1, tempWeight[ix], tempMean[ix], tempData != null ? tempData.get(ix) : null);
  i++;
  // drain remaining existing centroids
  k1 = mergeCentroid(wSoFar, k1, weight[j], mean[j], data != null ? data.get(j) : null);
  j++;
/**
 * Merges a double value into the {@code AgentDigest} cached under {@code key};
 * if no digest exists for the key, one is created with the supplied compression
 * and ttlMillis settings.
 *
 * @param key histogram key
 * @param value value to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis default time-to-dispatch for new bins
 */
public void put(HistogramKey key, double value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sighting of this key: count the new bin and start a fresh digest.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    final long dispatchTime = digest.getDispatchTimeMillis();
    // Keep the earliest dispatch time on record for this key.
    keyIndex.compute(key, (k1, prior) -> prior == null ? dispatchTime : Math.min(prior, dispatchTime));
    digest.add(value);
    return digest;
  });
}
/**
 * Merges another {@code AgentDigest} into the one cached under {@code key};
 * if none exists, the supplied digest is stored as-is.
 *
 * @param key histogram key
 * @param value {@code AgentDigest} to be merged
 */
public void put(HistogramKey key, @Nonnull AgentDigest value) {
  cache.asMap().compute(key, (k, existing) -> {
    if (existing == null) {
      // New entry: adopt the incoming digest and index its dispatch time directly.
      keyIndex.put(key, value.getDispatchTimeMillis());
      return value;
    }
    final long dispatchTime = existing.getDispatchTimeMillis();
    // Keep the earliest dispatch time on record for this key.
    keyIndex.compute(key, (k1, prior) -> prior == null ? dispatchTime : Math.min(prior, dispatchTime));
    existing.add(value);
    return existing;
  });
}
/**
 * Merges a {@code Histogram} into the {@code AgentDigest} cached under {@code key};
 * if no digest exists for the key, one is created with the supplied compression
 * and ttlMillis settings.
 *
 * @param key histogram key
 * @param value a {@code Histogram} to be merged into the {@code AgentDigest}
 * @param compression default compression level for new bins
 * @param ttlMillis default time-to-dispatch in milliseconds for new bins
 */
public void put(HistogramKey key, Histogram value, short compression, long ttlMillis) {
  cache.asMap().compute(key, (k, existing) -> {
    AgentDigest digest = existing;
    if (digest == null) {
      // First sighting of this key: count the new bin and start a fresh digest.
      binCreatedCounter.inc();
      digest = new AgentDigest(compression, System.currentTimeMillis() + ttlMillis);
    }
    final long dispatchTime = digest.getDispatchTimeMillis();
    // Keep the earliest dispatch time on record for this key.
    keyIndex.compute(key, (k1, prior) -> prior == null ? dispatchTime : Math.min(prior, dispatchTime));
    mergeHistogram(digest, value);
    return digest;
  });
}
// NOTE(review): fragment — the enclosing method and its closing braces are outside this view.
// When both digests are present, fold the smaller one into the digest that already has
// more centroids (the cheaper merge direction) and return the combined digest.
if (digestA != null && digestB != null) {
  if (digestA.centroidCount() >= digestB.centroidCount()) {
    digestA.add(digestB);
    return digestA;
  } else {
    digestB.add(digestA);
    return digestB;
/**
 * Number of centroids of this AgentDigest; compresses any buffered,
 * unmerged values first.
 */
public int centroidCount() {
  mergeNewValues();
  // lastUsedCell is an index; it only counts as a centroid when occupied.
  if (weight[lastUsedCell] == 0) {
    return lastUsedCell;
  }
  return lastUsedCell + 1;
}
// NOTE(review): fragment of a dispatch loop's failure path — the enclosing try/catch and
// loop are not fully visible from here.
logger.log(Level.SEVERE, "Failed dispatching entry " + k, e);
// Record how late (relative to its scheduled dispatch time) this entry was handled.
dispatchLagMillis.update(System.currentTimeMillis() - v.getDispatchTimeMillis());
// index.remove() presumably drops the current key via an iterator over the key index —
// TODO confirm against the enclosing loop.
index.remove();
dispatchedCount.incrementAndGet();
/**
 * Folds a single centroid (weight {@code w}, mean {@code m}) into the merge-target
 * arrays ({@code mergeWeight}/{@code mergeMean}) during a compression pass.
 *
 * @param wSoFar  cumulative weight including this centroid (the new-centroid branch
 *                subtracts {@code w} back out to locate the centroid's start)
 * @param k1      k-scale location at the start of the current merge target
 * @param w       weight of the centroid being merged
 * @param m       mean of the centroid being merged
 * @param newData per-point samples carried with this centroid; may be null
 * @return the (possibly advanced) k-scale start location for the merge target
 */
private double mergeCentroid(double wSoFar, double k1, double w, double m, List<Double> newData) {
  // k2 is the k-scale location of the cumulative weight after adding this centroid.
  double k2 = integratedLocation(wSoFar / totalWeight);
  if (k2 - k1 <= 1 || mergeWeight[lastUsedCell] == 0) {
    // merge into existing centroid: accumulate weight and update the mean incrementally
    mergeWeight[lastUsedCell] += w;
    mergeMean[lastUsedCell] = mergeMean[lastUsedCell] + (m - mergeMean[lastUsedCell]) * w / mergeWeight[lastUsedCell];
  } else {
    // create new centroid
    lastUsedCell++;
    mergeMean[lastUsedCell] = m;
    mergeWeight[lastUsedCell] = w;
    // restart k1 at the k-scale location where the new centroid begins
    k1 = integratedLocation((wSoFar - w) / totalWeight);
  }
  if (mergeData != null) {
    // grow the per-centroid sample lists to cover the current cell, then append
    while (mergeData.size() <= lastUsedCell) {
      mergeData.add(new ArrayList<>());
    }
    mergeData.get(lastUsedCell).addAll(newData);
  }
  return k1;
}