Refine search
/**
 * Serializes histogram fields that are common to both the full and sparse
 * encoding modes.
 *
 * <p>The field order below IS the wire format — the reader must consume the
 * fields in exactly this sequence: limits, bucket count, outlier mode,
 * counts, then max/min (69 bytes total: 2 doubles + int + byte + 4 longs
 * + 2 doubles).
 *
 * @param buf Destination buffer
 */
private void writeByteBufferCommonFields(ByteBuffer buf)
{
  buf.putDouble(lowerLimit);
  buf.putDouble(upperLimit);
  buf.putInt(numBuckets);
  // NOTE(review): ordinal() ties the serialized form to enum declaration
  // order — reordering OutlierHandlingMode constants would silently break
  // previously serialized histograms.
  buf.put((byte) outlierHandlingMode.ordinal());
  buf.putLong(count);
  buf.putLong(lowerOutlierCount);
  buf.putLong(upperOutlierCount);
  buf.putLong(missingValueCount);
  buf.putDouble(max);
  buf.putDouble(min);
}
/**
 * Builds the query-cache key for this aggregator: a type-id byte followed by
 * the UTF-8 field name, the outlier-mode ordinal, the bucket count, and the
 * two bucket limits.
 *
 * @return cache key bytes uniquely identifying this aggregator's configuration
 */
@Override
public byte[] getCacheKey()
{
  final byte[] fieldNameBytes = StringUtils.toUtf8(fieldName);
  // 1 type-id byte + field name + two ints (mode ordinal, bucket count)
  // + two doubles (lower/upper limit).
  final int capacity = 1 + fieldNameBytes.length + Integer.BYTES * 2 + Double.BYTES * 2;
  final ByteBuffer key = ByteBuffer.allocate(capacity);
  key.put(AggregatorUtil.FIXED_BUCKET_HIST_CACHE_TYPE_ID);
  key.put(fieldNameBytes);
  key.putInt(outlierHandlingMode.ordinal());
  key.putInt(numBuckets);
  key.putDouble(lowerLimit);
  key.putDouble(upperLimit);
  return key.array();
}
/** * Creates WKB for a 2D {x,y} point. * @param x coordinate * @param y coordinate * @return OGC WKB byte array */ private static byte[] buildWKBPoint(double x, double y) { ByteBuffer wkb = ByteBuffer.allocate(WKB_POINT_SIZE); wkb.put((byte)1); // BOM wkb.order(ByteOrder.LITTLE_ENDIAN); wkb.putInt(WKB_POINT); wkb.putDouble(x); wkb.putDouble(y); return wkb.array(); }
/**
 * Serializes this digest in its compact form: a fixed 30-byte header followed
 * by (weight, mean) float pairs for each used centroid cell. Compresses
 * first so the cell arrays are in their canonical merged state.
 *
 * @param buf destination buffer; the running byte tally is noted per field
 */
@Override
public void asSmallBytes(ByteBuffer buf)
{
  compress();
  buf.putInt(Encoding.SMALL_ENCODING.code);    // 4
  buf.putDouble(min);                          // + 8
  buf.putDouble(max);                          // + 8
  buf.putFloat((float) publicCompression);     // + 4
  buf.putShort((short) mean.length);           // + 2
  buf.putShort((short) tempMean.length);       // + 2
  buf.putShort((short) lastUsedCell);          // + 2 = 30
  // Only cells up to lastUsedCell hold live data; doubles are narrowed to
  // floats to halve the payload size.
  for (int i = 0; i < lastUsedCell; i++) {
    buf.putFloat((float) weight[i]);
    buf.putFloat((float) mean[i]);
  }
}
/**
 * Serializes this structure: scalar header fields (error bound, decay alpha,
 * landmark, min/max, node count) followed by every node in post-order.
 *
 * @return serialized bytes, sized by {@code getByteSize()}
 */
public byte[] toBytes()
{
  final ByteBuffer buffer = ByteBuffer.allocate(getByteSize());
  buffer.putDouble(maxError);
  buffer.putDouble(alpha);
  buffer.putLong(landmarkInSeconds);
  buffer.putLong(min);
  buffer.putLong(max);
  buffer.putInt(totalNodeCount);
  // Post-order traversal emits children before parents; the visitor returns
  // true to continue over the whole tree.
  postOrderTraversal(root, node -> {
    serializeNode(buffer, node);
    return true;
  });
  return buffer.array();
}
/**
 * Serializes the list as a 4-byte length prefix followed by the raw 8-byte
 * big-endian doubles.
 *
 * @param doubleArrayList values to serialize
 * @return length-prefixed byte encoding of the list
 */
@Override
public byte[] serialize(DoubleArrayList doubleArrayList)
{
  final int size = doubleArrayList.size();
  final ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES + size * Double.BYTES);
  buffer.putInt(size);
  // elements() exposes the backing array, which may be longer than size —
  // iterate only the first `size` slots.
  final double[] backing = doubleArrayList.elements();
  for (int i = 0; i < size; i++) {
    buffer.putDouble(backing[i]);
  }
  return buffer.array();
}
/**
 * Outputs a histogram as bytes using a particularly cheesy encoding:
 * verbose marker, min, max, compression, centroid count, then all means
 * followed by all counts.
 *
 * @param buf destination buffer
 */
@Override
public void asBytes(ByteBuffer buf)
{
  buf.putInt(VERBOSE_ENCODING);
  buf.putDouble(min);
  buf.putDouble(max);
  // FIX: previously wrote (float) compression(), which truncated the value
  // to float precision before widening it back into the 8-byte slot.
  // Write the full double, consistent with asSmallBytes.
  buf.putDouble(compression());
  buf.putInt(summary.size());
  // Means first, then counts, so the reader can bulk-read each array.
  for (Centroid centroid : summary) {
    buf.putDouble(centroid.mean());
  }
  for (Centroid centroid : summary) {
    buf.putInt(centroid.count());
  }
}
/**
 * Outputs a histogram as bytes using a particularly cheesy encoding:
 * verbose marker, compression, group count, then every group mean followed
 * by every group count.
 *
 * @param buf destination buffer
 */
public void asBytes(ByteBuffer buf)
{
  buf.putInt(VERBOSE_ENCODING);
  buf.putDouble(compression());
  buf.putInt(summary.size());
  // Emit the two arrays separately (means, then counts) so each can be
  // bulk-read on deserialization.
  for (Group g : summary) {
    buf.putDouble(g.mean());
  }
  for (Group g : summary) {
    buf.putInt(g.count());
  }
}
/**
 * Serializes this broker metric into a freshly allocated buffer, leaving
 * {@code headerPos} bytes of unwritten space at the front for the caller's
 * header.
 *
 * @param headerPos bytes to reserve before the payload
 * @return buffer positioned just past the serialized payload
 */
@Override
public ByteBuffer toBuffer(int headerPos)
{
  final int capacity = headerPos
      + Byte.BYTES       // version
      + Byte.BYTES       // raw metric type
      + Long.BYTES       // time
      + Integer.BYTES    // broker id
      + Double.BYTES;    // value
  final ByteBuffer buffer = ByteBuffer.allocate(capacity);
  buffer.position(headerPos);
  buffer.put(METRIC_VERSION);
  buffer.put(rawMetricType().id());
  buffer.putLong(time());
  buffer.putInt(brokerId());
  buffer.putDouble(value());
  return buffer;
}
/**
 * Serializes this topic metric into a freshly allocated buffer, leaving
 * {@code headerPos} bytes of unwritten space at the front for the caller's
 * header. The topic name is written UTF-8 encoded with a 4-byte length
 * prefix.
 *
 * @param headerPos bytes to reserve before the payload
 * @return buffer positioned just past the serialized payload
 */
public ByteBuffer toBuffer(int headerPos)
{
  final byte[] topicBytes = _topic.getBytes(StandardCharsets.UTF_8);
  final int capacity = headerPos
      + Byte.BYTES          // version
      + Byte.BYTES          // raw metric type
      + Long.BYTES          // time
      + Integer.BYTES       // broker id
      + Integer.BYTES       // topic length
      + topicBytes.length   // topic
      + Double.BYTES;       // value
  final ByteBuffer buffer = ByteBuffer.allocate(capacity);
  buffer.position(headerPos);
  buffer.put(METRIC_VERSION);
  buffer.put(rawMetricType().id());
  buffer.putLong(time());
  buffer.putInt(brokerId());
  buffer.putInt(topicBytes.length);
  buffer.put(topicBytes);
  buffer.putDouble(value());
  return buffer;
}
/**
 * Serializes this digest compactly: means are delta-encoded as floats
 * relative to the previous mean, and counts use the variable-length
 * integer encoding.
 *
 * @param buf destination buffer
 */
@Override
public void asSmallBytes(ByteBuffer buf)
{
  buf.putInt(SMALL_ENCODING);
  buf.putDouble(min);
  buf.putDouble(max);
  buf.putDouble(compression());
  buf.putInt(summary.size());
  // Successive means are close together, so their deltas fit a float
  // without meaningful precision loss.
  double previousMean = 0;
  for (Centroid c : summary) {
    buf.putFloat((float) (c.mean() - previousMean));
    previousMean = c.mean();
  }
  for (Centroid c : summary) {
    encode(buf, c.count());
  }
}
/**
 * Serializes this partition metric into a freshly allocated buffer, leaving
 * {@code headerPos} bytes of unwritten space at the front for the caller's
 * header. The topic name is written UTF-8 encoded with a 4-byte length
 * prefix, followed by the partition id.
 *
 * @param headerPos bytes to reserve before the payload
 * @return buffer positioned just past the serialized payload
 */
public ByteBuffer toBuffer(int headerPos)
{
  final byte[] topicBytes = topic().getBytes(StandardCharsets.UTF_8);
  final int capacity = headerPos
      + Byte.BYTES          // version
      + Byte.BYTES          // metric type
      + Long.BYTES          // time
      + Integer.BYTES       // broker id
      + Integer.BYTES       // topic length
      + topicBytes.length   // topic
      + Integer.BYTES       // partition
      + Double.BYTES;       // value
  final ByteBuffer buffer = ByteBuffer.allocate(capacity);
  buffer.position(headerPos);
  buffer.put(METRIC_VERSION);
  buffer.put(rawMetricType().id());
  buffer.putLong(time());
  buffer.putInt(brokerId());
  buffer.putInt(topicBytes.length);
  buffer.put(topicBytes);
  buffer.putInt(_partition);
  buffer.putDouble(value());
  return buffer;
}
ByteBuffer byteBuffer = ByteBuffer.wrap(bytes).order(PinotDataBuffer.NATIVE_ORDER); for (int dimension : starTreeRecord._dimensions) { byteBuffer.putInt(dimension); break; case DOUBLE: byteBuffer.putDouble((Double) starTreeRecord._metrics[i]); break; case BYTES: byteBuffer.putInt(metricBytes[i].length); byteBuffer.put(metricBytes[i]); break;
/**
 * Encode this histogram into a ByteBuffer
 *
 * <p>Layout: cookie, payload-length placeholder (back-patched after the
 * counts are written), normalizing index offset, significant digits,
 * lowest/highest trackable values, conversion ratio, then the counts
 * payload. Synchronized so the counts array is stable while encoding.
 *
 * @param buffer The buffer to encode into
 * @return The number of bytes written to the buffer
 */
synchronized public int encodeIntoByteBuffer(final ByteBuffer buffer)
{
    final long maxValue = getMaxValue();
    // Only counts up to (and including) the bucket of the highest recorded
    // value need to be encoded.
    final int relevantLength = countsArrayIndex(maxValue) + 1;
    if (buffer.capacity() < getNeededByteBufferCapacity(relevantLength)) {
        throw new ArrayIndexOutOfBoundsException("buffer does not have capacity for " + getNeededByteBufferCapacity(relevantLength) + " bytes");
    }
    int initialPosition = buffer.position();
    buffer.putInt(getEncodingCookie());
    buffer.putInt(0); // Placeholder for payload length in bytes.
    buffer.putInt(getNormalizingIndexOffset());
    buffer.putInt(numberOfSignificantValueDigits);
    buffer.putLong(lowestDiscernibleValue);
    buffer.putLong(highestTrackableValue);
    buffer.putDouble(getIntegerToDoubleValueConversionRatio());
    int payloadStartPosition = buffer.position();
    fillBufferFromCountsArray(buffer);
    // Back-patch the placeholder written above: initialPosition + 4 is the
    // int immediately after the cookie. Absolute putInt does not move the
    // buffer position.
    buffer.putInt(initialPosition + 4, buffer.position() - payloadStartPosition); // Record the payload length
    return buffer.position() - initialPosition;
}
/**
 * Writes a point: the CRS code followed by one double per dimension.
 *
 * @param crs coordinate reference system the point belongs to
 * @param coordinate ordinates; length must equal the CRS dimension
 */
@Override
public void writePoint( CoordinateReferenceSystem crs, double[] coordinate )
{
    checkArgument(
            coordinate.length == crs.getDimension(),
            "Dimension for %s is %d, got %d",
            crs.getName(), crs.getDimension(), coordinate.length );
    buf.putInt( crs.getCode() );
    // The length check above guarantees coordinate.length == dimension,
    // so iterating the whole array writes exactly one double per dimension.
    for ( double ordinate : coordinate )
    {
        buf.putDouble( ordinate );
    }
}
/**
 * Serializes this digest compactly: means are delta-encoded as floats
 * relative to the previous mean, and counts use the variable-length
 * integer encoding.
 *
 * @param buf destination buffer
 */
public void asSmallBytes(ByteBuffer buf)
{
  buf.putInt(SMALL_ENCODING);
  buf.putDouble(compression());
  buf.putInt(summary.size());
  // Successive means are close together, so their deltas fit a float
  // without meaningful precision loss.
  double previousMean = 0;
  for (Group g : summary) {
    buf.putFloat((float) (g.mean() - previousMean));
    previousMean = g.mean();
  }
  for (Group g : summary) {
    encode(buf, g.count());
  }
}
ByteBuffer buffer = ByteBuffer.allocate(457 + hostBytes.length); buffer.put(_deserializationVersion); buffer.putInt(entity().brokerId()); buffer.putShort((short) hostBytes.length); buffer.put(hostBytes); buffer.putDouble(metricValue(CPU_USAGE)); buffer.putDouble(metricValue(LEADER_BYTES_IN)); buffer.putDouble(metricValue(LEADER_BYTES_OUT)); buffer.putDouble(metricValue(REPLICATION_BYTES_IN_RATE)); buffer.putDouble(metricValue(REPLICATION_BYTES_OUT_RATE)); buffer.putDouble(metricValue(MESSAGE_IN_RATE)); buffer.putDouble(metricValue(BROKER_PRODUCE_REQUEST_RATE)); buffer.putDouble(metricValue(BROKER_CONSUMER_FETCH_REQUEST_RATE)); buffer.putDouble(metricValue(BROKER_FOLLOWER_FETCH_REQUEST_RATE)); buffer.putDouble(metricValue(BROKER_REQUEST_HANDLER_POOL_IDLE_PERCENT)); buffer.putDouble(metricValue(DISK_USAGE)); buffer.putDouble(metricValue(PRODUCE_RATE)); buffer.putDouble(metricValue(FETCH_RATE)); buffer.putLong(_sampleTime); buffer.putInt(metricValue(BROKER_REQUEST_QUEUE_SIZE).intValue()); buffer.putInt(metricValue(BROKER_RESPONSE_QUEUE_SIZE).intValue()); buffer.putDouble(metricValue(BROKER_PRODUCE_REQUEST_QUEUE_TIME_MS_MAX)); buffer.putDouble(metricValue(BROKER_PRODUCE_REQUEST_QUEUE_TIME_MS_MEAN)); buffer.putDouble(metricValue(BROKER_CONSUMER_FETCH_REQUEST_QUEUE_TIME_MS_MAX)); buffer.putDouble(metricValue(BROKER_CONSUMER_FETCH_REQUEST_QUEUE_TIME_MS_MEAN)); buffer.putDouble(metricValue(BROKER_FOLLOWER_FETCH_REQUEST_QUEUE_TIME_MS_MAX)); buffer.putDouble(metricValue(BROKER_FOLLOWER_FETCH_REQUEST_QUEUE_TIME_MS_MEAN)); buffer.putDouble(metricValue(BROKER_PRODUCE_TOTAL_TIME_MS_MAX)); buffer.putDouble(metricValue(BROKER_PRODUCE_TOTAL_TIME_MS_MEAN));
/**
 * Writes this object to {@link ByteBuffer}.
 *
 * <p>Layout: the scalar fields, then each collection as a 4-byte size/length
 * prefix followed by its elements. Booleans are encoded as a single 0/1 byte.
 *
 * @param buf Buffer.
 */
@SuppressWarnings("ForLoopReplaceableByForEach")
void write(ByteBuffer buf)
{
    buf.putInt(intVal);
    buf.putLong(longVal);
    buf.put((byte)(boolVal ? 1 : 0));

    // Arrays and collections are length-prefixed so the reader knows how
    // many elements to consume.
    buf.putInt(longArr.length);

    for (long l : longArr)
        buf.putLong(l);

    buf.putInt(dblArr.length);

    for (double d : dblArr)
        buf.putDouble(d);

    buf.putInt(list.size());

    // Indexed loop is deliberate (see the suppression above) — presumably
    // to avoid iterator allocation on this hot path; do not convert to
    // for-each.
    for (int i = 0; i < list.size(); i++)
        buf.putFloat(list.get(i));

    buf.putInt(map.size());

    for (Map.Entry<Integer, Character> e : map.entrySet()) {
        buf.putInt(e.getKey());
        buf.putChar(e.getValue());
    }
}