Writer() {
    // Wrap each cell block in a ByteBuffer view for writing.
    cellBlockBuffers = new ByteBuffer[info.colBlocks.length];
    for (int i = 0; i < cellBlockBuffers.length; i++) {
        cellBlockBuffers[i] = cellBlocks[i].asBuffer();
    }
}
@Override
public void convert(ByteArray currentObject, GTRecord record) {
    record.loadColumns(currentObject.asBuffer());
}
public Object getValue(int i) {
    return info.codeSystem.decodeColumnValue(i, cols[i].asBuffer());
}
public Object decodeValue(int c) {
    ByteArray col = cols[c];
    if (col != null && col.array() != null) {
        return info.codeSystem.decodeColumnValue(c, col.asBuffer());
    }
    return null;
}
public void setValue(int i, Object value) {
    // Allocate worst-case space for the code, encode into it, then trim the
    // column to the bytes actually written.
    ByteArray space = new ByteArray(info.codeSystem.maxCodeLength(i));
    ByteBuffer buf = space.asBuffer();
    info.codeSystem.encodeColumnValue(i, value, buf);
    set(i, space);
    cols[i].reset(buf.array(), buf.arrayOffset(), buf.position());
}
/** Set the record to the codes of the specified values, reusing the given space to hold the codes. */
public GTRecord setValues(ImmutableBitSet selectedCols, ByteArray space, Object... values) {
    assert selectedCols.cardinality() == values.length;

    ByteBuffer buf = space.asBuffer();
    int pos = buf.position();
    for (int i = 0; i < selectedCols.trueBitCount(); i++) {
        int c = selectedCols.trueBitAt(i);
        info.codeSystem.encodeColumnValue(c, values[i], buf);
        int newPos = buf.position();
        cols[c].reset(buf.array(), buf.arrayOffset() + pos, newPos - pos);
        pos = newPos;
    }
    return this;
}
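// A minimal usage sketch for setValues(). The column indices and values are
// hypothetical, and it assumes a GTInfo named 'info', a GTRecord(GTInfo)
// constructor, and that ImmutableBitSet can wrap a java.util.BitSet. The space
// is sized by summing maxCodeLength() per column, as the methods above do.
BitSet bits = new BitSet();
bits.set(0);
bits.set(1);
ImmutableBitSet dims = new ImmutableBitSet(bits);
int len = info.codeSystem.maxCodeLength(0) + info.codeSystem.maxCodeLength(1);
GTRecord rec = new GTRecord(info).setValues(dims, new ByteArray(len), "2015-01-01", "FP-GTC");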
@SuppressWarnings("unchecked")
protected void aggregate(MeasureAggregator[] aggregators, GTRecord record) {
    for (int i = 0; i < aggregators.length; i++) {
        int c = metrics.trueBitAt(i);
        Object metric = codeSystem.decodeColumnValue(c, record.cols[c].asBuffer());
        aggregators[i].aggregate(metric);
    }
}
public CubeStatsResult(Path path, int precision) throws IOException {
    Configuration hadoopConf = HadoopUtil.getCurrentConfiguration();
    Option seqInput = SequenceFile.Reader.file(path);
    try (Reader reader = new SequenceFile.Reader(hadoopConf, seqInput)) {
        LongWritable key = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), hadoopConf);
        BytesWritable value = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), hadoopConf);
        // Key 0 and the negative keys carry job-level stats; positive keys are
        // cuboid IDs whose values are serialized HLL registers.
        while (reader.next(key, value)) {
            if (key.get() == 0L) {
                percentage = Bytes.toInt(value.getBytes());
            } else if (key.get() == -1) {
                mapperOverlapRatio = Bytes.toDouble(value.getBytes());
            } else if (key.get() == -2) {
                mapperNumber = Bytes.toInt(value.getBytes());
            } else if (key.get() == -3) {
                sourceRecordCount = Bytes.toLong(value.getBytes());
            } else if (key.get() > 0) {
                HLLCounter hll = new HLLCounter(precision);
                ByteArray byteArray = new ByteArray(value.getBytes());
                hll.readRegisters(byteArray.asBuffer());
                counterMap.put(key.get(), hll);
            }
        }
    }
}
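// A hypothetical follow-up to CubeStatsResult: merging the per-cuboid HLL
// counters to estimate the total distinct row count. merge() and
// getCountEstimate() are assumed from Kylin's HLLCounter API.
HLLCounter total = new HLLCounter(precision);
for (HLLCounter c : counterMap.values()) {
    total.merge(c);
}
long estimatedRows = total.getCountEstimate();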
HLLCounter hll = new HLLCounter(kylinConfig.getCubeStatsHLLPrecision());
ByteArray byteArray = new ByteArray(valueW.getBytes());
hll.readRegisters(byteArray.asBuffer());
HLLCounter hll = new HLLCounter(kylinConfig.getCubeStatsHLLPrecision());
ByteArray byteArray = new ByteArray(value.getBytes());
hll.readRegisters(byteArray.asBuffer());
public void testWriteReadUnsignedInt(int testInt, int length) {
    ByteArray ba = new ByteArray(new byte[length]);
    BytesUtil.writeUnsigned(testInt, length, ba.asBuffer());

    byte[] newBytes = new byte[length];
    System.arraycopy(ba.array(), 0, newBytes, 0, length);
    int value = BytesUtil.readUnsigned(new ByteArray(newBytes).asBuffer(), length);
    assertEquals(value, testInt);

    byte[] anOtherNewBytes = new byte[length];
    BytesUtil.writeUnsigned(testInt, anOtherNewBytes, 0, length);
    assertTrue(Arrays.equals(anOtherNewBytes, ba.array()));
}
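// A round-trip sketch using only the calls exercised by the test above. It
// assumes asBuffer() wraps the ByteArray's backing array without copying, so
// bytes written through the returned ByteBuffer are visible in the ByteArray.
ByteArray ba = new ByteArray(4);
BytesUtil.writeUnsigned(0xCAFE, 4, ba.asBuffer());
int roundTripped = BytesUtil.readUnsigned(ba.asBuffer(), 4);
// roundTripped == 0xCAFE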
HLLCounter hll = new HLLCounter(kylinConf.getCubeStatsHLLPrecision());
ByteArray byteArray = new ByteArray(value.getBytes());
hll.readRegisters(byteArray.asBuffer());
reader.next(key, value);
ByteBuffer buffer = new ByteArray((byte[]) value.get()).asBuffer();
try (DataInputStream is = new DataInputStream(new ByteBufferBackedInputStream(buffer))) {
    String dictClassName = is.readUTF();
    // ...
}
if (aggrMask[i]) {
    int col = metrics.trueBitAt(i);
    // Note: this local shadows the 'metrics' bit set read on the line above.
    Object metrics = info.codeSystem.decodeColumnValue(col, r.cols[col].asBuffer());
    aggrs[i].aggregate(metrics);
}
public static CompareTupleFilter getCompareTupleFilter(int col, Object value) {
    TblColRef colRef = gtInfo.colRef(col);
    ColumnTupleFilter colFilter = new ColumnTupleFilter(colRef);

    // Encode the constant into the column's code space so it can be compared
    // against the encoded column values of a GTRecord.
    ByteArray space = new ByteArray(gtInfo.getCodeSystem().maxCodeLength(col));
    gtInfo.getCodeSystem().encodeColumnValue(col, value, space.asBuffer());
    ConstantTupleFilter constFilter = new ConstantTupleFilter(space);

    CompareTupleFilter compareFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
    compareFilter.addChild(colFilter);
    compareFilter.addChild(constFilter);
    return compareFilter;
}
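// A hypothetical call to the helper above: an EQ filter comparing column 0 of
// gtInfo against the constant "2015-01-01" in its encoded form.
CompareTupleFilter eq = getCompareTupleFilter(0, "2015-01-01");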