/**
 * Decoder for the measure values stored in one HBase column of a cuboid row.
 * Caches the column family / qualifier bytes of the column and builds a
 * {@link MeasureCodec} over the measures this column holds.
 *
 * @param hbaseColumn descriptor of the HBase column to decode from
 */
public RowValueDecoder(HBaseColumnDesc hbaseColumn) {
    this.hbaseColumn = hbaseColumn;
    this.hbaseColumnFamily = Bytes.toBytes(hbaseColumn.getColumnFamilyName());
    this.hbaseColumnQualifier = Bytes.toBytes(hbaseColumn.getQualifier());
    // NOTE(review): projection bits appear to be set later by callers — not visible here.
    this.projectionIndex = new BitSet();
    this.measures = hbaseColumn.getMeasures();
    this.codec = new MeasureCodec(measures);
    // Reusable slot array for decoded values, one entry per measure.
    this.values = new Object[measures.length];
}
/**
 * Deserializes the measure values in {@code buf} into {@code result},
 * delegating entirely to the underlying codec. {@code result} is expected to
 * provide one slot per measure (see the codec's decode contract).
 *
 * @param buf    serialized measure bytes, positioned at the first measure
 * @param result output array filled in place
 */
public void decode(ByteBuffer buf, Object[] result) {
    codec.decode(buf, result);
}
/**
 * Serializes {@code values} into the internal buffer, one measure at a time,
 * recording each measure's encoded byte length in {@code measureSizes}.
 * The buffer is lazily allocated and doubled on overflow, up to
 * {@code MAX_BUFFER_SIZE}.
 *
 * @param values one value per measure; length must equal the codec's measure count
 * @return the internal buffer, positioned just past the encoded data
 *         (NOT flipped — callers read {@code [0, position)})
 * @throws BufferOverflowException if the data does not fit even at the maximum size
 */
public ByteBuffer encode(Object[] values) {
    if (buf == null) {
        setBufferSize(DEFAULT_BUFFER_SIZE);
    }
    assert values.length == codec.getMeasuresCount();
    while (true) {
        try {
            buf.clear();
            for (int i = 0, pos = 0; i < codec.getMeasuresCount(); i++) {
                codec.encode(i, values[i], buf);
                // Size of this measure = delta in buffer position.
                measureSizes[i] = buf.position() - pos;
                pos = buf.position();
            }
            return buf;
        } catch (BufferOverflowException boe) {
            if (buf.capacity() >= MAX_BUFFER_SIZE)
                throw boe;
            // Double the buffer, but never beyond the hard limit; the original
            // "capacity * 2" could overshoot MAX_BUFFER_SIZE when the current
            // capacity is more than half the limit.
            setBufferSize(Math.min(buf.capacity() * 2, MAX_BUFFER_SIZE));
        }
    }
}
}
/**
 * Builds a buffered codec over the given measure descriptors and sizes the
 * per-measure length bookkeeping array to match.
 *
 * @param measureDescs measures this codec will encode/decode, in order
 */
public BufferedMeasureCodec(MeasureDesc... measureDescs) {
    this.codec = new MeasureCodec(measureDescs);
    this.measureSizes = new int[this.codec.getMeasuresCount()];
}
/**
 * Aggregates all measure records sharing the same cuboid row key and writes
 * one combined record back out under that key.
 */
@Override
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
    aggs.reset();
    for (Text value : values) {
        // Deserialize one record's measures into the reusable 'input' array.
        codec.decode(ByteBuffer.wrap(value.getBytes(), 0, value.getLength()), input);
        if (cuboidLevel > 0) {
            // NOTE(review): 'needAggr' presumably flags which measures still need
            // aggregation above the base cuboid — confirm against its initialization.
            aggs.aggregate(input, needAggr);
        } else {
            aggs.aggregate(input);
        }
    }
    aggs.collectStates(result);
    // Re-serialize the aggregated measures and emit under the same key.
    valueBuf.clear();
    codec.encode(result, valueBuf);
    outputValue.set(valueBuf.array(), 0, valueBuf.position());
    context.write(key, outputValue);
    counter++;
    // Periodic progress logging only; no effect on output.
    if (counter % BatchConstants.COUNTER_MAX == 0) {
        logger.info("Handled " + counter + " records!");
    }
}
/**
 * Constructs a codec from data-type names (e.g. return types of measure
 * functions); parsing of the names is handled by {@code init}.
 *
 * @param dataTypes one type name per measure, in order
 */
public MeasureCodec(String... dataTypes) {
    init(dataTypes);
}
/**
 * Builds an HBase KeyValue for this column from the full measure row:
 * projects the measures this column stores, serializes them, and delegates
 * to the byte-level factory overload.
 *
 * @param key           encoded row key
 * @param measureValues all measures of the row, in cube order
 * @return the KeyValue carrying this column's serialized measures
 */
public KeyValue create(Text key, Object[] measureValues) {
    // Project the source measures into this column's layout via refIndex.
    for (int col = 0; col < colValues.length; col++) {
        colValues[col] = measureValues[refIndex[col]];
    }
    valueBuf.clear();
    codec.encode(colValues, valueBuf);
    return create(key, valueBuf.array(), 0, valueBuf.position());
}
/**
 * Builds a buffered codec from data-type names and sizes the per-measure
 * length bookkeeping array to match.
 *
 * @param dataTypes one type name per measure, in order
 */
public BufferedMeasureCodec(String... dataTypes) {
    this.codec = new MeasureCodec(dataTypes);
    this.measureSizes = new int[this.codec.getMeasuresCount()];
}
/**
 * Constructs a codec directly from parsed data types; all setup is handled
 * by {@code init}.
 *
 * @param dataTypes one data type per measure, in order
 */
public MeasureCodec(DataType... dataTypes) {
    init(dataTypes);
}
/**
 * Materializes every measure of the current row from the split input fields,
 * then serializes them all into {@code valueBuf}.
 *
 * @param splitBuffers the row's input columns, already split into byte chunks
 */
private void buildValue(SplittedBytes[] splitBuffers) {
    // Fill the reusable 'measures' array in place, one entry per measure.
    for (int idx = 0; idx < measures.length; idx++) {
        measures[idx] = buildValueOf(idx, splitBuffers);
    }
    valueBuf.clear();
    measureCodec.encode(measures, valueBuf);
}
/**
 * Task setup: loads cube metadata from the job configuration and prepares the
 * measure codec plus the per-HBase-column KeyValue creators used later.
 */
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
    CubeManager cubeMgr = CubeManager.getInstance(config);
    cubeDesc = cubeMgr.getCube(cubeName).getDescriptor();
    // Codec over the cube's full measure list; reused to decode each input record.
    inputCodec = new MeasureCodec(cubeDesc.getMeasures());
    inputMeasures = new Object[cubeDesc.getMeasures().size()];
    // One creator per (column family, column) pair, in HBase mapping order.
    keyValueCreators = Lists.newArrayList();
    for (HBaseColumnFamilyDesc cfDesc : cubeDesc.getHbaseMapping().getColumnFamily()) {
        for (HBaseColumnDesc colDesc : cfDesc.getColumns()) {
            keyValueCreators.add(new KeyValueCreator(cubeDesc, colDesc));
        }
    }
}
/**
 * Builds a buffered codec over a collection of measure descriptors and sizes
 * the per-measure length bookkeeping array to match.
 *
 * @param measureDescs measures this codec will encode/decode
 */
public BufferedMeasureCodec(Collection<MeasureDesc> measureDescs) {
    this.codec = new MeasureCodec(measureDescs);
    this.measureSizes = new int[this.codec.getMeasuresCount()];
}
/**
 * Decodes the serialized measures in {@code buffer} into the reusable
 * {@code values} array, optionally converting them in place to plain Java
 * objects afterwards.
 *
 * @param buffer              serialized measure bytes
 * @param convertToJavaObject when true, post-process the decoded values
 */
private void decode(ByteBuffer buffer, boolean convertToJavaObject) {
    codec.decode(buffer, values);
    if (!convertToJavaObject) {
        return;
    }
    // In-place conversion: source and target are the same array.
    convertToJavaObjects(values, values, convertToJavaObject);
}
/**
 * Parses each data-type name into a {@link DataType} instance and delegates
 * the remaining setup to the typed overload of {@code init}.
 *
 * @param dataTypes one type name per measure, in order
 */
private void init(String[] dataTypes) {
    final int count = dataTypes.length;
    DataType[] parsed = new DataType[count];
    for (int i = 0; i < count; i++) {
        parsed[i] = DataType.getType(dataTypes[i]);
    }
    init(parsed);
}
/**
 * Serializes {@code values} into the internal buffer, one measure at a time,
 * recording each measure's encoded byte length in {@code measureSizes}.
 * The buffer is lazily allocated and doubled on overflow, up to
 * {@code MAX_BUFFER_SIZE}.
 *
 * @param values one value per measure; length must equal the codec's measure count
 * @return the internal buffer, positioned just past the encoded data
 *         (NOT flipped — callers read {@code [0, position)})
 * @throws BufferOverflowException if the data does not fit even at the maximum size
 */
public ByteBuffer encode(Object[] values) {
    if (buf == null) {
        setBufferSize(DEFAULT_BUFFER_SIZE);
    }
    assert values.length == codec.getMeasuresCount();
    while (true) {
        try {
            buf.clear();
            for (int i = 0, pos = 0; i < codec.getMeasuresCount(); i++) {
                codec.encode(i, values[i], buf);
                // Size of this measure = delta in buffer position.
                measureSizes[i] = buf.position() - pos;
                pos = buf.position();
            }
            return buf;
        } catch (BufferOverflowException boe) {
            if (buf.capacity() >= MAX_BUFFER_SIZE)
                throw boe;
            // Double the buffer, but never beyond the hard limit; the original
            // "capacity * 2" could overshoot MAX_BUFFER_SIZE when the current
            // capacity is more than half the limit.
            setBufferSize(Math.min(buf.capacity() * 2, MAX_BUFFER_SIZE));
        }
    }
}
}
// Fragment of an enclosing method (not fully visible in this chunk):
// look up the segment being built and prepare a codec over the cube's measures.
final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
final MeasureCodec inputCodec = new MeasureCodec(cubeDesc.getMeasures());
// NOTE(review): list starts empty here — appears to be populated later, outside this view.
final List<KeyValueCreator> keyValueCreators = Lists.newArrayList();
/**
 * Builds a buffered codec from parsed data types and sizes the per-measure
 * length bookkeeping array to match.
 *
 * @param dataTypes one data type per measure, in order
 */
public BufferedMeasureCodec(DataType... dataTypes) {
    this.codec = new MeasureCodec(dataTypes);
    this.measureSizes = new int[this.codec.getMeasuresCount()];
}
/**
 * Maps one (row key, serialized measures) pair to one KeyValue per HBase
 * column family, each paired with its row-key-only writable for sorting.
 */
@Override
public Iterator<Tuple2<RowKeyWritable, KeyValue>> call(Tuple2<Text, Text> textTextTuple2) throws Exception {
    List<Tuple2<RowKeyWritable, KeyValue>> result = Lists.newArrayListWithExpectedSize(cfNum);
    Object[] inputMeasures = new Object[cubeDesc.getMeasures().size()];
    // Decode this row's serialized measures into plain objects.
    inputCodec.decode(ByteBuffer.wrap(textTextTuple2._2.getBytes(), 0, textTextTuple2._2.getLength()), inputMeasures);
    for (int i = 0; i < cfNum; i++) {
        // One KeyValue per column family; the creator projects the measures
        // this family stores.
        KeyValue outputValue = keyValueCreators.get(i).create(textTextTuple2._1, inputMeasures);
        result.add(new Tuple2<>(new RowKeyWritable(outputValue.createKeyOnly(false).getKey()), outputValue));
    }
    return result.iterator();
}
});
public MeasureCodec(MeasureDesc... measureDescs) { String[] dataTypes = new String[measureDescs.length]; for (int i = 0; i < dataTypes.length; i++) { dataTypes[i] = measureDescs[i].getFunction().getReturnType(); } init(dataTypes); }