public BufferedMeasureCodec(MeasureDesc... measureDescs) {
    this.codec = new MeasureCodec(measureDescs);
    this.measureSizes = new int[codec.getMeasuresCount()];
}

public BufferedMeasureCodec(String... dataTypes) {
    this.codec = new MeasureCodec(dataTypes);
    this.measureSizes = new int[codec.getMeasuresCount()];
}

public BufferedMeasureCodec(Collection<MeasureDesc> measureDescs) {
    this.codec = new MeasureCodec(measureDescs);
    this.measureSizes = new int[codec.getMeasuresCount()];
}

public BufferedMeasureCodec(DataType... dataTypes) {
    this.codec = new MeasureCodec(dataTypes);
    this.measureSizes = new int[codec.getMeasuresCount()];
}
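All four constructors delegate to a wrapped MeasureCodec and size a per-measure length array from getMeasuresCount(). A minimal round-trip sketch follows; the encode(Object[]) return type and a delegating decode(ByteBuffer, Object[]) are assumptions inferred from how the codec is used in the snippets below, not verified signatures.

import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.Arrays;

// Hedged sketch: assumes encode(Object[]) returns a ByteBuffer positioned at the
// end of the written bytes, and decode(ByteBuffer, Object[]) fills the array in
// measure order.
BufferedMeasureCodec codec = new BufferedMeasureCodec("bigint", "decimal(19,4)");
Object[] row = new Object[] { 42L, new BigDecimal("3.1400") };

ByteBuffer buf = codec.encode(row);                        // internal buffer, grown on overflow
byte[] bytes = Arrays.copyOf(buf.array(), buf.position()); // copy out the encoded measures

Object[] decoded = new Object[2];                          // one slot per declared data type
codec.decode(ByteBuffer.wrap(bytes), decoded);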
public RowValueDecoder(HBaseColumnDesc hbaseColumn) {
    this.hbaseColumn = hbaseColumn;
    this.hbaseColumnFamily = Bytes.toBytes(hbaseColumn.getColumnFamilyName());
    this.hbaseColumnQualifier = Bytes.toBytes(hbaseColumn.getQualifier());
    this.projectionIndex = new BitSet();
    this.measures = hbaseColumn.getMeasures();
    this.codec = new MeasureCodec(measures);
    this.values = new Object[measures.length];
}
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
    CubeManager cubeMgr = CubeManager.getInstance(config);
    cubeDesc = cubeMgr.getCube(cubeName).getDescriptor();
    inputCodec = new MeasureCodec(cubeDesc.getMeasures());
    inputMeasures = new Object[cubeDesc.getMeasures().size()];
    keyValueCreators = Lists.newArrayList();
    // one KeyValueCreator per HBase (column family, qualifier) pair
    for (HBaseColumnFamilyDesc cfDesc : cubeDesc.getHbaseMapping().getColumnFamily()) {
        for (HBaseColumnDesc colDesc : cfDesc.getColumns()) {
            keyValueCreators.add(new KeyValueCreator(cubeDesc, colDesc));
        }
    }
}
final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
final MeasureCodec inputCodec = new MeasureCodec(cubeDesc.getMeasures());
final List<KeyValueCreator> keyValueCreators = Lists.newArrayList();
@Before
public void setup() throws Exception {
    this.createTestMetadata();
    // hack for distributed cache
    FileUtils.deleteDirectory(new File("../job/meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl().toString()), new File("../job/meta"));
    CubeDesc desc = CubeManager.getInstance(getTestConfig()).getCube(cubeName).getDescriptor();
    codec = new MeasureCodec(desc.getMeasures());
}
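With the codec built from the cube's measure list, a test body can round-trip one row of values. An illustrative sketch, assuming the encode(Object[], ByteBuffer) and decode(ByteBuffer, Object[]) overloads seen in the reducer snippet below; the sample value is hypothetical.

// Illustrative round-trip, not taken from the source test.
Object[] in = new Object[desc.getMeasures().size()];
in[0] = 1000L; // hypothetical: assumes the first measure is a bigint count;
               // remaining slots must be filled to match each measure's type
ByteBuffer buf = ByteBuffer.allocate(65536);
codec.encode(in, buf);
buf.flip(); // switch to reading the bytes just written
Object[] out = new Object[in.length];
codec.decode(buf, out);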
public RowValueDecoder(HBaseColumnDesc hbaseColumn) {
    this.hbaseColumn = hbaseColumn;
    this.projectionIndex = new BitSet();
    this.names = new ArrayList<String>();
    this.measures = hbaseColumn.getMeasures();
    for (MeasureDesc measure : measures) {
        this.names.add(measure.getFunction().getRewriteFieldName());
    }
    this.codec = new MeasureCodec(measures);
    this.values = new Object[measures.length];
}

public RowValueDecoder(RowValueDecoder rowValueDecoder) {
    this.hbaseColumn = rowValueDecoder.getHBaseColumn();
    this.projectionIndex = rowValueDecoder.getProjectionIndex();
    this.names = new ArrayList<String>();
    this.measures = hbaseColumn.getMeasures();
    for (MeasureDesc measure : measures) {
        this.names.add(measure.getFunction().getRewriteFieldName());
    }
    this.codec = new MeasureCodec(measures);
    this.values = new Object[measures.length];
}
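Both constructors precompute the rewrite field names alongside the codec. Downstream use is roughly the sketch below; setIndex, decode and getValues are assumed method names, inferred from the projectionIndex and values fields above rather than a confirmed API.

// Hypothetical usage sketch.
RowValueDecoder decoder = new RowValueDecoder(hbaseColumn); // hbaseColumn: an HBaseColumnDesc from cube metadata
decoder.setIndex(0);                   // project the first measure of this column family
decoder.decode(rawCellValue);          // rawCellValue: encoded bytes of one HBase cell value
Object[] values = decoder.getValues(); // decoded measures, in column order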
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    CubeManager cubeMgr = CubeManager.getInstance(config);
    cubeDesc = cubeMgr.getCube(cubeName).getDescriptor();
    inputCodec = new MeasureCodec(cubeDesc.getMeasures());
    inputMeasures = new Object[cubeDesc.getMeasures().size()];
    keyValueCreators = Lists.newArrayList();
    for (HBaseColumnFamilyDesc cfDesc : cubeDesc.getHBaseMapping().getColumnFamily()) {
        for (HBaseColumnDesc colDesc : cfDesc.getColumns()) {
            keyValueCreators.add(new KeyValueCreator(cubeDesc, colDesc));
        }
    }
}
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    // only used in Build job, not in Merge job
    cuboidLevel = context.getConfiguration().getInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 0);
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    cubeDesc = CubeManager.getInstance(config).getCube(cubeName).getDescriptor();
    measuresDescs = cubeDesc.getMeasures();
    codec = new MeasureCodec(measuresDescs);
    aggs = new MeasureAggregators(measuresDescs);
    input = new Object[measuresDescs.size()];
    result = new Object[measuresDescs.size()];
    needAggr = new boolean[measuresDescs.size()];
    if (cuboidLevel > 0) {
        // above the base cuboid, skip measures that may only be aggregated once
        for (int i = 0; i < measuresDescs.size(); i++) {
            needAggr[i] = !measuresDescs.get(i).getFunction().getMeasureType().onlyAggrInBaseCuboid();
        }
    }
}
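The matching reduce body decodes each incoming value, feeds it to the aggregators (honoring the needAggr mask above the base cuboid), and re-encodes the collected state. A hedged sketch; reset, aggregate(Object[], boolean[]) and collectStates(Object[]) are assumed MeasureAggregators methods, and valueBuf is a hypothetical pre-allocated ByteBuffer.

// Hedged reduce-body sketch.
aggs.reset();
for (Text value : values) {
    codec.decode(ByteBuffer.wrap(value.getBytes(), 0, value.getLength()), input);
    if (cuboidLevel > 0) {
        aggs.aggregate(input, needAggr); // skip measures flagged onlyAggrInBaseCuboid
    } else {
        aggs.aggregate(input);
    }
}
aggs.collectStates(result);
valueBuf.clear();
codec.encode(result, valueBuf);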
public KeyValueCreator(CubeDesc cubeDesc, HBaseColumnDesc colDesc) {
    cfBytes = Bytes.toBytes(colDesc.getColumnFamilyName());
    qBytes = Bytes.toBytes(colDesc.getQualifier());
    timestamp = 0; // use 0 for timestamp

    List<MeasureDesc> measures = cubeDesc.getMeasures();
    String[] measureNames = getMeasureNames(cubeDesc);
    String[] refs = colDesc.getMeasureRefs();

    refIndex = new int[refs.length];
    refMeasures = new MeasureDesc[refs.length];
    for (int i = 0; i < refs.length; i++) {
        refIndex[i] = indexOf(measureNames, refs[i]);
        refMeasures[i] = measures.get(refIndex[i]);
    }

    codec = new MeasureCodec(refMeasures);
    colValues = new Object[refs.length];

    // full copy iff this column carries every measure, in declaration order
    isFullCopy = true;
    for (int i = 0; i < measures.size(); i++) {
        if (refIndex.length <= i || refIndex[i] != i)
            isFullCopy = false;
    }
}
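A creator built this way is then invoked once per row and column family to emit an HBase KeyValue; the create(Text, Object[]) call is an assumption inferred from the mapper snippets above, not a confirmed signature.

// Hedged sketch: assumes create(Text key, Object[] measureValues) encodes the
// referenced measures with the codec above and wraps them in a KeyValue.
for (KeyValueCreator creator : keyValueCreators) {
    KeyValue kv = creator.create(rowKey, inputMeasures); // rowKey: the cuboid row key as Text
    context.write(outputKey, kv);                        // outputKey: hypothetical ImmutableBytesWritable
}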
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());

    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME);
    intermediateTableRowDelimiter = context.getConfiguration().get(BatchConstants.CFG_CUBE_INTERMEDIATE_TABLE_ROW_DELIMITER,
            Character.toString(BatchConstants.INTERMEDIATE_TABLE_ROW_DELIMITER));
    if (Bytes.toBytes(intermediateTableRowDelimiter).length > 1) {
        throw new RuntimeException("Expected delimiter byte length is 1, but got " + Bytes.toBytes(intermediateTableRowDelimiter).length);
    }
    byteRowDelimiter = Bytes.toBytes(intermediateTableRowDelimiter)[0];

    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    cube = CubeManager.getInstance(config).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW);

    long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
    baseCuboid = Cuboid.findById(cubeDesc, baseCuboidId);

    intermediateTableDesc = new CubeJoinedFlatTableDesc(cube.getDescriptor(), cubeSegment);
    bytesSplitter = new BytesSplitter(200, 16384);
    rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid);

    measureCodec = new MeasureCodec(cubeDesc.getMeasures());
    measures = new Object[cubeDesc.getMeasures().size()];

    int colCount = cubeDesc.getRowkey().getRowKeyColumns().length;
    keyBytesBuf = new byte[colCount][];

    aggrIngesters = MeasureIngester.create(cubeDesc.getMeasures());
    dictionaryMap = cubeSegment.buildDictionaryMap();

    initNullBytes();
}
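Everything wired here is consumed per input row on the map side: the splitter tokenizes a flat-table line, the rowkey encoder builds the base-cuboid key, the ingesters turn raw columns into measure objects, and the codec serializes them. A rough sketch under those assumptions; rawValues and valueBuf are hypothetical locals, and the split/encode/valueOf signatures are inferred, not confirmed.

// Hedged map-body sketch.
bytesSplitter.split(bytes, bytesLength, byteRowDelimiter);   // tokenize one flat-table row
byte[] rowKey = rowKeyEncoder.encode(keyBytesBuf);           // keyBytesBuf: dictionary-encoded rowkey columns
for (int i = 0; i < measures.length; i++) {
    // rawValues (hypothetical): the String[] parameter columns for measure i
    measures[i] = aggrIngesters[i].valueOf(rawValues, cubeDesc.getMeasures().get(i), dictionaryMap);
}
valueBuf.clear();
measureCodec.encode(measures, valueBuf);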