SplitMerger() {
    // Reusable per-record buffers, both sized to the cube's measure count,
    // so merging does not allocate per row.
    reuseMetricsArray = new Object[cubeDesc.getMeasures().size()];
    reuseAggrs = new MeasureAggregators(cubeDesc.getMeasures());
}
Merger() {
    // Reusable per-record buffers, both sized to the cube's measure count,
    // so merging does not allocate per row.
    reuseMetricsArray = new Object[cubeDesc.getMeasures().size()];
    reuseAggrs = new MeasureAggregators(cubeDesc.getMeasures());
}
/**
 * Spills the in-memory aggregation cache into {@code spillBuffer} (the byte
 * image later persisted to {@code dumpedFile}). Layout: entry count, then per
 * entry [key length][key bytes][metrics length][metrics bytes]. The cache map
 * is released in all cases, even if encoding fails.
 *
 * @throws IOException if encoding or writing the spill data fails
 */
public void flush() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(MAX_BUFFER_SIZE);
    // BUG FIX: the original logged buffMap.size() BEFORE the null check,
    // which would NPE if flush() ran after the map had been released.
    // All access to buffMap now sits inside the guard.
    if (buffMap != null) {
        logger.info("AggregationCache(size={} est_mem_size={} threshold={}) will spill to {}",
                buffMap.size(), estMemSize, spillThreshold, dumpedFile.getAbsolutePath());
        DataOutputStream bos = new DataOutputStream(baos);
        try {
            bos.writeInt(buffMap.size());
            for (Entry<byte[], MeasureAggregator[]> entry : buffMap.entrySet()) {
                // Snapshot the aggregator states into a row and encode it.
                MeasureAggregators aggs = new MeasureAggregators(entry.getValue());
                Object[] aggrResult = new Object[metrics.trueBitCount()];
                aggs.collectStates(aggrResult);
                ByteBuffer metricsBuf = measureCodec.encode(aggrResult);
                bos.writeInt(entry.getKey().length);
                bos.write(entry.getKey());
                bos.writeInt(metricsBuf.position());
                bos.write(metricsBuf.array(), 0, metricsBuf.position());
            }
        } finally {
            buffMap = null; // release the cache even on failure
            IOUtils.closeQuietly(bos);
        }
    }
    spillBuffer = baos.toByteArray();
    IOUtils.closeQuietly(baos);
    logger.info("Accurately spill data size = {}", spillBuffer.length);
}
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    // Resolve the cube's metadata for this job; all per-record buffers are
    // sized from its measure list.
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata();
    String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME)
            .toUpperCase(Locale.ROOT);
    CubeDesc desc = CubeManager.getInstance(kylinConfig).getCube(cubeName).getDescriptor();
    List<MeasureDesc> measures = desc.getMeasures();
    codec = new BufferedMeasureCodec(measures);
    aggs = new MeasureAggregators(measures);
    input = new Object[measures.size()];
    result = new Object[measures.size()];
    outputKey = new Text();
    outputValue = new Text();
}
@Override protected void doSetup(Context context) throws IOException { super.bindCurrentConfiguration(context.getConfiguration()); cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(Locale.ROOT); // only used in Build job, not in Merge job cuboidLevel = context.getConfiguration().getInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 0); KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(); cubeDesc = CubeManager.getInstance(config).getCube(cubeName).getDescriptor(); measuresDescs = cubeDesc.getMeasures(); codec = new BufferedMeasureCodec(measuresDescs); aggs = new MeasureAggregators(measuresDescs); input = new Object[measuresDescs.size()]; result = new Object[measuresDescs.size()]; List<Integer> needAggMeasuresList = Lists.newArrayList(); for (int i = 0; i < measuresDescs.size(); i++) { if (cuboidLevel == 0) { needAggMeasuresList.add(i); } else { if (!measuresDescs.get(i).getFunction().getMeasureType().onlyAggrInBaseCuboid()) { needAggMeasuresList.add(i); } } } needAggrMeasures = new int[needAggMeasuresList.size()]; for (int i = 0; i < needAggMeasuresList.size(); i++) { needAggrMeasures[i] = needAggMeasuresList.get(i); } }
public void init() {
    // Load Kylin config from HDFS and pin it to this thread only for the
    // duration of metadata lookup; the try-with-resources unsets it after.
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset =
            KylinConfig.setAndUnsetThreadLocalConfig(kConfig)) {
        cubeDesc = CubeManager.getInstance(kConfig).getCube(cubeName).getDescriptor();
        measureNum = cubeDesc.getMeasures().size();
        aggregators = new MeasureAggregators(cubeDesc.getMeasures());
    }
}
final MeasureAggregators aggregators = new MeasureAggregators(cubeDesc.getMeasures()); final Function2 reduceFunction = new Function2<Object[], Object[], Object[]>() { @Override
SplitMerger() {
    // Reusable per-record buffers, both sized to the cube's measure count,
    // so merging does not allocate per row.
    reuseMetricsArray = new Object[cubeDesc.getMeasures().size()];
    reuseAggrs = new MeasureAggregators(cubeDesc.getMeasures());
}
Merger() {
    // Reusable per-record buffers, both sized to the cube's measure count,
    // so merging does not allocate per row.
    reuseMetricsArray = new Object[cubeDesc.getMeasures().size()];
    reuseAggrs = new MeasureAggregators(cubeDesc.getMeasures());
}
/**
 * Spills the in-memory aggregation cache into {@code spillBuffer} (the byte
 * image later persisted to {@code dumpedFile}). Layout: entry count, then per
 * entry [key length][key bytes][metrics length][metrics bytes]. The cache map
 * is released in all cases, even if encoding fails.
 *
 * @throws IOException if encoding or writing the spill data fails
 */
public void flush() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(MAX_BUFFER_SIZE);
    // BUG FIX: the original logged buffMap.size() BEFORE the null check,
    // which would NPE if flush() ran after the map had been released.
    // All access to buffMap now sits inside the guard.
    if (buffMap != null) {
        logger.info("AggregationCache(size={} est_mem_size={} threshold={}) will spill to {}",
                buffMap.size(), estMemSize, spillThreshold, dumpedFile.getAbsolutePath());
        DataOutputStream bos = new DataOutputStream(baos);
        try {
            bos.writeInt(buffMap.size());
            for (Entry<byte[], MeasureAggregator[]> entry : buffMap.entrySet()) {
                // Snapshot the aggregator states into a row and encode it.
                MeasureAggregators aggs = new MeasureAggregators(entry.getValue());
                Object[] aggrResult = new Object[metrics.trueBitCount()];
                aggs.collectStates(aggrResult);
                ByteBuffer metricsBuf = measureCodec.encode(aggrResult);
                bos.writeInt(entry.getKey().length);
                bos.write(entry.getKey());
                bos.writeInt(metricsBuf.position());
                bos.write(metricsBuf.array(), 0, metricsBuf.position());
            }
        } finally {
            buffMap = null; // release the cache even on failure
            IOUtils.closeQuietly(bos);
        }
    }
    spillBuffer = baos.toByteArray();
    IOUtils.closeQuietly(baos);
    logger.info("Accurately spill data size = {}", spillBuffer.length);
}
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    // Resolve the cube's metadata for this job; all per-record buffers are
    // sized from its measure list.
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata();
    String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME)
            .toUpperCase(Locale.ROOT);
    CubeDesc desc = CubeManager.getInstance(kylinConfig).getCube(cubeName).getDescriptor();
    List<MeasureDesc> measures = desc.getMeasures();
    codec = new BufferedMeasureCodec(measures);
    aggs = new MeasureAggregators(measures);
    input = new Object[measures.size()];
    result = new Object[measures.size()];
    outputKey = new Text();
    outputValue = new Text();
}
@Override protected void doSetup(Context context) throws IOException { super.bindCurrentConfiguration(context.getConfiguration()); cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(Locale.ROOT); // only used in Build job, not in Merge job cuboidLevel = context.getConfiguration().getInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 0); KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(); cubeDesc = CubeManager.getInstance(config).getCube(cubeName).getDescriptor(); measuresDescs = cubeDesc.getMeasures(); codec = new BufferedMeasureCodec(measuresDescs); aggs = new MeasureAggregators(measuresDescs); input = new Object[measuresDescs.size()]; result = new Object[measuresDescs.size()]; List<Integer> needAggMeasuresList = Lists.newArrayList(); for (int i = 0; i < measuresDescs.size(); i++) { if (cuboidLevel == 0) { needAggMeasuresList.add(i); } else { if (!measuresDescs.get(i).getFunction().getMeasureType().onlyAggrInBaseCuboid()) { needAggMeasuresList.add(i); } } } needAggrMeasures = new int[needAggMeasuresList.size()]; for (int i = 0; i < needAggMeasuresList.size(); i++) { needAggrMeasures[i] = needAggMeasuresList.get(i); } }
@Override protected void setup(Context context) throws IOException { super.publishConfiguration(context.getConfiguration()); cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(); // only used in Build job, not in Merge job cuboidLevel = context.getConfiguration().getInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 0); KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration()); cubeDesc = CubeManager.getInstance(config).getCube(cubeName).getDescriptor(); measuresDescs = cubeDesc.getMeasures(); codec = new MeasureCodec(measuresDescs); aggs = new MeasureAggregators(measuresDescs); input = new Object[measuresDescs.size()]; result = new Object[measuresDescs.size()]; needAggr = new boolean[measuresDescs.size()]; if (cuboidLevel > 0) { for (int i = 0; i < measuresDescs.size(); i++) { needAggr[i] = !measuresDescs.get(i).getFunction().getMeasureType().onlyAggrInBaseCuboid(); } } }
public void init() {
    // Load Kylin config from HDFS and pin it to this thread only for the
    // duration of metadata lookup; the try-with-resources unsets it after.
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset =
            KylinConfig.setAndUnsetThreadLocalConfig(kConfig)) {
        cubeDesc = CubeManager.getInstance(kConfig).getCube(cubeName).getDescriptor();
        measureNum = cubeDesc.getMeasures().size();
        aggregators = new MeasureAggregators(cubeDesc.getMeasures());
    }
}
final MeasureAggregators aggregators = new MeasureAggregators(cubeDesc.getMeasures()); final Function2 reduceFunction = new Function2<Object[], Object[], Object[]>() { @Override