/**
 * Creates a storage-level query object for the given realization by looking up
 * its storage implementation and delegating the creation to it.
 *
 * @param realization the realization (cube, hybrid, ...) to query
 * @return a query object bound to that realization's storage
 */
public static IStorageQuery createQuery(IRealization realization) {
    // Resolve the storage engine for this realization, then ask it for a query.
    return storage(realization).createQuery(realization);
}
/**
 * In contrast to the limit in SQL concept, "limit push down" means
 * whether the limit is effective in storage level. Some queries are not possible
 * to leverage limit clause, checkout
 * {@link GTCubeStorageQueryBase#enableStorageLimitIfPossible(org.apache.kylin.cube.cuboid.Cuboid, java.util.Collection, java.util.Set, java.util.Collection, org.apache.kylin.metadata.filter.TupleFilter, java.util.Set, java.util.Collection, org.apache.kylin.storage.StorageContext)}
 *
 * @return {@code true} when the computed push-down limit is a valid storage-level limit
 */
public boolean isLimitPushDownEnabled() {
    boolean valid = isValidPushDownLimit(finalPushDownLimit);
    return valid;
}
/**
 * Adapts the storage of the given storage-aware object to the requested
 * build-engine interface.
 *
 * @param aware           object that knows which storage it uses
 * @param engineInterface the build-engine interface to adapt to
 * @return the storage adapted to {@code engineInterface}
 */
public static <T> T createEngineAdapter(IStorageAware aware, Class<T> engineInterface) {
    // Look up the storage first, then perform the adaptation in a second step.
    IStorage resolved = storage(aware);
    return resolved.adaptToBuildEngine(engineInterface);
}
/**
 * Builds one storage query per underlying realization of the hybrid instance,
 * kept in the same order as {@code hybridInstance.getRealizations()}.
 *
 * @param hybridInstance the hybrid whose member realizations will be queried
 */
public HybridStorageQuery(HybridInstance hybridInstance) {
    this.realizations = hybridInstance.getRealizations();
    this.storageEngines = new IStorageQuery[this.realizations.length];
    // One query engine per member realization, index-aligned with 'realizations'.
    for (int idx = 0; idx < this.realizations.length; idx++) {
        this.storageEngines[idx] = StorageFactory.createQuery(this.realizations[idx]);
    }
}
@Override public void run() { s[1] = StorageFactory.storage(new MockupStorageAware()); } });
/**
 * Returns the MR batch-cubing output side for the given segment by adapting
 * the segment's storage to {@link IMROutput2}.
 *
 * @param seg the cube segment being built
 * @return the batch-cubing output side of the segment's storage
 */
public static IMRBatchCubingOutputSide2 getBatchCubingOutputSide2(CubeSegment seg) {
    IMROutput2 output = StorageFactory.createEngineAdapter(seg, IMROutput2.class);
    return output.getBatchCubingOutputSide(seg);
}
private ITupleIterator queryStorage() { logger.debug("query storage..."); // bind dynamic variables olapContext.bindVariable(optiqContext); olapContext.resetSQLDigest(); SQLDigest sqlDigest = olapContext.getSQLDigest(); // query storage engine IStorageQuery storageEngine = StorageFactory.createQuery(olapContext.realization); ITupleIterator iterator = storageEngine.search(olapContext.storageContext, sqlDigest, olapContext.returnTupleInfo); if (logger.isDebugEnabled()) { logger.debug("return TupleIterator..."); } return iterator; }
public StorageResponseGTScatter(GTScanRequest scanRequest, IPartitionStreamer partitionStreamer, StorageContext context) { this.info = scanRequest.getInfo(); this.partitionStreamer = partitionStreamer; this.blocks = partitionStreamer.asByteArrayIterator(); this.columns = scanRequest.getColumns(); this.groupByDims = scanRequest.getAggrGroupBy(); this.needSorted = (context.getFinalPushDownLimit() != Integer.MAX_VALUE) || context.isStreamAggregateEnabled(); }
/**
 * Builds a logical OR filter over the first two columns, combining the
 * comparison filters produced by {@code buildFilter1} and {@code buildFilter2}.
 *
 * @param columns column references; only the first two entries are used
 * @return an OR filter with the two comparison filters as children
 */
public TupleFilter buildOrFilter(List<TblColRef> columns) {
    CompareTupleFilter left = buildFilter1(columns.get(0));
    CompareTupleFilter right = buildFilter2(columns.get(1));
    LogicalTupleFilter orFilter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.OR);
    orFilter.addChild(left);
    orFilter.addChild(right);
    return orFilter;
}
}
/**
 * Convenience overload that checks merge-sorting against this context's
 * final push-down limit.
 *
 * @return whether partition results need merge-sorting for the current limit
 */
public boolean mergeSortPartitionResults() {
    // Delegate to the static overload using the context's own limit.
    boolean result = mergeSortPartitionResults(finalPushDownLimit);
    return result;
}
/**
 * Pushes the accumulated scan-count delta into the context's processed row
 * count and resets the local accumulator to zero.
 */
private void flushScanCountDelta() {
    int delta = scanCountDelta;
    scanCountDelta = 0;
    context.increaseProcessedRowCount(delta);
}
@Override public void run() { s[0] = StorageFactory.storage(new MockupStorageAware()); } });
/**
 * Returns the MR batch-optimize output side for the given segment by adapting
 * the segment's storage to {@link IMROutput2}.
 *
 * @param seg the cube segment being optimized
 * @return the batch-optimize output side of the segment's storage
 */
public static IMROutput2.IMRBatchOptimizeOutputSide2 getBatchOptimizeOutputSide2(CubeSegment seg) {
    IMROutput2 output = StorageFactory.createEngineAdapter(seg, IMROutput2.class);
    return output.getBatchOptimizeOutputSide(seg);
}
/**
 * Builds a logical AND filter over the first two columns, combining the
 * comparison filters produced by {@code buildFilter1} and {@code buildFilter2}.
 *
 * @param columns column references; only the first two entries are used
 * @return an AND filter with the two comparison filters as children
 */
public TupleFilter buildAndFilter(List<TblColRef> columns) {
    CompareTupleFilter left = buildFilter1(columns.get(0));
    CompareTupleFilter right = buildFilter2(columns.get(1));
    LogicalTupleFilter conjunction = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
    conjunction.addChild(left);
    conjunction.addChild(right);
    return conjunction;
}
/**
 * Decides whether partition results must be merge-sorted: this is required
 * exactly when the given push-down limit is a valid storage-level limit.
 *
 * @param finalPushDownLimit the limit to be pushed down to storage
 * @return {@code true} when merge-sorting is needed for that limit
 */
public static boolean mergeSortPartitionResults(int finalPushDownLimit) {
    return isValidPushDownLimit(finalPushDownLimit);
}
/**
 * The factory must cache per storage type: two consecutive lookups from the
 * same thread must return the very same {@link IStorage} instance.
 */
@Test
public void testSingleThread() {
    IStorage first = StorageFactory.storage(new MockupStorageAware());
    IStorage second = StorageFactory.storage(new MockupStorageAware());
    Assert.assertSame(first, second);
}
/**
 * Returns the Spark batch-merge output side for the given segment by adapting
 * the segment's storage to {@link ISparkOutput}.
 *
 * @param seg the cube segment being merged
 * @return the batch-merge output side of the segment's storage
 */
public static ISparkOutput.ISparkBatchMergeOutputSide getBatchMergeOutputSide2(CubeSegment seg) {
    ISparkOutput output = StorageFactory.createEngineAdapter(seg, ISparkOutput.class);
    return output.getBatchMergeOutputSide(seg);
}
/**
 * Returns the MR batch-merge output side for the given segment by adapting
 * the segment's storage to {@link IMROutput2}.
 *
 * @param seg the cube segment being merged
 * @return the batch-merge output side of the segment's storage
 */
public static IMRBatchMergeOutputSide2 getBatchMergeOutputSide2(CubeSegment seg) {
    IMROutput2 output = StorageFactory.createEngineAdapter(seg, IMROutput2.class);
    return output.getBatchMergeOutputSide(seg);
}
/**
 * Returns the MR batch-optimize output side for the given segment, resolved
 * through the segment's storage adapter.
 *
 * @param seg the cube segment being optimized
 * @return the batch-optimize output side of the segment's storage
 */
public static IMROutput2.IMRBatchOptimizeOutputSide2 getBatchOptimizeOutputSide2(CubeSegment seg) {
    IMROutput2 adapted = StorageFactory.createEngineAdapter(seg, IMROutput2.class);
    return adapted.getBatchOptimizeOutputSide(seg);
}
/**
 * Returns the Spark batch-cubing output side for the given segment by adapting
 * the segment's storage to {@link ISparkOutput}.
 *
 * @param seg the cube segment being built
 * @return the batch-cubing output side of the segment's storage
 */
public static ISparkOutput.ISparkBatchCubingOutputSide getBatchCubingOutputSide(CubeSegment seg) {
    ISparkOutput output = StorageFactory.createEngineAdapter(seg, ISparkOutput.class);
    return output.getBatchCubingOutputSide(seg);
}