@SuppressWarnings("nls") private Operator genLimitPlan(String dest, QB qb, Operator input, int offset, int limit) throws SemanticException { // A map-only job can be optimized - instead of converting it to a // map-reduce job, we can have another map // job to do the same to avoid the cost of sorting in the map-reduce phase. // A better approach would be to // write into a local file and then have a map-only job. // Add the limit operator to get the value fields RowResolver inputRR = opParseCtx.get(input).getRowResolver(); LimitDesc limitDesc = new LimitDesc(offset, limit); globalLimitCtx.setLastReduceLimitDesc(limitDesc); Operator limitMap = putOpInsertMap(OperatorFactory.getAndMakeChild( limitDesc, new RowSchema(inputRR.getColumnInfos()), input), inputRR); if (LOG.isDebugEnabled()) { LOG.debug("Created LimitOperator Plan for clause: " + dest + " row schema: " + inputRR.toString()); } return limitMap; }
@SuppressWarnings("nls") private Operator genLimitPlan(String dest, QB qb, Operator input, int offset, int limit) throws SemanticException { // A map-only job can be optimized - instead of converting it to a // map-reduce job, we can have another map // job to do the same to avoid the cost of sorting in the map-reduce phase. // A better approach would be to // write into a local file and then have a map-only job. // Add the limit operator to get the value fields RowResolver inputRR = opParseCtx.get(input).getRowResolver(); LimitDesc limitDesc = new LimitDesc(offset, limit); globalLimitCtx.setLastReduceLimitDesc(limitDesc); Operator limitMap = putOpInsertMap(OperatorFactory.getAndMakeChild( limitDesc, new RowSchema(inputRR.getColumnInfos()), input), inputRR); if (LOG.isDebugEnabled()) { LOG.debug("Created LimitOperator Plan for clause: " + dest + " row schema: " + inputRR.toString()); } return limitMap; }
// Extract LIMIT/OFFSET values from the Calcite Sort node; a missing offset
// clause (null) is treated as 0.
int limit = RexLiteral.intValue(sortRel.fetch);
int offset = sortRel.offset != null ? RexLiteral.intValue(sortRel.offset) : 0;

// Attach a LimitOperator below the current result operator, reusing its
// column layout.
LimitDesc limitDesc = new LimitDesc(offset, limit);
ArrayList<ColumnInfo> cinfoLst = createColInfos(resultOp);
resultOp = OperatorFactory.getAndMakeChild(limitDesc, new RowSchema(cinfoLst), resultOp);
private void validateVectorLimitOperator(int limit, int batchSize, int expectedBatchSize) throws HiveException { @SuppressWarnings("unchecked") FakeVectorRowBatchFromObjectIterables frboi = new FakeVectorRowBatchFromObjectIterables( batchSize, new String[] {"tinyint", "double"}, Arrays.asList(new Object[] {1, 2, 3, 4}), Arrays.asList(new Object[] {323.0, 34.5, null, 89.3})); // Get next batch VectorizedRowBatch vrb = frboi.produceNextBatch(); // Create limit desc with limit value LimitDesc ld = new LimitDesc(limit); VectorLimitDesc vectorDesc = new VectorLimitDesc(); VectorLimitOperator lo = new VectorLimitOperator( new CompilationOpContext(), ld, null, vectorDesc); lo.initialize(new Configuration(), null); // Process the batch lo.process(vrb, 0); // Verify batch size Assert.assertEquals(vrb.size, expectedBatchSize); } }
// LIMIT comes from the Sort node's fetch expression; OFFSET is optional and
// defaults to 0 when absent.
int limit = RexLiteral.intValue(sortRel.fetch);
int offset;
if (sortRel.offset == null) {
  offset = 0;
} else {
  offset = RexLiteral.intValue(sortRel.offset);
}

// Chain a LimitOperator onto the plan with the same column schema.
LimitDesc limitDesc = new LimitDesc(offset, limit);
ArrayList<ColumnInfo> cinfoLst = createColInfos(resultOp);
resultOp = OperatorFactory.getAndMakeChild(limitDesc, new RowSchema(cinfoLst), resultOp);
@SuppressWarnings("nls") private Operator genLimitPlan(String dest, QB qb, Operator input, int limit) throws SemanticException { // A map-only job can be optimized - instead of converting it to a // map-reduce job, we can have another map // job to do the same to avoid the cost of sorting in the map-reduce phase. // A better approach would be to // write into a local file and then have a map-only job. // Add the limit operator to get the value fields RowResolver inputRR = opParseCtx.get(input).getRowResolver(); Operator limitMap = putOpInsertMap(OperatorFactory.getAndMakeChild( new LimitDesc(limit), new RowSchema(inputRR.getColumnInfos()), input), inputRR); if (LOG.isDebugEnabled()) { LOG.debug("Created LimitOperator Plan for clause: " + dest + " row schema: " + inputRR.toString()); } return limitMap; }
@SuppressWarnings("nls") private Operator genLimitPlan(String dest, QB qb, Operator input, int limit) throws SemanticException { // A map-only job can be optimized - instead of converting it to a // map-reduce job, we can have another map // job to do the same to avoid the cost of sorting in the map-reduce phase. // A better approach would be to // write into a local file and then have a map-only job. // Add the limit operator to get the value fields RowResolver inputRR = opParseCtx.get(input).getRowResolver(); LimitDesc limitDesc = new LimitDesc(limit); globalLimitCtx.setLastReduceLimitDesc(limitDesc); Operator limitMap = putOpInsertMap(OperatorFactory.getAndMakeChild( limitDesc, new RowSchema(inputRR.getColumnInfos()), input), inputRR); if (LOG.isDebugEnabled()) { LOG.debug("Created LimitOperator Plan for clause: " + dest + " row schema: " + inputRR.toString()); } return limitMap; }
// Descriptor for the LIMIT clause; single-arg overload — presumably offset defaults to 0, TODO confirm against LimitDesc.
LimitDesc limitDesc = new LimitDesc(limit);