// Row-number over a partially pre-grouped partition must produce no output rows
// when every input page is empty.
@Test
public void testPartiallyPreGroupedPartitionWithEmptyInput()
{
    // Two pageBreak() calls with no rows: the pre-grouped path must tolerate empty pages.
    List<Page> pages = rowPagesBuilder(BIGINT, VARCHAR, BIGINT, VARCHAR)
            .pageBreak()
            .pageBreak()
            .build();
    WindowOperatorFactory factory = createFactoryUnbounded(
            ImmutableList.of(BIGINT, VARCHAR, BIGINT, VARCHAR),
            Ints.asList(0, 1, 2, 3),
            ROW_NUMBER,
            Ints.asList(0, 1),
            Ints.asList(1),
            Ints.asList(3),
            ImmutableList.of(SortOrder.ASC_NULLS_LAST),
            0);
    // Empty input yields an empty result: the four input columns plus the row-number column.
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), BIGINT, VARCHAR, BIGINT, VARCHAR, BIGINT)
            .build();
    assertOperatorEquals(factory, driverContext, pages, expectedResult);
}
// NOTE(review): this fragment appears truncated — the method body opens without '{'
// and is cut off after the hash-enabled channel/type reassignments; the remainder of
// testMultiChannel (page building, join setup, assertions) is not visible in this chunk.
// Left byte-identical; restore the full method from the original file before editing.
@Test(dataProvider = "hashEnabledValues") public void testMultiChannel(boolean hashEnabled) List<Type> types = ImmutableList.of(VARCHAR, VARCHAR, BIGINT, DOUBLE, BOOLEAN, VARCHAR); List<Type> joinTypes = ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN); List<Type> outputTypes = ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN, VARCHAR); List<Integer> joinChannels = Ints.asList(1, 2, 3, 4); List<Integer> outputChannels = Ints.asList(1, 2, 3, 4, 0); types = ImmutableList.of(VARCHAR, VARCHAR, BIGINT, DOUBLE, BOOLEAN, VARCHAR, BIGINT); outputTypes = ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN, VARCHAR, BIGINT); outputChannels = Ints.asList(1, 2, 3, 4, 0, 6);
// Building a lookup source over ten sequence rows must blow past the tiny
// 100-byte per-node user memory limit and raise ExceededMemoryLimitException.
@Test(expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node user memory limit of.*", dataProvider = "testMemoryLimitProvider")
public void testMemoryLimit(boolean parallelBuild, boolean buildHashEnabled)
{
    // Task context capped at 100 bytes so the build side cannot possibly fit.
    TaskContext memoryLimitedContext = TestingTaskContext.createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(100, BYTE));
    RowPagesBuilder buildInput = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
            .addSequencePage(10, 20, 30, 40);
    BuildSideSetup setup = setupBuildSide(parallelBuild, memoryLimitedContext, Ints.asList(0), buildInput, Optional.empty(), false, SINGLE_STREAM_SPILLER_FACTORY);
    instantiateBuildDrivers(setup, memoryLimitedContext);
    // Consuming the build side is what trips the limit.
    buildLookupSource(setup);
}
// NOTE(review): this fragment appears truncated — the method body opens without '{',
// the ImmutableList.of(...) for the channel blocks is cut off mid-argument-list, and
// the hashEnabled branch is left unfinished. Left byte-identical; restore the full
// method from the original file before editing.
@Test(dataProvider = "hashEnabledValues") public void testSingleChannel(boolean hashEnabled) List<Type> joinTypes = ImmutableList.of(VARCHAR); List<Integer> joinChannels = Ints.asList(0); List<Block> channel = ImmutableList.of( BlockAssertions.createStringSequenceBlock(10, 20), BlockAssertions.createStringSequenceBlock(20, 30), List<List<Block>> channels = ImmutableList.of(channel); if (hashEnabled) { ImmutableList.Builder<Block> hashChannelBuilder = ImmutableList.builder();
// Distinct-limit where the limit (3) lands exactly on the boundary between the
// distinct values contributed by the first and second input pages.
@Test(dataProvider = "hashEnabledValues")
public void testDistinctLimitWithPageAlignment(boolean hashEnabled)
{
    RowPagesBuilder pagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
    // Pages carry 1,2,3 and 2,3,4 — distinct values are 1..4; the limit keeps 1..3.
    List<Page> pages = pagesBuilder
            .addSequencePage(3, 1)
            .addSequencePage(3, 2)
            .build();
    OperatorFactory factory = new DistinctLimitOperator.DistinctLimitOperatorFactory(0, new PlanNodeId("test"), pagesBuilder.getTypes(), Ints.asList(0), 3, pagesBuilder.getHashChannel(), joinCompiler);
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), BIGINT)
            .row(1L)
            .row(2L)
            .row(3L)
            .build();
    assertOperatorEquals(factory, driverContext, pages, expectedResult, hashEnabled, ImmutableList.of(1));
}
// NOTE(review): this fragment appears truncated — the method body opens without '{'
// and the list of assertFunction cases is cut off (no closing brace visible). Left
// byte-identical; restore the full method from the original file before editing.
@Test public void testArrayToArrayConcat() assertFunction("ARRAY [1, 2] || ARRAY[3, 4]", new ArrayType(INTEGER), ImmutableList.of(1, 2, 3, 4)); assertFunction("ARRAY [1, 2] || ARRAY[3, BIGINT '4']", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L, 4L)); assertFunction("ARRAY [1, 2] || ARRAY[3, 40000000000]", new ArrayType(BIGINT), ImmutableList.of(1L, 2L, 3L, 40000000000L)); assertFunction("ARRAY [NULL] || ARRAY[NULL]", new ArrayType(UNKNOWN), Lists.newArrayList(null, null)); assertFunction("ARRAY ['puppies'] || ARRAY ['kittens']", new ArrayType(createVarcharType(7)), ImmutableList.of("puppies", "kittens")); assertFunction("ARRAY [ARRAY[ARRAY[1]]] || ARRAY [ARRAY[ARRAY[2]]]", new ArrayType(new ArrayType(new ArrayType(INTEGER))), asList(singletonList(Ints.asList(1)), singletonList(Ints.asList(2)))); assertFunction("ARRAY [] || ARRAY []", new ArrayType(UNKNOWN), ImmutableList.of()); assertFunction("ARRAY [TRUE] || ARRAY [FALSE] || ARRAY [TRUE]", new ArrayType(BOOLEAN), ImmutableList.of(true, false, true));
// Materializing the window input under a 10-byte per-node user memory limit
// must raise ExceededMemoryLimitException.
@Test(expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node user memory limit of 10B.*")
public void testMemoryLimit()
{
    List<Page> pages = rowPagesBuilder(BIGINT, DOUBLE)
            .row(1L, 0.1)
            .row(2L, 0.2)
            .pageBreak()
            .row(-1L, -0.1)
            .row(4L, 0.4)
            .build();
    // A dedicated driver context capped at 10 bytes — the shared field would not enforce the limit.
    DriverContext limitedDriverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(10, Unit.BYTE))
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    WindowOperatorFactory factory = createFactoryUnbounded(
            ImmutableList.of(BIGINT, DOUBLE),
            Ints.asList(1),
            ROW_NUMBER,
            Ints.asList(),
            Ints.asList(0),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}));
    toPages(factory, limitedDriverContext, pages);
}
// copyPositions over an interleaved (VARCHAR, BIGINT) block must preserve the
// selected values, both on the live builder and on built blocks.
@Test
public void testCopyPositions()
{
    Slice[] values = createExpectedValues();
    InterleavedBlockBuilder builder = createBlockBuilderWithValues(values, ImmutableList.of(VARCHAR, BIGINT));
    // Check the builder itself, then two independently built blocks with different position sets.
    assertBlockFilteredPositions(values, builder, Ints.asList(0, 1, 4, 5, 6, 7, 14, 15));
    assertBlockFilteredPositions(values, builder.build(), Ints.asList(0, 1, 4, 5, 6, 7, 14, 15));
    assertBlockFilteredPositions(values, builder.build(), Ints.asList(2, 3, 4, 5, 8, 9, 12, 13));
}
// NOTE(review): this fragment appears truncated — the method body opens without '{'
// and the innerJoin(...) argument list is cut off after "Optional.empty()," before the
// operator is used; the tail assertions are not visible in this chunk. Left
// byte-identical; restore the full method from the original file before editing.
@Test(dataProvider = "hashJoinTestValues") public void testInnerJoinWithEmptyLookupSource(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled) List<Type> buildTypes = ImmutableList.of(VARCHAR); RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), buildTypes); BuildSideSetup buildSideSetup = setupBuildSide(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.empty(), false, SINGLE_STREAM_SPILLER_FACTORY); JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactoryManager = buildSideSetup.getLookupSourceFactoryManager(); List<Type> probeTypes = ImmutableList.of(VARCHAR); RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), probeTypes); OperatorFactory joinOperatorFactory = new LookupJoinOperators().innerJoin( 0, lookupSourceFactoryManager, probePages.getTypes(), Ints.asList(0), getHashChannelAsInt(probePages), Optional.empty(), Operator operator = joinOperatorFactory.createOperator(taskContext.addPipelineContext(0, true, true, false).addDriverContext()); List<Page> pages = probePages.row("test").build(); operator.addInput(pages.get(0)); Page outputPage = operator.getOutput();
// The hash build over a ten-row sequence page cannot fit in a 100-byte
// per-node user memory limit; the build must fail with ExceededMemoryLimitException.
@Test(expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node user memory limit of.*", dataProvider = "testMemoryLimitProvider")
public void testMemoryLimit(boolean parallelBuild, boolean buildHashEnabled)
{
    // 100-byte cap guarantees the build side overflows immediately.
    TaskContext tinyMemoryTaskContext = TestingTaskContext.createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(100, BYTE));
    RowPagesBuilder buildSidePages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
            .addSequencePage(10, 20, 30, 40);
    BuildSideSetup buildSide = setupBuildSide(parallelBuild, tinyMemoryTaskContext, Ints.asList(0), buildSidePages, Optional.empty(), false, SINGLE_STREAM_SPILLER_FACTORY);
    instantiateBuildDrivers(buildSide, tinyMemoryTaskContext);
    // The limit trips when the lookup source is actually built.
    buildLookupSource(buildSide);
}
// NOTE(review): this fragment appears truncated — the method body opens without '{'
// and the lookupOuterJoin(...) argument list is cut off after "Optional.empty()," before
// the operator is used; the tail assertions are not visible in this chunk. Left
// byte-identical; restore the full method from the original file before editing.
@Test(dataProvider = "hashJoinTestValues") public void testLookupOuterJoinWithEmptyLookupSource(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled) List<Type> buildTypes = ImmutableList.of(VARCHAR); RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), buildTypes); BuildSideSetup buildSideSetup = setupBuildSide(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.empty(), false, SINGLE_STREAM_SPILLER_FACTORY); JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactoryManager = buildSideSetup.getLookupSourceFactoryManager(); List<Type> probeTypes = ImmutableList.of(VARCHAR); RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), probeTypes); OperatorFactory joinOperatorFactory = new LookupJoinOperators().lookupOuterJoin( 0, lookupSourceFactoryManager, probePages.getTypes(), Ints.asList(0), getHashChannelAsInt(probePages), Optional.empty(), Operator operator = joinOperatorFactory.createOperator(taskContext.addPipelineContext(0, true, true, false).addDriverContext()); List<Page> pages = probePages.row("test").build(); operator.addInput(pages.get(0)); Page outputPage = operator.getOutput();
// Verifies the per-node user memory limit is enforced during lookup-source
// construction: ten sequence rows cannot fit in 100 bytes.
@Test(expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node user memory limit of.*", dataProvider = "testMemoryLimitProvider")
public void testMemoryLimit(boolean parallelBuild, boolean buildHashEnabled)
{
    // Deliberately tiny (100 B) memory budget for the whole task.
    TaskContext cappedTaskContext = TestingTaskContext.createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(100, BYTE));
    RowPagesBuilder hashBuildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
            .addSequencePage(10, 20, 30, 40);
    BuildSideSetup hashBuildSetup = setupBuildSide(parallelBuild, cappedTaskContext, Ints.asList(0), hashBuildPages, Optional.empty(), false, SINGLE_STREAM_SPILLER_FACTORY);
    instantiateBuildDrivers(hashBuildSetup, cappedTaskContext);
    // Expected to throw while materializing the lookup source.
    buildLookupSource(hashBuildSetup);
}
// Distinct-limit where the input holds fewer distinct values (four) than the
// limit (five): every distinct value must be emitted.
@Test(dataProvider = "hashEnabledValues")
public void testDistinctLimitValuesLessThanLimit(boolean hashEnabled)
{
    RowPagesBuilder pagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
    // Pages carry 1,2,3 and 2,3,4 — only four distinct values against a limit of 5.
    List<Page> pages = pagesBuilder
            .addSequencePage(3, 1)
            .addSequencePage(3, 2)
            .build();
    OperatorFactory factory = new DistinctLimitOperator.DistinctLimitOperatorFactory(0, new PlanNodeId("test"), pagesBuilder.getTypes(), Ints.asList(0), 5, pagesBuilder.getHashChannel(), joinCompiler);
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), BIGINT)
            .row(1L)
            .row(2L)
            .row(3L)
            .row(4L)
            .build();
    assertOperatorEquals(factory, driverContext, pages, expectedResult, hashEnabled, ImmutableList.of(1));
}
// The distinct limit (3) coincides with the end of the first page's distinct
// contribution; the operator must stop emitting exactly there.
@Test(dataProvider = "hashEnabledValues")
public void testDistinctLimitWithPageAlignment(boolean hashEnabled)
{
    RowPagesBuilder inputBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
    // First page: 1,2,3; second page: 2,3,4. Limit of 3 keeps 1,2,3 only.
    List<Page> inputPages = inputBuilder
            .addSequencePage(3, 1)
            .addSequencePage(3, 2)
            .build();
    OperatorFactory distinctLimitFactory = new DistinctLimitOperator.DistinctLimitOperatorFactory(0, new PlanNodeId("test"), inputBuilder.getTypes(), Ints.asList(0), 3, inputBuilder.getHashChannel(), joinCompiler);
    MaterializedResult expectedRows = resultBuilder(driverContext.getSession(), BIGINT)
            .row(1L)
            .row(2L)
            .row(3L)
            .build();
    assertOperatorEquals(distinctLimitFactory, driverContext, inputPages, expectedRows, hashEnabled, ImmutableList.of(1));
}
// Mark-distinct over the sequence 0..99 repeated twice: the first occurrence of
// each value is marked true, the repeat false.
@Test(dataProvider = "hashEnabledValues")
public void testMarkDistinct(boolean hashEnabled)
{
    RowPagesBuilder pagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
    List<Page> pages = pagesBuilder
            .addSequencePage(100, 0)
            .addSequencePage(100, 0)
            .build();
    OperatorFactory factory = new MarkDistinctOperatorFactory(0, new PlanNodeId("test"), pagesBuilder.getTypes(), ImmutableList.of(0), pagesBuilder.getHashChannel(), joinCompiler);
    // One (value, true) and one (value, false) row per distinct value.
    MaterializedResult.Builder expectedBuilder = resultBuilder(driverContext.getSession(), BIGINT, BOOLEAN);
    for (long value = 0; value < 100; value++) {
        expectedBuilder.row(value, true);
        expectedBuilder.row(value, false);
    }
    OperatorAssertion.assertOperatorEqualsIgnoreOrder(factory, driverContext, pages, expectedBuilder.build(), hashEnabled, Optional.of(1));
}
// Duplicated sequence input (0..99 twice) exercises mark-distinct: the marker
// column is true on first sight of a value and false on the repeat.
@Test(dataProvider = "hashEnabledValues")
public void testMarkDistinct(boolean hashEnabled)
{
    RowPagesBuilder sourceBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
    List<Page> sourcePages = sourceBuilder
            .addSequencePage(100, 0)
            .addSequencePage(100, 0)
            .build();
    OperatorFactory markDistinctFactory = new MarkDistinctOperatorFactory(0, new PlanNodeId("test"), sourceBuilder.getTypes(), ImmutableList.of(0), sourceBuilder.getHashChannel(), joinCompiler);
    MaterializedResult.Builder expectedRows = resultBuilder(driverContext.getSession(), BIGINT, BOOLEAN);
    // Each of the 100 values appears twice: once marked distinct, once not.
    for (long i = 0; i < 100; i++) {
        expectedRows.row(i, true);
        expectedRows.row(i, false);
    }
    OperatorAssertion.assertOperatorEqualsIgnoreOrder(markDistinctFactory, driverContext, sourcePages, expectedRows.build(), hashEnabled, Optional.of(1));
}
// Row-number partitioned by the VARCHAR channel and ordered by the BIGINT
// channel: numbering restarts at 1 within each partition.
@Test
public void testRowNumberPartition()
{
    List<Page> pages = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN)
            .row("b", -1L, -0.1, true)
            .row("a", 2L, 0.3, false)
            .row("a", 4L, 0.2, true)
            .pageBreak()
            .row("b", 5L, 0.4, false)
            .row("a", 6L, 0.1, true)
            .build();
    WindowOperatorFactory factory = createFactoryUnbounded(
            ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN),
            Ints.asList(0, 1, 2, 3),
            ROW_NUMBER,
            Ints.asList(0),
            Ints.asList(1),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}));
    // Partition "a" gets row numbers 1..3, partition "b" gets 1..2.
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, DOUBLE, BOOLEAN, BIGINT)
            .row("a", 2L, 0.3, false, 1L)
            .row("a", 4L, 0.2, true, 2L)
            .row("a", 6L, 0.1, true, 3L)
            .row("b", -1L, -0.1, true, 1L)
            .row("b", 5L, 0.4, false, 2L)
            .build();
    assertOperatorEquals(factory, driverContext, pages, expectedResult);
}
// Unpartitioned row-number ordered ascending on the BIGINT channel; output
// channels are reordered to (DOUBLE, BIGINT) before the row-number column.
@Test
public void testRowNumber()
{
    List<Page> pages = rowPagesBuilder(BIGINT, DOUBLE)
            .row(2L, 0.3)
            .row(4L, 0.2)
            .row(6L, 0.1)
            .pageBreak()
            .row(-1L, -0.1)
            .row(5L, 0.4)
            .build();
    WindowOperatorFactory factory = createFactoryUnbounded(
            ImmutableList.of(BIGINT, DOUBLE),
            Ints.asList(1, 0),
            ROW_NUMBER,
            Ints.asList(),
            Ints.asList(0),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}));
    // Rows sorted by the BIGINT key (-1, 2, 4, 5, 6) and numbered 1..5.
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), DOUBLE, BIGINT, BIGINT)
            .row(-0.1, -1L, 1L)
            .row(0.3, 2L, 2L)
            .row(0.2, 4L, 3L)
            .row(0.4, 5L, 4L)
            .row(0.1, 6L, 5L)
            .build();
    assertOperatorEquals(factory, driverContext, pages, expectedResult);
}
// Sorting input that is already ordered on the sort channel must leave the
// row order unchanged (channel 0 ascending, nulls first).
@Test
public void testPageSorterSorted()
{
    List<Type> columnTypes = ImmutableList.of(BIGINT, DOUBLE, VARCHAR);
    List<Integer> orderingChannels = Ints.asList(0);
    List<SortOrder> orderings = ImmutableList.of(ASC_NULLS_FIRST);
    // Already sorted on channel 0: -12, -2, 1, 2 (split across two pages).
    List<Page> inputPages = RowPagesBuilder.rowPagesBuilder(columnTypes)
            .row(-12L, 2.2, "a")
            .row(-2L, 2.2, "b")
            .pageBreak()
            .row(1L, 2.2, "d")
            .row(2L, 1.1, "c")
            .build();
    List<Page> expectedPages = RowPagesBuilder.rowPagesBuilder(columnTypes)
            .row(-12L, 2.2, "a")
            .row(-2L, 2.2, "b")
            .row(1L, 2.2, "d")
            .row(2L, 1.1, "c")
            .build();
    assertSorted(inputPages, expectedPages, columnTypes, orderingChannels, orderings, 100);
}
// Distinct-limit where the input carries more distinct values (six) than the
// limit (five): output is truncated at the limit.
@Test(dataProvider = "hashEnabledValues")
public void testDistinctLimit(boolean hashEnabled)
{
    RowPagesBuilder pagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
    // Pages carry 1,2,3 and 2..6 — six distinct values; the limit keeps the first five.
    List<Page> pages = pagesBuilder
            .addSequencePage(3, 1)
            .addSequencePage(5, 2)
            .build();
    OperatorFactory factory = new DistinctLimitOperator.DistinctLimitOperatorFactory(0, new PlanNodeId("test"), pagesBuilder.getTypes(), Ints.asList(0), 5, pagesBuilder.getHashChannel(), joinCompiler);
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), BIGINT)
            .row(1L)
            .row(2L)
            .row(3L)
            .row(4L)
            .row(5L)
            .build();
    assertOperatorEquals(factory, driverContext, pages, expectedResult, hashEnabled, ImmutableList.of(1));
}