@Test
public void testLongDirectoryNames() throws Exception {
  // Verifies that a table under a long, nested directory path can be read.
  String dfsTmp = getDfsTestTmpSchemaLocation();
  File longDir = new File(dfsTmp, "nestedTable/veryLongDirectoryName");
  // mkdirs() creates both directory levels in one call; assert the result so a
  // silent filesystem failure surfaces here rather than as a confusing query error.
  // (The original called mkdir() twice and ignored both return values.)
  Assert.assertTrue("failed to create " + longDir, longDir.mkdirs() || longDir.isDirectory());
  File data = new File(longDir, "file.csv");
  // try-with-resources closes the stream even if println throws (original leaked it).
  try (PrintStream printStream = new PrintStream(data)) {
    for (int i = 0; i < 5000; i++) {
      printStream.println("a");
    }
  }
  test("select * from dfs_test.nestedTable");
}
@After
public void resetOptions() throws Exception {
  // Restore every session option after each test so no setting leaks into the next one.
  final String resetAll = "ALTER SESSION RESET ALL";
  testNoResult(resetAll);
}
@Test
public void dateTimeTypes() throws Exception {
  // CURRENT_TIME / CURRENT_TIMESTAMP — with or without an explicit precision argument —
  // must all plan with millisecond precision: TIME(3) / TIMESTAMP(3).
  final String sql =
      "SELECT CURRENT_TIME ct, CURRENT_TIME(0) ct0,"
          + " CURRENT_TIMESTAMP cts, CURRENT_TIMESTAMP(1) cts1 FROM (VALUES(1))";
  final String expectedRowType =
      "TIME\\(3\\) ct, TIME\\(3\\) ct0, TIMESTAMP\\(3\\) cts, TIMESTAMP\\(3\\) cts1";
  testPlanMatchingPatterns(sql, new String[] {expectedRowType});
}
@Test
public void testMultipleCountDistinctWithGroupBy() throws Exception {
  // Two COUNT(DISTINCT ...) aggregates with a GROUP BY, exercised under both
  // aggregation strategies. The original duplicated the whole try-with-resources
  // block twice; the only difference was the two boolean option values.
  String query = "select n_regionkey, count(distinct n_nationkey), count(distinct n_name) from cp.\"tpch/nation.parquet\" group by n_regionkey;";
  runWithAggOptions(query, false, true);  // streaming aggregation only
  runWithAggOptions(query, true, false);  // hash aggregation only
}

/**
 * Runs {@code query} with the given hash-agg / stream-agg settings, then runs it
 * again with slice_target = 1 to force exchanges into the plan.
 */
private void runWithAggOptions(String query, boolean hashAgg, boolean streamAgg) throws Exception {
  try (AutoCloseable ac1 = withOption(PlannerSettings.HASHAGG, hashAgg);
       AutoCloseable ac2 = withOption(PlannerSettings.STREAMAGG, streamAgg)) {
    test(query);
    try (AutoCloseable ac3 = withOption(ExecConstants.SLICE_TARGET_OPTION, 1)) {
      test(query);
    }
  }
}
@Test // DRILL-811 public void testDRILL_811ViewJoin() throws Exception { test("use dfs_test"); test("create view nation_view_testexamplequeries as select * from cp.\"tpch/nation.parquet\";"); test("create view region_view_testexamplequeries as select * from cp.\"tpch/region.parquet\";"); test("select n.n_nationkey, n.n_regionkey, r.r_name from region_view_testexamplequeries r , nation_view_testexamplequeries n where r.r_regionkey = n.n_regionkey "); test("select n.n_regionkey, count(*) as cnt from region_view_testexamplequeries r , nation_view_testexamplequeries n where r.r_regionkey = n.n_regionkey and n.n_nationkey > 8 group by n.n_regionkey order by n.n_regionkey"); test("select n.n_regionkey, count(*) as cnt from region_view_testexamplequeries r join nation_view_testexamplequeries n on r.r_regionkey = n.n_regionkey and n.n_nationkey > 8 group by n.n_regionkey order by n.n_regionkey"); test("drop view region_view_testexamplequeries "); test("drop view nation_view_testexamplequeries "); }
@Test // DRILL-2094 public void testOrderbyArrayElementInSubquery() throws Exception { String root = FileUtils.getResourceAsFile("/store/json/orderByArrayElement.json").toURI().getPath().toString(); String query = String.format("select s.id from \n" + "(select id \n" + "from dfs.\"%s\" \n" + "order by list[0]) s", root); testBuilder() .sqlQuery(query) .ordered() .baselineColumns("id") .baselineValues((long) 1) .baselineValues((long) 5) .baselineValues((long) 4) .baselineValues((long) 2) .baselineValues((long) 3) .build().run(); }
// NOTE(review): this method appears truncated/garbled — `file` and `file2` are used but never
// declared (presumably PrintStreams writing JSON files into the "emptyList" directory — TODO
// recover from version control), and the method body and try blocks are never closed. Intent
// seems to be: write a record with an empty list, query it, write a second file after a
// modification-time gap, refresh the catalog, and verify the learned schema — but the missing
// code makes that unverifiable from here. Left byte-identical rather than guessing at lost code.
@Test public void testEmptyListSchemaLearning() throws Exception { String dfs_tmp = getDfsTestTmpSchemaLocation(); File directory = new File(dfs_tmp, "emptyList"); directory.mkdir(); file.println("{\"a\":1,\"b\":[]}"); file.close(); test("select * from dfs_test.emptyList"); Thread.sleep(1200); // sleep so we make sure the filesystem uses a different modification time for the second file. file2.close(); getSabotContext().getCatalogService().refreshSource(new NamespaceKey("dfs_test"), CatalogService.REFRESH_EVERYTHING_NOW, UpdateType.FULL); try { test("select * from dfs_test.emptyList"); } catch (Exception e) { logger.debug("Schema learned", e); final List<QueryDataBatch> results = testSqlWithResults("select * from dfs_test.emptyList"); try { Assert.assertEquals(MinorType.VARCHAR, results.get(0).getHeader().getDef().getField(1).getChild(2).getMajorType().getMinorType());
@Test // DRILL-2311 @Ignore("Move to TestParquetWriter. Have to ensure same file name does not exist on filesystem.") public void testCreateTableSameColumnNames() throws Exception { String creatTable = "CREATE TABLE CaseInsensitiveColumnNames as " + "select cast(r_regionkey as BIGINT) BIGINT_col, cast(r_regionkey as DECIMAL) bigint_col \n" + "FROM cp.\"tpch/region.parquet\";\n"; test("USE dfs_test"); test(creatTable); testBuilder() .sqlQuery("select * from \"CaseInsensitiveColumnNames\"") .unOrdered() .baselineColumns("BIGINT_col", "bigint_col0\n") .baselineValues((long) 0, new BigDecimal(0)) .baselineValues((long) 1, new BigDecimal(1)) .baselineValues((long) 2, new BigDecimal(2)) .baselineValues((long) 3, new BigDecimal(3)) .baselineValues((long) 4, new BigDecimal(4)) .build().run(); }
// NOTE(review): incomplete fragment — the method header is missing, `queryCTAS1` is cut off
// mid-assignment, and `queryCTAS2`/`query1` are used without visible declarations; the final
// testBuilder() chain is also truncated. Looks like the body of a testCTASOrderByCoumnNotInSelectClause
// test (per the table names), but too much is lost to reconstruct safely — TODO recover from
// version control. Left byte-identical. The stray System.out.println should become a logger
// call or be removed once the method is restored.
System.out.println(getDfsTestTmpSchemaLocation()); String root = FileUtils.getResourceAsFile("/store/text/data/regions.csv").toURI().getPath().toString(); String queryCTAS1 = "CREATE TABLE TestExampleQueries_testCTASOrderByCoumnNotInSelectClause1 as " + String query2 = "select * from TestExampleQueries_testCTASOrderByCoumnNotInSelectClause2"; test("use dfs_test"); test(queryCTAS1); test(queryCTAS2); testBuilder() .sqlQuery(query1) .ordered() .build().run(); testBuilder() .sqlQuery(query2) .ordered()
@Test // DRILL-1973 public void testLimit0SubqueryWithFilter() throws Exception { String query1 = "select * from (select sum(1) as x from cp.\"tpch/region.parquet\" limit 0) WHERE x < 10"; String query2 = "select * from (select sum(1) as x from cp.\"tpch/region.parquet\" limit 0) WHERE (0 = 1)"; int actualRecordCount = 0; int expectedRecordCount = 0; actualRecordCount = testSql(query1); assertEquals(expectedRecordCount, actualRecordCount); actualRecordCount = testSql(query2); assertEquals(expectedRecordCount, actualRecordCount); }
@Test // DRILL-811 public void testDRILL_811View() throws Exception { test("use dfs_test"); test("create view nation_view_testexamplequeries as select * from cp.\"tpch/nation.parquet\";"); test("select n.n_nationkey, n.n_name, n.n_regionkey from nation_view_testexamplequeries n where n.n_nationkey > 8 order by n.n_regionkey"); test("select n.n_regionkey, count(*) as cnt from nation_view_testexamplequeries n where n.n_nationkey > 8 group by n.n_regionkey order by n.n_regionkey"); test("drop view nation_view_testexamplequeries "); }
@Test // DRILL-2094 public void testOrderbyArrayElement() throws Exception { String root = FileUtils.getResourceAsFile("/store/json/orderByArrayElement.json").toURI().getPath().toString(); String query = String.format("select t.id, t.list[0] as SortingElem " + "from dfs.\"%s\" t " + "order by t.list[0]", root); testBuilder() .sqlQuery(query) .ordered() .baselineColumns("id", "SortingElem") .baselineValues((long) 1, (long) 1) .baselineValues((long) 5, (long) 2) .baselineValues((long) 4, (long) 3) .baselineValues((long) 2, (long) 5) .baselineValues((long) 3, (long) 6) .build().run(); }
@Test // DRILL-1846 (this tests issue with SimpleMergeExchange) public void testOrderByDiffColumnsInSubqAndOuter() throws Exception { String query = "select n.n_nationkey from (select n_nationkey, n_regionkey from cp.\"tpch/nation.parquet\" order by n_regionkey) n order by n.n_nationkey"; // set slice_target = 1 to force exchanges try(AutoCloseable ac = withOption(ExecConstants.SLICE_TARGET_OPTION, 1)){ test(query); } }
@Test
public void testSimilar() throws Exception {
  // SIMILAR TO 'CHINA' (no wildcards) behaves as an exact match — one nation, key 18.
  final String sql = "select n_nationkey "
      + "from cp.\"tpch/nation.parquet\" "
      + "where n_name similar to 'CHINA' "
      + "order by n_regionkey";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      // force parallel execution for the test query
      .optionSettingQueriesForTestQuery("alter session set \"planner.slice_target\" = 1")
      .baselineColumns("n_nationkey")
      .baselineValues(18)
      .go();
  // Restore the default slice target changed by the option-setting query above.
  test("alter session set \"planner.slice_target\" = " + ExecConstants.SLICE_TARGET_DEFAULT);
}
@Test // DRILL-2019 public void testFilterInSubqueryAndOutside() throws Exception { String query1 = "select r_regionkey from (select r_regionkey from cp.\"tpch/region.parquet\" o where r_regionkey < 2) where r_regionkey > 2"; String query2 = "select r_regionkey from (select r_regionkey from cp.\"tpch/region.parquet\" o where r_regionkey < 4) where r_regionkey > 1"; int actualRecordCount = 0; int expectedRecordCount = 0; actualRecordCount = testSql(query1); assertEquals(expectedRecordCount, actualRecordCount); expectedRecordCount = 2; actualRecordCount = testSql(query2); assertEquals(expectedRecordCount, actualRecordCount); }
@Test
public void testMixedTypeGroupBy() throws Exception {
  // Group by a heterogeneously-typed column together with typeof(a), forcing the
  // hash aggregator by disabling streaming aggregation.
  // Uses withOption (the convention elsewhere in this file) instead of raw ALTER
  // SESSION statements: the original re-enabled streamagg by hand but left
  // enable_hashagg modified, and restored nothing if the query threw.
  // withOption restores both settings automatically, even on failure.
  try (AutoCloseable ac1 = withOption(PlannerSettings.STREAMAGG, false);
       AutoCloseable ac2 = withOption(PlannerSettings.HASHAGG, true)) {
    test("select count(*) from cp.\"vector/complex/mixed.json\" group by a, typeof(a)");
  }
}
@Test
public void testValues2() throws Exception {
  // NULLIF(id, '') maps empty strings to NULL, so only the non-empty row survives the filter.
  final String sql =
      "SELECT id FROM (VALUES(''),(''),('non-null-value')) tbl(id) WHERE NULLIF(id,'') IS NOT NULL";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("id")
      .baselineValues("non-null-value")
      .go();
}
@Test // DRILL-1561 public void test2PhaseAggAfterOrderBy() throws Exception { String query = "select count(*) from (select o_custkey from cp.\"tpch/orders.parquet\" order by o_custkey)"; // set slice_target = 1 to force exchanges and 2-phase aggregation try(AutoCloseable ac = withOption(ExecConstants.SLICE_TARGET_OPTION, 1)){ test(query); } }
@Test
public void testParquetAlternatingNullRuns() throws Exception {
  // CTAS from JSON with alternating runs of null / non-null strings, then a LIKE filter.
  test(String.format(
      "create table dfs_test.f as select * from dfs.\"%s/json/tableWithNullStrings\"", TEST_RES_PATH));
  String sql = "select a, b from dfs_test.f where b like '%hell%'";
  TestBuilder builder = testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("a", "b");
  // Expected data is two identical runs: 10 rows with a=null, then 9 rows with a="hello".
  for (int run = 0; run < 2; run++) {
    for (int i = 0; i < 10; i++) {
      builder.baselineValues(null, "hello");
    }
    for (int i = 0; i < 9; i++) {
      builder.baselineValues("hello", "hello");
    }
  }
  builder.go();
}
@Test // DRILL-1544 public void testLikeEscape() throws Exception { int actualRecordCount = testSql("select id, name from cp.\"jsoninput/specialchar.json\" where name like '%#_%' ESCAPE '#'"); int expectedRecordCount = 1; assertEquals(String.format("Received unexpected number of rows in output: expected=%d, received=%s", expectedRecordCount, actualRecordCount), expectedRecordCount, actualRecordCount); }