// Selecting from an empty Hive table must yield zero rows.
@Test
public void queryEmptyHiveTable() throws Exception {
  final String sql = "SELECT * FROM hive.empty_table";
  testBuilder()
      .sqlQuery(sql)
      .expectsEmptyResultSet()
      .go();
}
// A literal CAST to BIGINT must come back as a BIGINT (Java Long) value.
@Test
public void testLiteralCastToBIGINTYieldsBIGINT() throws Exception {
  testBuilder()
      .sqlQuery("SELECT CAST( 64 AS BIGINT ) AS ShouldBeBIGINT "
          + "FROM cp.\"employee.json\" LIMIT 1")
      .unOrdered()
      .baselineColumns("ShouldBeBIGINT")
      // Autoboxed 64L replaces the deprecated new Long(64) boxing constructor.
      .baselineValues(64L)
      .go();
}
// A literal CAST to INTEGER must come back as an INTEGER (Java Integer) value.
@Test
public void testLiteralCastToINTEGERYieldsINTEGER() throws Exception {
  testBuilder()
      .sqlQuery("SELECT CAST( 32 AS INTEGER ) AS ShouldBeINTEGER "
          + "FROM cp.\"employee.json\" LIMIT 1")
      .unOrdered()
      .baselineColumns("ShouldBeINTEGER")
      // Autoboxed 32 replaces the deprecated new Integer(32) boxing constructor.
      .baselineValues(32)
      .go();
}
/**
 * Runs {@code query} as an ordered query and verifies the single
 * "columns" column against {@code expectedResult}, one baseline row
 * per supplied element.
 */
private void testWithResult(String query, Object... expectedResult) throws Exception {
  TestBuilder builder = testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("columns");
  for (Object expected : expectedResult) {
    builder = builder.baselineValues(expected);
  }
  builder.build().run();
}
/**
 * Switches the client session to {@code queryUser} and runs
 * {@code query}, expecting exactly one row with rownum == 1.
 */
private void queryViewHelper(final String queryUser, final String query) throws Exception {
  updateClient(queryUser);
  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("rownum")
      .baselineValues(1)
      .go();
}
// SELECT * with ORDER BY + LIMIT must match the fully expanded column list.
@Test
public void testSelStarOrderByLimit() throws Exception {
  final String baseline =
      " select employee_id, full_name,first_name,last_name,position_id,position_title,store_id,"
          + " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role "
          + " from cp.\"employee.json\" "
          + " order by last_name limit 2";
  testBuilder()
      .ordered()
      .sqlQuery(" select * from cp.\"employee.json\" order by last_name limit 2")
      .sqlBaselineQuery(baseline)
      .build().run();
}
// Star expansion on both sides of a join must equal the explicit column list.
@Test
public void testSelStarBothSideJoin() throws Exception {
  final String starQuery =
      "select n.*, r.* from cp.\"tpch/nation.parquet\" n, cp.\"tpch/region.parquet\" r where n.n_regionkey = r.r_regionkey";
  final String expandedQuery =
      "select n.n_nationkey,n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name,r.r_comment from cp.\"tpch/nation.parquet\" n, cp.\"tpch/region.parquet\" r where n.n_regionkey = r.r_regionkey order by n.n_name";
  testBuilder()
      .unOrdered()
      .sqlQuery(starQuery)
      .sqlBaselineQuery(expandedQuery)
      .build().run();
}
@Test // DRILL-3739 public void readingFromStorageHandleBasedTable() throws Exception { testBuilder() .sqlQuery("SELECT * FROM hive.kv_sh ORDER BY key LIMIT 2") .ordered() .baselineColumns("key", "value") .expectsEmptyResultSet() .go(); }
// Map fields may arrive in a different order; the JSON baseline with
// reordered keys must still compare equal.
@Test
public void testMapOrdering() throws Exception {
  final String sql = "select * from cp.\"/testframework/map_reordering.json\"";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .jsonBaselineFile("testframework/map_reordering2.json")
      .build().run();
}
// concat() must stringify and join literals and a numeric column.
@Test
public void testConcatFunction() throws Exception {
  final String sql =
      "SELECT concat('1234', ' COL_VALUE ', R_REGIONKEY, ' - STRING') as STR_1 FROM cp.\"tpch/region.parquet\" limit 1";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("STR_1")
      .baselineValues("1234 COL_VALUE 0 - STRING")
      .go();
}
// sign() must return the correct sign for float, double and int inputs.
@Test
public void testSignFunction() throws Exception {
  final String sql =
      "select sign(cast('1.23' as float)) as SIGN_FLOAT, sign(-1234.4567) as SIGN_DOUBLE, sign(23) as SIGN_INT from cp.\"employee.json\" where employee_id < 2";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("SIGN_FLOAT", "SIGN_DOUBLE", "SIGN_INT")
      .baselineValues(1F, -1D, 1)
      .go();
}
@Test // DRILL-4521 public void ensureVarianceIsAggregateReduced() throws Exception { String query01 = "select variance(salary) from cp.\"employee.json\""; testPlanSubstrPatterns(query01, new String[] {"EXPR$0=[/(-($0, /(*($1, $1), $2)), CASE(=($2, 1), null, -($2, 1)))]"}, new String[] {"EXPR$0=[VARIANCE($0)]"}); testBuilder().sqlQuery(query01).approximateEquality().unOrdered().baselineColumns("EXPR$0").baselineValues(2.8856749581279494E7).go(); String query02 = "select var_samp(salary) from cp.\"employee.json\""; testBuilder().sqlQuery(query02).approximateEquality().unOrdered().baselineColumns("EXPR$0").baselineValues(2.8856749581279494E7).go(); String query03 = "select var_pop(salary) from cp.\"employee.json\""; testBuilder().sqlQuery(query03).approximateEquality().unOrdered().baselineColumns("EXPR$0").baselineValues(2.8831765382507823E7).go(); }
// DRILL-2361: a quoted column alias containing a dot must survive a join.
@Test
public void testDRILL2361_JoinColumnAliasWithDots() throws Exception {
  final String sql =
      "select count(*) as cnt from (select o_custkey as \"x.y\" from cp.\"tpch/orders.parquet\") o inner join cp.\"tpch/customer.parquet\" c on o.\"x.y\" = c.c_custkey";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("cnt")
      .baselineValues(15000L)
      .build().run();
}
@Test // DRILL-3328 public void convertFromOnHiveBinaryType() throws Exception { testBuilder() .sqlQuery("SELECT convert_from(binary_field, 'UTF8') col1 from hive.readtest") .unOrdered() .baselineColumns("col1") .baselineValues("binaryfield") .baselineValues(new Object[]{null}) .go(); }
// 'bar' vs 'bar ' (trailing space) must compare unequal; identical literals equal.
@Test
public void stringLiteralComparison() throws Exception {
  final String sql = "SELECT a = b as e FROM (VALUES('foo', 'foo'),('bar', 'bar ')) tbl(a, b)";
  testBuilder()
      .sqlQuery(sql)
      .ordered()
      .baselineColumns("e")
      .baselineValues(true)
      .baselineValues(false)
      .go();
}
// Casting a DECIMAL literal to DOUBLE must preserve the value.
@Test
@Ignore("decimal")
public void testCastDecimalDouble() throws Exception {
  final String sql =
      "select cast((cast('1.0001' as decimal(18, 9))) as double) DECIMAL_DOUBLE_CAST from cp.\"employee.json\" where employee_id = 1";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("DECIMAL_DOUBLE_CAST")
      .baselineValues(1.0001d)
      .go();
}
// After USE hive.db1, an unqualified table name must resolve within that schema.
@Test
public void defaultTwoLevelSchemaHive() throws Exception {
  testBuilder()
      .sqlQuery("SELECT * FROM kv_db1 LIMIT 2")
      .unOrdered()
      .optionSettingQueriesForTestQuery("USE hive.db1")
      .baselineColumns("key", "value")
      .baselineValues("1", " key_1")
      .baselineValues("2", " key_2")
      .go();
}
} // end of enclosing test class
@Test // Union-All where same column is projected twice in right child public void testUnionAll6() throws Exception { String query = "select n_nationkey, n_regionkey from cp.\"tpch/nation.parquet\" where n_regionkey = 1 union all select r_regionkey, r_regionkey from cp.\"tpch/region.parquet\" where r_regionkey = 2"; testBuilder() .sqlQuery(query) .unOrdered() .csvBaselineFile("testframework/testUnionAllQueries/q6.tsv") .baselineTypes(MinorType.INT, MinorType.INT) .baselineColumns("n_nationkey", "n_regionkey") .build().run(); }
@Test // Union-All where same column is projected twice in left and right child public void testUnionAll6_1() throws Exception { String query = "select n_nationkey, n_nationkey from cp.\"tpch/nation.parquet\" union all select r_regionkey, r_regionkey from cp.\"tpch/region.parquet\""; testBuilder() .sqlQuery(query) .unOrdered() .csvBaselineFile("testframework/testUnionAllQueries/q6_1.tsv") .baselineTypes(MinorType.INT, MinorType.INT) .baselineColumns("n_nationkey", "n_nationkey1") .build().run(); }
@Test // see DRILL-1825 @Ignore public void testSelStarWithAdditionalColumnLimit() throws Exception { testBuilder() .sqlQuery("select *, n_nationkey, *, n_name from cp.\"tpch/nation.parquet\" limit 2") .ordered() .csvBaselineFile("testframework/testStarQueries/testSelStarWithAdditionalColumnLimit/q1.tsv") .baselineTypes(MinorType.INT, MinorType.VARCHAR, MinorType.INT, MinorType.VARCHAR, MinorType.INT, MinorType.INT, MinorType.VARCHAR, MinorType.INT, MinorType.VARCHAR, MinorType.VARCHAR) .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_nationkey1", "n_name0", "n_regionkey0", "n_comment0", "n_name1") .build().run(); }