@Test
public void queryEmptyHiveTable() throws Exception {
  // A Hive table with no rows must come back as an empty (not missing) result set.
  final String emptyTableQuery = "SELECT * FROM hive.empty_table";
  testBuilder()
      .sqlQuery(emptyTableQuery)
      .expectsEmptyResultSet()
      .go();
}
@Test
public void testLiteralCastToBIGINTYieldsBIGINT() throws Exception {
  // Casting an integer literal to BIGINT must produce a BIGINT-typed column.
  testBuilder()
      .sqlQuery(
          "SELECT CAST( 64 AS BIGINT ) AS ShouldBeBIGINT "
              + "FROM cp.\"employee.json\" LIMIT 1")
      .unOrdered()
      .baselineColumns("ShouldBeBIGINT")
      // Long.valueOf replaces the deprecated-for-removal new Long(...) boxing
      // constructor; the baseline value is identical.
      .baselineValues(Long.valueOf(64))
      .go();
}
@Test
public void testLiteralCastToINTEGERYieldsINTEGER() throws Exception {
  // Casting an integer literal to INTEGER must produce an INTEGER-typed column.
  testBuilder()
      .sqlQuery(
          "SELECT CAST( 32 AS INTEGER ) AS ShouldBeINTEGER "
              + "FROM cp.\"employee.json\" LIMIT 1")
      .unOrdered()
      .baselineColumns("ShouldBeINTEGER")
      // Integer.valueOf replaces the deprecated-for-removal new Integer(...)
      // boxing constructor; the baseline value is identical.
      .baselineValues(Integer.valueOf(32))
      .go();
}
@Test
public void testAvgWithNullableScalarFunction() throws Exception {
  // avg() over length() of a nullable column: nulls are ignored by the average.
  testBuilder()
      .sqlQuery(" select avg(length(b1)) as col from cp.\"jsoninput/nullable1.json\"")
      .unOrdered()
      .baselineColumns("col")
      .baselineValues(3.0d)
      .go();
}
// Runs the given query as queryUser and checks it yields the single row {rownum: 1}.
private void queryViewHelper(final String queryUser, final String query) throws Exception {
  // Switch the client session to the requested user before issuing the query.
  updateClient(queryUser);

  testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("rownum")
      .baselineValues(1)
      .go();
}
@Test
public void testCaseNullableTypesVarchar() throws Exception {
  // A CASE that can yield NULL must still produce a comparable nullable VARCHAR.
  final String sql =
      "select (res1 = 'qwe') res2 from (select (case when (false) then null else 'qwe' end) res1 from (values(1)))";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("res2")
      .baselineValues(true)
      .go();
}
@Test
public void describeTableWithSchemaAndColumnName() throws Exception {
  // DESCRIBE <schema>.<table> <column> must return metadata for just that column.
  final String describeSql = "DESCRIBE INFORMATION_SCHEMA.\"TABLES\" TABLE_CATALOG";
  testBuilder()
      .sqlQuery(describeSql)
      .unOrdered()
      .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
      .baselineValues("TABLE_CATALOG", "CHARACTER VARYING", "YES")
      .go();
}
@Test
public void useSchema() throws Exception {
  // USE must acknowledge the default-schema switch in its summary row.
  testBuilder()
      .sqlQuery("USE dfs")
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(true, "Default schema changed to [dfs]")
      .go();
}
@Test // DRILL-3739 public void readingFromStorageHandleBasedTable() throws Exception { testBuilder() .sqlQuery("SELECT * FROM hive.kv_sh ORDER BY key LIMIT 2") .ordered() .baselineColumns("key", "value") .expectsEmptyResultSet() .go(); }
@Test // DX-11283 public void datePartMillennium() throws Exception { final String query = "SELECT DATE_PART('MILLENNIUM', date '2018-04-22') AS col1 FROM (VALUES(1))"; testBuilder() .sqlQuery(query) .unOrdered() .baselineColumns("col1") .baselineValues(3L) .go(); } }
@Test
public void testConcatFunction() throws Exception {
  // concat() must coerce the numeric R_REGIONKEY column and join all arguments.
  final String concatQuery =
      "SELECT "
          + "concat('1234', ' COL_VALUE ', R_REGIONKEY, ' - STRING') as STR_1 "
          + "FROM cp.\"tpch/region.parquet\" limit 1";
  testBuilder()
      .sqlQuery(concatQuery)
      .unOrdered()
      .baselineColumns("STR_1")
      .baselineValues("1234 COL_VALUE 0 - STRING")
      .go();
}
@Test
public void testSignFunction() throws Exception {
  // sign() must preserve the input's numeric type: FLOAT -> FLOAT,
  // DOUBLE -> DOUBLE, INT -> INT.
  final String signQuery =
      "select sign(cast('1.23' as float)) as SIGN_FLOAT, sign(-1234.4567) as SIGN_DOUBLE, sign(23) as SIGN_INT "
          + "from cp.\"employee.json\" where employee_id < 2";
  testBuilder()
      .sqlQuery(signQuery)
      .unOrdered()
      .baselineColumns("SIGN_FLOAT", "SIGN_DOUBLE", "SIGN_INT")
      .baselineValues(1F, -1D, 1)
      .go();
}
@Test
public void simpleSelect() throws Exception {
  // SELECT * over the view must expose every employee column with correct
  // types and values for the first row.
  final DateTimeFormatter isoDateFormatter =
      DateFunctionsUtils.getISOFormatterForFormatString("YYYY-MM-DD");
  final String selectAll = String.format("SELECT * FROM %s", viewName);

  testBuilder()
      .sqlQuery(selectAll)
      .ordered()
      .baselineColumns(
          "employee_id", "full_name", "position_id", "department_id", "birth_date",
          "hire_date", "salary", "fsalary", "single", "education_level", "gender")
      .baselineValues(
          1,
          "Sheri Nowmer",
          1,
          1L,
          isoDateFormatter.parseLocalDateTime("1961-08-26"),
          isoDateFormatter.parseLocalDateTime("1994-12-01"),
          80000.0D,
          80000.0F,
          true,
          "Graduate Degree",
          "F")
      .go();
}
@Test // DRILL-4521 public void ensureVarianceIsAggregateReduced() throws Exception { String query01 = "select variance(salary) from cp.\"employee.json\""; testPlanSubstrPatterns(query01, new String[] {"EXPR$0=[/(-($0, /(*($1, $1), $2)), CASE(=($2, 1), null, -($2, 1)))]"}, new String[] {"EXPR$0=[VARIANCE($0)]"}); testBuilder().sqlQuery(query01).approximateEquality().unOrdered().baselineColumns("EXPR$0").baselineValues(2.8856749581279494E7).go(); String query02 = "select var_samp(salary) from cp.\"employee.json\""; testBuilder().sqlQuery(query02).approximateEquality().unOrdered().baselineColumns("EXPR$0").baselineValues(2.8856749581279494E7).go(); String query03 = "select var_pop(salary) from cp.\"employee.json\""; testBuilder().sqlQuery(query03).approximateEquality().unOrdered().baselineColumns("EXPR$0").baselineValues(2.8831765382507823E7).go(); }
@Test
public void defaultSchemaClasspath() throws Exception {
  // After "USE cp", an unqualified table name must resolve against the classpath schema.
  testBuilder()
      .sqlQuery("SELECT full_name FROM \"employee.json\" LIMIT 1")
      .unOrdered()
      .optionSettingQueriesForTestQuery("USE cp")
      .baselineColumns("full_name")
      .baselineValues("Sheri Nowmer")
      .go();
}
@Test // DRILL-3328 public void convertFromOnHiveBinaryType() throws Exception { testBuilder() .sqlQuery("SELECT convert_from(binary_field, 'UTF8') col1 from hive.readtest") .unOrdered() .baselineColumns("col1") .baselineValues("binaryfield") .baselineValues(new Object[]{null}) .go(); }
@Test
public void stringLiteralComparison() throws Exception {
  // String equality must be exact: 'bar' and 'bar ' (trailing space) differ,
  // i.e. no SQL CHAR-style blank padding is applied.
  testBuilder()
      .sqlQuery("SELECT a = b as e FROM (VALUES('foo', 'foo'),('bar', 'bar ')) tbl(a, b)")
      .ordered()
      .baselineColumns("e")
      .baselineValues(true)
      .baselineValues(false)
      .go();
}
@Test
@Ignore("decimal")
public void testCastCastDecimalDoubleRoundTrip_disabled() throws Exception {
  // Casting a DECIMAL(18,9) back to DOUBLE must preserve the literal value.
  final String castQuery =
      "select cast((cast('1.0001' as decimal(18, 9))) as double) DECIMAL_DOUBLE_CAST "
          + "from cp.\"employee.json\" where employee_id = 1";
  testBuilder()
      .sqlQuery(castQuery)
      .unOrdered()
      .baselineColumns("DECIMAL_DOUBLE_CAST")
      .baselineValues(1.0001d)
      .go();
}
@Test
public void defaultTwoLevelSchemaHive() throws Exception {
  // After "USE hive.db1", an unqualified table name must resolve inside that
  // two-level Hive schema.
  testBuilder()
      .sqlQuery("SELECT * FROM kv_db1 LIMIT 2")
      .unOrdered()
      .optionSettingQueriesForTestQuery("USE hive.db1")
      .baselineColumns("key", "value")
      .baselineValues("1", " key_1")
      .baselineValues("2", " key_2")
      .go();
}
}
@Test
public void testDateMinusInterval() throws Exception {
  // Subtracting an INTERVAL DAY (cast from an ISO-8601 duration string) from a
  // DATE literal must shift the date by that many days.
  final String intervalSubtraction =
      "SELECT ({d '2017-01-13'} - CAST(CAST(a AS VARCHAR) AS INTERVAL DAY)) res1 FROM (VALUES('P1D'),('P2D')) t(a)";
  testBuilder()
      .sqlQuery(intervalSubtraction)
      .ordered()
      .baselineColumns("res1")
      .baselineValues(newDateTime(1484179200000L)) // 2017-01-12
      .baselineValues(newDateTime(1484092800000L)) // 2017-01-11
      .go();
}