@Test
public void keepLearningSchemaAcrossFiles() throws Exception {
  // Enable reattempts so the server may ask the client to re-run a query once
  // it has recorded a schema change discovered mid-stream.
  test("ALTER SYSTEM SET \"" + ExecConstants.ENABLE_REATTEMPTS.getOptionName() + "\" = true");
  try {
    final String query = String.format("select * from dfs_root.\"%s/schemachange/differentschemas/\"", TEST_RES_PATH);
    try {
      testBuilder()
          .sqlQuery(query)
          .unOrdered()
          .jsonBaselineFile("results/differentschemas.json")
          .build()
          .run();
    } catch (Exception e) {
      // first attempt may fail as a batch may have been sent to the user, so if it does, this must be the message ..
      assertTrue(e.getMessage()
          .contains("New schema found and recorded. Please reattempt the query."));
    }
    // .. but the second attempt must not fail, full schema must have been learnt at this point
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .jsonBaselineFile("results/differentschemas.json")
        .build()
        .run();
  } finally {
    // Always restore the system option so later tests see the default.
    test("ALTER SYSTEM RESET \"" + ExecConstants.ENABLE_REATTEMPTS.getOptionName() + "\"");
  }
}
// NOTE(review): unmatched closing brace below — presumably the enclosing
// class's closer that was collapsed onto this line; verify against full file.
}
/**
 * Projects a field out of a nested map ({@code user_info.cust_id}) while
 * filtering on a scalar column, and checks the three expected ids.
 */
private void runFilterNonComplexColumn() throws Exception {
  String sql = "select t1.user_info.cust_id as cust_id from " + DATAFILE
      + " t1 where t1.trans_id > 0 and t1.trans_id < 4";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .baselineColumns("cust_id")
      .baselineValues(86623L)
      .baselineValues(11L)
      .baselineValues(666L)
      .build()
      .run();
}
/**
 * Materializes the given classpath parquet file via CTAS and verifies that a
 * full scan of the created table matches the original file; the table is
 * dropped afterwards regardless of the outcome.
 *
 * @param datafile parquet file name under the {@code parquet/} resource dir
 */
private void createAndQuery(String datafile) throws Exception {
  String ctasSource = String.format("select * from cp.\"parquet/%s\"", datafile);
  String tableName = "test_repeated_readers_" + datafile;
  try {
    test("create table dfs_test.\"%s\" as %s", tableName, ctasSource);
    testBuilder()
        .sqlQuery("select * from dfs_test.\"%s\" d", tableName)
        .ordered()
        .jsonBaselineFile("parquet/" + datafile)
        .go();
  } finally {
    // Clean up even when the verification above throws.
    deleteTableIfExists(tableName);
  }
}
@Test //DRILL-1649 public void testNestedFlattenWithJoin() throws Exception { final String query="" + " select event_info.uid, transaction_info.trans_id, event_info.event.evnt_id evnt_id "+ "from ( "+ " select userinfo.transaction.trans_id trans_id, max(userinfo.event.event_time) max_event_time "+ " from ( "+ " select uid, flatten(events) event, flatten(transactions) transaction from cp.\"complex/json/single-user-transactions.json\" "+ ") userinfo "+ "where userinfo.transaction.trans_time >= userinfo.event.event_time "+ "group by userinfo.transaction.trans_id "+ ") transaction_info "+ "inner join "+ "( "+ " select uid, flatten(events) event "+ " from cp.\"complex/json/single-user-transactions.json\" "+ ") event_info "+ "on transaction_info.max_event_time = event_info.event.event_time "+ ""; test("alter session set \"exec.enable_union_type\"=true"); testBuilder() .sqlQuery(query) .unOrdered() .jsonBaselineFile("complex/drill-1649-result.json") .go(); test("alter session set \"exec.enable_union_type\"=false"); }
@Test public void testRepeatedColumnMatching() throws Exception { try { testBuilder() .sqlQuery("select * from cp.\"store/json/schema_change_int_to_string.json\"") .optionSettingQueriesForTestQuery("alter system set \"store.json.all_text_mode\" = true") .ordered() .jsonBaselineFile("testframework/schema_change_int_to_string_non-matching.json") .optionSettingQueriesForBaseline("alter system set \"store.json.all_text_mode\" = true") .build().run(); } catch (Exception ex) { assertThat(ex.getMessage(), CoreMatchers.containsString("at position 1 column '`field_1`' mismatched values,")); assertThat(ex.getMessage(), CoreMatchers.containsString("expected (JsonStringArrayList):")); assertThat(ex.getMessage(), CoreMatchers.containsString("[\"5\",\"2\",\"3\",\"4\",\"1\",\"2\"]")); assertThat(ex.getMessage(), CoreMatchers.containsString("but received (JsonStringArrayList):")); assertThat(ex.getMessage(), CoreMatchers.containsString("[\"5\"]")); // this indicates successful completion of the test test("alter system set \"store.json.all_text_mode\" = false"); return; } throw new Exception("Test framework verification failed, expected failure on order check."); }
@Test public void testComplexJSON_all_text() throws Exception { testBuilder() .sqlQuery("select * from cp.\"store/json/schema_change_int_to_string.json\"") .optionSettingQueriesForTestQuery("alter system set \"store.json.all_text_mode\" = true") .ordered() .jsonBaselineFile("store/json/schema_change_int_to_string.json") .optionSettingQueriesForBaseline("alter system set \"store.json.all_text_mode\" = true") .build().run(); testBuilder() .sqlQuery("select * from cp.\"store/json/schema_change_int_to_string.json\"") .optionSettingQueriesForTestQuery("alter system set \"store.json.all_text_mode\" = true") .unOrdered() // Check other verification method with same files .jsonBaselineFile("store/json/schema_change_int_to_string.json") .optionSettingQueriesForBaseline("alter system set \"store.json.all_text_mode\" = true") .build().run(); test("alter system set \"store.json.all_text_mode\" = false"); }
@Test @Ignore public void testMixedNumberTypes() throws Exception { try { testBuilder() .sqlQuery("select * from cp.\"jsoninput/mixed_number_types.json\"") .unOrdered() .jsonBaselineFile("jsoninput/mixed_number_types.json") .build().run(); } catch (Exception ex) { assertTrue(ex.getMessage().contains("DATA_READ ERROR: Error parsing JSON - You tried to write a BigInt type when you are using a ValueWriter of type NullableFloat8WriterImpl.")); // this indicates successful completion of the test return; } throw new Exception("Mixed number types verification failed, expected failure on conflicting number types."); }
@Test
public void mergeJoin() throws Exception {
  // Force the planner onto merge join for a self-join on the complex file.
  test("alter session set \"planner.enable_hashjoin\" = false");
  test("alter session set \"planner.enable_mergejoin\" = true");
  try {
    String query = String.format("select t1.amount, t1.\"date\", t1.marketing_info, t1.\"time\", t1.trans_id, t1.trans_info, t1.user_info " +
        "from %s t1, %s t2 where t1.amount = t2.amount", DATAFILE, DATAFILE);
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .jsonBaselineFile("store/parquet/complex/baseline.json")
        .build()
        .run();
  } finally {
    // FIX: enable_hashjoin used to stay disabled forever (it was never
    // restored), and the mergejoin reset ran only on success. Reset both so
    // later tests plan with default options.
    test("alter session reset \"planner.enable_hashjoin\"");
    test("alter session reset \"planner.enable_mergejoin\"");
  }
}
@Test public void testBasicJSON() throws Exception { testBuilder() .sqlQuery("select * from cp.\"scan_json_test_3.json\"") .ordered() .jsonBaselineFile("/scan_json_test_3.json") .build().run(); testBuilder() .sqlQuery("select * from cp.\"scan_json_test_3.json\"") .unOrdered() // Check other verification method with same files .jsonBaselineFile("/scan_json_test_3.json") .build().run(); }
@Test // DRILL-2771, similar problem as DRILL-2197 except problem reproduces with right outer join instead of left @Ignore public void testRightJoinWithMap() throws Exception { final String query = " select a.id, b.oooi.oa.oab.oabc oabc, b.ooof.oa.oab oab from " + "cp.\"join/complex_1.json\" b right outer join cp.\"join/complex_1.json\" a on a.id = b.id order by a.id"; testBuilder() .sqlQuery(query) .unOrdered() .jsonBaselineFile("join/DRILL-2197-result-1.json") .build() .run(); } @Test
@Test
@Ignore
public void missingColumnInMap() throws Exception {
  // Select a map member that is absent from some records.
  final String sql =
      "select t.trans_info.keywords as keywords from cp.\"store/parquet/complex/complex.parquet\" t";
  testBuilder()
      .sqlQuery(sql)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline2.json")
      .baselineColumns("keywords")
      .build()
      .run();
}
@Test //DRILL-2163 public void testNestedTypesPastJoinReportsValidResult() throws Exception { final String query = "select t1.uid, t1.events, t1.events[0].evnt_id as event_id, t2.transactions, " + "t2.transactions[0] as trans, t1.odd, t2.even from cp.\"project/complex/a.json\" t1, " + "cp.\"project/complex/b.json\" t2 where t1.uid = t2.uid"; testBuilder() .sqlQuery(query) .ordered() .jsonBaselineFile("project/complex/drill-2163-result.json") .build() .run(); }
@Test
public void selectMap() throws Exception {
  // Project a single map column from the complex parquet file.
  testBuilder()
      .sqlQuery("select marketing_info from cp.\"store/parquet/complex/complex.parquet\"")
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline5.json")
      .build()
      .run();
}
@Test
public void selectAllColumns() throws Exception {
  // Explicitly name every column (reserved words quoted) instead of "*".
  final String sql = "select amount, \"date\", marketing_info, \"time\", trans_id, trans_info, user_info from "
      + DATAFILE;
  testBuilder()
      .sqlQuery(sql)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
}
@Test
public void hashJoin() throws Exception {
  // Self-join on amount with default planner options (hash join enabled).
  final String sql = "select t1.amount, t1.\"date\", t1.marketing_info, t1.\"time\", t1.trans_id, t1.trans_info, t1.user_info "
      + "from " + DATAFILE + " t1, " + DATAFILE + " t2 where t1.amount = t2.amount";
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
}
@Test //DRILL-2254 public void testSingleFlattenFromNestedRepeatedList() throws Exception { final String query = "select t.uid, flatten(t.odd) odd from cp.\"project/complex/a.json\" t"; testPlanSubstrPatterns(query, new String[] {"columns=[`odd`, `uid`]"}, null); testBuilder() .sqlQuery(query) .unOrdered() .jsonBaselineFile("flatten/drill-2254-result-single.json") .build() .run(); }
@Test //DRILL-1832 @Ignore public void testJsonWithNulls2() throws Exception { final String query="select SUM(1) as \"sum_Number_of_Records_ok\" from cp.\"/jsoninput/twitter_43.json\" having (COUNT(1) > 0)"; testBuilder() .sqlQuery(query) .ordered() .jsonBaselineFile("jsoninput/drill-1832-2-result.json") .go(); }
@Test //DRILL-1832 @Ignore("update baseline") public void testJsonWithNulls1() throws Exception { final String query="select * from cp.\"jsoninput/twitter_43.json\""; testBuilder() .sqlQuery(query) .ordered() .jsonBaselineFile("jsoninput/drill-1832-1-result.json") .go(); }
@Test //DRILL-2268 public void testFlattenAfterJoin3() throws Exception { String query = "select flatten(sub1.lst_lst) flat_lst_lst from "+ "(select t1.lst_lst lst_lst from cp.\"complex/json/flatten_join.json\" t1 "+ "inner join cp.\"complex/json/flatten_join.json\" t2 on t1.id=t2.id) sub1"; testPlanSubstrPatterns(query, new String[] {"columns=[`id`, `lst_lst`]", "columns=[`id`]"}, null); testBuilder() .sqlQuery(query) .unOrdered() .jsonBaselineFile("complex/drill-2268-3-result.json") .go(); }
@Test //DRILL-2268 public void testFlattenAfterJoin1() throws Exception { String query = "select flatten(sub1.events) flat_events from "+ "(select t1.events events from cp.\"complex/json/flatten_join.json\" t1 "+ "inner join cp.\"complex/json/flatten_join.json\" t2 on t1.id=t2.id) sub1"; testPlanSubstrPatterns(query, new String[] {"columns=[`id`, `events`]", "columns=[`id`]"}, null); testBuilder() .sqlQuery(query) .unOrdered() .jsonBaselineFile("complex/drill-2268-1-result.json") .go(); }