/**
 * Verifies that {@code IS NULL} projects as a required BIT column in the schema
 * returned for a LIMIT 0 query (schema-only check, no rows compared).
 *
 * <p>Fix: method name had a typo ({@code tesIsNull}); @Test methods are invoked
 * reflectively, so renaming is safe.
 */
@Test
public void testIsNull() throws Exception {
  final String query = "select r_name is null as col from cp.\"tpch/region.parquet\" limit 0";
  final List<Pair<SchemaPath, MajorType>> expectedSchema = Lists.newArrayList();
  // IS NULL always yields a value, hence REQUIRED (non-nullable) BIT.
  final MajorType majorType = Types.required(MinorType.BIT);
  expectedSchema.add(Pair.of(SchemaPath.getSimplePath("col"), majorType));
  testBuilder()
      .sqlQuery(query)
      .schemaBaseLine(expectedSchema)
      .build()
      .run();
}
// NOTE(review): this span is an interior fragment of a larger method — the enclosing
// signature, the opening try/if, and some closing braces are not visible here, so the
// braces below do not balance on their own. Code left byte-identical.
// Flow (as far as visible): run the test query, collect its materialized records,
// run the baseline query, collect its records (or use the caller-supplied
// baselineRecords), compare the two sets, and release batches in finally.
RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); actual = BaseTestQuery.testRunAndReturn(queryType, query); checkNumBatches(actual); addTypeInfoIfMissing(actual.get(0), testBuilder); addToMaterializedResults(actualRecords, actual, loader); BaseTestQuery.test(baselineOptionSettingQueries); expected = BaseTestQuery.testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery()); addToMaterializedResults(expectedRecords, expected, loader); } else { expectedRecords = baselineRecords; compareResults(expectedRecords, actualRecords); } finally { cleanupBatches(actual, expected);
/**
 * Builds the configured {@link DremioTestWrapper}.
 *
 * @return a wrapper ready to run the configured query against its baseline
 * @throws Exception declared for API compatibility with existing callers; the
 *         precondition violation below now surfaces as an unchecked
 *         {@link IllegalStateException} (still caught by any {@code catch (Exception)})
 */
public DremioTestWrapper build() throws Exception {
  // High-performance comparison relies on row order, so ordered() must have been called.
  // Misuse of the builder is a programming error -> IllegalStateException, not raw Exception.
  if (!ordered && highPerformanceComparison) {
    throw new IllegalStateException(
        "High performance comparison only available for ordered checks, to enforce this restriction, ordered() must be called first.");
  }
  return new DremioTestWrapper(this, allocator, query, queryType, baselineOptionSettingQueries,
      testOptionSettingQueries, getValidationQueryType(), ordered, highPerformanceComparison,
      baselineRecords, expectedNumBatches);
}
public void compareResultsHyperVector() throws Exception { RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); BaseTestQuery.test(testOptionSettingQueries); List<QueryDataBatch> results = BaseTestQuery.testRunAndReturn(queryType, query); checkNumBatches(results); // To avoid extra work for test writers, types can optionally be inferred from the test query addTypeInfoIfMissing(results.get(0), testBuilder); Map<String, HyperVectorValueIterator> actualSuperVectors = addToHyperVectorMap(results, loader); BaseTestQuery.test(baselineOptionSettingQueries); List<QueryDataBatch> expected = BaseTestQuery.testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery()); Map<String, HyperVectorValueIterator> expectedSuperVectors = addToHyperVectorMap(expected, loader); compareHyperVectors(expectedSuperVectors, actualSuperVectors); cleanupBatches(results, expected); }
// NOTE(review): this method's body appears truncated in this view — there is an
// `} else {` with no visible `if`, and `catch`/`finally` with no visible `try`,
// so the braces do not balance. Code left byte-identical; do not reformat or
// "fix" without recovering the missing statements from the full file.
// Visible flow: run the test query, merge its batches into combined on-heap
// vectors, build the expected vectors either from the validation query or
// (else-branch) from caller-supplied baselineRecords, compare, wrap failures
// with the query text (cause preserved), and clean up batches in finally.
public void compareMergedOnHeapVectors() throws Exception { RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); BatchSchema schema = null; actual = BaseTestQuery.testRunAndReturn(queryType, query); checkNumBatches(actual); addTypeInfoIfMissing(actual.get(0), testBuilder); actualSuperVectors = addToCombinedVectorResults(batchIter); batchIter.close(); expected = BaseTestQuery.testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery()); BatchIterator exBatchIter = new BatchIterator(expected, loader); expectedSuperVectors = addToCombinedVectorResults(exBatchIter); exBatchIter.close(); } else { expectedSuperVectors = translateRecordListToHeapVectors(baselineRecords); compareMergedVectors(expectedSuperVectors, actualSuperVectors); } catch (Exception e) { throw new Exception(e.getMessage() + "\nFor query: " + query , e); } finally { cleanupBatches(expected, actual);
// NOTE(review): interior fragment — the enclosing method signature is not visible
// here. Code left byte-identical.
// Materializes the batches in `queryBatch` into a list of row maps
// (column name -> value) via the shared DremioTestWrapper helper.
RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); List<Map<String, Object>> records = new ArrayList<>(); DremioTestWrapper.addToMaterializedResults(records, queryBatch, loader);
/**
 * Self-joins the complex-type data file on {@code amount} and projects every complex
 * column; results are compared unordered against the JSON baseline.
 */
@Test
public void hashJoin() throws Exception {
  final String sql = String.format(
      "select t1.amount, t1.\"date\", t1.marketing_info, t1.\"time\", t1.trans_id, t1.trans_info, t1.user_info "
          + "from %s t1, %s t2 where t1.amount = t2.amount",
      DATAFILE, DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
}
/**
 * Projects every column of the complex-type data file explicitly and compares the
 * rows in order against the JSON baseline.
 */
@Test
public void selectAllColumns() throws Exception {
  testBuilder()
      .sqlQuery(String.format(
          "select amount, \"date\", marketing_info, \"time\", trans_id, trans_info, user_info from %s",
          DATAFILE))
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
}
/** Orders the whole file by {@code amount} and checks rows in order against the sorted baseline. */
@Test
public void sort() throws Exception {
  testBuilder()
      .sqlQuery(String.format("select * from %s order by amount", DATAFILE))
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline_sorted.json")
      .build()
      .run();
}
/**
 * Exercises the TopN path: ORDER BY with LIMIT 5, checked in order against the same
 * sorted baseline used by the full sort test.
 */
@Test
public void topN() throws Exception {
  testBuilder()
      .sqlQuery(String.format("select * from %s order by amount limit 5", DATAFILE))
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline_sorted.json")
      .build()
      .run();
}
/**
 * Casts in both the projection (bigint) and the filter (decimal) and checks the
 * single expected value.
 */
@Test
public void testSimpleQueryWithCast() throws Exception {
  final String sql = String.format(
      "select cast(department_id as bigint) as c from %s where cast(employee_id as decimal) = 170",
      DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("c")
      .baselineValues(80L)
      .build()
      .run();
}
/** The {@code ?} glob matches any single character in the directory name (199?). */
@Test
public void testGlobSingleCharacter() throws Exception {
  final String sql =
      String.format("select count(*) from dfs_test.\"%s/parquet/199?/*\"", MULTILEVEL);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("EXPR$0")
      .baselineValues(120L)
      .build()
      .run();
}
/** The {@code *} glob matches every file under a single year directory. */
@Test
public void testGlobWildcard() throws Exception {
  final String sql =
      String.format("select count(*) from dfs_test.\"%s/parquet/1994/*\"", MULTILEVEL);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("EXPR$0")
      .baselineValues(40L)
      .build()
      .run();
}
/** The {@code [4-5]} glob range selects exactly the 1994 and 1995 directories. */
@Test
public void testGlobSingleCharacterRange() throws Exception {
  final String sql =
      String.format("select count(*) from dfs_test.\"%s/parquet/199[4-5]/*\"", MULTILEVEL);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("EXPR$0")
      .baselineValues(80L)
      .build()
      .run();
}
}
/** Projects a single member of the {@code marketing_info} map; ordered JSON baseline. */
@Test
public void secondElementInMap() throws Exception {
  final String sql =
      String.format("select t.\"marketing_info\".keywords as keywords from %s t", DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline3.json")
      .baselineColumns("keywords")
      .build()
      .run();
}
/** Indexes individual elements (0 and 2) of the nested {@code keywords} array. */
@Test
public void elementsOfArray() throws Exception {
  final String sql = String.format(
      "select t.\"marketing_info\".keywords[0] as keyword0, t.\"marketing_info\".keywords[2] as keyword2 from %s t",
      DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline4.json")
      .baselineColumns("keyword0", "keyword2")
      .build()
      .run();
}
/** Counts the rows where {@code department_id} is NULL (expects exactly one). */
@Test
public void testNullCount() throws Exception {
  final String sql =
      String.format("select count(*) as c from %s where department_id is null", DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("c")
      .baselineValues(1L)
      .build()
      .run();
}
/** Counts the rows where {@code department_id} is NOT NULL (expects 106). */
@Test
public void testNotNullCount() throws Exception {
  final String sql =
      String.format("select count(*) as c from %s where department_id is not null", DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("c")
      .baselineValues(106L)
      .build()
      .run();
}
/** The {@code {1994,1995}} glob set selects exactly those two year directories. */
@Test
public void testGlobSet() throws Exception {
  final String sql =
      String.format("select count(*) from dfs_test.\"%s/parquet/{1994,1995}\"", MULTILEVEL);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .baselineColumns("EXPR$0")
      .baselineValues(80L)
      .build()
      .run();
}
/**
 * Same as elementsOfArray but with mixed-case column/member names, verifying that
 * complex-path resolution is case-insensitive.
 */
@Test
public void elementsOfArrayCaseInsensitive() throws Exception {
  final String sql = String.format(
      "select t.\"MARKETING_INFO\".keywords[0] as keyword0, t.\"Marketing_Info\".Keywords[2] as keyword2 from %s t",
      DATAFILE);
  testBuilder()
      .sqlQuery(sql)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline4.json")
      .baselineColumns("keyword0", "keyword2")
      .build()
      .run();
}