/**
 * Configures this builder to run the given SQL query. The query text is normalized via
 * {@link QueryTestUtil#normalizeQuery(String)} and the query type is set to SQL.
 *
 * @param query the SQL query text
 * @return this builder, for chaining
 */
public TestBuilder sqlQuery(String query) {
  final String normalized = QueryTestUtil.normalizeQuery(query);
  this.query = normalized;
  this.queryType = UserBitShared.QueryType.SQL;
  return this;
}
/**
 * Runs the given query through the shared test {@code client} and prints the results.
 *
 * @param query the query string to execute
 * @throws Exception if query execution fails
 */
public static void test(final String query) throws Exception {
  QueryTestUtil.test(client, query);
}
/**
 * Close the current <i>client</i> and open a new client using the given <i>properties</i>.
 * All tests executed after this method call use the new <i>client</i>.
 *
 * @param properties connection properties for the replacement client
 * @throws Exception if closing the old client or creating the new one fails
 */
public static void updateClient(Properties properties) throws Exception {
  Preconditions.checkState(nodes != null && nodes[0] != null, "Nodes are not setup.");
  if (client != null) {
    // Release the existing connection before replacing it; null out the field first-thing
    // after close so a failure below never leaves a reference to a closed client.
    client.close();
    client = null;
  }
  client = QueryTestUtil.createClient(config, clusterCoordinator, MAX_WIDTH_PER_NODE, properties);
}
/**
 * Execute one or more queries separated by semicolons, and print the results.
 *
 * @param client Dremio client to use
 * @param queryString the query string; may contain several statements separated by {@code ;}
 * @throws Exception if any statement fails
 */
public static void test(final DremioClient client, final String queryString) throws Exception {
  final String normalized = normalizeQuery(queryString);
  // Run each non-empty statement individually; blank segments (e.g. trailing ';') are skipped.
  for (final String statement : normalized.split(";")) {
    final String sql = statement.trim();
    if (!sql.isEmpty()) {
      testRunAndPrint(client, QueryType.SQL, sql);
    }
  }
}
/**
 * Runs the given query of the given type through the shared test {@code client},
 * printing the results.
 *
 * @param type the query type
 * @param query the query string
 * @return the number of rows returned
 * @throws Exception if query execution fails
 */
public static int testRunAndPrint(final QueryType type, final String query) throws Exception {
  return QueryTestUtil.testRunAndPrint(client, type, query);
}
/**
 * Runs the given query through the shared test {@code client}, delivering results to the
 * supplied listener instead of printing them.
 *
 * @param type the query type
 * @param query the query string
 * @param resultListener receives the query results asynchronously
 */
protected static void testWithListener(QueryType type, String query, UserResultsListener resultListener) {
  QueryTestUtil.testWithListener(client, type, query, resultListener);
}
@Test public void testConcurrentQueries() throws Exception { QueryTestUtil.testRunAndPrint(client, UserBitShared.QueryType.SQL, alterSession);
/**
 * Execute a single query with a user supplied result listener.
 *
 * @param client Dremio client to use
 * @param type type of query
 * @param queryString the query string
 * @param resultListener the result listener
 */
public static void testWithListener(final DremioClient client, final QueryType type,
    final String queryString, final UserResultsListener resultListener) {
  client.runQuery(type, QueryTestUtil.normalizeQuery(queryString), resultListener);
}
}
/**
 * Execute one or more queries separated by semicolons, and print the results, with the
 * option to add formatted arguments to the query string.
 *
 * @param client Dremio client to use
 * @param query the query string; may contain formatting specifications to be used by
 *     {@link String#format(String, Object...)}.
 * @param args optional args to use in the formatting call for the query string
 * @throws Exception if query execution fails
 */
public static void test(final DremioClient client, final String query, Object... args) throws Exception {
  final String formatted = String.format(query, args);
  test(client, formatted);
}
/**
 * Starts a single-node test cluster and opens the shared test client.
 * Order matters: the config and coordinator must exist before the node is constructed,
 * and the node must be running before a client can connect.
 *
 * @throws Exception if the node or client fails to start
 */
@BeforeClass public static void openClient() throws Exception { config = SabotConfig.create(TEST_CONFIGURATIONS); clusterCoordinator = LocalClusterCoordinator.newRunningCoordinator(); bit = new SabotNode(config, clusterCoordinator, DremioTest.CLASSPATH_SCAN_RESULT, true); bit.run(); client = QueryTestUtil.createClient(config, clusterCoordinator, 2, null); }
/**
 * Runs a query through the shared test {@code client} and returns the result batches.
 * For {@code PREPARED_STATEMENT} the query object must be a {@code PreparedStatementHandle};
 * for all other types it must be a SQL string, which is normalized before execution.
 *
 * @param type the query type
 * @param query either a {@code PreparedStatementHandle} or a {@code String}, per {@code type}
 * @return the result batches produced by the query
 * @throws Exception if query execution fails
 */
public static List<QueryDataBatch> testRunAndReturn(QueryType type, Object query) throws Exception {
  if (type == QueryType.PREPARED_STATEMENT) {
    Preconditions.checkArgument(query instanceof PreparedStatementHandle,
        "Expected an instance of PreparedStatement as input query");
    return testPreparedStatement((PreparedStatementHandle) query);
  }
  Preconditions.checkArgument(query instanceof String, "Expected a string as input query");
  final String sql = QueryTestUtil.normalizeQuery((String) query);
  return client.runQuery(type, sql);
}
/**
 * Formats the query string with the given args and runs it through the shared test
 * {@code client}, printing the results.
 *
 * @param query the query string; may contain {@link String#format(String, Object...)} specifiers
 * @param args optional formatting args for the query string
 * @throws Exception if query execution fails
 */
public static void test(String query, Object... args) throws Exception {
  final String formatted = String.format(query, args);
  QueryTestUtil.test(client, formatted);
}
/**
 * Boots an in-process multi-node test cluster and opens the shared test client.
 * Node 0 carries all roles; the remaining nodes are executor-only, so the default test
 * plugins are registered only against node 0's catalog. Statement order is significant:
 * the coordinator and temp schema dir must exist before any node runs, and node 0 must
 * be running before plugins are added.
 *
 * @throws Exception if any node or the client fails to start
 */
private static void openClient() throws Exception { clusterCoordinator = LocalClusterCoordinator.newRunningCoordinator(); dfsTestTmpSchemaLocation = TestUtilities.createTempDir(); nodes = new SabotNode[nodeCount]; for(int i = 0; i < nodeCount; i++) { // first node has all roles, and all others are only executors nodes[i] = SABOT_NODE_RULE.newSabotNode(new SabotProviderConfig(i == 0)); BINDER_RULE.updateBindingCreator(nodes[i].getBindingCreator()); nodes[i].run(); if(i == 0) { TestUtilities.addDefaultTestPlugins(nodes[i].getContext().getCatalogService(), dfsTestTmpSchemaLocation); } } client = QueryTestUtil.createClient(config, clusterCoordinator, MAX_WIDTH_PER_NODE, defaultProperties); // turn off re-attempts, this needs to be set at the system level as many unit test will // reset the user session by restarting the client setEnableReAttempts(false); }
/**
 * This method will take a SQL string statement, get the PHYSICAL plan in json format,
 * then check the physical plan against the list of expected substrings, verifying that
 * every expected string is contained in the physical plan string.
 *
 * @param sql the SQL statement to explain
 * @param expectedSubstrs substrings that must each appear in the plan text
 * @throws Exception if plan generation fails
 */
public static void testPhysicalPlan(String sql, String... expectedSubstrs) throws Exception {
  final String explain = "EXPLAIN PLAN for " + QueryTestUtil.normalizeQuery(sql);
  final String planStr = getPlanInString(explain, OPTIQ_FORMAT);
  for (final String expected : expectedSubstrs) {
    assertTrue(String.format("Unable to find expected string %s in plan: %s!", expected, planStr),
        planStr.contains(expected));
  }
}
/**
 * Runs TPC-H query 1 with a small slice target and expects it to fail with the
 * allocator-leak message; any other outcome (different error, or success) fails the test.
 */
@Test
public void tpch01() throws Exception {
  final String query = getFile("memory/tpch01_memory_leak.sql");
  try {
    QueryTestUtil.test(client, "alter session set \"planner.slice_target\" = 10; " + query);
  } catch (UserRemoteException e) {
    // Only the expected leak error is acceptable; anything else propagates.
    if (!e.getMessage().contains("Allocator closed with outstanding buffers allocated")) {
      throw e;
    }
    return;
  }
  fail("Expected UserRemoteException indicating memory leak");
}
/**
 * Picks a random query file, submits its (normalized, semicolon-stripped) contents
 * asynchronously, and records the listener so completion can be awaited later.
 * Listener registration is synchronized because listeners may complete on other threads.
 */
private void submitRandomQuery() {
  final String filename = queryFile[random.nextInt(queryFile.length)];
  final String query;
  try {
    // Replace ';' with a space so the statement submits as a single query.
    query = QueryTestUtil.normalizeQuery(getFile(filename)).replace(';', ' ');
  } catch (IOException e) {
    throw new RuntimeException("Caught exception", e);
  }
  final UserResultsListener listener = new ChainingSilentListener(query);
  client.runQuery(UserBitShared.QueryType.SQL, query, listener);
  synchronized (this) {
    listeners.add(listener);
  }
}
/**
 * Verifies that with the partition-column limit set to 0, partition identification is
 * disabled and only the internal update column remains. The original limit is restored
 * in {@code finally} so other tests are unaffected.
 */
@Test
public void testGroupScanWithPartitionIdentificationOff() throws Exception {
  final long defaultValue =
      ExecConstants.PARQUET_MAX_PARTITION_COLUMNS_VALIDATOR.getDefault().getNumVal();
  try {
    QueryTestUtil.test(getRpcClient(), "alter system set \"store.parquet.partition_column_limit\" = 0");
    final List<String> partitionColumnList =
        getPartitionColumnsForDataSet("datasets/parquet_no_partition_identification");
    Assert.assertEquals(1, partitionColumnList.size());
    Assert.assertEquals("$_dremio_$_update_$", partitionColumnList.get(0));
  } finally {
    // Restore the system-level default regardless of the assertion outcome.
    QueryTestUtil.test(getRpcClient(),
        "alter system set \"store.parquet.partition_column_limit\" = " + defaultValue);
  }
}
/**
 * Verifies that a field not produced by flatten ({@code tagId}) is pushed below the
 * Flatten operator in the physical plan: the plan must show a Flatten followed by an
 * ITEM expression referencing {@code tagId}.
 */
@Test
public void ensureFieldNotDependentOnFlattenIsPushedBelowFlatten() throws Exception {
  final String query = "SELECT errorCode, errorMessage, nested_1.data.q AS q, nested_1.data.v AS v, TO_TIMESTAMP(nested_1.data.ts, 'YYYY-MM-DD\"T\"HH24:MI:SS.FFFTZO') AS ts, tagId\n" +
      "FROM (\n" +
      "  SELECT errorCode, errorMessage, flatten(nested_0.tagList.data) AS data, nested_0.tagList.tagId AS tagId\n" +
      "  FROM (\n" +
      "    SELECT errorCode, errorMessage, flatten(tagList) AS tagList\n" +
      "    FROM cp.\"flatten/multiflatten.json\"\n" +
      "  ) nested_0\n" +
      ") nested_1";
  final String plan = getPlanInString("EXPLAIN PLAN for " + QueryTestUtil.normalizeQuery(query), OPTIQ_FORMAT);
  // Pattern flags are bit masks and must be combined with bitwise OR, not arithmetic
  // addition: '+' only works by accident when each flag appears exactly once.
  assertTrue(Pattern.compile(".*Flatten.*ITEM[^\\)]*tagId.*", Pattern.MULTILINE | Pattern.DOTALL)
      .matcher(plan).matches());
}
+ QueryTestUtil.normalizeQuery(sql);
final String plan = getPlanInString("EXPLAIN PLAN for " + QueryTestUtil.normalizeQuery(query), OPTIQ_FORMAT);