// NOTE(review): fragment — begins mid-argument-list of an enclosing call; the
// trailing DruidTable construction uses the 8-arg constructor with null
// complexMetrics / null field map. Presumably part of a view-registration
// method — confirm against the enclosing file.
rowType, viewTable, nonPartitionColumns, partitionColumns, new ArrayList<>(), conf, new HashMap<>(), new HashMap<>(), new AtomicInteger()); DruidTable druidTable = new DruidTable(new DruidSchema(address, address, false), dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals, null, null);
// NOTE(review): this fragment (and the identical one below) calls the OLD
// 6-arg DruidTable constructor, while the fragments above/below use the new
// 8-arg form ending in (…, null, null) — looks like an incomplete migration
// of call sites to the extended constructor; verify all callers were updated.
DruidTable druidTable = new DruidTable(new DruidSchema(address, address, false), dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals); final TableScan scan = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
// NOTE(review): duplicate of the fragment above — same 6-arg constructor call.
DruidTable druidTable = new DruidTable(new DruidSchema(address, address, false), dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals); final TableScan scan = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
// NOTE(review): 8-arg form with null complexMetrics and null field map —
// consistent with the first fragment.
DruidTable druidTable = new DruidTable(new DruidSchema(address, address, false), dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals, null, null);
/** Creates a {@link DruidTable} by copying the given parameters.
 *
 * @param druidSchema Druid schema
 * @param dataSourceName Data source name in Druid, also table name
 * @param intervals Intervals, or null to use default
 * @param fieldMap Fully populated map of fields (dimensions plus metrics)
 * @param metricNameSet Fully populated set of metric names
 * @param timestampColumnName Name of timestamp column, or null
 * @param complexMetrics List of complex metrics in Druid (thetaSketch, hyperUnique)
 *
 * @return A table
 */
static Table create(DruidSchema druidSchema, String dataSourceName,
    List<Interval> intervals, Map<String, SqlTypeName> fieldMap,
    Set<String> metricNameSet, String timestampColumnName,
    Map<String, List<ComplexMetric>> complexMetrics) {
  // Snapshot the caller's map once; the same immutable view backs both the
  // row-type prototype and the table's stored field map. Previously the
  // mutable fieldMap was passed to the constructor even though this copy was
  // made, so later mutation by the caller could desynchronize the table from
  // its row type.
  final ImmutableMap<String, SqlTypeName> fields = ImmutableMap.copyOf(fieldMap);
  return new DruidTable(druidSchema, dataSourceName,
      new MapRelProtoDataType(fields, timestampColumnName),
      ImmutableSet.copyOf(metricNameSet), timestampColumnName, intervals,
      complexMetrics, fields);
}
/** Creates a {@link DruidTable} by copying the given parameters.
 *
 * @param druidSchema Druid schema
 * @param dataSourceName Data source name in Druid, also table name
 * @param intervals Intervals, or null to use default
 * @param fieldMap Fully populated map of fields (dimensions plus metrics)
 * @param metricNameSet Fully populated set of metric names
 * @param timestampColumnName Name of timestamp column, or null
 * @param complexMetrics List of complex metrics in Druid (thetaSketch, hyperUnique)
 *
 * @return A table
 */
static Table create(DruidSchema druidSchema, String dataSourceName,
    List<Interval> intervals, Map<String, SqlTypeName> fieldMap,
    Set<String> metricNameSet, String timestampColumnName,
    Map<String, List<ComplexMetric>> complexMetrics) {
  // Snapshot the caller's map once; the same immutable view backs both the
  // row-type prototype and the table's stored field map. Previously the
  // mutable fieldMap was passed to the constructor even though this copy was
  // made, so later mutation by the caller could desynchronize the table from
  // its row type.
  final ImmutableMap<String, SqlTypeName> fields = ImmutableMap.copyOf(fieldMap);
  return new DruidTable(druidSchema, dataSourceName,
      new MapRelProtoDataType(fields, timestampColumnName),
      ImmutableSet.copyOf(metricNameSet), timestampColumnName, intervals,
      complexMetrics, fields);
}
@Before public void testSetup() { druidQuery = Mockito.mock(DruidQuery.class); final CalciteConnectionConfig connectionConfigMock = Mockito .mock(CalciteConnectionConfig.class); Mockito.when(connectionConfigMock.timeZone()).thenReturn("UTC"); Mockito.when(druidQuery.getConnectionConfig()).thenReturn(connectionConfigMock); Mockito.when(druidQuery.getDruidTable()) .thenReturn( new DruidTable(Mockito.mock(DruidSchema.class), "dataSource", null, ImmutableSet.of(), "timestamp", null, null, null)); } @Test public void testInFilter() throws IOException {