/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param ds datasource name
 * @return this builder, for chaining
 */
public DataSourceMetadataQueryBuilder dataSource(String ds)
{
  this.dataSource = new TableDataSource(ds);
  return this;
}
private Object findInnerDatasource(Query query) { DataSource _ds = query.getDataSource(); if (_ds instanceof TableDataSource) { return ((TableDataSource) _ds).getName(); } if (_ds instanceof QueryDataSource) { return findInnerDatasource(((QueryDataSource) _ds).getQuery()); } if (_ds instanceof UnionDataSource) { return Joiner.on(",") .join( ((UnionDataSource) _ds) .getDataSources() .stream() .map(TableDataSource::getName) .collect(Collectors.toList()) ); } else { // should not come here return query.getDataSource(); } }
/**
 * Returns the single table name of each underlying datasource, in order.
 * Each element is expected to expose exactly one name; {@code getOnlyElement}
 * throws if that expectation is violated.
 */
@Override
public List<String> getNames()
{
  return dataSources
      .stream()
      .map(source -> source.getNames())
      .map(Iterables::getOnlyElement)
      .collect(Collectors.toList());
}
return Collections.singletonList(query); String datasourceName = ((TableDataSource) query.getDataSource()).getName(); final List<Interval> derivativeIntervals = remainingQueryIntervals.stream() .flatMap(interval -> serverView .getTimeline((new TableDataSource(derivativeDataSource.getName()))) .lookup(interval) .stream() query.withDataSource(new TableDataSource(derivativeDataSource.getName())) .withQuerySegmentSpec(new MultipleIntervalSegmentSpec(derivativeIntervals)) );
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param ds datasource name
 * @return this builder, for chaining
 */
public ScanQueryBuilder dataSource(String ds)
{
  this.dataSource = new TableDataSource(ds);
  return this;
}
/**
 * Looks up the segment timeline for the query's table datasource.
 *
 * @param query the query whose datasource must be a {@link TableDataSource}
 * @return the timeline registered for that table name (may be null if absent)
 * @throws UOE if the query's datasource is not a table
 */
private <T> VersionedIntervalTimeline<String, Segment> getTimelineForTableDataSource(Query<T> query)
{
  // Guard clause: only table datasources have a registered timeline.
  if (!(query.getDataSource() instanceof TableDataSource)) {
    throw new UOE("DataSource type[%s] unsupported", query.getDataSource().getClass().getName());
  }
  return timelines.get(((TableDataSource) query.getDataSource()).getName());
}
@Override public String apply(TableDataSource input) { return Iterables.getOnlyElement(input.getNames()); } }
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param d datasource name
 * @return this builder, for chaining
 */
public TopNQueryBuilder dataSource(String d)
{
  this.dataSource = new TableDataSource(d);
  return this;
}
|| !dataSource.equals(((TableDataSource) query.getDataSource()).getName())) { log.makeAlert("Received query for unknown dataSource") .addData("dataSource", query.getDataSource())
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param ds datasource name
 * @return this builder, for chaining
 */
public SelectQueryBuilder dataSource(String ds)
{
  this.dataSource = new TableDataSource(ds);
  return this;
}
|| !dataSource.equals(((TableDataSource) query.getDataSource()).getName())) { log.makeAlert("Received query for unknown dataSource") .addData("dataSource", query.getDataSource())
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param d datasource name
 * @return this builder, for chaining
 */
public SearchQueryBuilder dataSource(String d)
{
  this.dataSource = new TableDataSource(d);
  return this;
}
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param ds datasource name
 * @return this builder, for chaining
 */
public TimeseriesQueryBuilder dataSource(String ds)
{
  this.dataSource = new TableDataSource(ds);
  return this;
}
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param ds datasource name
 * @return this builder, for chaining
 */
public TimeBoundaryQueryBuilder dataSource(String ds)
{
  this.dataSource = new TableDataSource(ds);
  return this;
}
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param dataSource datasource name
 * @return this builder, for chaining
 */
public Builder setDataSource(String dataSource)
{
  this.dataSource = new TableDataSource(dataSource);
  return this;
}
/**
 * Sets the datasource by name, wrapping it in a {@link TableDataSource}.
 *
 * @param ds datasource name
 * @return this builder, for chaining
 */
public SegmentMetadataQueryBuilder dataSource(String ds)
{
  this.dataSource = new TableDataSource(ds);
  return this;
}
private static Sequence<SegmentAnalysis> runSegmentMetadataQuery( final QueryLifecycleFactory queryLifecycleFactory, final Iterable<DataSegment> segments, final AuthenticationResult authenticationResult ) { // Sanity check: getOnlyElement of a set, to ensure all segments have the same dataSource. final String dataSource = Iterables.getOnlyElement( StreamSupport.stream(segments.spliterator(), false) .map(DataSegment::getDataSource).collect(Collectors.toSet()) ); final MultipleSpecificSegmentSpec querySegmentSpec = new MultipleSpecificSegmentSpec( StreamSupport.stream(segments.spliterator(), false) .map(DataSegment::toDescriptor).collect(Collectors.toList()) ); final SegmentMetadataQuery segmentMetadataQuery = new SegmentMetadataQuery( new TableDataSource(dataSource), querySegmentSpec, new AllColumnIncluderator(), false, ImmutableMap.of(), EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class), false, false ); return queryLifecycleFactory.factorize().runSimple(segmentMetadataQuery, authenticationResult, null); }
/**
 * Builds a {@link DruidTable} for the given dataSource by merging the row
 * signatures of all known segments into a single column-name -> type map
 * (TreeMap, so columns end up sorted by name). Returns a table with an empty
 * signature if no segment metadata is known for the dataSource.
 */
private DruidTable buildDruidTable(final String dataSource)
{
  synchronized (lock) {
    final Map<DataSegment, SegmentMetadataHolder> segmentMap = segmentMetadataInfo.get(dataSource);
    final Map<String, ValueType> columnTypes = new TreeMap<>();
    if (segmentMap != null) {
      for (SegmentMetadataHolder segmentMetadataHolder : segmentMap.values()) {
        // Segments without an available row signature contribute no columns.
        final RowSignature rowSignature = segmentMetadataHolder.getRowSignature();
        if (rowSignature != null) {
          for (String column : rowSignature.getRowOrder()) {
            // NOTE(review): the original comment claimed "Newer column types should override
            // older ones", but putIfAbsent keeps the FIRST type seen for a column — types from
            // segments iterated later never override. Confirm which behavior is intended
            // (put vs putIfAbsent) before relying on either.
            columnTypes.putIfAbsent(column, rowSignature.getColumnType(column));
          }
        }
      }
    }
    final RowSignature.Builder builder = RowSignature.builder();
    columnTypes.forEach(builder::add);
    return new DruidTable(new TableDataSource(dataSource), builder.build());
  }
}
new TableDataSource(dataSourceName) ); final Interval theInterval = Intervals.of(interval.replace('_', '/'));
private void setupQueries() { // queries for the basic schema Map<String, Druids.SelectQueryBuilder> basicQueries = new LinkedHashMap<>(); BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic"); { // basic.A QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval())); Druids.SelectQueryBuilder queryBuilderA = Druids.newSelectQueryBuilder() .dataSource(new TableDataSource("blah")) .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) .metrics(Collections.emptyList()) .intervals(intervalSpec) .granularity(Granularities.ALL) .descending(false); basicQueries.put("A", queryBuilderA); } SCHEMA_QUERY_MAP.put("basic", basicQueries); }