/**
 * Constructor.
 *
 * @param query  Query that defines the DataSource.
 */
public QueryDataSource(DruidFactQuery<?> query) {
    // A query-backed data source is typed QUERY and reuses the physical table of the
    // wrapped query's own data source.
    super(DefaultDataSourceType.QUERY, query.getDataSource().getPhysicalTable());
    this.query = query;
}
@Override
public Optional<? extends DruidAggregationQuery> getInnerQuery() {
    // For a query-backed data source the nested query is an aggregation query, so the
    // wildcard Optional from the data source is narrowed via an unchecked cast.
    Optional<? extends DruidAggregationQuery> innerQuery =
            (Optional<? extends DruidAggregationQuery>) this.dataSource.getQuery();
    return innerQuery;
}
@Override
@JsonIgnore
public Set<String> getNames() {
    // Same names as the parent; @JsonIgnore only hides them from serialization here.
    return super.getNames();
}
}
/**
 * Returns a set of identifiers used by Fili to identify this data source's physical tables.
 *
 * @return The set of names used by Fili to identify this data source's physical tables
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public Set<String> getNames() {
    // LinkedHashSet preserves the iteration order of the underlying data source names,
    // matching the original collector pipeline; the result is read-only.
    Set<String> names = new LinkedHashSet<>();
    for (DataSourceName dataSourceName : getPhysicalTable().getDataSourceNames()) {
        names.add(dataSourceName.asName());
    }
    return Collections.unmodifiableSet(names);
}
/**
 * Get the query that defines the data source. Empty queries become null for serialization.
 *
 * @return the serializable version of the query.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonProperty("query")
private DruidQuery<?> getQueryForSerialization() {
    // NON_NULL inclusion plus orElse(null) means an absent query is omitted from the
    // serialized JSON entirely rather than appearing as "query": null.
    return getQuery().orElse(null);
}
@Override
@JsonProperty(value = "dataSources")
public Set<String> getNames() {
    // Same names as the parent, serialized under the "dataSources" JSON key.
    return super.getNames();
}
}
/**
 * Checks if a {@link Dimension} exists in a {@link DataSource}.
 *
 * @param dimension  The dimension to look for in the datasource.
 * @param dataSource  The datasource to look through for the dimension.
 *
 * @return true if the dimension was found.
 */
default boolean dimensionExistsInDataSource(Dimension dimension, DataSource dataSource) {
    // Linear scan of the physical table's dimensions using the dimension's equals.
    for (Object tableDimension : dataSource.getPhysicalTable().getDimensions()) {
        if (dimension.equals(tableDimension)) {
            return true;
        }
    }
    return false;
}
/**
 * If this query is nestable, and has a nested query return it.
 *
 * @return the nested query or empty if there is no nested query
 */
@JsonIgnore
default Optional<? extends DruidQuery> getInnerQuery() {
    // Delegates to the data source; empty when the data source carries no nested query.
    return getDataSource().getQuery();
}
@Override
@JsonIgnore
public Set<String> getNames() {
    // Walk to the innermost query so the names reflect the physical data source
    // that is actually queried, not any intermediate nested query.
    return query.getInnermostQuery().getDataSource().getNames();
}
@Override
public void query(Dimension dimension, DataSource dataSource) {
    // Callback that receives the dimension rows when the query succeeds.
    SuccessCallback successCallback = buildSuccessCallback(dimension);
    // NOTE(review): sqlTable is never used below. The cast acts as a fail-fast check
    // that the data source is SQL-backed (ClassCastException otherwise) — confirm
    // whether that is intentional before removing the local.
    SqlPhysicalTable sqlTable = (SqlPhysicalTable) dataSource.getPhysicalTable().getSourceTable();
    // Group by the single requested dimension over all time (AllGranularity), with no
    // aggregations or post-aggregations; the two nulls are presumably filter/having —
    // TODO confirm against the GroupByQuery constructor. Rows are capped at ROW_LIMIT.
    GroupByQuery groupByQuery = new GroupByQuery(
            dataSource,
            AllGranularity.INSTANCE,
            Collections.singletonList(dimension),
            null,
            null,
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.singletonList(INTERVAL),
            new LimitSpec(Utils.asLinkedHashSet(), OptionalInt.of(ROW_LIMIT))
    );
    // Fire asynchronously; results flow through the success/failure callbacks.
    sqlBackedClient.executeQuery(groupByQuery, successCallback, failureCallback);
}
@Override
public GroupByQuery withInnermostDataSource(DataSource dataSource) {
    // If a nested query exists, push the replacement down to the innermost level and
    // rewrap it; otherwise this query is already innermost and is replaced directly.
    Optional<DruidFactQuery<?>> innerQuery = (Optional<DruidFactQuery<?>>) this.dataSource.getQuery();
    return innerQuery
            .map(inner -> withDataSource(new QueryDataSource(inner.withInnermostDataSource(dataSource))))
            .orElseGet(() -> withDataSource(dataSource));
}
metrics, dimensions, druidQuery.getDataSource().getNames(), readCache, request.getFormat().toString()
@Override public Optional<Long> getSegmentSetId(DruidAggregationQuery<?> query) { // Gather the data source names backing the query Set<DataSourceName> dataSourceNames = query.getInnermostQuery() .getDataSource() .getPhysicalTable() .getDataSourceNames() .stream() .collect(Collectors.toSet()); // Get all the segments for the data sources of the query's physical tables Set<SortedMap<DateTime, Map<String, SegmentInfo>>> tableSegments = dataSourceMetadataService.getSegments( dataSourceNames ); // Check if we have no tables with segments if (tableSegments.isEmpty()) { LOG.warn(DRUID_METADATA_SEGMENTS_MISSING.logFormat(dataSourceNames)); return Optional.empty(); } // Get requested intervals, then their segments, and sum their hash codes into a long return getSegmentHash( requestedIntervalsQueryExtractionFunctions.get(query.getClass()).apply(query).stream() .flatMap(interval -> tableSegments.stream() .map(segments -> segments.subMap(interval.getStart(), interval.getEnd())) ) ); }
@Override
public SqlAggregationQuery withInnermostDataSource(DataSource dataSource) {
    Optional<DruidFactQuery<?>> innerQuery = (Optional<DruidFactQuery<?>>) this.dataSource.getQuery();
    // A nested query means the replacement must be pushed to the innermost level first.
    if (innerQuery.isPresent()) {
        return withDataSource(new QueryDataSource(innerQuery.get().withInnermostDataSource(dataSource)));
    }
    // No nesting: this query is the innermost one, so replace its data source directly.
    return withDataSource(dataSource);
}
/**
 * Gets the timezone of the backing table for the given druid query.
 *
 * @param druidQuery  The druid query to find the timezone for
 *
 * @return the {@link DateTimeZone} of the physical table for this query.
 */
private DateTimeZone getTimeZone(DruidAggregationQuery<?> druidQuery) {
    // The timezone lives on the physical table schema's time grain, not on the query.
    return druidQuery.getDataSource()
            .getPhysicalTable()
            .getSchema()
            .getTimeGrain()
            .getTimeZone();
}
@Override
public LookbackQuery withInnermostDataSource(DataSource dataSource) {
    Optional<DruidFactQuery<?>> innerQuery = (Optional<DruidFactQuery<?>>) this.dataSource.getQuery();
    // BUG FIX: the previous check was `innerQuery == null`, which is always false —
    // getQuery() returns an Optional and a cast never produces null — so an empty inner
    // query fell through to innerQuery.get() and threw NoSuchElementException. Use the
    // presence check, matching the sibling withInnermostDataSource implementations.
    return !innerQuery.isPresent()
            ? withDataSource(dataSource)
            : withDataSource(new QueryDataSource(innerQuery.get().withInnermostDataSource(dataSource)));
}
/**
 * JSON tree walk up to physical table to retrieve physical name for a dimension.
 *
 * @param value  the dimension to retrieve api name.
 * @param gen  the Json Generator to retrieve the tree to walk on.
 *
 * @return an Optional String of physical name
 */
public static Optional<String> findPhysicalName(Dimension value, JsonGenerator gen) {
    String apiName = value.getApiName();
    // Search for physical name: map the api name through the nearest enclosing druid
    // query's physical table; presumably empty when no enclosing query exists in the
    // generator's context — see mapNearestDruidQuery.
    return mapNearestDruidQuery(
            gen,
            druidQuery -> druidQuery.getDataSource().getPhysicalTable().getPhysicalColumnName(apiName)
    );
}
@Override
public SqlAggregationQuery withAllIntervals(Collection<Interval> intervals) {
    Optional<DruidFactQuery<?>> innerQuery = (Optional<DruidFactQuery<?>>) this.dataSource.getQuery();
    // With a nested query, propagate the intervals all the way down, then apply them
    // at this level as well; otherwise only this query's intervals need updating.
    if (innerQuery.isPresent()) {
        return withDataSource(new QueryDataSource(innerQuery.get().withAllIntervals(intervals)))
                .withIntervals(intervals);
    }
    return withIntervals(intervals);
}
query.getDataSource().getPhysicalTable().getAvailableIntervals() );
@Override
public LookbackQuery withAllIntervals(Collection<Interval> intervals) {
    Optional<DruidFactQuery<?>> innerQuery = (Optional<DruidFactQuery<?>>) this.dataSource.getQuery();
    // No nested query: only this query's own intervals need replacing.
    if (!innerQuery.isPresent()) {
        return withIntervals(intervals);
    }
    // Nested query: push the intervals down the chain, then apply them here too.
    return withDataSource(new QueryDataSource(innerQuery.get().withAllIntervals(intervals)))
            .withIntervals(intervals);
}