/**
 * Convert a collection of intervals into their string representations.
 * <p>
 * Each interval is rendered as {@code yyyy-MM-dd' 'HH:mm:ss/yyyy-MM-dd' 'HH:mm:ss}, using the
 * shared output formatter and a {@code /} separator between the start and end instants.
 *
 * @param intervals  the intervals to render
 *
 * @return one formatted string per interval, in iteration order
 */
public List<String> buildIntervalStringList(Collection<Interval> intervals) {
    // Separator between the start and end timestamps of each interval
    final String separator = "/";
    return intervals.stream()
            .map(interval -> DateTimeUtils.intervalToString(
                    interval,
                    DateTimeFormatterFactory.getOutputFormatter(),
                    separator
            ))
            .collect(Collectors.toList());
}
/**
 * Add an interval to a set of intervals, merging it with any intervals it overlaps or abuts.
 * <p>
 * The input set is not modified; the merge is performed on a defensive copy.
 *
 * @param intervals  the existing set of intervals
 * @param intervalToMerge  the interval to fold into the set
 *
 * @return a new set containing the merged intervals
 */
public static Set<Interval> mergeIntervalToSet(Set<Interval> intervals, Interval intervalToMerge) {
    // Copy first so the caller's set is left untouched
    Set<Interval> workingSet = new LinkedHashSet<>(intervals);
    workingSet.add(intervalToMerge);
    return mergeIntervalSet(workingSet);
}
// NOTE(review): fragment — the enclosing method (and the closing braces of this loop)
// are outside the visible chunk, so the code is left byte-identical.
// For each needed interval (presumably sorted ascending — confirm against caller),
// advance the available-intervals iterator until the current available interval can
// answer the availability question, or the iterator is exhausted.
for (Interval needed : sortedNeededIntervals) { while (!canDetermineAvailability(available, needed) && availableIntervalsIterator.hasNext()) { available = availableIntervalsIterator.next();
// NOTE(review): fragment — loop body and closing brace are outside the visible chunk;
// code left byte-identical. Walks from sliceStart toward intervalEnd one timeGrain
// at a time, computing each slice's end by adding the grain to its start.
while (sliceStart.isBefore(intervalEnd)) { DateTime sliceEnd = DateTimeUtils.addTimeGrain(sliceStart, timeGrain);
/**
 * Break a {@link ResultSetSchema} down into its serializable components.
 * <p>
 * The returned map carries the schema's time zone id, granularity name, dimension column
 * names (insertion-ordered), and metric column names, keyed by the {@code SCHEMA_*} constants.
 *
 * @param schema  the result set schema to decompose
 *
 * @return a map from schema component name to component value
 */
private Map<String, Object> getSchemaComponents(ResultSetSchema schema) {
    Map<String, Object> components = new HashMap<>();

    components.put(SCHEMA_TIMEZONE, DateTimeUtils.getTimeZone(schema.getGranularity()).getID());
    components.put(SCHEMA_GRANULARITY, schema.getGranularity().getName());

    // Preserve column order, hence the LinkedHashSet collector
    LinkedHashSet<String> dimensionNames = schema.getColumns(DimensionColumn.class)
            .stream()
            .map(Column::getName)
            .collect(Collectors.toCollection(LinkedHashSet::new));
    components.put(SCHEMA_DIM_COLUMNS, dimensionNames);
    components.put(SCHEMA_METRIC_COLUMNS, getMetricColumnNames(schema));

    return components;
}
/**
 * Create a map of dimension and metric intervals from a map of intervals to dimensions and metrics.
 * <p>
 * Pivots the interval-keyed response into column-keyed maps, then merges each column's
 * intervals into the smallest equivalent set of non-overlapping intervals.
 *
 * @param queryResult The java mapped data objects built directly from the Segment Metadata endpoint JSON;
 *                    outer key is an interval string, inner keys are {@code "dimensions"} and
 *                    {@code "metrics"} mapping to column-name lists
 */
public SegmentMetadata(Map<String, Map<String, List<String>>> queryResult) {
    Map<String, Set<Interval>> tempDimensionIntervals = new HashMap<>();
    Map<String, Set<Interval>> tempMetricIntervals = new HashMap<>();

    for (Map.Entry<String, Map<String, List<String>>> intervalColumns : queryResult.entrySet()) {
        Interval interval = Interval.parse(intervalColumns.getKey());

        // Guard against a malformed response missing either key (previously an unconditional NPE)
        List<String> dimensions = intervalColumns.getValue().get("dimensions");
        if (dimensions != null) {
            // Store dimensions in pivoted map
            dimensions.forEach(column ->
                    tempDimensionIntervals.computeIfAbsent(column, k -> new LinkedHashSet<>()).add(interval)
            );
        }

        List<String> metrics = intervalColumns.getValue().get("metrics");
        if (metrics != null) {
            // Store metrics in pivoted map
            metrics.forEach(column ->
                    tempMetricIntervals.computeIfAbsent(column, k -> new LinkedHashSet<>()).add(interval)
            );
        }
    }

    // Stitch each column's intervals together; replaceAll avoids put-during-iteration
    tempDimensionIntervals.replaceAll((column, intervals) -> DateTimeUtils.mergeIntervalSet(intervals));
    tempMetricIntervals.replaceAll((column, intervals) -> DateTimeUtils.mergeIntervalSet(intervals));

    dimensionIntervals = ImmutableMap.copyOf(tempDimensionIntervals);
    metricIntervals = ImmutableMap.copyOf(tempMetricIntervals);
    isEmpty = dimensionIntervals.isEmpty() && metricIntervals.isEmpty();
}