@Override
public void getSamplesBySourceIdsAndMetricIds(final List<Integer> hostIds,
                                              @Nullable final List<Integer> sampleKindIds,
                                              final DateTime startTime,
                                              final DateTime endTime,
                                              final TimelineChunkConsumer chunkConsumer,
                                              final InternalTenantContext context) throws UnableToObtainConnectionException, CallbackFailedException {
    // Walks the in-memory store (host id -> sample kind id -> chunks) and hands every
    // chunk that matches the host/sample-kind filters and overlaps [startTime, endTime]
    // to chunkConsumer. A null sampleKindIds means "all sample kinds".
    //
    // Uses entrySet() instead of keySet()+get() (avoids a redundant lookup per key) and
    // contains() instead of indexOf(...) == -1 (same linear scan, clearer intent).
    for (final Map.Entry<Integer, Map<Integer, List<TimelineChunk>>> hostEntry : samplesPerHostAndSampleKind.entrySet()) {
        // Skip hosts the caller did not ask for.
        if (!hostIds.contains(hostEntry.getKey())) {
            continue;
        }
        for (final Map.Entry<Integer, List<TimelineChunk>> sampleKindEntry : hostEntry.getValue().entrySet()) {
            if (sampleKindIds != null && !sampleKindIds.contains(sampleKindEntry.getKey())) {
                continue;
            }
            for (final TimelineChunk chunk : sampleKindEntry.getValue()) {
                // Keep only chunks whose [start, end] interval overlaps the requested window:
                // a chunk is excluded when it starts after the window ends or ends before it starts.
                if (chunk.getStartTime().isAfter(endTime) || chunk.getEndTime().isBefore(startTime)) {
                    continue;
                }
                chunkConsumer.processTimelineChunk(chunk);
            }
        }
    }
}
@Override
public Void withHandle(final Handle handle) throws Exception {
    // Streams the aggregation candidates for this aggregationLevel and tenant from the
    // database, maps each row to a TimelineChunk, and feeds it to aggregationConsumer.
    // Always returns null (Void callback contract).
    //
    // MySQL needs special setup to make it stream the results. See:
    // http://javaquirks.blogspot.com/2007/12/mysql-streaming-result-set.html
    // http://stackoverflow.com/questions/2447324/streaming-large-result-sets-with-mysql
    final Query<Map<String, Object>> query = handle.createQuery("getStreamingAggregationCandidates")
                                                   .setFetchSize(Integer.MIN_VALUE) // MySQL driver magic value: enables row-by-row streaming
                                                   .bind("aggregationLevel", aggregationLevel)
                                                   .bind("tenantRecordId", createCallContext().getTenantRecordId());
    // Resolve the named query above via the StringTemplate locator bound to the SQL DAO class.
    // NOTE(review): this must be set before the query is executed below — keep the order.
    query.setStatementLocator(new StringTemplate3StatementLocator(TimelineAggregatorSqlDao.class));
    ResultIterator<TimelineChunk> iterator = null;
    try {
        iterator = query
                .map(timelineChunkMapper)
                .iterator();
        while (iterator.hasNext()) {
            aggregationConsumer.processTimelineChunk(iterator.next());
        }
    } catch (Exception e) {
        // Best-effort: a failure during this aggregation pass is logged, not rethrown,
        // so the caller keeps going. NOTE(review): confirm swallowing is intended here.
        log.error(String.format("Exception during aggregation of level %d", aggregationLevel), e);
    } finally {
        // Always close the iterator to release the streaming result set and its connection.
        if (iterator != null) {
            iterator.close();
        }
    }
    return null;
}
// NOTE(review): stray fragment — this statement mixes identifiers from the two methods
// above (`chunkConsumer` from the samples lookup, `iterator` from the streaming loop)
// and has no visible enclosing definition here. It looks like a duplicated line from an
// extraction/merge; verify against the full file whether it belongs anywhere at all.
chunkConsumer.processTimelineChunk(iterator.next());