public AccumulatingJsonSamplesOutputer(final TimeAggregationMode timeAggregationMode, final TimelineEventHandler timelineEventHandler,
                                       final TimelineDao timelineDao, final InternalTenantContext context) {
    super(timelineEventHandler, timelineDao, context);
    this.accumulatorSampleConsumer = new AccumulatorSampleConsumer(timeAggregationMode, new CSVSampleProcessor());
}
public String flush() {
    outputAndResetAccumulators();
    return toString();
}
}
public static String getSamplesAsCSV(final SampleCoder sampleCoder, final TimelineChunk chunk) throws IOException {
    return getSamplesAsCSV(sampleCoder, chunk, null, null);
}
@Test(groups = "fast")
public void testDailyAggregation() throws Exception {
    clock.setTime(new DateTime(2012, 12, 1, 12, 40, DateTimeZone.UTC));
    final DateTime start = clock.getUTCNow();
    final AccumulatorSampleConsumer sampleConsumer = new AccumulatorSampleConsumer(TimeAggregationMode.DAYS, new CSVSampleProcessor());

    // 5 for day 1
    sampleConsumer.processOneSample(start, SampleOpcode.DOUBLE, (double) 1);
    sampleConsumer.processOneSample(start.plusHours(4), SampleOpcode.DOUBLE, (double) 4);
    // 1 for day 2
    sampleConsumer.processOneSample(start.plusDays(1), SampleOpcode.DOUBLE, (double) 1);
    // 10 and 20 for day 3 (with different opcodes)
    sampleConsumer.processOneSample(start.plusDays(2), SampleOpcode.DOUBLE, (double) 10);
    sampleConsumer.processOneSample(start.plusDays(2), SampleOpcode.INT, 20);

    Assert.assertEquals(sampleConsumer.flush(), "1354320000,5.0,1354406400,1.0,1354492800,10.0,1354492800,20.0");
}
}
public static String getSamplesAsCSV(final SampleCoder sampleCoder, final TimelineChunk chunk,
                                     @Nullable final DateTime startTime, @Nullable final DateTime endTime) throws IOException {
    final CSVSampleProcessor processor = new CSVSampleProcessor(startTime, endTime);
    return getSamplesAsCSV(sampleCoder, chunk, processor);
}
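// A minimal usage sketch (not from the original source): render a chunk's samples as CSV,
// optionally restricted to a time window. sampleCoder and chunk are assumed to be in scope;
// null bounds in the overloads above mean no restriction on that side of the window.
final DateTime windowStart = new DateTime(2012, 12, 1, 0, 0, DateTimeZone.UTC);
final DateTime windowEnd = windowStart.plusDays(1);
final String allSamples = CSVConsumer.getSamplesAsCSV(sampleCoder, chunk);
final String windowedSamples = CSVConsumer.getSamplesAsCSV(sampleCoder, chunk, windowStart, windowEnd);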
private DecimatingSampleFilter createDecimatingSampleFilter(final Integer outputCount, final DecimationMode decimationMode,
                                                            final DateTime startTime, final DateTime endTime) {
    final DecimatingSampleFilter rangeSampleProcessor;
    if (outputCount == null) {
        rangeSampleProcessor = null;
    } else {
        // TODO Fix the polling interval
        rangeSampleProcessor = new DecimatingSampleFilter(startTime, endTime, outputCount, new TimeSpan("1s"), decimationMode, new CSVSampleProcessor());
    }
    return rangeSampleProcessor;
}
}
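// A minimal usage sketch (not from the original source): decimate a chunk down to roughly
// 100 output samples and render them as CSV. decimationMode, startTime, endTime, sampleCoder
// and chunk are assumed to be in scope; that the CSV processor's toString() returns the
// accumulated output is an assumption mirroring its use in outputAndResetAccumulators() below.
final CSVSampleProcessor csvProcessor = new CSVSampleProcessor();
final DecimatingSampleFilter filter =
        new DecimatingSampleFilter(startTime, endTime, 100, new TimeSpan("1s"), decimationMode, csvProcessor);
sampleCoder.scan(chunk, filter);
final String decimatedCsv = csvProcessor.toString();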
@Override
protected void writeJsonForChunks(final JsonGenerator generator, final Collection<? extends TimelineChunk> chunksForSourceAndMetric) throws IOException {
    for (final TimelineChunk chunk : chunksForSourceAndMetric) {
        if (withBinaryData) {
            writer.writeValue(generator, new TimelineChunkDecoded(chunk, sampleCoder));
        } else {
            final String source = timelineDao.getSource(chunk.getSourceId(), context);
            final CategoryRecordIdAndMetric categoryIdAndMetric = timelineDao.getCategoryIdAndMetric(chunk.getMetricId(), context);
            final String category = timelineDao.getEventCategory(categoryIdAndMetric.getEventCategoryId(), context);
            final String metric = categoryIdAndMetric.getMetric();
            final String samples = CSVConsumer.getSamplesAsCSV(sampleCoder, chunk);

            // Don't write out empty samples
            if (!Strings.isNullOrEmpty(samples)) {
                generator.writeObject(new SamplesForMetricAndSource(source, category, metric, samples));
            }
        }
    }
}
}
@Override
public void getSamplesBySourceIdsAndMetricIds(final List<Integer> hostIds, @Nullable final List<Integer> sampleKindIds,
                                              final DateTime startTime, final DateTime endTime,
                                              final TimelineChunkConsumer chunkConsumer,
                                              final InternalTenantContext context) throws UnableToObtainConnectionException, CallbackFailedException {
    for (final Integer hostId : samplesPerHostAndSampleKind.keySet()) {
        if (!hostIds.contains(hostId)) {
            continue;
        }

        final Map<Integer, List<TimelineChunk>> samplesPerSampleKind = samplesPerHostAndSampleKind.get(hostId);
        for (final Integer sampleKindId : samplesPerSampleKind.keySet()) {
            if (sampleKindIds != null && !sampleKindIds.contains(sampleKindId)) {
                continue;
            }

            for (final TimelineChunk chunk : samplesPerSampleKind.get(sampleKindId)) {
                // Skip chunks that lie entirely outside of the [startTime, endTime] window
                if (chunk.getStartTime().isAfter(endTime) || chunk.getEndTime().isBefore(startTime)) {
                    continue;
                }
                chunkConsumer.processTimelineChunk(chunk);
            }
        }
    }
}
private String getDecodedSamples() throws IOException {
    final DecodedSampleOutputProcessor processor = new DecodedSampleOutputProcessor();
    sampleCoder.scan(chunk, processor);
    return processor.getDecodedSamples();
}
@Override
protected void writeRemainingData(final JsonGenerator generator) throws IOException {
    final String samples = accumulatorSampleConsumer.flush();

    // Don't write out empty samples
    if (!Strings.isNullOrEmpty(samples)) {
        generator.writeObject(new SamplesForMetricAndSource(lastSource, lastEventCategory, lastMetric, samples));
    }
}
}
private void outputAndResetAccumulators() {
    if (aggregatedSampleNumber != 0) {
        // TODO Assume CSV
        builder.append(",");
    }

    // Output one opcode at a time
    for (final SampleOpcode opcode : accumulators.keySet()) {
        aggregatedSampleNumber++;
        sampleProcessor.processOneSample(lastRoundedTime, opcode, accumulators.get(opcode));
    }

    // This will flush (clear) the sample consumer
    builder.append(sampleProcessor.toString());
    accumulators.clear();
}
@JsonValue
@Override
public String toString() {
    try {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        final JsonGenerator generator = objectMapper.getJsonFactory().createJsonGenerator(out);
        generator.writeStartObject();
        generator.writeFieldName("metric");
        generator.writeNumber(chunk.getMetricId());
        generator.writeFieldName("decodedSamples");
        generator.writeString(getDecodedSamples());
        generator.writeEndObject();
        generator.close();
        // The generator writes UTF-8; decode with the same charset rather than the platform default
        return out.toString("UTF-8");
    } catch (IOException e) {
        log.error("IOException in toString()", e);
    }
    return null;
}
            // A run of repeated samples: decode the value once, then emit it repeatCount times,
            // e.g. a REPEAT opcode with repeatCount 3 expands to three identical scalar samples
            final Object value = decodeScalarValue(inputStream, repeatedOpcode);
            final SampleOpcode replacementOpcode = repeatedOpcode.getReplacement();
            processor.processSamples(timeCursor, repeatCount, replacementOpcode, value);
            sampleNumber += repeatCount;
            timeCursor.skipToSampleNumber(sampleNumber);
            break;
        default:
            // A single scalar sample
            processor.processSamples(timeCursor, 1, opcode.getReplacement(), decodeScalarValue(inputStream, opcode));
            break;
@Override
protected void writeJsonForChunks(final JsonGenerator generator, final Collection<? extends TimelineChunk> chunksForSourceAndMetric) throws IOException {
    for (final TimelineChunk chunk : chunksForSourceAndMetric) {
        final String source = timelineDao.getSource(chunk.getSourceId(), context);
        final CategoryRecordIdAndMetric categoryIdAndMetric = timelineDao.getCategoryIdAndMetric(chunk.getMetricId(), context);
        final String eventCategory = timelineDao.getEventCategory(categoryIdAndMetric.getEventCategoryId(), context);
        final String metric = categoryIdAndMetric.getMetric();
        final TimeRangeSampleProcessor filter = filters.get(chunk.getSourceId()).get(chunk.getMetricId());
        final String samples = filter == null ? CSVConsumer.getSamplesAsCSV(sampleCoder, chunk) : CSVConsumer.getSamplesAsCSV(sampleCoder, chunk, filter);

        // Don't write out empty samples
        if (!Strings.isNullOrEmpty(samples)) {
            generator.writeObject(new SamplesForMetricAndSource(source, eventCategory, metric, samples));
        }
    }
}
@Override
public Void withHandle(final Handle handle) throws Exception {
    // MySQL needs special setup to make it stream the results. See:
    // http://javaquirks.blogspot.com/2007/12/mysql-streaming-result-set.html
    // http://stackoverflow.com/questions/2447324/streaming-large-result-sets-with-mysql
    final Query<Map<String, Object>> query = handle.createQuery("getStreamingAggregationCandidates")
                                                   .setFetchSize(Integer.MIN_VALUE)
                                                   .bind("aggregationLevel", aggregationLevel)
                                                   .bind("tenantRecordId", createCallContext().getTenantRecordId());
    query.setStatementLocator(new StringTemplate3StatementLocator(TimelineAggregatorSqlDao.class));

    ResultIterator<TimelineChunk> iterator = null;
    try {
        iterator = query.map(timelineChunkMapper)
                        .iterator();
        while (iterator.hasNext()) {
            aggregationConsumer.processTimelineChunk(iterator.next());
        }
    } catch (Exception e) {
        log.error(String.format("Exception during aggregation of level %d", aggregationLevel), e);
    } finally {
        if (iterator != null) {
            iterator.close();
        }
    }
    return null;
}
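// A minimal plain-JDBC sketch of the same MySQL streaming setup (not from the original source;
// the connection URL and query are hypothetical). Connector/J only streams rows one at a time
// when the statement is forward-only and read-only and the fetch size is Integer.MIN_VALUE;
// otherwise it buffers the entire result set in memory.
final Connection connection = DriverManager.getConnection("jdbc:mysql://localhost/killbill");
final Statement statement = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
statement.setFetchSize(Integer.MIN_VALUE);
final ResultSet resultSet = statement.executeQuery("select * from timeline_chunks");
while (resultSet.next()) {
    // Process one row at a time without materializing the full result set
}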
/**
 * Process sampleCount sequential samples with identical values. sampleCount will usually be 1,
 * but may be larger. Implementors may simply loop, processing the identical values one at a
 * time, but some implementations may be able to optimize the handling of repeated values.
 *
 * @param timeCursor  a TimeCursor instance, which supplies successive int UNIX times
 * @param sampleCount the count of sequential, identical values
 * @param opcode      the opcode of the sample value, which must not be a REPEAT opcode
 * @param value       the value of this kind of sample over the sampleCount samples
 */
@Override
public void processSamples(final TimelineCursor timeCursor, final int sampleCount, final SampleOpcode opcode, final Object value) {
    for (int i = 0; i < sampleCount; i++) {
        // Check if the sample is in the right time range
        final DateTime sampleTime = timeCursor.getNextTime();
        if ((startTime == null || !sampleTime.isBefore(startTime)) && (endTime == null || !sampleTime.isAfter(endTime))) {
            processOneSample(sampleTime, opcode, value);
        }
    }
}
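// A minimal sketch (not from the original source) of a TimeRangeSampleProcessor subclass that
// simply counts the samples falling inside the window. The class name is hypothetical, and the
// base-class constructor signature (startTime, endTime) is assumed from the CSVSampleProcessor
// usage above.
public class CountingSampleProcessor extends TimeRangeSampleProcessor {

    private int samplesInRange = 0;

    public CountingSampleProcessor(@Nullable final DateTime startTime, @Nullable final DateTime endTime) {
        super(startTime, endTime);
    }

    @Override
    public void processOneSample(final DateTime sampleTime, final SampleOpcode opcode, final Object value) {
        // Only invoked for samples within [startTime, endTime], thanks to processSamples above
        samplesInRange++;
    }

    public int getSamplesInRange() {
        return samplesInRange;
    }
}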
@Override
protected void writeJsonForChunks(final JsonGenerator generator, final Collection<? extends TimelineChunk> chunksForSourceAndMetric) throws IOException {
    for (final TimelineChunk chunk : chunksForSourceAndMetric) {
        final String source = timelineDao.getSource(chunk.getSourceId(), context);
        final CategoryRecordIdAndMetric categoryIdAndMetric = timelineDao.getCategoryIdAndMetric(chunk.getMetricId(), context);
        final String eventCategory = timelineDao.getEventCategory(categoryIdAndMetric.getEventCategoryId(), context);
        final String metric = categoryIdAndMetric.getMetric();
        final String samples = CSVConsumer.getSamplesAsCSV(sampleCoder, chunk);

        // Don't write out empty samples
        if (!Strings.isNullOrEmpty(samples)) {
            generator.writeObject(new SamplesForMetricAndSource(source, eventCategory, metric, samples));
        }
    }
}
}
chunkConsumer.processTimelineChunk(iterator.next());
@Override
protected void writeJsonForChunks(final JsonGenerator generator, final Collection<? extends TimelineChunk> chunksForSourceAndMetric) throws IOException {
    for (final TimelineChunk chunk : chunksForSourceAndMetric) {
        final String source = timelineDao.getSource(chunk.getSourceId(), context);
        final CategoryRecordIdAndMetric categoryIdAndMetric = timelineDao.getCategoryIdAndMetric(chunk.getMetricId(), context);
        final String eventCategory = timelineDao.getEventCategory(categoryIdAndMetric.getEventCategoryId(), context);
        final String metric = categoryIdAndMetric.getMetric();
        final String samples = CSVConsumer.getSamplesAsCSV(sampleCoder, chunk, accumulatorSampleConsumer);

        // Don't write out empty samples
        if (!Strings.isNullOrEmpty(samples)) {
            generator.writeObject(new SamplesForMetricAndSource(source, eventCategory, metric, samples));
        }

        lastSource = source;
        lastEventCategory = eventCategory;
        lastMetric = metric;
    }
}