// Seed two one-hour chunks: one starting 125 minutes ago, one starting 60 minutes ago.
createAOneHourTimelineChunk(125);
createAOneHourTimelineChunk(60);
// Expectations on the raw (unaggregated) chunks.
checkSamplesForATimeline(185, 126, 0);
checkSamplesForATimeline(185, 125, 2);
checkSamplesForATimeline(64, 61, 0);
checkSamplesForATimeline(125, 65, 2);
checkSamplesForATimeline(60, 0, 2);
checkSamplesForATimeline(125, 0, 4);
checkSamplesForATimeline(124, 0, 4);
checkSamplesForATimeline(124, 66, 2);
// NOTE(review): the expected counts change below (e.g. 64..61 goes 0 -> 2, 125..0 goes 4 -> 2),
// which only makes sense if aggregation runs between the two rounds of checks — presumably an
// aggregator invocation sits here in the full file; confirm against the surrounding method.
checkSamplesForATimeline(185, 126, 0);
checkSamplesForATimeline(185, 125, 2);
checkSamplesForATimeline(64, 61, 2);
checkSamplesForATimeline(125, 65, 2);
checkSamplesForATimeline(60, 0, 2);
checkSamplesForATimeline(125, 0, 2);
checkSamplesForATimeline(124, 0, 2);
checkSamplesForATimeline(124, 66, 2);
/**
 * Per-test setup: wires a fresh timeline DAO against the test DBI and builds a
 * {@code TimelineAggregator} configured (via the system property
 * {@code killbill.usage.timelines.chunksToAggregate}) to aggregate chunks two
 * at a time at each aggregation level.
 *
 * @throws Exception if configuration binding or DAO construction fails
 */
@BeforeMethod(groups = "mysql")
public void setUp() throws Exception {
    timelineDao = new DefaultTimelineDao(getDBI());
    final Properties props = System.getProperties();
    // Two chunks per aggregation pass, for two levels ("2,2")
    props.put("killbill.usage.timelines.chunksToAggregate", "2,2");
    final MeterConfig meterConfig = new ConfigurationObjectFactory(props).build(MeterConfig.class);
    aggregator = new TimelineAggregator(getDBI(), timelineDao, timelineCoder, sampleCoder,
                                        meterConfig, internalCallContextFactory);
}
private void createAOneHourTimelineChunk(final int startTimeMinutesAgo) throws IOException { final DateTime firstSampleTime = START_TIME.minusMinutes(startTimeMinutesAgo); final TimelineSourceEventAccumulator accumulator = new TimelineSourceEventAccumulator(timelineDao, timelineCoder, sampleCoder, hostId, EVENT_TYPE_ID, firstSampleTime, internalCallContextFactory); // 120 samples per hour for (int i = 0; i < 120; i++) { final DateTime eventDateTime = firstSampleTime.plusSeconds(i * 30); final Map<Integer, ScalarSample> event = createEvent(eventDateTime.getMillis()); final SourceSamplesForTimestamp samples = new SourceSamplesForTimestamp(hostId, EVENT_TYPE, eventDateTime, event); accumulator.addSourceSamples(samples); } accumulator.extractAndQueueTimelineChunks(); }