/**
 * Supplies the {@code TimelineDao} used by the application: the DBI-backed
 * {@link DefaultTimelineDao} wrapped in a {@link CachingTimelineDao} so that
 * repeated lookups are served from the in-memory cache.
 */
@Override
public TimelineDao get() {
    return new CachingTimelineDao(new DefaultTimelineDao(dbi));
}
}
/**
 * Buffers the given chunk, flushing the buffer to the database in bulk once it
 * reaches {@code CREATE_BATCH_SIZE} entries. The buffer is cleared after each flush.
 *
 * @param timelineChunkList mutable buffer of pending chunks (shared across calls)
 * @param timelineChunk     the chunk to enqueue for insertion
 */
private void addChunkAndMaybeSave(final List<TimelineChunk> timelineChunkList, final TimelineChunk timelineChunk) {
    timelineChunkList.add(timelineChunk);
    if (timelineChunkList.size() >= CREATE_BATCH_SIZE) {
        defaultTimelineDAO.bulkInsertTimelineChunks(timelineChunkList, internalCallContext);
        timelineChunkList.clear();
        // Fix: parameterized loggers substitute "{}", not printf-style "%d" —
        // the original message was emitted verbatim with the count never filled in.
        log.info("Inserted {} TimelineChunk rows", timelineChunkIdCounter.get());
    }
}
// NOTE(review): these statements are excerpts of a larger setup routine; the
// enclosing method header and the loop variables (i, eventCategoryId) are
// declared outside this view, so the code is documented but left unchanged.
this.defaultTimelineDAO = new DefaultTimelineDao(dbi);
// The caching DAO wraps the DBI-backed one (same pattern as the provider above).
this.timelineDAO = new CachingTimelineDao(defaultTimelineDAO);
log.info("DBI initialized");
// Register a synthetic source name ("host-1", "host-2", ...) and remember it locally.
final String hostName = String.format("host-%d", i + 1);
hostNames.add(hostName);
defaultTimelineDAO.getOrAddSource(hostName, internalCallContext);
// Register a synthetic event category ("category-0", "category-1", ...).
final String category = String.format("category-%d", i);
categoryNames.add(category);
defaultTimelineDAO.getOrAddEventCategory(category, internalCallContext);
// Register a metric (sample kind) under the category identified by eventCategoryId.
final String sampleKind = String.format("%s-sample-kind-%d", eventCategories.get(eventCategoryId), i + 1);
categoriesAndSampleKinds.add(new CategoryRecordIdAndMetric(eventCategoryId, sampleKind));
defaultTimelineDAO.getOrAddMetric(eventCategoryId, sampleKind, internalCallContext);
/**
 * Builds the aggregator under test against a MySQL-backed DAO, configured to
 * aggregate whenever two chunks are available at each aggregation level.
 *
 * @throws Exception if DBI or configuration setup fails
 */
@BeforeMethod(groups = "mysql")
public void setUp() throws Exception {
    timelineDao = new DefaultTimelineDao(getDBI());

    // "2,2" => aggregate as soon as two chunks exist, at both levels.
    final Properties systemProps = System.getProperties();
    systemProps.put("killbill.usage.timelines.chunksToAggregate", "2,2");
    final MeterConfig meterConfig = new ConfigurationObjectFactory(systemProps).build(MeterConfig.class);

    aggregator = new TimelineAggregator(getDBI(), timelineDao, timelineCoder, sampleCoder, meterConfig, internalCallContextFactory);
}
/** * This method simulates adding a ton of timelines, in more-or-less the way they would be added in real life. */ private void insertManyTimelines() throws Exception { final List<TimelineChunk> timelineChunkList = new ArrayList<TimelineChunk>(); DateTime startTime = new DateTime().minusDays(1); DateTime endTime = startTime.plusHours(1); final int sampleCount = 120; // 1 hours worth for (int i = 0; i < 12; i++) { for (final int hostId : hostIds) { for (final int categoryId : categoriesForHostId.get(hostId)) { final List<DateTime> dateTimes = new ArrayList<DateTime>(sampleCount); for (int sc = 0; sc < sampleCount; sc++) { dateTimes.add(startTime.plusSeconds(sc * 30)); } final byte[] timeBytes = timelineCoder.compressDateTimes(dateTimes); for (final int sampleKindId : categorySampleKindIds.get(categoryId)) { final TimelineChunk timelineChunk = makeTimelineChunk(hostId, sampleKindId, startTime, endTime, timeBytes, sampleCount); addChunkAndMaybeSave(timelineChunkList, timelineChunk); } } } if (timelineChunkList.size() > 0) { defaultTimelineDAO.bulkInsertTimelineChunks(timelineChunkList, internalCallContext); } log.info("After hour %d, inserted %d TimelineChunk rows", i, timelineChunkIdCounter.get()); startTime = endTime; endTime = endTime.plusHours(1); } }
// NOTE(review): this test method continues past the end of this excerpt; only
// its setup is visible, so it is documented but otherwise left unchanged.
@Test(groups = "slow")
public void testGetSampleKindsByHostName() throws Exception {
    // DAO without the caching wrapper, so lookups go straight to the database.
    final TimelineDao dao = new DefaultTimelineDao(getDBI());
    // A two-second window starting "now" in UTC.
    final DateTime startTime = new DateTime(DateTimeZone.UTC);
    final DateTime endTime = startTime.plusSeconds(2);