/**
 * {@inheritDoc}
 * <p>
 * Delegates the decision to {@link #isInTimeframe(DefaultData)}: only data whose
 * timestamp falls inside this processor's time frame is forwarded to the chained
 * processors.
 */
@Override
protected boolean shouldBePassedToChainedProcessors(DefaultData defaultData) {
	return isInTimeframe(defaultData);
}
/**
 * {@inheritDoc}
 * <p>
 * Only {@link InvocationSequenceData} carries nested data worth extracting; any
 * other type is ignored. No write futures are produced by this processor itself.
 */
@Override
protected Collection<Future<Void>> processData(DefaultData defaultData) {
	if (!(defaultData instanceof InvocationSequenceData)) {
		return Collections.emptyList();
	}
	extractDataFromInvocation((InvocationSequenceData) defaultData);
	return Collections.emptyList();
}
// NOTE(review): fragment of a larger method (enclosing declaration not visible here).
// Register every data class that is stored as-is, without aggregation.
Collections.addAll(classesToSave, InvocationSequenceData.class, HttpTimerData.class, ExceptionSensorData.class, MemoryInformationData.class, CpuInformationData.class,
		ClassLoadingInformationData.class, ThreadInformationData.class, SystemInformationData.class);
DataSaverProcessor dataSaverProcessor = new DataSaverProcessor(classesToSave, true);
normalProcessors.add(dataSaverProcessor);
// Timer and SQL data are aggregated (5000ms period) instead of saved raw.
normalProcessors.add(new DataAggregatorProcessor<>(TimerData.class, 5000, new TimerDataAggregator(), true));
normalProcessors.add(new DataAggregatorProcessor<>(SqlStatementData.class, 5000, new SqlStatementDataAggregator(true), true));
// The extractor chains to a snapshot of the processors registered so far, so data
// pulled out of invocations is saved/aggregated by the same pipeline.
List<AbstractDataProcessor> chainedProcessorsForExtractor = new ArrayList<>();
chainedProcessorsForExtractor.addAll(normalProcessors);
InvocationExtractorDataProcessor invocationExtractorDataProcessor = new InvocationExtractorDataProcessor(chainedProcessorsForExtractor);
normalProcessors.add(invocationExtractorDataProcessor);
normalProcessors.add(new InvocationClonerDataProcessor());
/**
 * Verifies that the {@link DataSaverProcessor} refuses a data type that was not
 * registered with it and never touches the storage writer.
 */
@Test
public void testSimpleSaverExclusive() {
	List<Class<? extends DefaultData>> registeredTypes = new ArrayList<>();
	registeredTypes.add(InvocationSequenceData.class);
	DataSaverProcessor processor = new DataSaverProcessor(registeredTypes, true);
	processor.setStorageWriter(storageWriter);

	TimerData notRegistered = new TimerData();
	assertThat(processor.canBeProcessed(notRegistered), is(false));

	Collection<Future<Void>> futures = processor.process(notRegistered);
	assertThat(futures, is(empty()));
	verifyZeroInteractions(storageWriter);
}
/**
 * Verifies that the {@link DataAggregatorProcessor} aggregates repeated identical
 * {@link TimerData} into a single element: after processing many copies, flushing
 * yields exactly one future and exactly one write to the storage writer.
 */
@Test
public void dataAggregatorProcessorAggregation() {
	final int aggregationPeriod = 100;
	DataAggregatorProcessor<TimerData> aggregator = new DataAggregatorProcessor<>(TimerData.class, aggregationPeriod, new TimerDataAggregator(), true);
	aggregator.setStorageWriter(storageWriter);

	Random rnd = new Random();
	long platformIdent = rnd.nextLong();
	long sensorTypeIdent = rnd.nextLong();
	long methodIdent = rnd.nextLong();
	TimerData timerData = new TimerData(new Timestamp(System.currentTimeMillis()), platformIdent, sensorTypeIdent, methodIdent);
	assertThat(aggregator.canBeProcessed(timerData), is(true));

	final int elements = 1000;
	int processCount = elements / 2;
	for (int i = 0; i < processCount; i++) {
		aggregator.process(timerData);
	}

	Collection<Future<Void>> futures = aggregator.flush();
	assertThat(futures, hasSize(1));
	verify(storageWriter, times(1)).write(Matchers.<TimerData> anyObject());
}
/**
 * Verifies the {@link InvocationClonerDataProcessor}: it accepts only invocation
 * sequences, writes the cloned sequence to the writer and never writes the
 * original instance.
 */
@Test
public void invocationClonerDataProcessor() {
	InvocationClonerDataProcessor processor = new InvocationClonerDataProcessor();
	IWriter writer = mock(IWriter.class);
	processor.setStorageWriter(writer);

	InvocationSequenceData invocation = mock(InvocationSequenceData.class);
	DefaultData otherData = mock(DefaultData.class);
	assertThat(processor.canBeProcessed(invocation), is(true));
	assertThat(processor.canBeProcessed(otherData), is(false));

	processor.process(invocation);
	processor.process(otherData);

	verify(invocation, times(1)).getClonedInvocationSequence();
	verify(writer, times(1)).write(invocation.getClonedInvocationSequence());
	verify(writer, times(0)).write(invocation);
}
/**
 * {@inheritDoc}
 * <p>
 * Aggregates the incoming data into a cached element. The cache key is derived
 * from the data plus an altered timestamp (presumably aligned to the aggregation
 * period so elements of one period collide — TODO confirm in getAlteredTimestamp).
 * On a cache miss a clone is inserted via putIfAbsent so concurrent inserts for
 * the same key keep exactly one winner; the winner is also queued for eventual
 * write-out, and the oldest element is evicted once maxElements is exceeded.
 */
@Override
@SuppressWarnings("unchecked")
protected Collection<Future<Void>> processData(DefaultData defaultData) {
	E timerData = (E) defaultData;
	long alteredTimestamp = getAlteredTimestamp(timerData);
	int cacheHash = getCacheHash(timerData, alteredTimestamp);
	IAggregatedData<E> aggData = map.get(cacheHash);
	if (null == aggData) {
		// cache miss: try to install a fresh clone as the aggregation target
		aggData = clone(timerData, alteredTimestamp);
		IAggregatedData<E> insertedData = map.putIfAbsent(cacheHash, aggData);
		// if put happened null will be returned
		if (null == insertedData) {
			// we won the race: track the element and enforce the size bound
			queue.add(aggData);
			int count = elementCount.incrementAndGet();
			if (maxElements < count) {
				this.writeOldest();
			}
		} else {
			// another thread inserted first; aggregate into its element
			aggData = insertedData;
		}
	}
	dataAggregator.aggregate(aggData, timerData);
	return Collections.emptyList();
}
/**
 * Initializes the fixtures shared by all tests in this class: the storage data,
 * the collected-invocations list and a {@link DataSaverProcessor} registered for
 * invocation sequences only.
 */
@BeforeClass
public void createStorageData() {
	createdInvocations = new ArrayList<>();
	storageData = getStorageData();

	List<Class<? extends DefaultData>> savedTypes = new ArrayList<>();
	savedTypes.add(InvocationSequenceData.class);
	dataSaverProcessor = new DataSaverProcessor(savedTypes, true);
}
/**
 * Verifies that the {@link AgentFilterDataProcessor} accepts all data but only
 * delegates to the chained processor when the platform ident is in the agent set.
 */
@Test
public void agentFilterDataProcessor() {
	AbstractDataProcessor chained = mock(AbstractDataProcessor.class);
	AgentFilterDataProcessor filter = new AgentFilterDataProcessor(Collections.singletonList(chained), Collections.singleton(10L));

	DefaultData matching = mock(DefaultData.class);
	when(matching.getPlatformIdent()).thenReturn(10L);
	DefaultData nonMatching = mock(DefaultData.class);
	when(nonMatching.getPlatformIdent()).thenReturn(20L);

	filter.process(matching);
	filter.process(nonMatching);

	// the filter itself claims it can process anything
	assertThat(filter.canBeProcessed(matching), is(true));
	assertThat(filter.canBeProcessed(nonMatching), is(true));
	// but only the matching agent's data reaches the chained processor
	verify(chained, times(1)).process(matching);
	verify(chained, times(0)).process(nonMatching);
}
/**
 * Verifies that the {@link TimeFrameDataProcessor} delegates only data whose
 * timestamp lies within the [from, to] frame (boundaries inclusive) and drops
 * everything outside of it.
 */
@Test
public void testTimeframeProcessor() {
	long time = 10000000;
	long past = time - 1000;
	long future = time + 1000;

	DefaultData defaultData = mock(DefaultData.class);
	AbstractDataProcessor chained = mock(AbstractDataProcessor.class);
	List<AbstractDataProcessor> chainedProcessors = new ArrayList<>();
	chainedProcessors.add(chained);
	TimeFrameDataProcessor processor = new TimeFrameDataProcessor(new Date(past), new Date(future), chainedProcessors);
	assertThat(processor.canBeProcessed(defaultData), is(true));

	// timestamps inside the frame (including both boundaries) must be delegated
	for (long stamp : new long[] { time, past, future }) {
		Mockito.when(defaultData.getTimeStamp()).thenReturn(new Timestamp(stamp));
		processor.process(defaultData);
	}
	verify(chained, times(3)).process(defaultData);

	// timestamps outside the frame must be dropped — delegation count stays at 3
	for (long stamp : new long[] { past - 1000, future + 1000 }) {
		Mockito.when(defaultData.getTimeStamp()).thenReturn(new Timestamp(stamp));
		processor.process(defaultData);
	}
	verify(chained, times(3)).process(defaultData);
}
/**
 * Writes the oldest data to the storage.
 * <p>
 * Removes the head of the queue, evicts its entry from the aggregation map,
 * decrements the element counter and hands the data to the storage writer.
 * <p>
 * NOTE(review): assumes the queue is non-empty when called; if {@code queue.poll()}
 * ever returned {@code null} (e.g. concurrent flush draining the queue) the
 * {@code oldest.getData()} call would throw a NPE — confirm callers guarantee this.
 */
private void writeOldest() {
	IAggregatedData<E> oldest = queue.poll();
	E data = oldest.getData();
	map.remove(getCacheHash(data, data.getTimeStamp().getTime()));
	elementCount.decrementAndGet();
	passToStorageWriter(data);
}
/**
 * Returns a properly initialized {@link TimeFrameDataProcessor} bounded by this
 * object's from/to dates.
 *
 * @param chainedProcessors
 *            Processors that need to be chained to the {@link TimeFrameDataProcessor}.
 * @return {@link TimeFrameDataProcessor}
 * @see AbstractChainedDataProcessor
 */
public TimeFrameDataProcessor getTimeFrameDataProcessor(Collection<AbstractDataProcessor> chainedProcessors) {
	// defensive copy so later changes to the caller's collection have no effect
	List<AbstractDataProcessor> processors = new ArrayList<>(chainedProcessors);
	return new TimeFrameDataProcessor(getFromDate(), getToDate(), processors);
}
/**
 * Recursively extracts the timer, SQL and exception data attached to the given
 * invocation (and all of its nested invocations) and passes each piece to the
 * chained processors.
 *
 * @param invocation
 *            {@link InvocationSequenceData} to extract from.
 */
private void extractDataFromInvocation(InvocationSequenceData invocation) {
	if (invocation.getTimerData() != null) {
		passToChainedProcessors(invocation.getTimerData());
	}
	if (invocation.getSqlStatementData() != null) {
		passToChainedProcessors(invocation.getSqlStatementData());
	}
	if (invocation.getExceptionSensorDataObjects() != null) {
		for (ExceptionSensorData exceptionData : invocation.getExceptionSensorDataObjects()) {
			// only exception data carrying the CREATED event is extracted
			if (ExceptionEvent.CREATED == exceptionData.getExceptionEvent()) {
				passToChainedProcessors(exceptionData);
			}
		}
	}
	// depth-first descent into the whole invocation tree
	for (InvocationSequenceData nested : invocation.getNestedSequences()) {
		extractDataFromInvocation(nested);
	}
}
// NOTE(review): fragment of a loop body (enclosing method and loop not visible
// here; the closing brace of this if lies outside this chunk). The cast suggests
// an instanceof check precedes it — confirm in the surrounding code.
TimeFrameDataProcessor timeFrameDataProcessor = (TimeFrameDataProcessor) dataProcessor;
// adopt the processor's frame when its start is later than the current one,
// then stop searching
if (timeFrameDataProcessor.getFromDate().after(fromDate)) {
	fromDate = timeFrameDataProcessor.getFromDate();
	toDate = timeFrameDataProcessor.getToDate();
	break;
/**
 * {@inheritDoc}
 * <p>
 * Writes a clone of the invocation sequence to the storage writer; other data
 * types are ignored. Returns the writer's future when one was produced.
 */
@Override
protected Collection<Future<Void>> processData(DefaultData defaultData) {
	if (!(defaultData instanceof InvocationSequenceData)) {
		return Collections.emptyList();
	}
	InvocationSequenceData clone = ((InvocationSequenceData) defaultData).getClonedInvocationSequence();
	Future<Void> future = getStorageWriter().write(clone);
	if (future == null) {
		return Collections.emptyList();
	}
	return Collections.singleton(future);
}
/** * {@inheritDoc} */ @Override protected Collection<Future<Void>> processData(DefaultData defaultData) { // if I am writing the InvocationAwareData and invocations are not saved // make sure we don't save the invocation affiliation if ((defaultData instanceof InvocationAwareData) && !writeInvocationAffiliation) { Map<String, Boolean> kryoPreferences = new HashMap<>(1); kryoPreferences.put(KryoSerializationPreferences.WRITE_INVOCATION_AFFILIATION_DATA, Boolean.FALSE); Future<Void> future = getStorageWriter().write(defaultData, kryoPreferences); if (null != future) { return Collections.singleton(future); } } else { Future<Void> future = getStorageWriter().write(defaultData); if (null != future) { return Collections.singleton(future); } } return Collections.emptyList(); }
/** * Passes data to StorageWriter to be written. * * @param data * Data to be written. * @return {@link Future} received from Storage writer. */ private Future<Void> passToStorageWriter(DefaultData data) { // clear aggregated ids when saving to storage if (data instanceof IIdsAwareAggregatedData) { ((IIdsAwareAggregatedData<?>) data).clearAggregatedIds(); } // if I am writing the InvocationAwareData and invocations are not saved // make sure we don't save the invocation affiliation if (!writeInvocationAffiliation) { Map<String, Boolean> kryoPreferences = new HashMap<>(1); kryoPreferences.put(KryoSerializationPreferences.WRITE_INVOCATION_AFFILIATION_DATA, Boolean.FALSE); return getStorageWriter().write(data, kryoPreferences); } else { return getStorageWriter().write(data); } }
normalProcessors.add(new DataSaverProcessor(saveClassesList, writeInvocationAffiliation)); if (saveClassesList.contains(TimerData.class)) { saveClassesList.remove(TimerData.class); DataAggregatorProcessor<TimerData> dataAggregatorProcessor = new DataAggregatorProcessor<>(TimerData.class, aggregationPeriod, new TimerDataAggregator(), writeInvocationAffiliation); normalProcessors.add(dataAggregatorProcessor); DataAggregatorProcessor<SqlStatementData> dataAggregatorProcessor = new DataAggregatorProcessor<>(SqlStatementData.class, aggregationPeriod, new SqlStatementDataAggregator(true), writeInvocationAffiliation); normalProcessors.add(dataAggregatorProcessor); List<AbstractDataProcessor> chainedProcessorsForExtractor = new ArrayList<>(); chainedProcessorsForExtractor.addAll(normalProcessors); InvocationExtractorDataProcessor invocationExtractorDataProcessor = new InvocationExtractorDataProcessor(chainedProcessorsForExtractor); normalProcessors.add(invocationExtractorDataProcessor); normalProcessors.add(new InvocationClonerDataProcessor());
/**
 * Verifies that the {@link DataSaverProcessor} saves data whose type is
 * registered with it: the write reaches the storage writer and exactly one
 * future is returned.
 */
@Test
public void testSimpleSaverInclusive() {
	List<Class<? extends DefaultData>> registeredTypes = new ArrayList<>();
	registeredTypes.add(InvocationSequenceData.class);
	DataSaverProcessor processor = new DataSaverProcessor(registeredTypes, true);
	processor.setStorageWriter(storageWriter);

	InvocationSequenceData invocation = new InvocationSequenceData();
	assertThat(processor.canBeProcessed(invocation), is(true));

	Collection<Future<Void>> futures = processor.process(invocation);
	assertThat(futures, hasSize(1));
	verify(storageWriter, times(1)).write(invocation);
}
/**
 * {@inheritDoc}
 * <p>
 * Drains the queue completely: each remaining aggregated element is evicted from
 * the cache map, the element counter is decremented and the data is handed to the
 * storage writer. Null futures from the writer are skipped, so the returned
 * collection contains only real pending writes.
 */
@Override
public Collection<Future<Void>> flush() {
	Collection<Future<Void>> futures = new ArrayList<>();
	IAggregatedData<E> oldest = queue.poll();
	while (null != oldest) {
		E data = oldest.getData();
		// evict from the cache before writing so no further aggregation
		// happens into an element that is being flushed
		map.remove(getCacheHash(data, data.getTimeStamp().getTime()));
		elementCount.decrementAndGet();
		Future<Void> future = passToStorageWriter(data);
		CollectionUtils.addIgnoreNull(futures, future);
		oldest = queue.poll();
	}
	return futures;
}