/**
 * Persists every unit test class report from the index that contains at least
 * one executed test. Reports whose test file cannot be resolved are logged
 * and skipped.
 */
private void save(UnitTestIndex index, SensorContext context) {
  for (Map.Entry<String, UnitTestClassReport> classEntry : index.getIndexByClassname().entrySet()) {
    String classname = classEntry.getKey();
    UnitTestClassReport classReport = classEntry.getValue();
    if (classReport.getTests() <= 0) {
      continue; // nothing to record for classes without executed tests
    }
    InputFile testFile = getUnitTestResource(classname);
    if (testFile == null) {
      LOGGER.warn("Resource not found: {}", classname);
    } else {
      save(classReport, testFile, context);
    }
  }
}
/**
 * Walks the class-name index and saves each report that has executed tests
 * against its matching {@code Resource}; unresolved class names are reported
 * with a warning instead of being saved.
 */
private void save(UnitTestIndex index, SensorContext context) {
  for (Map.Entry<String, UnitTestClassReport> indexEntry : index.getIndexByClassname().entrySet()) {
    UnitTestClassReport classReport = indexEntry.getValue();
    if (classReport.getTests() <= 0) {
      continue; // skip classes with no executed tests
    }
    Resource testResource = getUnitTestResource(indexEntry.getKey());
    if (testResource != null) {
      save(classReport, testResource, context);
    } else {
      LOGGER.warn("Resource not found: {}", indexEntry.getKey());
    }
  }
}
/**
 * Saves each unit test class report that contains executed tests, resolving
 * the test {@code InputFile} through the supplied file system.
 *
 * Fix: the original silently discarded reports whose input file could not be
 * resolved. All sibling {@code save(...)} overloads in this file log a
 * warning in that case; this variant now does the same so unresolved test
 * classes are visible to the user instead of vanishing.
 */
private void save(UnitTestIndex index, SensorContext context, FileSystem myFS) {
  for (Map.Entry<String, UnitTestClassReport> entry : index.getIndexByClassname().entrySet()) {
    UnitTestClassReport report = entry.getValue();
    if (report.getTests() > 0) {
      InputFile inputFile = getUnitTestInputFile(entry.getKey(), myFS);
      if (inputFile != null) {
        save(report, inputFile, context);
      } else {
        // Consistent with the other overloads: surface the unresolved class.
        LOGGER.warn("Resource not found: {}", entry.getKey());
      }
    }
  }
}
/**
 * Saves all non-empty unit test class reports and tallies tests that surefire
 * reported with a negative duration; a single summary warning is emitted at
 * the end when any such tests were seen.
 */
private void save(UnitTestIndex index, SensorContext context) {
  long negativeDurationCount = 0;
  for (Map.Entry<String, UnitTestClassReport> classEntry : index.getIndexByClassname().entrySet()) {
    UnitTestClassReport classReport = classEntry.getValue();
    if (classReport.getTests() <= 0) {
      continue; // no executed tests for this class
    }
    negativeDurationCount += classReport.getNegativeTimeTestNumber();
    Resource testResource = getUnitTestResource(classEntry.getKey());
    if (testResource == null) {
      LOGGER.warn("Resource not found: {}", classEntry.getKey());
    } else {
      save(classReport, testResource, context);
    }
  }
  if (negativeDurationCount > 0) {
    LOGGER.warn("There is {} test(s) reported with negative time by surefire, total duration may not be accurate.", negativeDurationCount);
  }
}
/**
 * Maps the class-name index onto input files, saves every report with
 * executed tests, and warns once if surefire reported any test with a
 * negative duration.
 */
private void save(UnitTestIndex index, SensorContext context) {
  long negativeDurationCount = 0;
  Map<InputFile, UnitTestClassReport> reportsByFile = mapToInputFile(index.getIndexByClassname());
  for (Map.Entry<InputFile, UnitTestClassReport> fileEntry : reportsByFile.entrySet()) {
    UnitTestClassReport fileReport = fileEntry.getValue();
    if (fileReport.getTests() <= 0) {
      continue; // nothing executed, nothing to save
    }
    negativeDurationCount += fileReport.getNegativeTimeTestNumber();
    save(fileReport, fileEntry.getKey(), context);
  }
  if (negativeDurationCount > 0) {
    LOGGER.warn("There is {} test(s) reported with negative time by surefire, total duration may not be accurate.", negativeDurationCount);
  }
}
/**
 * Re-keys the unit test index by {@code InputFile} and stores each report
 * that has at least one executed test. Tests reported by surefire with a
 * negative time are counted and flagged with a single summary warning.
 */
private void save(UnitTestIndex index, SensorContext context) {
  long testsWithNegativeTime = 0;
  Map<InputFile, UnitTestClassReport> indexByInputFile = mapToInputFile(index.getIndexByClassname());
  for (Map.Entry<InputFile, UnitTestClassReport> entry : indexByInputFile.entrySet()) {
    UnitTestClassReport report = entry.getValue();
    if (report.getTests() > 0) {
      testsWithNegativeTime += report.getNegativeTimeTestNumber();
      save(report, entry.getKey(), context);
    }
  }
  if (testsWithNegativeTime > 0) {
    LOGGER.warn("There is {} test(s) reported with negative time by surefire, total duration may not be accurate.", testsWithNegativeTime);
  }
}
/**
 * Saves aggregated unit test measures (skipped, total, errors, failures,
 * duration and — when at least one test ran — success density) for every
 * class in the index that executed tests.
 *
 * Fix: the original passed the result of {@code getUnitTestResource} straight
 * into {@code saveMeasure}/{@code saveResults} without a null check, even
 * though the sibling overloads in this file treat a null resource as
 * possible (they warn and skip). Unresolved classes are now skipped with the
 * same warning instead of feeding a null resource downstream.
 */
private void save(UnitTestIndex index, SensorContext context) {
  for (Map.Entry<String, UnitTestClassReport> entry : index.getIndexByClassname().entrySet()) {
    UnitTestClassReport report = entry.getValue();
    if (report.getTests() > 0) {
      Resource resource = getUnitTestResource(entry.getKey());
      if (resource == null) {
        // Mirror the other save(...) overloads: warn and move on.
        LOGGER.warn("Resource not found: {}", entry.getKey());
        continue;
      }
      double testsCount = report.getTests() - report.getSkipped();
      saveMeasure(context, resource, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
      saveMeasure(context, resource, CoreMetrics.TESTS, testsCount);
      saveMeasure(context, resource, CoreMetrics.TEST_ERRORS, report.getErrors());
      saveMeasure(context, resource, CoreMetrics.TEST_FAILURES, report.getFailures());
      saveMeasure(context, resource, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
      double passedTests = testsCount - report.getErrors() - report.getFailures();
      if (testsCount > 0) {
        double percentage = passedTests * 100d / testsCount;
        saveMeasure(context, resource, CoreMetrics.TEST_SUCCESS_DENSITY, ParsingUtils.scaleValue(percentage));
      }
      saveResults(context, resource, report);
    }
  }
}
/**
 * Writes the per-file unit test measures (skipped, executed, errors,
 * failures, duration) for one class report, then records the detailed test
 * results for the file.
 */
private void save(UnitTestClassReport report, InputFile inputFile, SensorContext context) {
  // Executed tests exclude the skipped ones.
  int executedTests = report.getTests() - report.getSkipped();
  saveMeasure(context, inputFile, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
  saveMeasure(context, inputFile, CoreMetrics.TESTS, executedTests);
  saveMeasure(context, inputFile, CoreMetrics.TEST_ERRORS, report.getErrors());
  saveMeasure(context, inputFile, CoreMetrics.TEST_FAILURES, report.getFailures());
  saveMeasure(context, inputFile, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
  saveResults(inputFile, report);
}
/**
 * Stores the standard unit test metrics for a single test file and then
 * saves the individual test results attached to the report.
 */
private void save(UnitTestClassReport report, InputFile inputFile, SensorContext context) {
  int runTests = report.getTests() - report.getSkipped(); // total minus skipped
  saveMeasure(context, inputFile, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
  saveMeasure(context, inputFile, CoreMetrics.TESTS, runTests);
  saveMeasure(context, inputFile, CoreMetrics.TEST_ERRORS, report.getErrors());
  saveMeasure(context, inputFile, CoreMetrics.TEST_FAILURES, report.getFailures());
  saveMeasure(context, inputFile, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
  saveResults(inputFile, report);
}
/**
 * Saves the unit test metrics of one class report onto a resource, including
 * the success-density percentage when at least one test actually ran, then
 * stores the detailed results.
 */
private void save(UnitTestClassReport report, Resource resource, SensorContext context) {
  double executedTests = report.getTests() - report.getSkipped();
  saveMeasure(context, resource, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
  saveMeasure(context, resource, CoreMetrics.TESTS, executedTests);
  saveMeasure(context, resource, CoreMetrics.TEST_ERRORS, report.getErrors());
  saveMeasure(context, resource, CoreMetrics.TEST_FAILURES, report.getFailures());
  saveMeasure(context, resource, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
  double successfulTests = executedTests - report.getErrors() - report.getFailures();
  if (executedTests > 0) {
    // Percentage of executed tests that neither errored nor failed.
    double successRate = successfulTests * 100d / executedTests;
    saveMeasure(context, resource, CoreMetrics.TEST_SUCCESS_DENSITY, ParsingUtils.scaleValue(successRate));
  }
  saveResults(context, resource, report);
}
/**
 * Records all unit test measures for a test input file. The success density
 * (share of passed tests, scaled via {@code ParsingUtils.scaleValue}) is only
 * written when at least one test was executed.
 */
private void save(UnitTestClassReport report, InputFile inputFile, SensorContext context) {
  double executed = report.getTests() - report.getSkipped();
  saveMeasure(context, inputFile, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
  saveMeasure(context, inputFile, CoreMetrics.TESTS, executed);
  saveMeasure(context, inputFile, CoreMetrics.TEST_ERRORS, report.getErrors());
  saveMeasure(context, inputFile, CoreMetrics.TEST_FAILURES, report.getFailures());
  saveMeasure(context, inputFile, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
  double passed = executed - report.getErrors() - report.getFailures();
  if (executed > 0) {
    saveMeasure(context, inputFile, CoreMetrics.TEST_SUCCESS_DENSITY, ParsingUtils.scaleValue(passed * 100d / executed));
  }
  saveResults(context, inputFile, report);
}
/**
 * Saves the aggregated unit test measures for one test file: skipped,
 * executed, errors, failures, duration, and — when any tests ran — the
 * success-density percentage.
 *
 * NOTE(review): unlike the sibling overloads, this variant does not call
 * saveResults; presumably intentional for this code path — confirm.
 */
private void save(UnitTestClassReport report, InputFile resource, SensorContext context) {
  double executedTests = report.getTests() - report.getSkipped();
  saveMeasure(context, resource, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
  saveMeasure(context, resource, CoreMetrics.TESTS, executedTests);
  saveMeasure(context, resource, CoreMetrics.TEST_ERRORS, report.getErrors());
  saveMeasure(context, resource, CoreMetrics.TEST_FAILURES, report.getFailures());
  saveMeasure(context, resource, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
  double passedTests = executedTests - report.getErrors() - report.getFailures();
  if (executedTests > 0) {
    double successPercentage = passedTests * 100d / executedTests;
    saveMeasure(context, resource, CoreMetrics.TEST_SUCCESS_DENSITY, ParsingUtils.scaleValue(successPercentage));
  }
}
/**
 * Persists the standard unit test measures for one class report onto its
 * resource, adds the success density when tests were executed, and finally
 * stores the individual results via {@code saveResults(resource, report)}.
 */
private void save(UnitTestClassReport report, Resource resource, SensorContext context) {
  double runCount = report.getTests() - report.getSkipped();
  saveMeasure(context, resource, CoreMetrics.SKIPPED_TESTS, report.getSkipped());
  saveMeasure(context, resource, CoreMetrics.TESTS, runCount);
  saveMeasure(context, resource, CoreMetrics.TEST_ERRORS, report.getErrors());
  saveMeasure(context, resource, CoreMetrics.TEST_FAILURES, report.getFailures());
  saveMeasure(context, resource, CoreMetrics.TEST_EXECUTION_TIME, report.getDurationMilliseconds());
  double passedCount = runCount - report.getErrors() - report.getFailures();
  if (runCount > 0) {
    // Density = passed / executed, expressed as a percentage.
    saveMeasure(context, resource, CoreMetrics.TEST_SUCCESS_DENSITY, ParsingUtils.scaleValue(passedCount * 100d / runCount));
  }
  saveResults(resource, report);
}