@Override
public boolean apply(InputFile f) {
  // True when the candidate file sits at exactly the absolute path we are looking for.
  final String candidate = f.absolutePath();
  return path.equals(candidate);
}
/**
 * Computes an MD5 hash of each line of the file, after stripping all blank
 * characters, and hands every hash to the supplied consumer.
 *
 * @param f        the file whose line hashes are computed
 * @param consumer receives one hash per line
 * @throws IllegalStateException if the file cannot be read
 */
public static void computeLineHashesForIssueTracking(InputFile f, LineHashConsumer consumer) {
  try {
    CharHandler[] handlers = {new LineHashComputer(consumer, f.file())};
    readFile(f.inputStream(), f.charset(), f.absolutePath(), handlers);
  } catch (IOException e) {
    throw new IllegalStateException("Failed to compute line hashes for " + f.absolutePath(), e);
  }
}
}
/**
 * Iterates over every {@code <file>} element of the report, resolves each
 * referenced file against the project file system, and parses its nested
 * test cases into the file's test plan.
 */
private void parseFiles(SMInputCursor fileCursor, SensorContext context) throws XMLStreamException {
  while (fileCursor.getNext() != null) {
    checkElementName(fileCursor, "file");
    String reportedPath = mandatoryAttribute(fileCursor, "path");
    InputFile file = context.fileSystem().inputFile(context.fileSystem().predicates().hasPath(reportedPath));
    if (file == null) {
      // Unknown file: remember the first few offending paths for later reporting, then skip it.
      numberOfUnknownFiles++;
      if (numberOfUnknownFiles <= MAX_STORED_UNKNOWN_FILE_PATHS) {
        firstUnknownFiles.add(reportedPath);
      }
      continue;
    }
    Preconditions.checkState(
      file.language() != null,
      "Line %s of report refers to a file with an unknown language: %s",
      fileCursor.getCursorLocation().getLineNumber(),
      reportedPath);
    Preconditions.checkState(
      file.type() != InputFile.Type.MAIN,
      "Line %s of report refers to a file which is not configured as a test file: %s",
      fileCursor.getCursorLocation().getLineNumber(),
      reportedPath);
    matchedFileKeys.add(file.absolutePath());
    MutableTestPlan testPlan = testPlanBuilder.loadPerspective(MutableTestPlan.class, file);
    SMInputCursor testCaseCursor = fileCursor.childElementCursor();
    while (testCaseCursor.getNext() != null) {
      parseTestCase(testCaseCursor, testPlan);
    }
  }
}
/**
 * Registers the CPD blocks of one file in the in-memory index and, when
 * cross-project duplication is enabled, also serializes them into the
 * scanner report.
 *
 * @throws UnsupportedOperationException if CPD tokens were already saved for this file
 */
public void insert(InputFile inputFile, Collection<Block> blocks) {
  if (settings.isCrossProjectDuplicationEnabled()) {
    int componentId = ((DefaultInputFile) inputFile).scannerId();
    if (publisher.getWriter().hasComponentData(FileStructure.Domain.CPD_TEXT_BLOCKS, componentId)) {
      throw new UnsupportedOperationException("Trying to save CPD tokens twice for the same file is not supported: " + inputFile.absolutePath());
    }
    // A single protobuf builder is reset and reused for every block (pipeline is sequential).
    final ScannerReport.CpdTextBlock.Builder protoBuilder = ScannerReport.CpdTextBlock.newBuilder();
    publisher.getWriter().writeCpdTextBlocks(componentId, blocks.stream()
      .map(b -> {
        protoBuilder.clear();
        return protoBuilder
          .setStartLine(b.getStartLine())
          .setEndLine(b.getEndLine())
          .setStartTokenIndex(b.getStartUnit())
          .setEndTokenIndex(b.getEndUnit())
          .setHash(b.getBlockHash().toHexString())
          .build();
      })
      .collect(Collectors.toList()));
  }
  for (Block block : blocks) {
    mem.insert(block);
  }
  if (blocks.isEmpty()) {
    LOG.debug("Not enough content in '{}' to have CPD blocks, it will not be part of the duplication detection", inputFile.relativePath());
  }
  indexedFiles.add(inputFile);
}
@Override
public String canonicalPath() {
  // The canonical form of this resource's path is the wrapped file's absolute path.
  final String absolute = inputFile.absolutePath();
  return absolute;
}
}
@Override
public String canonicalPath() {
  // Delegate: the canonical path is simply the underlying file's absolute path.
  return this.inputFile.absolutePath();
}
}
@Override
public boolean apply(InputFile f) {
  // Match by absolute path against the path this predicate was built with.
  return this.path.equals(f.absolutePath());
}
@Override
public boolean apply(InputFile f) {
  // Accept exactly the file whose absolute path equals the configured path.
  final String filePath = f.absolutePath();
  return path.equals(filePath);
}
/**
 * {@inheritDoc}
 *
 * <p>A resource is processed only when {@code DelphiUtils} accepts its absolute path.
 */
@Override
public boolean executeOnResource(InputFile resource) {
  final String absolutePath = resource.absolutePath();
  return DelphiUtils.acceptFile(absolutePath);
}
/**
 * {@inheritDoc}
 *
 * <p>Delegates the decision to {@code DelphiUtils.acceptFile} on the resource's absolute path.
 */
@Override
public boolean executeOnResource(InputFile resource) {
  return DelphiUtils.acceptFile(resource.absolutePath());
}
/**
 * {@inheritDoc}
 *
 * <p>Runs on a resource only if its absolute path passes the Delphi file filter.
 */
@Override
public boolean executeOnResource(InputFile resource) {
  String pathOnDisk = resource.absolutePath();
  return DelphiUtils.acceptFile(pathOnDisk);
}
/**
 * Stops the progress report and warns about every file for which no blame
 * information could be retrieved.
 */
public void finish() {
  progressReport.stop(count + "/" + total + " files analyzed");
  if (allFilesToBlame.isEmpty()) {
    return;
  }
  LOG.warn("Missing blame information for the following files:");
  for (InputFile missing : allFilesToBlame) {
    LOG.warn(" * " + missing.absolutePath());
  }
  LOG.warn("This may lead to missing/broken features in SonarQube");
}
}
/**
 * Creates an analyzer over the given source files, indexing each input file
 * by its absolute path and collecting the underlying {@code java.io.File}
 * handles for later processing.
 */
public GMetricsSourceAnalyzer(FileSystem fileSystem, List<InputFile> sourceFiles) {
  this.fileSystem = fileSystem;
  this.fileSystemBaseDir = fileSystem.baseDir();
  for (InputFile source : sourceFiles) {
    pathToInputFile.put(source.absolutePath(), source);
    files.add(source.file());
  }
}
/**
 * Runs the given rule set against every source file and groups the per-file
 * results by the file's parent directory.
 *
 * @param ruleSet rules to evaluate on each source file
 * @return the collected results, keyed by parent directory
 */
private Map<File, List<FileResults>> processFiles(RuleSet ruleSet) {
  Map<File, List<FileResults>> results = new HashMap<>();
  for (InputFile inputFile : sourceFiles) {
    List<Violation> violations = collectViolations(new SourceFile(inputFile.file()), ruleSet);
    violationsByFile.put(inputFile, violations);
    FileResults result = new FileResults(inputFile.absolutePath(), violations);
    // computeIfAbsent replaces the original putIfAbsent+get pair: it only
    // allocates the list when the key is missing, instead of building a
    // throwaway LinkedList on every iteration.
    results.computeIfAbsent(inputFile.file().getParentFile(), dir -> new LinkedList<>()).add(result);
  }
  return results;
}
/**
 * Tests whether the file's absolute path matches this pattern. When the
 * comparison ignores extension case, the path's extension is replaced by its
 * sanitized form before matching.
 */
@Override
public boolean match(InputFile inputFile, boolean caseSensitiveFileExtension) {
  String candidate = inputFile.absolutePath();
  if (caseSensitiveFileExtension) {
    return pattern.match(candidate);
  }
  String extension = super.sanitizeExtension(FilenameUtils.getExtension(inputFile.file().getName()));
  if (StringUtils.isNotBlank(extension)) {
    // Cut the extension off ignoring case, then re-append the sanitized form.
    candidate = StringUtils.removeEndIgnoreCase(candidate, extension) + extension;
  }
  return pattern.match(candidate);
}
/**
 * Computes, for issue tracking, an MD5 hash per line of the file with all
 * blank characters removed, delegating each hash to the given consumer.
 *
 * @param f        file to read
 * @param consumer callback invoked with each line hash
 * @throws IllegalStateException when reading the file fails
 */
public static void computeLineHashesForIssueTracking(InputFile f, LineHashConsumer consumer) {
  try {
    readFile(
      f.inputStream(),
      f.charset(),
      f.absolutePath(),
      new CharHandler[] {new LineHashComputer(consumer, f.file())});
  } catch (IOException e) {
    throw new IllegalStateException("Failed to compute line hashes for " + f.absolutePath(), e);
  }
}
}
/**
 * Finalizes the blame phase: stops the progress report, then lists every
 * file that is still missing blame information.
 */
public void finish() {
  progressReport.stop(count + "/" + total + " files analyzed");
  if (!allFilesToBlame.isEmpty()) {
    LOG.warn("Missing blame information for the following files:");
    allFilesToBlame.forEach(f -> LOG.warn(" * " + f.absolutePath()));
    LOG.warn("This may lead to missing/broken features in SonarQube");
  }
}
}
/**
 * Saves a single measure value for the given file.
 *
 * @param context   sensor context used to create the measure
 * @param inputFile file the measure is attached to
 * @param metric    metric being saved
 * @param value     measured value
 */
private static <T extends Serializable> void saveMeasure(SensorContext context, InputFile inputFile, Metric<T> metric, T value) {
  // Trace at DEBUG rather than WARN: saving a measure is routine, not a problem.
  // Parameterized logging also defers metric.toString() until the level is enabled.
  LOGGER.debug("Saving measure {} for file {}", metric, inputFile.absolutePath());
  context.<T>newMeasure().forMetric(metric).on(inputFile).withValue(value).save();
}
}
/**
 * Runs the Pylint analyzer on one file and converts every reported issue
 * into a SonarQube issue via the active rule set.
 *
 * @throws IOException if the analysis output cannot be read
 */
private void analyzeFile(SensorContext context, InputFile file, File out) throws IOException {
  FileSystem fileSystem = context.fileSystem();
  for (Issue pylintIssue : analyzer.analyze(file.absolutePath(), fileSystem.encoding(), out)) {
    ActiveRule rule = context.activeRules().find(RuleKey.of(PylintRuleRepository.REPOSITORY_KEY, pylintIssue.getRuleId()));
    PylintImportSensor.processRule(pylintIssue, file, rule, context);
  }
}
/**
 * Builds a Pylint analyzer from the sensor configuration, runs it on one
 * file, and registers each reported issue against its active rule.
 *
 * @throws IOException if the analysis output cannot be read
 */
private void analyzeFile(SensorContext context, InputFile file, File out) throws IOException {
  FileSystem fileSystem = context.fileSystem();
  String configPath = conf.getPylintConfigPath(fileSystem);
  String executablePath = conf.getPylintPath();
  PylintIssuesAnalyzer issuesAnalyzer = new PylintIssuesAnalyzer(executablePath, configPath);
  List<Issue> reported = issuesAnalyzer.analyze(file.absolutePath(), fileSystem.encoding(), out);
  for (Issue pylintIssue : reported) {
    ActiveRule rule = context.activeRules().find(RuleKey.of(PylintRuleRepository.REPOSITORY_KEY, pylintIssue.getRuleId()));
    PylintImportSensor.processRule(pylintIssue, file, rule, context);
  }
}