/**
 * Runs TreeParser01 using the supplied TP01Action object. Invokes parse() beforehand
 * when the JPNode AST has not been built yet.
 *
 * @param action Callback object driven by the tree parser
 * @throws ANTLRException If parsing or tree-parsing fails
 */
public void treeParser01(ITreeParserAction action) throws ANTLRException {
  if (getTopNode() == null) {
    parse();
  }
  treeParser(new TreeParser01(action));
}
/**
 * Builds a TokenSource over the main file only. Include files are not expanded
 * and the preprocessor is not applied.
 *
 * @throws UncheckedIOException If main file can't be opened
 */
public TokenSource lex4() {
  ProgressLexer lexer = new ProgressLexer(session, getByteSource(), relativeName, true);
  return lexer;
}
/**
 * Builds a TokenSource over the main file only. Include files are not expanded
 * and the preprocessor is not applied.
 *
 * @throws UncheckedIOException If main file can't be opened
 */
public TokenSource lex4() {
  ProgressLexer lexer = new ProgressLexer(session, getInputStream(), relativeName, true);
  return lexer;
}
/**
 * Executes the given IJPTreeParser over the AST, first building the JPNode tree via
 * parse() when it is not yet available.
 *
 * @param tp Tree parser to run against the top node
 * @throws ANTLRException If parsing or tree-parsing fails
 */
public void treeParser(IJPTreeParser tp) throws ANTLRException {
  LOGGER.trace("Entering ParseUnit#treeParser()");
  if (getTopNode() == null) {
    parse();
  }
  tp.program(getTopNode());
  LOGGER.trace("Exiting ParseUnit#treeParser()");
}
@Override
public void execute(InputFile file, ParseUnit unit) {
  // Nothing to check when no transaction-block data was attached
  if (unit.getTransactionBlocks() == null)
    return;
  // Skip units containing a TRIGGER statement whose first child is PROCEDURE
  for (JPNode trigger : unit.getTopNode().queryStateHead(ABLNodeType.TRIGGER)) {
    if (trigger.getFirstChild().getNodeType() == ABLNodeType.PROCEDURE)
      return;
  }
  // Line 0 marks a transaction scoped to the main block
  for (Integer lineNum : unit.getTransactionBlocks()) {
    if (lineNum == 0)
      reportIssue(file, "Transaction scope of main block spans the entire procedure");
  }
}
unit = new ParseUnit(InputFileUtils.getInputStream(file), InputFileUtils.getRelativePath(file, context.fileSystem()), session); unit.treeParser01(); unit.attachXref(doc); unit.attachTransactionBlocks(trxBlocks); unit.attachTypeInfo(session.getTypeInfo(unit.getRootScope().getClassName())); updateParseTime(System.currentTimeMillis() - startTime); } catch (UncheckedIOException caught) { generateProparseFlatFiles(unit.getTopNode(), unit.getSupport(), false, InputFileUtils.getRelativePath(file, context.fileSystem()));
@Override
public void execute(InputFile file, ParseUnit unit) {
  // Rule only applies to interfaces and abstract classes
  boolean applicable = unit.getRootScope().isInterface() || unit.getRootScope().isAbstractClass();
  if (!applicable)
    return;
  for (JPNode method : unit.getTopNode().queryStateHead(ABLNodeType.METHOD)) {
    JPNode tail = method.getLastDescendant();
    if (tail.getNodeType() == ABLNodeType.LEXCOLON) {
      reportIssue(file, method, "METHOD ending with colon instead of period");
    }
  }
}
@SuppressWarnings({"unchecked", "rawtypes"}) private void parseIncludeFile(SensorContext context, InputFile file, RefactorSession session) { long startTime = System.currentTimeMillis(); ParseUnit lexUnit = null; try { lexUnit = new ParseUnit(InputFileUtils.getInputStream(file), InputFileUtils.getRelativePath(file, context.fileSystem()), session); lexUnit.lexAndGenerateMetrics(); } catch (UncheckedIOException caught) { numFailures++; if (caught.getCause() instanceof XCodedFileException) { LOG.error("Unable to generate file metrics for xcode'd file '{}", file); } else { LOG.error("Unable to generate file metrics for file '" + file + "'", caught); } return; } catch (ProparseRuntimeException caught) { LOG.error("Unable to generate file metrics for file '" + file + "'", caught); return; } updateParseTime(System.currentTimeMillis() - startTime); if (lexUnit.getMetrics() != null) { // Saving LOC and COMMENTS metrics context.newMeasure().on(file).forMetric((Metric) CoreMetrics.NCLOC).withValue( lexUnit.getMetrics().getLoc()).save(); ncLocs += lexUnit.getMetrics().getLoc(); context.newMeasure().on(file).forMetric((Metric) CoreMetrics.COMMENT_LINES).withValue( lexUnit.getMetrics().getComments()).save(); } }
/**
 * Writes a JSON dump of the unit's AST under the .proparse/ directory and records
 * the generated file name in debugFiles. I/O errors are logged, not rethrown.
 */
private void generateProparseDebugFile(InputFile file, ParseUnit unit) {
  String fileName = ".proparse/" + file.relativePath() + ".json";
  File dbgFile = new File(fileName);
  dbgFile.getParentFile().mkdirs();
  // Fixed: explicit UTF-8 instead of the platform default charset (output is JSON)
  try (PrintWriter writer = new PrintWriter(dbgFile, "UTF-8")) {
    // Listed node types are skipped by the lister (punctuation / block delimiters)
    JsonNodeLister nodeLister = new JsonNodeLister(unit.getTopNode(), writer, ABLNodeType.LEFTPAREN,
        ABLNodeType.RIGHTPAREN, ABLNodeType.COMMA, ABLNodeType.PERIOD, ABLNodeType.LEXCOLON,
        ABLNodeType.OBJCOLON, ABLNodeType.THEN, ABLNodeType.END);
    nodeLister.print();
    debugFiles.add(file.relativePath() + ".json");
  } catch (IOException caught) {
    LOG.error("Unable to write proparse debug file", caught);
  }
}
@SuppressWarnings({"unchecked", "rawtypes"}) private void computeSimpleMetrics(SensorContext context, InputFile file, ParseUnit unit) { // Saving LOC and COMMENTS metrics context.newMeasure().on(file).forMetric((Metric) CoreMetrics.NCLOC).withValue(unit.getMetrics().getLoc()).save(); ncLocs += unit.getMetrics().getLoc(); context.newMeasure().on(file).forMetric((Metric) CoreMetrics.COMMENT_LINES).withValue( unit.getMetrics().getComments()).save(); }
@SuppressWarnings({"unchecked", "rawtypes"}) private void computeComplexity(SensorContext context, InputFile file, ParseUnit unit) { // Interfaces don't contribute to complexity if (unit.getRootScope().isInterface()) return; int complexity = 0; int complexityWithInc = 0; // Procedure has a main block, so starting at 1 if (!unit.getRootScope().isClass()) { complexity++; complexityWithInc++; } complexity += unit.getTopNode().queryMainFile(ABLNodeType.IF, ABLNodeType.REPEAT, ABLNodeType.FOR, ABLNodeType.WHEN, ABLNodeType.AND, ABLNodeType.OR, ABLNodeType.RETURN, ABLNodeType.PROCEDURE, ABLNodeType.FUNCTION, ABLNodeType.METHOD, ABLNodeType.ENUM).size(); complexityWithInc += unit.getTopNode().query(ABLNodeType.IF, ABLNodeType.REPEAT, ABLNodeType.FOR, ABLNodeType.WHEN, ABLNodeType.AND, ABLNodeType.OR, ABLNodeType.RETURN, ABLNodeType.PROCEDURE, ABLNodeType.FUNCTION, ABLNodeType.METHOD, ABLNodeType.ENUM).size(); context.newMeasure().on(file).forMetric((Metric) CoreMetrics.COMPLEXITY).withValue(complexity).save(); context.newMeasure().on(file).forMetric((Metric) OpenEdgeMetrics.COMPLEXITY).withValue(complexityWithInc).save(); }
/**
 * Runs an arbitrary IJPTreeParser over the AST. When no top node exists yet,
 * parse() is invoked first to build the JPNode tree.
 *
 * @param tp Tree parser to execute
 * @throws ANTLRException If parsing or tree-parsing fails
 */
public void treeParser(IJPTreeParser tp) throws ANTLRException {
  LOGGER.trace("Entering ParseUnit#treeParser()");
  if (getTopNode() == null) {
    parse();
  }
  tp.program(getTopNode());
  LOGGER.trace("Exiting ParseUnit#treeParser()");
}
/**
 * Walks the AST with a CPDCallback to collect copy-paste-detection tokens,
 * then saves the resulting CPD measure.
 */
private void computeCpd(SensorContext context, InputFile file, ParseUnit unit) {
  CPDCallback callback = new CPDCallback(context, file, settings, unit);
  unit.getTopNode().walk(callback);
  callback.getResult().save();
}
/**
 * Executes TreeParser01 driven by the given TP01Action object. Ensures parse()
 * has run first when no AST exists.
 *
 * @param action Callback object driven by the tree parser
 * @throws ANTLRException If parsing or tree-parsing fails
 */
public void treeParser01(ITreeParserAction action) throws ANTLRException {
  if (getTopNode() == null) {
    parse();
  }
  TreeParser01 parser = new TreeParser01(action);
  treeParser(parser);
}
@SuppressWarnings({"unchecked", "rawtypes"}) private void computeCommonMetrics(SensorContext context, InputFile file, ParseUnit unit) { context.newMeasure().on(file).forMetric((Metric) CoreMetrics.STATEMENTS).withValue( unit.getTopNode().queryStateHead().size()).save(); int numProcs = 0; int numFuncs = 0; int numMethds = 0; for (TreeParserSymbolScope child : unit.getRootScope().getChildScopesDeep()) { int scopeType = child.getRootBlock().getNode().getType(); switch (scopeType) {
/**
 * Returns a TokenSource for the main file with the preprocessor enabled
 * (last constructor argument false, unlike lex4).
 */
public TokenSource preprocess4() {
  ProgressLexer lexer = new ProgressLexer(session, getInputStream(), relativeName, false);
  return lexer;
}
/**
 * Returns a TokenSource for the main file with the preprocessor enabled
 * (last constructor argument false, unlike lex4).
 */
public TokenSource preprocess4() {
  ProgressLexer lexer = new ProgressLexer(session, getByteSource(), relativeName, false);
  return lexer;
}
/**
 * Runs TreeParser01 with a default TP01Support action bound to this unit and its
 * session. Calls parse() first when no AST has been built yet.
 *
 * @throws ANTLRException If parsing or tree-parsing fails
 */
public void treeParser01() throws ANTLRException {
  LOGGER.trace("Entering ParseUnit#treeParser01()");
  if (getTopNode() == null) {
    parse();
  }
  treeParser(new TreeParser01(new TP01Support(session, this)));
  LOGGER.trace("Exiting ParseUnit#treeParser01()");
}
/**
 * Returns an ANTLR2 TokenStream over the main file only. Include files are not
 * expanded and the preprocessor is not applied.
 *
 * @throws UncheckedIOException If main file can't be opened
 */
public TokenStream lex() {
  return new ProgressLexer(session, getInputStream(), relativeName, true).getANTLR2TokenStream(false);
}