Tabnine Logo
CommonTokenStream.<init>
Code IndexAdd Tabnine to your IDE (free)

How to use
org.antlr.v4.runtime.CommonTokenStream
constructor

Best Java code snippets using org.antlr.v4.runtime.CommonTokenStream.<init> (Showing top 20 results out of 2,223)

origin: apache/hive

/**
 * Executes HPL/SQL statements read from an include file's content.
 *
 * <p>Feeds the source text to the lexer directly as characters. The previous
 * implementation encoded the String to UTF-8 bytes and wrapped them in a
 * ByteArrayInputStream, but {@code ANTLRInputStream(InputStream)} decodes the
 * stream with the platform-default charset, so non-ASCII content was corrupted
 * on platforms whose default charset is not UTF-8. Passing the String avoids
 * the encode/decode round-trip entirely.
 *
 * @param content the statements to execute
 * @throws Exception if parsing or execution fails
 */
void include(String content) throws Exception {
 HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(content));
 CommonTokenStream tokens = new CommonTokenStream(lexer);
 HplsqlParser parser = new HplsqlParser(tokens);
 ParseTree tree = parser.program();
 visit(tree);
}

origin: apache/incubator-shardingsphere

  /**
   * Builds a dialect-specific SQL parser over a buffered token stream.
   *
   * @param databaseType dialect the SQL is written in
   * @param lexer lexer producing the tokens
   * @return parser for the given dialect
   * @throws UnsupportedOperationException if no parser exists for the dialect
   */
  private static SQLParser createSQLParser(final DatabaseType databaseType, final Lexer lexer) {
    final TokenStream tokens = new CommonTokenStream(lexer);
    switch (databaseType) {
      case H2:
      case MySQL:
        // H2 is parsed with the MySQL grammar.
        return new MySQLParser(tokens);
      case PostgreSQL:
        return new PostgreSQLParser(tokens);
      case SQLServer:
        return new SQLServerParser(tokens);
      case Oracle:
        return new OracleParser(tokens);
      default:
        throw new UnsupportedOperationException(String.format("Can not support database type [%s].", databaseType));
    }
  }
}
origin: apache/incubator-druid

/**
 * Parses an expression string into an {@code Expr} AST.
 *
 * @param in expression source text
 * @param macroTable macros available during AST construction
 * @param withFlatten whether to flatten the resulting AST before returning it
 * @return the parsed (and optionally flattened) expression
 */
@VisibleForTesting
static Expr parse(String in, ExprMacroTable macroTable, boolean withFlatten)
{
 // Lex and parse the text into an ANTLR parse tree.
 ExprLexer exprLexer = new ExprLexer(new ANTLRInputStream(in));
 CommonTokenStream tokenStream = new CommonTokenStream(exprLexer);
 ExprParser exprParser = new ExprParser(tokenStream);
 exprParser.setBuildParseTree(true);
 ParseTree tree = exprParser.expr();

 // Walk the parse tree to build the expression AST.
 ExprListenerImpl astBuilder = new ExprListenerImpl(tree, macroTable);
 new ParseTreeWalker().walk(astBuilder, tree);
 return withFlatten ? flatten(astBuilder.getAST()) : astBuilder.getAST();
}
origin: apache/ignite

  /** {@inheritDoc} */
  @Override public XGModelComposition parse(byte[] mdl) {
    try (ByteArrayInputStream in = new ByteArrayInputStream(mdl)) {
      // Build the ANTLR pipeline: bytes -> chars -> tokens -> parser.
      XGBoostModelLexer modelLexer = new XGBoostModelLexer(CharStreams.fromStream(in));
      XGBoostModelParser modelParser = new XGBoostModelParser(new CommonTokenStream(modelLexer));

      // Visit the parse tree to assemble the model composition.
      return new XGModelVisitor().visit(modelParser.xgModel());
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
origin: pxb1988/dex2jar

/**
 * Parses smali source from the given char stream and feeds every file
 * declaration it contains to the visitor.
 *
 * @param dcv visitor receiving each parsed file
 * @param is smali source characters
 * @throws IOException if reading the stream fails
 */
private static void smali0(DexFileVisitor dcv, CharStream is) throws IOException {
  SmaliParser smaliParser = new SmaliParser(new CommonTokenStream(new SmaliLexer(is)));
  for (SmaliParser.SFileContext fileCtx : smaliParser.sFiles().sFile()) {
    AntlrSmaliUtil.acceptFile(fileCtx, dcv);
  }
}
origin: sleekbyte/tailor

/** Runs SwiftLexer on an input file to generate a token stream.
 *
 * @param input Lexer input
 * @return Token stream, or empty if the file could not be processed
 */
private Optional<CommonTokenStream> getTokenStream(File input) {
  try (FileInputStream stream = new FileInputStream(input)) {
    SwiftLexer swiftLexer = new SwiftLexer(new ANTLRInputStream(stream));
    // Outside debug mode, replace the default console listener with our own.
    if (!configuration.debugFlagSet()) {
      swiftLexer.removeErrorListeners();
      swiftLexer.addErrorListener(new ErrorListener());
    }
    return Optional.of(new CommonTokenStream(swiftLexer));
  } catch (IOException e) {
    handleIOException(e);
  } catch (CLIArgumentParserException e) {
    handleCLIException(e);
  }
  return Optional.empty();
}
origin: zendesk/maxwell

/**
 * Parses a SQL statement into the schema changes it implies.
 *
 * @param currentDB database the statement executes against
 * @param sql raw SQL text
 * @return schema changes described by the statement
 */
private static List<SchemaChange> parseSQL(String currentDB, String sql) {
  ANTLRInputStream input = new ANTLRInputStream(sql);
  mysqlLexer lexer = new mysqlLexer(input);
  lexer.removeErrorListeners();
  TokenStream tokens = new CommonTokenStream(lexer);
  // Parameterized logging: the message is only built when DEBUG is enabled.
  LOGGER.debug("SQL_PARSE <- \"{}\"", sql);
  mysqlParser parser = new mysqlParser(tokens);
  parser.removeErrorListeners();
  MysqlParserListener listener = new MysqlParserListener(currentDB, tokens);
  ParseTree tree = parser.parse();
  ParseTreeWalker.DEFAULT.walk(listener, tree);
  // Guard: toStringTree() is expensive, so skip it entirely unless DEBUG is on.
  if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("SQL_PARSE ->   {}", tree.toStringTree(parser));
  }
  return listener.getSchemaChanges();
}
origin: Graylog2/graylog2-server

/**
 * Parses a single pipeline definition, attaching the given id.
 *
 * @param id id assigned to the resulting pipeline
 * @param source pipeline rule-language source
 * @return the parsed pipeline with the id applied
 * @throws ParseException if the source contains syntax errors
 */
public Pipeline parsePipeline(String id, String source) {
  final ParseContext parseContext = new ParseContext(false);
  final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);

  // Route all lexer and parser errors through the collecting listener.
  final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(source));
  lexer.removeErrorListeners();
  lexer.addErrorListener(errorListener);
  final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
  parser.setErrorHandler(new DefaultErrorStrategy());
  parser.removeErrorListeners();
  parser.addErrorListener(errorListener);

  final RuleLangParser.PipelineContext pipelineContext = parser.pipeline();
  WALKER.walk(new PipelineAstBuilder(parseContext), pipelineContext);

  // Guard clause: fail fast on any collected syntax error.
  if (!parseContext.getErrors().isEmpty()) {
    throw new ParseException(parseContext.getErrors());
  }
  return parseContext.pipelines.get(0).withId(id);
}
origin: Graylog2/graylog2-server

/**
 * Parses a source string containing one or more pipeline declarations.
 *
 * @param pipelines pipeline rule-language source
 * @return all pipelines declared in the source
 * @throws ParseException if the source contains syntax errors
 */
public List<Pipeline> parsePipelines(String pipelines) throws ParseException {
  final ParseContext parseContext = new ParseContext(false);
  final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);

  // Route all lexer and parser errors through the collecting listener.
  final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(pipelines));
  lexer.removeErrorListeners();
  lexer.addErrorListener(errorListener);
  final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
  parser.setErrorHandler(new DefaultErrorStrategy());
  parser.removeErrorListeners();
  parser.addErrorListener(errorListener);

  final RuleLangParser.PipelineDeclsContext pipelineDeclsContext = parser.pipelineDecls();
  WALKER.walk(new PipelineAstBuilder(parseContext), pipelineDeclsContext);

  // Guard clause: fail fast on any collected syntax error.
  if (!parseContext.getErrors().isEmpty()) {
    throw new ParseException(parseContext.getErrors());
  }
  return parseContext.pipelines;
}
origin: apache/incubator-druid

TokenStream tokenStream = new CommonTokenStream(lexer);
InfluxLineProtocolParser parser = new InfluxLineProtocolParser(tokenStream);
origin: jooby-project/jooby

/**
 * Collects documentation items from the given source file.
 *
 * @param file source file to scan
 * @return documentation items found in the file
 * @throws IOException if the file cannot be read
 */
public List<DocItem> collect(Path file) throws IOException {
 this.file = file;
 doc = new ArrayList<>();

 CharStream source = CharStreams.fromPath(file, StandardCharsets.UTF_8);
 FuzzyDocLexer docLexer = new FuzzyDocLexer(source);

 // Lexer and parser both report through the same listener.
 ANTLRErrorListener reporter = errorListener(LoggerFactory.getLogger(DocCollector.class), file);
 docLexer.removeErrorListeners();
 docLexer.addErrorListener(reporter);
 FuzzyDocParser docParser = new FuzzyDocParser(new CommonTokenStream(docLexer));
 docParser.removeErrorListeners();
 docParser.addErrorListener(reporter);

 // This collector is itself the parse-tree listener and fills `doc`.
 ParseTreeWalker.DEFAULT.walk(this, docParser.source());
 return doc;
}
origin: jooby-project/jooby

/**
 * Parses javadoc text into its ANTLR documentation context.
 *
 * @param file file the javadoc came from, used for error reporting
 * @param doc javadoc text; {@code null} is treated as empty
 * @return the parsed documentation context
 */
private static JavadocParser.DocumentationContext parseJavadoc(Path file, String doc) {
 ANTLRErrorListener reporter = errorListener(LoggerFactory.getLogger(DocCollector.class), file);

 JavadocLexer javadocLexer = new JavadocLexer(CharStreams.fromString(doc == null ? "" : doc));
 javadocLexer.removeErrorListeners();
 javadocLexer.addErrorListener(reporter);

 JavadocParser javadocParser = new JavadocParser(new CommonTokenStream(javadocLexer));
 javadocParser.removeErrorListeners();
 javadocParser.addErrorListener(reporter);
 return javadocParser.documentation();
}
origin: prestodb/presto

/**
 * Parses a type-calculation expression into a parse tree.
 *
 * <p>Tries the faster SLL prediction mode first and falls back to full LL
 * only if SLL bails out with a {@code ParseCancellationException}.
 *
 * @param calculation type-calculation source text
 * @return the parsed tree
 */
private static ParserRuleContext parseTypeCalculation(String calculation)
{
  TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation)));
  CommonTokenStream tokenStream = new CommonTokenStream(lexer);
  TypeCalculationParser parser = new TypeCalculationParser(tokenStream);
  lexer.removeErrorListeners();
  lexer.addErrorListener(ERROR_LISTENER);
  parser.removeErrorListeners();
  parser.addErrorListener(ERROR_LISTENER);
  try {
    // Fast path: SLL prediction handles the vast majority of inputs.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    return parser.typeCalculation();
  }
  catch (ParseCancellationException ex) {
    // SLL gave up: rewind the token stream and reparse with full LL.
    tokenStream.reset();
    parser.reset();
    parser.getInterpreter().setPredictionMode(PredictionMode.LL);
    return parser.typeCalculation();
  }
}
origin: ballerina-platform/ballerina-lang

  /**
   * Parses TOML content into a parse tree.
   *
   * @param stream charstream object containing the content
   * @param path path to the toml file, used in syntax-error messages
   * @return the parsed TOML tree
   */
  public static ParseTree parseTomlContent(CharStream stream, String path) {
    TomlParser tomlParser = new TomlParser(new CommonTokenStream(new TomlLexer(stream)));
    // Replace the default console listener with one that reports the file path.
    tomlParser.removeErrorListeners();
    tomlParser.addErrorListener(TomlErrorListener.getErrorListener(path));
    return tomlParser.toml();
  }
}
origin: checkstyle/checkstyle

/**
 * Builds a javadoc parser for the given block-comment content.
 * @param blockComment
 *        block comment content.
 * @return configured parser, ready to produce a parse tree
 */
private JavadocParser createJavadocParser(String blockComment) {
  final JavadocLexer lexer = new JavadocLexer(CharStreams.fromString(blockComment));
  final JavadocParser parser = new JavadocParser(new CommonTokenStream(lexer));

  // Swap ANTLR's default console listeners for one that logs syntax errors.
  parser.removeErrorListeners();
  parser.addErrorListener(errorListener);

  // JavadocParserErrorStrategy stops parsing at the first error, unlike
  // ANTLR's DefaultErrorStrategy, which attempts recovery.
  parser.setErrorHandler(new JavadocParserErrorStrategy());
  return parser;
}
origin: apache/drill

/**
 * Parses the given expression text into a logical expression instance.
 *
 * <p>Both lexer and parser report syntax errors through the shared listener,
 * which converts them into
 * {@link org.apache.drill.common.exceptions.ExpressionParsingException}.
 *
 * @param expr expression to be parsed
 * @return logical expression instance
 */
public static LogicalExpression parse(String expr) {
 ExprLexer exprLexer = new ExprLexer(CharStreams.fromString(expr));
 // Drop the default console listener, which would print warnings.
 exprLexer.removeErrorListeners();
 exprLexer.addErrorListener(ErrorListener.INSTANCE);

 CommonTokenStream tokenStream = new CommonTokenStream(exprLexer);
 ExprParser exprParser = new ExprParser(tokenStream);
 exprParser.removeErrorListeners();
 exprParser.addErrorListener(ErrorListener.INSTANCE);

 ExprParser.ParseContext parsed = exprParser.parse();
 logger.trace("Tokens: [{}]. Parsing details: [{}].", tokenStream.getText(), parsed.toInfoString(exprParser));
 return parsed.e;
}
origin: confluentinc/ksql

/**
 * Parses the given SQL text into a parse tree of statements.
 *
 * <p>Attempts the faster SLL prediction mode first; on a
 * {@code ParseCancellationException} it rewinds the token stream and retries
 * with full LL prediction.
 *
 * @param sql SQL text to parse
 * @return parse tree for the statements
 */
private ParserRuleContext getParseTree(final String sql) {
 final SqlBaseLexer baseLexer = new SqlBaseLexer(
   new CaseInsensitiveStream(CharStreams.fromString(sql)));
 final CommonTokenStream tokens = new CommonTokenStream(baseLexer);
 final SqlBaseParser baseParser = new SqlBaseParser(tokens);

 baseLexer.removeErrorListeners();
 baseLexer.addErrorListener(ERROR_LISTENER);
 baseParser.removeErrorListeners();
 baseParser.addErrorListener(ERROR_LISTENER);

 final Function<SqlBaseParser, ParserRuleContext> parseFunction = SqlBaseParser::statements;
 try {
  // Fast path: SLL prediction handles the vast majority of inputs.
  baseParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
  return parseFunction.apply(baseParser);
 } catch (final ParseCancellationException ex) {
  // SLL gave up: rewind the token stream and reparse with full LL.
  tokens.seek(0);
  baseParser.reset();
  baseParser.getInterpreter().setPredictionMode(PredictionMode.LL);
  return parseFunction.apply(baseParser);
 }
}
origin: graphql-java/graphql-java

CommonTokenStream tokens = new CommonTokenStream(lexer);
origin: prestodb/presto

CommonTokenStream tokenStream = new CommonTokenStream(lexer);
SqlBaseParser parser = new SqlBaseParser(tokenStream);
origin: debezium/debezium

/**
 * Parses DDL text and applies the resulting schema changes to the given
 * {@code Tables} instance.
 *
 * <p>Builds a grammar-specific lexer/parser pair via the subclass hooks
 * {@code createNewLexerInstance}/{@code createNewParserInstance}, collects
 * syntax errors through {@code ParsingErrorListener}, and only walks the
 * parse tree when parsing produced no errors.
 *
 * @param ddlContent DDL statements to parse
 * @param databaseTables table definitions to update; also stored on this parser
 */
@Override
public void parse(String ddlContent, Tables databaseTables) {
  this.databaseTables = databaseTables;
  CodePointCharStream ddlContentCharStream = CharStreams.fromString(ddlContent);
  // CaseChangingCharStream folds input case to match the grammar's convention.
  L lexer = createNewLexerInstance(new CaseChangingCharStream(ddlContentCharStream, isGrammarInUpperCase()));
  P parser = createNewParserInstance(new CommonTokenStream(lexer));
  dataTypeResolver = initializeDataTypeResolver();
  // remove default console output printing error listener
  parser.removeErrorListener(ConsoleErrorListener.INSTANCE);
  ParsingErrorListener parsingErrorListener = new ParsingErrorListener(AbstractDdlParser::accumulateParsingFailure);
  parser.addErrorListener(parsingErrorListener);
  ParseTree parseTree = parseTree(parser);
  if (parsingErrorListener.getErrors().isEmpty()) {
    // Walk the tree only if a subclass supplies a listener; errors found
    // during the walk are optionally rethrown (see throwErrorsFromTreeWalk).
    antlrDdlParserListener = createParseTreeWalkerListener();
    if (antlrDdlParserListener != null) {
      ParseTreeWalker.DEFAULT.walk(antlrDdlParserListener, parseTree);
      if (throwErrorsFromTreeWalk && !antlrDdlParserListener.getErrors().isEmpty()) {
        throwParsingException(antlrDdlParserListener.getErrors());
      }
    }
  }
  else {
    // Syntax errors during parsing always abort.
    throwParsingException(parsingErrorListener.getErrors());
  }
}
org.antlr.v4.runtime.CommonTokenStream.<init>

Javadoc

Constructs a new CommonTokenStream using the specified token source and the default token channel (Token#DEFAULT_CHANNEL).

Popular methods of CommonTokenStream

  • getTokens
  • fill
  • get
  • getHiddenTokensToLeft
  • size
  • LA
  • getHiddenTokensToRight
  • reset
  • seek
  • LB
  • getText
  • getTokenSource
  • getText,
  • getTokenSource,
  • lazyInit,
  • nextTokenOnChannel,
  • previousTokenOnChannel,
  • sync,
  • LT,
  • consume,
  • index

Popular in Java

  • Running tasks concurrently on multiple threads
  • scheduleAtFixedRate (Timer)
  • getExternalFilesDir (Context)
  • onCreateOptionsMenu (Activity)
  • GridBagLayout (java.awt)
    The GridBagLayout class is a flexible layout manager that aligns components vertically and horizontally
  • EOFException (java.io)
    Thrown when a program encounters the end of a file or stream during an input operation.
  • Iterator (java.util)
    An iterator over a sequence of objects, such as a collection. If a collection has been changed since
  • HttpServlet (javax.servlet.http)
    Provides an abstract class to be subclassed to create an HTTP servlet suitable for a Web site. A sub
  • IsNull (org.hamcrest.core)
    Is the value null?
  • LoggerFactory (org.slf4j)
    The LoggerFactory is a utility class producing Loggers for various logging APIs, most notably for lo
  • Top Vim plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now