// NOTE(review): this snippet appears truncated/merged — the enclosing method
// header, the HiveParser construction that defines `parser`, and the closing
// braces are missing from this chunk. Code left byte-identical; confirm
// against the complete source before editing.
HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command)); TokenRewriteStream tokens = new TokenRewriteStream(lexer); if (ctx != null) { lexer.setHiveConf(ctx.getConf()); if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) { LOG.debug("Parse Completed"); } else if (lexer.getErrors().size() != 0) { throw new ParseException(lexer.getErrors()); } else { throw new ParseException(parser.errors);
// NOTE(review): truncated/merged snippet — the method header, the code that
// defines `parser`, and the closing braces are missing from this chunk.
// Code left byte-identical; confirm against the complete source.
LOG.info("Parsing command: " + command); HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command)); TokenRewriteStream tokens = new TokenRewriteStream(lexer); if (ctx != null) { if (lexer.getErrors().size() == 0 && parser.getErrors().size() == 0) { LOG.info("Parse Completed"); } else if (lexer.getErrors().size() != 0) { throw new ParseException(lexer.getErrors()); } else { throw new ParseException(parser.getErrors());
// NOTE(review): truncated/merged snippet — the method header, the code that
// defines `parser`, and the closing braces are missing from this chunk.
// Code left byte-identical; confirm against the complete source.
LOG.info("Parsing command: " + command); HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command)); TokenRewriteStream tokens = new TokenRewriteStream(lexer); if (ctx != null) { ctx.setTokenRewriteStream(tokens); lexer.setHiveConf(ctx.getConf()); if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) { LOG.info("Parse Completed"); } else if (lexer.getErrors().size() != 0) { throw new ParseException(lexer.getErrors()); } else { throw new ParseException(parser.errors);
/**
 * Parses a standalone SELECT clause into an Abstract Syntax Tree.
 *
 * @param command the select-clause text to parse
 * @param ctx optional context; when non-null it receives the token rewrite stream
 * @return the root {@code ASTNode} of the parsed select clause
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseSelect(String command, Context ctx) throws ParseException {
  LOG.info("Parsing command: " + command);
  HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokens = new TokenRewriteStream(lexer);
  if (ctx != null) {
    ctx.setTokenRewriteStream(tokens);
  }
  HiveParser parser = new HiveParser(tokens);
  parser.setTreeAdaptor(adaptor);
  HiveParser_SelectClauseParser.selectClause_return r = null;
  try {
    r = parser.selectClause();
  } catch (RecognitionException e) {
    // Propagate the accumulated parser errors instead of dumping the stack
    // trace to stderr (printStackTrace bypasses the logging framework).
    throw new ParseException(parser.errors);
  }
  if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
    LOG.info("Parse Completed");
  } else if (lexer.getErrors().size() != 0) {
    throw new ParseException(lexer.getErrors());
  } else {
    throw new ParseException(parser.errors);
  }
  return (ASTNode) r.getTree();
}
}
/** * Parse an Hive QL into an Abstract Syntax Tree(AST). * @param query * @return * @throws RecognitionException */ public ASTNode generateAST(String query) throws RecognitionException { // https://issues.apache.org/jira/browse/HIVE-10731 // https://issues.apache.org/jira/browse/HIVE-6617 HiveConf hiveConf = new HiveConf(); hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS, false); ParseDriver pd = new ParseDriver(); ParseDriver.ANTLRNoCaseStringStream antlrStream = pd.new ANTLRNoCaseStringStream(query); ParseDriver.HiveLexerX lexer = pd.new HiveLexerX(antlrStream); lexer.setHiveConf(hiveConf); TokenRewriteStream tokens = new TokenRewriteStream(lexer); HiveParser parser = new HiveParser(tokens); parser.setHiveConf(hiveConf); parser.setTreeAdaptor(adaptor); HiveParser.statement_return r = parser.statement(); return (ASTNode)r.getTree(); }
@Override public String getErrorMessage(RecognitionException e, String[] tokenNames) { String msg = null; if (e instanceof NoViableAltException) { @SuppressWarnings("unused") NoViableAltException nvae = (NoViableAltException) e; // for development, can add // "decision=<<"+nvae.grammarDecisionDescription+">>" // and "(decision="+nvae.decisionNumber+") and // "state "+nvae.stateNumber msg = "character " + getCharErrorDisplay(e.c) + " not supported here"; } else { msg = super.getErrorMessage(e, tokenNames); } return msg; }
@Override public String getErrorMessage(RecognitionException e, String[] tokenNames) { String msg = null; if (e instanceof NoViableAltException) { @SuppressWarnings("unused") NoViableAltException nvae = (NoViableAltException) e; // for development, can add // "decision=<<"+nvae.grammarDecisionDescription+">>" // and "(decision="+nvae.decisionNumber+") and // "state "+nvae.stateNumber msg = "character " + getCharErrorDisplay(e.c) + " not supported here"; } else { msg = super.getErrorMessage(e, tokenNames); } return msg; }
/**
 * Parses a standalone Hive expression into an AST using a throwaway
 * {@code ParseDriver}.
 *
 * @param command the expression text to parse
 * @param conf optional configuration applied to both lexer and parser when non-null
 * @return the root {@code ASTNode} of the parsed expression
 * @throws ParseException if parsing fails with accumulated errors
 * @throws RecognitionException on an ANTLR recognition failure
 */
public static ASTNode parseExpression(String command, Configuration conf)
    throws ParseException, RecognitionException {
  ParseDriver driver = new ParseDriver();
  ParseDriver.ANTLRNoCaseStringStream stream = driver.new ANTLRNoCaseStringStream(command);
  ParseDriver.HiveLexerX hiveLexer = driver.new HiveLexerX(stream);
  if (conf != null) {
    hiveLexer.setHiveConf(conf);
  }
  TokenRewriteStream tokenStream = new TokenRewriteStream(hiveLexer);
  HiveParser hiveParser = new HiveParser(tokenStream);
  if (conf != null) {
    hiveParser.setHiveConf(conf);
  }
  hiveParser.setTreeAdaptor(ParseDriver.adaptor);
  return (ASTNode) hiveParser.expression().getTree();
}
/**
 * Parses a standalone Hive expression into an AST using a throwaway ParseDriver.
 *
 * @param command the expression text to parse
 * @param conf optional configuration applied to both lexer and parser when non-null
 * @return the root ASTNode of the parsed expression
 * @throws ParseException if parsing fails with accumulated errors
 * @throws RecognitionException on an ANTLR recognition failure
 */
public static ASTNode parseExpression(String command, Configuration conf) throws ParseException, RecognitionException { ParseDriver driver = new ParseDriver(); ParseDriver.HiveLexerX lexer = driver.new HiveLexerX(driver.new ANTLRNoCaseStringStream(command)); if (conf != null) { lexer.setHiveConf(conf); } TokenRewriteStream tokens = new TokenRewriteStream(lexer); HiveParser parser = new HiveParser(tokens); if (conf != null) { parser.setHiveConf(conf); } parser.setTreeAdaptor(ParseDriver.adaptor); return (ASTNode)parser.expression().getTree(); }
// NOTE(review): truncated/merged snippet — the enclosing method header, the
// code that defines `parser`, and the closing braces are missing from this
// chunk. Code left byte-identical; confirm against the complete source.
HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command)); TokenRewriteStream tokens = new TokenRewriteStream(lexer); if (ctx != null) { lexer.setHiveConf(ctx.getConf()); if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) { LOG.debug("Parse Completed"); } else if (lexer.getErrors().size() != 0) { throw new ParseException(lexer.getErrors()); } else { throw new ParseException(parser.errors);
/**
 * Parses a standalone SELECT clause into an Abstract Syntax Tree.
 *
 * @param command the select-clause text to parse
 * @param ctx optional context; when non-null it receives the token rewrite stream
 * @return the root {@code ASTNode} of the parsed select clause
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseSelect(String command, Context ctx) throws ParseException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Parsing command: " + command);
  }
  HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokens = new TokenRewriteStream(lexer);
  if (ctx != null) {
    ctx.setTokenRewriteStream(tokens);
  }
  HiveParser parser = new HiveParser(tokens);
  parser.setTreeAdaptor(adaptor);
  HiveParser_SelectClauseParser.selectClause_return r = null;
  try {
    r = parser.selectClause();
  } catch (RecognitionException e) {
    // Propagate the accumulated parser errors instead of dumping the stack
    // trace to stderr (printStackTrace bypasses the logging framework).
    throw new ParseException(parser.errors);
  }
  if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
    LOG.debug("Parse Completed");
  } else if (lexer.getErrors().size() != 0) {
    throw new ParseException(lexer.getErrors());
  } else {
    throw new ParseException(parser.errors);
  }
  return (ASTNode) r.getTree();
}

public ASTNode parseExpression(String command) throws ParseException {
/**
 * Parses a standalone Hive expression into an Abstract Syntax Tree.
 *
 * @param command the expression text to parse
 * @return the root {@code ASTNode} of the parsed expression
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseExpression(String command) throws ParseException {
  LOG.info("Parsing expression: " + command);
  HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokens = new TokenRewriteStream(lexer);
  HiveParser parser = new HiveParser(tokens);
  parser.setTreeAdaptor(adaptor);
  HiveParser_IdentifiersParser.expression_return r = null;
  try {
    r = parser.expression();
  } catch (RecognitionException e) {
    // Propagate the accumulated parser errors instead of dumping the stack
    // trace to stderr (printStackTrace bypasses the logging framework).
    throw new ParseException(parser.errors);
  }
  if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
    LOG.info("Parse Completed");
  } else if (lexer.getErrors().size() != 0) {
    throw new ParseException(lexer.getErrors());
  } else {
    throw new ParseException(parser.errors);
  }
  return (ASTNode) r.getTree();
}
}
/**
 * Parses a standalone query hint into an Abstract Syntax Tree.
 *
 * @param command the hint text to parse
 * @return the root {@code ASTNode} of the parsed hint
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseHint(String command) throws ParseException {
  LOG.info("Parsing hint: " + command);
  HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokens = new TokenRewriteStream(lexer);
  HintParser parser = new HintParser(tokens);
  parser.setTreeAdaptor(adaptor);
  HintParser.hint_return r = null;
  try {
    r = parser.hint();
  } catch (RecognitionException e) {
    // Propagate the accumulated parser errors instead of dumping the stack
    // trace to stderr (printStackTrace bypasses the logging framework).
    throw new ParseException(parser.errors);
  }
  if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
    LOG.info("Parse Completed");
  } else if (lexer.getErrors().size() != 0) {
    throw new ParseException(lexer.getErrors());
  } else {
    throw new ParseException(parser.errors);
  }
  return (ASTNode) r.getTree();
}
/**
 * Parses a standalone SELECT clause into an Abstract Syntax Tree.
 *
 * @param command the select-clause text to parse
 * @param ctx optional context; when non-null it receives the token rewrite stream
 * @return the root node of the parsed select clause
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseSelect(String command, Context ctx) throws ParseException {
  LOG.debug("Parsing command: {}", command);
  HiveLexerX hiveLexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokenStream = new TokenRewriteStream(hiveLexer);
  if (ctx != null) {
    ctx.setTokenRewriteStream(tokenStream);
  }
  HiveParser hiveParser = new HiveParser(tokenStream);
  hiveParser.setTreeAdaptor(adaptor);
  HiveParser_SelectClauseParser.selectClause_return result;
  try {
    result = hiveParser.selectClause();
  } catch (RecognitionException e) {
    throw new ParseException(hiveParser.errors);
  }
  // Lexer errors take precedence over parser errors, matching ParseDriver.
  if (hiveLexer.getErrors().size() != 0) {
    throw new ParseException(hiveLexer.getErrors());
  } else if (hiveParser.errors.size() != 0) {
    throw new ParseException(hiveParser.errors);
  }
  LOG.debug("Parse Completed");
  return result.getTree();
}

public ASTNode parseExpression(String command) throws ParseException {
/**
 * Parses a standalone Hive expression into an Abstract Syntax Tree.
 *
 * @param command the expression text to parse
 * @return the root {@code ASTNode} of the parsed expression
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseExpression(String command) throws ParseException {
  LOG.debug("Parsing expression: {}", command);
  HiveLexerX hiveLexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokenStream = new TokenRewriteStream(hiveLexer);
  HiveParser hiveParser = new HiveParser(tokenStream);
  hiveParser.setTreeAdaptor(adaptor);
  HiveParser_IdentifiersParser.expression_return result;
  try {
    result = hiveParser.expression();
  } catch (RecognitionException e) {
    throw new ParseException(hiveParser.errors);
  }
  // Lexer errors take precedence over parser errors, matching ParseDriver.
  if (hiveLexer.getErrors().size() != 0) {
    throw new ParseException(hiveLexer.getErrors());
  } else if (hiveParser.errors.size() != 0) {
    throw new ParseException(hiveParser.errors);
  }
  LOG.debug("Parse Completed");
  return (ASTNode) result.getTree();
}
/**
 * Parses a standalone query hint into an Abstract Syntax Tree.
 *
 * @param command the hint text to parse
 * @return the root {@code ASTNode} of the parsed hint
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseHint(String command) throws ParseException {
  LOG.debug("Parsing hint: {}", command);
  HiveLexerX hiveLexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokenStream = new TokenRewriteStream(hiveLexer);
  HintParser hintParser = new HintParser(tokenStream);
  hintParser.setTreeAdaptor(adaptor);
  HintParser.hint_return result;
  try {
    result = hintParser.hint();
  } catch (RecognitionException e) {
    throw new ParseException(hintParser.errors);
  }
  // Lexer errors take precedence over parser errors, matching ParseDriver.
  if (hiveLexer.getErrors().size() != 0) {
    throw new ParseException(hiveLexer.getErrors());
  } else if (hintParser.errors.size() != 0) {
    throw new ParseException(hintParser.errors);
  }
  LOG.debug("Parse Completed");
  return (ASTNode) result.getTree();
}
/**
 * Builds an error message for a recognition failure: a character-oriented
 * message for no-viable-alternative failures, the ANTLR default otherwise.
 *
 * @param e the recognition exception raised by the lexer
 * @param tokenNames token display names supplied by the ANTLR runtime
 * @return the formatted error message
 */
@Override public String getErrorMessage(RecognitionException e, String[] tokenNames) { String msg = null; if (e instanceof NoViableAltException) { @SuppressWarnings("unused") NoViableAltException nvae = (NoViableAltException) e; // for development, can add // "decision=<<"+nvae.grammarDecisionDescription+">>" // and "(decision="+nvae.decisionNumber+") and // "state "+nvae.stateNumber msg = "character " + getCharErrorDisplay(e.c) + " not supported here"; } else { msg = super.getErrorMessage(e, tokenNames); } return msg; }
/**
 * Parses a standalone resource-plan trigger action expression into an AST.
 *
 * @param command the trigger action expression text to parse
 * @return the root node of the parsed trigger action expression
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseTriggerActionExpression(String command) throws ParseException {
  HiveLexerX hiveLexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokenStream = new TokenRewriteStream(hiveLexer);
  HiveParser hiveParser = new HiveParser(tokenStream);
  hiveParser.setTreeAdaptor(adaptor);
  HiveParser_ResourcePlanParser.triggerActionExpressionStandalone_return result;
  try {
    result = hiveParser.gResourcePlanParser.triggerActionExpressionStandalone();
  } catch (RecognitionException e) {
    throw new ParseException(hiveParser.errors);
  }
  // Lexer errors take precedence over parser errors.
  if (hiveLexer.getErrors().size() != 0) {
    throw new ParseException(hiveLexer.getErrors());
  }
  if (hiveParser.errors.size() != 0) {
    throw new ParseException(hiveParser.errors);
  }
  return result.getTree();
}
}
/**
 * Parses a standalone resource-plan trigger expression into an AST.
 *
 * @param command the trigger expression text to parse
 * @return the root node of the parsed trigger expression
 * @throws ParseException if the lexer or parser accumulated any errors
 */
public ASTNode parseTriggerExpression(String command) throws ParseException {
  HiveLexerX hiveLexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokenStream = new TokenRewriteStream(hiveLexer);
  HiveParser hiveParser = new HiveParser(tokenStream);
  hiveParser.setTreeAdaptor(adaptor);
  HiveParser_ResourcePlanParser.triggerExpressionStandalone_return result;
  try {
    result = hiveParser.gResourcePlanParser.triggerExpressionStandalone();
  } catch (RecognitionException e) {
    throw new ParseException(hiveParser.errors);
  }
  // Lexer errors take precedence over parser errors.
  if (hiveLexer.getErrors().size() != 0) {
    throw new ParseException(hiveLexer.getErrors());
  }
  if (hiveParser.errors.size() != 0) {
    throw new ParseException(hiveParser.errors);
  }
  return result.getTree();
}
@Override public String getErrorMessage(RecognitionException e, String[] tokenNames) { String msg = null; if (e instanceof NoViableAltException) { @SuppressWarnings("unused") NoViableAltException nvae = (NoViableAltException) e; // for development, can add // "decision=<<"+nvae.grammarDecisionDescription+">>" // and "(decision="+nvae.decisionNumber+") and // "state "+nvae.stateNumber msg = "character " + getCharErrorDisplay(e.c) + " not supported here"; } else { msg = super.getErrorMessage(e, tokenNames); } return msg; }