Refine search
/**
 * Returns the index of the trailing TOK_WINDOWSPEC child of the given window
 * AST node, or -1 when the last child is not a window spec (or the node has
 * fewer than two children).
 */
private int getWindowSpecIndx(ASTNode wndAST) {
  final int lastChild = wndAST.getChildCount() - 1;
  if (lastChild <= 0) {
    return -1;
  }
  return wndAST.getChild(lastChild).getType() == HiveParser.TOK_WINDOWSPEC ? lastChild : -1;
}
private String extractJoinAlias(ASTNode node, String tableName) { // ptf node form is: // ^(TOK_PTBLFUNCTION $name $alias? partitionTableFunctionSource partitioningSpec? expression*) // guaranteed to have an alias here: check done in processJoin if (node.getType() == HiveParser.TOK_PTBLFUNCTION) { return unescapeIdentifier(node.getChild(1).getText().toLowerCase()); } if (node.getChildCount() == 1) { return tableName; } for (int i = node.getChildCount() - 1; i >= 1; i--) { if (node.getChild(i).getType() == HiveParser.Identifier) { return unescapeIdentifier(node.getChild(i).getText().toLowerCase()); } } return tableName; }
// NOTE(review): this fragment begins mid-method (no signature visible) and ends
// with unclosed braces — documented as-is, not restructured.
int relyIndex = 2; int cnt = 1;
// Resolve db/table name from the first child of the parent node.
String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
for (int j = 0; j < child.getChild(0).getChildCount(); j++) {
  Tree grandChild = child.getChild(0).getChild(j);
  // NOTE(review): 'rely' and 'validate' are both derived from TOK_VALIDATE (at
  // different child indexes) — confirm these token types are the intended ones.
  boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
  boolean enable = child.getChild(relyIndex+1).getType() == HiveParser.TOK_ENABLE;
  boolean validate = child.getChild(relyIndex+2).getType() == HiveParser.TOK_VALIDATE;
  if (enable) {
    // NOTE(review): constructing a SemanticException from a SQLPrimaryKey looks
    // garbled — presumably an "ENABLE not supported" message was intended here.
    throw new SemanticException(
        new SQLPrimaryKey(
            qualifiedTabName[0], qualifiedTabName[1],
            unescapeIdentifier(grandChild.getText().toLowerCase()),
            cnt++,
            unescapeIdentifier(child.getChild(1).getText().toLowerCase()),
            false, false, rely));
public boolean isSimpleSelectQuery() { if (joinExpr != null || !destToOrderby.isEmpty() || !destToSortby.isEmpty() || !destToGroupby.isEmpty() || !destToClusterby.isEmpty() || !destToDistributeby.isEmpty() || !destRollups.isEmpty() || !destCubes.isEmpty() || !destGroupingSets.isEmpty() || !destToHaving.isEmpty()) { return false; } for (Map<String, ASTNode> entry : destToAggregationExprs.values()) { if (entry != null && !entry.isEmpty()) { return false; } } for (Map<String, ASTNode> entry : destToWindowingExprs.values()) { if (entry != null && !entry.isEmpty()) { return false; } } for (List<ASTNode> ct : destToDistinctFuncExprs.values()) { if (!ct.isEmpty()) { return false; } } // exclude insert queries for (ASTNode v : nameToDest.values()) { if (!(v.getChild(0).getType() == HiveParser.TOK_TMP_FILE)) { return false; } } return true; }
private String extractJoinAlias(ASTNode node, String tableName) { // ptf node form is: // ^(TOK_PTBLFUNCTION $name $alias? partitionTableFunctionSource partitioningSpec? expression*) // guaranteed to have an alias here: check done in processJoin if (node.getType() == HiveParser.TOK_PTBLFUNCTION) { return unescapeIdentifier(node.getChild(1).getText().toLowerCase()); } if (node.getChildCount() == 1) { return tableName; } for (int i = node.getChildCount() - 1; i >= 1; i--) { if (node.getChild(i).getType() == HiveParser.Identifier) { return unescapeIdentifier(node.getChild(i).getText().toLowerCase()); } } return tableName; }
/**
 * Returns the WHERE clause of an insert clause, or null if absent.
 * The WHERE node, when present, is the third child of the insert clause.
 */
static ASTNode subQueryWhere(ASTNode insertClause) {
  boolean hasWhere = insertClause.getChildCount() > 2
      && insertClause.getChild(2).getType() == HiveParser.TOK_WHERE;
  return hasWhere ? (ASTNode) insertClause.getChild(2) : null;
}
/**
 * Registers alternate (table-alias qualified) GROUP BY key mappings in the
 * GBy RowResolver for DOT and bare-column key expressions.
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
    RowResolver gByInputRR, RowResolver gByRR) {
  boolean isQualifiedRef = gByExpr.getType() == HiveParser.DOT
      && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL;
  if (isQualifiedRef) {
    // Qualified column (tab.col): both aliases come straight off the AST.
    String tabAlias = BaseSemanticAnalyzer.unescapeIdentifier(
        gByExpr.getChild(0).getChild(0).getText().toLowerCase());
    String colAlias = BaseSemanticAnalyzer.unescapeIdentifier(
        gByExpr.getChild(1).getText().toLowerCase());
    gByRR.put(tabAlias, colAlias, colInfo);
  } else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) {
    String colAlias = BaseSemanticAnalyzer.unescapeIdentifier(
        gByExpr.getChild(0).getText().toLowerCase());
    String tabAlias = null;
    /*
     * If the GBy input resolves this column under a table alias, reuse that
     * alias. E.g. "select b.x, count(*) from t1 b group by x" needs
     * (tab_alias=b, col_alias=x) in the GBy RowResolver; b comes from the
     * RowResolver that is the ancestor before any GBy/ReduceSinks added for
     * the GBY operation.
     */
    try {
      ColumnInfo inputColInfo = gByInputRR.get(tabAlias, colAlias);
      tabAlias = inputColInfo == null ? null : inputColInfo.getTabAlias();
    } catch (SemanticException ignored) {
      // Best effort: an unresolved column simply keeps a null table alias.
    }
    gByRR.put(tabAlias, colAlias, colInfo);
  }
}
/**
 * Resolves the display name of a selected expression subtree: a leaf's own
 * text, a dotted reference joined recursively, or otherwise the text of the
 * last child.
 */
private String getSelectedName(final Tree selectable) {
  final int childCount = selectable.getChildCount();
  if (childCount == 0) {
    // Leaf node: the token text is the name itself.
    return selectable.getText();
  }
  if (selectable.getType() == DOT) {
    // Dotted reference: join both sides recursively.
    return getSelectedName(selectable.getChild(0)) + "."
        + getSelectedName(selectable.getChild(1));
  }
  // Otherwise the last child carries the effective name.
  return selectable.getChild(childCount - 1).getText();
}
public boolean isSimpleSelectQuery() { if (joinExpr != null || !destToOrderby.isEmpty() || !destToSortby.isEmpty() || !destToGroupby.isEmpty() || !destToClusterby.isEmpty() || !destToDistributeby.isEmpty() || !destRollups.isEmpty() || !destCubes.isEmpty() || !destGroupingSets.isEmpty() || !destToHaving.isEmpty()) { return false; } for (Map<String, ASTNode> entry : destToAggregationExprs.values()) { if (entry != null && !entry.isEmpty()) { return false; } } for (Map<String, ASTNode> entry : destToWindowingExprs.values()) { if (entry != null && !entry.isEmpty()) { return false; } } for (List<ASTNode> ct : destToDistinctFuncExprs.values()) { if (!ct.isEmpty()) { return false; } } // exclude insert queries for (ASTNode v : nameToDest.values()) { if (!(v.getChild(0).getType() == HiveParser.TOK_TMP_FILE)) { return false; } } return true; }
/** * disableMapJoinWithHint * @param hints * @return true if hint to disable hint is provided, else false * @throws SemanticException */ private boolean disableMapJoinWithHint(List<ASTNode> hints) throws SemanticException { if (hints == null || hints.size() == 0) { return false; } for (ASTNode hintNode : hints) { for (Node node : hintNode.getChildren()) { ASTNode hint = (ASTNode) node; if (hint.getChild(0).getType() != HintParser.TOK_MAPJOIN) { continue; } Tree args = hint.getChild(1); if (args.getChildCount() == 1) { String text = args.getChild(0).getText(); if (text.equalsIgnoreCase("None")) { // Hint to disable mapjoin. return true; } } } } return false; }
/**
 * Finds the child index of the window spec attached to a window AST node.
 *
 * @return the index of the last child when it is a TOK_WINDOWSPEC, else -1
 */
private int getWindowSpecIndx(ASTNode wndAST) {
  int idx = wndAST.getChildCount() - 1;
  boolean hasSpec = idx > 0 && wndAST.getChild(idx).getType() == HiveParser.TOK_WINDOWSPEC;
  return hasSpec ? idx : -1;
}
/**
 * Adds alternate table-alias qualified mappings for a GROUP BY key into the
 * GBy RowResolver.
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
    RowResolver gByInputRR, RowResolver gByRR) {
  if (gByExpr.getType() == HiveParser.DOT
      && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
    // tab.col form: read both aliases directly from the AST and record them.
    String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(
        gByExpr.getChild(0).getChild(0).getText().toLowerCase());
    String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
        gByExpr.getChild(1).getText().toLowerCase());
    gByRR.put(tab_alias, col_alias, colInfo);
    return;
  }
  if (gByExpr.getType() != HiveParser.TOK_TABLE_OR_COL) {
    // Other expression shapes get no alternate mapping.
    return;
  }
  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
      gByExpr.getChild(0).getText().toLowerCase());
  String tab_alias = null;
  /*
   * If the input to the GBy has a tab alias for the column, then add an
   * entry based on that tab_alias. For e.g. this query: select b.x,
   * count(*) from t1 b group by x needs (tab_alias=b, col_alias=x) in the
   * GBy RR. tab_alias=b comes from looking at the RowResolver that is the
   * ancestor before any GBy/ReduceSinks added for the GBY operation.
   */
  try {
    ColumnInfo pColInfo = gByInputRR.get(tab_alias, col_alias);
    tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
  } catch (SemanticException ignored) {
    // Deliberate best-effort lookup: fall back to a null table alias.
  }
  gByRR.put(tab_alias, col_alias, colInfo);
}
/**
 * Processes a DECLARE subtree, adding one Declaration per child.
 * Each declaration child carries (alias identifier, REQUIRED/optional flag,
 * segment name) in that order.
 */
private void processDeclare(final Tree declare) {
  for (int d = 0; d < declare.getChildCount(); d++) {
    final Tree declTree = declare.getChild(d);
    final String alias = declTree.getChild(0).getText();
    final boolean isRequired = declTree.getChild(1).getType() == REQUIRED;
    final String segmentName = declTree.getChild(2).getText();
    declarations.add(new Declaration() {
      @Override
      public String getAlias() {
        return alias;
      }

      @Override
      public boolean isRequired() {
        return isRequired;
      }

      @Override
      public Object getDeclaredValue(final HL7Message message) {
        // Resolving against a null message yields null rather than throwing.
        return message == null ? null : message.getSegments(segmentName);
      }
    });
  }
}
/**
 * Extracts a QUERY_HINT expression from the first select clause, re-parses
 * it with the hint grammar, and stores the resulting AST on the parse info.
 *
 * @throws SemanticException if the hint text fails to parse
 */
private void setQueryHints(QB qb) throws SemanticException {
  QBParseInfo qbp = getQBParseInfo(qb);
  String destClause = qbp.getClauseNames().iterator().next();
  Tree hintExpr = qbp.getSelForClause(destClause).getChild(0);
  if (hintExpr.getType() != HiveParser.QUERY_HINT) {
    return;
  }
  // Recover the raw hint text from the token stream so the dedicated hint
  // grammar can parse it.
  String hint = ctx.getTokenRewriteStream().toString(
      hintExpr.getTokenStartIndex(), hintExpr.getTokenStopIndex());
  LOG.debug("Handling query hints: " + hint);
  try {
    qbp.setHints(new ParseDriver().parseHint(hint));
  } catch (ParseException e) {
    throw new SemanticException("failed to parse query hint: "+e.getMessage(), e);
  }
}
/**
 * Recursively checks whether the expression tree contains a call to the
 * lead or lag windowing UDF.
 */
private boolean containsLeadLagUDF(ASTNode expressionTree) {
  if (expressionTree.getToken().getType() == HiveParser.TOK_FUNCTION) {
    assert (expressionTree.getChildCount() != 0);
    if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
      String fnName =
          unescapeIdentifier(expressionTree.getChild(0).getText()).toLowerCase();
      if (FunctionRegistry.LAG_FUNC_NAME.equals(fnName)
          || FunctionRegistry.LEAD_FUNC_NAME.equals(fnName)) {
        return true;
      }
    }
  }
  // Not a lead/lag call itself — search every child subtree.
  for (int c = 0; c < expressionTree.getChildCount(); c++) {
    if (containsLeadLagUDF((ASTNode) expressionTree.getChild(c))) {
      return true;
    }
  }
  return false;
}
/**
 * Looks up the WHERE clause (third child) of an insert clause.
 *
 * @return the TOK_WHERE node, or null when no WHERE clause is present
 */
static ASTNode subQueryWhere(ASTNode insertClause) {
  if (insertClause.getChildCount() <= 2) {
    return null;
  }
  if (insertClause.getChild(2).getType() != HiveParser.TOK_WHERE) {
    return null;
  }
  return (ASTNode) insertClause.getChild(2);
}
/**
 * Adds alternate (table-alias qualified) GROUP BY key mappings into the GBy
 * RowResolver for DOT and bare-column key expressions.
 * NOTE(review): this block is truncated in the visible source — the else-if
 * branch ends right after declaring tab_alias; documented as-is.
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
    Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
  if ( gByExpr.getType() == HiveParser.DOT
      && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL ) {
    // Qualified reference tab.col: record the mapping under that table alias.
    String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
        .getChild(0).getChild(0).getText().toLowerCase());
    String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
        gByExpr.getChild(1).getText().toLowerCase());
    gByRR.put(tab_alias, col_alias, colInfo);
  } else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
    // Bare column reference: the table alias is unknown at this point.
    String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
        .getChild(0).getText().toLowerCase());
    String tab_alias = null;
/**
 * Builds the evaluator for a reference expression node: the whole message,
 * a segment name, a declared alias, a dotted path, or a literal.
 *
 * @throws HL7QueryParsingException if the node type is not a recognized reference
 */
private Evaluator<?> buildReferenceEvaluator(final Tree tree) {
  final int nodeType = tree.getType();
  if (nodeType == MESSAGE) {
    return new MessageEvaluator();
  }
  if (nodeType == SEGMENT_NAME) {
    return new SegmentEvaluator(new StringLiteralEvaluator(tree.getText()));
  }
  if (nodeType == IDENTIFIER) {
    return new DeclaredReferenceEvaluator(new StringLiteralEvaluator(tree.getText()));
  }
  if (nodeType == DOT) {
    // a.b — the left side is itself a reference, the right side an index.
    return new DotEvaluator(
        buildReferenceEvaluator(tree.getChild(0)),
        buildIntegerEvaluator(tree.getChild(1)));
  }
  if (nodeType == STRING_LITERAL) {
    return new StringLiteralEvaluator(tree.getText());
  }
  if (nodeType == NUMBER) {
    return new IntegerLiteralEvaluator(Integer.parseInt(tree.getText()));
  }
  throw new HL7QueryParsingException("Failed to build evaluator for " + tree.getText());
}
/**
 * Exporting an Acid table is more complicated than a flat table. It may contain delete events,
 * which can only be interpreted properly within the context of the table/metastore where they
 * were generated. It may also contain insert events that belong to transactions that aborted
 * where the same constraints apply.
 * In order to make the export artifact free of these constraints, the export does a
 * "insert into tmpTable select * from &lt;export table&gt;" to filter/apply the events in the
 * current context and then exports the tmpTable. This export artifact can now be imported into
 * any table on any cluster (subject to schema checks etc).
 * See {@link #analyzeAcidExport(ASTNode)}
 * @param tree Export statement
 * @return true if exporting an Acid table.
 */
public static boolean isAcidExport(ASTNode tree) throws SemanticException {
  assert tree != null && tree.getToken() != null
      && tree.getToken().getType() == HiveParser.TOK_EXPORT;
  // First child of TOK_EXPORT is the TOK_TAB node naming the source table.
  Tree tokTab = tree.getChild(0);
  assert tokTab != null && tokTab.getType() == HiveParser.TOK_TAB;
  Table tableHandle = null;
  try {
    tableHandle = getTable((ASTNode) tokTab.getChild(0), Hive.get(), false);
  } catch(HiveException ex) {
    throw new SemanticException(ex);
  }
  //tableHandle can be null if table doesn't exist
  return tableHandle != null && AcidUtils.isFullAcidTable(tableHandle);
}
// NOTE(review): the following method body is truncated in the visible source.
private static String getTmptTableNameForExport(Table exportTable) {