Refine search
/**
 * Recursively reconstructs the textual name represented by the given parse-tree node.
 * A leaf yields its own text; a DOT node yields "left.right" built from its two
 * children; any other non-leaf yields the text of its last child.
 *
 * @param selectable the parse-tree node to render
 * @return the reconstructed (possibly dotted) name
 */
private String getSelectedName(final Tree selectable) {
    final int childCount = selectable.getChildCount();
    if (childCount == 0) {
        // Leaf node: its text is the name itself.
        return selectable.getText();
    }
    if (selectable.getType() == DOT) {
        // Dotted reference: join the two sides with a period.
        final String left = getSelectedName(selectable.getChild(0));
        final String right = getSelectedName(selectable.getChild(1));
        return left + "." + right;
    }
    // Other non-leaf nodes: the last child carries the name text.
    return selectable.getChild(childCount - 1).getText();
}
/**
 * Extracts the partition key/value pairs from the AST of an ANALYZE-style statement.
 *
 * @param tbl      the table the statement refers to
 * @param tree     root AST node; its first child's second child is expected to be the
 *                 partition-spec node, if one was written — TODO confirm grammar shape
 * @param hiveConf configuration used when validating the partition spec
 * @return the validated partition spec, or an empty map when no partition spec is present
 * @throws SemanticException if the partition spec fails validation
 */
public static Map<String,String> getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, HiveConf hiveConf) throws SemanticException { // Partition-spec node; null when the statement carries no partition spec.
ASTNode child = ((ASTNode) tree.getChild(0).getChild(1)); Map<String,String> partSpec = new HashMap<String, String>(); if (child != null) { partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false); } // otherwise it is the "analyze table T compute statistics for columns" case: no partition spec, return the empty map
return partSpec; } }
/**
 * Builds a RecordPath segment for each argument in the given argument-list subtree,
 * enforcing a minimum and maximum argument count.
 *
 * @param argumentListTree the subtree whose children are the function's arguments
 * @param minCount         smallest number of arguments the function accepts
 * @param maxCount         largest number of arguments the function accepts
 * @param functionName     name of the function, used only in the error message
 * @param absolute         whether the argument paths are absolute (forwarded to buildPath)
 * @return one built path per argument, in argument order
 * @throws RecordPathException if the number of arguments is outside [minCount, maxCount]
 */
private static RecordPathSegment[] getArgPaths(final Tree argumentListTree, final int minCount, final int maxCount, final String functionName, final boolean absolute) {
    final int numArgs = argumentListTree.getChildCount();
    if (numArgs < minCount || numArgs > maxCount) {
        // Fixed: the original message was missing spaces around the counts,
        // producing e.g. "takes at least1 arguments, and at most 3arguments".
        throw new RecordPathException("Invalid number of arguments: " + functionName + " function takes at least "
            + minCount + " arguments, and at most " + maxCount + " arguments, but got " + numArgs);
    }

    // Presize to the known argument count to avoid resizing.
    final List<RecordPathSegment> argPaths = new ArrayList<>(numArgs);
    for (int i = 0; i < numArgs; i++) {
        argPaths.add(buildPath(argumentListTree.getChild(i), null, absolute));
    }

    return argPaths.toArray(new RecordPathSegment[0]);
}
}
/**
 * Converts parsed key/value property pairs into a map.
 *
 * @param prop ASTNode parent of the key/value pairs
 * @param mapProp property map which receives the mappings
 */
public static void readProps(ASTNode prop, Map<String, String> mapProp) {
    for (int idx = 0; idx < prop.getChildCount(); idx++) {
        // Each child is one key/value pair; child(0) is the key, child(1) the optional value.
        final String key = unescapeSQLString(prop.getChild(idx).getChild(0).getText());
        // A missing value node maps the key to null.
        final String value = prop.getChild(idx).getChild(1) == null
            ? null
            : unescapeSQLString(prop.getChild(idx).getChild(1).getText());
        mapProp.put(key, value);
    }
}
/**
 * Returns true when this query block is an unaliased, single-destination
 * "SELECT *" over a simple select query: exactly one destination, whose single
 * select expression is a TOK_ALLCOLREF.
 */
public boolean isTopLevelSimpleSelectStarQuery() {
    // Must be unaliased and otherwise simple before looking at the select list.
    if (alias != null) {
        return false;
    }
    if (destToSelExpr.size() != 1 || !isSimpleSelectQuery()) {
        return false;
    }
    for (ASTNode selExprs : destToSelExpr.values()) {
        // Exactly one select expression is required.
        if (selExprs.getChildCount() != 1) {
            return false;
        }
        // That expression must be the "*" (all-columns) token.
        final Tree sel = selExprs.getChild(0).getChild(0);
        if (sel == null || sel.getType() != HiveParser.TOK_ALLCOLREF) {
            return false;
        }
    }
    return true;
}
return new EqualsEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1))); case NOT_EQUALS: return new NotEqualsEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1))); case GT: return new GreaterThanEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1))); case LT: return new LessThanEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1))); case GE: return new GreaterThanOrEqualEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1))); case LE: return new LessThanOrEqualEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1))); case NOT: return new NotEvaluator(buildBooleanEvaluator(tree.getChild(0))); case AND: return new AndEvaluator(buildBooleanEvaluator(tree.getChild(0)), buildBooleanEvaluator(tree.getChild(1))); case OR: return new OrEvaluator(buildBooleanEvaluator(tree.getChild(0)), buildBooleanEvaluator(tree.getChild(1))); case IS_NULL: return new IsNullEvaluator(buildReferenceEvaluator(tree.getChild(0))); case NOT_NULL: return new NotNullEvaluator(buildReferenceEvaluator(tree.getChild(0))); default: throw new HL7QueryParsingException("Cannot build boolean evaluator for '" + tree.getText() + "'");
/**
 * Converts parsed key/value properties pairs into a map.
 *
 * @param prop ASTNode parent of the key/value pairs
 * @param mapProp property map which receives the mappings; an absent value node
 *                maps the key to {@code null}
 */
public static void readProps( ASTNode prop, Map<String, String> mapProp) { for (int propChild = 0; propChild < prop.getChildCount(); propChild++) { // child(0) of each pair is the quoted key
String key = unescapeSQLString(prop.getChild(propChild).getChild(0) .getText()); String value = null; // child(1), when present, is the quoted value
if (prop.getChild(propChild).getChild(1) != null) { value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText()); } mapProp.put(key, value); } }
/**
 * Appends an indented, one-node-per-line rendering of the given tree to the
 * builder, descending depth-first and increasing the indent by two spaces per level.
 *
 * @param tree        the node to render
 * @param sb          receives the rendered text
 * @param indentLevel number of leading spaces for this node's line
 */
private void toTreeString(final Tree tree, final StringBuilder sb, final int indentLevel) {
    // Emit this node's line: indent, node text, newline.
    int remaining = indentLevel;
    while (remaining-- > 0) {
        sb.append(" ");
    }
    sb.append(tree.getText()).append("\n");

    // Recurse into children, two spaces deeper.
    final int childCount = tree.getChildCount();
    for (int i = 0; i < childCount; i++) {
        toTreeString(tree.getChild(i), sb, indentLevel + 2);
    }
}
}
/**
 * Builds a RecordPath segment for each argument in the given argument-list subtree,
 * requiring exactly the expected number of arguments.
 *
 * @param argumentListTree the subtree whose children are the function's arguments
 * @param expectedCount    exact number of arguments the function accepts
 * @param functionName     name of the function, used only in the error message
 * @param absolute         whether the argument paths are absolute (forwarded to buildPath)
 * @return one built path per argument, in argument order
 * @throws RecordPathException if the argument count differs from expectedCount
 */
private static RecordPathSegment[] getArgPaths(final Tree argumentListTree, final int expectedCount, final String functionName, final boolean absolute) {
    final int numArgs = argumentListTree.getChildCount();
    if (numArgs != expectedCount) {
        throw new RecordPathException("Invalid number of arguments: " + functionName + " function takes " + expectedCount + " arguments but got " + numArgs);
    }

    final RecordPathSegment[] argPaths = new RecordPathSegment[expectedCount];
    for (int index = 0; index < argPaths.length; index++) {
        argPaths[index] = buildPath(argumentListTree.getChild(index), null, absolute);
    }
    return argPaths;
}
public static Map<String,String> getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, HiveConf hiveConf) throws SemanticException { ASTNode child = ((ASTNode) tree.getChild(0).getChild(1)); Map<String,String> partSpec = new HashMap<String, String>(); if (child != null) { partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false); } //otherwise, it is the case of analyze table T compute statistics for columns; return partSpec; } }
/**
 * Returns true when this query block is an unaliased, single-destination
 * "SELECT *" over a simple select query: exactly one destination, whose single
 * select expression is a TOK_ALLCOLREF.
 */
public boolean isTopLevelSimpleSelectStarQuery() { if (alias != null || destToSelExpr.size() != 1 || !isSimpleSelectQuery()) { return false; } for (ASTNode selExprs : destToSelExpr.values()) { // exactly one select expression per destination
if (selExprs.getChildCount() != 1) { return false; } // that expression must be the "*" (all-columns) token
Tree sel = selExprs.getChild(0).getChild(0); if (sel == null || sel.getType() != HiveParser.TOK_ALLCOLREF) { return false; } } return true; }
/**
 * Builds an evaluator for a reference node of the HL7 query AST, dispatching on
 * the node's token type.
 *
 * @param tree the reference node to translate
 * @return the evaluator corresponding to the node type
 * @throws HL7QueryParsingException if the node type is not a recognized reference
 */
private Evaluator<?> buildReferenceEvaluator(final Tree tree) {
    switch (tree.getType()) {
        case MESSAGE:
            // The whole message; no operand needed.
            return new MessageEvaluator();
        case SEGMENT_NAME:
            return new SegmentEvaluator(new StringLiteralEvaluator(tree.getText()));
        case IDENTIFIER:
            // A reference declared earlier in the query.
            return new DeclaredReferenceEvaluator(new StringLiteralEvaluator(tree.getText()));
        case DOT:
            // left.right — left is a reference, right is an integer index.
            return new DotEvaluator(
                buildReferenceEvaluator(tree.getChild(0)),
                buildIntegerEvaluator(tree.getChild(1)));
        case STRING_LITERAL:
            return new StringLiteralEvaluator(tree.getText());
        case NUMBER:
            return new IntegerLiteralEvaluator(Integer.parseInt(tree.getText()));
        default:
            throw new HL7QueryParsingException("Failed to build evaluator for " + tree.getText());
    }
}
/**
 * Get the constraint from the AST and populate the cstrInfos with the required
 * information.
 *
 * @param child the node with the constraint token; its first child holds the
 *              constraint's column-name nodes
 * @param cstrInfos receives the constraint information
 * @throws SemanticException if a column name fails validation
 */
private static void generateConstraintInfos(ASTNode child, List<ConstraintInfo> cstrInfos) throws SemanticException {
    final ImmutableList.Builder<String> columnNames = ImmutableList.builder();
    final int numColumns = child.getChild(0).getChildCount();
    for (int idx = 0; idx < numColumns; idx++) {
        final Tree columnName = child.getChild(0).getChild(idx);
        final String rawName = columnName.getText();
        // Validate before unescaping/lower-casing, matching checkColumnName's contract.
        checkColumnName(rawName);
        columnNames.add(unescapeIdentifier(rawName.toLowerCase()));
    }
    // Delegate to the full overload with no default value or enable/validate flags.
    generateConstraintInfos(child, columnNames.build(), cstrInfos, null, null);
}
private String poolPath(Tree ast) { StringBuilder builder = new StringBuilder(); builder.append(unescapeIdentifier(ast.getText())); for (int i = 0; i < ast.getChildCount(); ++i) { // DOT is not affected builder.append(unescapeIdentifier(ast.getChild(i).getText())); } return builder.toString(); }
/**
 * Registers alternate (tab_alias, col_alias) mappings for a GROUP BY key in the
 * GBy RowResolver, so the key can later be resolved by the aliases the user wrote.
 *
 * @param gByExpr   the GROUP BY key expression (either "alias.col" as a DOT node,
 *                  or a bare column as TOK_TABLE_OR_COL)
 * @param colInfo   column info to register under the derived aliases
 * @param gByInputRR resolver of the GBy's input, consulted to recover a table alias
 *                   for a bare column reference
 * @param gByRR     the GBy RowResolver receiving the mapping
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo, RowResolver gByInputRR, RowResolver gByRR) { // Case 1: "alias.col" — both aliases come straight from the expression.
if (gByExpr.getType() == HiveParser.DOT && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) { String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getChild(0) .getText().toLowerCase()); String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(1).getText().toLowerCase()); gByRR.put(tab_alias, col_alias, colInfo); } // Case 2: bare column — recover the table alias from the input resolver, if any.
else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) { String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getText().toLowerCase()); String tab_alias = null; /*
 * If the input to the GBy has a tab alias for the column, then add an
 * entry based on that tab_alias. For e.g. this query: select b.x,
 * count(*) from t1 b group by x needs (tab_alias=b, col_alias=x) in the
 * GBy RR. tab_alias=b comes from looking at the RowResolver that is the
 * ancestor before any GBy/ReduceSinks added for the GBY operation.
 */ // NOTE(review): the lookup below passes tab_alias=null (unqualified lookup);
// a SemanticException is deliberately swallowed and tab_alias stays null —
// presumably an ambiguous/unknown column is tolerated here. TODO confirm.
try { ColumnInfo pColInfo = gByInputRR.get(tab_alias, col_alias); tab_alias = pColInfo == null ? null : pColInfo.getTabAlias(); } catch (SemanticException se) { } gByRR.put(tab_alias, col_alias, colInfo); } }
/**
 * Builds an evaluator for an EXPRESSION node whose children form a chain of
 * function calls applied left-to-right (e.g. subject:f():g()). The chain is
 * walked from the right: {@code offset} counts how many trailing function
 * children have already been consumed by outer recursive calls.
 *
 * @param tree   the EXPRESSION node; child 0 is the subject, later children are
 *               function-call nodes (each with its name at child 0 and its
 *               arguments at children 1..n)
 * @param offset number of rightmost function children already handled
 * @return the evaluator for the remaining (leftward) part of the chain
 * @throws AttributeExpressionLanguageParsingException if the node has no children
 */
private Evaluator<?> buildFunctionExpressionEvaluator(final Tree tree, final int offset) { if (tree.getChildCount() == 0) { throw new AttributeExpressionLanguageParsingException("EXPRESSION tree node has no children"); } // Index of the function child this call is responsible for.
final int firstChildIndex = tree.getChildCount() - offset - 1; // Base case: every function consumed; only the subject (child 0) remains.
if (firstChildIndex == 0) { return buildEvaluator(tree.getChild(0)); } final Tree functionTree = tree.getChild(firstChildIndex); // Recurse to build the subject: everything to the left of this function.
final Evaluator<?> subjectEvaluator = buildFunctionExpressionEvaluator(tree, offset + 1); final Tree functionNameTree = functionTree.getChild(0); // Children 1..n of the function node are its argument expressions.
final List<Evaluator<?>> argEvaluators = new ArrayList<>(); for (int i = 1; i < functionTree.getChildCount(); i++) { argEvaluators.add(buildEvaluator(functionTree.getChild(i))); } return buildFunctionEvaluator(functionNameTree, subjectEvaluator, argEvaluators); }
if (selectExprs != null) { for (int i = 0; i < selectExprs.getChildCount(); ++i) { if (((ASTNode) selectExprs.getChild(i)).getToken().getType() == HiveParser.QUERY_HINT) { continue; ASTNode grpbyExpr = (ASTNode) selectExprs.getChild(i).getChild(0); result.add(grpbyExpr); if (grpByExprs != null) { for (int i = 0; i < grpByExprs.getChildCount(); ++i) { ASTNode grpbyExpr = (ASTNode) grpByExprs.getChild(i); if (grpbyExpr.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) { result.add(grpbyExpr);