/**
 * Returns this node's children as a list of {@link Node} objects.
 *
 * @return a freshly allocated list of the children, or {@code null} when this
 *         node has no children (callers are expected to handle the null).
 */
@Override
public ArrayList<Node> getChildren() {
  final int count = super.getChildCount();
  if (count == 0) {
    return null;
  }
  // Presize to the known child count; each child is cast down to Node.
  final ArrayList<Node> children = new ArrayList<Node>(count);
  for (int idx = 0; idx < count; idx++) {
    children.add((Node) super.getChild(idx));
  }
  return children;
}
/**
 * Collects the children of this node into a list.
 *
 * @return list of child nodes cast to {@link Node}, or {@code null} if this
 *         node is a leaf (zero children).
 */
@Override
public ArrayList<Node> getChildren() {
  if (super.getChildCount() == 0) {
    // Leaf node: the existing contract is to signal "no children" with null.
    return null;
  }
  ArrayList<Node> result = new ArrayList<Node>();
  int position = 0;
  while (position < super.getChildCount()) {
    result.add((Node) super.getChild(position));
    position++;
  }
  return result;
}
/** * Get the partition specs from the tree. * * @param ast * Tree to extract partitions from. * @return A list of partition name to value mappings. * @throws SemanticException */ private List<Map<String, String>> getPartitionSpecs(Table tbl, CommonTree ast) throws SemanticException { List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>(); int childIndex = 0; // get partition metadata if partition specified for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { ASTNode partSpecNode = (ASTNode)ast.getChild(childIndex); // sanity check if (partSpecNode.getType() == HiveParser.TOK_PARTSPEC) { Map<String,String> partSpec = getValidatedPartSpec(tbl, partSpecNode, conf, false); partSpecs.add(partSpec); } } return partSpecs; }
/** * Get the partition specs from the tree. * * @param ast * Tree to extract partitions from. * @return A list of partition name to value mappings. * @throws SemanticException */ private List<Map<String, String>> getPartitionSpecs(Table tbl, CommonTree ast) throws SemanticException { List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>(); int childIndex = 0; // get partition metadata if partition specified for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { ASTNode partSpecNode = (ASTNode)ast.getChild(childIndex); // sanity check if (partSpecNode.getType() == HiveParser.TOK_PARTSPEC) { Map<String,String> partSpec = getValidatedPartSpec(tbl, partSpecNode, conf, false); partSpecs.add(partSpec); } } return partSpecs; }
/**
 * Splits the text of {@code paramListToken} into sub-tokens and wraps each
 * one in a child of a new {@link CommonTree}.
 *
 * <p>Fix: removed the dead local {@code startIndex} — the result of
 * {@code paramListToken.getStartIndex()} was computed but never used.
 *
 * @param paramListToken token whose text contains the whole parameter list
 * @return tree with one child per parameter sub-token, or {@code null} when
 *         the parameter list produced no sub-tokens
 */
private CommonTree parseParamList(CommonToken paramListToken) {
  String paramList = paramListToken.getText();
  CommonTree root = new CommonTree();
  // Carve one sub-token at a time; each sub-token's text length tells us
  // where the next one begins.
  int i = 0;
  while (i < paramList.length()) {
    CommonToken token = getParamListSubToken(paramListToken, paramList, i);
    root.addChild(new CommonTree(token));
    i += token.getText().length();
  }
  // An empty parameter list yields no children; signal that with null.
  if (root.getChildCount() == 0) {
    return null;
  }
  return root;
}
// Fragment of an iterative tree walk: peek the next node; when it is an
// unvisited non-first child of a multi-child parent, emit a space separator
// into the memoized string before descending.
// NOTE(review): snippet is truncated — the enclosing loop/method and the
// matching closing braces are not visible here.
ASTNode next = stack.peek(); if (!next.visited) { if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent.getChild(0)) { rootNode.addtoMemoizedString(" ");
// Fragment (duplicate of the snippet above in this view): before visiting a
// node that is not its parent's first child, append a " " separator so the
// memoized rendering keeps siblings apart.
// NOTE(review): truncated snippet — braces do not balance within this view.
ASTNode next = stack.peek(); if (!next.visited) { if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent.getChild(0)) { rootNode.addtoMemoizedString(" ");
private void findTableNames(final Object obj, final Set<TableName> tableNames) { if (!(obj instanceof CommonTree)) { return; } final CommonTree tree = (CommonTree) obj; final int childCount = tree.getChildCount(); if ("TOK_TABNAME".equals(tree.getText())) { final TableName tableName; final boolean isInput = "TOK_TABREF".equals(tree.getParent().getText()); switch (childCount) { case 1 : tableName = new TableName(null, tree.getChild(0).getText(), isInput); break; case 2: tableName = new TableName(tree.getChild(0).getText(), tree.getChild(1).getText(), isInput); break; default: throw new IllegalStateException("TOK_TABNAME does not have expected children, childCount=" + childCount); } // If parent is TOK_TABREF, then it is an input table. tableNames.add(tableName); return; } for (int i = 0; i < childCount; i++) { findTableNames(tree.getChild(i), tableNames); } }
private void findTableNames(final Object obj, final Set<TableName> tableNames) { if (!(obj instanceof CommonTree)) { return; } final CommonTree tree = (CommonTree) obj; final int childCount = tree.getChildCount(); if ("TOK_TABNAME".equals(tree.getText())) { final TableName tableName; final boolean isInput = "TOK_TABREF".equals(tree.getParent().getText()); switch (childCount) { case 1 : tableName = new TableName(null, tree.getChild(0).getText(), isInput); break; case 2: tableName = new TableName(tree.getChild(0).getText(), tree.getChild(1).getText(), isInput); break; default: throw new IllegalStateException("TOK_TABNAME does not have expected children, childCount=" + childCount); } // If parent is TOK_TABREF, then it is an input table. tableNames.add(tableName); return; } for (int i = 0; i < childCount; i++) { findTableNames(tree.getChild(i), tableNames); } }
/**
 * Verify that the information in the metastore matches up with the data on
 * the fs.
 *
 * @param ast query tree; children are an optional KW_REPAIR keyword, an
 *            optional table name, and optional partition specs
 * @throws SemanticException if the table lookup or spec extraction fails
 */
private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException {
  String tableName = null;
  boolean repair = false;
  if (ast.getChildCount() > 0) {
    // Child 0 is either the REPAIR keyword or the table name itself; when
    // REPAIR is present the table name (if any) shifts to child 1.
    repair = ast.getChild(0).getType() == HiveParser.KW_REPAIR;
    final int nameIndex = repair ? 1 : 0;
    if (ast.getChildCount() > nameIndex) {
      tableName = getUnescapedName((ASTNode) ast.getChild(nameIndex));
    }
  }
  final Table tab = getTable(tableName);
  final List<Map<String, String>> specs = getPartitionSpecs(tab, ast);
  outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
  final MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile(), repair);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), checkDesc), conf));
}
// Fragment of MSCK argument parsing: child 0 may be KW_REPAIR; the table
// name and the optional ADD/DROP PARTITIONS token follow at indices shifted
// by one when REPAIR is present.
// NOTE(review): truncated snippet — braces do not balance within this view,
// and the second `else if (ast.getChildCount() > 1)` arm is unreachable as
// written here because the first branch tests the same condition; confirm
// against the full method.
if (ast.getChildCount() > 0) { repair = ast.getChild(0).getType() == HiveParser.KW_REPAIR; if (!repair) { tableName = getUnescapedName((ASTNode) ast.getChild(0)); if (ast.getChildCount() > 1) { addPartitions = isMsckAddPartition(ast.getChild(1).getType()); dropPartitions = isMsckDropPartition(ast.getChild(1).getType()); } else if (ast.getChildCount() > 1) { tableName = getUnescapedName((ASTNode) ast.getChild(1)); if (ast.getChildCount() > 2) { addPartitions = isMsckAddPartition(ast.getChild(2).getType()); dropPartitions = isMsckDropPartition(ast.getChild(2).getType());
// Fragment: iterate the AST's children, skipping anything that is not a
// TOK_PARTSPEC node. NOTE(review): truncated — loop body/closing braces are
// outside this view.
for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { Tree partSpecTree = ast.getChild(childIndex); if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) {
// Fragment: tail of a conditional WriteEntity registration (the DDL_SHARED
// arm of a ternary), then cache the child count and compute the first child
// to process — child 0 is skipped when IF NOT EXISTS was specified.
// NOTE(review): starts mid-expression; the opening of the statement is not
// visible in this view.
: WriteEntity.WriteType.DDL_SHARED)); int numCh = ast.getChildCount(); int start = ifNotExists ? 1 : 0;
// Fragment: register the table as a shared-DDL write output, then cache the
// child count and choose the first child index to process (skip child 0
// when IF NOT EXISTS was specified).
// NOTE(review): three statements excerpted from a larger method not visible
// in this view.
outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED)); int numCh = ast.getChildCount(); int start = ifNotExists ? 1 : 0;
// Fragment: walk the AST's children and continue past any child that is not
// a TOK_PARTSPEC node. NOTE(review): truncated — the rest of the loop body
// is outside this view.
for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { Tree partSpecTree = ast.getChild(childIndex); if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue;
// Fragment: scan the root node's children looking for the TOK_FROM clause.
// NOTE(review): truncated — the handling of the matched child is outside
// this view.
for (int j = 0; j < rootNode.getChildCount(); ++j) { Tree child = rootNode.getChild(j); if (child.getType() == HiveParser.TOK_FROM) {
// Fragment (duplicate of the snippet above in this view): locate the
// TOK_FROM child of the query root. NOTE(review): truncated — loop body
// continues beyond this view.
for (int j = 0; j < rootNode.getChildCount(); ++j) { Tree child = rootNode.getChild(j); if (child.getType() == HiveParser.TOK_FROM) {
/**
 * Reports whether {@code optionsNode} has a direct child whose text matches
 * {@code name}, ignoring case.
 *
 * @param optionsNode tree whose children are the option nodes
 * @param name        option name to look for (case-insensitive)
 * @return {@code true} if a matching option child exists
 */
private boolean hasOption(CommonTree optionsNode, String name) {
  final int optionCount = optionsNode.getChildCount();
  for (int idx = 0; idx < optionCount; idx++) {
    final CommonTree candidate = (CommonTree) optionsNode.getChild(idx);
    if (name.equalsIgnoreCase(candidate.getText())) {
      return true;
    }
  }
  return false;
}
/**
 * Counts the alternatives (ALT children) inside the BLOCK subtree of a rule.
 *
 * <p>Fix: {@code getFirstChildWithType(BLOCK)} returns {@code null} when the
 * rule has no BLOCK child, and the original then threw a
 * {@link NullPointerException}; a rule without a BLOCK now reports zero
 * alternatives.
 *
 * @param t rule subtree expected to contain a BLOCK child
 * @return number of ALT children of the BLOCK, or 0 if there is no BLOCK
 */
public final int countAltsForRule(CommonTree t) {
  CommonTree block = (CommonTree) t.getFirstChildWithType(BLOCK);
  if (block == null) {
    // No BLOCK child: nothing to count.
    return 0;
  }
  int altCount = 0;
  for (int i = 0; i < block.getChildCount(); i++) {
    if (block.getChild(i).getType() == ALT) {
      altCount++;
    }
  }
  return altCount;
}
/**
 * Recursively renders {@code tree} into {@code buffer}: the node's text is
 * appended first, then each child is rendered between a prefix and suffix
 * marker, with the offset increased by one per level of depth.
 *
 * @param buffer accumulator for the rendered tree
 * @param tree   subtree to render
 * @param offset current nesting depth, forwarded to the prefix/suffix writers
 */
public void addTree(StringBuffer buffer, CommonTree tree, int offset) {
  buffer.append(tree.getText());
  final int childCount = tree.getChildCount();
  for (int childIndex = 0; childIndex < childCount; childIndex++) {
    addPrefix(buffer, offset);
    addTree(buffer, (CommonTree) tree.getChild(childIndex), offset + 1);
    addSuffix(buffer, offset);
  }
}